index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
34,619,072
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_race_explorer.py
|
import unittest
import json
import rh.app.race_explorer_core as racex
class RaceExplorerTest(unittest.TestCase):
    '''Fixture-driven round-trip tests for rh.app.race_explorer_core.'''

    def test_pilot_results(self):
        with open('tests/test_result_msgs.json') as f:
            msgs = json.load(f)['messages']
        results = racex.pilot_results(msgs)
        # round-trip through JSON so numeric dict keys become strings,
        # matching how the expected fixture was serialized
        actual = json.loads(json.dumps(results))
        with open('tests/test_results.json') as f:
            expected = json.load(f)
        self.assertDictEqual(actual, expected)

    def test_calculate_metrics(self):
        with open('tests/test_results.json') as f:
            results = json.load(f)
        with open('tests/test_results_event.json') as f:
            event_data = json.load(f)
        actual = racex.calculate_metrics(results, event_data)
        with open('tests/test_results_metrics.json') as f:
            expected = json.load(f)
        self.maxDiff = None
        self.assertDictEqual(actual, expected)


if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,073
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/vrx/mqtt_topics.py
|
'''Established MQTT topic definitions for ClearView video receivers.

Each topic is a (topic_pattern, substitution) tuple: the pattern may contain a
printf-style placeholder, and the second element names the variable substituted
in later (None when the topic takes no parameter; "+" is the MQTT single-level
wildcard used for subscriptions).
'''
import json
#########################
# Established MQTT Topics
#########################
# These topics contain the topic headers and then what variables are substituted in later.
# If the topic doesn't have any substitute params, 2nd param is None
### ClearView Topics v1 ###
# Publish Topics
# Send command to all receivers
receiver_command_all = ("rx/cv1/cmd_all", None)
# Send command to a seat_number
receiver_command_seat_topic = ("rx/cv1/cmd_seat/%d","seat_number")
# Send command to a specific receiver
receiver_command_targeted_topic = ("rx/cv1/cmd_target/%s","receiver_serial_num")
# Send a kick command to a specific receiver (leave the network)
receiver_kick_topic = ("rx/cv1/kick/%s","receiver_serial_num")
# Send an active promotion or demotion to a specific receiver
receiver_active_topic = ("rx/cv1/active/%s","receiver_serial_num")
# Make a request to all receivers (all receivers reply)
receiver_request_all_topic = ("rx/cv1/req_all", None)
# Make a request to all seats at a seat number (all seats at that seat_number reply)
receiver_request_seat_all_topic = ("rx/cv1/req_seat_all/%d", "seat_number")
# Make a request to the active seat at a seat index
# Only the active receiver at that seat replies
receiver_request_seat_active_topic = ("rx/cv1/req_seat_active/%d", "seat_number")
# Make a request to a specific receiver
receiver_request_targeted_topic = ("rx/cv1/req_target/%s","receiver_serial_num")
# All command topic for ESP commands
receiver_command_esp_all = ("rx/cv1/cmd_esp_all", None)
# Send command to a seat_number
receiver_command_esp_seat_topic = ("rx/cv1/cmd_esp_seat/%d","seat_number")
# Send command to a specific receiver
receiver_command_esp_targeted_topic = ("rx/cv1/cmd_esp_target/%s","receiver_serial_num")
# Subscribe Topics
# Response for all
receiver_response_all_topic = ("rx/cv1/resp_all", None)
# Response for a seat number
receiver_response_seat_topic = ("rx/cv1/resp_seat/%s", "+")
# Response for a specific receiver
receiver_response_targeted_topic = ("rx/cv1/resp_target/%s", "+")
# Connection status for receivers
receiver_connection_topic = ("rxcn/%s", "+")
# Receiver static status
receiver_status_static_topic = ("status_static/%s", "+")
# Request variable status
receiver_status_variable_topic = ("status_variable/%s", "+")
# Topics this server publishes to, keyed by protocol version then topic name.
mqtt_publish_topics = {
    "cv1" :
    {
        "receiver_command_all":receiver_command_all,
        "receiver_command_seat_topic":receiver_command_seat_topic,
        "receiver_command_targeted_topic":receiver_command_targeted_topic,
        "receiver_request_all_topic":receiver_request_all_topic,
        "receiver_request_seat_all_topic":receiver_request_seat_all_topic,
        "receiver_request_seat_active_topic":receiver_request_seat_active_topic,
        "receiver_request_targeted_topic":receiver_request_targeted_topic,
        "receiver_kick_topic":receiver_kick_topic,
        "receiver_command_esp_all_topic": receiver_command_esp_all,
        "receiver_command_esp_seat_topic":receiver_command_esp_seat_topic,
        "receiver_command_esp_targeted_topic": receiver_command_esp_targeted_topic,
    }
}
# Topics this server subscribes to, keyed by protocol version then topic name.
mqtt_subscribe_topics = {
    "cv1" :
    {
        "receiver_response_all":receiver_response_all_topic,
        "receiver_response_seat":receiver_response_seat_topic,
        "receiver_connection":receiver_connection_topic,
        "receiver_response_targeted":receiver_response_targeted_topic,
        "receiver_static_status":receiver_status_static_topic,
        "receiver_variable_status":receiver_status_variable_topic
    }
}
# Pre-serialized JSON payloads for querying ESP-based receivers; "?" marks a
# field the receiver is expected to fill in on its reply.
ESP_COMMANDS = {
    "Request Static Status" : json.dumps(
        {"cv_version": "?",
         "cvcm_version": "?",
         "mac_addr": "?",
         "device_type": "?"}
    ),
    "Request Variable Status" : json.dumps(
        {"seat": "?",
         "device_name": "?",
         "video_format": "?",
         "ip_addr": "?"}
    )
}
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,074
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/leds/led_handler_bitmap.py
|
'''LED visual effects'''
# to use this handler, run:
# sudo apt-get install libjpeg-dev
# sudo pip install pillow
from . import setPixels, millis_to_secs
from rh.events.eventmanager import Evt
from rh.events.led_event_manager import LEDEffect
import gevent
from PIL import Image
IMAGE_PATH = 'rh/static/image/'

def showBitmap(args):
    '''Render a sequence of bitmap images onto the LED panel.

    Expected keys in args:
      strip -- LED strip object (required; returns False when absent)
      bitmaps -- list of {'image': path, 'delay': milliseconds} entries
      panelRotate, ledRows, invertedPanelRows -- panel geometry settings
    '''
    strip = args.get('strip')
    if strip is None:
        # no strip to draw on (also treats an explicit None the same as absent)
        return False
    bitmaps = args['bitmaps']
    # original tested `bitmaps and bitmaps is not None` - the second clause
    # was redundant, a simple truthiness check suffices
    if bitmaps:
        for bitmap in bitmaps:
            img = Image.open(bitmap['image'])
            delay_ms = bitmap['delay']
            # rotate in 90-degree steps per panel mounting orientation,
            # then scale the image to the panel's pixel grid
            img = img.rotate(90 * args['panelRotate'])
            img = img.resize((strip.numPixels() // args['ledRows'], args['ledRows']))
            setPixels(strip, img, args['invertedPanelRows'])
            strip.show()
            gevent.sleep(millis_to_secs(delay_ms))
def discover(config, *args, **kwargs):
    '''Build the stock bitmap LED effects (one LEDEffect per bundled image).'''
    # geometry settings shared by every bitmap effect
    panel_cfg = {
        'ledRows': config['LED_ROWS'],
        'panelRotate': config['PANEL_ROTATE'],
        'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
    }
    # (effect id, label, image file name, recommended events, display time)
    specs = [
        ("bitmapRHLogo", "Image: RotorHazard",
         "LEDpanel-16x16-RotorHazard.png", [Evt.STARTUP], 60),
        ("bitmapOrangeEllipsis", "Image: Orange Ellipsis",
         "LEDpanel-16x16-ellipsis.png", [Evt.RACE_STAGE], 8),
        ("bitmapGreenArrow", "Image: Green Upward Arrow",
         "LEDpanel-16x16-arrow.png", [Evt.RACE_START], 8),
        ("bitmapRedX", "Image: Red X",
         "LEDpanel-16x16-X.png", [Evt.RACE_STOP], 8),
        ("bitmapCheckerboard", "Image: Checkerboard",
         "LEDpanel-16x16-checkerboard.png", [Evt.RACE_FINISH, Evt.RACE_STOP], 20),
    ]
    effects = []
    for effect_id, label, image_file, recommended, time_s in specs:
        effect_args = dict(panel_cfg)
        effect_args['bitmaps'] = [{"image": IMAGE_PATH + image_file, "delay": 0}]
        effect_args['time'] = time_s
        effects.append(LEDEffect(effect_id, label, showBitmap, {
            'include': [Evt.SHUTDOWN],
            'recommended': recommended
        }, effect_args))
    return effects
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,075
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_database.py
|
import unittest
from rh.app import Database
class DatabaseTest(unittest.TestCase):
    '''Checks sqlite URI construction for POSIX- and Windows-style paths.'''

    def test_db_uri_1(self):
        # a relative directory is rendered with the absolute sqlite://// prefix
        self.assertEqual(
            Database.db_uri('path with space', 'file.db'),
            'sqlite:////path with space/file.db')

    def test_db_uri_2(self):
        # an already-absolute directory yields the same URI
        self.assertEqual(
            Database.db_uri('/path with space', 'file.db'),
            'sqlite:////path with space/file.db')

    def test_db_uri_3(self):
        # Windows drive paths keep the drive letter and use forward slashes
        self.assertEqual(
            Database.db_uri('c:\\path with space', 'file.db'),
            'sqlite:///c:/path with space/file.db')


if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,076
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/sensors/__init__.py
|
'''Sensor class for the hardware interface.'''
from rh.util.Plugins import Plugins
import logging
logger = logging.getLogger(__name__)
def Reading(units):
    '''Decorator factory: tag a sensor method with the units of its reading.

    Sensor.getMeasures()/getReadings() discover measures by looking for a
    `units` attribute on methods, which this decorator attaches.
    '''
    def attach_units(func):
        func.units = units
        return func
    return attach_units
class Sensor:
    '''Base class for hardware sensors.

    A "measure" is any method carrying a `units` attribute (see the Reading
    decorator); measures are discovered reflectively via dir().
    '''

    def __init__(self, url, name):
        self.url = url
        self.name = name
        self.description = ''

    def getMeasures(self):
        '''Return the names of all measure methods on this sensor.'''
        return [func.__name__ for func in self._measure_funcs()]

    def getReadings(self):
        '''Return {measure name: {'value': ..., 'units': ...}} for every
        measure whose current value is not None.'''
        readings = {}
        for func in self._measure_funcs():
            value = func()
            if value is not None:
                readings[func.__name__] = {'value': value, 'units': func.units}
        return readings

    def _measure_funcs(self):
        # yield every attribute tagged with `units` by the Reading decorator
        for attr_name in dir(self):
            attr = getattr(self, attr_name)
            if hasattr(attr, 'units'):
                yield attr

    def update(self):
        '''Refresh sensor state; no-op in the base class.'''
        pass
class I2CSensor(Sensor):
    '''Sensor attached to an I2C bus.'''

    def __init__(self, name, i2c_addr, i2c_bus):
        # the bus derives the sensor URL from its address
        super().__init__(url=i2c_bus.url_of(i2c_addr), name=name)
        self.i2c_address = i2c_addr
        self.i2c_bus = i2c_bus

    def update(self):
        # run the subclass-provided _readData under the bus helper
        # (NOTE(review): `with_i2c_quietly` presumably serializes bus access and
        # suppresses/logs I2C errors - confirm against i2c_helper)
        self.i2c_bus.with_i2c_quietly(self._readData)
class Sensors(Plugins):
    '''Plugin registry of discovered sensors.

    Environmental updates are spread over calls: each call refreshes only
    half of the sensors (alternating by index parity).
    '''

    def __init__(self):
        super().__init__(suffix='sensor')
        # call counter for update_environmental_data(); its parity selects
        # which half of the sensor list gets refreshed
        self.environmental_data_update_tracker = 0

    def _post_discover(self):
        for sensor in self:
            logger.info("{} ({}): {} ({})".format(sensor.name, sensor.url, sensor.description, ', '.join(sensor.getMeasures())))

    def update_environmental_data(self):
        '''Updates environmental data.'''
        self.environmental_data_update_tracker += 1
        wanted_parity = self.environmental_data_update_tracker % 2
        for idx, sensor in enumerate(self.data):
            # even-indexed sensors on one call, odd-indexed on the next,
            # spreading the I/O cost across calls
            if idx % 2 == wanted_parity:
                sensor.update()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,077
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/MockInterface.py
|
'''Mock hardware interface layer.'''
import os
import gevent
import logging
import json
import random
from rh.util.RHUtils import FREQUENCY_ID_NONE
from rh.util import ms_counter
from .BaseHardwareInterface import BaseHardwareInterface
from .RHInterface import TIMER_MODE, SCANNER_MODE, RSSI_HISTORY_MODE, RHNodeManager, RHNode, \
DEFAULT_RECORD_FORMAT, BINARY_RECORD_FORMAT, \
READ_RSSI, READ_RSSI_STATS, READ_ENTER_STATS, READ_EXIT_STATS, READ_LAP_STATS, READ_ANALYTICS
logger = logging.getLogger(__name__)
class MockNodeManager(RHNodeManager):
    '''Node manager backed by no hardware; reports fixed mock identity data.'''
    TYPE = "Mock"

    def __init__(self, index):
        super().__init__()
        self.api_level = 0
        self.max_rssi_value = 255
        self.addr = 'mock:{}'.format(index)
        self.firmware_version_str = 'Mock'
        self.firmware_proctype_str = 'Mock'
        self.firmware_timestamp_str = ''

    def _create_node(self, index, multi_node_index):
        # factory hook invoked by RHNodeManager when a node is added
        return MockNode(index, multi_node_index, self)
class MockNode(RHNode):
    '''RHNode with canned trigger thresholds and an optional replay-data file.'''

    def __init__(self, index, multi_node_index, manager):
        super().__init__(index, multi_node_index, manager)
        # fixed enter/exit RSSI thresholds for the simulated node
        self.enter_at_level = 20
        self.exit_at_level = 15
        # file handle for replaying recorded node data (set by MockInterface.start)
        self.data_reader = None
class MockInterface(BaseHardwareInterface):
    '''Hardware interface that simulates nodes without touching hardware.

    Nodes can be fed from recorded data files (`use_datafiles`) or from
    random values (`use_random`); otherwise they are idle.
    '''

    def __init__(self, num_nodes=8, use_random=False, use_datafiles=False, *args, **kwargs):
        super().__init__(update_sleep=0.5)
        # loop times above this threshold are logged as abnormal
        self.warn_loop_time = kwargs.get('warn_loop_time', 1500)
        self.use_random = use_random
        self.use_datafiles = use_datafiles
        # replay record format, overridable via environment
        self.data_logger_format = os.environ.get('RH_RECORD_FORMAT', DEFAULT_RECORD_FORMAT)
        for index in range(num_nodes):
            manager = MockNodeManager(index)
            node = manager.add_node(index)  # New node instance
            self.node_managers.append(manager)
            self.nodes.append(node)

    def start(self):
        '''Open any per-node replay data files, then start the update loop.'''
        if self.use_datafiles:
            for node in self.nodes:
                if node.data_reader is None:
                    # binary records need a bytes handle; text records a str one
                    file_format = 'b' if self.data_logger_format == BINARY_RECORD_FORMAT else 't'
                    try:
                        f = open("mock_data_{}.{}".format(node.index+1, self.data_logger_format), 'r'+file_format)
                        logger.info("Loaded {}".format(f.name))
                    except IOError:
                        # missing file: node simply produces no replay data
                        f = None
                    node.data_reader = f
        super().start()

    def stop(self):
        '''Stop the update loop and close any open replay files.'''
        super().stop()
        for node in self.nodes:
            f = node.data_reader
            if f is not None:
                f.close()
                logger.info("Closed {}".format(f.name))
                node.data_reader = None

    #
    # Update Loop
    #

    def _update(self):
        '''One polling pass: feed each node scanner data, replayed record
        data, or random data, then sleep its share of the update interval.'''
        node_sleep_interval = self.update_sleep/max(len(self.nodes), 1)
        if self.nodes:
            for node in self.nodes:
                if node.scan_enabled and callable(self.read_scan_history):
                    freqs, rssis = self.read_scan_history(node.index)
                    for freq, rssi in zip(freqs, rssis):
                        node.scan_data[freq] = rssi
                elif node.frequency:
                    # mock hardware answers instantly
                    server_roundtrip_ms = 0
                    node._roundtrip_stats.append(server_roundtrip_ms)
                    now_ms = ms_counter()
                    data_file = node.data_reader
                    if data_file is not None:
                        if self.data_logger_format == BINARY_RECORD_FORMAT:
                            cmd, cmd_values = self._read_binary_record(node, data_file, now_ms)
                        else:
                            cmd, cmd_values = self._read_text_record(data_file, now_ms)
                        self._dispatch(node, cmd, cmd_values)
                    elif self.use_random:
                        self.is_new_lap(node, now_ms, random.randint(0, 20), 0, False)
                        self.process_analytics(node, now_ms, random.randint(-5, 5), 1000, None, None, None)
                    self.process_capturing(node)
                    self._restore_lowered_thresholds(node)
                    if node.loop_time > self.warn_loop_time:
                        logger.warning("Abnormal loop time for node {}: {}us ({})".format(node, node.loop_time, node._loop_time_stats.formatted(0)))
                gevent.sleep(node_sleep_interval)
        else:
            gevent.sleep(node_sleep_interval)

    def _read_binary_record(self, node, data_file, now_ms):
        '''Read one binary record (cmd byte, length byte, payload) and unpack
        it; wraps the file at EOF so replay loops forever.'''
        cmd = data_file.read(1)
        if not cmd:
            # EOF: a binary read() yields b''. The previous check compared
            # against the str '' (never equal to bytes), so the wraparound
            # branch was unreachable.
            data_file.seek(0)
            cmd = data_file.read(1)
        cmd_size = data_file.read(1)
        # convert the length byte to int: read() rejects a bytes argument
        cmd_data = data_file.read(cmd_size[0])
        node.io_response_ms = node.io_request_ms = now_ms
        # NOTE(review): assumes the READ_* constants compare equal to the raw
        # 1-byte cmd value read here - confirm against RHInterface's writer
        if cmd == READ_RSSI:
            cmd_values = node.unpack_rssi(cmd_data)
        elif cmd == READ_ENTER_STATS or cmd == READ_EXIT_STATS:
            cmd_values = node.unpack_trigger_stats(cmd, cmd_data)
        elif cmd == READ_LAP_STATS:
            cmd_values = node.unpack_lap_stats(cmd_data)
        elif cmd == READ_ANALYTICS:
            cmd_values = node.unpack_analytics(cmd_data)
        elif cmd == READ_RSSI_STATS:
            cmd_values = node.unpack_rssi_stats(cmd_data)
        else:
            raise ValueError("Unsupported command: {}".format(cmd))
        return cmd, cmd_values

    def _read_text_record(self, data_file, now_ms):
        '''Read one JSON-lines record; wraps the file at EOF and rebases the
        recorded timestamps onto the current clock.'''
        data_line = data_file.readline()
        if data_line == '':
            # EOF: restart the file so replay loops forever
            data_file.seek(0)
            data_line = data_file.readline()
        json_data = json.loads(data_line)
        cmd = json_data['cmd']
        cmd_values = json_data['data']
        if cmd == READ_RSSI:
            cmd_values = [now_ms] + cmd_values
        elif cmd == READ_ENTER_STATS or cmd == READ_EXIT_STATS:
            cmd_values[1] = now_ms - cmd_values[1]
        elif cmd == READ_LAP_STATS:
            cmd_values[1] = now_ms - cmd_values[1]
        elif cmd == READ_ANALYTICS:
            cmd_values[4] = now_ms - cmd_values[4]
            cmd_values = [now_ms] + cmd_values
        return cmd, cmd_values

    def _dispatch(self, node, cmd, cmd_values):
        '''Route an unpacked record to the matching processor method.'''
        if cmd == READ_RSSI:
            self.is_new_lap(node, *cmd_values)
        elif cmd == READ_ENTER_STATS:
            self.process_enter_trigger(node, *cmd_values)
        elif cmd == READ_EXIT_STATS:
            self.process_exit_trigger(node, *cmd_values)
        elif cmd == READ_LAP_STATS:
            self.process_lap_stats(node, *cmd_values)
        elif cmd == READ_ANALYTICS:
            self.process_analytics(node, *cmd_values)
        elif cmd == READ_RSSI_STATS:
            self.process_rssi_stats(node, *cmd_values)
        else:
            raise ValueError("Unsupported command: {}".format(cmd))

    #
    # External functions for setting data
    #

    def set_mode(self, node_index, mode):
        node = self.nodes[node_index]
        node.mode = mode

    def set_frequency_scan(self, node_index, scan_enabled):
        '''Frequency scanning protocol'''
        node = self.nodes[node_index]
        if scan_enabled != node.scan_enabled:
            if scan_enabled:
                node.scan_enabled = scan_enabled
                # park the current frequency so it can be restored later
                node.saved_frequency = node.frequency
                self.set_frequency(node_index, FREQUENCY_ID_NONE)
                # reset/clear data
                node.scan_data = {}
                self.set_mode(node_index, SCANNER_MODE)
            else:
                self.set_mode(node_index, TIMER_MODE)
                # reset/clear data
                node.scan_data = {}
                # restore original frequency
                self.set_frequency(node_index, node.saved_frequency)
                del node.saved_frequency
                node.scan_enabled = scan_enabled

    def read_scan_history(self, node_index):
        '''Return a random sweep over 5645-5940 MHz in 5 MHz steps.'''
        freqs = list(range(5645, 5945, 5))
        rssis = [random.randint(0, 200) for f in freqs]
        return freqs, rssis

    def read_rssi_history(self, node_index):
        '''Return 16 random RSSI samples.'''
        return [random.randint(0, 200) for _ in range(16)]

    def send_status_message(self, msgTypeVal, msgDataVal):
        # mock hardware has nowhere to send messages
        return False

    def send_shutdown_button_state(self, stateVal):
        return False

    def send_shutdown_started_message(self):
        return False

    def send_server_idle_message(self):
        return False
def get_hardware_interface(*args, **kwargs):
    '''Returns the interface object.'''
    logger.info('Using mock hardware interface')
    # node count is configurable via the environment, defaulting to 8
    node_count = int(os.environ.get('RH_NODES', '8'))
    return MockInterface(*args, num_nodes=node_count, use_datafiles=True, **kwargs)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,078
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/scanner.py
|
import gevent.monkey
gevent.monkey.patch_all()
import logging
import sys
from flask import Flask, render_template
from flask_socketio import SocketIO
import webbrowser
from . import get_interface
from rh.interface import RHInterface
def scan(port, socket):
    '''Open the interface on `port`, switch every node into scanner mode and
    spawn a greenlet that streams scan data over the socket. Returns the
    interface.'''
    interface = get_interface(port)
    for node in interface.nodes:
        interface.set_mode(node.index, RHInterface.SCANNER_MODE)

    def pump_scan_data():
        # forever: poll each node's scan history and push it to the browser
        while True:
            for node in interface.nodes:
                freqs, rssis = interface.read_scan_history(node.index)
                if freqs and rssis:
                    socket.emit('scan_data', {'node' : node.index, 'frequency' : freqs, 'rssi' : rssis})
                gevent.sleep(0.1)

    gevent.spawn(pump_scan_data)
    return interface
def start(com_port, web_port = 5080):
    '''Launch the scanner web UI on `web_port` and begin scanning on the
    hardware attached to `com_port`.'''
    app = Flask(__name__, template_folder='../templates', static_folder='../static', static_url_path='/static')
    socket_io = SocketIO(app, async_mode='gevent', cors_allowed_origins='*')
    interface = scan(com_port, socket_io)

    @app.route('/')
    def scanner():
        return render_template('scannerapp.html', num_nodes=len(interface.nodes), __=lambda s: s)

    print("Running http server at port {0}".format(web_port))

    def open_browser():
        webbrowser.open('http://127.0.0.1:' + str(web_port))

    # pop a browser window once the server is up
    gevent.spawn(open_browser)
    try:
        socket_io.run(app, host='0.0.0.0', port=web_port, debug=True, use_reloader=False)
    except KeyboardInterrupt:
        print("Server terminated by keyboard interrupt")
    except Exception as ex:
        print("Server exception: {0}".format(ex))
    interface.close()
# Start HTTP server
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # quieten the chatty networking libraries
    logging.getLogger('socketio').setLevel(logging.WARN)
    logging.getLogger('engineio').setLevel(logging.WARN)
    logging.getLogger('geventwebsocket').setLevel(logging.WARN)
    if len(sys.argv) < 2:
        print('Please specify serial port, e.g. COM12 (or I2C address, e.g. i2c:1/0x08, or socket port, e.g. :5005).')
        # sys.exit instead of the site-provided exit() builtin, and signal
        # the usage error to the calling shell with a non-zero status
        sys.exit(1)
    port = sys.argv[1]
    start(port)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,079
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/Plugins.py
|
'''Generic plugin manager'''
import logging
import importlib
import pkgutil
from collections import UserList
logger = logging.getLogger(__name__)
def search_modules(pkg, prefix=None, suffix=None):
    """Import every module of *pkg* whose name matches the optional filters.

    A module qualifies when its name starts with ``prefix + '_'`` (if a
    prefix is given) and ends with ``'_' + suffix`` (if a suffix is given).
    Modules that fail to import are skipped with a debug message, since a
    plugin may simply lack its optional dependencies on this system.

    Returns the list of successfully imported module objects.
    """
    modules = []
    qualified = pkg.__name__ + '.'
    for _finder, name, _is_pkg in pkgutil.iter_modules(pkg.__path__):
        matches_prefix = prefix is None or name.startswith(prefix + '_')
        matches_suffix = suffix is None or name.endswith('_' + suffix)
        if not (matches_prefix and matches_suffix):
            continue
        try:
            module = importlib.import_module(qualified + name)
        except ImportError as ex:
            # best-effort: unsupported plugins are expected, not fatal
            logger.debug('Module {0} not imported (not supported or may require additional dependencies)\n\t{1}'.format(name, ex))
        else:
            modules.append(module)
            logger.info('Loaded module {0}'.format(name))
    return modules
class Plugins(UserList):
    """A list of plugin instances populated by scanning a package.

    The optional ``prefix``/``suffix`` restrict which modules of the
    scanned package are considered (see :func:`search_modules`).
    """

    def __init__(self, prefix=None, suffix=None):
        super().__init__()
        self.prefix = prefix
        self.suffix = suffix

    def discover(self, pkg, includeOffset=False, *args, **kwargs):
        """Scan *pkg* for plugin modules and collect what they provide.

        Each matching module's ``discover(*args, **kwargs)`` is invoked and
        its results are appended. With ``includeOffset`` the current length
        of the list is passed as ``idxOffset`` so plugins can number their
        entries globally.
        """
        for module in search_modules(pkg, prefix=self.prefix, suffix=self.suffix):
            if includeOffset:
                kwargs['idxOffset'] = len(self.data)
            try:
                self.data.extend(module.discover(*args, **kwargs))
            except TypeError as ex:
                # plugin's discover() did not accept our arguments
                logger.debug('Plugin {0} not loaded (not supported - required arguments are not available)\n\t{1}'.format(module.__name__, ex))
            except AttributeError as err:
                # module has no discover() entry point
                logger.error('Error loading plugin {0}: {1}'.format(module.__name__, err))
        self._post_discover()

    def _post_discover(self):
        # hook for subclasses to run after discovery completes
        pass
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,080
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_server.py
|
import os
import unittest
import gevent
from datetime import datetime
import json
from monotonic import monotonic
from rh.util import ms_counter
# Configure the server via environment variables BEFORE importing rh.server:
# the server module reads these at import time.
os.environ['RH_CONFIG'] = 'config-dist.json'
TEST_DB = 'test-database.db'  # throwaway database for this test run
if os.path.isfile(TEST_DB):
    os.remove(TEST_DB)  # start from a clean database every run
os.environ['RH_DATABASE'] = TEST_DB
os.environ['RH_INTERFACE'] = 'Mock'  # use the mock hardware interface
from rh import server
from rh.app import RHRace
import tests as tests_pkg
import logging
logger = logging.getLogger(__name__)
class ServerTest(unittest.TestCase):
    """End-to-end tests that drive the running server through a
    socket.io test client against the mock hardware interface."""

    def setUp(self):
        logger.info('Starting test '+self._testMethodName)
        # reset mock node state so tests don't leak into each other
        for node in server.INTERFACE.nodes:
            node.reset()
        self.client = server.SOCKET_IO.test_client(server.APP)
        gevent.sleep(0.1)
        # consume the initial page-load message and wait until the server
        # heartbeat is flowing before the test body runs
        self.get_response('load_all')
        self.wait_for_response('heartbeat', 1)

    def tearDown(self):
        self.client.disconnect()

    @classmethod
    def setUpClass(cls):
        # one server instance shared by all tests in this class
        server.start()

    @classmethod
    def tearDownClass(cls):
        server.shutdown('Tear down')
        server.stop()

    def get_response(self, event):
        """Return the payload of the most recent received *event*, or fail."""
        responses = self.client.get_received()
        # reversed: prefer the latest occurrence of the event
        for resp in reversed(responses):
            if resp['name'] == event:
                return resp['args'][0]
        self.fail('No response of type {0}'.format(event))

    def get_responses(self, *events):
        """Return all received payloads for the given event names,
        in arrival order, as [{event_name: payload}, ...]."""
        responses = self.client.get_received()
        evt_resps = filter(lambda resp: resp['name'] in events, responses)
        return [{resp['name']: resp['args'][0]} for resp in evt_resps]

    def wait_for_response(self, event, max_wait, filter_func=None):
        """Poll for up to *max_wait* seconds for *event* (optionally
        matching *filter_func* on its payload); fail on timeout."""
        expiry_time = monotonic() + max_wait
        while monotonic() < expiry_time:
            responses = self.client.get_received()
            for resp in reversed(responses):
                if resp['name'] == event and (filter_func is None or filter_func(resp['args'][0])):
                    return resp['args'][0]
            gevent.sleep(0.1)
        self.fail('No response of type {0} within {1}secs'.format(event, max_wait))

    def test_node_data(self):
        resp = self.wait_for_response('node_data', 4)
        # allow_nan=False: payload must be strictly JSON-serializable
        json.dumps(resp, allow_nan=False)

    def test_sensors(self):
        server.SENSORS.clear()
        # discovery: the tests package provides exactly one TestSensor
        server.SENSORS.discover(tests_pkg)
        self.assertEqual(len(server.SENSORS), 1)
        expected_name = 'TestSensor'
        self.assertEqual(server.SENSORS[0].name, expected_name)
        # environmental data
        server.rhconfig.SENSORS['test:/test'] = {
            'max_alarms': {
                'counter': 1
            }
        }
        server.emit_environmental_data()
        resp = self.get_response('environmental_data')
        self.assertEqual(resp[0][expected_name]['counter']['value'], 0)
        # alarms: updating pushes the counter past its max_alarms threshold
        server.SENSORS[0].update()
        server.emit_environmental_data()
        resps = self.get_responses('environmental_data', 'priority_message')
        self.assertEqual(resps[0]['environmental_data'][0][expected_name]['counter']['value'], 1)
        self.assertEqual(resps[1]['priority_message']['key'], expected_name+' counter')

    def test_add_pilot(self):
        self.client.emit('load_data', {'load_types': ['pilot_data']})
        resp = self.get_response('pilot_data')
        num_pilots = len(resp['pilots'])
        # adding with no data should auto-generate name and callsign
        self.client.emit('add_pilot')
        resp = self.get_response('pilot_data')
        self.assertEqual(len(resp['pilots']), num_pilots+1)
        last_pilot = resp['pilots'][-1]
        self.assertGreater(len(last_pilot['name']), 0)
        self.assertGreater(len(last_pilot['callsign']), 0)

    def test_add_pilot_init(self):
        self.client.emit('load_data', {'load_types': ['pilot_data']})
        resp = self.get_response('pilot_data')
        num_pilots = len(resp['pilots'])
        self.client.emit('add_pilot', {'name': 'foobar', 'callsign': 'Test new', 'team': 'Team T'})
        resp = self.get_response('pilot_data')
        self.assertEqual(len(resp['pilots']), num_pilots+1)
        # newest pilot has the highest id
        pilots_by_id = sorted(resp['pilots'], key=lambda p: p['pilot_id'])
        last_pilot = pilots_by_id[-1]
        self.assertEqual(last_pilot['name'], 'foobar')
        self.assertEqual(last_pilot['callsign'], 'Test new')
        self.assertEqual(last_pilot['team'], 'Team T')

    def test_alter_pilot(self):
        for i in range(1, len(server.INTERFACE.nodes)):
            data = {
                'pilot_id': i,
                'callsign': 'Test '+str(i),
                'team_name': 'team T',
                'phonetic': 'Teeest',
                'name': 'Tester'
            }
            self.client.emit('alter_pilot', data)
            self.client.emit('load_data', {'load_types': ['pilot_data']})
            resp = self.get_response('pilot_data')
            # locate the altered pilot in the response
            for item in resp['pilots']:
                if item['pilot_id'] == i:
                    pilot = item
                    break
            self.assertEqual(pilot['callsign'], data['callsign'])
            self.assertEqual(pilot['phonetic'], data['phonetic'])
            self.assertEqual(pilot['name'], data['name'])

    def test_add_profile(self):
        self.client.emit('load_data', {'load_types': ['node_tuning']})
        resp = self.get_response('node_tuning')
        num_profiles = len(resp['profile_ids'])
        self.client.emit('add_profile')
        resp = self.get_response('node_tuning')
        self.assertEqual(len(resp['profile_ids']), num_profiles+1)

    def test_alter_profile(self):
        data = {
            'profile_name': 'Test ' + str(datetime.now()),
            'profile_description': 'Testing'
        }
        self.client.emit('alter_profile', data)
        resp = self.get_response('node_tuning')
        self.assertEqual(resp['profile_name'], data['profile_name'])
        self.assertEqual(resp['profile_description'], data['profile_description'])

    def test_add_race_format(self):
        self.client.emit('load_data', {'load_types': ['race_format']})
        resp = self.get_response('race_format')
        num_formats = len(resp['format_ids'])
        self.client.emit('add_race_format')
        resp = self.get_response('race_format')
        self.assertEqual(len(resp['format_ids']), num_formats+1)

    def test_alter_race_format(self):
        data = {
            'format_name': 'Test ' + str(datetime.now()),
            'race_mode': RHRace.RaceMode.FIXED_TIME,
            'race_time_sec': 33,
            'start_delay_min': 1,
            'start_delay_max': 4,
            'number_laps_win': 5,
            'win_condition': RHRace.WinCondition.FIRST_TO_LAP_X,
            'team_racing_mode': True
        }
        self.client.emit('alter_race_format', data)
        resp = self.get_response('race_format')
        # every submitted field should be echoed back unchanged
        self.assertEqual(resp['format_name'], data['format_name'])
        self.assertEqual(resp['race_mode'], data['race_mode'])
        self.assertEqual(resp['race_time_sec'], data['race_time_sec'])
        self.assertEqual(resp['start_delay_min'], data['start_delay_min'])
        self.assertEqual(resp['start_delay_max'], data['start_delay_max'])
        self.assertEqual(resp['number_laps_win'], data['number_laps_win'])
        self.assertEqual(resp['win_condition'], data['win_condition'])
        self.assertEqual(resp['team_racing_mode'], data['team_racing_mode'])

    def test_add_race_class(self):
        self.client.emit('load_data', {'load_types': ['class_data']})
        resp = self.get_response('class_data')
        num_classes = len(resp['classes'])
        self.client.emit('add_race_class')
        resp = self.get_response('class_data')
        self.assertEqual(len(resp['classes']), num_classes+1)

    def test_alter_race_class(self):
        data = {
            'id': 1,
            'name': 'New name',
            'format_id': 0,
            'description': 'Test class'
        }
        self.client.emit('alter_race_class', data)
        self.client.emit('load_data', {'load_types': ['class_data']})
        resp = self.get_response('class_data')
        self.assertEqual(resp['classes'][0]['name'], data['name'])
        self.assertEqual(resp['classes'][0]['format'], data['format_id'])
        self.assertEqual(resp['classes'][0]['description'], data['description'])

    def test_add_heat(self):
        self.client.emit('load_data', {'load_types': ['heat_data']})
        resp = self.get_response('heat_data')
        num_heats = len(resp['heats'])
        self.client.emit('add_heat')
        resp = self.get_response('heat_data')
        self.assertEqual(len(resp['heats']), num_heats+1)

    def test_alter_heat(self):
        data = {
            'heat': 1,
            'node': 0,
            'pilot': 1,
            'note': 'Test',
            'class': 1
        }
        self.client.emit('alter_heat', data)
        self.client.emit('load_data', {'load_types': ['heat_data']})
        resp = self.get_response('heat_data')
        self.assertEqual(resp['heats'][1]['pilots'][0], data['pilot'])
        self.assertEqual(resp['heats'][1]['note'], data['note'])
        self.assertEqual(resp['heats'][1]['class_id'], data['class'])

    def test_node_crossing(self):
        """Simulate enter/exit crossing triggers on a node and check the
        node_crossing_change messages sent to clients."""
        server.INTERFACE.race_start_time_ms = 0
        node_index = 1
        node = server.INTERFACE.nodes[node_index]
        enter_ts = ms_counter()
        # seed lap stats so pass_count has a baseline of 3
        server.INTERFACE.is_new_lap(node, 0, 20, 3, True)
        self.assertEqual(node.pass_count, 3)
        server.INTERFACE.process_enter_trigger(node, 4, enter_ts, 60, 6)
        self.assertEqual(node.enter_at_sample, (enter_ts, 60))
        resp = self.wait_for_response('node_crossing_change', 0.5)
        self.assertEqual(resp['node_index'], node_index)
        self.assertTrue(resp['crossing_flag'])
        self.assertEqual(resp['timestamp'], enter_ts)
        self.assertEqual(resp['rssi'], 60)
        pass_ts = enter_ts + 1
        exit_ts = pass_ts + 1
        server.INTERFACE.process_exit_trigger(node, 4, exit_ts, 58, 5)
        self.assertEqual(node.exit_at_sample, (exit_ts, 58))
        server.INTERFACE.process_lap_stats(node, 4, pass_ts, 65, 5)
        self.assertEqual(node.pass_count, 4)
        self.assertEqual(node.pass_peak_rssi, 65)
        resp = self.wait_for_response('node_crossing_change', 0.5)
        self.assertEqual(resp['node_index'], node_index)
        self.assertFalse(resp['crossing_flag'])
        self.assertEqual(resp['timestamp'], exit_ts)
        self.assertEqual(resp['rssi'], 58)

    def test_no_race(self):
        """Laps registered while no race is running should still update
        hardware pass counters but not race state."""
        node_index = 1
        node = server.INTERFACE.nodes[node_index]
        self.assertIsNone(node.pass_count)
        gevent.sleep(1)
        # simulate a lap
        server.INTERFACE.simulate_lap(node_index)
        self.assertIsNone(node.pass_count)
        gevent.sleep(1)
        server.INTERFACE.is_new_lap(node, 0, 20, 0, False)
        self.assertEqual(node.pass_count, 0)
        # hardware lap
        now = ms_counter()
        server.INTERFACE.process_lap_stats(node, 1, now, 89, 43)
        self.assertEqual(node.pass_count, 1)
        self.assertEqual(node.pass_peak_rssi, 89)

    def test_run_a_race(self):
        """Full happy-path race: stage, race, record laps, stop, discard."""
        node_index = 1
        node = server.INTERFACE.nodes[node_index]
        self.client.emit('alter_heat', {'heat':1, 'node':node_index, 'pilot':1})
        self.client.emit('set_race_format', {'race_format': 5})
        self.client.emit('set_min_lap', {'min_lap': 0})
        self.client.emit('stage_race')
        self.get_response('stage_ready')
        resp = self.wait_for_response('race_status', 2)
        self.assertEqual(resp['race_status'], RHRace.RaceStatus.RACING)
        gevent.sleep(1)
        # test: simulate a lap
        server.INTERFACE.simulate_lap(node_index)
        self.assertIsNone(node.pass_count)
        resp = self.wait_for_response('pass_record', 1)
        self.assertEqual(resp['node'], node_index)
        # initialize hardware lap stats
        server.INTERFACE.is_new_lap(node, 100, 20, 0, False)
        self.assertEqual(node.pass_count, 0)
        gevent.sleep(1)
        # test: hardware lap
        now = ms_counter()
        server.INTERFACE.process_lap_stats(node, 1, now, 89, 43)
        self.assertEqual(node.pass_count, 1)
        self.assertEqual(node.pass_peak_rssi, 89)
        resp = self.wait_for_response('pass_record', 1)
        self.assertEqual(resp['node'], node_index)
        self.assertEqual(resp['timestamp'], server.PROGRAM_START.monotonic_to_epoch_millis(now))
        self.client.emit('stop_race')
        # one simulated + one hardware lap recorded
        node_laps = server.RACE.node_passes[node_index]
        self.assertEqual(len(node_laps), 2)
        self.client.emit('discard_laps')

    def test_min_lap(self):
        """Laps shorter than the minimum lap time should be discarded
        when DISCARD_SHORT_LAPS behavior is selected."""
        node_index = 1
        min_lap = 1
        node = server.INTERFACE.nodes[node_index]
        self.client.emit('alter_heat', {'heat':1, 'node':node_index, 'pilot':1})
        self.client.emit('set_race_format', {'race_format': 5})
        self.client.emit('set_min_lap', {'min_lap': min_lap})
        resp = self.get_response('min_lap')
        self.assertEqual(resp['min_lap'], min_lap)
        self.client.emit('set_min_lap_behavior', {'min_lap_behavior': RHRace.MinLapBehavior.DISCARD_SHORT_LAPS})
        resp = self.get_response('min_lap')
        self.assertEqual(resp['min_lap_behavior'], RHRace.MinLapBehavior.DISCARD_SHORT_LAPS)
        self.client.emit('stage_race')
        self.get_response('stage_ready')
        resp = self.wait_for_response('race_status', 2)
        self.assertEqual(resp['race_status'], RHRace.RaceStatus.RACING)
        gevent.sleep(1)
        # initialize hardware lap stats
        server.INTERFACE.is_new_lap(node, 100, 20, 0, False)
        self.assertEqual(node.pass_count, 0)
        gevent.sleep(1)
        # pass start line
        now = ms_counter()
        server.INTERFACE.process_lap_stats(node, 1, now, 89, 43)
        self.assertEqual(node.pass_count, 1)
        self.assertEqual(node.pass_peak_rssi, 89)
        resp = self.wait_for_response('pass_record', 1)
        self.assertEqual(resp['node'], node_index)
        self.assertEqual(resp['timestamp'], server.PROGRAM_START.monotonic_to_epoch_millis(now))
        # end of first lap - min lap
        now = ms_counter()
        server.INTERFACE.process_lap_stats(node, 2, now, 86, 42)
        self.assertEqual(node.pass_count, 2)
        self.assertEqual(node.pass_peak_rssi, 86)
        gevent.sleep(min_lap)
        # first lap was too short -> only the start pass is valid
        self.assertEqual(len(server.RACE.get_valid_laps()[node_index]), 1)
        # end of 2nd lap
        now = ms_counter()
        server.INTERFACE.process_lap_stats(node, 3, now, 87, 45)
        self.assertEqual(node.pass_count, 3)
        self.assertEqual(node.pass_peak_rssi, 87)
        gevent.sleep(1)
        resps = self.get_responses('pass_record', 'phonetic_data')
        [evt] = filter(lambda evt: 'pass_record' in evt, resps)
        resp = evt['pass_record']
        self.assertEqual(resp['node'], node_index)
        self.assertEqual(resp['timestamp'], server.PROGRAM_START.monotonic_to_epoch_millis(now))
        self.assertEqual(len(server.RACE.get_valid_laps()[node_index]), 2)
        [evt] = filter(lambda evt: 'phonetic_data' in evt, resps)
        resp = evt['phonetic_data']
        # the short lap was discarded, so this counts as lap 1
        self.assertEqual(resp['lap'], 1)
        self.client.emit('stop_race')
        node_laps = server.RACE.node_passes[node_index]
        self.assertEqual(len(node_laps), 3)
        # the too-short lap is kept but flagged deleted
        self.assertEqual(node_laps[1]['deleted'], True)
        self.assertEqual(node_laps[2]['deleted'], False)
        self.client.emit('discard_laps')

    # scanner
    def test_scanner(self):
        self.client.emit('set_frequency', {
            'node': 0,
            'frequency': 5888
        })
        is_freq_set = lambda f: lambda d:d['frequency'][0] == f
        self.wait_for_response('heartbeat', 1, is_freq_set(5888))
        self.client.emit('set_scan', {
            'node': 0,
            'scan': True,
        })
        # allow some scanning to happen
        gevent.sleep(1)
        resp = self.wait_for_response('scan_data', 1)
        # scan data must pair each frequency with an RSSI value
        num_freqs = len(resp['frequency'])
        self.assertGreater(num_freqs, 0)
        self.assertEqual(len(resp['rssi']), num_freqs)
        self.client.emit('set_scan', {
            'node': 0,
            'scan': False,
        })
        # check original frequency is restored
        resp = self.wait_for_response('heartbeat', 1, is_freq_set(5888))

    # verify LiveTime compatibility
    def test_livetime_get_version(self):
        resp = self.client.emit('get_version', callback=True)
        self.assertIn('major', resp)
        self.assertIn('minor', resp)

    def test_livetime_get_timestamp(self):
        resp = self.client.emit('get_timestamp', callback=True)
        self.assertIn('timestamp', resp)

    def test_livetime_get_settings(self):
        resp = self.client.emit('get_settings', callback=True)
        self.assertIn('nodes', resp)
        for n in resp['nodes']:
            self.assertTrue('frequency' in n)
            self.assertTrue('trigger_rssi' in n)

    def test_livetime_set_calibration_threshold(self):
        # accepted for LiveTime compatibility; no response expected
        self.client.emit('set_calibration_threshold', {
            'calibration_threshold': 0
        })

    def test_livetime_set_calibration_offset(self):
        # accepted for LiveTime compatibility; no response expected
        self.client.emit('set_calibration_offset', {
            'calibration_offset': 0
        })

    def test_livetime_set_trigger_threshold(self):
        # accepted for LiveTime compatibility; no response expected
        self.client.emit('set_trigger_threshold', {
            'trigger_threshold': 0
        })

    def test_livetime_set_frequency(self):
        data = {
            'node': 0,
            'frequency': 5800
        }
        # trigger livetime client mode
        self.client.emit('get_version')
        self.client.emit('set_frequency', data)
        is_same_node = lambda d: d['node'] == 0
        resp = self.wait_for_response('frequency_set', 1, is_same_node)
        self.assertEqual(resp, data)

    def test_livetime_reset_auto_calibration(self):
        self.client.emit('reset_auto_calibration', {
            'node': -1
        })
        self.client.emit('stop_race')

    def test_livetime_heartbeat(self):
        # trigger livetime client mode
        self.client.emit('get_version')
        resp = self.wait_for_response('heartbeat', 1)
        self.assertIn('current_rssi', resp)
        self.assertTrue(len(resp['current_rssi']) > 0)

    def test_livetime_pass_record(self):
        # trigger livetime client mode
        self.client.emit('get_version')
        server.RACE.race_status = 1
        node = server.INTERFACE.nodes[0]
        server.RACE.start_time_ms = 10000
        server.RACE.start_time_epoch_ms = server.PROGRAM_START.monotonic_to_epoch_millis(server.RACE.start_time_ms)
        server.pass_record_callback(node, 19800, 0)
        resp = self.wait_for_response('pass_record', 1)
        self.assertIn('node', resp)
        self.assertIn('frequency', resp)
        self.assertIn('timestamp', resp)
        # lap timestamp is relative to race start, reported in epoch millis
        self.assertEqual(resp['timestamp'], server.PROGRAM_START.monotonic_to_epoch_millis(server.RACE.start_time_ms) + 19800)
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,081
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/MqttLapRFInterface.py
|
import logging
from .BaseHardwareInterface import BaseHardwareInterface
from .MqttInterface import MqttInterface
from .LapRFInterface import LapRFInterfaceListener
logger = logging.getLogger(__name__)
class MqttLapRFInterface(MqttInterface, LapRFInterfaceListener):
    """MQTT bridge for a LapRF timer.

    Announces threshold/gain changes from the hardware over MQTT and
    applies threshold/gain control messages received over MQTT to the
    underlying hardware interface.
    """

    def __init__(self, mqtt_client, ann_topic: str, ctrl_topic: str, timer_id: str, hw_interface: BaseHardwareInterface):
        super().__init__(mqtt_client=mqtt_client, ann_topic=ann_topic, ctrl_topic=ctrl_topic, timer_id=timer_id, hw_interface=hw_interface)

    # --- LapRFInterfaceListener callbacks: announce hardware changes ---

    def on_threshold_changed(self, node, threshold):
        self._mqtt_publish_threshold(node, threshold)

    def on_gain_changed(self, node, gain):
        self._mqtt_publish_gain(node, gain)

    def _mqtt_node_manager_start(self, node_manager):
        # subscribe to control topics before the base class starts the manager
        self._mqtt_node_subscribe_to(node_manager, "threshold", self._mqtt_set_threshold)
        self._mqtt_node_subscribe_to(node_manager, "gain", self._mqtt_set_gain)
        super()._mqtt_node_manager_start(node_manager)

    def _mqtt_node_start(self, node):
        super()._mqtt_node_start(node)
        # announce current settings so new subscribers see the initial state
        self._mqtt_publish_threshold(node, node.threshold)
        self._mqtt_publish_gain(node, node.gain)

    def _mqtt_set_threshold(self, node_manager, client, userdata, msg):
        """MQTT callback: apply a threshold control message to the hardware."""
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            # Parse only inside the try: a bare except here previously
            # swallowed SystemExit/KeyboardInterrupt and misreported
            # hardware errors from set_threshold as bad messages.
            try:
                level = int(msg.payload.decode('utf-8'))
            except (ValueError, UnicodeDecodeError) as ex:
                logger.warning('Invalid threshold message: %s', ex)
                return
            self.hw_interface.set_threshold(node.index, level)

    def _mqtt_set_gain(self, node_manager, client, userdata, msg):
        """MQTT callback: apply a gain control message to the hardware."""
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            try:
                level = int(msg.payload.decode('utf-8'))
            except (ValueError, UnicodeDecodeError) as ex:
                logger.warning('Invalid gain message: %s', ex)
                return
            self.hw_interface.set_gain(node.index, level)

    def _mqtt_publish_threshold(self, node, threshold):
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "threshold"), str(threshold))

    def _mqtt_publish_gain(self, node, gain):
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "gain"), str(gain))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,082
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/sensors/bme280_sensor.py
|
# coding=UTF-8
import logging
from . import I2CSensor, Reading
from smbus2 import SMBus
import bme280
logger = logging.getLogger(__name__)
class BME280Sensor(I2CSensor):
    """Bosch BME280 environmental sensor attached via I2C."""

    def __init__(self, name, addr, i2c_bus):
        super().__init__(name=name, i2c_addr=addr, i2c_bus=i2c_bus)
        self.description = 'BME280'
        # take an initial sample; raises IOError if no device responds,
        # which discovery uses to detect absence of the sensor
        self._readData()

    def _readData(self):
        # open the bus only for the duration of the sample
        with SMBus(self.i2c_bus.id) as bus:
            sample = bme280.sample(bus, self.i2c_address)
        self.data = sample

    @Reading(units='°C')
    def temperature(self):
        """Last sampled temperature."""
        return self.data.temperature

    @Reading(units='hPa')
    def pressure(self):
        """Last sampled barometric pressure."""
        return self.data.pressure

    @Reading(units='%rH')
    def humidity(self):
        """Last sampled relative humidity."""
        return self.data.humidity
def discover(config, i2c_helper, *args, **kwargs):
    """Probe all I2C buses for BME280 devices at the supported addresses.

    Each candidate address is checked against *config* (keyed by the bus
    URL of the address); entries may be disabled or given a custom name.
    Returns the list of successfully constructed BME280Sensor objects.
    """
    found = []
    for bus in i2c_helper:
        for address in (0x76, 0x77):  # the two addresses a BME280 can use
            url = bus.url_of(address)
            cfg = config.get(url, {})
            if not cfg.get('enabled', True):
                continue  # explicitly disabled in configuration
            label = cfg.get('name', url)
            try:
                found.append(BME280Sensor(label, address, bus))
            except IOError:
                # louder if the user configured this address explicitly
                level = logging.INFO if cfg else logging.DEBUG
                logger.log(level, "No BME280 found on bus {} at address {:#04x}".format(bus.id, address))
    return found
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,083
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/endpoints/heat_generator_endpoints.py
|
from flask import request
from flask.blueprints import Blueprint
from numpy.random import default_rng
from rh.app import race_explorer_core as racex
rng = default_rng()
def createBlueprint(RHData):
APP = Blueprint('heat_generator', __name__)
@APP.route('/heat-generators/random')
def random_get():
return {
"parameters": [
{"name": "class", "label": "Class", "type": "class"},
{"name": "seats", "label": "Seats", "type": "seats"},
{"name": "pilots", "type": "pilots"}
]
}
@APP.route('/heat-generators/random', methods=['POST'])
def random_post():
data = request.get_json()
race_class = data['class']
n_seats = int(data['seats'])
pilots = data['pilots']
heats = []
seats = [None] * n_seats
i = 0
for pilot in rng.choice(pilots, len(pilots), replace=False):
seats[i] = pilot
i += 1
if i == n_seats:
heats.append({'name': 'Heat '+str(len(heats)+1), 'class': race_class, 'seats': seats})
seats = [None] * n_seats
i = 0
if i > 0:
heats.append({'name': 'Heat '+str(len(heats)+1), 'class': race_class, 'seats': seats})
return {'type': 'Random', 'heats': heats}
@APP.route('/heat-generators/mains')
def mains_get():
return {
"parameters": [
{"name": "resultsClass", "label": "Results class", "type": "class"},
{"name": "mainsClass", "label": "Mains class", "type": "class"},
{"name": "seats", "label": "Seats", "type": "seats"}
]
}
@APP.route('/heat-generators/mains', methods=['POST'])
def mains_post():
data = request.get_json()
stage_idx = data['stage']
results_class = data['resultsClass']
mains_class = data['mainsClass']
n_seats = int(data['seats'])
leaderboards = racex.export_leaderboard(RHData)
stages = leaderboards['stages']
if stage_idx-1 < 0 or stage_idx-1 >= len(stages):
return {'heats': []}
stage = stages[stage_idx-1]
stage_leaderboard = stage['leaderboards'].get(results_class)
if stage_leaderboard is None:
return {'heats': []}
pilots_to_seats = {}
for heat in stage['heats']:
for seat_idx, pilot in enumerate(heat['seats']):
pilots_to_seats[pilot] = seat_idx
prs = stage_leaderboard['ranking']
mains = []
i = 0
main_letter = 'A'
while i < len(prs):
heat_prs = prs[i:i+n_seats]
i += len(heat_prs)
# assign pilots to seats
# prioritise freq assignments to higher ranked pilots
# (an alternate strategy would be to minimize freq changes)
seats = [None] * n_seats
available_seats = list(range(n_seats))
unassigned = []
for pr in heat_prs:
pilot = pr['pilot']
seat_idx = pilots_to_seats[pilot]
if seat_idx < len(seats) and not seats[seat_idx]:
seats[seat_idx] = pilot
available_seats.remove(seat_idx)
else:
unassigned.append(pilot)
for pilot in unassigned:
seat_idx = available_seats.pop(0)
seats[seat_idx] = pilot
mains.append({'name': main_letter+' Main', 'class': mains_class, 'seats': seats})
main_letter = chr(ord(main_letter) + 1)
mains.reverse()
return {'type': 'Mains', 'heats': mains, 'leaderboards': {mains_class: {'method': 'best'}}}
@APP.route('/heat-generators/mgp-brackets')
def mgp_brackets_get():
    """Describe the parameters accepted by the MultiGP double-elimination generator."""
    bracket_param = {"name": "bracket", "label": "Bracket", "type": "integer", "default": 1, "min": 1, "max": 6}
    return {
        "parameters": [
            {"name": "resultsClass", "label": "Results class", "type": "class"},
            {"name": "mainsClass", "label": "Mains class", "type": "class"},
            bracket_param
        ]
    }
@APP.route('/heat-generators/mgp-brackets', methods=['POST'])
def mgp_brackets_post():
    """Generate the requested MultiGP double-elimination bracket round."""
    data = request.get_json()
    leaderboards = racex.export_leaderboard(RHData)
    return mgp_brackets(
        leaderboards,
        data['stage'],
        data['resultsClass'],
        data['mainsClass'],
        int(data['bracket']),
    )
@APP.route('/heat-generators/fai-single-16')
def fai_single_16_get():
    """Describe the parameters accepted by the FAI 16-pilot single-elimination generator."""
    bracket_param = {"name": "bracket", "label": "Bracket", "type": "integer", "default": 1, "min": 1, "max": 3}
    return {
        "parameters": [
            {"name": "resultsClass", "label": "Results class", "type": "class"},
            {"name": "mainsClass", "label": "Mains class", "type": "class"},
            bracket_param
        ]
    }
@APP.route('/heat-generators/fai-single-16', methods=['POST'])
def fai_single_16_post():
    """Generate the requested FAI 16-pilot single-elimination bracket round."""
    data = request.get_json()
    leaderboards = racex.export_leaderboard(RHData)
    return fai_single_brackets_16(
        leaderboards,
        data['stage'],
        data['resultsClass'],
        data['mainsClass'],
        int(data['bracket']),
    )
@APP.route('/heat-generators/fai-double-16')
def fai_double_16_get():
    """Describe the parameters accepted by the FAI 16-pilot double-elimination generator."""
    bracket_param = {"name": "bracket", "label": "Bracket", "type": "integer", "default": 1, "min": 1, "max": 6}
    return {
        "parameters": [
            {"name": "resultsClass", "label": "Results class", "type": "class"},
            {"name": "mainsClass", "label": "Mains class", "type": "class"},
            bracket_param
        ]
    }
@APP.route('/heat-generators/fai-double-16', methods=['POST'])
def fai_double_16_post():
    """Generate the requested FAI 16-pilot double-elimination bracket round."""
    data = request.get_json()
    leaderboards = racex.export_leaderboard(RHData)
    return fai_double_brackets_16(
        leaderboards,
        data['stage'],
        data['resultsClass'],
        data['mainsClass'],
        int(data['bracket']),
    )
return APP
def mgp_brackets(leaderboards, stage_idx, results_class, mains_class, bracket):
    '''
    2021 MultiGP Rules & Regulations
    7.9.1. Double Elimination Brackets
    https://docs.google.com/document/d/1x-otorbEruq5oD6b1yzoBTHO9SwUNmb2itguUoY8x3s/
    As per the diagram
    https://www.multigp.com/wp-content/uploads/2019/04/multigp-double-elim-brackets1.png

    Builds one bracket round (1-6) of the 16-pilot double-elimination;
    delegates the actual heat construction to brackets().
    '''
    # 1-index based!
    # Entry 0 seeds races 1-4 straight from leaderboard positions; each later
    # entry seeds its races from the (race, position) finishing results of
    # earlier bracket races.
    seeding_table = [
        [[4,7,11,13], [3,6,10,14], [2,5,9,15], [1,8,12,16]],
        {'previous_races': 4,
         'race_offset': 5,
         'races': [
             # (race, position)
             [(1,3),(2,3),(1,4),(2,4)],
             [(1,2),(2,1),(1,1),(2,2)],
             [(3,4),(4,4),(3,3),(4,3)],
             [(3,1),(4,2),(3,2),(4,1)]
         ]
        },
        {'previous_races': 4,
         'race_offset': 9,
         'races': [
             [(5,1),(5,2),(7,1),(7,2)],
             [(6,3),(6,4),(8,3),(8,4)],
             [(6,1),(6,2),(8,1),(8,2)],
         ]
        },
        {'previous_races': 3,
         'race_offset': 12,
         'races': [
             [(9,1),(9,2),(10,1),(10,2)]
         ]
        },
        {'previous_races': 2,
         'race_offset': 13,
         'races': [
             [(12,1),(12,2),(11,3),(11,4)]
         ]
        },
        {'previous_races': 3,
         'race_offset': 14,
         'races': [
             [(11,1),(11,2),(13,1),(13,2)]
         ]
        }
    ]
    # final overall standings: (race, finishing position) pairs, best first
    leaderboard_positions = [
        (14,1), (14,2), (14,3), (14,4),
        (13,3), (13,4), (12,3), (12,4),
        (10,3), (10,4), (9,3), (9,4),
        (5,3), (5,4), (7,3), (7,4)
    ]
    return brackets(leaderboards, stage_idx, results_class, mains_class, bracket, seeding_table, leaderboard_positions)
def fai_single_brackets_16(leaderboards, stage_idx, results_class, mains_class, bracket):
    '''
    https://www.fai.org/sites/default/files/ciam/wcup_drones/sc4_vol_f9_dronesport_2021.pdf

    Builds one bracket round (1-3) of the FAI 16-pilot single-elimination;
    delegates the actual heat construction to brackets().
    '''
    # 1-index based!
    # Entry 0 seeds races 1-4 from leaderboard positions; later entries seed
    # from the (race, position) results of earlier bracket races.
    seeding_table = [
        [[16,1,8,9], [13,4,5,12], [14,3,6,10], [15,2,7,11]],
        {'previous_races': 4,
         'race_offset': 5,
         'races': [
             # (race, position)
             [(1,2),(1,1),(2,1),(2,2)],
             [(3,2),(3,1),(4,1),(4,2)]
         ]
        },
        {'previous_races': 2,
         'race_offset': 7,
         'races': [
             [(5,4),(5,3),(6,3),(6,4)],
             [(5,2),(5,1),(6,1),(6,2)],
         ]
        }
    ]
    # final overall standings: (race, finishing position) pairs, best first
    leaderboard_positions = [
        (8,1), (8,2), (8,3), (8,4),
        (7,1), (7,2), (7,3), (7,4)
    ]
    return brackets(leaderboards, stage_idx, results_class, mains_class, bracket, seeding_table, leaderboard_positions)
def fai_double_brackets_16(leaderboards, stage_idx, results_class, mains_class, bracket):
    '''
    https://www.fai.org/sites/default/files/ciam/wcup_drones/sc4_vol_f9_dronesport_2021.pdf

    Builds one bracket round (1-6) of the FAI 16-pilot double-elimination;
    delegates the actual heat construction to brackets().
    '''
    # 1-index based!
    # Entry 0 seeds races 1-4 from leaderboard positions; later entries seed
    # from the (race, position) results of earlier bracket races.
    seeding_table = [
        [[16,1,8,9], [13,4,5,12], [14,3,6,10], [15,2,7,11]],
        {'previous_races': 4,
         'race_offset': 5,
         'races': [
             # (race, position)
             [(1,4),(2,3),(3,3),(4,4)],
             [(2,4),(1,3),(4,3),(3,4)],
             [(1,2),(1,1),(2,1),(2,2)],
             [(3,2),(3,1),(4,1),(4,2)]
         ]
        },
        {'previous_races': 4,
         'race_offset': 9,
         'races': [
             [(8,4),(6,2),(5,1),(7,3)],
             [(7,4),(5,2),(6,1),(8,3)],
         ]
        },
        {'previous_races': 4,
         'race_offset': 11,
         'races': [
             [(9,2),(9,1),(10,1),(10,2)],
             [(7,2),(7,1),(8,1),(8,2)],
         ]
        },
        {'previous_races': 2,
         'race_offset': 13,
         'races': [
             [(12,4),(11,2),(11,1),(12,3)]
         ]
        },
        {'previous_races': 2,
         'race_offset': 14,
         'races': [
             [(13,2),(12,2),(12,1),(13,1)]
         ]
        }
    ]
    # final overall standings: (race, finishing position) pairs, best first
    leaderboard_positions = [
        (14,1), (14,2), (14,3), (14,4),
        (13,3), (13,4), (11,3), (11,4)
    ]
    return brackets(leaderboards, stage_idx, results_class, mains_class, bracket, seeding_table, leaderboard_positions)
def brackets(leaderboards, stage_idx, results_class, mains_class, bracket, seeding_table, leaderboard_positions):
    '''
    Build the heats for one elimination-bracket round.

    bracket 1 seeds directly from the most recent leaderboard of
    results_class; brackets 2..len(seeding_table) seed each race from the
    finishing positions of earlier bracket races. Raises ValueError for an
    out-of-range bracket. On the final bracket the returned dict also carries
    the 'heatPositions' leaderboard method so overall standings can be read
    from race results.
    '''
    stages = leaderboards['stages']
    mains = []
    if bracket == 1:
        # seed round 1 straight from the latest leaderboard of results_class
        stage = get_previous_stage(stages, stage_idx, results_class)
        if not stage:
            return {'heats': []}
        stage_leaderboard = stage['leaderboards'][results_class]
        prs = stage_leaderboard['ranking']
        seeding = seeding_table[bracket-1]
        for i, race_seeds in enumerate(seeding):
            # seed_pos is a 1-based leaderboard position; leave the seat empty
            # when fewer pilots than seeds are available
            seats = [prs[seed_pos-1]['pilot'] if seed_pos <= len(prs) else None for seed_pos in race_seeds]
            mains.append({'name': 'Race '+str(i+1), 'class': mains_class, 'seats': seats})
    elif bracket >= 2 and bracket <= len(seeding_table):
        seeding = seeding_table[bracket-1]
        n_bracket_races = seeding['previous_races']
        heats = racex.get_previous_n_races(stages, stage_idx, [results_class], n_bracket_races)
        if not heats:
            return {'heats': []}
        race_offset = seeding['race_offset']
        # race numbers in the seeding table are global; heat_offset converts a
        # global race number into an index into the `heats` window just fetched
        heat_offset = race_offset - n_bracket_races
        for i, race_seeds in enumerate(seeding['races']):
            seats = []
            for seed_pos in race_seeds:
                # seed_pos is a (race number, finishing position) pair
                pilot = None
                heat = heats[seed_pos[0]-heat_offset]
                ranking = heat['ranking']
                if seed_pos[1] <= len(ranking):
                    pilot = ranking[seed_pos[1]-1]['pilot']
                seats.append(pilot)
            mains.append({'name': 'Race '+str(i+race_offset), 'class': mains_class, 'seats': seats})
    else:
        raise ValueError("Invalid bracket: {}".format(bracket))
    bracket_heats = {'type': 'Bracket '+str(bracket), 'heats': mains}
    if bracket == len(seeding_table):
        # last round: overall standings are defined by heat finishing positions
        bracket_heats['leaderboards'] = {mains_class: {'method': 'heatPositions', 'heatPositions': leaderboard_positions}}
    return bracket_heats
def get_previous_stage(stages, stage_idx, race_class_name):
    """Return the most recent stage at or before stage_idx-1 whose
    leaderboards include race_class_name, or None if there is none
    (or stage_idx-1 is out of range)."""
    last = stage_idx - 1
    if not (0 <= last < len(stages)):
        return None
    for stage in reversed(stages[:last + 1]):
        if race_class_name in stage['leaderboards']:
            return stage
    return None
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,084
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/config.py
|
import logging
import random
import json
import jsonschema
import sys
logger = logging.getLogger(__name__)
class Config:
    """Server configuration: built-in defaults, optionally overridden by a
    JSON config file that is validated against a JSON schema on load."""
    FILE_NAME = 'config.json'
    SCHEMA_FILE_NAME = 'config.schema.json'
    DB_FILE_NAME = 'database.db'

    def __init__(self):
        # one dict/list per configuration section, pre-populated with defaults
        self.GENERAL = {}
        self.HARDWARE = {}
        self.SENSORS = {}
        self.LED = {}
        self.MQTT = {}
        self.SERIAL_PORTS = []
        self.SOCKET_PORTS = []
        self.LOGGING = {}
        self.VRX_CONTROL = {}
        self.AUDIO = {}
        self.LAPRF = {}
        self.CHORUS = {}
        self.apply_defaults()
        with open(Config.SCHEMA_FILE_NAME, 'r') as f:
            self.schema = json.load(f)

    def apply_defaults(self):
        """Populate every section with its built-in default values."""
        # LED strip configuration:
        self.LED['LED_COUNT'] = 0  # Number of LED pixels.
        self.LED['LED_PIN'] = 10  # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
        self.LED['LED_FREQ_HZ'] = 800000  # LED signal frequency in hertz (usually 800khz)
        self.LED['LED_DMA'] = 10  # DMA channel to use for generating signal (try 10)
        self.LED['LED_INVERT'] = False  # True to invert the signal (when using NPN transistor level shift)
        self.LED['LED_CHANNEL'] = 0  # set to '1' for GPIOs 13, 19, 41, 45 or 53
        self.LED['LED_STRIP'] = 'GRB'  # Strip type and colour ordering
        self.LED['LED_ROWS'] = 1  # Number of rows in LED array
        self.LED['PANEL_ROTATE'] = 0
        self.LED['INVERTED_PANEL_ROWS'] = False
        # MQTT configuration
        # NOTE(review): default broker credentials are hardcoded here;
        # deployments should override them in config.json.
        self.MQTT['BROKER'] = 'localhost'
        self.MQTT['PORT'] = 1883
        self.MQTT['USERNAME'] = 'race-admin'
        self.MQTT['PASSWORD'] = 'fu56rg20'
        self.MQTT['TIMER_ANN_TOPIC'] = 'timer/ann'
        self.MQTT['TIMER_CTRL_TOPIC'] = 'timer/ctrl'
        self.MQTT['RACE_ANN_TOPIC'] = 'race/ann'
        self.MQTT['SENSOR_ANN_TOPIC'] = 'sensor/ann'
        # Video Receiver configuration
        self.VRX_CONTROL['HOST'] = 'localhost'  # MQTT broker IP Address
        self.VRX_CONTROL['ENABLED'] = False
        self.VRX_CONTROL['OSD_LAP_HEADER'] = 'L'
        # hardware default configurations
        self.HARDWARE['I2C_BUSES'] = [1]
        # other default configurations
        self.GENERAL['HTTP_PORT'] = 5000
        self.GENERAL['DATABASE'] = ''
        self.GENERAL['SECRET_KEY'] = random.random()
        self.GENERAL['ADMIN_USERNAME'] = 'admin'
        self.GENERAL['ADMIN_PASSWORD'] = 'rotorhazard'
        self.GENERAL['SECONDARIES'] = []
        self.GENERAL['SECONDARY_TIMEOUT'] = 300  # seconds
        self.GENERAL['DEBUG'] = False
        self.GENERAL['CORS_ALLOWED_HOSTS'] = '*'
        self.GENERAL['FORCE_S32_BPILL_FLAG'] = False
        self.GENERAL['DEF_NODE_FWUPDATE_URL'] = ''
        self.GENERAL['SHUTDOWN_BUTTON_GPIOPIN'] = 18
        self.GENERAL['SHUTDOWN_BUTTON_DELAYMS'] = 2500
        self.GENERAL['DB_AUTOBKP_NUM_KEEP'] = 30

    def load(self, file=FILE_NAME):
        """Override the defaults with the contents of `file`.

        Sets GENERAL['configFile'] to 'loaded', 'defaults' (file missing)
        or 'error'. Exits the process when the file exists but is invalid.
        """
        try:
            with open(file, 'r') as f:
                externalConfig = json.load(f)
            jsonschema.validate(instance=externalConfig, schema=self.schema)
            self.GENERAL.update(externalConfig['GENERAL'])
            # optional dict sections: merge in place when present
            for section in ('HARDWARE', 'LOGGING', 'LED', 'MQTT', 'AUDIO',
                            'VRX_CONTROL', 'LAPRF', 'CHORUS', 'SENSORS'):
                if section in externalConfig:
                    getattr(self, section).update(externalConfig[section])
            # optional list sections: append configured entries
            for section in ('SERIAL_PORTS', 'SOCKET_PORTS'):
                if section in externalConfig:
                    getattr(self, section).extend(externalConfig[section])
            # Apply legacy config options for backward compatibility
            if not self.GENERAL['SECONDARIES'] and 'SLAVES' in self.GENERAL and self.GENERAL['SLAVES']:
                self.GENERAL['SECONDARIES'] = self.GENERAL['SLAVES']
            if not self.GENERAL['SECONDARY_TIMEOUT'] and 'SLAVE_TIMEOUT' in self.GENERAL and self.GENERAL['SLAVE_TIMEOUT']:
                self.GENERAL['SECONDARY_TIMEOUT'] = self.GENERAL['SLAVE_TIMEOUT']
            self.GENERAL['configFile'] = 'loaded'
            logger.info("Using configuration file '{0}'".format(file))
        except IOError:
            self.GENERAL['configFile'] = 'defaults'
            # logging.warn() is deprecated; use warning()
            logger.warning("No configuration file found, using defaults")
        except (ValueError, jsonschema.ValidationError) as ex:
            # json.JSONDecodeError subclasses ValueError, but
            # jsonschema.ValidationError does not — it must be caught
            # explicitly, otherwise a schema-invalid config crashed uncaught.
            self.GENERAL['configFile'] = 'error'
            logger.error("Configuration file invalid, using defaults; error is: {}".format(ex))
            sys.exit(1)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,085
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/Results.py
|
#
# Results generators and caching
#
import copy
from rh.util import RHUtils, secs_to_millis
import logging
import cachetools
from threading import Lock
from .RHRace import RaceStatus, StartBehavior, WinCondition, WinStatus
from rh.app.RHRace import RHRace
from typing import Any, Dict, List, Mapping, Union
logger = logging.getLogger(__name__)
def fastest_3_consecutive_builder(leaderboard):
    """Primary-leaderboard key for the fastest-3-consecutive win condition."""
    return 'by_consecutives'
def fastest_lap_builder(leaderboard):
    """Primary-leaderboard key for the fastest-lap win condition."""
    return 'by_fastest_lap'
def race_time_builder(leaderboard):
    """Primary-leaderboard key for race-time based win conditions."""
    return 'by_race_time'
# Maps a race win condition to the builder that selects which of the
# generated leaderboards ('by_race_time', 'by_fastest_lap',
# 'by_consecutives') is the primary one for that condition.
LEADERBOARD_BUILDERS = {
    WinCondition.FASTEST_3_CONSECUTIVE: fastest_3_consecutive_builder,
    WinCondition.FASTEST_LAP: fastest_lap_builder,
    WinCondition.NONE: race_time_builder,
    WinCondition.MOST_LAPS: race_time_builder,
    WinCondition.FIRST_TO_LAP_X: race_time_builder,
}
def _current_race_cache_key(*args, **kwargs):
    """Cache key for current-race leaderboards: the race's identity plus its
    modification counter, so the entry invalidates whenever the race changes."""
    race = args[-1]  # last positional argument; the first may be self
    return cachetools.keys.hashkey(id(race), race.modification_count)
class ResultsCache:
    """Bounded LRU caches for computed leaderboards, one per scope
    (saved race, heat, class, event, current race, team), each with its
    own lock for use with cachetools.cachedmethod."""

    def __init__(self):
        self._race_cache = cachetools.LRUCache(maxsize=64)
        self._race_cache_lock = Lock()
        self._heat_cache = cachetools.LRUCache(maxsize=13)
        self._heat_cache_lock = Lock()
        self._class_cache = cachetools.LRUCache(maxsize=7)
        self._class_cache_lock = Lock()
        self._event_cache = cachetools.LRUCache(maxsize=3)
        self._event_cache_lock = Lock()
        self._current_cache = cachetools.LRUCache(maxsize=7)
        self._current_cache_lock = Lock()
        self._team_cache = cachetools.LRUCache(maxsize=7)
        self._team_cache_lock = Lock()

    def evict_race(self, race_id):
        """Drop the cached leaderboard for one saved race.

        NOTE(review): calc_race_leaderboard is cached on (heat_id, round_id),
        so a single-value key here may never match — verify against the
        cachedmethod key used there.
        """
        self._race_cache.pop(cachetools.keys.hashkey(race_id), None)

    def clear_races(self):
        """Drop all cached per-race leaderboards."""
        self._race_cache.clear()

    def evict_heat(self, heat_id):
        """Drop the cached leaderboard for one heat."""
        self._heat_cache.pop(cachetools.keys.hashkey(heat_id), None)

    def clear_heats(self):
        """Drop all cached per-heat leaderboards."""
        self._heat_cache.clear()

    def evict_class(self, class_id):
        """Drop the cached leaderboard for one class."""
        # bug fix: previously popped from _heat_cache, which both left the
        # stale class entry in place and could evict an unrelated heat
        self._class_cache.pop(cachetools.keys.hashkey(class_id), None)

    def clear_classes(self):
        """Drop all cached per-class leaderboards."""
        self._class_cache.clear()

    def evict_event(self):
        """Drop the cached event-wide leaderboard."""
        self._event_cache.clear()

    def clear_events(self):
        """Drop all cached event-wide leaderboards."""
        self._event_cache.clear()

    def clear_all(self):
        """Drop every saved-data cache.

        The current-race and team caches are not cleared here; their keys
        include the race's modification counter — presumably they self-
        invalidate, but confirm before relying on it.
        """
        self.clear_races()
        self.clear_heats()
        self.clear_classes()
        self.clear_events()
class Results:
    """Computes leaderboards from saved race data (and the current race),
    memoizing results in a shared ResultsCache."""
    def __init__(self, rhDataObj, resultsCache):
        # rhDataObj: persistence layer used to query pilots, races and laps
        # resultsCache: shared ResultsCache supplying the per-scope LRU caches
        self.rhDataObj = rhDataObj
        self.resultsCache = resultsCache
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._event_cache, key=lambda *args,**kwargs:'event', lock=lambda self:self.resultsCache._event_cache_lock)
def calc_event_leaderboard(self):
    """Event-wide leaderboard over all saved races; cached under the
    single key 'event'."""
    return self.calc_leaderboard()
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._class_cache, lock=lambda self:self.resultsCache._class_cache_lock)
def calc_class_leaderboard(self, class_id):
    """Leaderboard over all saved races of one race class; cached per class."""
    return self.calc_leaderboard(class_id=class_id)
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._heat_cache, lock=lambda self:self.resultsCache._heat_cache_lock)
def calc_heat_leaderboard(self, heat_id):
    '''
    Leaderboard for each heat across all rounds; cached per heat.
    '''
    return self.calc_leaderboard(heat_id=heat_id)
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._race_cache, lock=lambda self:self.resultsCache._race_cache_lock)
def calc_race_leaderboard(self, heat_id, round_id):
    """Leaderboard for a single saved race (one heat + round); cached per race."""
    return self.calc_leaderboard(heat_id=heat_id, round_id=round_id)
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._current_cache, key=_current_race_cache_key, lock=lambda self:self.resultsCache._current_cache_lock)
def calc_current_race_leaderboard(self, current_race: RHRace):
    """Leaderboard for the in-progress race; the cache key includes the
    race's modification counter so entries refresh as the race changes."""
    return self.calc_leaderboard(current_race=current_race)
def calc_leaderboard(self, **params) -> Mapping[str,Any]:
    ''' Generates leaderboards.

    The keyword arguments select the scope (mutually exclusive):
      current_race         -- an in-progress race object
      class_id             -- all saved races of a race class
      heat_id [+ round_id] -- saved races of a heat (optionally one round)
      (none)               -- every saved race (event-wide)

    Returns a dict with ranked lists under 'by_race_time',
    'by_fastest_lap' and 'by_consecutives', plus a 'meta' entry naming
    the primary leaderboard for the race format's win condition.
    '''
    USE_CURRENT = False
    USE_ROUND = None
    USE_HEAT = None
    USE_CLASS = None
    selected_race_laps = []
    timeFormat = self.rhDataObj.get_option('timeFormat')
    # decode the requested scope from the keyword arguments
    if ('current_race' in params):
        USE_CURRENT = True
    if ('class_id' in params):
        USE_CLASS = params['class_id']
    elif ('round_id' in params and 'heat_id' in params):
        USE_ROUND = params['round_id']
        USE_HEAT = params['heat_id']
    elif ('heat_id' in params):
        USE_ROUND = None
        USE_HEAT = params['heat_id']
    # Get race query (saved), and race format (all)
    if USE_CURRENT:
        raceObj = params['current_race']
        race_format = raceObj.format
    else:
        # select the saved races in scope and determine a common race format
        # (current_format stays None when the races may have mixed formats)
        if USE_CLASS:
            selected_races = self.rhDataObj.get_savedRaceMetas_by_raceClass(USE_CLASS)
            if len(selected_races) >= 1:
                current_format = self.rhDataObj.get_raceClass(USE_CLASS).format_id
            else:
                current_format = None
        elif USE_HEAT:
            if USE_ROUND:
                selected_races = [self.rhDataObj.get_savedRaceMeta_by_heat_round(USE_HEAT, USE_ROUND)]
                current_format = selected_races[0].format_id
            else:
                selected_races = self.rhDataObj.get_savedRaceMetas_by_heat(USE_HEAT)
                if len(selected_races) >= 1:
                    heat_class = selected_races[0].class_id
                    if heat_class:
                        current_format = self.rhDataObj.get_raceClass(heat_class).format_id
                    else:
                        current_format = None
                else:
                    current_format = None
        else:
            selected_races = self.rhDataObj.get_savedRaceMetas()
            current_format = None
        # index races by id, and fetch per-race pilot entries
        selected_races_keyed = {}
        for race in selected_races:
            selected_races_keyed[race.id] = race
        selected_pilotraces = {}
        racelist = []
        for race in selected_races:
            racelist.append(race.id)
            selected_pilotraces[race.id] = self.rhDataObj.get_savedPilotRaces_by_savedRaceMeta(race.id)
        # Generate heat list with key
        heats_keyed = {}
        all_heats = self.rhDataObj.get_heats()
        for heat in all_heats:
            heats_keyed[heat.id] = heat
        if current_format:
            race_format = self.rhDataObj.get_raceFormat(current_format)
        else:
            race_format = None
        # filter laps down to those belonging to the selected races
        all_laps = self.rhDataObj.get_active_savedRaceLaps()
        for lap in all_laps:
            if lap.race_id in racelist:
                selected_race_laps.append(lap)
    # build one leaderboard row per pilot who started at least once
    leaderboard = []
    for pilot in self.rhDataObj.get_pilots():
        if USE_CURRENT:
            # locate the pilot's node in the current race and take its laps
            found_pilot = False
            node_index = 0
            laps = []
            for node_index, node_pilot in raceObj.node_pilots.items():
                if node_pilot and node_pilot.id == pilot.id and node_index < raceObj.num_nodes and len(raceObj.get_valid_laps()):
                    laps = raceObj.get_valid_laps()[node_index]
                    found_pilot = True
                    break
            if laps:
                if race_format and race_format.start_behavior == StartBehavior.FIRST_LAP:
                    total_laps = len(laps)
                else:
                    # first crossing is the holeshot, not a counted lap
                    total_laps = len(laps) - 1
            else:
                total_laps = 0
            if found_pilot:
                leaderboard.append({
                    'pilot_id': pilot.id,
                    'callsign': pilot.callsign,
                    'team_name': pilot.team,
                    'laps': total_laps,
                    'holeshots': None,
                    'starts': 1 if len(laps) > 0 else 0,
                    'node': node_index,
                    'current_laps': laps
                })
        else:
            # find hole shots
            holeshot_laps = []
            pilotnode = None
            total_laps = 0
            race_starts = 0
            for race in selected_races:
                if race_format:
                    this_race_format = race_format
                else:
                    # mixed-format scope: look up each race's own format
                    this_race_format = self.rhDataObj.get_raceFormat(race.format_id)
                pilotraces = selected_pilotraces[race.id]
                if len(pilotraces):
                    # all of this pilot's crossings across the selected races
                    pilot_crossings = []
                    for lap in selected_race_laps:
                        if lap.pilot_id == pilot.id:
                            pilot_crossings.append(lap)
                    for pilotrace in pilotraces:
                        if pilotrace.pilot_id == pilot.id:
                            pilotnode = pilotrace.node_index
                            race_laps = []
                            for lap in pilot_crossings:
                                if lap.pilotrace_id == pilotrace.id:
                                    race_laps.append(lap)
                            total_laps += len(race_laps)
                            if this_race_format and this_race_format.start_behavior == StartBehavior.FIRST_LAP:
                                if len(race_laps):
                                    race_starts += 1
                            else:
                                # first crossing is the holeshot: record it and
                                # exclude it from the lap count
                                if len(race_laps):
                                    holeshot_lap = race_laps[0]
                                    if holeshot_lap:
                                        holeshot_laps.append(holeshot_lap.id)
                                        race_starts += 1
                                        total_laps -= 1
                    # pilot_laps = crossings minus recorded holeshots
                    pilot_laps = []
                    if len(holeshot_laps):
                        for lap in selected_race_laps:
                            if lap.pilot_id == pilot.id and \
                                lap.id not in holeshot_laps:
                                pilot_laps.append(lap)
                    else:
                        pilot_laps = pilot_crossings
            if race_starts > 0:
                leaderboard.append({
                    'pilot_id': pilot.id,
                    'callsign': pilot.callsign,
                    'team_name': pilot.team,
                    'laps': total_laps,
                    'holeshots': holeshot_laps,
                    'starts': race_starts,
                    'node': pilotnode,
                    'pilot_crossings': pilot_crossings,
                    'pilot_laps': pilot_laps
                })
    # per-pilot statistics (totals, last/average/fastest lap, consecutives)
    for result_pilot in leaderboard:
        # Get the total race time for each pilot
        if USE_CURRENT:
            race_total = 0
            laps_total = 0
            for lap in result_pilot['current_laps']:
                race_total += lap['lap_time']
                if lap['lap_number']:
                    laps_total += lap['lap_time']
            result_pilot['total_time'] = race_total
            result_pilot['total_time_laps'] = laps_total
        else:
            result_pilot['total_time'] = 0
            for lap in result_pilot['pilot_crossings']:
                result_pilot['total_time'] += lap.lap_time
            result_pilot['total_time_laps'] = 0
            for lap in result_pilot['pilot_laps']:
                result_pilot['total_time_laps'] += lap.lap_time
        # Get the last lap for each pilot (current race only)
        if result_pilot['laps'] == 0:
            result_pilot['last_lap'] = None # Add zero if no laps completed
        else:
            if USE_CURRENT:
                result_pilot['last_lap'] = result_pilot['current_laps'][-1]['lap_time']
            else:
                result_pilot['last_lap'] = None
        # Get the average lap time for each pilot
        if result_pilot['laps'] == 0:
            result_pilot['average_lap'] = 0 # Add zero if no laps completed
        else:
            if USE_CURRENT:
                if race_format and race_format.start_behavior == StartBehavior.FIRST_LAP:
                    avg_lap = result_pilot['current_laps'][-1]['lap_time_stamp'] / len(result_pilot['current_laps'])
                else:
                    # exclude the holeshot from the average
                    avg_lap = (result_pilot['current_laps'][-1]['lap_time_stamp'] - result_pilot['current_laps'][0]['lap_time_stamp']) / (len(result_pilot['current_laps']) - 1)
            else:
                avg_lap = result_pilot['total_time_laps'] / result_pilot['laps']
            result_pilot['average_lap'] = avg_lap
        # Get the fastest lap time for each pilot
        if result_pilot['laps'] == 0:
            result_pilot['fastest_lap'] = 0 # Add zero if no laps completed
            result_pilot['fastest_lap_source'] = None
        else:
            if USE_CURRENT:
                if race_format and race_format.start_behavior == StartBehavior.FIRST_LAP:
                    timed_laps = result_pilot['current_laps']
                else:
                    # skip the holeshot (lap_number 0)
                    timed_laps = filter(lambda x : x['lap_number'], result_pilot['current_laps'])
                fast_lap = sorted(timed_laps, key=lambda val : val['lap_time'])[0]['lap_time']
                result_pilot['fastest_lap'] = fast_lap
                result_pilot['fastest_lap_source'] = None
            else:
                # scan for the minimum lap time (ties keep the later lap)
                fast_lap = None
                for lap in result_pilot['pilot_laps']:
                    if fast_lap:
                        if lap.lap_time <= fast_lap.lap_time:
                            fast_lap = lap
                    else:
                        fast_lap = lap
                if USE_HEAT:
                    result_pilot['fastest_lap_source'] = None
                else:
                    # record which round/heat produced the fastest lap
                    for race in selected_races:
                        if race.id == fast_lap.race_id:
                            result_pilot['fastest_lap_source'] = {
                                'round': race.round_id,
                                'heat': race.heat_id,
                                'note': heats_keyed[race.heat_id].note
                            }
                            break
                result_pilot['fastest_lap'] = fast_lap.lap_time
        # find best consecutive 3 laps
        if result_pilot['laps'] < 3:
            result_pilot['consecutives'] = None
            result_pilot['consecutives_source'] = None
        else:
            all_consecutives = []
            if USE_CURRENT:
                if race_format and race_format.start_behavior == StartBehavior.FIRST_LAP:
                    thisrace = result_pilot['current_laps']
                else:
                    # drop the holeshot before windowing
                    thisrace = result_pilot['current_laps'][1:]
                for i in range(len(thisrace) - 2):
                    all_consecutives.append({
                        'time': thisrace[i]['lap_time'] + thisrace[i+1]['lap_time'] + thisrace[i+2]['lap_time'],
                        'race_id': None,
                    })
            else:
                # build race lap store: consecutive windows never span races
                race_laps_by_id: Dict[int,List[Any]] = {}
                for race in selected_races:
                    race_laps_by_id[race.id] = []
                    for lap in result_pilot['pilot_laps']:
                        if lap.race_id == race.id:
                            race_laps_by_id[race.id].append(lap)
                for race in selected_races:
                    if len(race_laps_by_id[race.id]) >= 3:
                        for i in range(len(race_laps_by_id[race.id]) - 2):
                            all_consecutives.append({
                                'time': race_laps_by_id[race.id][i].lap_time + race_laps_by_id[race.id][i+1].lap_time + race_laps_by_id[race.id][i+2].lap_time,
                                'race_id': race.id
                            })
            # Get lowest not-none value (if any)
            if all_consecutives:
                # Sort consecutives
                all_consecutives.sort(key = lambda x: (x['time'] is None, x['time']))
                result_pilot['consecutives'] = all_consecutives[0]['time']
                if USE_CURRENT:
                    result_pilot['consecutives_source'] = None
                else:
                    source_race = selected_races_keyed[all_consecutives[0]['race_id']]
                    if source_race:
                        result_pilot['consecutives_source'] = {
                            'round': source_race.round_id,
                            'heat': source_race.heat_id,
                            'note': heats_keyed[source_race.heat_id].note
                        }
                    else:
                        result_pilot['consecutives_source'] = None
            else:
                result_pilot['consecutives'] = None
                result_pilot['consecutives_source'] = None
    # Combine leaderboard
    for result_pilot in leaderboard:
        # Clean up calc data
        if 'current_laps' in result_pilot:
            result_pilot.pop('current_laps')
        if 'holeshots' in result_pilot:
            result_pilot.pop('holeshots')
        if 'pilot_crossings' in result_pilot:
            result_pilot.pop('pilot_crossings')
        if 'pilot_laps' in result_pilot:
            result_pilot.pop('pilot_laps')
        # formatted output: keep raw values alongside display strings
        result_pilot['total_time_raw'] = result_pilot['total_time']
        result_pilot['total_time'] = RHUtils.time_format(result_pilot['total_time'], timeFormat)
        result_pilot['total_time_laps_raw'] = result_pilot['total_time_laps']
        result_pilot['total_time_laps'] = RHUtils.time_format(result_pilot['total_time_laps'], timeFormat)
        result_pilot['average_lap_raw'] = result_pilot['average_lap']
        result_pilot['average_lap'] = RHUtils.time_format(result_pilot['average_lap'], timeFormat)
        result_pilot['fastest_lap_raw'] = result_pilot['fastest_lap']
        result_pilot['fastest_lap'] = RHUtils.time_format(result_pilot['fastest_lap'], timeFormat)
        result_pilot['consecutives_raw'] = result_pilot['consecutives']
        result_pilot['consecutives'] = RHUtils.time_format(result_pilot['consecutives'], timeFormat)
        result_pilot['last_lap_raw'] = result_pilot['last_lap']
        result_pilot['last_lap'] = RHUtils.time_format(result_pilot['last_lap'], timeFormat)
    if race_format and race_format.start_behavior == StartBehavior.STAGGERED:
        # Sort by laps time (staggered starts: rank by lap time, not race time)
        leaderboard_by_race_time = copy.deepcopy(sorted(leaderboard, key = lambda x: (
            -x['laps'], # reverse lap count
            x['total_time_laps_raw'] if x['total_time_laps_raw'] and x['total_time_laps_raw'] > 0 else float('inf') # total time ascending except 0
        )))
        # determine ranking (ties share a position)
        last_rank: Union[int,str] = '-'
        last_rank_laps = 0
        last_rank_time = 0
        for i, row in enumerate(leaderboard_by_race_time, start=1):
            pos: Union[int,str] = i
            if last_rank_laps == row['laps'] and last_rank_time == row['total_time_laps_raw']:
                pos = last_rank
            last_rank = pos
            last_rank_laps = row['laps']
            last_rank_time = row['total_time_laps_raw']
            row['position'] = pos
            row['behind'] = leaderboard_by_race_time[0]['laps'] - row['laps']
    else:
        # Sort by race time
        leaderboard_by_race_time = copy.deepcopy(sorted(leaderboard, key = lambda x: (
            -x['laps'], # reverse lap count
            x['total_time_raw'] if x['total_time_raw'] and x['total_time_raw'] > 0 else float('inf') # total time ascending except 0
        )))
        # determine ranking (ties share a position)
        last_rank = '-'
        last_rank_laps = 0
        last_rank_time = 0
        for i, row in enumerate(leaderboard_by_race_time, start=1):
            pos = i
            if last_rank_laps == row['laps'] and last_rank_time == row['total_time_raw']:
                pos = last_rank
            last_rank = pos
            last_rank_laps = row['laps']
            last_rank_time = row['total_time_raw']
            row['position'] = pos
            row['behind'] = leaderboard_by_race_time[0]['laps'] - row['laps']
    # Sort by fastest laps
    leaderboard_by_fastest_lap = copy.deepcopy(sorted(leaderboard, key = lambda x: (
        x['fastest_lap_raw'] if x['fastest_lap_raw'] and x['fastest_lap_raw'] > 0 else float('inf'), # fastest lap
        x['total_time_raw'] if x['total_time_raw'] and x['total_time_raw'] > 0 else float('inf') # total time
    )))
    # determine ranking (ties share a position)
    last_rank = '-'
    last_rank_fastest_lap = 0
    for i, row in enumerate(leaderboard_by_fastest_lap, start=1):
        pos = i
        if last_rank_fastest_lap == row['fastest_lap_raw']:
            pos = last_rank
        last_rank = pos
        last_rank_fastest_lap = row['fastest_lap_raw']
        row['position'] = pos
    # Sort by consecutive laps
    leaderboard_by_consecutives = copy.deepcopy(sorted(leaderboard, key = lambda x: (
        x['consecutives_raw'] if x['consecutives_raw'] and x['consecutives_raw'] > 0 else float('inf'), # fastest consecutives
        -x['laps'], # lap count
        x['total_time_raw'] if x['total_time_raw'] and x['total_time_raw'] > 0 else float('inf') # total time
    )))
    # determine ranking (ties share a position; pilots with < 3 laps
    # only tie when laps and total time also match)
    last_rank = '-'
    last_rank_laps = 0
    last_rank_time = 0
    last_rank_consecutive = 0
    for i, row in enumerate(leaderboard_by_consecutives, start=1):
        pos = i
        if last_rank_consecutive == row['consecutives_raw']:
            if row['laps'] < 3:
                if last_rank_laps == row['laps'] and last_rank_time == row['total_time_raw']:
                    pos = last_rank
            else:
                pos = last_rank
        last_rank = pos
        last_rank_laps = row['laps']
        last_rank_time = row['total_time_raw']
        last_rank_consecutive = row['consecutives_raw']
        row['position'] = pos
    leaderboard_output: Dict[str,Any] = {
        'by_race_time': leaderboard_by_race_time,
        'by_fastest_lap': leaderboard_by_fastest_lap,
        'by_consecutives': leaderboard_by_consecutives
    }
    # pick the primary leaderboard for the format's win condition
    if race_format:
        builder = LEADERBOARD_BUILDERS.get(race_format.win_condition, None)
        if builder:
            primary_leaderboard = builder(leaderboard_output)
        else:
            primary_leaderboard = 'by_race_time'
        leaderboard_output['meta'] = {
            'primary_leaderboard': primary_leaderboard,
            'win_condition': race_format.win_condition,
            'team_racing_mode': race_format.team_racing_mode,
            'start_behavior': race_format.start_behavior,
        }
    else:
        leaderboard_output['meta'] = {
            'primary_leaderboard': 'by_race_time',
            'win_condition': WinCondition.NONE,
            'team_racing_mode': False,
            'start_behavior': StartBehavior.HOLESHOT,
        }
    return leaderboard_output
@cachetools.cachedmethod(cache=lambda self:self.resultsCache._team_cache, key=_current_race_cache_key, lock=lambda self:self.resultsCache._team_cache_lock)
def calc_team_leaderboard(self, current_race: RHRace):
    '''Calculates and returns team-racing info.

    Aggregates the individual 'by_race_time' leaderboard into per-team
    totals, then builds three team rankings:
      - 'by_race_time':        most laps, then lowest combined average lap
      - 'by_avg_fastest_lap':  contribution ratio, then avg fastest lap, then laps
      - 'by_avg_consecutives': contribution ratio, then avg consecutives, then laps
    The returned dict also carries a 'meta' entry with the primary
    leaderboard name (chosen from the win condition), the win condition,
    and the raw per-team totals.  Results are memoized per race via the
    cachedmethod decorator.
    '''
    race_format = current_race.format
    current_race_results = self.calc_current_race_leaderboard(current_race)
    results = current_race_results['by_race_time']
    # accumulate per-team totals from the individual results
    teams: Dict[str,Any] = {}
    for line in results:
        # a pilot only "contributes" to the team averages once they have
        # enough laps for the active win condition
        contributing = 0
        if race_format and race_format.win_condition == WinCondition.FASTEST_3_CONSECUTIVE:
            if line['laps'] >= 3:
                contributing = 1
        else:
            # race_format.win_condition == WinCondition.MOST_LAPS or \
            # race_format.win_condition == WinCondition.FIRST_TO_LAP_X or \
            # race_format.win_condition == WinCondition.FASTEST_LAP:
            if line['laps'] > 0:
                contributing = 1
        if line['team_name'] in teams:
            teams[line['team_name']]['contributing'] += contributing
            teams[line['team_name']]['members'] += 1
            teams[line['team_name']]['laps'] += line['laps']
            teams[line['team_name']]['total_time_raw'] += line['total_time_raw']
            # only sum times that are present (zero/None means "no laps yet")
            if line['average_lap_raw']:
                teams[line['team_name']]['combined_average_lap_raw'] += line['average_lap_raw']
            if line['fastest_lap_raw']:
                teams[line['team_name']]['combined_fastest_lap_raw'] += line['fastest_lap_raw']
            if line['consecutives_raw']:
                teams[line['team_name']]['combined_consecutives_raw'] += line['consecutives_raw']
        else:
            # first member seen for this team; start its totals
            teams[line['team_name']] = {}
            teams[line['team_name']]['contributing'] = contributing
            teams[line['team_name']]['members'] = 1
            teams[line['team_name']]['laps'] = line['laps']
            teams[line['team_name']]['total_time_raw'] = line['total_time_raw']
            teams[line['team_name']]['combined_average_lap_raw'] = line['average_lap_raw']
            teams[line['team_name']]['combined_fastest_lap_raw'] = line['fastest_lap_raw']
            teams[line['team_name']]['combined_consecutives_raw'] = line['consecutives_raw']
    # convert dict to list
    leaderboard = []
    for team in teams:
        contribution_amt = float(teams[team]['contributing']) / teams[team]['members']
        # averages are taken over contributing pilots only
        average_lap_raw = 0
        average_fastest_lap_raw = 0
        average_consecutives_raw = 0
        if teams[team]['contributing']:
            if teams[team]['combined_average_lap_raw']:
                average_lap_raw = float(teams[team]['combined_average_lap_raw']) / teams[team]['contributing']
            if teams[team]['combined_fastest_lap_raw']:
                average_fastest_lap_raw = float(teams[team]['combined_fastest_lap_raw']) / teams[team]['contributing']
            if teams[team]['combined_consecutives_raw']:
                average_consecutives_raw = float(teams[team]['combined_consecutives_raw']) / teams[team]['contributing']
        time_format = self.rhDataObj.get_option('timeFormat')
        leaderboard.append({
            'name': team,
            'contributing': teams[team]['contributing'],
            'members': teams[team]['members'],
            'contribution_amt': contribution_amt,
            'laps': teams[team]['laps'],
            'total_time_raw': teams[team]['total_time_raw'],
            'average_lap_raw': average_lap_raw,
            'average_fastest_lap_raw': average_fastest_lap_raw,
            'average_consecutives_raw': average_consecutives_raw,
            'total_time': RHUtils.time_format(teams[team]['total_time_raw'], time_format),
            'average_lap': RHUtils.time_format(average_lap_raw, time_format),
            'average_fastest_lap': RHUtils.time_format(average_fastest_lap_raw, time_format),
            'average_consecutives': RHUtils.time_format(average_consecutives_raw, time_format),
        })
    # sort race_time
    leaderboard_by_race_time = copy.deepcopy(sorted(leaderboard, key = lambda x: (
        -x['laps'],
        x['average_lap_raw'] if x['average_lap_raw'] > 0 else float('inf'),
    )))
    # determine ranking; tied rows share the previous row's position
    last_rank: Union[int,str] = '-'
    last_rank_laps = 0
    last_rank_time = 0
    for i, row in enumerate(leaderboard_by_race_time, start=1):
        pos: Union[int,str] = i
        if last_rank_laps == row['laps'] and last_rank_time == row['average_lap_raw']:
            pos = last_rank
        last_rank = pos
        last_rank_laps = row['laps']
        last_rank_time = row['average_lap_raw']
        row['position'] = pos
    # sort fastest lap
    leaderboard_by_fastest_lap = copy.deepcopy(sorted(leaderboard, key = lambda x: (
        -x['contribution_amt'],
        x['average_fastest_lap_raw'] if x['average_fastest_lap_raw'] > 0 else float('inf'),
        -x['laps'],
    )))
    # determine ranking; tied rows share the previous row's position
    last_rank = '-'
    last_rank_contribution_amt = 0
    last_rank_fastest_lap = 0
    for i, row in enumerate(leaderboard_by_fastest_lap, start=1):
        pos = i
        if row['contribution_amt'] == last_rank_contribution_amt:
            if last_rank_fastest_lap == row['average_fastest_lap_raw']:
                pos = last_rank
        last_rank = pos
        last_rank_fastest_lap = row['average_fastest_lap_raw']
        row['position'] = pos
    # sort consecutives
    leaderboard_by_consecutives = copy.deepcopy(sorted(leaderboard, key = lambda x: (
        -x['contribution_amt'],
        x['average_consecutives_raw'] if x['average_consecutives_raw'] > 0 else float('inf'),
        -x['laps'],
    )))
    # determine ranking; tied rows share the previous row's position
    last_rank = '-'
    last_rank_contribution_amt = 0
    last_rank_laps = 0
    last_rank_time = 0
    last_rank_consecutive = 0
    for i, row in enumerate(leaderboard_by_consecutives, start=1):
        pos = i
        if row['contribution_amt'] == last_rank_contribution_amt:
            if last_rank_consecutive == row['average_consecutives_raw']:
                # with fewer than 3 laps the consecutives time is not decisive,
                # so also require equal laps and total time for a shared rank
                if row['laps'] < 3:
                    if last_rank_laps == row['laps'] and last_rank_time == row['total_time_raw']:
                        pos = last_rank
                else:
                    pos = last_rank
        last_rank = pos
        last_rank_laps = row['laps']
        last_rank_time = row['total_time_raw']
        last_rank_consecutive = row['average_consecutives_raw']
        row['position'] = pos
    leaderboard_output: Dict[str,Any] = {
        'by_race_time': leaderboard_by_race_time,
        'by_avg_fastest_lap': leaderboard_by_fastest_lap,
        'by_avg_consecutives': leaderboard_by_consecutives
    }
    if race_format:
        if race_format.win_condition == WinCondition.FASTEST_3_CONSECUTIVE:
            primary_leaderboard = 'by_avg_consecutives'
        elif race_format.win_condition == WinCondition.FASTEST_LAP:
            primary_leaderboard = 'by_avg_fastest_lap'
        else:
            # WinCondition.NONE
            # WinCondition.MOST_LAPS
            # WinCondition.FIRST_TO_LAP_X
            primary_leaderboard = 'by_race_time'
        leaderboard_output['meta'] = {
            'primary_leaderboard': primary_leaderboard,
            'win_condition': race_format.win_condition,
            'teams': teams
        }
    else:
        # no format configured; fall back to race-time ordering
        leaderboard_output['meta'] = {
            'primary_leaderboard': 'by_race_time',
            'win_condition': WinCondition.NONE,
            'teams': teams
        }
    return leaderboard_output
def check_win_condition_result(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Route to the win-condition checker matching the current race format.

    Returns the selected checker's status dict, or None when no format is
    set or the win condition has no associated checker.
    '''
    race_format = raceObj.format
    if not race_format:
        return None
    if race_format.team_racing_mode:
        dispatch = {
            WinCondition.MOST_PROGRESS: lambda: self.check_win_team_laps_and_time(raceObj, interfaceObj, **kwargs),
            WinCondition.MOST_LAPS: lambda: self.check_win_team_most_laps(raceObj, interfaceObj, **kwargs),
            WinCondition.FIRST_TO_LAP_X: lambda: self.check_win_team_first_to_x(raceObj, interfaceObj, **kwargs),
            WinCondition.FASTEST_LAP: lambda: self.check_win_team_fastest_lap(raceObj, **kwargs),
            WinCondition.FASTEST_3_CONSECUTIVE: lambda: self.check_win_team_fastest_consecutive(raceObj, **kwargs),
            WinCondition.MOST_LAPS_OVERTIME: lambda: self.check_win_team_laps_and_overtime(raceObj, interfaceObj, **kwargs),
        }
    else:
        dispatch = {
            WinCondition.MOST_PROGRESS: lambda: self.check_win_laps_and_time(raceObj, interfaceObj, **kwargs),
            WinCondition.MOST_LAPS: lambda: self.check_win_most_laps(raceObj, interfaceObj, **kwargs),
            WinCondition.FIRST_TO_LAP_X: lambda: self.check_win_first_to_x(raceObj, interfaceObj, **kwargs),
            WinCondition.FASTEST_LAP: lambda: self.check_win_fastest_lap(raceObj, **kwargs),
            WinCondition.FASTEST_3_CONSECUTIVE: lambda: self.check_win_fastest_consecutive(raceObj, **kwargs),
            WinCondition.MOST_LAPS_OVERTIME: lambda: self.check_win_laps_and_overtime(raceObj, interfaceObj, **kwargs),
        }
    handler = dispatch.get(race_format.win_condition)
    return handler() if handler else None
def check_win_laps_and_time(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Win check for MOST_PROGRESS: most laps, ties broken by total time.

    Returns a dict with a 'status' (WinStatus) and, when the win is
    declared, the winning leaderboard row under 'data'.  Passing
    forced=True in kwargs forces a final determination.
    '''
    # if racing is stopped, all pilots have completed last lap after time expired,
    # or a forced determination condition, make a final call
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs:
        leaderboard = raceObj.results['by_race_time']
        if len(leaderboard) > 1:
            lead_lap = leaderboard[0]['laps']
            if lead_lap > 0: # must have at least one lap
                # if race stopped then don't wait for crossing to finish
                if raceObj.race_status != RaceStatus.DONE:
                    # prevent win declaration if there are active crossings coming onto lead lap
                    for line in leaderboard[1:]:
                        if line['laps'] >= lead_lap - 1:
                            node = interfaceObj.nodes[line['node']]
                            if node.pass_crossing_flag:
                                logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                                return {
                                    'status': WinStatus.PENDING_CROSSING
                                }
                        else:
                            # lower results no longer need checked
                            break
                # check for tie
                if leaderboard[1]['laps'] == lead_lap:
                    if leaderboard[1]['total_time_raw'] == leaderboard[0]['total_time_raw']:
                        logger.info('Race tied at {0}/{1}'.format(leaderboard[0]['laps'], leaderboard[0]['total_time']))
                        return {
                            'status': WinStatus.TIE
                        }
                # no tie or active crossings; declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': leaderboard[0]
                }
    elif raceObj.race_status == RaceStatus.RACING and raceObj.timer_running == False:
        # time has ended; check if winning is assured
        leaderboard = raceObj.results['by_race_time']
        if len(leaderboard) > 1:
            lead_lap = leaderboard[0]['laps']
            if lead_lap > 0: # must have at least one lap
                # prevent win declaration if there are active crossings coming onto lead lap
                for line in leaderboard[1:]:
                    if line['laps'] >= lead_lap - 1:
                        node = interfaceObj.nodes[line['node']]
                        if node.pass_crossing_flag:
                            logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                            return {
                                'status': WinStatus.PENDING_CROSSING
                            }
                    else:
                        # lower results no longer need checked
                        break
                # check if any pilot below lead can potentially pass or tie
                pilots_can_pass = 0
                for line in leaderboard[1:]:
                    if line['laps'] >= lead_lap:
                        # pilot is on lead lap
                        node_index = line['node']
                        if raceObj.get_node_finished_flag(node_index) == False:
                            pilots_can_pass += 1
                    else:
                        # lower results no longer need checked
                        break
                # nobody can catch the leader; make the final call now
                if pilots_can_pass == 0:
                    return self.check_win_laps_and_time(raceObj, interfaceObj, forced=True, **kwargs)
    return {
        'status': WinStatus.NONE
    }
def check_win_most_laps(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Win check for MOST_LAPS: most laps wins outright; equal laps is a tie.

    Returns a dict with 'status' (WinStatus) and, when declared, the
    winning leaderboard row under 'data'.  Passing forced=True in kwargs
    forces a final determination.
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        leaderboard = raceObj.results['by_race_time']
        if len(leaderboard) > 1:
            lead_lap = leaderboard[0]['laps']
            if lead_lap > 0: # must have at least one lap
                # if race stopped then don't wait for crossing to finish
                if raceObj.race_status != RaceStatus.DONE:
                    # check if there are active crossings coming onto lead lap
                    for line in leaderboard[1:]:
                        if line['laps'] >= lead_lap - 1:
                            node = interfaceObj.nodes[line['node']]
                            if node.pass_crossing_flag:
                                logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                                return {
                                    'status': WinStatus.PENDING_CROSSING
                                }
                        else:
                            # lower results no longer need checked
                            break
                # check for tie
                if leaderboard[1]['laps'] == lead_lap:
                    logger.info('Race tied at %d laps', leaderboard[1]['laps'])
                    return {
                        'status': WinStatus.TIE
                    }
                # no tie or active crossings; declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': leaderboard[0]
                }
    elif raceObj.race_status == RaceStatus.RACING and raceObj.timer_running == False:
        # time has ended; check if winning is assured
        leaderboard = raceObj.results['by_race_time']
        if len(leaderboard) > 1:
            lead_lap = leaderboard[0]['laps']
            if lead_lap > 0: # must have at least one lap
                # check if there are active crossings coming onto lead lap
                for line in leaderboard[1:]:
                    if line['laps'] >= lead_lap - 1:
                        node = interfaceObj.nodes[line['node']]
                        if node.pass_crossing_flag:
                            logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                            return {
                                'status': WinStatus.PENDING_CROSSING
                            }
                    else:
                        # lower results no longer need checked
                        break
                # check if any pilot below lead can potentially pass or tie
                pilots_can_pass = 0
                pilots_can_tie = 0
                pilots_tied = 0
                for line in leaderboard[1:]:
                    node_index = line['node']
                    if line['laps'] >= lead_lap: # pilot is on lead lap
                        pilots_tied += 1
                        if raceObj.get_node_finished_flag(node_index) == False:
                            pilots_can_pass += 1
                    elif line['laps'] >= lead_lap - 1: # pilot can reach lead lap
                        if raceObj.get_node_finished_flag(node_index) == False:
                            pilots_can_tie += 1
                    else:
                        # lower results no longer need checked
                        break
                # call race if possible
                if pilots_can_pass == 0:
                    if pilots_can_tie == 0 and pilots_tied == 0:
                        return self.check_win_most_laps(raceObj, interfaceObj, forced=True, **kwargs)
                    elif pilots_tied > 0: # add "and pilots_can_tie == 0" to wait for 3+-way?
                        # leader already finished while tied: settle the race now
                        node_index = leaderboard[0]['node']
                        if raceObj.get_node_finished_flag(node_index) == True:
                            return self.check_win_most_laps(raceObj, interfaceObj, forced=True, **kwargs)
    return {
        'status': WinStatus.NONE
    }
def check_win_laps_and_overtime(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Win check for MOST_LAPS_OVERTIME: most laps, with ties extending
    into overtime until a post-time crossing settles the race.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    '''
    time_expired = raceObj.race_status == RaceStatus.RACING and not raceObj.timer_running
    if not (time_expired or raceObj.race_status == RaceStatus.DONE or 'at_finish' in kwargs):
        return {
            'status': WinStatus.NONE
        }
    leaderboard = raceObj.results['by_race_time']
    if not leaderboard:
        return {
            'status': WinStatus.NONE
        }
    race_format = raceObj.format
    cutoff_ms = secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec)
    if any(entry['total_time_raw'] > cutoff_ms for entry in leaderboard):
        # someone has already crossed after the time limit: settle on laps+time
        return self.check_win_laps_and_time(raceObj, interfaceObj, **kwargs)
    win_status = self.check_win_most_laps(raceObj, interfaceObj, forced=True, **kwargs)
    if win_status['status'] == WinStatus.TIE and raceObj.race_status == RaceStatus.RACING:
        # a tie while still racing extends into overtime rather than ending
        win_status['status'] = WinStatus.OVERTIME
    return win_status
def check_win_first_to_x(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Win check for FIRST_TO_LAP_X: first pilot to reach the configured
    lap count wins; equal laps and equal total time is a tie.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    '''
    race_format = raceObj.format
    if not race_format.number_laps_win: # a threshold of 0 disables this mode
        return {
            'status': WinStatus.NONE
        }
    leaderboard = raceObj.results['by_race_time']
    if len(leaderboard) > 1:
        lead_lap = leaderboard[0]['laps']
        if lead_lap >= race_format.number_laps_win: # lead lap passes win threshold
            # if race stopped then don't wait for crossing to finish
            if raceObj.race_status != RaceStatus.DONE:
                # hold off while a close competitor is mid-gate; they could still tie
                for entry in leaderboard[1:]:
                    if entry['laps'] < lead_lap - 1:
                        # remaining entries are further behind; stop scanning
                        break
                    node = interfaceObj.nodes[entry['node']]
                    if node.pass_crossing_flag:
                        logger.info('Waiting for node {0} crossing to decide winner'.format(entry['node']+1))
                        return {
                            'status': WinStatus.PENDING_CROSSING
                        }
            # tie requires matching laps AND matching total time
            runner_up = leaderboard[0]
            second = leaderboard[1]
            if second['laps'] == lead_lap and second['total_time_raw'] == runner_up['total_time_raw']:
                logger.info('Race tied at {0}/{1}'.format(runner_up['laps'], runner_up['total_time']))
                return {
                    'status': WinStatus.TIE
                }
            # no active crossings; declare winner
            return {
                'status': WinStatus.DECLARED,
                'data': runner_up
            }
    return {
        'status': WinStatus.NONE
    }
def check_win_fastest_lap(self, raceObj: RHRace, **kwargs):
    '''Win check for FASTEST_LAP: best single lap time wins.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    While still racing with 'at_finish' in kwargs, instead returns
    'max_consideration': an upper bound (ms) on how much longer the
    outcome could still change.
    '''
    race_complete = (raceObj.race_status == RaceStatus.DONE
                     or raceObj.check_all_nodes_finished()
                     or 'forced' in kwargs)
    if race_complete:
        leaderboard = raceObj.results['by_fastest_lap']
        if len(leaderboard) > 1:
            best = leaderboard[0]['fastest_lap_raw']
            if best > 0: # at least one recorded lap required
                if leaderboard[1]['fastest_lap_raw'] == best:
                    logger.info('Race tied at %s', leaderboard[1]['fastest_lap'])
                    return {
                        'status': WinStatus.TIE
                    }
                return {
                    'status': WinStatus.DECLARED,
                    'data': leaderboard[0]
                }
    elif 'at_finish' in kwargs:
        leaderboard = raceObj.results['by_fastest_lap']
        if len(leaderboard) > 1:
            best = leaderboard[0]['fastest_lap_raw']
            if best > 0: # at least one recorded lap required
                race_format = raceObj.format
                race_duration_ms = secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec)
                max_ttc = 0
                for node_idx in raceObj.node_passes:
                    passes = raceObj.node_passes[node_idx]
                    if len(passes) > 0:
                        # time this node would still need to match the best lap,
                        # measured from its most recent gate pass
                        remaining = race_duration_ms - passes[-1]['lap_time_stamp']
                        max_ttc = max(max_ttc, best - remaining)
                return {
                    'status': WinStatus.NONE,
                    'max_consideration': min(best, max_ttc)
                }
    return {
        'status': WinStatus.NONE
    }
def check_win_fastest_consecutive(self, raceObj: RHRace, **kwargs):
    '''Win check for FASTEST_3_CONSECUTIVE: best consecutive-laps time wins.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    While racing with 'at_finish' in kwargs, instead returns
    'max_consideration': an upper bound (ms) on how much longer the
    outcome could still change.
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        leaderboard = raceObj.results['by_consecutives']
        if len(leaderboard) > 1:
            fast_lap = leaderboard[0]['consecutives_raw']
            # NOTE(review): 'fast_lap' holds a consecutives *time*, so '> 3'
            # is nearly always true once a time exists; the inline comment
            # suggests a lap-count check may have been intended -- confirm
            if fast_lap and fast_lap > 3: # must have at least 3 laps
                # check for tie
                if leaderboard[1]['consecutives_raw'] == fast_lap:
                    logger.info('Race tied at %s', leaderboard[1]['consecutives'])
                    return {
                        'status': WinStatus.TIE
                    }
                # declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': leaderboard[0]
                }
    elif 'at_finish' in kwargs:
        leaderboard = raceObj.results['by_consecutives']
        if len(leaderboard) > 1:
            fast_consecutives = leaderboard[0]['consecutives_raw']
            if fast_consecutives and fast_consecutives > 0: # must have recorded time (otherwise impossible to set bounds)
                max_node_consideration = 0
                for node in raceObj.node_passes:
                    laps = raceObj.node_passes[node]
                    if len(laps) >= 2:
                        # margin between the leader's consecutive time and this
                        # node's two most recent laps
                        last_2_laps = laps[-1]['lap_time'] + laps[-2]['lap_time']
                        max_node_consideration = max(max_node_consideration, (fast_consecutives - last_2_laps))
                return {
                    'status': WinStatus.NONE,
                    'max_consideration': max_node_consideration
                }
    return {
        'status': WinStatus.NONE
    }
def check_win_team_laps_and_time(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Team win check for MOST_PROGRESS: team laps, ties broken by total time.

    Returns a dict with 'status' (WinStatus) and, when declared, the
    winning team row under 'data'.  forced=True forces a final call;
    overtime=True allows an early call once any leading-team member has
    finished.  May also return a 'max_consideration' time bound (ms).
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        team_info = self.calc_team_leaderboard(raceObj)
        team_leaderboard = team_info['by_race_time']
        individual_leaderboard = raceObj.results['by_race_time']
        if len(team_leaderboard) > 1 and len(individual_leaderboard):
            lead_laps = team_leaderboard[0]['laps']
            lead_lap_time = team_leaderboard[0]['total_time_raw']
            if lead_laps > 0: # must have at least one lap
                # if race stopped then don't wait for crossing to finish
                if raceObj.race_status != RaceStatus.DONE:
                    # prevent win declaration if there are active crossings
                    for line in individual_leaderboard:
                        if team_info['meta']['teams'][line['team_name']]['laps'] >= lead_laps - 1: # check for deterministic crossing
                            node = interfaceObj.nodes[line['node']]
                            if node.pass_crossing_flag:
                                logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                                return {
                                    'status': WinStatus.PENDING_CROSSING
                                }
                # check for tie
                if team_leaderboard[1]['laps'] == lead_laps:
                    if team_leaderboard[1]['total_time_raw'] == team_leaderboard[0]['total_time_raw']:
                        logger.info('Race tied at {0}/{1}'.format(team_leaderboard[0]['laps'], team_leaderboard[0]['total_time']))
                        return {
                            'status': WinStatus.TIE
                        }
                # no tie or active crossings; declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': team_leaderboard[0]
                }
    elif raceObj.race_status == RaceStatus.RACING and raceObj.timer_running == False:
        # time has ended; check if winning is assured
        team_info = self.calc_team_leaderboard(raceObj)
        team_leaderboard = team_info['by_race_time']
        individual_leaderboard = raceObj.results['by_race_time']
        if len(team_leaderboard) > 1 and len(individual_leaderboard):
            lead_laps = team_leaderboard[0]['laps']
            lead_lap_time = team_leaderboard[0]['total_time_raw']
            if lead_laps > 0: # must have at least one lap
                # prevent win declaration if there are active crossings
                for line in individual_leaderboard:
                    if team_info['meta']['teams'][line['team_name']]['laps'] >= lead_laps - 1: # check for deterministic crossing
                        node = interfaceObj.nodes[line['node']]
                        if node.pass_crossing_flag:
                            logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                            return {
                                'status': WinStatus.PENDING_CROSSING
                            }
                # check if team can potentially pass or tie
                teams_can_pass = 0
                # count finished pilots per team
                team_members_finished = {}
                for line in individual_leaderboard:
                    node_index = line['node']
                    team = line['team_name']
                    if team not in team_members_finished:
                        team_members_finished[team] = 0
                    if raceObj.get_node_finished_flag(node_index):
                        team_members_finished[team] += 1
                leader_has_finished = team_members_finished[team_leaderboard[0]['name']] == team_leaderboard[0]['members']
                max_consideration = 0
                if 'overtime' in kwargs:
                    # in overtime, any finished member of the leading team settles it
                    if team_members_finished[team_leaderboard[0]['name']]:
                        return self.check_win_team_laps_and_time(raceObj, interfaceObj, forced=True, **kwargs)
                for line in team_leaderboard[1:]:
                    # each unfinished member could still add one more lap
                    max_potential_laps = line['laps'] + line['members'] - team_members_finished[line['name']]
                    if lead_laps <= max_potential_laps:
                        teams_can_pass += 1
                    elif leader_has_finished:
                        time_to_complete = (lead_lap_time - line['total_time_raw']) * (line['members'] - team_members_finished[line['name']])
                        max_consideration = max(max_consideration, time_to_complete)
                if teams_can_pass == 0:
                    return self.check_win_team_laps_and_time(raceObj, interfaceObj, forced=True, **kwargs)
                elif leader_has_finished:
                    return {
                        'status': WinStatus.NONE,
                        'max_consideration': max_consideration
                    }
    return {
        'status': WinStatus.NONE
    }
def check_win_team_most_laps(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Team win check for MOST_LAPS: most combined team laps wins.

    Returns a dict with 'status' (WinStatus) and, when declared, the
    winning team row under 'data'.  Assured outcomes delegate the final
    call to check_win_team_laps_and_time.
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        team_info = self.calc_team_leaderboard(raceObj)
        team_leaderboard = team_info['by_race_time']
        individual_leaderboard = raceObj.results['by_race_time']
        if len(team_leaderboard) > 1 and len(individual_leaderboard):
            lead_laps = team_leaderboard[0]['laps']
            if lead_laps > 0: # must have at least one lap
                # if race stopped then don't wait for crossing to finish
                if raceObj.race_status != RaceStatus.DONE:
                    # prevent win declaration if there are active crossings
                    for line in individual_leaderboard:
                        if team_info['meta']['teams'][line['team_name']]['laps'] >= lead_laps - 1: # check for deterministic crossing
                            node = interfaceObj.nodes[line['node']]
                            if node.pass_crossing_flag:
                                logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                                return {
                                    'status': WinStatus.PENDING_CROSSING
                                }
                # check for tie
                if team_leaderboard[1]['laps'] == lead_laps:
                    logger.info('Race tied at %d laps', team_leaderboard[1]['laps'])
                    return {
                        'status': WinStatus.TIE
                    }
                # no tie or active crossings; declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': team_leaderboard[0]
                }
    elif raceObj.race_status == RaceStatus.RACING and raceObj.timer_running == False:
        # time has ended; check if winning is assured
        team_info = self.calc_team_leaderboard(raceObj)
        team_leaderboard = team_info['by_race_time']
        individual_leaderboard = raceObj.results['by_race_time']
        if len(team_leaderboard) > 1 and len(individual_leaderboard):
            lead_laps = team_leaderboard[0]['laps']
            if lead_laps > 0: # must have at least one lap
                # prevent win declaration if there are active crossings
                for line in individual_leaderboard:
                    if team_info['meta']['teams'][line['team_name']]['laps'] >= lead_laps - 1: # check for deterministic crossing
                        node = interfaceObj.nodes[line['node']]
                        if node.pass_crossing_flag:
                            logger.info('Waiting for node {0} crossing to decide winner'.format(line['node']+1))
                            return {
                                'status': WinStatus.PENDING_CROSSING
                            }
                # check if team can potentially pass or tie
                # count finished pilots per team
                team_members_finished = {}
                for line in individual_leaderboard:
                    node_index = line['node']
                    team = line['team_name']
                    if team not in team_members_finished:
                        team_members_finished[team] = 0
                    if raceObj.get_node_finished_flag(node_index):
                        team_members_finished[team] += 1
                teams_can_pass = 0
                teams_can_tie = 0
                teams_tied = 0
                for line in team_leaderboard[1:]:
                    # each unfinished member could still add one more lap
                    max_potential_laps = line['laps'] + line['members'] - team_members_finished[line['name']]
                    if lead_laps == line['laps']:
                        teams_tied += 1
                    if lead_laps < max_potential_laps:
                        teams_can_pass += 1
                    elif lead_laps == max_potential_laps:
                        teams_can_tie += 1
                # call race if possible
                if teams_can_pass == 0:
                    if teams_can_tie == 0 and teams_tied == 0:
                        return self.check_win_team_laps_and_time(raceObj, interfaceObj, forced=True)
                    elif teams_tied > 0: # add "and teams_can_tie == 0" to wait for 3+-way?
                        # leading team fully finished while tied: settle now
                        leading_team = team_leaderboard[0]
                        if team_members_finished[leading_team['name']] == leading_team['members']:
                            return self.check_win_team_laps_and_time(raceObj, interfaceObj, forced=True)
    return {
        'status': WinStatus.NONE
    }
def check_win_team_laps_and_overtime(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Team win check for MOST_LAPS_OVERTIME: most team laps, with ties
    extending into overtime until a post-time crossing settles the race.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    '''
    time_expired = raceObj.race_status == RaceStatus.RACING and not raceObj.timer_running
    if time_expired or raceObj.race_status == RaceStatus.DONE or 'at_finish' in kwargs:
        leaderboard = raceObj.results['by_race_time']
        if leaderboard:
            race_format = raceObj.format
            cutoff_ms = secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec)
            if any(entry['total_time_raw'] > cutoff_ms for entry in leaderboard):
                # someone has crossed after the time limit: settle on laps+time
                return self.check_win_team_laps_and_time(raceObj, interfaceObj, overtime=True, **kwargs)
            win_status = self.check_win_team_most_laps(raceObj, interfaceObj, forced=True, **kwargs)
            if win_status['status'] == WinStatus.TIE and raceObj.race_status == RaceStatus.RACING:
                # a tie while still racing extends into overtime rather than ending
                win_status['status'] = WinStatus.OVERTIME
            return win_status
    return {
        'status': WinStatus.NONE
    }
def check_win_team_first_to_x(self, raceObj: RHRace, interfaceObj, **kwargs):
    '''Team win check for FIRST_TO_LAP_X: first team to reach the
    configured combined lap count wins; equal laps is a tie.

    Returns a dict with 'status' (WinStatus) and, when declared, 'data'.
    '''
    race_format = raceObj.format
    if not race_format.number_laps_win: # a threshold of 0 disables this mode
        return {
            'status': WinStatus.NONE
        }
    team_leaderboard = self.calc_team_leaderboard(raceObj)['by_race_time']
    individual_leaderboard = raceObj.results['by_race_time']
    if len(team_leaderboard) > 1 and len(individual_leaderboard):
        lead_lap = team_leaderboard[0]['laps']
        if lead_lap >= race_format.number_laps_win: # lead lap passes win threshold
            # if race stopped then don't wait for crossing to finish
            if raceObj.race_status != RaceStatus.DONE:
                # any pilot mid-gate could still change team totals; wait
                for entry in individual_leaderboard:
                    node = interfaceObj.nodes[entry['node']]
                    if node.pass_crossing_flag:
                        logger.info('Waiting for node {0} crossing to decide winner'.format(entry['node']+1))
                        return {
                            'status': WinStatus.PENDING_CROSSING
                        }
            if team_leaderboard[1]['laps'] == lead_lap:
                logger.info('Race tied at %d laps', team_leaderboard[1]['laps'])
                return {
                    'status': WinStatus.TIE
                }
            # no active crossings; declare winner
            return {
                'status': WinStatus.DECLARED,
                'data': team_leaderboard[0]
            }
    return {
        'status': WinStatus.NONE
    }
def check_win_team_fastest_lap(self, raceObj:RHRace, **kwargs):
    '''Team win check for FASTEST_LAP: best team-average fastest lap wins.

    Returns a dict with 'status' (WinStatus) and, when declared, the
    winning team row under 'data'.  While racing with 'at_finish' in
    kwargs, instead returns 'max_consideration': an upper bound (ms) on
    how much longer the outcome could still change.
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        team_leaderboard = self.calc_team_leaderboard(raceObj)['by_avg_fastest_lap']
        if len(team_leaderboard) > 1:
            if team_leaderboard[0]['laps'] > 0: # must have at least one lap
                # check for tie on all three ranking keys
                # (bug fix: the laps term previously compared
                # team_leaderboard[1] against itself, so it was always true)
                if team_leaderboard[1]['contribution_amt'] == team_leaderboard[0]['contribution_amt'] and \
                    team_leaderboard[1]['average_fastest_lap_raw'] == team_leaderboard[0]['average_fastest_lap_raw'] and \
                    team_leaderboard[1]['laps'] == team_leaderboard[0]['laps']:
                    logger.info('Race tied at %s', team_leaderboard[1]['average_fastest_lap'])
                    return {
                        'status': WinStatus.TIE
                    }
                # declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': team_leaderboard[0]
                }
    elif 'at_finish' in kwargs:
        race_format = raceObj.format
        team_leaderboard = self.calc_team_leaderboard(raceObj)['by_avg_fastest_lap']
        if len(team_leaderboard) > 1:
            if team_leaderboard[0]['laps'] > 0: # must have at least one lap
                fast_lap_average = team_leaderboard[0]['average_fastest_lap_raw']
                if fast_lap_average > 0: # must have recorded time (otherwise impossible to set bounds)
                    # track, per team, the time already "spent" since each
                    # member's most recent gate pass
                    team_laps = {}
                    for line in team_leaderboard:
                        team = line['name']
                        team_laps[team] = {
                            'spent_time': 0,
                            'members': line['members'],
                        }
                    race_duration_ms = secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec)
                    for node in raceObj.node_passes:
                        if len(raceObj.node_passes[node]) > 0:
                            team = raceObj.node_pilots[node].team
                            if team is not None:
                                most_recent_lap = raceObj.node_passes[node][-1]['lap_time_stamp']
                                spent_time = race_duration_ms - most_recent_lap
                                team_laps[team]['spent_time'] += spent_time
                    max_consideration = 0
                    for team in team_laps:
                        # worst case: every member still needs a full average lap
                        time_to_complete = fast_lap_average * team_laps[team]['members']
                        time_to_complete -= team_laps[team]['spent_time']
                        max_consideration = max(max_consideration, time_to_complete)
                    return {
                        'status': WinStatus.NONE,
                        'max_consideration': max_consideration
                    }
    return {
        'status': WinStatus.NONE
    }
def check_win_team_fastest_consecutive(self, raceObj: RHRace, **kwargs):
    '''Team win check for FASTEST_3_CONSECUTIVE: best team-average
    consecutive-laps time wins.

    Returns a dict with 'status' (WinStatus) and, when declared, the
    winning team row under 'data'.  While racing with 'at_finish' in
    kwargs, instead returns 'max_consideration': an upper bound (ms) on
    how much longer the outcome could still change.
    '''
    if raceObj.race_status == RaceStatus.DONE or \
        raceObj.check_all_nodes_finished() or 'forced' in kwargs: # racing must be completed
        team_leaderboard = self.calc_team_leaderboard(raceObj)['by_avg_consecutives']
        if len(team_leaderboard) > 1:
            race_format = raceObj.format
            if team_leaderboard[0]['laps'] > 3 or \
                (race_format.start_behavior == StartBehavior.FIRST_LAP and team_leaderboard[0]['laps'] > 2): # must have at least 3 laps
                # check for tie on all three ranking keys
                # (bug fix: the laps term previously compared
                # team_leaderboard[1] against itself, so it was always true)
                if team_leaderboard[1]['contribution_amt'] == team_leaderboard[0]['contribution_amt'] and \
                    team_leaderboard[1]['average_consecutives_raw'] == team_leaderboard[0]['average_consecutives_raw'] and \
                    team_leaderboard[1]['laps'] == team_leaderboard[0]['laps']:
                    logger.info('Race tied at %s', team_leaderboard[1]['average_consecutives'])
                    return {
                        'status': WinStatus.TIE
                    }
                # declare winner
                return {
                    'status': WinStatus.DECLARED,
                    'data': team_leaderboard[0]
                }
    elif 'at_finish' in kwargs:
        team_leaderboard = self.calc_team_leaderboard(raceObj)['by_avg_consecutives']
        if len(team_leaderboard) > 1:
            fast_consecutives = team_leaderboard[0]['average_consecutives_raw']
            if fast_consecutives and fast_consecutives > 0: # must have recorded time (otherwise impossible to set bounds)
                team_laps = {}
                for line in team_leaderboard:
                    team = line['name']
                    team_laps[team] = {
                        'time': 0,
                        'members': line['members']
                    }
                for node in raceObj.node_passes:
                    team = raceObj.node_pilots[node].team
                    if team is not None:
                        laps = raceObj.node_passes[node]
                        if len(laps) >= 2:
                            # sum each member's two most recent laps into the team total
                            last_2_laps = laps[-1]['lap_time'] + laps[-2]['lap_time']
                            team_laps[team]['time'] += last_2_laps
                max_consideration = 0
                for team in team_laps:
                    if team != team_leaderboard[0]['name']: # skip leader
                        # average across members once
                        # (bug fix: the per-member division was previously
                        # applied twice -- once in the assignment and again
                        # inside the max() expression)
                        avg_last_2 = team_laps[team]['time'] / team_laps[team]['members']
                        team_laps[team]['time'] = avg_last_2
                        max_consideration = max(max_consideration, fast_consecutives - avg_last_2)
                return {
                    'status': WinStatus.NONE,
                    'max_consideration': max_consideration
                }
    return {
        'status': WinStatus.NONE
    }
def get_leading_pilot_id(results):
    '''Return the pilot_id at the top of the primary leaderboard.

    A leader is only reported when more than one pilot is present;
    otherwise (or on malformed results) PILOT_ID_NONE is returned.
    '''
    try:
        standings = results[results['meta']['primary_leaderboard']]
        if len(standings) > 1: # only return leader if more than one pilot
            return standings[0]['pilot_id']
    except Exception:
        logger.exception("Error in Results 'get_leading_pilot_id()'")
    return RHUtils.PILOT_ID_NONE
def get_leading_team_name(results):
    '''Return the team name at the top of the primary leaderboard.

    A leader is only reported when more than one team is present;
    otherwise (or on malformed results) an empty string is returned.
    '''
    try:
        standings = results[results['meta']['primary_leaderboard']]
        if len(standings) > 1: # only return leader if more than one team
            return standings[0]['name']
    except Exception:
        logger.exception("Error in Results 'get_leading_team_name()'")
    return ''
def get_pilot_lap_counts_str(results):
    '''Return a comma-separated "callsign=laps" summary of the primary leaderboard.

    Entries appear in leaderboard order; returns an empty string when the
    results structure is missing or malformed.
    '''
    try:
        primary_leaderboard = results['meta']['primary_leaderboard']
        results_list = results[primary_leaderboard]
        lap_strs_list = []
        for res_obj in results_list:
            lap_strs_list.append("{}={}".format(res_obj['callsign'], res_obj['laps']))
        return ", ".join(lap_strs_list)
    except Exception:
        # bug fix: this message previously named 'get_pilot_lap_totals_str()'
        logger.exception("Error in Results 'get_pilot_lap_counts_str()'")
    return ''
def get_team_lap_totals_str(results):
    '''Return an alphabetically sorted, comma-separated "team=laps" summary
    of the primary leaderboard, or an empty string on error.'''
    try:
        leaderboard = results[results['meta']['primary_leaderboard']]
        totals = ["{}={}".format(entry['name'], entry['laps'])
                  for entry in leaderboard]
        # sorted() rather than in-place sort; output is alphabetical by team
        return ", ".join(sorted(totals))
    except Exception:
        logger.exception("Error in Results 'get_team_lap_totals_str()'")
    return ''
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,086
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_mqtt.py
|
import unittest
from rh.interface.BaseHardwareInterface import BaseHardwareInterface,\
BaseHardwareInterfaceListener
from rh.interface.MockInterface import MockInterface
from rh.interface.MqttInterface import MqttInterface
from rh.apis.mqtt_api import MqttAPI
import json
class StubMqttClient:
    '''Minimal stand-in for an MQTT client: publish() exists but discards
    everything, so handlers under test can announce without a broker.'''

    def publish(self, topic, payload):
        # intentionally a no-op; tests never inspect published traffic
        return None
class StubMqttMessage:
    '''Mimics an inbound MQTT message: a topic string plus the payload
    stored as UTF-8 encoded bytes (as a real client library would supply).'''

    def __init__(self, topic, payload):
        self.topic = topic
        # handlers expect bytes, so encode the test-supplied string
        self.payload = bytes(payload, 'utf-8')
class MqttTest(unittest.TestCase):
    '''Tests for MQTT control/announce message handling, using StubMqttClient /
    StubMqttMessage in place of a broker and MockInterface in place of real
    timing hardware.'''

    def test_mqtt_frequency(self):
        # an inbound control message should set node 0's frequency
        ann_topic = '/ann'
        ctrl_topic = '/ctrl'
        timer_id = 'local'
        hw = MockInterface(1)
        intf = MqttInterface(StubMqttClient(), ann_topic, ctrl_topic, timer_id, hw)
        msg = {
            'topic': intf._mqtt_create_node_topic('system', hw.nodes[0], "frequency"),
            'payload': '5675'
        }
        intf._mqtt_set_frequency(hw.node_managers[0], None, None, StubMqttMessage(**msg))
        self.assertEqual(hw.nodes[0].frequency, 5675)

    def test_mqtt_frequency_bandChannel(self):
        # the frequency payload may carry an optional ",<bandChannel>" suffix
        ann_topic = '/ann'
        ctrl_topic = '/ctrl'
        timer_id = 'local'
        hw = MockInterface(1)
        intf = MqttInterface(StubMqttClient(), ann_topic, ctrl_topic, timer_id, hw)
        msg = {
            'topic': intf._mqtt_create_node_topic('system', hw.nodes[0], "frequency"),
            'payload': '5675,X4'
        }
        intf._mqtt_set_frequency(hw.node_managers[0], None, None, StubMqttMessage(**msg))
        self.assertEqual(hw.nodes[0].frequency, 5675)
        self.assertEqual(hw.nodes[0].bandChannel, 'X4')

    def test_mqtt_bandChannel(self):
        # setting a band/channel implies its frequency (R8 -> 5917 here)
        ann_topic = '/ann'
        ctrl_topic = '/ctrl'
        timer_id = 'local'
        hw = MockInterface(1)
        intf = MqttInterface(StubMqttClient(), ann_topic, ctrl_topic, timer_id, hw)
        msg = {
            'topic': intf._mqtt_create_node_topic('system', hw.nodes[0], "bandChannel"),
            'payload': 'R8'
        }
        intf._mqtt_set_bandChannel(hw.node_managers[0], None, None, StubMqttMessage(**msg))
        self.assertEqual(hw.nodes[0].frequency, 5917)
        self.assertEqual(hw.nodes[0].bandChannel, 'R8')

    def test_mqtt_pass_handler_realtime(self):
        self.check_mqtt_pass_handler('realtime', BaseHardwareInterface.LAP_SOURCE_REALTIME)

    def test_mqtt_pass_handler_manual(self):
        self.check_mqtt_pass_handler('manual', BaseHardwareInterface.LAP_SOURCE_MANUAL)

    def check_mqtt_pass_handler(self, mqtt_lap_source, expected_lap_source):
        # Shared body: an announced 'pass' message should reach the listener's
        # on_pass callback with its timestamp and the mapped lap source.
        ann_topic = '/ann'
        ctrl_topic = '/ctrl'
        timer_id = 'local'
        new_lap_ts = None
        new_lap_source = None
        def pass_callback(node, lap_ts, lap_source, lap_rssi):
            nonlocal new_lap_ts
            nonlocal new_lap_source
            new_lap_ts = lap_ts
            new_lap_source = lap_source
        listener = BaseHardwareInterfaceListener()
        listener.on_pass = pass_callback
        hw = MockInterface(1)
        api = MqttAPI(StubMqttClient(), ann_topic, timer_id, hw, listener)
        api.ann_topic = ann_topic
        api.timer_id = timer_id
        intf = MqttInterface(StubMqttClient(), ann_topic, ctrl_topic, timer_id, hw)
        intf.ann_topic = ann_topic
        intf.timer_id = timer_id
        msg = {
            'topic': intf._mqtt_create_node_topic(ann_topic, hw.nodes[0], 'pass'),
            'payload': json.dumps({
                'source': mqtt_lap_source,
                'timestamp': 11,
                'rssi': 49
            })
        }
        api.pass_handler(api.client, None, StubMqttMessage(**msg))
        self.assertEqual(new_lap_ts, 11)
        self.assertEqual(new_lap_source, expected_lap_source)

    def test_frequency_handler(self):
        # an announced frequency change should fire on_frequency_changed
        ann_topic = '/ann'
        ctrl_topic = '/ctrl'
        timer_id = 'local'
        new_freq = None
        def set_frequency_callback(node, frequency, band=None, channel=None):
            nonlocal new_freq
            new_freq = frequency
        listener = BaseHardwareInterfaceListener()
        listener.on_frequency_changed = set_frequency_callback
        hw = MockInterface(1)
        api = MqttAPI(StubMqttClient(), ann_topic, timer_id, hw, listener)
        intf = MqttInterface(StubMqttClient(), ann_topic, ctrl_topic, timer_id, hw)
        msg = {
            'topic': intf._mqtt_create_node_topic(ann_topic, hw.nodes[0], 'frequency'),
            'payload': '5808'
        }
        api.set_frequency_handler(api.client, None, StubMqttMessage(**msg))
        self.assertEqual(new_freq, 5808)
# Support running this test module directly (outside a test runner)
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,087
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/persistent_homology.py
|
import numpy as np
import jenkspy
import scipy.signal as signal
class ConnectedComponent:
    '''A 0-dimensional persistence feature of a 1-D signal.

    birth and death are (index, value) pairs: the component is born at a
    local maximum and dies where it merges into a more prominent component.
    left/right track the component's index extent during computation.
    '''

    def __init__(self, idx, birth, death):
        self.left = idx
        self.right = idx
        self.birth = birth
        self.death = death

    def __str__(self):
        return "{} -> {} ({})".format(self.birth[1], self.death[1], self.lifetime())

    def __repr__(self):
        return "{} -> {}".format(self.birth, self.death)

    def to_pair(self):
        # [birth value, death value] in that order
        return [self.birth[1], self.death[1]]

    def to_upair(self):
        '''Unsigned/unordered pair'''
        lo, hi = sorted((self.birth[1], self.death[1]))
        return [lo, hi]

    def lifetime(self):
        '''Persistence of the feature: |birth value - death value|.'''
        return abs(self.birth[1] - self.death[1])
def calculatePeakPersistentHomology(data):
    '''Compute the 0-dimensional persistent homology of a 1-D signal.

    Samples are swept from highest to lowest value; a new component is born
    at each local maximum and a component "dies" where it merges with a
    component that has a higher birth value. Returns the (unsorted) list of
    ConnectedComponent features.
    '''
    ccs = []
    N = len(data)
    # idxToCC[i]: the component currently covering sample i (None = unvisited)
    idxToCC = [None]*N
    # descending by value; equal values break ties left-to-right (via -i)
    sorted_idxs = sorted(range(N), key=lambda i: (data[i], -i), reverse=True)
    # global minimum: provisional death point of the most persistent component
    min_idx = sorted_idxs[-1]
    def arrange_peak_centers():
        # For plateaus (adjacent runs of equal value), reorder the run so the
        # center sample is visited first and so becomes the peak representative.
        k = 0
        while k < N:
            # prefer peak centers
            end = k
            while end < N-1 and sorted_idxs[end+1] == sorted_idxs[end] + 1 and data[sorted_idxs[end+1]] == data[sorted_idxs[end]]:
                end += 1
            end += 1 # exclusive
            if end - k > 2:
                mid = (k + end - 1)//2
                # rearrange to do the peak center first
                left_part = sorted_idxs[k:mid]
                left_part.reverse()
                right_part = sorted_idxs[mid:end]
                sorted_idxs[k:end] = right_part + left_part
            k = end
    arrange_peak_centers()
    for i in sorted_idxs:
        leftCC = idxToCC[i-1] if i > 0 else None
        rightCC = idxToCC[i+1] if i < N-1 else None
        if leftCC is None and rightCC is None:
            # isolated so far: births a new component
            cc = ConnectedComponent(i, (i, data[i]), (min_idx, data[min_idx]))
            ccs.append(cc)
            idxToCC[i] = cc
        elif leftCC is not None and rightCC is None:
            # extend the left neighbour's component rightwards over i
            leftCC.right += 1
            idxToCC[i] = leftCC
        elif leftCC is None and rightCC is not None:
            # extend the right neighbour's component leftwards over i
            rightCC.left -= 1
            idxToCC[i] = rightCC
        else:
            # i joins two components: the one with the lower birth value dies
            # here and is absorbed into the other (elder rule)
            if leftCC.birth[1] > rightCC.birth[1]:
                rightCC.death = (i, data[i])
                leftCC.right = rightCC.right
                idxToCC[i] = leftCC
                idxToCC[leftCC.right] = leftCC
            else:
                leftCC.death = (i, data[i])
                rightCC.left = leftCC.left
                idxToCC[i] = rightCC
                idxToCC[rightCC.left] = rightCC
    return ccs
def sortByLifetime(ccs):
    '''Sorts in descending order (i.e. most prominent first)'''
    by_persistence_desc = lambda component: -component.lifetime()
    return sorted(ccs, key=by_persistence_desc)
def calculateRealtimePeakPersistentHomology(rssi_history, window_size):
    '''rssi_history is a list containing all past RSSIs up-to and including the current time'''
    if not type(rssi_history) is np.ndarray:
        rssi_history = np.array(rssi_history)
    n = len(rssi_history)
    current_rssi = rssi_history[-1]
    # compress the history to its extrema (peaks + nadirs), keep only the most
    # recent window_size of them, then re-append the current sample
    peak_idxs = signal.find_peaks(rssi_history)[0]
    nadir_idxs = signal.find_peaks(-rssi_history)[0]
    idxs = np.sort(np.hstack([peak_idxs, nadir_idxs]))
    window_idxs = idxs[-window_size:]
    rssi_window = np.hstack([rssi_history[window_idxs], [current_rssi]])
    ccs = calculatePeakPersistentHomology(rssi_window)
    last_pos = len(rssi_window) - 1
    # return only the component born at the current sample (if any), with its
    # window positions remapped to full-history indices
    for cc in ccs:
        if cc.birth[0] == last_pos:
            if cc.death < cc.birth:
                # death at an earlier extremum: map window position -> history index
                cc.death = (window_idxs[cc.death[0]], cc.death[1])
            else:
                # NOTE(review): this is a tuple comparison on (index, value);
                # the else branch pins the death to the current sample — confirm intent
                cc.death = (n - 1, cc.death[1])
            cc.birth = (n - 1, cc.birth[1])
            return cc
    return None
def findBreak(ccs):
    '''Split component lifetimes into two classes using Jenks natural breaks.

    Returns (a, b): the largest lifetime level in the lower class and the
    smallest lifetime level above it.
    '''
    lifetimes = [cc.lifetime() for cc in ccs]
    levels = np.unique(lifetimes)
    breaks = jenkspy.jenks_breaks(lifetimes, nb_class=2)
    # breaks[1] is the interior break; locate it among the distinct levels
    split_idx = np.min(np.nonzero(levels == breaks[1])[0])
    return (levels[split_idx], levels[split_idx+1])
def plotSampleLifetimes(axs, ts, ccs):
    '''Plot each component's lifetime against the time of its birth sample.'''
    ordered = sorted(ccs, key=lambda cc: cc.birth[0])
    points = np.array([[ts[cc.birth[0]], cc.lifetime()] for cc in ordered])
    axs.set_xlim((ts[0], ts[-1]))
    axs.set_xlabel('Time / s')
    axs.set_ylabel('Lifetime')
    axs.plot(points[:, 0], points[:, 1])
def plotPersistenceDiagram(axs, ccs):
    '''Scatter death vs. birth values, with the diagonal reference line.'''
    pairs = np.array([cc.to_pair() for cc in ccs])
    axs.scatter(pairs[:, 1], pairs[:, 0], s=2, color='blue')
    # pad both axes 5% beyond the data range
    lo = np.min(pairs)*0.95
    hi = np.max(pairs)*1.05
    axs.set_xlim((lo, hi))
    axs.set_ylim((lo, hi))
    axs.set_xlabel('Death')
    axs.set_ylabel('Birth')
    axs.plot([lo, hi], [lo, hi], "--", c='gray')
def plotLifetimes(axs, ccs):
    '''Scatter component lifetime against death value.'''
    points = np.array([[cc.death[1], cc.lifetime()] for cc in ccs])
    axs.scatter(points[:, 0], points[:, 1], s=2, color='blue')
    # pad each axis 5% beyond its data range
    x_lo = np.min(points[:, 0])*0.95
    x_hi = np.max(points[:, 0])*1.05
    y_lo = np.min(points[:, 1])*0.95
    y_hi = np.max(points[:, 1])*1.05
    axs.set_xlim((x_lo, x_hi))
    axs.set_ylim((y_lo, y_hi))
    axs.set_xlabel('Death')
    axs.set_ylabel('Lifetime')
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,088
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_interface.py
|
import unittest
from rh.interface import calculate_checksum, ensure_iter, ExtremumFilter, SampleHistory
from rh.interface.MockInterface import MockInterface
class InterfaceTest(unittest.TestCase):
    '''Tests for interface helpers (checksum, ensure_iter, ExtremumFilter,
    SampleHistory) and for the lap/enter/exit crossing state machine,
    exercised through MockInterface.'''

    def test_checksum(self):
        data = bytearray([200, 145])
        checksum = calculate_checksum(data)
        # 200 + 145 = 345; 345 % 256 = 89 — checksum is the low byte of the sum
        self.assertEqual(89, checksum)

    def test_ensure_iter(self):
        # scalars are wrapped in a list; iterables pass through unchanged
        self.assertEqual(['foo'], ensure_iter('foo'))
        self.assertEqual(['foo'], ensure_iter(['foo']))
        self.assertEqual([1], ensure_iter(1))
        self.assertEqual([1], ensure_iter([1]))

    def test_extremum_filter(self):
        f = ExtremumFilter()
        input_data = [2, 5, 5, 5, 4, 8, 9, 7, 7, 1, 3]
        # NB: includes inflexion points
        # None marks samples suppressed by the filter; output lags input by
        # one sample, hence the actual[1:] comparison below
        expected_rssi = [2, 5, None, 5, 4, None, 9, 7, 7, 1]
        expected = [(i,v) for i,v in enumerate(expected_rssi)]
        actual = [f.filter(i,x) for i, x in enumerate(input_data)]
        self.assertListEqual(expected, actual[1:])

    def test_rssi_history_append(self):
        history = SampleHistory()
        history.set([0,1], [5,5])
        history.append(10, 5)
        actual_times, actual_values = history.get()
        # the intermediate equal-valued sample at t=1 is dropped — presumably
        # the history coalesces constant runs; verify against SampleHistory
        self.assertListEqual(actual_times, [0,10])
        self.assertListEqual(actual_values, [5,5])

    def test_rssi_history_merge(self):
        history = SampleHistory()
        history.set([0,1,2,3], [5,3,6,7])
        # pass history entries are interleaved into the sample history by time
        pass_history = [(0.5,8), (2,9)]
        history.merge(pass_history)
        actual_times, actual_values = history.get()
        self.assertListEqual(actual_times, [0,0.5,1,2,3])
        # a pass sample at an existing time (t=2) replaces the stored value
        self.assertListEqual(actual_values, [5,8,3,9,7])

    def test_is_new_lap(self):
        # Happy path: enter -> exit -> lap, with all listener callbacks firing.
        # is_new_lap(...) returns a (lap, enter, exit) tuple of pending flags.
        laps = 0
        lap_timestamp = 0
        lap_rssi = 0
        def on_pass(node, lap_ts, source, rssi):
            nonlocal laps
            nonlocal lap_timestamp
            nonlocal lap_rssi
            laps += 1
            lap_timestamp = lap_ts
            lap_rssi = rssi
        enter_timestamp = 0
        enter_rssi = 0
        def on_enter_triggered(node, cross_ts, cross_rssi, cross_lifetime):
            nonlocal enter_timestamp
            nonlocal enter_rssi
            enter_timestamp = cross_ts
            enter_rssi = cross_rssi
        exit_timestamp = 0
        exit_rssi = 0
        def on_exit_triggered(node, cross_ts, cross_rssi, cross_lifetime):
            nonlocal exit_timestamp
            nonlocal exit_rssi
            exit_timestamp = cross_ts
            exit_rssi = cross_rssi
        intf = MockInterface(1)
        intf.listener.on_pass = on_pass
        intf.listener.on_enter_triggered = on_enter_triggered
        intf.listener.on_exit_triggered = on_exit_triggered
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 110, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        intf.process_enter_trigger(node, 1, 100, 22, 8)
        lap_enter_exit = intf.is_new_lap(node, 210, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, False, True))
        intf.process_exit_trigger(node, 1, 200, 21, 8)
        lap_enter_exit = intf.is_new_lap(node, 230, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, False, False))
        intf.process_lap_stats(node, 1, 150, 40, 5)
        self.assertEqual(node.pass_count, 1)
        self.assertEqual(laps, 1)
        self.assertEqual(lap_timestamp, 150)
        self.assertEqual(lap_rssi, 40)
        self.assertEqual(enter_timestamp, 100)
        self.assertEqual(enter_rssi, 22)
        self.assertEqual(exit_timestamp, 200)
        self.assertEqual(exit_rssi, 21)

    def test_is_new_lap_init(self):
        # first call seeds pass_count from the reported hardware value
        intf = MockInterface(1)
        node = intf.nodes[0]
        intf.is_new_lap(node, 0, 20, 4, False)
        self.assertEqual(node.pass_count, 4)

    def test_is_new_lap_retry_enter(self):
        # a repeated enter report (after it was already processed) is a no-op
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, True) # enter
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        intf.process_enter_trigger(node, 1, 100, 22, 8)
        lap_enter_exit = intf.is_new_lap(node, 110, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        self.assertEqual(node.pass_count, 0)

    def test_is_new_lap_missed_enter(self):
        # lap count advances without an observed enter: all flags raised
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, True, True))
        intf.process_lap_stats(node, 1, 100, 40, 5)
        self.assertEqual(node.pass_count, 1)

    def test_is_new_lap_missed_exit(self):
        # lap arrives while still flagged as crossing: exit was missed
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, True, True))
        intf.process_lap_stats(node, 1, 100, 40, 5)
        self.assertEqual(node.pass_count, 1)

    def test_is_new_lap_missed_lap(self):
        # a new enter with an advanced pass count implies a missed lap
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 1, True) # enter
        self.assertTupleEqual(lap_enter_exit, (True, True, True))
        intf.process_lap_stats(node, 1, 100, 40, 5)
        self.assertEqual(node.pass_count, 1)

    def test_is_new_lap_missed_enter_stats(self):
        # enter flagged but its stats message never processed before the exit
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, True, True))
        intf.process_exit_trigger(node, 1, 200, 22, 8)
        lap_enter_exit = intf.is_new_lap(node, 210, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, True, False))
        intf.process_lap_stats(node, 1, 150, 40, 5)
        self.assertEqual(node.pass_count, 1)

    def test_is_new_lap_missed_exit_stats(self):
        # exit flagged but its stats message never processed before the lap
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        intf.process_enter_trigger(node, 1, 100, 22, 8)
        lap_enter_exit = intf.is_new_lap(node, 110, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, False, True))
        lap_enter_exit = intf.is_new_lap(node, 120, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, False, True))
        intf.process_lap_stats(node, 1, 200, 40, 5)
        self.assertEqual(node.pass_count, 1)

    def test_is_new_lap_missed_all(self):
        # enter/exit/lap all flagged but no stats processed: pass_count stays 0
        intf = MockInterface(1)
        node = intf.nodes[0]
        lap_enter_exit = intf.is_new_lap(node, 0, 20, 0, False)
        self.assertTupleEqual(lap_enter_exit, (False, False, False))
        lap_enter_exit = intf.is_new_lap(node, 10, 20, 0, True) # enter
        self.assertTupleEqual(lap_enter_exit, (False, True, False))
        lap_enter_exit = intf.is_new_lap(node, 20, 20, 0, False) # exit
        self.assertTupleEqual(lap_enter_exit, (False, True, True))
        lap_enter_exit = intf.is_new_lap(node, 30, 20, 1, False) # lap
        self.assertTupleEqual(lap_enter_exit, (True, True, True))
        self.assertEqual(node.pass_count, 0)

    def test_ai_calibrate_nodes(self):
        # calibration from the node's own RSSI history (single peak at 22)
        intf = MockInterface(1)
        node = intf.nodes[0]
        node.ai_calibrate = True
        node.first_cross_flag = True
        node.enter_at_level = 12
        node.exit_at_level = 12
        rssis = [2,2,3,4,2,4,20,22,18,2,3,4,2]
        node.history.set(list(range(len(rssis))), rssis)
        new_enter_at_level = None
        new_exit_at_level = None
        def new_enter_callback(node, enter_level):
            nonlocal new_enter_at_level
            new_enter_at_level = enter_level
        def new_exit_callback(node, exit_level):
            nonlocal new_exit_at_level
            new_exit_at_level = exit_level
        intf.listener.on_enter_trigger_changed = new_enter_callback
        intf.listener.on_exit_trigger_changed = new_exit_callback
        intf.ai_calibrate_nodes()
        self.assertEqual(new_enter_at_level, 11)
        self.assertEqual(new_exit_at_level, 9)

    def test_calibrate_nodes(self):
        # calibration from per-race data: (laps, history_times, history_values)
        intf = MockInterface(1)
        node = intf.nodes[0]
        node.ai_calibrate = True
        history_values = [2,2,3,4,2,4,20,22,18,2,3,4,2]
        history_times = list(range(len(history_values)))
        new_enter_at_level = None
        new_exit_at_level = None
        def new_enter_callback(node, enter_level):
            nonlocal new_enter_at_level
            new_enter_at_level = enter_level
        def new_exit_callback(node, exit_level):
            nonlocal new_exit_at_level
            new_exit_at_level = exit_level
        intf.listener.on_enter_trigger_changed = new_enter_callback
        intf.listener.on_exit_trigger_changed = new_exit_callback
        intf.calibrate_nodes(0, {
            0: ([{'lap_time_stamp': 7, 'deleted': False}], history_times, history_values)
        })
        self.assertEqual(new_enter_at_level, 11)
        self.assertEqual(new_exit_at_level, 6)
# Support running this test module directly (outside a test runner)
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,089
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_rssi_dump.py
|
import unittest
import gevent
from rh.tools import rssi_dump
class RssiDumpTest(unittest.TestCase):
    '''Smoke test: running rssi_dump against the MOCK port briefly should
    deliver at least one non-trivial RSSI buffer to the write callback.'''

    def test(self):
        buffers = {}
        def write_buffer(filename, buf):
            nonlocal buffers
            buffers[filename] = buf
        # run the dump on mock hardware at 5885 MHz for ~0.3s, then stop it
        thread = gevent.spawn(rssi_dump.start, 'MOCK', 5885, write_buffer)
        gevent.sleep(0.3)
        thread.kill()
        self.assertGreater(len(buffers), 0)
        # expect a minimally populated buffer (>= 16 entries) within 0.3s
        self.assertGreaterEqual(len(next(iter(buffers.values()))), 16, buffers)
# Support running this test module directly (outside a test runner)
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,090
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/leds/led_handler_graph.py
|
'''LED visual effects'''
# to use this handler, run:
# sudo apt-get install libjpeg-dev
# sudo pip install pillow
from . import ColorVal, setPixels
from rh.events.led_event_manager import LEDEffect
import gevent
from PIL import Image, ImageDraw
from itertools import repeat
def rssiGraph(args):
    '''LED effect: render a live per-node RSSI bar graph onto an LED panel.

    Required args: 'strip' (LED strip), 'INTERFACE' (hardware interface with
    at least one node), 'ledRows', 'panelRotate', 'invertedPanelRows',
    'manager' (supplies per-node display colors). Optional 'iterations'
    bounds the render loop; otherwise it runs until the greenlet is killed.
    Returns False when prerequisites are missing.
    '''
    if 'strip' in args:
        strip = args['strip']
    else:
        return False
    if 'INTERFACE' in args:
        INTERFACE = args['INTERFACE']
    else:
        return False
    if len(INTERFACE.nodes) < 1:
        return False
    # panel geometry: the strip is treated as a width x height image
    height = args['ledRows']
    width = strip.numPixels() // height
    im = Image.new('RGB', [width, height])
    draw = ImageDraw.Draw(im)
    # one column per node where possible; fall back to 1-pixel bars
    if width < len(INTERFACE.nodes):
        barWidth = 1
    else:
        barWidth = width // len(INTERFACE.nodes)
    # bounded loop when 'iterations' given, else loop indefinitely
    loop = range(args['iterations']) if 'iterations' in args else repeat(True)
    for _ in loop:
        # clear the frame, then draw one vertical bar per node
        draw.rectangle((0, 0, width, height), fill=(0, 0, 0))
        for node in INTERFACE.nodes:
            rssi_min = node.node_nadir_rssi
            rssi_max = node.node_peak_rssi
            rssi_val = node.current_rssi.rssi
            color = convertColor(args['manager'].getDisplayColor(node.index))
            rssi_range = rssi_max - rssi_min
            if rssi_range:
                # bar top row: scale current RSSI into the panel (row 0 = top)
                point = (rssi_max - rssi_val) / float(rssi_range) * height
                draw.rectangle((barWidth * node.index, point, (barWidth * node.index) + barWidth - 1, height), fill=color)
        img = im.rotate(90 * args['panelRotate'])
        setPixels(strip, img, args['invertedPanelRows'])
        strip.show()
        gevent.idle()  # yield to other greenlets between frames
def clearPixels(strip):
    '''Set every pixel on the strip to off (does not call strip.show()).'''
    pixel_count = strip.numPixels()
    for pixel_idx in range(pixel_count):
        strip.setPixelColor(pixel_idx, ColorVal.NONE)
def convertColor(color):
    '''Split a packed 0xRRGGBB int into an (r, g, b) tuple.'''
    red = color >> 16
    green = (color >> 8) & 0xFF
    blue = color & 0xFF
    return red, green, blue
def discover(config, *args, **kwargs):
    '''Register the RSSI graph LED effect, with panel geometry from config.'''
    effect_args = {
        'ledRows': config['LED_ROWS'],
        'panelRotate': config['PANEL_ROTATE'],
        'invertedPanelRows': config['INVERTED_PANEL_ROWS']
    }
    # no placement constraints: the effect is neither required nor excluded anywhere
    placement = {
        'include': [],
        'exclude': [],
        'recommended': []
    }
    return [LEDEffect("graphRSSI", "Graph: RSSI", rssiGraph, placement, effect_args)]
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,091
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_persistent_homology.py
|
import rh.util.persistent_homology as ph
import unittest
import numpy as np
class PersistentHomologyTest(unittest.TestCase):
    """Unit tests for the rh.util.persistent_homology peak-detection utilities."""

    def test_PeakPersistentHomology(self):
        # Batch computation over the full signal.
        signal = [30, 29, 41, 4, 114, 1, 3, 2, 33, 9, 112, 40, 118]
        components = ph.sortByLifetime(ph.calculatePeakPersistentHomology(signal))
        expected = '[(12, 118) -> (5, 1), (4, 114) -> (5, 1), (10, 112) -> (11, 40), (2, 41) -> (3, 4), (8, 33) -> (9, 9), (0, 30) -> (1, 29), (6, 3) -> (7, 2)]'
        self.assertEqual(str(components), expected)

    def test_RealtimePeakPersistentHomology(self):
        # Incremental computation: feed one extra sample per step, window size 6.
        signal = [30, 29, 41, 4, 114, 1, 3, 2, 33, 9, 112, 40, 118]
        incremental = [ph.calculateRealtimePeakPersistentHomology(signal[:n + 1], 6)
                       for n in range(len(signal))]
        alive = [cc for cc in incremental if cc is not None and cc.lifetime() > 0]
        alive = ph.sortByLifetime(alive)
        expected = '[(12, 118) -> (7, 2), (10, 112) -> (5, 1), (4, 114) -> (3, 4), (8, 33) -> (5, 1), (2, 41) -> (1, 29), (6, 3) -> (5, 1)]'
        self.assertEqual(str(alive), expected)

    def test_findBreak_1(self):
        signal = [2, 0, 5, 0, 2, 0, 8, 2, 4, 0, 6, 0, 9, 0, 15, 6, 10, 8]
        self._assert_break(signal, [2, 4, 5, 6, 8, 9, 15], (6, 8))

    def test_findBreak_2(self):
        signal = [6, 0, 5, 0, 6, 0, 8, 2, 4, 0, 6, 0, 9, 0, 15, 6, 10, 8]
        self._assert_break(signal, [2, 4, 5, 6, 8, 9, 15], (9, 15))

    def _assert_break(self, signal, expected_levels, expected_bounds):
        # Shared assertion helper: verify lifetime levels, then the break bounds.
        components = ph.calculatePeakPersistentHomology(signal)
        levels = np.unique([cc.lifetime() for cc in components]).tolist()
        self.assertListEqual(levels, expected_levels)
        self.assertEqual(ph.findBreak(components), expected_bounds)
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,092
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/RHUtils.py
|
'''
RotorHazard Helper and utility functions
'''
import os
import sys
import re
import logging
import platform
import subprocess
import glob
import socket
import random
import json
import jsonschema
logger = logging.getLogger(__name__)
DEF_TEAM_NAME = 'A'  # default team
PILOT_ID_NONE = 0  # indicator value for no pilot configured
HEAT_ID_NONE = 0  # indicator value for practice heat
CLASS_ID_NONE = 0  # indicator value for unclassified heat
FORMAT_ID_NONE = 0  # indicator value for unformatted class
FREQUENCY_ID_NONE = 0  # indicator value for node disabled
IS_SYS_RASPBERRY_PI = True  # set by 'idAndLogSystemInfo()'
VTX_TABLE = {}  # validated raw VTX table data, populated by 'load_vtx_table()'
FREQS = {}  # band letter + channel number (e.g. 'R1') -> frequency, populated by 'load_vtx_table()'
def load_vtx_table():
    """Load and schema-validate the VTX table from 'vtx_table.json'.

    Populates the module-level VTX_TABLE (validated raw table) and FREQS
    (mapping of band letter + 1-based channel number, e.g. 'R1', to frequency).

    Raises:
        jsonschema.ValidationError: if the table does not match the schema.
        OSError: if either JSON file cannot be read.
    """
    global VTX_TABLE, FREQS
    with open('vtxconfig_schema-1.0.json', 'r') as f:
        schema = json.load(f)
    with open('vtx_table.json', 'r') as f:
        vtx_table = json.load(f)
    jsonschema.validate(instance=vtx_table, schema=schema)
    # validated
    VTX_TABLE = vtx_table
    for band in vtx_table['vtx_table']['bands_list']:
        for idx, freq in enumerate(band['frequencies']):
            FREQS[band['letter'] + str(idx + 1)] = freq

# load at import time (fixed: removed stray trailing semicolon)
load_vtx_table()
def time_format(millis, timeformat='{m}:{s}.{d}'):
    '''Render a millisecond count as a clock string (default 00:00.000 style).'''
    if millis is None:
        return ''
    total_ms = int(round(millis, 0))
    mins, remainder = divmod(total_ms, 60000)
    secs, msecs = divmod(remainder, 1000)
    fmt = timeformat or '{m}:{s}.{d}'
    return fmt.format(m=str(mins), s=str(secs).zfill(2), d=str(msecs).zfill(3))
def phonetictime_format(millis, timeformat='{m} {s}.{d}'):
    '''Render a millisecond count as a phonetic (spoken) time string.'''
    if millis is None:
        return ''
    rounded = int(millis + 50)  # round to nearest tenth of a second
    mins, remainder = divmod(rounded, 60000)
    secs, sub_sec = divmod(remainder, 1000)
    tenths = sub_sec // 100
    fmt = timeformat or '{m} {s}.{d}'
    if mins > 0:
        return fmt.format(m=str(mins), s=str(secs).zfill(2), d=str(tenths))
    # no minutes: minute slot is left empty, seconds unpadded
    return fmt.format(m='', s=str(secs), d=str(tenths))
def isVersionPython2():
    """Return True when running under a Python 2.x interpreter."""
    return sys.version[:2] == "2."
def getPythonVersionStr():
    """Return the interpreter version string, e.g. '3.11.2'."""
    version_token, *_ = sys.version.split()
    return version_token
def idAndLogSystemInfo():
    """Detect whether the host is a Raspberry Pi and log host/OS/Python info.

    Sets the module-level IS_SYS_RASPBERRY_PI flag based on the device-tree
    model string (a Linux-only path; read failures are silently ignored).
    """
    global IS_SYS_RASPBERRY_PI
    IS_SYS_RASPBERRY_PI = False
    try:
        modelStr = None
        try:
            # only present on Linux SBCs; any failure just leaves modelStr None
            fileHnd = open("/proc/device-tree/model", "r")
            modelStr = fileHnd.read()
            fileHnd.close()
        except:
            pass
        if modelStr and "raspberry pi" in modelStr.lower():
            IS_SYS_RASPBERRY_PI = True
            # model string is NUL-terminated in the device tree
            logger.info("Host machine: " + modelStr.strip('\0'))
        logger.info("Host OS: {} {}".format(platform.system(), platform.release()))
        logger.info("Python version: {}".format(getPythonVersionStr()))
    except Exception:
        logger.exception("Error in 'idAndLogSystemInfo()'")
def isSysRaspberryPi():
    """Accessor for the Raspberry Pi detection flag set by 'idAndLogSystemInfo()'."""
    return IS_SYS_RASPBERRY_PI
# Returns "primary" IP address for local host. Based on:
# https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib
# and https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-from-nic-in-python
def getLocalIPAddress():
    """Return the host's primary IPv4 address as a string.

    Tries a UDP connect trick first (no packets are actually sent); falls
    back to parsing 'hostname --all-ip-addresses' output.

    Raises:
        RuntimeError: if no suitable address can be determined.
    """
    try:
        s = None
        try:
            # connecting a UDP socket selects the outbound interface without
            # sending traffic; the chosen local address is then readable
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect(('10.255.255.255', 1))
            IP = s.getsockname()[0]
        finally:
            if s:
                s.close()
    except:
        IP = None
    if IP:
        return IP
    # use alternate method that does not rely on internet access
    ips = subprocess.check_output(['hostname', '--all-ip-addresses']).decode("utf-8").rstrip()
    logger.debug("Result of 'hostname --all-ip-addresses': " + str(ips))
    if ips:
        for IP in ips.split(' '):
            # first IPv4-looking address that is not loopback
            if IP.find('.') > 0 and not IP.startswith("127."):
                return IP
    raise RuntimeError("Unable to determine IP address via 'hostname' command")
# Substitutes asterisks in the IP address 'destAddrStr' with values from the IP address
# fetched via the given 'determineHostAddressFn' function.
def substituteAddrWildcards(determineHostAddressFn, destAddrStr):
    try:
        if not (determineHostAddressFn and destAddrStr) or destAddrStr.find('*') < 0:
            return destAddrStr
        portPos = destAddrStr.find(':')  # find position of port specifier (i.e., ":5000")
        if portPos <= 0:
            portPos = len(destAddrStr)
        hostAddrStr = determineHostAddressFn()
        if destAddrStr[:portPos] == "*":
            # single "*" == full substitution
            return hostAddrStr + destAddrStr[portPos:]
        hostOctets = hostAddrStr.split('.')
        destOctets = destAddrStr.split('.')
        if len(hostOctets) == len(destOctets):
            # ("192.168.0.130", "*.*.*.97") => "192.168.0.97"
            merged = [h if d == "*" else d for h, d in zip(hostOctets, destOctets)]
            return '.'.join(merged)
        if len(destOctets) == 2 and len(hostOctets) == 4 and destOctets[0] == "*":
            # ("192.168.0.130", "*.97") => "192.168.0.97"
            return '.'.join(hostOctets[:-1]) + '.' + destOctets[1]
    except Exception:
        logger.exception("Error in 'substituteAddrWildcards()'")
    return destAddrStr
# Checks if given file or directory is owned by 'root' and changes owner to 'pi' user if so.
# Returns True if owner changed to 'pi' user; False if not.
def checkSetFileOwnerPi(fileNameStr):
    """Change ownership of 'fileNameStr' from 'root' to 'pi' (Raspberry Pi only).

    Recurses one level into a directory's plain files after a successful chown.
    Returns True if the owner was changed; False otherwise (including errors).
    """
    try:
        if IS_SYS_RASPBERRY_PI:
            # check that 'pi' user exists, file/dir exists, and owner is 'root'
            if os.path.isdir("/home/pi") and os.path.exists(fileNameStr) and os.stat(fileNameStr).st_uid == 0:
                subprocess.check_call(["sudo", "chown", "pi:pi", fileNameStr])
                # re-stat to confirm ownership actually changed away from root
                if os.stat(fileNameStr).st_uid != 0:
                    if os.path.isdir(fileNameStr):  # if dir then also apply to files in dir
                        file_list = list(filter(os.path.isfile, glob.glob(fileNameStr + "/*.*")))
                        for chk_path in file_list:
                            checkSetFileOwnerPi(chk_path)
                    return True
                logger.info("Unable to change owner in 'checkSetFileOwnerPi()': " + fileNameStr)
    except Exception:
        logger.exception("Error in 'checkSetFileOwnerPi()'")
    return False
# Scans the given binary-data string for a "prefixed" substring and returns the substring.
# dataStr format: b'PREFIXSTR: substr\0'
def findPrefixedSubstring(dataStr, prefixStr, maxTextSize):
    start = dataStr.find(prefixStr.encode())
    if start < 0:
        return None
    start += len(prefixStr)
    # substring runs to the first NUL, or to the end of the data
    end = dataStr.find(b'\0', start)
    if end < 0:
        end = len(dataStr)
    # reject empty or oversized substrings
    if not (start < end <= start + maxTextSize):
        return None
    return dataStr[start:end].decode()
# Wrapper to be used as a decorator on thread functions, etc, so their exception
# details are sent to the log file (instead of 'stderr').
def catchLogExceptionsWrapper(func):
    """Decorator: run 'func', logging (not raising) any Exception.

    Returns None when the wrapped call raises. Fixed to preserve the wrapped
    function's metadata (__name__, __doc__) so logs and debuggers show the
    real function name instead of 'wrapper'.
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            logger.exception("Exception via catchLogExceptionsWrapper")
    return wrapper
# Modifies a name with a human-readable suffix (name 2, name 3, etc.)
# guaranteed to be unique within supplied list of selections
def uniqueName(desiredName, otherNames):
    if desiredName not in otherNames:
        return desiredName
    # bump a trailing integer suffix, or start one at 2, then retry
    suffixMatch = re.match(r'^(.*) ([0-9]*)$', desiredName)
    if suffixMatch:
        candidate = suffixMatch.group(1) + ' ' + str(int(suffixMatch.group(2)) + 1)
    else:
        candidate = desiredName + " 2"
    return uniqueName(candidate, otherNames)
# Appends the given string to the "base" part of the given filename.
def appendToBaseFilename(fileNameStr, addStr):
    base, sep, ext = fileNameStr.rpartition('.')
    if not sep:
        # no extension present; just append
        return fileNameStr + addStr
    return base + addStr + sep + ext
def hslToHex(h, s, l):
    """Convert an HSL color to a lower-case '#rrggbb' hex string.

    Args:
        h: hue in degrees (0-359); falsy (None/0) -> randomized.
        s: saturation in percent (0-100); falsy -> randomized.
        l: lightness in percent (0-100); falsy -> randomized.
    """
    if not h:
        h = random.randint(0, 359)
    if not s:
        s = random.randint(0, 100)
    if not l:
        l = random.randint(0, 100)
    h = h / 360.0
    s = s / 100.0
    l = l / 100.0
    if s == 0:
        # Achromatic: every channel equals the lightness.
        # Bug fix: previously the raw 0..1 float was fed straight into the
        # '{:02x}' formatter, raising ValueError whenever this branch ran.
        r = g = b = int(round(l * 255))
    else:
        def hue2rgb(p, q, t):
            # standard HSL-to-RGB helper for a single channel
            if t < 0:
                t += 1
            if t > 1:
                t -= 1
            if t < 1 / 6:
                return p + (q - p) * 6 * t
            if t < 1 / 2:
                return q
            if t < 2 / 3:
                return p + (q - p) * (2 / 3 - t) * 6
            return p
        q = l * (1 + s) if l < 0.5 else l + s - l * s
        p = 2 * l - q
        r = int(round(hue2rgb(p, q, h + 1 / 3) * 255))
        g = int(round(hue2rgb(p, q, h) * 255))
        b = int(round(hue2rgb(p, q, h - 1 / 3) * 255))
    return '#{0:02x}{1:02x}{2:02x}'.format(r, g, b)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,093
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_brackets.py
|
import unittest
import json
import rh.app.race_explorer_core as racex
import rh.endpoints.heat_generator_endpoints as heatgen
from rh.util import StrictJsonEncoder
class BracketsTest(unittest.TestCase):
    """Round-trips the bracket generators: seeds synthetic race results,
    recomputes metrics and leaderboards each stage, and checks the final
    ranking against the expected finishing order."""
    DEBUG = False  # set True to dump intermediate JSON files for inspection

    def test_multigp_brackets(self):
        actual_positions = self.run_brackets("multigp", heatgen.mgp_brackets, 6)
        expected_positions = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6',
                              'P9', 'P10', 'P7', 'P8', 'P11', 'P12',
                              'P13', 'P14', 'P15', 'P16'
                              ]
        self.assertListEqual(actual_positions, expected_positions)

    def test_fai_single_brackets(self):
        actual_positions = self.run_brackets("fai-single", heatgen.fai_single_brackets_16, 3)
        expected_positions = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6', 'P7', 'P8']
        self.assertListEqual(actual_positions, expected_positions)

    def test_fai_double_brackets(self):
        actual_positions = self.run_brackets("fai-single", heatgen.fai_double_brackets_16, 6)
        expected_positions = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6', 'P7', 'P8']
        self.assertListEqual(actual_positions, expected_positions)

    def run_brackets(self, test_name, bracket_func, n_brackets):
        """Drive 'bracket_func' through 'n_brackets' stages and return the final
        ranking (list of pilot names) from the mains-class leaderboard."""
        with open('tests/test_converted_ifpv_event.json') as f:
            event_data = json.loads(f.read())
        event_data['formats'] = {'BDRA Qualifying': {'objective': 'most-laps-quickest-time'}}
        event_name = event_data['name']
        # empty per-pilot results skeleton for the event
        results = {'pilots': {pilot: {'events': {event_name: {'stages': {}}}} for pilot in event_data['pilots']}}
        stage_idx = 0
        race_class_name = 'BDRA Open'
        event_data['stages'][stage_idx]['leaderboards'] = {race_class_name: {'method': 'best'}}
        results_class = race_class_name
        mains_class = race_class_name
        for bracket in range(1, n_brackets+1):
            # score the current stage, then build the next bracket from its leaderboard
            self.generate_heat_results(event_data, event_name, stage_idx, results)
            self.debugJson('test-{}-stage-{}-generated-results.json'.format(test_name, stage_idx), results)
            results = racex.calculate_metrics(results, event_data)
            self.debugJson('test-{}-stage-{}-metrics.json'.format(test_name, stage_idx), results)
            leaderboards = racex.calculate_leaderboard(results, event_data)
            self.debugJson('test-{}-stage-{}-leaderboards.json'.format(test_name, stage_idx), leaderboards)
            # prep next stage
            bracket_name = 'Bracket '+str(bracket)
            event_data['stages'].append({'name': bracket_name, 'heats': []})
            stage_idx = len(event_data['stages']) - 1
            bracket_data = bracket_func(leaderboards, stage_idx, results_class, mains_class, bracket)
            self.assertGreater(len(bracket_data['heats']), 0, bracket_name)
            event_data['stages'][stage_idx].update(bracket_data)
            self.debugJson('test-{}-stage-{}-bracket-{}.json'.format(test_name, stage_idx, bracket), event_data)
        # score the final stage and extract its ranking
        self.generate_heat_results(event_data, event_name, stage_idx, results)
        self.debugJson('test-{}-stage-{}-generated-results.json'.format(test_name, stage_idx), results)
        results = racex.calculate_metrics(results, event_data)
        self.debugJson('test-{}-stage-{}-metrics.json'.format(test_name, stage_idx), results)
        leaderboards = racex.calculate_leaderboard(results, event_data)
        self.debugJson('test-{}-stage-{}-leaderboards.json'.format(test_name, stage_idx), leaderboards)
        ranking = list(map(lambda e: e['pilot'], leaderboards['stages'][stage_idx]['leaderboards'][mains_class]['ranking']))
        return ranking

    def generate_heat_results(self, event_data, event_name, stage_idx, results):
        """Fill 'results' with synthetic laps: lower-numbered seats get more
        (and therefore proportionally faster) laps, fixing the finish order."""
        max_laps = 17
        for heat_idx, heat in enumerate(event_data['stages'][stage_idx]['heats']):
            for seat in heat['seats']:
                # e.g. seat 'P3' -> 14 laps; seat number encodes desired rank
                laps_to_assign = max_laps - int(seat[1:])
                lap_time = 1/laps_to_assign
                laps = []
                for i in range(laps_to_assign):
                    laps.append({'timestamp': i*lap_time, 'lap': i, 'location': 0, 'seat': 0})
                round_results = {'laps': laps}
                heat_results = {'rounds': [round_results]}
                stage_results = {'heats': {heat_idx: heat_results}}
                event_results = results['pilots'][seat]['events'][event_name]
                event_results['stages'][stage_idx] = stage_results
                results['pilots'][seat]['events'][event_name] = event_results

    def debugJson(self, filename, data):
        # write intermediate state to disk only when DEBUG is enabled
        if BracketsTest.DEBUG:
            with open(filename, 'wt') as f:
                f.write(json.dumps(data, cls=StrictJsonEncoder))
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,094
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_util.py
|
import unittest
from rh.util import Averager
import numpy as np
class UtilTest(unittest.TestCase):
    """Tests for the sliding-window Averager statistics helper."""

    def test_average(self):
        window_size = 10
        averager = Averager(window_size)
        samples = np.random.sample(10 * window_size)
        for idx, value in enumerate(samples):
            averager.append(value)
            # statistics should reflect only the trailing window
            window = samples[max(idx - window_size + 1, 0):idx + 1]
            self.assert_stats(averager, window)

    def test_average_clear(self):
        window_size = 10
        averager = Averager(window_size)
        samples = np.random.sample(window_size)
        for value in samples:
            averager.append(value)
        self.assert_stats(averager, samples)
        averager.clear()
        # after clear() all statistics revert to None
        for stat in (averager.min, averager.max, averager.mean, averager.std):
            self.assertIsNone(stat)
        samples = np.random.sample(window_size)
        for value in samples:
            averager.append(value)
        self.assert_stats(averager, samples)

    def assert_stats(self, avg, expectedSamples):
        # compare against numpy's reference implementations
        self.assertEqual(avg.min, np.min(expectedSamples))
        self.assertEqual(avg.max, np.max(expectedSamples))
        self.assertAlmostEqual(avg.mean, np.mean(expectedSamples), 10)
        self.assertAlmostEqual(avg.std, np.std(expectedSamples), 10)
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,095
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/server.py
|
'''RotorHazard server script'''
RELEASE_VERSION = "4.0.0-dev.1"  # Public release version code
SERVER_API = 32+9  # Server API version
NODE_API_SUPPORTED = 37  # Minimum supported node version
NODE_API_BEST = 37  # Most recent node API
JSON_API = 3  # JSON API version
# This must be the first import for the time being. It is
# necessary to set up logging *before* anything else
# because there is a lot of code run through imports, and
# we would miss messages otherwise.
import logging
from rh.util import log
from datetime import datetime, timezone
from monotonic import monotonic
from rh.util import RHTimeFns, ms_counter, secs_to_millis, millis_to_secs
log.early_stage_setup()
logger = logging.getLogger(__name__)
PROGRAM_START = RHTimeFns.MonotonicEpochSync()
# Normal importing resumes here
import gevent.monkey
gevent.monkey.patch_all()
import copy
import io
import os
import sys
import base64
import argparse
import subprocess
import importlib
import functools
import socket
import random
import string
import math
from collections import OrderedDict
from six import unichr, string_types
from typing import Any, Dict, Optional
from flask import Flask, send_file, request, Response, session, templating, redirect, json
from flask_compress import Compress
from flask_socketio import emit
from flasgger import Swagger
from rh.app import SOCKET_IO
from rh.app.config import Config
from rh.app import Database, Language, PageCache, Results, RHData, RHRace, web
from rh.util import RHGPIO, RHUtils
from rh.util.RHUtils import catchLogExceptionsWrapper
from rh.cluster.ClusterNodeSet import SecondaryNode, ClusterNodeSet
from rh.cluster.SendAckQueue import SendAckQueue
from rh.util.ButtonInputHandler import ButtonInputHandler
from rh.util import StrictJsonEncoder, stm32loader
from rh.interface.BaseHardwareInterface import BaseHardwareInterface, BaseHardwareInterfaceEventBroadcaster
from rh.interface.RHInterface import RHInterface, RHFEAT_PH
# Events manager
from rh.events.eventmanager import Evt, EventManager
Events = EventManager()
# LED imports
from rh.leds import Color, ColorVal, hexToColor
from rh.events.led_event_manager import LEDEventManager, NoLEDManager, ClusterLEDManager, LEDEvent, ColorPattern
from rh.events.audio_event_manager import AudioEventManager
from rh.events.mqtt_event_manager import MqttEventManager
from rh.interface.MqttInterface import get_mqtt_interface_for
import rh.helpers as helper_pkg
import rh.interface as interface_pkg
import rh.sensors as sensor_pkg
import rh.leds as led_pkg
import rh.data_export as export_pkg
from rh.util.Plugins import Plugins, search_modules
from rh.sensors import Sensors
from rh.data_export import DataExportManager
APP: Any = Flask(__name__, static_url_path='/static')
APP.json_encoder = StrictJsonEncoder
Compress(APP)
APP.config['SWAGGER'] = {
'title': 'Race explorer API',
'uiversion': 3,
'openapi': '3.0.3'
}
Swagger(APP)
HEARTBEAT_THREAD = None
BACKGROUND_THREADS_ENABLED = True
HEARTBEAT_DATA_RATE_FACTOR = 5
ERROR_REPORT_INTERVAL_SECS = 600 # delay between comm-error reports to log
DB_BKP_DIR_NAME = 'db_bkp'
IMDTABLER_JAR_NAME = 'static/IMDTabler.jar'
NODE_FW_PATHNAME = "firmware/RH_S32_BPill_node.bin"
# check if 'log' directory owned by 'root' and change owner to 'pi' user if so
if RHUtils.checkSetFileOwnerPi(log.LOG_DIR_NAME):
logger.info("Changed '{0}' dir owner from 'root' to 'pi'".format(log.LOG_DIR_NAME))
# command-line arguments:
CMDARG_VERSION_LONG_STR = '--version'  # show program version and exit
CMDARG_VERSION_SHORT_STR = '-v'  # show program version and exit
CMDARG_ZIP_LOGS_STR = '--ziplogs'  # create logs .zip file
CMDARG_JUMP_TO_BL_STR = '--jumptobl'  # send jump-to-bootloader command to node
CMDARG_FLASH_BPILL_STR = '--flashbpill'  # flash firmware onto S32_BPill processor
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--version', '-v', action='version', version=RELEASE_VERSION)
arg_parser.add_argument('--config', '-c', action='store', metavar='file_name', help='use this configuration file')
arg_parser.add_argument('--database', '-db', action='store', metavar='db_name', help='use this database (file name or URL)')
arg_parser.add_argument('--autostart', '-a', action='store_true', help='Automatically start a race')
arg_parser.add_argument('--ziplogs', action='store_true', help='zip log files')
arg_parser.add_argument('--jumptobl', action='store_true', help='jump to bootloader')
arg_parser.add_argument('--flashbpill', action='store', nargs='?', metavar='source', const=stm32loader.DEF_BINSRC_STR, help='flash an STM32 BluePill processor')
# parse real argv only when run as a script; under import (e.g. tests) use defaults
args = arg_parser.parse_args(None if __name__ == '__main__' else [])
# config file resolution order: CLI flag, then RH_CONFIG env var, then built-in default
config_file_name = args.config;
if not config_file_name:
    config_file_name = os.environ.get('RH_CONFIG')
if not config_file_name:
    config_file_name = Config.FILE_NAME
rhconfig = Config()
rhconfig.load(config_file_name)
web.init(rhconfig)
TIMER_ID = 'http://' + socket.gethostname() + ':' + str(rhconfig.GENERAL['HTTP_PORT'])
# database resolution order: CLI flag, config (when -c was given explicitly),
# RH_DATABASE env var, config, then built-in default
DB_FILE_NAME = args.database
if not DB_FILE_NAME and args.config:
    DB_FILE_NAME = rhconfig.GENERAL['DATABASE']
if not DB_FILE_NAME:
    DB_FILE_NAME = os.environ.get('RH_DATABASE')
if not DB_FILE_NAME:
    DB_FILE_NAME = rhconfig.GENERAL['DATABASE']
if not DB_FILE_NAME:
    DB_FILE_NAME = Config.DB_FILE_NAME
if args.ziplogs:
    # utility mode: bundle log files into a zip and exit
    log.create_log_files_zip(logger, config_file_name, DB_FILE_NAME)
    sys.exit(0)
if not args.jumptobl:  # handle jump-to-bootloader argument later
    if args.flashbpill:
        # utility mode: flash BluePill firmware over the first serial port and exit
        portStr = rhconfig.SERIAL_PORTS[0] if rhconfig.SERIAL_PORTS and \
            len(rhconfig.SERIAL_PORTS) > 0 else None
        srcStr = args.flashbpill
        successFlag = stm32loader.flash_file_to_stm32(portStr, srcStr)
        sys.exit(0 if successFlag else 1)
logger.info('RotorHazard v{0}'.format(RELEASE_VERSION))
TEAM_NAMES_LIST = [str(unichr(i)) for i in range(65, 91)] # list of 'A' to 'Z' strings
BASEDIR = os.getcwd()
APP.config['SQLALCHEMY_DATABASE_URI'] = Database.db_uri(BASEDIR, DB_FILE_NAME)
APP.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
Database.DB.init_app(APP)
Database.DB.app = APP
# start SocketIO service
SOCKET_IO.server_options['async_mode'] = 'gevent'
SOCKET_IO.server_options['cors_allowed_origins'] = rhconfig.GENERAL['CORS_ALLOWED_HOSTS']
SOCKET_IO.init_app(APP, json=json)
# this is the moment where we can forward log-messages to the frontend, and
# thus set up logging for good.
Current_log_path_name = log.later_stage_setup(rhconfig.LOGGING, SOCKET_IO)
INTERFACE: Optional[BaseHardwareInterface] = None # initialized later
SENSORS = Sensors()
CLUSTER = None # initialized later
CHORUS_API = None # initialized later
MQTT_API = None # initialized later
STARTABLES = []
ClusterSendAckQueueObj = None
serverInfo = None
serverInfoItems = None
Use_imdtabler_jar_flag = False # set True if IMDTabler.jar is available
vrx_controller = None
server_ipaddress_str = None
ShutdownButtonInputHandler = None
Server_secondary_mode = None
RACE = RHRace.RHRace() # For storing race management variables
LAST_RACE: Optional[RHRace.RHRace] = None
SECONDARY_RACE_FORMAT: Optional['RHRaceFormat'] = None
RESULTS_CACHE = Results.ResultsCache()
RHDATA = RHData.RHData(Database, Events, RACE, SERVER_API, DB_FILE_NAME, DB_BKP_DIR_NAME, RESULTS_CACHE) # Primary race data storage
RESULTS = Results.Results(RHDATA, RESULTS_CACHE)
RACE.result_fn = RESULTS.calc_current_race_leaderboard
RACE.team_result_fn = RESULTS.calc_team_leaderboard
PAGE_CACHE = PageCache.PageCache(RESULTS) # For storing page cache
LANGUAGE = Language.Language(RHDATA) # initialize language
def __web(text):
    """Translate 'text' using the best language match from the current HTTP
    request, falling back to the default language outside a request context."""
    best_lang = None
    if request:
        best_lang = request.accept_languages.best_match(LANGUAGE.getLanguageTags())
    return LANGUAGE.__(text, best_lang)
__sys = LANGUAGE.__  # translation using the server's default language
__ = __web  # Shortcut to translation function
RHDATA.late_init(PAGE_CACHE, LANGUAGE) # Give RHDATA additional references
APP.rhserver = vars()
ui_server_messages = {}  # mainclass -> message descriptor shown in the frontend
def set_ui_message(mainclass, message, header=None, subclass=None):
    """Register (or replace) a server message to display in the frontend.

    Args:
        mainclass: grouping key; a later call with the same key overwrites.
        message: message body text.
        header: optional heading text.
        subclass: optional styling subclass.
    """
    entry = {'message': message}
    if header:
        entry['header'] = header
    if subclass:
        entry['subclass'] = subclass
    ui_server_messages[mainclass] = entry
# Wrapper to be used as a decorator on callback functions that do database calls,
# so their exception details are sent to the log file (instead of 'stderr')
# and the database session is closed on thread exit (prevents DB-file handles left open).
def catchLogExcDBCloseWrapper(func):
    '''Decorator for DB-touching callbacks: log exceptions to the log file
    (instead of stderr) and close the DB session on exit so no file handles
    are left open. Returns None if 'func' raised.'''
    # functools.wraps preserves the wrapped function's name/docstring
    # (consistent with requires_auth below; the original omitted it)
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            retVal = func(*args, **kwargs)
            RHDATA.close()
            return retVal
        except:  # bare except kept deliberately (greenlet kills must also be logged)
            logger.exception("Exception via catchLogExcDBCloseWrapper")
            try:
                RHDATA.close()
            except:
                logger.exception("Error closing DB session in catchLogExcDBCloseWrapper-catch")
    return wrapper
# Return 'DEF_NODE_FWUPDATE_URL' config value; if not set in 'config.json'
# then return default value based on BASEDIR and server RELEASE_VERSION
def getDefNodeFwUpdateUrl():
    '''Return the configured 'DEF_NODE_FWUPDATE_URL', or a default firmware
    location derived from the server version and BASEDIR when unset.'''
    try:
        if rhconfig.GENERAL['DEF_NODE_FWUPDATE_URL']:
            return rhconfig.GENERAL['DEF_NODE_FWUPDATE_URL']
        if RELEASE_VERSION.lower().find("dev") > 0: # if "dev" server version then
            retStr = stm32loader.DEF_BINSRC_STR # use current "dev" firmware at URL
        else:
            # return path that is up two levels from BASEDIR, and then NODE_FW_PATHNAME
            retStr = os.path.abspath(os.path.join(os.path.join(os.path.join(BASEDIR, os.pardir), \
                                                               os.pardir), NODE_FW_PATHNAME))
        # check if file with better-matching processor type (i.e., STM32F4) is available
        try:
            curTypStr = INTERFACE.node_managers[0].firmware_proctype_str if len(INTERFACE.node_managers) else None
            if curTypStr:
                fwTypStr = getFwfileProctypeStr(retStr)
                if fwTypStr and curTypStr != fwTypStr:
                    # try a file with the current processor type appended to the base name
                    altFwFNameStr = RHUtils.appendToBaseFilename(retStr, ('_'+curTypStr))
                    altFwTypeStr = getFwfileProctypeStr(altFwFNameStr)
                    if curTypStr == altFwTypeStr:
                        logger.debug("Using better-matching node-firmware file: " + altFwFNameStr)
                        return altFwFNameStr
        except Exception as ex:
            logger.debug("Error checking fw type vs current type: " + str(ex))
        return retStr
    except:
        logger.exception("Error determining value for 'DEF_NODE_FWUPDATE_URL'")
    # last-resort fallback path
    return "/home/pi/RotorHazard/" + NODE_FW_PATHNAME
# Returns the processor-type string from the given firmware file, or None if not found
def getFwfileProctypeStr(fileStr):
    '''Return the processor-type string embedded in firmware file 'fileStr',
    or None if the file cannot be read or carries no type marker.'''
    try:
        fw_data = stm32loader.load_source_file(fileStr, False)
        if fw_data:
            return RHUtils.findPrefixedSubstring(fw_data, INTERFACE.FW_PROCTYPE_PREFIXSTR, \
                                                 INTERFACE.FW_TEXT_BLOCK_SIZE)
    except Exception as ex:
        logger.debug("Error processing file '{}' in 'getFwfileProctypeStr()': {}".format(fileStr, ex))
    return None
def getCurrentProfile():
    '''Return the currently selected frequency profile from the DB.'''
    return RHDATA.get_profile(RHDATA.get_optionInt('currentProfile'))
def getCurrentRaceFormat():
    '''Return the active race format, loading it from the DB on first use
    (a shared RHRaceFormat copy is cached on RACE.format).'''
    if RACE.format is None:
        val = RHDATA.get_optionInt('currentFormat')
        if val:
            race_format = RHDATA.get_raceFormat(val)
            if not race_format:
                # stored id no longer exists: fall back and repair the option
                race_format = RHDATA.get_first_raceFormat()
                RHDATA.set_option('currentFormat', race_format.id)
        else:
            race_format = RHDATA.get_first_raceFormat()
        # create a shared instance
        RACE.format = RHRaceFormat.copy(race_format)
        RACE.format.id = race_format.id #pylint: disable=attribute-defined-outside-init
    return RACE.format
def getCurrentDbRaceFormat():
    '''Return the DB object for the current race format, or None when the
    active format is an internal (non-DB) one.'''
    if RACE.format is not None and not RHRaceFormat.isDbBased(RACE.format):
        return None
    return RHDATA.get_raceFormat(RHDATA.get_optionInt('currentFormat'))
def setCurrentRaceFormat(race_format, **kwargs):
    '''Make 'race_format' the active format; DB-based formats are copied into
    a shared instance and persisted as the 'currentFormat' option.
    Pass silent=... to suppress the lap re-emit.'''
    if RHRaceFormat.isDbBased(race_format): # stored in DB, not internal race format
        RHDATA.set_option('currentFormat', race_format.id)
        shared = RHRaceFormat.copy(race_format)
        shared.id = race_format.id
        RACE.format = shared
    else:
        RACE.format = race_format
    if 'silent' not in kwargs:
        emit_current_laps()
class RHRaceFormat():
    '''In-memory race format; mirrors the fields of a DB race-format row.'''

    # field names assigned by __init__ and duplicated by copy(); keep in sync
    _FIELDS = ('name', 'race_mode', 'race_time_sec', 'lap_grace_sec',
               'start_delay_min', 'start_delay_max', 'staging_tones',
               'number_laps_win', 'win_condition', 'team_racing_mode',
               'start_behavior')

    def __init__(self, name, race_mode, race_time_sec, lap_grace_sec, start_delay_min, start_delay_max, staging_tones, number_laps_win, win_condition, team_racing_mode, start_behavior):
        params = locals()
        for field in self._FIELDS:
            setattr(self, field, params[field])

    @classmethod
    def copy(cls, race_format):
        '''Return a new RHRaceFormat carrying the same field values.'''
        return cls(**{field: getattr(race_format, field) for field in cls._FIELDS})

    @classmethod
    def isDbBased(cls, race_format):
        '''DB-backed formats carry an 'id' attribute; ad-hoc ones do not.'''
        return hasattr(race_format, 'id')
#
# Authentication
#
def check_auth(username, password):
    '''Check if a username password combination is valid.'''
    expected_user = rhconfig.GENERAL['ADMIN_USERNAME']
    expected_pass = rhconfig.GENERAL['ADMIN_PASSWORD']
    return username == expected_user and password == expected_pass
def authenticate():
    '''Sends a 401 response that enables basic auth.'''
    body = ('Could not verify your access level for that URL.\n'
            'You have to login with proper credentials')
    headers = {'WWW-Authenticate': 'Basic realm="Login Required"'}
    return Response(body, 401, headers)
def requires_auth(f):
    '''Decorator: gate a route behind HTTP basic-auth admin credentials.'''
    @functools.wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if auth and check_auth(auth.username, auth.password):
            return f(*args, **kwargs)
        return authenticate()
    return decorated
# Flask template render with exception catch, so exception
# details are sent to the log file (instead of 'stderr').
def render_template(template_name_or_list, **context):
    '''Flask render_template with exceptions sent to the log file
    (instead of 'stderr').'''
    try:
        return templating.render_template(template_name_or_list, **context)
    except Exception:
        logger.exception("Exception in render_template")
    return "Error rendering template"
#
# Routes
#
@APP.route('/')
def render_index():
    '''Home page.'''
    return render_template('home.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option,
                           Debug=rhconfig.GENERAL['DEBUG'])
@APP.route('/event')
def render_event():
    '''Heat summary (event) page.'''
    return render_template('event.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/results')
def render_results():
    '''Round (results) summary page.'''
    return render_template('results.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option,
                           Debug=rhconfig.GENERAL['DEBUG'])
@APP.route('/run')
@requires_auth
def render_run():
    '''Race management (run) page.'''
    # only nodes with an assigned frequency are shown
    nodes = [{'freq': freq, 'index': idx}
             for idx, freq in enumerate(INTERFACE.get_node_frequencies()) if freq]
    return render_template('run.html', serverInfo=serverInfo, getOption=RHDATA.get_option, __=__,
                           led_enabled=(led_manager.isEnabled() or (CLUSTER and CLUSTER.hasRecEventsSecondaries())),
                           vrx_enabled=vrx_controller is not None,
                           num_nodes=RACE.num_nodes,
                           nodes=nodes,
                           cluster_has_secondaries=(CLUSTER and CLUSTER.hasSecondaries()))
@APP.route('/current')
def render_current():
    '''Read-only current race page.'''
    # only nodes with an assigned frequency are shown
    nodes = [{'freq': freq, 'index': idx}
             for idx, freq in enumerate(INTERFACE.get_node_frequencies()) if freq]
    return render_template('current.html', serverInfo=serverInfo, getOption=RHDATA.get_option, __=__,
                           num_nodes=RACE.num_nodes,
                           nodes=nodes,
                           cluster_has_secondaries=(CLUSTER and CLUSTER.hasSecondaries()))
@APP.route('/marshal')
@requires_auth
def render_marshal():
    '''Race marshaling page.'''
    return render_template('marshal.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/settings')
@requires_auth
def render_settings():
    '''Route to settings page.'''
    # assemble queued server messages (see set_ui_message) into <li> HTML
    server_messages_formatted = ''
    if len(ui_server_messages):
        for key, item in ui_server_messages.items():
            message = '<li class="' + key
            if 'subclass' in item and item['subclass']:
                message += ' ' + key + '-' + item['subclass']
            if 'header' in item and item['header']:
                message += ' ' + item['header'].lower()
            message += '">'
            if 'header' in item and item['header']:
                message += '<strong>' + __(item['header']) + ':</strong> '
            message += __(item['message'])
            message += '</li>'
            server_messages_formatted += message
    # warn when running on built-in defaults (no config file was found)
    if rhconfig.GENERAL['configFile'] == 'defaults':
        server_messages_formatted += '<li class="config config-none warning"><strong>' + __('Warning') + ': ' + '</strong>' + __('No configuration file was loaded. Falling back to default configuration.') + '<br />' + __('See <a href="/docs?d=User Guide.md#set-up-config-file">User Guide</a> for more information.') +'</li>'
    return render_template('settings.html', serverInfo=serverInfo, getOption=RHDATA.get_option, __=__,
        led_enabled=(led_manager.isEnabled() or (CLUSTER and CLUSTER.hasRecEventsSecondaries())),
        led_events_enabled=led_manager.isEnabled(),
        vrx_enabled=vrx_controller!=None,
        num_nodes=RACE.num_nodes,
        server_messages=server_messages_formatted,
        cluster_has_secondaries=(CLUSTER and CLUSTER.hasSecondaries()),
        node_fw_updatable=(hasattr(INTERFACE, 'fwupd_serial_port') and INTERFACE.fwupd_serial_port!=None),
        is_raspberry_pi=RHUtils.isSysRaspberryPi(),
        Debug=rhconfig.GENERAL['DEBUG'])
@APP.route('/streams')
def render_stream():
    '''Index of available streaming overlays.'''
    return render_template('streams.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/stream/results')
def render_stream_results():
    '''Current race leaderboard overlay for streaming.'''
    return render_template('streamresults.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/stream/node/<int:node_id>')
def render_stream_node(node_id):
    '''Single-node overlay for streaming; 'node_id' is 1-based in the URL.'''
    # lower bound added: original accepted node_id 0 and passed -1 to the template
    if 1 <= node_id <= RACE.num_nodes:
        return render_template('streamnode.html', serverInfo=serverInfo, getOption=RHDATA.get_option, __=__,
                               node_id=node_id-1
                               )
    # original returned bool False, which is not a valid Flask response
    return 'Invalid node', 404
@APP.route('/stream/class/<int:class_id>')
def render_stream_class(class_id):
    '''Class leaderboard overlay for streaming.'''
    return render_template('streamclass.html', class_id=class_id, __=__,
                           serverInfo=serverInfo, getOption=RHDATA.get_option)
@APP.route('/stream/heat/<int:heat_id>')
def render_stream_heat(heat_id):
    '''Heat display overlay for streaming.'''
    return render_template('streamheat.html', heat_id=heat_id, __=__,
                           serverInfo=serverInfo, getOption=RHDATA.get_option,
                           num_nodes=RACE.num_nodes)
@APP.route('/scanner')
@requires_auth
def render_scanner():
    '''Frequency scanner page.'''
    return render_template('scanner.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/decoder')
@requires_auth
def render_decoder():
    '''Decoder page.'''
    return render_template('decoder.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option, num_nodes=RACE.num_nodes)
@APP.route('/imdtabler')
def render_imdtabler():
    '''IMDTabler page.'''
    return render_template('imdtabler.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option)
@APP.route('/updatenodes')
@requires_auth
def render_updatenodes():
    '''Node firmware update page.'''
    return render_template('updatenodes.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option,
                           fw_src_str=getDefNodeFwUpdateUrl())
# Debug Routes
@APP.route('/hardwarelog')
@requires_auth
def render_hardwarelog():
    '''Hardware log debug page.'''
    return render_template('hardwarelog.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option)
@APP.route('/database')
@requires_auth
def render_database():
    '''Raw database contents debug page.'''
    db_tables = dict(
        pilots=RHDATA.get_pilots(),
        heats=RHDATA.get_heats(),
        heatnodes=RHDATA.get_heatNodes(),
        race_class=RHDATA.get_raceClasses(),
        savedraceMeta=RHDATA.get_savedRaceMetas(),
        savedraceLap=RHDATA.get_savedRaceLaps(),
        profiles=RHDATA.get_profiles(),
        race_format=RHDATA.get_raceFormats(),
        globalSettings=RHDATA.get_options())
    return render_template('database.html', serverInfo=serverInfo,
                           getOption=RHDATA.get_option, __=__, **db_tables)
@APP.route('/vrxstatus')
@requires_auth
def render_vrxstatus():
    '''VRx status debug page.'''
    return render_template('vrxstatus.html', __=__, serverInfo=serverInfo,
                           getOption=RHDATA.get_option)
# Documentation Viewer
@APP.route('/docs')
@requires_auth
def render_viewDocs():
    '''Doc viewer: render a file from the doc/ folder named by the 'd' query arg.

    Leading "../" components are stripped so the path cannot walk out of the
    doc folder.
    '''
    folderBase = '../doc/'
    try:
        docfile = request.args.get('d')
        # fix: original compared a 2-char slice (docfile[0:2]) against the
        # 3-char '../', which never matched, leaving the guard inoperative
        while docfile.startswith('../'):
            docfile = docfile[3:]
        # NOTE(review): embedded '../' segments are still possible; consider
        # flask's safe_join for a complete fix
        docPath = folderBase + docfile
        language = RHDATA.get_option("currentLanguage")
        if language:
            # prefer a translated copy of the document when one exists
            translated_path = folderBase + language + '/' + docfile
            if os.path.isfile(translated_path):
                docPath = translated_path
        with io.open(docPath, 'r', encoding="utf-8") as f:
            doc = f.read()
        return templating.render_template('viewdocs.html',
            serverInfo=serverInfo,
            getOption=RHDATA.get_option,
            __=__,
            doc=doc
            )
    except Exception:
        logger.exception("Exception in render_template")
        return "Error rendering documentation"
@APP.route('/img/<path:imgfile>')
@requires_auth
def render_viewImg(imgfile):
    '''Serve an image referenced from the doc viewer.

    Leading "../" components are stripped so the path stays under doc/img.
    '''
    folderBase = '../../doc/'
    folderImg = 'img/'
    # fix: original compared a 2-char slice (imgfile[0:2]) against the
    # 3-char '../', which never matched, leaving the guard inoperative
    while imgfile.startswith('../'):
        imgfile = imgfile[3:]
    # NOTE(review): embedded '../' segments are still possible; consider
    # flask's safe_join for a complete fix
    imgPath = folderBase + folderImg + imgfile
    language = RHDATA.get_option("currentLanguage")
    if language:
        # prefer a translated copy of the image when one exists
        translated_path = folderBase + language + '/' + folderImg + imgfile
        if os.path.isfile(translated_path):
            imgPath = translated_path
    return send_file(imgPath)
# Redirect routes (Previous versions/Delta 5)
@APP.route('/race')
def redirect_race():
    '''Legacy (Delta 5) URL: permanent redirect to /run.'''
    return redirect("/run", code=301)
@APP.route('/heats')
def redirect_heats():
    '''Legacy (Delta 5) URL: permanent redirect to /event.'''
    return redirect("/event", code=301)
def start_background_threads(forceFlag=False):
    '''Start the interface, heartbeat thread and shutdown-button monitor.
    No-op unless background threads are enabled or 'forceFlag' is set.'''
    global BACKGROUND_THREADS_ENABLED, HEARTBEAT_THREAD
    if not (BACKGROUND_THREADS_ENABLED or forceFlag):
        return
    BACKGROUND_THREADS_ENABLED = True
    INTERFACE.start()
    if HEARTBEAT_THREAD is None:
        HEARTBEAT_THREAD = gevent.spawn(heartbeat_thread_function)
        logger.debug('Heartbeat thread started')
    start_shutdown_button_thread()
def stop_background_threads():
    '''Stop the shutdown-button monitor, cluster, heartbeat thread and interface.'''
    try:
        stop_shutdown_button_thread()
        if CLUSTER:
            CLUSTER.shutdown()
        global BACKGROUND_THREADS_ENABLED
        BACKGROUND_THREADS_ENABLED = False
        global HEARTBEAT_THREAD
        if HEARTBEAT_THREAD:
            logger.info('Stopping heartbeat thread')
            HEARTBEAT_THREAD.kill(block=True, timeout=0.5)
            HEARTBEAT_THREAD = None
        INTERFACE.stop()
    except Exception:
        # fix: was logger.error, which discarded the exception traceback
        logger.exception("Error stopping background threads")
def shutdown(msg):
    '''Shut down the server: notify clients, stop background threads, close
    the hardware interface, then stop the HTTP server.'''
    emit_priority_message(msg, True, event=Evt.SHUTDOWN)
    Events.trigger(Evt.SHUTDOWN)
    stop_background_threads()
    INTERFACE.close()
    gevent.sleep(0.5)
    def stopSocketIo(socket_io):
        if socket_io:
            socket_io.stop()  # fix: was 'socket_io.stop' (attribute access only; never called)
    gevent.spawn(stopSocketIo, SOCKET_IO) # shut down flask http server
#
# Socket IO Events
#
@SOCKET_IO.on('connect')
@catchLogExceptionsWrapper
def connect_handler():
    '''On client connect: ensure background threads run and push initial data.'''
    logger.debug('Client connected')
    start_background_threads()
    # push initial data to this client only
    emit_frontend_load(nobroadcast=True)
@SOCKET_IO.on('disconnect')
def disconnect_handler():
    '''Emit disconnect event.'''
    # no per-client state to clean up; just record the disconnect
    logger.debug('Client disconnected')
# LiveTime compatible events
@SOCKET_IO.on('get_version')
@catchLogExceptionsWrapper
def on_get_version():
    '''LiveTime-compat: mark the session and report server version parts.'''
    session['LiveTime'] = True
    major, minor = RELEASE_VERSION.split('.')[0], RELEASE_VERSION.split('.')[1]
    return {'major': major, 'minor': minor}
@SOCKET_IO.on('get_timestamp')
@catchLogExceptionsWrapper
def on_get_timestamp():
    '''LiveTime-compat: return the current epoch timestamp (race start time
    while staging).'''
    if RACE.race_status == RHRace.RaceStatus.STAGING:
        mono_ms = RACE.start_time_ms
    else:
        mono_ms = ms_counter()
    return {'timestamp': PROGRAM_START.monotonic_to_epoch_millis(mono_ms)}
@SOCKET_IO.on('get_settings')
@catchLogExceptionsWrapper
def on_get_settings():
    '''LiveTime-compat: report per-node frequency and trigger RSSI.'''
    node_list = []
    for node in INTERFACE.nodes:
        node_list.append({'frequency': node.frequency,
                          'trigger_rssi': node.enter_at_level})
    return {'nodes': node_list}
@SOCKET_IO.on('reset_auto_calibration')
@catchLogExceptionsWrapper
def on_reset_auto_calibration(data):
    '''LiveTime-compat: stop and discard the current race, switch to the
    secondary-timer race format, and restage. 'data' is unused.'''
    on_stop_race()
    on_discard_laps()
    setCurrentRaceFormat(SECONDARY_RACE_FORMAT)
    emit_race_format()
    on_stage_race()
# Cluster events
def emit_cluster_msg_to_primary(messageType, messagePayload, waitForAckFlag=True):
    '''Emits cluster message to primary timer.'''
    global ClusterSendAckQueueObj
    # lazily create the ack queue on first use
    if not ClusterSendAckQueueObj:
        ClusterSendAckQueueObj = SendAckQueue(20, SOCKET_IO, logger)
    ClusterSendAckQueueObj.put(messageType, messagePayload, waitForAckFlag)
def emit_join_cluster_response():
    '''Emits 'join_cluster_response' message to primary timer.'''
    node_managers = {}
    for node_manager in INTERFACE.node_managers:
        node_managers[node_manager.addr] = [node.index for node in node_manager.nodes]
    payload = {
        'server_info': json.dumps(serverInfoItems),
        'node_managers': node_managers
    }
    emit_cluster_msg_to_primary('join_cluster_response', payload, False)
def has_joined_cluster():
    '''True once a cluster ack-queue exists (i.e. we have messaged a primary).'''
    return bool(ClusterSendAckQueueObj)
@SOCKET_IO.on('join_cluster')
@catchLogExceptionsWrapper
def on_join_cluster():
    '''Legacy join request from a primary timer: switch to the secondary
    race format and announce the join.'''
    setCurrentRaceFormat(SECONDARY_RACE_FORMAT)
    emit_race_format()
    logger.info("Joined cluster")
    Events.trigger(Evt.CLUSTER_JOIN, {
        'message': __('Joined cluster')
    })
@SOCKET_IO.on('join_cluster_ex')
@catchLogExceptionsWrapper
def on_join_cluster_ex(data=None):
    '''Extended join request from a primary timer; 'data' may carry a 'mode'
    (split or mirror). On first join as a split timer the local DB is backed
    up and race data cleared.'''
    global Server_secondary_mode
    prev_mode = Server_secondary_mode
    Server_secondary_mode = str(data.get('mode', SecondaryNode.SPLIT_MODE)) if data else None
    logger.info("Joined cluster" + ((" as '" + Server_secondary_mode + "' timer") \
                                    if Server_secondary_mode else ""))
    if Server_secondary_mode != SecondaryNode.MIRROR_MODE: # mode is split timer
        try: # if first time joining and DB contains races then backup DB and clear races
            if prev_mode is None and len(RHDATA.get_savedRaceMetas()) > 0:
                logger.info("Making database autoBkp and clearing races on split timer")
                RHDATA.backup_db_file(True, "autoBkp_")
                RHDATA.clear_race_data()
                reset_current_laps()
                emit_current_laps()
                emit_result_data()
                # fix: use 'rhconfig' (the config object used throughout this
                # module); 'Config' looks like a stale name from an older revision
                RHDATA.delete_old_db_autoBkp_files(rhconfig.GENERAL['DB_AUTOBKP_NUM_KEEP'], \
                                                   "autoBkp_")
        except:
            logger.exception("Error making db-autoBkp / clearing races on split timer")
        setCurrentRaceFormat(SECONDARY_RACE_FORMAT)
        emit_race_format()
    Events.trigger(Evt.CLUSTER_JOIN, {
        'message': __('Joined cluster')
    })
    emit_join_cluster_response()
@SOCKET_IO.on('check_secondary_query')
@catchLogExceptionsWrapper
def on_check_secondary_query(data):
    ''' Check-query received from primary; return response. '''
    now_epoch_ms = PROGRAM_START.monotonic_to_epoch_millis(ms_counter())
    SOCKET_IO.emit('check_secondary_response', {'timestamp': now_epoch_ms})
@SOCKET_IO.on('cluster_event_trigger')
@catchLogExceptionsWrapper
def on_cluster_event_trigger(data):
    ''' Received event trigger from primary. '''
    evtName = data['evt_name']
    # NOTE(review): evtArgs may be None when 'evt_args' is absent; the dict
    # lookups below assume it is present — confirm primary always sends it
    evtArgs = json.loads(data['evt_args']) if 'evt_args' in data else None
    # set mirror timer state
    if Server_secondary_mode == SecondaryNode.MIRROR_MODE:
        if evtName == Evt.RACE_STAGE:
            RACE.race_status = RHRace.RaceStatus.STAGING
            RACE.results = lambda race: None
            if led_manager.isEnabled():
                if 'race_node_colors' in evtArgs and isinstance(evtArgs['race_node_colors'], list):
                    led_manager.setDisplayColorCache(evtArgs['race_node_colors'])
                else:
                    RHDATA.set_option('ledColorMode', 0)
        elif evtName == Evt.RACE_START:
            RACE.race_status = RHRace.RaceStatus.RACING
        elif evtName == Evt.RACE_STOP:
            RACE.race_status = RHRace.RaceStatus.DONE
        elif evtName == Evt.LAPS_CLEAR:
            RACE.race_status = RHRace.RaceStatus.READY
        elif evtName == Evt.RACE_LAP_RECORDED:
            # mirror the primary's leaderboard results verbatim
            RACE.results = lambda race: evtArgs['results']
    evtArgs.pop('RACE', None) # remove race if exists
    if evtName not in [Evt.STARTUP, Evt.LED_SET_MANUAL]:
        Events.trigger(evtName, evtArgs)
    # special handling for LED Control via primary timer
    elif 'effect' in evtArgs and led_manager.isEnabled():
        led_manager.setEventEffect(Evt.LED_MANUAL, evtArgs['effect'])
@SOCKET_IO.on('cluster_message_ack')
@catchLogExceptionsWrapper
def on_cluster_message_ack(data):
    ''' Received message acknowledgement from primary. '''
    if not ClusterSendAckQueueObj:
        logger.warning("Received 'on_cluster_message_ack' message with no ClusterSendAckQueueObj setup")
        return
    messageType = str(data.get('messageType')) if data else None
    messagePayload = data.get('messagePayload') if data else None
    ClusterSendAckQueueObj.ack(messageType, messagePayload)
# RotorHazard events
@SOCKET_IO.on('load_data')
@catchLogExceptionsWrapper
def on_load_data(data):
    '''Allow pages to load needed data'''
    # dispatch table: load_type -> emitter call; unknown types are ignored,
    # matching the original if/elif chain
    dispatch = {
        'node_data': lambda: emit_node_data(nobroadcast=True),
        'environmental_data': lambda: emit_environmental_data(nobroadcast=True),
        'frequency_data': lambda: emit_frequency_data(nobroadcast=True),
        'heat_data': lambda: emit_heat_data(nobroadcast=True),
        'class_data': lambda: emit_class_data(nobroadcast=True),
        'pilot_data': lambda: emit_pilot_data(nobroadcast=True),
        'result_data': lambda: emit_result_data(nobroadcast=True),
        'race_format': lambda: emit_race_format(nobroadcast=True),
        'race_formats': lambda: emit_race_formats(nobroadcast=True),
        'node_tuning': lambda: emit_node_tuning(nobroadcast=True),
        'enter_and_exit_at_levels': lambda: emit_enter_and_exit_at_levels(nobroadcast=True),
        'start_thresh_lower_amount': lambda: emit_start_thresh_lower_amount(nobroadcast=True),
        'start_thresh_lower_duration': lambda: emit_start_thresh_lower_duration(nobroadcast=True),
        'min_lap': lambda: emit_min_lap(nobroadcast=True),
        'leaderboard': lambda: emit_current_leaderboard(nobroadcast=True),
        'current_laps': lambda: emit_current_laps(nobroadcast=True),
        'race_status': lambda: emit_race_status(nobroadcast=True),
        'current_heat': lambda: emit_current_heat(nobroadcast=True),
        'race_list': lambda: emit_race_list(nobroadcast=True),
        'language': lambda: emit_language(nobroadcast=True),
        'all_languages': lambda: emit_all_languages(nobroadcast=True),
        'led_effect_setup': emit_led_effect_setup,
        'led_effects': emit_led_effects,
        'callouts': emit_callouts,
        'imdtabler_page': lambda: emit_imdtabler_page(nobroadcast=True),
        'vrx_list': lambda: emit_vrx_list(nobroadcast=True),
        'backups_list': on_list_backups,
        'exporter_list': emit_exporter_list,
        'cluster_status': emit_cluster_status,
        'hardware_log_init': emit_current_log_file_to_socket,
    }
    for load_type in data['load_types']:
        handler = dispatch.get(load_type)
        if handler:
            handler()
@SOCKET_IO.on('broadcast_message')
@catchLogExceptionsWrapper
def on_broadcast_message(data):
    '''Relay an operator-entered message to connected clients;
    data['interrupt'] controls whether it interrupts the UI.'''
    emit_priority_message(data['message'], data['interrupt'])
# Settings socket io events
@SOCKET_IO.on('set_frequency')
def ws_on_set_frequency(data):
    '''Forward a frequency change from the UI straight to the hardware.'''
    band = str(data['band']) if data.get('band') is not None else None
    channel = int(data['channel']) if data.get('channel') is not None else None
    INTERFACE.set_frequency(data['node'], int(data['frequency']), band, channel)
@catchLogExceptionsWrapper
def on_set_frequency(data):
    '''Set node frequency.

    Persists the band/channel/frequency to the current profile and fires
    FREQUENCY_SET; forwarded to cluster split timers first.
    '''
    if CLUSTER:
        CLUSTER.emitToSplits('set_frequency', data)
    if isinstance(data, string_types): # LiveTime compatibility
        data = json.loads(data)
    node_index = data['node']
    frequency = int(data['frequency'])
    band = str(data['band']) if 'band' in data and data['band'] is not None else None
    channel = int(data['channel']) if 'channel' in data and data['channel'] is not None else None
    if node_index < 0 or node_index >= RACE.num_nodes:
        logger.info('Unable to set frequency ({0}) on node {1}; node index out of range'.format(frequency, node_index+1))
        return
    profile = getCurrentProfile()
    freqs = json.loads(profile.frequencies)
    # handle case where more nodes were added
    modified = False
    while node_index >= len(freqs["f"]):
        freqs["b"].append(None)
        freqs["c"].append(None)
        freqs["f"].append(RHUtils.FREQUENCY_ID_NONE)
        modified = True
    if frequency != freqs["f"][node_index] or (band is not None and channel is not None):
        freqs["b"][node_index] = band
        freqs["c"][node_index] = channel
        freqs["f"][node_index] = frequency
        modified = True
    # only persist/notify when something actually changed
    if modified:
        logger.info('Frequency set: Profile {} Node {} B:{} Ch:{} Freq:{}'.format(profile.id, node_index+1, band, channel, frequency))
        RHDATA.alter_profile({
            'profile_id': profile.id,
            'frequencies': freqs
        })
        Events.trigger(Evt.FREQUENCY_SET, {
            'nodeIndex': node_index,
            'frequency': frequency,
            'band': band,
            'channel': channel
        })
        emit_frequency_data()
@SOCKET_IO.on('set_frequency_preset')
@catchLogExceptionsWrapper
def on_set_frequency_preset(data):
    ''' Apply preset frequencies '''
    # fix: guard against CLUSTER being None (it starts as None and every
    # other call site checks it before use)
    if CLUSTER:
        CLUSTER.emitToSplits('set_frequency_preset', data)
    bands = []
    channels = []
    freqs = []
    if data['preset'] == 'All-N1':
        # copy node 1's current assignment to every node
        profile = getCurrentProfile()
        profile_freqs = json.loads(profile.frequencies)
        for _idx in range(RACE.num_nodes):
            bands.append(profile_freqs["b"][0])
            channels.append(profile_freqs["c"][0])
            freqs.append(profile_freqs["f"][0])
    else:
        if data['preset'] == 'RB-4':
            bands = ['R', 'R', 'R', 'R']
            channels = [1, 3, 6, 7]
            freqs = [5658, 5732, 5843, 5880]
        elif data['preset'] == 'RB-8':
            bands = ['R', 'R', 'R', 'R', 'R', 'R', 'R', 'R']
            channels = [1, 2, 3, 4, 5, 6, 7, 8]
            freqs = [5658, 5695, 5732, 5769, 5806, 5843, 5880, 5917]
        elif data['preset'] == 'IMD5C':
            bands = ['R', 'R', 'F', 'F', 'E']
            channels = [1, 2, 2, 4, 5]
            freqs = [5658, 5695, 5760, 5800, 5885]
        else: #IMD6C is default
            bands = ['R', 'R', 'F', 'F', 'R', 'R']
            channels = [1, 2, 2, 4, 7, 8]
            freqs = [5658, 5695, 5760, 5800, 5880, 5917]
        # pad with 'none' entries when there are more nodes than the preset covers
        while RACE.num_nodes > len(bands):
            bands.append(RHUtils.FREQUENCY_ID_NONE)
        while RACE.num_nodes > len(channels):
            channels.append(RHUtils.FREQUENCY_ID_NONE)
        while RACE.num_nodes > len(freqs):
            freqs.append(RHUtils.FREQUENCY_ID_NONE)
    payload = {
        "b": bands,
        "c": channels,
        "f": freqs
    }
    set_all_frequencies(payload)
    hardware_set_all_frequencies(payload)
    emit_frequency_data()
def set_all_frequencies(freqs):
    ''' Set frequencies for all nodes (but do not update hardware) '''
    profile = getCurrentProfile()
    profile_freqs = json.loads(profile.frequencies)
    for idx in range(RACE.num_nodes):
        # overwrite existing entries; append when the profile has fewer nodes
        for key in ("b", "c", "f"):
            if idx < len(profile_freqs[key]):
                profile_freqs[key][idx] = freqs[key][idx]
            else:
                profile_freqs[key].append(freqs[key][idx])
        logger.info('Frequency set: Node {0} B:{1} Ch:{2} Freq:{3}'.format(idx+1, freqs["b"][idx], freqs["c"][idx], freqs["f"][idx]))
    RHDATA.alter_profile({
        'profile_id': profile.id,
        'frequencies': profile_freqs
    })
def hardware_set_all_frequencies(freqs):
    '''do hardware update for frequencies'''
    logger.debug("Sending frequency values to nodes: " + str(freqs["f"]))
    for idx in range(RACE.num_nodes):
        band = freqs["b"][idx]
        channel = freqs["c"][idx]
        frequency = freqs["f"][idx]
        INTERFACE.set_frequency(idx, frequency, band, channel)
        Events.trigger(Evt.FREQUENCY_SET, {
            'nodeIndex': idx,
            'frequency': frequency,
            'band': band,
            'channel': channel
        })
@SOCKET_IO.on('set_enter_at_level')
def ws_on_set_enter_at_level(data):
    '''Forward an enter-at level change straight to the hardware.'''
    INTERFACE.set_enter_at_level(data['node'], data['enter_at_level'])
@catchLogExceptionsWrapper
def on_set_enter_at_level(data):
    '''Set node enter-at level.'''
    node_index = data['node']
    enter_at_level = data['enter_at_level']
    if not (0 <= node_index < RACE.num_nodes):
        logger.info('Unable to set enter-at ({0}) on node {1}; node index out of range'.format(enter_at_level, node_index+1))
        return
    if not enter_at_level:
        logger.info('Node enter-at set null; getting from node: Node {0}'.format(node_index+1))
        enter_at_level = INTERFACE.nodes[node_index].enter_at_level
    profile = getCurrentProfile()
    enter_ats = json.loads(profile.enter_ats)
    # pad the stored list if more nodes exist than when the profile was saved
    while len(enter_ats["v"]) <= node_index:
        enter_ats["v"].append(None)
    enter_ats["v"][node_index] = enter_at_level
    RHDATA.alter_profile({
        'profile_id': profile.id,
        'enter_ats': enter_ats
    })
    Events.trigger(Evt.ENTER_AT_LEVEL_SET, {
        'nodeIndex': node_index,
        'enter_at_level': enter_at_level,
    })
    logger.info('Node enter-at set: Node {0} Level {1}'.format(node_index+1, enter_at_level))
@SOCKET_IO.on('set_exit_at_level')
def ws_on_set_exit_at_level(data):
    '''Forward an exit-at level change straight to the hardware.'''
    INTERFACE.set_exit_at_level(data['node'], data['exit_at_level'])
@catchLogExceptionsWrapper
def on_set_exit_at_level(data):
    '''Set node exit-at level.'''
    node_index = data['node']
    exit_at_level = data['exit_at_level']
    if not (0 <= node_index < RACE.num_nodes):
        logger.info('Unable to set exit-at ({0}) on node {1}; node index out of range'.format(exit_at_level, node_index+1))
        return
    if not exit_at_level:
        logger.info('Node exit-at set null; getting from node: Node {0}'.format(node_index+1))
        exit_at_level = INTERFACE.nodes[node_index].exit_at_level
    profile = getCurrentProfile()
    exit_ats = json.loads(profile.exit_ats)
    # pad the stored list if more nodes exist than when the profile was saved
    while len(exit_ats["v"]) <= node_index:
        exit_ats["v"].append(None)
    exit_ats["v"][node_index] = exit_at_level
    RHDATA.alter_profile({
        'profile_id': profile.id,
        'exit_ats': exit_ats
    })
    Events.trigger(Evt.EXIT_AT_LEVEL_SET, {
        'nodeIndex': node_index,
        'exit_at_level': exit_at_level,
    })
    logger.info('Node exit-at set: Node {0} Level {1}'.format(node_index+1, exit_at_level))
def hardware_set_all_enter_ats(enter_at_levels):
    '''send update to nodes'''
    logger.debug("Sending enter-at values to nodes: " + str(enter_at_levels))
    for idx in range(RACE.num_nodes):
        level = enter_at_levels[idx]
        if level:
            INTERFACE.set_enter_at_level(idx, level)
        else:
            # no stored level: refresh from the node's current value via the full setter
            on_set_enter_at_level({'node': idx,
                                   'enter_at_level': INTERFACE.nodes[idx].enter_at_level})
def hardware_set_all_exit_ats(exit_at_levels):
    '''send update to nodes'''
    logger.debug("Sending exit-at values to nodes: " + str(exit_at_levels))
    for idx in range(RACE.num_nodes):
        level = exit_at_levels[idx]
        if level:
            INTERFACE.set_exit_at_level(idx, level)
        else:
            # no stored level: refresh from the node's current value via the full setter
            on_set_exit_at_level({'node': idx,
                                  'exit_at_level': INTERFACE.nodes[idx].exit_at_level})
@SOCKET_IO.on('set_calibration_mode')
@catchLogExceptionsWrapper
def on_set_calibration_mode(data):
    '''Set per-node calibration flags ('ai_calibrate' and/or 'calibrate').'''
    node_index = data['node']
    if 'ai_calibrate' in data:
        ai_calibrate = data['ai_calibrate']
        # fix: original used '>= 0 or <', which is always true, so the
        # range check never rejected anything
        if 0 <= node_index < RACE.num_nodes:
            INTERFACE.nodes[node_index].ai_calibrate = ai_calibrate
        else:
            logger.info('Unable to set AI calibration mode ({0}) on node {1}; node index out of range'.format(ai_calibrate, node_index+1))
    if 'calibrate' in data:
        calibrate = data['calibrate']
        if 0 <= node_index < RACE.num_nodes:
            INTERFACE.nodes[node_index].calibrate = calibrate
        else:
            # fix: log the 'calibrate' value (original logged ai_calibrate here)
            logger.info('Unable to set calibration mode ({0}) on node {1}; node index out of range'.format(calibrate, node_index+1))
@SOCKET_IO.on('set_data_logger')
def on_set_data_logger(data):
    '''Start or stop the per-node data logger, if the interface supports it.'''
    node_index = data['node']
    if data['data_logger']:
        if hasattr(INTERFACE, 'start_data_logger'):
            INTERFACE.start_data_logger(node_index)
    elif hasattr(INTERFACE, 'stop_data_logger'):
        INTERFACE.stop_data_logger(node_index)
@SOCKET_IO.on("set_start_thresh_lower_amount")
@catchLogExceptionsWrapper
def on_set_start_thresh_lower_amount(data):
    '''Persist the start-threshold lower amount and notify other clients.'''
    amount = data['start_thresh_lower_amount']
    RHDATA.set_option("startThreshLowerAmount", amount)
    logger.info("set start_thresh_lower_amount to %s percent" % amount)
    emit_start_thresh_lower_amount(noself=True)
@SOCKET_IO.on("set_start_thresh_lower_duration")
@catchLogExceptionsWrapper
def on_set_start_thresh_lower_duration(data):
    '''Persist the start-threshold lower duration and notify other clients.'''
    duration = data['start_thresh_lower_duration']
    RHDATA.set_option("startThreshLowerDuration", duration)
    logger.info("set start_thresh_lower_duration to %s seconds" % duration)
    emit_start_thresh_lower_duration(noself=True)
@SOCKET_IO.on('set_language')
@catchLogExceptionsWrapper
def on_set_language(data):
    '''Set interface language.'''
    # stored as an option; read back wherever translated content is served
    # (e.g. the doc viewer's 'currentLanguage' lookup)
    RHDATA.set_option('currentLanguage', data['language'])
@SOCKET_IO.on('cap_enter_at_btn')
@catchLogExceptionsWrapper
def on_cap_enter_at_btn(data):
    '''Capture enter-at level.'''
    node_index = data['node_index']
    started = INTERFACE.start_capture_enter_at_level(node_index)
    if started:
        logger.info('Starting capture of enter-at level for node {0}'.format(node_index+1))
@SOCKET_IO.on('cap_exit_at_btn')
@catchLogExceptionsWrapper
def on_cap_exit_at_btn(data):
    '''Capture exit-at level.'''
    node_index = data['node_index']
    started = INTERFACE.start_capture_exit_at_level(node_index)
    if started:
        logger.info('Starting capture of exit-at level for node {0}'.format(node_index+1))
@SOCKET_IO.on('set_scan')
@catchLogExceptionsWrapper
def on_set_scan(data):
    '''Enable or disable frequency scanning on a node, if the interface supports it.'''
    scan_fn = getattr(INTERFACE, 'set_frequency_scan', None)
    if scan_fn is not None:
        scan_fn(data['node'], data['scan'])
@SOCKET_IO.on('add_heat')
@catchLogExceptionsWrapper
def on_add_heat():
    '''Adds the next available heat number to the database.'''
    heats = RHDATA.get_heats()
    # new heat inherits the stage of the most recent heat; 'Qualifying' when none exist
    stage = heats[-1].stage.name if heats else 'Qualifying'
    RHDATA.add_heat(init={'stage': stage})
    emit_heat_data()
@SOCKET_IO.on('duplicate_heat')
@catchLogExceptionsWrapper
def on_duplicate_heat(data):
    '''Create a copy of an existing heat and refresh heat displays.'''
    heat_id = data['heat']
    RHDATA.duplicate_heat(heat_id)
    emit_heat_data()
@SOCKET_IO.on('alter_heat')
@catchLogExceptionsWrapper
def on_alter_heat(data):
    '''Update heat.'''
    if 'pilot' in data:
        # translate hardware node index into this timer's local seat number
        _location_id, seat = get_local_location_id_and_seat(INTERFACE.nodes[data['node']])
        data['node'] = seat
    heat, altered_race_list = RHDATA.alter_heat(data)
    if RACE.current_heat == heat.id:
        # the heat currently being raced changed; reload its data
        set_current_heat_data()
    emit_heat_data(noself=True)
    if altered_race_list and ('pilot' in data or 'class' in data):
        emit_result_data() # live update rounds page
    emit_priority_message(__('Alterations made to heat: {0}').format(heat.note), False)
@SOCKET_IO.on('delete_heat')
@catchLogExceptionsWrapper
def on_delete_heat(data):
    '''Delete heat.'''
    global LAST_RACE
    deleted_heat_id = RHDATA.delete_heat(data['heat'])
    if deleted_heat_id is None:
        return
    if LAST_RACE and LAST_RACE.current_heat == deleted_heat_id:
        LAST_RACE = None # last race referenced the deleted heat; forget it
    if RACE.current_heat == deleted_heat_id:
        # current heat was deleted; fall back to the first remaining heat
        fallback_id = RHDATA.get_first_heat().id
        if RACE.current_heat != fallback_id:
            logger.info('Changing current heat to Heat {0}'.format(fallback_id))
            RACE.current_heat = fallback_id
            set_current_heat_data()
    emit_heat_data()
@SOCKET_IO.on('add_race_class')
@catchLogExceptionsWrapper
def on_add_race_class():
    '''Add a new race class to the database and refresh class/heat displays.'''
    RHDATA.add_raceClass()
    emit_class_data()
    emit_heat_data() # Update class selections in heat displays
@SOCKET_IO.on('duplicate_race_class')
@catchLogExceptionsWrapper
def on_duplicate_race_class(data):
    '''Adds new race class by duplicating an existing one.'''
    source_class_id = data['class']
    RHDATA.duplicate_raceClass(source_class_id)
    emit_class_data()
    emit_heat_data()
@SOCKET_IO.on('alter_race_class')
@catchLogExceptionsWrapper
def on_alter_race_class(data):
    '''Update race class.'''
    race_class, altered_race_list = RHDATA.alter_raceClass(data)
    name_changed = 'name' in data
    format_changed = 'format_id' in data
    if altered_race_list and (format_changed or name_changed):
        emit_result_data() # live update rounds page
        emit_priority_message(__('Alterations made to race class: {0}').format(race_class.name), False)
    emit_class_data(noself=True)
    if name_changed:
        emit_heat_data() # class names appear in heat displays
    if format_changed:
        emit_current_heat(noself=True) # in case race operator is a different client, update locked format dropdown
@SOCKET_IO.on('delete_class')
@catchLogExceptionsWrapper
def on_delete_class(data):
    '''Delete class.'''
    if RHDATA.delete_raceClass(data['class']):
        emit_class_data()
        emit_heat_data()
@SOCKET_IO.on('add_pilot')
@catchLogExceptionsWrapper
def on_add_pilot(data=None):
    '''Add a new pilot to the database.

    data: optional dict of initial pilot fields; defaults to empty.
    '''
    # avoid a mutable default argument ({}): the original shared one dict
    # instance across all calls, so any downstream mutation would leak
    # between requests
    RHDATA.add_pilot(data if data is not None else {})
    emit_pilot_data()
@SOCKET_IO.on('alter_pilot')
@catchLogExceptionsWrapper
def on_alter_pilot(data):
    '''Update pilot.'''
    # data carries the pilot id plus any changed fields (callsign, team_name,
    # phonetic, ...); race_list holds saved races affected by the change
    _pilot, race_list = RHDATA.alter_pilot(data)
    emit_pilot_data(noself=True) # Settings page, new pilot settings
    if 'callsign' in data or 'team_name' in data:
        emit_heat_data() # Settings page, new pilot callsign in heats
        if len(race_list):
            emit_result_data() # live update rounds page
    if 'phonetic' in data:
        emit_heat_data() # Settings page, new pilot phonetic in heats. Needed?
    # refresh cached pilot list used by the active race
    RACE.set_current_pilots(RHDATA)
@SOCKET_IO.on('delete_pilot')
@catchLogExceptionsWrapper
def on_delete_pilot(data):
    '''Delete pilot.'''
    result = RHDATA.delete_pilot(data['pilot'])
    if result:
        # refresh the active race's cached pilots and dependent displays
        RACE.set_current_pilots(RHDATA)
        emit_pilot_data()
        emit_heat_data()
@SOCKET_IO.on('add_profile')
@catchLogExceptionsWrapper
def on_add_profile():
    '''Adds new profile (frequency set) in the database.'''
    # new profiles are seeded from the currently selected one, then activated
    new_profile = RHDATA.duplicate_profile(getCurrentProfile().id)
    on_set_profile({ 'profile': new_profile.id })
@SOCKET_IO.on('alter_profile')
@catchLogExceptionsWrapper
def on_alter_profile(data):
    ''' update profile '''
    # alterations always target the currently selected profile
    data['profile_id'] = getCurrentProfile().id
    RHDATA.alter_profile(data)
    emit_node_tuning(noself=True)
@SOCKET_IO.on('delete_profile')
@catchLogExceptionsWrapper
def on_delete_profile():
    '''Delete profile'''
    deleted = RHDATA.delete_profile(getCurrentProfile().id)
    if deleted:
        # fall back to the first remaining profile and activate it
        first_profile_id = RHDATA.get_first_profile().id
        RHDATA.set_option("currentProfile", first_profile_id)
        on_set_profile({ 'profile': first_profile_id })
@SOCKET_IO.on("set_profile")
@catchLogExceptionsWrapper
def on_set_profile(data, emit_vals=True):
    ''' set current profile '''
    # data['profile']: id of the profile (frequency set) to activate
    # emit_vals: when False, skip pushing tuning/frequency updates to clients
    profile_val = int(data['profile'])
    profile = RHDATA.get_profile(profile_val)
    if profile:
        RHDATA.set_option("currentProfile", data['profile'])
        logger.info("Set Profile to '%s'" % profile_val)
        # set freqs, enter_ats, and exit_ats
        # pad each stored frequency band list out to the current node count
        freqs = json.loads(profile.frequencies)
        while RACE.num_nodes > len(freqs["b"]):
            freqs["b"].append(RHUtils.FREQUENCY_ID_NONE)
        while RACE.num_nodes > len(freqs["c"]):
            freqs["c"].append(RHUtils.FREQUENCY_ID_NONE)
        while RACE.num_nodes > len(freqs["f"]):
            freqs["f"].append(RHUtils.FREQUENCY_ID_NONE)
        if profile.enter_ats:
            enter_at_levels = json.loads(profile.enter_ats)
            enter_ats = enter_at_levels["v"]
            # pad with None so hardware values are left untouched for new nodes
            while RACE.num_nodes > len(enter_ats):
                enter_ats.append(None)
        else: #handle null data by copying in hardware values
            enter_at_levels = {}
            enter_at_levels["v"] = [node.enter_at_level for node in INTERFACE.nodes]
            enter_ats = enter_at_levels["v"]
        if profile.exit_ats:
            exit_at_levels = json.loads(profile.exit_ats)
            exit_ats = exit_at_levels["v"]
            while RACE.num_nodes > len(exit_ats):
                exit_ats.append(None)
        else: #handle null data by copying in hardware values
            exit_at_levels = {}
            exit_at_levels["v"] = [node.exit_at_level for node in INTERFACE.nodes]
            exit_ats = exit_at_levels["v"]
        # write back the (possibly padded/filled-in) levels to the profile
        RHDATA.alter_profile({'profile_id': profile_val, 'enter_ats': enter_at_levels, 'exit_ats': exit_at_levels})
        Events.trigger(Evt.PROFILE_SET, {
            'profile_id': profile_val,
        })
        if emit_vals:
            emit_node_tuning()
            emit_enter_and_exit_at_levels()
            emit_frequency_data()
        # push the new tuning values to the hardware regardless of emit_vals
        hardware_set_all_frequencies(freqs)
        hardware_set_all_enter_ats(enter_ats)
        hardware_set_all_exit_ats(exit_ats)
    else:
        logger.warning('Invalid set_profile value: ' + str(profile_val))
@SOCKET_IO.on('alter_race')
@catchLogExceptionsWrapper
def on_alter_race(data):
    '''Update race (retroactively via marshaling).'''
    _race_meta, new_heat = RHDATA.reassign_savedRaceMeta_heat(data['race_id'], data['heat_id'])
    # prefer the heat's note as a display name, falling back to its id
    name = new_heat.note or new_heat.id
    message = __('A race has been reassigned to {0}').format(name)
    emit_priority_message(message, False)
    emit_race_list(nobroadcast=True)
    emit_result_data()
@SOCKET_IO.on('backup_database')
@catchLogExceptionsWrapper
def on_backup_database():
    '''Backup database.'''
    bkp_name = RHDATA.backup_db_file(True) # make copy of DB file
    # read the backup and ship it to the requesting client as Base64 text
    with open(bkp_name, mode='rb') as file_obj:
        encoded = base64.encodebytes(file_obj.read()).decode()
    emit_payload = {
        'file_name': os.path.basename(bkp_name),
        'file_data' : encoded
    }
    Events.trigger(Evt.DATABASE_BACKUP, {
        'file_name': emit_payload['file_name'],
    })
    emit('database_bkp_done', emit_payload)
    on_list_backups()
@SOCKET_IO.on('list_backups')
@catchLogExceptionsWrapper
def on_list_backups():
    '''List database files in db_bkp'''
    if os.path.exists(DB_BKP_DIR_NAME):
        # take only the top level of the walk (no recursion into subdirectories)
        _dirpath, _dirnames, filenames = next(os.walk(DB_BKP_DIR_NAME))
        emit_payload = {
            'backup_files': list(filenames)
        }
    else:
        emit_payload = {
            'backup_files': None
        }
    emit('backups_list', emit_payload)
@SOCKET_IO.on('restore_database')
@catchLogExceptionsWrapper
def on_restore_database(data):
    '''Restore database.'''
    # data['backup_file']: filename (within DB_BKP_DIR_NAME) to restore from
    global RACE
    global LAST_RACE
    success = None
    if 'backup_file' in data:
        backup_file = data['backup_file']
        backup_path = DB_BKP_DIR_NAME + '/' + backup_file
        if os.path.exists(backup_path):
            logger.info('Found {0}: starting restoration...'.format(backup_file))
            RHDATA.close()
            RACE = RHRace.RHRace() # Reset all RACE values
            RACE.result_fn = RESULTS.calc_current_race_leaderboard
            RACE.team_result_fn = RESULTS.calc_team_leaderboard
            RACE.num_nodes = len(INTERFACE.nodes) # restore number of nodes
            LAST_RACE = RACE
            try:
                RHDATA.recover_database(Database.db_uri(BASEDIR, DB_BKP_DIR_NAME + '/' + backup_file))
                reset_current_laps()
                clean_results_cache()
                expand_heats()
                raceformat_id = RHDATA.get_optionInt('currentFormat')
                race_format = RHDATA.get_raceFormat(raceformat_id)
                setCurrentRaceFormat(race_format)
                success = True
            except Exception as ex:
                # recovery failed part-way; wipe to a known-good empty state
                logger.warning('Clearing all data after recovery failure: ' + str(ex))
                db_reset()
                success = False
            # these run after either outcome so server state matches the DB
            init_race_state()
            init_interface_state()
            Events.trigger(Evt.DATABASE_RESTORE, {
                'file_name': backup_file,
            })
            SOCKET_IO.emit('database_restore_done')
        else:
            logger.warning('Unable to restore {0}: File does not exist'.format(backup_file))
            success = False
    if success == False:
        message = __('Database recovery failed for: {0}').format(backup_file)
        emit_priority_message(message, False, nobroadcast=True)
@SOCKET_IO.on('delete_database')
@catchLogExceptionsWrapper
def on_delete_database_file(data):
    '''Delete a database backup file.'''
    # data['backup_file']: filename (within DB_BKP_DIR_NAME) to delete
    if 'backup_file' in data:
        backup_file = data['backup_file']
        backup_path = DB_BKP_DIR_NAME + '/' + backup_file
        if os.path.exists(backup_path):
            logger.info('Deleting backup file {0}'.format(backup_file))
            os.remove(backup_path)
            emit_payload = {
                'file_name': backup_file
            }
            Events.trigger(Evt.DATABASE_DELETE_BACKUP, {
                'file_name': backup_file,
            })
            SOCKET_IO.emit('database_delete_done', emit_payload)
            on_list_backups()
        else:
            logger.warning('Unable to delete {0}: File does not exist'.format(backup_file))
@SOCKET_IO.on('reset_database')
@catchLogExceptionsWrapper
def on_reset_database(data):
    '''Reset database.'''
    PAGE_CACHE.set_valid(False)
    reset_type = data['reset_type']
    # dependents are torn down before their parents (races reference heats,
    # heats reference classes/pilots); call order within each tuple matters
    reset_actions = {
        'races': (RHDATA.clear_race_data, reset_current_laps),
        'heats': (RHDATA.reset_heats, RHDATA.clear_race_data, reset_current_laps),
        'classes': (RHDATA.reset_heats, RHDATA.reset_raceClasses, RHDATA.clear_race_data, reset_current_laps),
        'pilots': (RHDATA.reset_pilots, RHDATA.reset_heats, RHDATA.clear_race_data, reset_current_laps),
        'all': (RHDATA.reset_pilots, RHDATA.reset_heats, RHDATA.reset_raceClasses, RHDATA.clear_race_data, reset_current_laps),
    }
    if reset_type == 'formats':
        RHDATA.clear_race_data()
        reset_current_laps()
        RHDATA.reset_raceFormats()
        setCurrentRaceFormat(RHDATA.get_first_raceFormat())
    else:
        for action in reset_actions.get(reset_type, ()):
            action()
    emit_pilot_data()
    emit_heat_data()
    emit_race_format()
    emit_class_data()
    emit_current_laps()
    emit_result_data()
    emit('reset_confirm')
    Events.trigger(Evt.DATABASE_RESET)
@SOCKET_IO.on('export_database')
@catchLogExceptionsWrapper
def on_export_database_file(data):
    '''Run the selected Exporter'''
    exporter = data['exporter']
    if not export_manager.hasExporter(exporter):
        logger.error('Data exporter "{0}" not found'.format(exporter))
        emit_priority_message(__('Data export failed. (See log)'), False, nobroadcast=True)
        return
    # do export
    logger.info('Exporting data via {0}'.format(exporter))
    export_result = export_manager.export(exporter)
    if export_result == False:
        logger.warning('Failed exporting data: exporter returned no data')
        emit_priority_message(__('Data export failed. (See log)'), False, nobroadcast=True)
        return
    try:
        emit_payload = {
            'filename': 'RotorHazard Export ' + datetime.now().strftime('%Y%m%d_%H%M%S') + ' ' + exporter + '.' + export_result['ext'],
            'encoding': export_result['encoding'],
            'data' : export_result['data']
        }
        emit('exported_data', emit_payload)
        Events.trigger(Evt.DATABASE_EXPORT)
    except Exception:
        logger.exception("Error downloading export file")
        emit_priority_message(__('Data export failed. (See log)'), False, nobroadcast=True)
@SOCKET_IO.on('shutdown_pi')
@catchLogExceptionsWrapper
def on_shutdown_pi():
    '''Shutdown the raspberry pi.'''
    if INTERFACE.send_shutdown_started_message():
        gevent.sleep(0.25) # give shutdown-started message a chance to transmit to node
    if CLUSTER:
        # propagate to secondary timers before this server goes down
        CLUSTER.emit('shutdown_pi')
    logger.info('Performing system shutdown')
    shutdown(__('Server has shut down.'))
    if RHUtils.isSysRaspberryPi():
        gevent.sleep(0.1)
        logger.debug("Executing system command: sudo shutdown now")
        # flush and close log output before the OS goes down
        log.wait_for_queue_empty()
        log.close_logging()
        os.system("sudo shutdown now")
    else:
        logger.warning("Not executing system shutdown command because not RPi")
@SOCKET_IO.on('reboot_pi')
@catchLogExceptionsWrapper
def on_reboot_pi():
    '''Reboot the raspberry pi.'''
    if CLUSTER:
        # propagate to secondary timers before this server goes down
        CLUSTER.emit('reboot_pi')
    logger.info('Performing system reboot')
    shutdown(__('Server is rebooting.'))
    if RHUtils.isSysRaspberryPi():
        gevent.sleep(0.1)
        logger.debug("Executing system command: sudo reboot now")
        # flush and close log output before the OS goes down
        log.wait_for_queue_empty()
        log.close_logging()
        os.system("sudo reboot now")
    else:
        logger.warning("Not executing system reboot command because not RPi")
@SOCKET_IO.on('kill_server')
@catchLogExceptionsWrapper
def on_kill_server():
    '''Shutdown this server.'''
    if CLUSTER:
        # propagate to secondary timers before this server stops
        CLUSTER.emit('kill_server')
    logger.info('Killing RotorHazard server')
    shutdown(__('Server has stopped.'))
@SOCKET_IO.on('download_logs')
@catchLogExceptionsWrapper
def on_download_logs(data):
    '''Download logs (as .zip file).

    data['emit_fn_name']: socket event name to deliver the zip payload on.
    '''
    zip_path_name = log.create_log_files_zip(logger, config_file_name, DB_FILE_NAME)
    RHUtils.checkSetFileOwnerPi(log.LOGZIP_DIR_NAME)
    if not zip_path_name:
        return
    RHUtils.checkSetFileOwnerPi(zip_path_name)
    try:
        # ship the logs-zip to the client as Base64 text
        with open(zip_path_name, mode='rb') as file_obj:
            encoded = base64.encodebytes(file_obj.read()).decode()
        emit_payload = {
            'file_name': os.path.basename(zip_path_name),
            'file_data' : encoded
        }
        Events.trigger(Evt.DATABASE_BACKUP, {
            'file_name': emit_payload['file_name'],
        })
        SOCKET_IO.emit(data['emit_fn_name'], emit_payload)
    except Exception:
        logger.exception("Error downloading logs-zip file")
@SOCKET_IO.on("set_min_lap")
@catchLogExceptionsWrapper
def on_set_min_lap(data):
    '''Persist the minimum lap time (seconds) and notify listeners.'''
    min_lap = data['min_lap']
    RHDATA.set_option("MinLapSec", min_lap)
    Events.trigger(Evt.MIN_LAP_TIME_SET, {
        'min_lap': min_lap,
    })
    logger.info("set min lap time to %s seconds", min_lap)
    emit_min_lap(noself=True)
@SOCKET_IO.on("set_min_lap_behavior")
@catchLogExceptionsWrapper
def on_set_min_lap_behavior(data):
    '''Persist how sub-minimum laps are handled and notify listeners.'''
    behavior = int(data['min_lap_behavior'])
    RHDATA.set_option("MinLapBehavior", behavior)
    Events.trigger(Evt.MIN_LAP_BEHAVIOR_SET, {
        'min_lap_behavior': behavior,
    })
    logger.info("set min lap behavior to %s", behavior)
    emit_min_lap(noself=True)
@SOCKET_IO.on("set_race_format")
@catchLogExceptionsWrapper
def on_set_race_format(data):
    ''' set current race_format '''
    if RACE.race_status != RHRace.RaceStatus.READY:
        # prevent format change while a race is running
        emit_priority_message(__('Format change prevented by active race: Stop and save/discard laps'), False, nobroadcast=True)
        logger.info("Format change prevented by active race")
        emit_race_format()
        return
    race_format_val = data['race_format']
    race_format = RHDATA.get_raceFormat(race_format_val)
    setCurrentRaceFormat(race_format)
    Events.trigger(Evt.RACE_FORMAT_SET, {
        'race_format': race_format_val,
    })
    emit_race_format()
    logger.info("set race format to '%s' (%s)" % (race_format.name, race_format.id))
@SOCKET_IO.on('add_race_format')
@catchLogExceptionsWrapper
def on_add_race_format():
    '''Adds new format in the database by duplicating an existing one.'''
    # seed from the currently selected format, then activate the copy
    new_format = RHDATA.duplicate_raceFormat(getCurrentRaceFormat().id)
    on_set_race_format(data={ 'race_format': new_format.id })
@SOCKET_IO.on('alter_race_format')
@catchLogExceptionsWrapper
def on_alter_race_format(data):
    ''' update race format '''
    data['format_id'] = getCurrentDbRaceFormat().id
    race_format, race_list = RHDATA.alter_raceFormat(data)
    if race_format == False:
        # alteration rejected (e.g. a race using this format is active)
        emit_priority_message(__('Format alteration prevented by active race: Stop and save/discard laps'), False, nobroadcast=True)
        return
    setCurrentRaceFormat(race_format)
    if 'format_name' in data:
        emit_race_format()
        emit_class_data()
    if race_list:
        emit_result_data()
        message = __('Alterations made to race format: {0}').format(__(race_format.name))
        emit_priority_message(message, False)
@SOCKET_IO.on('delete_race_format')
@catchLogExceptionsWrapper
def on_delete_race_format():
    '''Delete the currently selected race format.'''
    raceformat = getCurrentDbRaceFormat()
    result = RHDATA.delete_raceFormat(raceformat.id)
    if result:
        # deleted format was current; fall back to the first remaining one
        first_raceFormat = RHDATA.get_first_raceFormat()
        setCurrentRaceFormat(first_raceFormat)
        emit_race_format()
    else:
        if RACE.race_status == RHRace.RaceStatus.READY:
            emit_priority_message(__('Format deletion prevented: saved race exists with this format'), False, nobroadcast=True)
        else:
            emit_priority_message(__('Format deletion prevented by active race: Stop and save/discard laps'), False, nobroadcast=True)
@SOCKET_IO.on("set_next_heat_behavior")
@catchLogExceptionsWrapper
def on_set_next_heat_behavior(data):
    '''Persist the next-heat advance behavior setting.'''
    behavior = int(data['next_heat_behavior'])
    RHDATA.set_option("nextHeatBehavior", behavior)
    logger.info("set next heat behavior to %s", behavior)
# LED Effects
def emit_led_effect_setup(**params):
    '''Emits LED event/effect wiring options.'''
    # Builds, per configurable event, the list of selectable effects:
    # recommended effects (labels prefixed "* ") sorted first, then the rest.
    if led_manager.isEnabled():
        effects = led_manager.getRegisteredEffects()
        emit_payload = {
            'events': []
        }
        for event in LEDEvent.configurable_events:
            selectedEffect = led_manager.getEventEffect(event['event'])
            effect_list_recommended = []
            effect_list_normal = []
            for effect in effects:
                # offer an effect if it explicitly includes this event, OR the
                # event is not a special idle/shutdown event and the effect does
                # not exclude it (directly or via Evt.ALL)
                if event['event'] in effects[effect]['validEvents'].get('include', []) or (
                    event['event'] not in [Evt.SHUTDOWN, LEDEvent.IDLE_DONE, LEDEvent.IDLE_RACING, LEDEvent.IDLE_READY]
                        and event['event'] not in effects[effect]['validEvents'].get('exclude', [])
                        and Evt.ALL not in effects[effect]['validEvents'].get('exclude', [])):
                    if event['event'] in effects[effect]['validEvents'].get('recommended', []) or \
                        Evt.ALL in effects[effect]['validEvents'].get('recommended', []):
                        effect_list_recommended.append({
                            'name': effect,
                            'label': '* ' + __(effects[effect]['label'])
                        })
                    else:
                        effect_list_normal.append({
                            'name': effect,
                            'label': __(effects[effect]['label'])
                        })
            effect_list_recommended.sort(key=lambda x: x['label'])
            effect_list_normal.sort(key=lambda x: x['label'])
            emit_payload['events'].append({
                'event': event["event"],
                'label': __(event["label"]),
                'selected': selectedEffect,
                'effects': effect_list_recommended + effect_list_normal
            })
        # never broadcast
        emit('led_effect_setup_data', emit_payload)
def emit_led_effects(**params):
    '''Emit the list of manually-triggerable LED effects (never broadcast).'''
    if led_manager.isEnabled() or (CLUSTER and CLUSTER.hasRecEventsSecondaries()):
        effects = led_manager.getRegisteredEffects()
        effect_list = []
        if effects:
            effect_list = [
                {'name': name, 'label': __(meta['label'])}
                for name, meta in effects.items()
                if meta['validEvents'].get('manual', True)
            ]
        emit_payload = {
            'effects': effect_list
        }
        emit('led_effects', emit_payload)
@SOCKET_IO.on('set_led_event_effect')
@catchLogExceptionsWrapper
def on_set_led_effect(data):
    '''Set effect for event.'''
    if 'event' not in data or 'effect' not in data:
        return
    if led_manager.isEnabled():
        led_manager.setEventEffect(data['event'], data['effect'])
    # persist the event->effect mapping alongside any existing ones
    effect_opt = RHDATA.get_option('ledEffects')
    effects = json.loads(effect_opt) if effect_opt else {}
    effects[data['event']] = data['effect']
    RHDATA.set_option('ledEffects', json.dumps(effects))
    Events.trigger(Evt.LED_EFFECT_SET, {
        'effect': data['event'],
    })
    logger.info('Set LED event {0} to effect {1}'.format(data['event'], data['effect']))
@SOCKET_IO.on('use_led_effect')
@catchLogExceptionsWrapper
def on_use_led_effect(data):
    '''Activate arbitrary LED Effect.'''
    if 'effect' not in data:
        return
    if led_manager.isEnabled():
        led_manager.setEventEffect(Evt.LED_MANUAL, data['effect'])
    Events.trigger(Evt.LED_SET_MANUAL, data) # setup manual effect on mirror timers
    # deliberately alias the caller's 'args' dict (not a copy) to keep the
    # original mutation semantics when a color override is applied
    args = data.get('args', {})
    if 'color' in data:
        args['color'] = hexToColor(data['color'])
    Events.trigger(Evt.LED_MANUAL, args)
# Race management socket io events
@SOCKET_IO.on('schedule_race')
@catchLogExceptionsWrapper
def on_schedule_race(data):
    '''Schedule a race start data['m'] minutes + data['s'] seconds from now.'''
    delay_secs = (data['m'] * 60) + data['s']
    RACE.scheduled_time = millis_to_secs(ms_counter()) + delay_secs
    RACE.scheduled = True
    Events.trigger(Evt.RACE_SCHEDULE, {
        'scheduled_at': RACE.scheduled_time
    })
    SOCKET_IO.emit('race_scheduled', {
        'scheduled': RACE.scheduled,
        'scheduled_at': RACE.scheduled_time
    })
    emit_priority_message(__("Next race begins in {0:01d}:{1:02d}".format(data['m'], data['s'])), True)
@SOCKET_IO.on('cancel_schedule_race')
@catchLogExceptionsWrapper
def cancel_schedule_race():
    '''Cancel a previously scheduled race start.'''
    RACE.scheduled = False
    Events.trigger(Evt.RACE_SCHEDULE_CANCEL)
    # scheduled_at retains its old value; clients key off 'scheduled' flag
    SOCKET_IO.emit('race_scheduled', {
        'scheduled': RACE.scheduled,
        'scheduled_at': RACE.scheduled_time
    })
    emit_priority_message(__("Scheduled race cancelled"), False)
@SOCKET_IO.on('get_pi_time')
@catchLogExceptionsWrapper
def on_get_pi_time():
    '''Report the server's millisecond counter to the requesting client.'''
    # never broadcasts to all (client must make request)
    emit('pi_time', {
        'pi_time_ms': ms_counter()
    })
@SOCKET_IO.on('stage_race')
@catchLogExceptionsWrapper
def on_stage_race():
    '''Begin race staging: validate pilots, reset race state and start the countdown.'''
    global LAST_RACE
    valid_pilots = False
    heatNodes = RHDATA.get_heatNodes_by_heat(RACE.current_heat)
    # at least one in-range seat must have a pilot assigned
    for heatNode in heatNodes:
        if heatNode.node_index < RACE.num_nodes:
            if heatNode.pilot_id != RHUtils.PILOT_ID_NONE:
                valid_pilots = True
                break
    if request and valid_pilots is False:
        emit_priority_message(__('No valid pilots in race'), True, nobroadcast=True)
    if CLUSTER:
        CLUSTER.emitToSplits('stage_race')
    race_format = getCurrentRaceFormat()
    if RACE.race_status != RHRace.RaceStatus.READY:
        if race_format is SECONDARY_RACE_FORMAT: # if running as secondary timer
            if RACE.race_status == RHRace.RaceStatus.RACING:
                return # if race in progress then leave it be
            # if missed stop/discard message then clear current race
            logger.info("Forcing race clear/restart because running as secondary timer")
            on_discard_laps()
        elif RACE.race_status == RHRace.RaceStatus.DONE and not RACE.any_laps_recorded():
            on_discard_laps() # if no laps then allow restart
    if RACE.race_status == RHRace.RaceStatus.READY: # only initiate staging if ready
        # common race start events (do early to prevent processing delay when start is called)
        heat_data = RHDATA.get_heat(RACE.current_heat)
        if heat_data.class_id != RHUtils.CLASS_ID_NONE:
            class_format_id = RHDATA.get_raceClass(heat_data.class_id).format_id
            if class_format_id != RHUtils.FORMAT_ID_NONE:
                class_format = RHDATA.get_raceFormat(class_format_id)
                setCurrentRaceFormat(class_format)
                # NOTE(review): the local 'race_format' still refers to the
                # pre-class format here; the start-delay/stage payload below
                # uses it — confirm whether it should be refreshed to
                # class_format at this point
                logger.info("Forcing race format from class setting: '{0}' ({1})".format(class_format.name, class_format_id))
        clear_laps() # Clear laps before race start
        heatNodes = RHDATA.get_heatNodes_by_heat(RACE.current_heat) # reload after potential commit
        init_node_cross_fields(heatNodes)  # set 'cur_pilot_id' and 'cross' fields on nodes
        LAST_RACE = None # clear all previous race data
        RACE.timer_running = False # indicate race timer not running
        RACE.race_status = RHRace.RaceStatus.STAGING
        RACE.win_status = RHRace.WinStatus.NONE
        RACE.status_message = ''
        RACE.any_races_started = True
        RACE.init_node_finished_flags(heatNodes)
        emit_current_laps() # Race page, blank laps to the web client
        emit_current_leaderboard() # Race page, blank leaderboard to the web client
        emit_race_status()
        emit_race_format()
        MIN = min(race_format.start_delay_min, race_format.start_delay_max) # in case values are reversed
        MAX = max(race_format.start_delay_min, race_format.start_delay_max)
        # randomized staging delay plus fixed extra lead-in
        RACE.start_time_delay_secs = random.randint(MIN, MAX) + RHRace.RACE_START_DELAY_EXTRA_SECS
        RACE.start_time_ms = ms_counter() + secs_to_millis(RACE.start_time_delay_secs)
        RACE.start_time_epoch_ms = PROGRAM_START.monotonic_to_epoch_millis(RACE.start_time_ms)
        # token lets the start thread detect a stop/restart while it sleeps
        RACE.start_token = random.random()
        gevent.spawn(race_start_thread, RACE.start_token)
        eventPayload = {
            'hide_stage_timer': MIN != MAX,
            'pi_starts_at_ms': RACE.start_time_ms,
            'color': ColorVal.ORANGE,
        }
        if led_manager.isEnabled():
            eventPayload['race_node_colors'] = led_manager.getNodeColors(RACE.num_nodes)
        else:
            eventPayload['race_node_colors'] = None
        Events.trigger(Evt.RACE_STAGE, eventPayload)
        SOCKET_IO.emit('stage_ready', {
            'hide_stage_timer': MIN != MAX,
            'delay': RACE.start_time_delay_secs,
            'race_mode': race_format.race_mode,
            'race_time_sec': race_format.race_time_sec,
            'pi_starts_at_ms': RACE.start_time_ms
        }) # Announce staging with chosen delay
    else:
        logger.warning("Attempted to stage race while status is not 'ready': status is {}".format(RACE.race_status))
def autoUpdateCalibration():
    '''Apply the best historical tuning values (from findBestValues) to each node.'''
    for node_index, node in enumerate(INTERFACE.nodes):
        calibration = findBestValues(node, node_index)
        # compare by value: the original used 'is not', which only works by
        # accident for small CPython-interned ints and misfires for typical
        # RSSI levels (> 256), causing redundant hardware writes
        if node.enter_at_level != calibration['enter_at_level']:
            on_set_enter_at_level({
                'node': node_index,
                'enter_at_level': calibration['enter_at_level']
            })
        if node.exit_at_level != calibration['exit_at_level']:
            on_set_exit_at_level({
                'node': node_index,
                'exit_at_level': calibration['exit_at_level']
            })
    logger.info('Updated calibration with best discovered values')
    emit_enter_and_exit_at_levels()
def findBestValues(node, node_index):
    ''' Search race history for best tuning values '''
    # Returns {'enter_at_level', 'exit_at_level'} from the most specific
    # historical match, searched in priority order:
    #   1. same heat + same node (newest matching race only)
    #   2. same class + same pilot + same node (newest matching race only)
    #   3. same pilot + same node (any class)
    #   4. same node (any pilot)
    #   5. no history: keep the node's current levels
    # get commonly used values
    heat = RHDATA.get_heat(RACE.current_heat)
    pilot = RHDATA.get_pilot_from_heatNode(RACE.current_heat, node_index)
    current_class = heat.class_id
    races = RHDATA.get_savedRaceMetas()
    races.sort(key=lambda x: x.id, reverse=True)  # newest race first
    pilotRaces = RHDATA.get_savedPilotRaces()
    pilotRaces.sort(key=lambda x: x.id, reverse=True)
    # test for disabled node
    # (compare with '==': 'is' on integers only works by accident for small
    # CPython-interned values and can misfire for larger ids/frequencies)
    if pilot == RHUtils.PILOT_ID_NONE or node.frequency == RHUtils.FREQUENCY_ID_NONE:
        logger.debug('Node {0} calibration: skipping disabled node'.format(node.index+1))
        return {
            'enter_at_level': node.enter_at_level,
            'exit_at_level': node.exit_at_level
        }
    # test for same heat, same node
    for race in races:
        if race.heat_id == heat.id:
            for pilotRace in pilotRaces:
                if pilotRace.race_id == race.id and \
                    pilotRace.node_index == node_index:
                    logger.debug('Node {0} calibration: found same pilot+node in same heat'.format(node.index+1))
                    return {
                        'enter_at_level': pilotRace.enter_at,
                        'exit_at_level': pilotRace.exit_at
                    }
            break
    # test for same class, same pilot, same node
    for race in races:
        if race.class_id == current_class:
            for pilotRace in pilotRaces:
                if pilotRace.race_id == race.id and \
                    pilotRace.node_index == node_index and \
                    pilotRace.pilot_id == pilot:
                    logger.debug('Node {0} calibration: found same pilot+node in other heat with same class'.format(node.index+1))
                    return {
                        'enter_at_level': pilotRace.enter_at,
                        'exit_at_level': pilotRace.exit_at
                    }
            break
    # test for same pilot, same node
    for pilotRace in pilotRaces:
        if pilotRace.node_index == node_index and \
            pilotRace.pilot_id == pilot:
            logger.debug('Node {0} calibration: found same pilot+node in other heat with other class'.format(node.index+1))
            return {
                'enter_at_level': pilotRace.enter_at,
                'exit_at_level': pilotRace.exit_at
            }
    # test for same node
    for pilotRace in pilotRaces:
        if pilotRace.node_index == node_index:
            logger.debug('Node {0} calibration: found same node in other heat'.format(node.index+1))
            return {
                'enter_at_level': pilotRace.enter_at,
                'exit_at_level': pilotRace.exit_at
            }
    # fallback
    logger.debug('Node {0} calibration: no calibration hints found, no change'.format(node.index+1))
    return {
        'enter_at_level': node.enter_at_level,
        'exit_at_level': node.exit_at_level
    }
@catchLogExceptionsWrapper
def race_start_thread(start_token):
    '''Greenlet that runs the staging countdown and flips the race to RACING.

    start_token: token set at staging; if RACE.start_token changes (race
    stopped/restarted quickly) this thread becomes a no-op.
    '''
    # clear any lingering crossings at staging (if node rssi < enterAt)
    for node in INTERFACE.nodes:
        if node.is_crossing and node.frequency > 0 and \
            (getCurrentRaceFormat() is SECONDARY_RACE_FORMAT or
            (node.current_pilot_id != RHUtils.PILOT_ID_NONE and node.current_rssi.rssi < node.enter_at_level)):
            logger.info("Forcing end crossing for node {0} at staging (rssi={1}, enterAt={2}, exitAt={3})".\
                        format(node.index+1, node.current_rssi.rssi, node.enter_at_level, node.exit_at_level))
            INTERFACE.force_end_crossing(node.index)
    if CLUSTER and CLUSTER.hasSecondaries():
        CLUSTER.doClusterRaceStart()
    # set lower EnterAt/ExitAt values if configured
    if RHDATA.get_optionInt('startThreshLowerAmount') > 0 and RHDATA.get_optionInt('startThreshLowerDuration') > 0:
        lower_amount = RHDATA.get_optionInt('startThreshLowerAmount')
        logger.info("Lowering EnterAt/ExitAt values at start of race, amount={0}%, duration={1} secs".\
                    format(lower_amount, RHDATA.get_optionInt('startThreshLowerDuration')))
        lower_end_time_ms = RACE.start_time_ms + secs_to_millis(RHDATA.get_optionInt('startThreshLowerDuration'))
        for node in INTERFACE.nodes:
            if node.frequency > 0 and (getCurrentRaceFormat() is SECONDARY_RACE_FORMAT or node.current_pilot_id != RHUtils.PILOT_ID_NONE):
                if node.current_rssi.rssi < node.enter_at_level:
                    # lower both thresholds by a percentage of the enter/exit gap
                    diff_val = int((node.enter_at_level-node.exit_at_level)*lower_amount/100)
                    if diff_val > 0:
                        new_enter_at = node.enter_at_level - diff_val
                        new_exit_at = max(node.exit_at_level - diff_val, 0)
                        if node.is_valid_rssi(new_enter_at):
                            logger.info("For node {0} lowering EnterAt from {1} to {2} and ExitAt from {3} to {4}"\
                                        .format(node.index+1, node.enter_at_level, new_enter_at, node.exit_at_level, new_exit_at))
                            node.start_thresh_lower_time_ms = lower_end_time_ms # set time when values will be restored
                            node.start_thresh_lower_flag = True
                            # use 'transmit_' instead of 'set_' so values are not saved in node object
                            INTERFACE.transmit_enter_at_level(node, new_enter_at)
                            INTERFACE.transmit_exit_at_level(node, new_exit_at)
                    else:
                        logger.info("Not lowering EnterAt/ExitAt values for node {0} because EnterAt value ({1}) unchanged"\
                                    .format(node.index+1, node.enter_at_level))
                else:
                    logger.info("Not lowering EnterAt/ExitAt values for node {0} because current RSSI ({1}) >= EnterAt ({2})"\
                                .format(node.index+1, node.current_rssi.rssi, node.enter_at_level))
    RACE.current_round = RHDATA.get_max_round(RACE.current_heat) + 1
    heat_data = RHDATA.get_heat(RACE.current_heat)
    RACE.current_stage = heat_data.stage_id
    # do non-blocking delay before time-critical code
    time_remaining_ms = RACE.start_time_ms - ms_counter()
    countdown_time_secs = int(time_remaining_ms/1000)
    for secs_remaining in range(countdown_time_secs, 0, -1):
        gevent.sleep(millis_to_secs(time_remaining_ms - secs_to_millis(secs_remaining))) # sleep until next whole second
        evt_data = {'time_remaining': secs_remaining,
                    'countdown_time': countdown_time_secs}
        Events.trigger(Evt.RACE_START_COUNTDOWN, evt_data)
        time_remaining_ms = RACE.start_time_ms - ms_counter()
    # coarse wait until within 500ms of the start, then spin precisely below
    while (ms_counter() < RACE.start_time_ms - 500):
        gevent.sleep(0.1)
    if RACE.race_status == RHRace.RaceStatus.STAGING and \
        RACE.start_token == start_token:
        # Only start a race if it is not already in progress
        # Null this thread if token has changed (race stopped/started quickly)
        RACE.laps_winner_name = None # name of winner in first-to-X-laps race
        RACE.winning_lap_id = 0 # track winning lap-id if race tied during first-to-X-laps race
        # do blocking delay until race start
        while ms_counter() < RACE.start_time_ms:
            pass
        # !!! RACE STARTS NOW !!!
        RACE.start_time = datetime.now() # record standard-formatted time
        # do time-critical tasks
        RACE.race_status = RHRace.RaceStatus.RACING # To enable registering passed laps
        INTERFACE.on_race_start(RACE.start_time_ms)
        RACE.timer_running = True # indicate race timer is running
        Events.trigger(Evt.RACE_START, {
            'race': RACE,
            'color': ColorVal.GREEN
        })
        # do secondary start tasks (small delay is acceptable)
        for node in INTERFACE.nodes:
            # clear any lingering crossing (if rssi>enterAt then first crossing starts now)
            if node.is_crossing and node.frequency > 0 and (
                getCurrentRaceFormat() is SECONDARY_RACE_FORMAT or node.current_pilot_id != RHUtils.PILOT_ID_NONE):
                logger.info("Forcing end crossing for node {0} at start (rssi={1}, enterAt={2}, exitAt={3})".\
                            format(node.index+1, node.current_rssi.rssi, node.enter_at_level, node.exit_at_level))
                INTERFACE.force_end_crossing(node.index)
        # kick off race expire processing
        race_format = getCurrentRaceFormat()
        if race_format and race_format.race_mode == RHRace.RaceMode.FIXED_TIME: # count down
            gevent.spawn(race_expire_thread, start_token)
    emit_race_status() # Race page, to set race button states
    logger.info('Race started at {} ({})'.format(RACE.start_time_ms, RACE.start_time_epoch_ms))
@catchLogExceptionsWrapper
def race_expire_thread(start_token):
    '''Counts down a fixed-time race, firing RACE_TICK once per second and
    RACE_FINISH when the race duration (plus lap grace period) expires.

    start_token: token captured at race start; if it no longer matches
    RACE.start_token the race was stopped/restarted, so this thread exits
    without firing RACE_FINISH.
    '''
    race_format = getCurrentRaceFormat()
    if race_format and race_format.race_mode == RHRace.RaceMode.FIXED_TIME: # count down
        race_duration_sec = race_format.race_time_sec + race_format.lap_grace_sec
        race_end_time_ms = RACE.start_time_ms + secs_to_millis(race_duration_sec)
        race_tick_time_ms = RACE.start_time_ms
        while RACE.race_status == RHRace.RaceStatus.RACING and RACE.start_token == start_token and race_tick_time_ms < race_end_time_ms:
            race_tick_time_ms += 1000 # 1 second ticks
            # gevent.sleep() takes SECONDS; the original passed the raw
            # millisecond delta, sleeping ~1000x too long per tick.
            # Convert as done elsewhere in this file (see race-start staging).
            gevent.sleep(millis_to_secs(race_tick_time_ms - ms_counter()))
            Events.trigger(Evt.RACE_TICK, {'timer_sec': round((race_tick_time_ms - RACE.start_time_ms)/1000)})
        # if race still in progress and is still same race
        if RACE.race_status == RHRace.RaceStatus.RACING and RACE.start_token == start_token:
            RACE.timer_running = False # indicate race timer no longer running
            RACE.finish_time_ms = ms_counter()
            RACE.finish_time_epoch_ms = PROGRAM_START.monotonic_to_epoch_millis(RACE.finish_time_ms)
            Events.trigger(Evt.RACE_FINISH, {
                'race': RACE
            })
            logger.info("Race count-down timer reached expiration")
            check_win_condition(at_finish=True, start_token=start_token)
            emit_current_leaderboard()
    else:
        logger.debug("Finished unused race-time-expire thread")
@SOCKET_IO.on('stop_race')
@catchLogExceptionsWrapper
def on_stop_race():
    '''Stops the race and stops registering laps.'''
    if CLUSTER:
        CLUSTER.emitToSplits('stop_race')
    # force-complete any gate crossings still in progress before stopping
    forced_any = False
    for node in INTERFACE.nodes:
        if node.is_crossing and node.frequency > 0 and \
                        node.current_pilot_id != RHUtils.PILOT_ID_NONE:
            logger.info("Forcing end crossing for node {} at race stop (rssi={}, enterAt={}, exitAt={})".\
                        format(node.index+1, node.current_rssi.rssi, node.enter_at_level, node.exit_at_level))
            INTERFACE.force_end_crossing(node.index)
            forced_any = True
    if not forced_any:
        do_stop_race_actions()
    else:
        # give the forced end-crossings a moment to complete first
        gevent.spawn_later(0.5, do_stop_race_actions)
@catchLogExceptionsWrapper
def do_stop_race_actions():
    '''Finalizes a race stop: stamps the end time, halts lap registration,
    fires RACE_STOP, schedules restoration of any temporarily-lowered node
    thresholds, and refreshes client race-state displays.'''
    if RACE.race_status == RHRace.RaceStatus.RACING:
        RACE.end_time_ms = ms_counter() # Update the race end time stamp
        RACE.end_time_epoch_ms = PROGRAM_START.monotonic_to_epoch_millis(RACE.end_time_ms)
        duration_ms = RACE.end_time_ms - RACE.start_time_ms
        logger.info('Race stopped at {} ({}), duration {}ms'.format(RACE.end_time_ms, RACE.end_time_epoch_ms, duration_ms))
        min_laps_list = [] # show nodes with laps under minimum (if any)
        for node in INTERFACE.nodes:
            if node.under_min_lap_count > 0:
                min_laps_list.append('Node {0} Count={1}'.format(node.index+1, node.under_min_lap_count))
        if len(min_laps_list) > 0:
            logger.info('Nodes with laps under minimum: ' + ', '.join(min_laps_list))
        INTERFACE.on_race_stop()
        RACE.race_status = RHRace.RaceStatus.DONE # To stop registering passed laps, waiting for laps to be cleared
        Events.trigger(Evt.RACE_STOP, {
            'race': RACE,
            'color': ColorVal.RED
        })
        check_win_condition()
        if CLUSTER and CLUSTER.hasSecondaries():
            CLUSTER.doClusterRaceStop()
    else:
        logger.debug('No active race to stop')
        RACE.race_status = RHRace.RaceStatus.READY # Go back to ready state
        Events.trigger(Evt.LAPS_CLEAR)
        # duration of 0 forces the threshold-restore check below to run
        duration_ms = 0
    # check if nodes may be set to temporary lower EnterAt/ExitAt values (and still have them)
    if RHDATA.get_optionInt('startThreshLowerAmount') > 0 and \
        duration_ms < 1000*RHDATA.get_optionInt('startThreshLowerDuration'):
        for node in INTERFACE.nodes:
            # if node EnterAt/ExitAt values need to be restored then do it soon
            if node.frequency > 0 and (
                getCurrentRaceFormat() is SECONDARY_RACE_FORMAT or (
                    node.current_pilot_id != RHUtils.PILOT_ID_NONE and \
                    node.start_thresh_lower_flag)):
                # NOTE(review): in the no-active-race branch RACE.end_time_ms
                # may be stale from a previous race — confirm intended
                node.start_thresh_lower_time_ms = RACE.end_time_ms + 100
    RACE.timer_running = False # indicate race timer not running
    RACE.scheduled = False # also stop any deferred start
    SOCKET_IO.emit('stop_timer') # Loop back to race page to start the timer counting up
    emit_race_status() # Race page, to set race button states
    emit_current_leaderboard()
@SOCKET_IO.on('save_laps')
@catchLogExceptionsWrapper
def on_save_laps():
    '''Save current laps data to the database.

    Creates a new saved-race record (next round for the current heat), copies
    each active seat's laps/splits and RSSI history into it, kicks off result
    cache rebuilding and node calibration, then discards the current laps.
    '''
    # Determine if race is empty
    # race_has_laps = False
    # for node_index in RACE.node_passes:
    #     if RACE.node_passes[node_index]:
    #         race_has_laps = True
    #         break
    #
    # if race_has_laps == True:
    if CLUSTER:
        CLUSTER.emitToSplits('save_laps')
    PAGE_CACHE.set_valid(False)
    heat = RHDATA.get_heat(RACE.current_heat)
    # Get the last saved round for the current heat
    max_round = RHDATA.get_max_round(RACE.current_heat)
    # Loop through laps to copy to saved races
    profile = getCurrentProfile()
    profile_freqs = json.loads(profile.frequencies)
    new_race_data = {
        'round_id': max_round+1,
        'heat_id': RACE.current_heat,
        'class_id': heat.class_id,
        'format_id': RHDATA.get_option('currentFormat'),
        'start_time': RACE.start_time_ms,
        'start_time_formatted': RACE.start_time.strftime("%Y-%m-%d %H:%M:%S"),
    }
    new_race = RHDATA.add_savedRaceMeta(new_race_data)
    race_data = {}
    # collect per-seat lap/history data for every node with a frequency set
    for node_index in range(min(RACE.num_nodes, len(profile_freqs["f"]))):
        if profile_freqs["f"][node_index] != RHUtils.FREQUENCY_ID_NONE:
            node = INTERFACE.nodes[node_index]
            location_id, seat = get_local_location_id_and_seat(node)
            if location_id == 0:
                pilot_id = RHDATA.get_pilot_from_heatNode(RACE.current_heat, seat)
                history_times, history_values = node.history.get()
                race_data[seat] = {
                    'race_id': new_race.id,
                    'pilot_id': pilot_id,
                    'history_values': json.dumps(history_values),
                    'history_times': json.dumps(history_times),
                    'enter_at': node.enter_at_level,
                    'exit_at': node.exit_at_level,
                    'laps': RACE.node_passes[seat],
                    'splits': RACE.node_splits[seat]
                }
                if hasattr(node, 'lifetime_history'):
                    lifetime_times, lifetime_values = node.lifetime_history.get()
                    race_data[seat]['lifetime_values'] = json.dumps(lifetime_values)
                    race_data[seat]['lifetime_times'] = json.dumps(lifetime_times)
    RHDATA.add_race_data(race_data)
    # spawn thread for updating results caches
    cache_params = {
        'race_id': new_race.id,
        'heat_id': RACE.current_heat,
        'round_id': new_race.round_id,
    }
    gevent.spawn(build_atomic_result_caches, cache_params)
    # NOTE(review): 'node_index' below is left over from the loop above, so
    # the SAME history series is passed for every node in the calibrate call
    # — looks unintended (each node has its own history); confirm.
    history_times, history_values = INTERFACE.nodes[node_index].history.get()
    gevent.spawn(INTERFACE.calibrate_nodes, RACE.start_time_ms,
        {node_idx: (laps, history_times, history_values) for node_idx,laps in RACE.node_passes.items()}
    )
    Events.trigger(Evt.LAPS_SAVE, {
        'race_id': new_race.id,
    })
    logger.info('Current laps saved: Heat {0} Round {1}'.format(RACE.current_heat, max_round+1))
    on_discard_laps(saved=True) # Also clear the current laps
    # else:
    #     on_discard_laps()
    #     message = __('Discarding empty race')
    #     emit_priority_message(message, False, nobroadcast=True)
@SOCKET_IO.on('resave_laps')
@catchLogExceptionsWrapper
def on_resave_laps(data):
    '''Re-saves edited lap data for a previously saved race.

    Expects in 'data': heat_id, round_id, callsign, race_id, pilotrace_id,
    node, pilot_id, laps (list of lap dicts), enter_at, exit_at.
    Updates the saved pilot-race thresholds, replaces its laps, rebuilds
    result caches, and re-runs node calibration from the stored history.
    '''
    PAGE_CACHE.set_valid(False)
    heat_id = data['heat_id']
    round_id = data['round_id']
    callsign = data['callsign']
    race_id = data['race_id']
    pilotrace_id = data['pilotrace_id']
    node_idx = data['node']
    pilot_id = data['pilot_id']
    laps = data['laps']
    enter_at = data['enter_at']
    exit_at = data['exit_at']
    # persist possibly-edited enter/exit thresholds for this pilot-race
    pilotrace_data = {
        'pilotrace_id': pilotrace_id,
        'enter_at': enter_at,
        'exit_at': exit_at
    }
    RHDATA.alter_savedPilotRace(pilotrace_data)
    new_laps = []
    new_racedata = {
        'race_id': race_id,
        'pilotrace_id': pilotrace_id,
        'node_index': node_idx,
        'pilot_id': pilot_id,
        'laps': new_laps
    }
    for lap in laps:
        # re-derive the formatted time when the client sent a raw float
        tmp_lap_time_formatted = lap['lap_time']
        if isinstance(lap['lap_time'], float):
            tmp_lap_time_formatted = RHUtils.time_format(lap['lap_time'], RHDATA.get_option('timeFormat'))
        new_laps.append({
            'lap_time_stamp': lap['lap_time_stamp'],
            'lap_time': lap['lap_time'],
            'lap_time_formatted': tmp_lap_time_formatted,
            'source': lap['source'],
            'deleted': lap['deleted']
        })
    RHDATA.replace_savedRaceLaps(new_racedata)
    message = __('Race times adjusted for: Heat {0} Round {1} / {2}').format(heat_id, round_id, callsign)
    emit_priority_message(message, False)
    logger.info(message)
    # spawn thread for updating results caches
    cache_params = {
        'race_id': race_id,
        'heat_id': heat_id,
        'round_id': round_id,
    }
    gevent.spawn(build_atomic_result_caches, cache_params)
    race_meta = RHDATA.get_savedRaceMeta(race_id)
    pilotrace_obj = RHDATA.get_savedPilotRace(pilotrace_id)
    history_values = json.loads(pilotrace_obj.history_values)
    history_times = json.loads(pilotrace_obj.history_times)
    # NOTE(review): multiplies start_time by 1000 — presumably the saved
    # start_time is in seconds while calibrate_nodes wants ms; confirm
    gevent.spawn(INTERFACE.calibrate_nodes, 1000*race_meta.start_time,
        {node_idx: (new_laps, history_times, history_values)}
    )
    Events.trigger(Evt.LAPS_RESAVE, {
        'race_id': race_id,
        'pilot_id': pilot_id,
    })
    # run adaptive calibration
    if RHDATA.get_optionInt('calibrationMode'):
        autoUpdateCalibration()
@catchLogExceptionsWrapper
def build_atomic_result_caches(params):
    '''Invalidates the page cache and pushes fresh result data to clients.
    The 'params' dict (race/heat/round ids) is accepted but not used here.'''
    PAGE_CACHE.set_valid(False)
    emit_result_data()
@SOCKET_IO.on('discard_laps')
@catchLogExceptionsWrapper
def on_discard_laps(**kwargs):
    '''Clear the current laps without saving.'''
    clear_laps()
    RACE.race_status = RHRace.RaceStatus.READY # ready to start the next race
    RACE.win_status = RHRace.WinStatus.NONE
    RACE.status_message = ''
    emit_current_laps() # blank out laps on the Race page
    emit_current_leaderboard() # blank out the Race page leaderboard
    emit_race_status() # refresh race button states
    follows_save = ('saved' in kwargs and kwargs['saved'] == True)
    if not follows_save:
        # a direct discard (not part of a save) notifies listeners/splits
        Events.trigger(Evt.LAPS_DISCARD)
        if CLUSTER:
            CLUSTER.emitToSplits('discard_laps')
    Events.trigger(Evt.LAPS_CLEAR)
def clear_laps():
    '''Clear the current laps table.

    Snapshots the current race into LAST_RACE (so the Race page can keep
    showing it) before resetting the live lap data.
    '''
    global LAST_RACE
    # memo maps the result-fn attributes to themselves so deepcopy shares
    # those callables instead of attempting to copy them
    memo = {id(RACE.result_fn): RACE.result_fn, id(RACE.team_result_fn): RACE.team_result_fn}
    LAST_RACE = copy.deepcopy(RACE, memo)
    RACE.laps_winner_name = None # clear winner in first-to-X-laps race
    RACE.winning_lap_id = 0
    reset_current_laps() # Clear out the current laps table
    logger.info('Current laps cleared')
def init_node_cross_fields(heatNodes):
    '''Sets the 'current_pilot_id' and 'cross' values on each node.'''
    for node in INTERFACE.nodes:
        node.current_pilot_id = RHUtils.PILOT_ID_NONE
        if node.frequency and node.frequency > 0:
            # assign the pilot seeded onto this node's seat, if any
            seeded = next((hn for hn in heatNodes if hn.node_index == node.index), None)
            if seeded is not None:
                node.current_pilot_id = seeded.pilot_id
        node.first_cross_flag = False
        node.show_crossing_flag = False
def set_current_heat_data():
    '''Loads pilots and race format for the current heat and pushes the
    resulting state changes out to connected clients.'''
    RACE.set_current_pilots(RHDATA)
    heat_data = RHDATA.get_heat(RACE.current_heat)
    if heat_data.class_id != RHUtils.CLASS_ID_NONE:
        fmt_id = RHDATA.get_raceClass(heat_data.class_id).format_id
        if fmt_id != RHUtils.FORMAT_ID_NONE:
            # a class-assigned format overrides the current selection
            fmt = RHDATA.get_raceFormat(fmt_id)
            setCurrentRaceFormat(fmt)
            logger.info("Forcing race format from class setting: '{0}' ({1})".format(fmt.name, fmt_id))
    if RHDATA.get_optionInt('calibrationMode'):
        autoUpdateCalibration()
    Events.trigger(Evt.HEAT_SET, {
        'heat_id': RACE.current_heat,
    })
    emit_current_heat() # Race page, to update heat selection button
    emit_current_leaderboard() # Race page, to update callsigns in leaderboard
    emit_race_format()
@SOCKET_IO.on('set_current_heat')
@catchLogExceptionsWrapper
def on_set_current_heat(data):
    '''Update the current heat variable and data.'''
    heat_id = data['heat']
    logger.info('Setting current heat to Heat {0}'.format(heat_id))
    RACE.current_heat = heat_id
    set_current_heat_data()
@SOCKET_IO.on('generate_heats')
def on_generate_heats(data):
    '''Spawn heat generator thread'''
    # generation may query results and hit the DB, so run it off-handler
    gevent.spawn(generate_heats, data)
@catchLogExceptionsWrapper
def generate_heats(data):
    '''Generate heats from qualifying class

    Seeds pilots from the input class's leaderboard (or all pilots, shuffled,
    when no input class / no win condition) into heats of at most
    'pilots_per_heat' pilots, preferring each pilot's recent node, then
    commits the heats to the output class with ladder letters (A, B, ...)
    assigned so lower seeds land in later letters.
    '''
    input_class = int(data['input_class'])
    output_class = int(data['output_class'])
    stage = data['stage']
    suffix = data['suffix']
    pilots_per_heat = int(data['pilots_per_heat'])
    if input_class == RHUtils.CLASS_ID_NONE:
        # no qualifying class: build a pseudo-leaderboard of all pilots
        results = {
            'by_race_time': []
        }
        for pilot in RHDATA.get_pilots():
            # *** if pilot is active
            entry = {}
            entry['pilot_id'] = pilot.id
            pilot_node = RHDATA.get_recent_pilot_node(pilot.id)
            if pilot_node:
                entry['node'] = pilot_node.node_index
            else:
                # -1 marks "no preferred node"; never matches available_nodes
                entry['node'] = -1
            results['by_race_time'].append(entry)
        win_condition = RHRace.WinCondition.NONE
    else:
        race_class = RHDATA.get_raceClass(input_class)
        race_format = RHDATA.get_raceFormat(race_class.format_id)
        results = RESULTS.calc_class_leaderboard(race_class.id)
        if race_format:
            win_condition = race_format.win_condition
        else:
            win_condition = RHRace.WinCondition.NONE
            logger.info('Unable to fetch format from race class {0}'.format(input_class))
    if results:
        if win_condition == RHRace.WinCondition.NONE:
            # no ranking basis: shuffle instead of seeding by results
            leaderboard = random.sample(results['by_race_time'], len(results['by_race_time']))
        else:
            leaderboard = results[results['meta']['primary_leaderboard']]
        generated_heats = []
        unplaced_pilots = []
        new_heat = {}
        assigned_pilots = 0
        available_nodes = []
        profile_freqs = json.loads(getCurrentProfile().frequencies)
        for node_index in range(RACE.num_nodes):
            if profile_freqs["f"][node_index] != RHUtils.FREQUENCY_ID_NONE:
                available_nodes.append(node_index)
        pilots_per_heat = min(pilots_per_heat, RACE.num_nodes, len(available_nodes))
        for i,row in enumerate(leaderboard, start=1):
            logger.debug("Placing {0} into heat {1}".format(row['pilot_id'], len(generated_heats)))
            # try the pilot's preferred node first; defer if taken/invalid
            if row['node'] in new_heat or row['node'] not in available_nodes:
                unplaced_pilots.append(row['pilot_id'])
            else:
                new_heat[row['node']] = row['pilot_id']
            assigned_pilots += 1
            if assigned_pilots >= pilots_per_heat or i == len(leaderboard):
                # find slots for unassigned pilots
                if len(unplaced_pilots):
                    for pilot in unplaced_pilots:
                        for index in available_nodes:
                            if index in new_heat:
                                continue
                            else:
                                new_heat[index] = pilot
                                break
                # heat is full, flush and start next heat
                generated_heats.append(new_heat)
                unplaced_pilots = []
                new_heat = {}
                assigned_pilots = 0
        # commit generated heats to database, lower seeds first
        letters = __('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        for idx, heat in enumerate(reversed(generated_heats), start=1):
            ladder = letters[len(generated_heats) - idx]
            new_heat = RHDATA.add_heat({
                'class': output_class,
                'note': ladder + ' ' + suffix,
                'stage': stage
                }, heat)
        logger.info("Generated {0} heats from class {1}".format(len(generated_heats), input_class))
        SOCKET_IO.emit('heat_generate_done')
        Events.trigger(Evt.HEAT_GENERATE)
        emit_heat_data()
    else:
        logger.warning("Unable to generate heats from class {0}: can't get valid results".format(input_class))
        SOCKET_IO.emit('heat_generate_done')
@SOCKET_IO.on('delete_lap')
@catchLogExceptionsWrapper
def on_delete_lap(data):
    '''Delete a false lap.

    Marks the lap at data['lap_index'] on node data['node'] as deleted,
    renumbers the remaining laps, re-derives the lap time of the lap that
    followed the deleted one, and drops any split records for it.
    '''
    node_index = data['node']
    lap_index = data['lap_index']
    if node_index is None or lap_index is None:
        logger.error("Bad parameter in 'on_delete_lap()': node_index={0}, lap_index={1}".format(node_index, lap_index))
        return
    RACE.node_passes[node_index][lap_index]['deleted'] = True
    RACE.node_passes[node_index][lap_index]['late_lap'] = False
    # timestamp of the deleted lap, used to find its neighbors below
    time = RACE.node_passes[node_index][lap_index]['lap_time_stamp']
    # renumber the surviving laps sequentially
    lap_number = 0
    for lap in RACE.node_passes[node_index]:
        if not lap['deleted']:
            lap['lap_number'] = lap_number
            lap_number += 1
        else:
            lap['lap_number'] = None
    # locate the surviving laps immediately before and after the deleted one
    db_last = False
    db_next = False
    for lap in RACE.node_passes[node_index]:
        if not lap['deleted']:
            if lap['lap_time_stamp'] < time:
                db_last = lap
            if lap['lap_time_stamp'] > time:
                db_next = lap
                break
    # the following lap's duration now spans the deleted lap's interval
    if db_next and db_last:
        db_next['lap_time'] = db_next['lap_time_stamp'] - db_last['lap_time_stamp']
        db_next['lap_time_formatted'] = RHUtils.time_format(db_next['lap_time'], RHDATA.get_option('timeFormat'))
    elif db_next:
        # no earlier lap remains: measure from race start
        db_next['lap_time'] = db_next['lap_time_stamp']
        db_next['lap_time_formatted'] = RHUtils.time_format(db_next['lap_time'], RHDATA.get_option('timeFormat'))
    # delete any split laps for deleted lap
    RACE.node_splits[node_index] = list(filter(lambda split: split['lap_id'] != lap_index, RACE.node_splits[node_index]))
    RACE.modification_count += 1
    Events.trigger(Evt.LAP_DELETE, {
        #'race': RACE, # TODO this causes exceptions via 'json.loads()', so leave out for now
        'node_index': node_index,
    })
    logger.info('Lap deleted: Node {0} LapIndex {1}'.format(node_index+1, lap_index))
    check_win_condition(deletedLap=True) # handle possible change in win status
    emit_current_laps() # Race page, update web client
    emit_current_leaderboard() # Race page, update web client
@SOCKET_IO.on('restore_deleted_lap')
@catchLogExceptionsWrapper
def on_restore_deleted_lap(data):
    '''Restore a deleted (or "late") lap.

    Un-deletes the lap at data['lap_index'] on node data['node'], then walks
    the node's laps renumbering them and recomputing the time of every lap
    from the restored one onward (times of earlier laps are unchanged).
    '''
    node_index = data['node']
    lap_index = data['lap_index']
    if node_index is None or lap_index is None:
        logger.error("Bad parameter in 'on_restore_deleted_lap()': node_index={0}, lap_index={1}".format(node_index, lap_index))
        return
    lap_obj = RACE.node_passes[node_index][lap_index]
    lap_obj['deleted'] = False
    lap_obj['late_lap'] = False
    lap_number = 0 # adjust lap numbers and times as needed
    last_lap_ts = 0
    for idx, lap in enumerate(RACE.node_passes[node_index]):
        if not lap['deleted']:
            if idx >= lap_index:
                # from the restored lap onward: renumber and re-derive
                # duration from the previous surviving lap's timestamp
                lap['lap_number'] = lap_number
                lap['lap_time'] = lap['lap_time_stamp'] - last_lap_ts
                lap['lap_time_formatted'] = RHUtils.time_format(lap['lap_time'], RHDATA.get_option('timeFormat'))
            last_lap_ts = lap['lap_time_stamp']
            lap_number += 1
    RACE.modification_count += 1
    Events.trigger(Evt.LAP_RESTORE_DELETED, {
        #'race': RACE, # TODO this causes exceptions via 'json.loads()', so leave out for now
        'node_index': node_index,
    })
    logger.info('Restored deleted lap: Node {0} LapIndex {1}'.format(node_index+1, lap_index))
    check_win_condition(deletedLap=True) # handle possible change in win status
    emit_current_laps() # Race page, update web client
    emit_current_leaderboard() # Race page, update web client
@SOCKET_IO.on('simulate_lap')
@catchLogExceptionsWrapper
def on_simulate_lap(data):
    '''Simulates a lap (for debug testing).'''
    node_idx = data['node']
    logger.info('Simulated lap: Node {0}'.format(node_idx+1))
    # fire the crossing-exit event so LED effects match a real pass
    Events.trigger(Evt.CROSSING_EXIT, {
        'nodeIndex': node_idx,
        'color': led_manager.getDisplayColor(node_idx)
    })
    INTERFACE.simulate_lap(node_idx)
@SOCKET_IO.on('LED_solid')
@catchLogExceptionsWrapper
def on_LED_solid(data):
    '''LED Solid Color'''
    if data.get('off'):
        led_manager.clear()
        return
    on_use_led_effect({
        'effect': "stripColor",
        'args': {
            'color': Color(data['red'], data['green'], data['blue']),
            'pattern': ColorPattern.SOLID,
            'preventIdle': True
        }
    })
@SOCKET_IO.on('LED_chase')
@catchLogExceptionsWrapper
def on_LED_chase(data):
    '''LED Solid Color Chase'''
    chase_color = Color(data['red'], data['green'], data['blue'])
    on_use_led_effect({
        'effect': "stripColor",
        'args': {
            'color': chase_color,
            # 'pattern': ColorPattern.CHASE, # TODO implement chase animation pattern
            'pattern': ColorPattern.ALTERNATING,
            'time': 5
        }
    })
@SOCKET_IO.on('LED_RB')
@catchLogExceptionsWrapper
def on_LED_RB():
    '''LED rainbow'''
    # static rainbow effect, 5-second duration
    on_use_led_effect({'effect': "rainbow", 'args': {'time': 5}})
@SOCKET_IO.on('LED_RBCYCLE')
@catchLogExceptionsWrapper
def on_LED_RBCYCLE():
    '''LED rainbow Cycle'''
    # cycling rainbow effect, 5-second duration
    on_use_led_effect({'effect': "rainbowCycle", 'args': {'time': 5}})
@SOCKET_IO.on('LED_RBCHASE')
@catchLogExceptionsWrapper
def on_LED_RBCHASE():
    '''LED Rainbow Cycle Chase'''
    # chasing rainbow effect, 5-second duration
    on_use_led_effect({'effect': "rainbowCycleChase", 'args': {'time': 5}})
@SOCKET_IO.on('LED_brightness')
@catchLogExceptionsWrapper
def on_LED_brightness(data):
    '''Change LED Brightness'''
    level = data['brightness']
    strip.setBrightness(level)
    strip.show()
    # persist the setting so it survives restarts
    RHDATA.set_option("ledBrightness", level)
    Events.trigger(Evt.LED_BRIGHTNESS_SET, {
        'level': level,
    })
@SOCKET_IO.on('set_option')
@catchLogExceptionsWrapper
def on_set_option(data):
    '''Persists a single option value and notifies listeners.'''
    opt_name = data['option']
    opt_value = data['value']
    RHDATA.set_option(opt_name, opt_value)
    Events.trigger(Evt.OPTION_SET, {
        'option': opt_name,
        'value': opt_value,
    })
@SOCKET_IO.on('get_race_scheduled')
@catchLogExceptionsWrapper
def get_race_elapsed():
    '''Replies (to the requesting client only) with race schedule status.
    Note: handler name differs from its 'get_race_scheduled' channel.'''
    emit('race_scheduled', {
        'scheduled': RACE.scheduled,
        'scheduled_at': RACE.scheduled_time
    })
@SOCKET_IO.on('save_callouts')
@catchLogExceptionsWrapper
def save_callouts(data):
    '''Persists the voice callouts configuration as a JSON option.'''
    callouts_json = json.dumps(data['callouts'])
    RHDATA.set_option('voiceCallouts', callouts_json)
    logger.info('Set all voice callouts')
    logger.debug('Voice callouts set to: {0}'.format(callouts_json))
@SOCKET_IO.on('imdtabler_update_freqs')
@catchLogExceptionsWrapper
def imdtabler_update_freqs(data):
    ''' Update IMDTabler page with new frequencies list '''
    # accept comma- or whitespace-separated frequency lists
    freq_list = data['freq_list'].replace(',', ' ').split()
    emit_imdtabler_data(freq_list)
@SOCKET_IO.on('clean_cache')
@catchLogExceptionsWrapper
def clean_results_cache():
    ''' wipe all results caches '''
    # drop stored results, then mark the page cache stale so it rebuilds
    RESULTS_CACHE.clear_all()
    PAGE_CACHE.set_valid(False)
# Socket io emit functions
def emit_frontend_load(**params):
    '''Emits reload command.'''
    if 'nobroadcast' in params:
        emit('load_all') # reply only to the requesting client
    else:
        SOCKET_IO.emit('load_all') # broadcast to all clients
def emit_priority_message(message, interrupt=False, key=None, event=None, **params):
    ''' Emits message to all clients '''
    payload = {
        'message': message,
        'interrupt': interrupt,
        'key': key
    }
    if 'nobroadcast' in params:
        emit('priority_message', payload)
        return
    # choose the event type matching message urgency before broadcasting
    evt_type = Evt.MESSAGE_INTERRUPT if interrupt else Evt.MESSAGE_STANDARD
    Events.trigger(evt_type, {
        'message': message,
        'interrupt': interrupt,
        'event': event
    })
    SOCKET_IO.emit('priority_message', payload)
def emit_race_status(**params):
    '''Emits race status.'''
    fmt = getCurrentRaceFormat()
    payload = {
        'race_status': RACE.race_status,
        'race_mode': fmt.race_mode,
        'race_time_sec': fmt.race_time_sec,
        'race_staging_tones': fmt.staging_tones,
        # hide the staging timer when the start delay is randomized
        'hide_stage_timer': fmt.start_delay_min != fmt.start_delay_max,
        'pi_starts_at_ms': RACE.start_time_ms
    }
    if 'nobroadcast' in params:
        emit('race_status', payload)
    else:
        SOCKET_IO.emit('race_status', payload)
def emit_frequency_data(**params):
    '''Emits per-node band/channel/frequency data, forwards frequency
    settings to LiveTime listeners, and flags an IMDTabler recalculation.'''
    # 'global' declaration added: without it the assignment at the bottom
    # created a dead local and the heartbeat never saw the flag set
    global heartbeat_imdtabler_flag
    profile_freqs = json.loads(getCurrentProfile().frequencies)
    fdata = []
    num_freqs = min(RACE.num_nodes, len(profile_freqs["f"]))
    for idx in range(num_freqs):
        fdata.append({
            'band': profile_freqs["b"][idx],
            'channel': profile_freqs["c"][idx],
            'frequency': profile_freqs["f"][idx]
        })
    emit_payload = {
        'vtx_table': RHUtils.VTX_TABLE,
        'fdata': fdata
    }
    if ('nobroadcast' in params):
        emit('frequency_data', emit_payload)
    else:
        SOCKET_IO.emit('frequency_data', emit_payload)
    # send changes to LiveTime
    for n in range(num_freqs):
        # if session.get('LiveTime', False):
        SOCKET_IO.emit('frequency_set', {
            'node': n,
            'frequency': profile_freqs["f"][n]
        })
    # if IMDTabler.java available then trigger call to
    # 'emit_imdtabler_rating' via heartbeat function:
    if Use_imdtabler_jar_flag:
        heartbeat_imdtabler_flag = True
def emit_node_data(**params):
    '''Emits node data.'''
    nodes = INTERFACE.nodes
    # report '' when peak < nadir (no valid readings captured yet)
    payload = {
        'node_peak_rssi': [n.node_peak_rssi if n.node_peak_rssi >= n.node_nadir_rssi else '' for n in nodes],
        'node_nadir_rssi': [n.node_nadir_rssi if n.node_peak_rssi >= n.node_nadir_rssi else '' for n in nodes],
        'pass_peak_rssi': [n.pass_peak_rssi if n.pass_peak_rssi >= n.pass_nadir_rssi else '' for n in nodes],
        'pass_nadir_rssi': [n.pass_nadir_rssi if n.pass_peak_rssi >= n.pass_nadir_rssi else '' for n in nodes],
        'pass_count': [n.pass_count for n in nodes]
    }
    if 'nobroadcast' in params:
        emit('node_data', payload)
    else:
        SOCKET_IO.emit('node_data', payload)
def emit_environmental_data(**params):
    '''Emits environmental data.'''
    payload = [{sensor.name: sensor.getReadings()} for sensor in SENSORS]
    Events.trigger(Evt.SENSOR_UPDATE, {'sensors': payload})
    if 'nobroadcast' in params:
        emit('environmental_data', payload)
    else:
        SOCKET_IO.emit('environmental_data', payload)
    # check sensor readings against configured min/max alarm thresholds
    for sensor in SENSORS:
        sensor_config = rhconfig.SENSORS.get(sensor.url, {})
        min_alarms = sensor_config.get('min_alarms', {})
        max_alarms = sensor_config.get('max_alarms', {})
        for measure, reading in sensor.getReadings().items():
            val = reading['value']
            unit = reading['units']
            msg_key = sensor.name+' '+measure
            low = min_alarms.get(measure, None)
            if low is not None and val <= low:
                emit_priority_message("{}: {} {}{} <= {}{}".format(sensor.name, measure, val, unit, low, unit), key=msg_key)
            high = max_alarms.get(measure, None)
            if high is not None and val >= high:
                emit_priority_message("{}: {} {}{} >= {}{}".format(sensor.name, measure, val, unit, high, unit), key=msg_key)
def emit_scan_data(node):
    '''Broadcasts a node's frequency-scan results, ordered by frequency.'''
    freqs = sorted(node.scan_data)
    rssis = [node.scan_data[f] for f in freqs]
    SOCKET_IO.emit('scan_data', {'node' : node.index, 'frequency' : freqs, 'rssi' : rssis})
def emit_enter_and_exit_at_levels(**params):
    '''Emits enter-at and exit-at levels for nodes.'''
    profile = getCurrentProfile()
    enter_levels = json.loads(profile.enter_ats)["v"]
    exit_levels = json.loads(profile.exit_ats)["v"]
    payload = {
        'enter_at_levels': enter_levels[:RACE.num_nodes],
        'exit_at_levels': exit_levels[:RACE.num_nodes],
        # nodes whose manager advertises the RHFEAT_PH flag report 'lifetime' levels
        'level_type': ['lifetime' if hasattr(node.manager, 'rhfeature_flags') and node.manager.rhfeature_flags&RHFEAT_PH else 'rssi' for node in INTERFACE.nodes],
        'ai_calibrate': [node.ai_calibrate for node in INTERFACE.nodes],
        'calibrate': [node.calibrate for node in INTERFACE.nodes]
    }
    if 'nobroadcast' in params:
        emit('enter_and_exit_at_levels', payload)
    else:
        SOCKET_IO.emit('enter_and_exit_at_levels', payload)
def emit_cluster_status(**params):
    '''Emits cluster status information.'''
    if not CLUSTER:
        return
    status_info = CLUSTER.getClusterStatusInfo()
    if 'nobroadcast' in params:
        emit('cluster_status', status_info)
    else:
        SOCKET_IO.emit('cluster_status', status_info)
def emit_start_thresh_lower_amount(**params):
    '''Emits current start_thresh_lower_amount.'''
    payload = {
        'start_thresh_lower_amount': RHDATA.get_option('startThreshLowerAmount'),
    }
    if 'nobroadcast' in params:
        emit('start_thresh_lower_amount', payload)
    elif 'noself' in params:
        # broadcast to everyone except the originating client
        emit('start_thresh_lower_amount', payload, broadcast=True, include_self=False)
    else:
        SOCKET_IO.emit('start_thresh_lower_amount', payload)
def emit_start_thresh_lower_duration(**params):
    '''Emits current start_thresh_lower_duration.'''
    payload = {
        'start_thresh_lower_duration': RHDATA.get_option('startThreshLowerDuration'),
    }
    if 'nobroadcast' in params:
        emit('start_thresh_lower_duration', payload)
    elif 'noself' in params:
        # broadcast to everyone except the originating client
        emit('start_thresh_lower_duration', payload, broadcast=True, include_self=False)
    else:
        SOCKET_IO.emit('start_thresh_lower_duration', payload)
def emit_node_tuning(**params):
    '''Emits node tuning values (profile list plus the current profile's
    name and description).'''
    profiles = RHDATA.get_profiles()  # fetch once; the original queried twice
    tune_val = getCurrentProfile()
    emit_payload = {
        'profile_ids': [profile.id for profile in profiles],
        'profile_names': [profile.name for profile in profiles],
        'current_profile': RHDATA.get_optionInt('currentProfile'),
        'profile_name': tune_val.name,
        'profile_description': tune_val.description
    }
    if ('nobroadcast' in params):
        emit('node_tuning', emit_payload)
    else:
        SOCKET_IO.emit('node_tuning', emit_payload)
def emit_language(**params):
    '''Emits the current language selection and the available languages.'''
    payload = {
        'language': RHDATA.get_option("currentLanguage"),
        'languages': LANGUAGE.getLanguages()
    }
    if 'nobroadcast' in params:
        emit('language', payload)
    else:
        SOCKET_IO.emit('language', payload)
def emit_all_languages(**params):
    '''Emits full language dictionary.'''
    payload = {
        'languages': LANGUAGE.getAllLanguages()
    }
    if 'nobroadcast' in params:
        emit('all_languages', payload)
    else:
        SOCKET_IO.emit('all_languages', payload)
def emit_min_lap(**params):
    '''Emits current minimum lap.'''
    payload = {
        'min_lap': RHDATA.get_option('MinLapSec'),
        'min_lap_behavior': RHDATA.get_optionInt("MinLapBehavior")
    }
    if 'nobroadcast' in params:
        emit('min_lap', payload)
    else:
        SOCKET_IO.emit('min_lap', payload)
def emit_race_format(**params):
    '''Emits race format values.'''
    race_format = getCurrentRaceFormat()
    is_db_race_format = RHRaceFormat.isDbBased(race_format)
    # non-DB formats, and formats already used by saved races, are locked
    locked = not is_db_race_format or RHDATA.savedRaceMetas_has_raceFormat(race_format.id)
    all_formats = RHDATA.get_raceFormats()
    payload = {
        'format_ids': [fmt.id for fmt in all_formats],
        'format_names': [__(fmt.name) for fmt in all_formats],
        'current_format': race_format.id if is_db_race_format else None,
        'format_name': __(race_format.name),
        'race_mode': race_format.race_mode,
        'race_time_sec': race_format.race_time_sec,
        'lap_grace_sec': race_format.lap_grace_sec,
        'start_delay_min': race_format.start_delay_min,
        'start_delay_max': race_format.start_delay_max,
        'staging_tones': race_format.staging_tones,
        'number_laps_win': race_format.number_laps_win,
        'win_condition': race_format.win_condition,
        'start_behavior': race_format.start_behavior,
        'team_racing_mode': 1 if race_format.team_racing_mode else 0,
        'locked': locked
    }
    if 'nobroadcast' in params:
        emit('race_format', payload)
    else:
        SOCKET_IO.emit('race_format', payload)
    # format changes can affect ranking, so refresh the leaderboard too
    emit_current_leaderboard()
def emit_race_formats(**params):
    '''Emits all race formats.'''
    payload = {}
    for fmt in RHDATA.get_raceFormats():
        payload[fmt.id] = {
            'format_name': __(fmt.name),
            'race_mode': fmt.race_mode,
            'race_time_sec': fmt.race_time_sec,
            'lap_grace_sec': fmt.lap_grace_sec,
            'start_delay_min': fmt.start_delay_min,
            'start_delay_max': fmt.start_delay_max,
            'staging_tones': fmt.staging_tones,
            'number_laps_win': fmt.number_laps_win,
            'win_condition': fmt.win_condition,
            'start_behavior': fmt.start_behavior,
            'team_racing_mode': 1 if fmt.team_racing_mode else 0,
            # formats referenced by saved races are locked from editing
            'locked': True if RHDATA.savedRaceMetas_has_raceFormat(fmt.id) else False
        }
    if 'nobroadcast' in params:
        emit('race_formats', payload)
    else:
        SOCKET_IO.emit('race_formats', payload)
def build_laps_list(active_race=RACE):
    '''Builds the per-node lap list payload for the Race page.

    For each node: non-deleted (and "late") laps with display numbers,
    formatted times, split data, and the index of the fastest lap; plus
    pilot info and the node's finished flag.
    '''
    current_laps = []
    for node_idx in range(active_race.num_nodes):
        node_laps = []
        fastest_lap_time = float("inf")
        fastest_lap_index = None
        last_lap_id = -1
        for idx, lap in enumerate(active_race.node_passes[node_idx]):
            if (not lap['deleted']) or lap.get('late_lap', False):
                if not lap.get('late_lap', False):
                    # NOTE(review): if the first shown pass is a deleted
                    # "late" lap, lap_number below is referenced before
                    # assignment — confirm whether that ordering can occur
                    last_lap_id = lap_number = lap['lap_number']
                    if active_race.format and active_race.format.start_behavior == RHRace.StartBehavior.FIRST_LAP:
                        # in first-lap mode the hole-shot counts, so display 1-based
                        lap_number += 1
                    splits = get_splits(active_race, node_idx, lap['lap_number'], True)
                    # idx > 0 excludes the hole-shot from fastest-lap tracking
                    if lap['lap_time'] > 0 and idx > 0 and lap['lap_time'] < fastest_lap_time:
                        fastest_lap_time = lap['lap_time']
                        fastest_lap_index = idx
                else:
                    # late laps display with sentinel number -1 and no splits
                    lap_number = -1
                    splits = []
                node_laps.append({
                    'lap_index': idx,
                    'lap_number': lap_number,
                    'lap_raw': lap['lap_time'],
                    'lap_time': lap['lap_time_formatted'],
                    'lap_time_stamp': lap['lap_time_stamp'],
                    'splits': splits,
                    'late_lap': lap.get('late_lap', False)
                })
        # show split data for the (incomplete) lap currently in progress
        splits = get_splits(active_race, node_idx, last_lap_id+1, False)
        if splits:
            node_laps.append({
                'lap_number': last_lap_id+1,
                'lap_time': '',
                'lap_time_stamp': 0,
                'splits': splits
            })
        if node_idx in active_race.node_pilots and active_race.node_pilots[node_idx]:
            pilot = active_race.node_pilots[node_idx]
            pilot_data = {
                'id': pilot.id,
                'name': pilot.name,
                'callsign': pilot.callsign
            }
        else:
            pilot_data = None
        current_laps.append({
            'laps': node_laps,
            'fastest_lap_index': fastest_lap_index,
            'pilot': pilot_data,
            'finished_flag': active_race.get_node_finished_flag(node_idx)
        })
    # NOTE(review): literal key 'node_index' wraps the whole list (not one
    # entry per node) — presumably what the front-end expects; confirm
    current_laps = {
        'node_index': current_laps
    }
    return current_laps
def emit_current_laps(**params):
    '''Emits current laps.'''
    payload = {
        'current': build_laps_list(RACE)
    }
    if LAST_RACE is not None:
        # include the previous race so the page can keep displaying it
        payload['last_race'] = build_laps_list(LAST_RACE)
    if 'nobroadcast' in params:
        emit('current_laps', payload)
    else:
        SOCKET_IO.emit('current_laps', payload)
def get_splits(active_race, node_idx, lap_id, lapCompleted):
    '''Return split-pass payloads for the given node and lap.

    For each available split (secondary) timer: emit the recorded split data
    if present; emit a '-' placeholder if the lap is complete but the split
    was missed; otherwise stop (no further splits yet for an incomplete lap).
    '''
    splits = []
    if CLUSTER:
        for secondary_index in range(len(CLUSTER.secondaries)):
            if CLUSTER.isSplitSecondaryAvailable(secondary_index):
                # find the recorded split (if any) for this lap/timer combo
                matches = [s for s in active_race.node_splits[node_idx]
                           if s['lap_id'] == lap_id and s['split_id'] == secondary_index]
                if matches:
                    # bug fix: original accessed '.split_time' on the filtered
                    # *list*, which raised AttributeError whenever a split
                    # matched; use the first matching record (a dict, per the
                    # filter's subscript access) instead
                    split = matches[0]
                    split_payload = {
                        'split_id': secondary_index,
                        'split_raw': split['split_time'],
                        'split_time': split['split_time_formatted'],
                        'split_speed': '{0:.2f}'.format(split['split_speed']) if split['split_speed'] is not None else None
                    }
                elif lapCompleted:
                    split_payload = {
                        'split_id': secondary_index,
                        'split_time': '-'
                    }
                else:
                    break
                splits.append(split_payload)
    return splits
def emit_race_list(**params):
    '''Emits race listing'''
    heats = {}
    for heat in RHDATA.get_heats():
        if not RHDATA.savedRaceMetas_has_heat(heat.id):
            continue  # only list heats that have saved races
        heatnote = RHDATA.get_heat_note(heat.id)
        rounds = {}
        for race in RHDATA.get_savedRaceMetas_by_heat(heat.id):
            pilotraces = []
            for pilotrace in RHDATA.get_savedPilotRaces_by_savedRaceMeta(race.id):
                laps = [{
                    'id': lap.id,
                    'lap_time_stamp': lap.lap_time_stamp,
                    'lap_time': lap.lap_time,
                    'lap_time_formatted': lap.lap_time_formatted,
                    'source': lap.source,
                    'deleted': lap.deleted
                } for lap in RHDATA.get_savedRaceLaps_by_savedPilotRace(pilotrace.id)]
                pilot_data = RHDATA.get_pilot(pilotrace.pilot_id)
                pilotraces.append({
                    'pilotrace_id': pilotrace.id,
                    'callsign': pilot_data.callsign if pilot_data else None,
                    'pilot_id': pilotrace.pilot_id,
                    'node_index': pilotrace.node_index,
                    'history_values': json.loads(pilotrace.history_values),
                    'history_times': json.loads(pilotrace.history_times),
                    'lifetime_values': json.loads(pilotrace.lifetime_values) if pilotrace.lifetime_values else None,
                    'lifetime_times': json.loads(pilotrace.lifetime_times) if pilotrace.lifetime_times else None,
                    'laps': laps,
                    'enter_at': pilotrace.enter_at,
                    'exit_at': pilotrace.exit_at,
                })
            rounds[race.round_id] = {
                'race_id': race.id,
                'class_id': race.class_id,
                'format_id': race.format_id,
                'start_time': race.start_time,
                'start_time_formatted': race.start_time_formatted,
                'pilotraces': pilotraces
            }
        heats[heat.id] = {
            'heat_id': heat.id,
            'note': heatnote,
            'rounds': rounds,
        }
    emit_payload = {
        'heats': heats,
    }
    if 'nobroadcast' in params:
        emit('race_list', emit_payload)
    else:
        SOCKET_IO.emit('race_list', emit_payload)
def emit_result_data(**params):
    '''Kick off a non-blocking greenlet to generate and emit result data.'''
    # pass the requesting client's sid only when inside a request context
    spawn_args = (params, request.sid) if request else (params,)
    gevent.spawn(emit_result_data_thread, *spawn_args)
@catchLogExceptionsWrapper
def emit_result_data_thread(params, sid=None):
    '''Background worker: emit cached result data, optionally to one client.'''
    with APP.test_request_context():
        payload = PAGE_CACHE.get_cache()
        targeted = 'nobroadcast' in params and sid is not None
        if targeted:
            emit('result_data', payload, namespace='/', room=sid)
        else:
            SOCKET_IO.emit('result_data', payload, namespace='/')
def emit_current_leaderboard(**params):
    '''Emits leaderboard.

    Payload contains the current race's heat/status/leaderboards, plus the
    last race's (if any).
    '''
    emit_payload = {
        'current': {}
    }
    # current
    emit_payload['current']['heat'] = RACE.current_heat
    emit_payload['current']['heat_note'] = RHDATA.get_heat_note(RACE.current_heat)
    emit_payload['current']['status_msg'] = RACE.status_message
    emit_payload['current']['leaderboard'] = RACE.results
    emit_payload['current']['team_leaderboard'] = RACE.team_results
    # cache
    if LAST_RACE is not None:
        emit_payload['last_race'] = {}
        emit_payload['last_race']['status_msg'] = LAST_RACE.status_message
        emit_payload['last_race']['leaderboard'] = LAST_RACE.results
        emit_payload['last_race']['heat'] = LAST_RACE.current_heat
        emit_payload['last_race']['heat_note'] = RHDATA.get_heat_note(LAST_RACE.current_heat)
        # fix: guard against a missing race format (format can be falsy, as
        # build_laps_list's 'active_race.format and ...' check shows)
        if LAST_RACE.format and LAST_RACE.format.team_racing_mode:
            emit_payload['last_race']['team_leaderboard'] = LAST_RACE.team_results
    if ('nobroadcast' in params):
        emit('leaderboard', emit_payload)
    else:
        SOCKET_IO.emit('leaderboard', emit_payload)
def emit_heat_data(**params):
    '''Emits heat data.'''
    current_heats = {}
    for heat in RHDATA.get_heats():
        seat_pilots = [hn.pilot_id for hn in RHDATA.get_heatNodes_by_heat(heat.id)]
        current_heats[heat.id] = {
            'pilots': seat_pilots,
            'note': heat.note,
            'heat_id': heat.id,
            'class_id': heat.class_id,
            # heats with saved races cannot be edited
            'locked': bool(RHDATA.savedRaceMetas_has_heat(heat.id))}
    current_classes = [{
        'id': race_class.id,
        'name': race_class.name,
        'description': race_class.description
    } for race_class in RHDATA.get_raceClasses()]
    pilots = [{
        'pilot_id': pilot.id,
        'callsign': pilot.callsign,
        'name': pilot.name
    } for pilot in RHDATA.get_pilots()]
    if RHDATA.get_option('pilotSort') == 'callsign':
        pilots.sort(key=lambda x: (x['callsign'], x['name']))
    else:
        pilots.sort(key=lambda x: (x['name'], x['callsign']))
    emit_payload = {
        'heats': current_heats,
        'pilot_data': pilots,
        'classes': current_classes,
        'pilotSort': RHDATA.get_option('pilotSort'),
    }
    if 'nobroadcast' in params:
        emit('heat_data', emit_payload)
    elif 'noself' in params:
        emit('heat_data', emit_payload, broadcast=True, include_self=False)
    else:
        SOCKET_IO.emit('heat_data', emit_payload)
def emit_class_data(**params):
    '''Emits class data.'''
    current_classes = [{
        'id': race_class.id,
        'name': race_class.name,
        'description': race_class.description,
        'format': race_class.format_id,
        # classes with saved races cannot be edited
        'locked': RHDATA.savedRaceMetas_has_raceClass(race_class.id)
    } for race_class in RHDATA.get_raceClasses()]
    formats = [{
        'id': race_format.id,
        'name': race_format.name,
        'race_mode': race_format.race_mode,
        'race_time_sec': race_format.race_time_sec,
        'lap_grace_sec': race_format.lap_grace_sec,
        'start_delay_min': race_format.start_delay_min,
        'start_delay_max': race_format.start_delay_max,
        'staging_tones': race_format.staging_tones,
        'number_laps_win': race_format.number_laps_win,
        'win_condition': race_format.win_condition,
        'team_racing_mode': race_format.team_racing_mode,
        'start_behavior': race_format.start_behavior
    } for race_format in RHDATA.get_raceFormats()]
    emit_payload = {
        'classes': current_classes,
        'formats': formats
    }
    if 'nobroadcast' in params:
        emit('class_data', emit_payload)
    elif 'noself' in params:
        emit('class_data', emit_payload, broadcast=True, include_self=False)
    else:
        SOCKET_IO.emit('class_data', emit_payload)
def emit_pilot_data(**params):
    '''Emits pilot data.

    Merges locally-stored pilot records with data fetched from each pilot's
    URL (if set); web values may update the stored record, but local
    name/callsign override unless prefixed with '~'.
    '''
    pilot_objs = RHDATA.get_pilots()
    # prefetch web data
    def fetch_web_data(urls):
        # fetch all pilot URLs in one greenlet; returns {url: data}
        data = {}
        for url in urls:
            data[url] = web.get_pilot_data(url)
        return data
    g = gevent.spawn(fetch_web_data, [pilot.url for pilot in pilot_objs if pilot.url])
    # NOTE(review): every pilot id maps to the same greenlet 'g' (one batch
    # fetch); the per-pilot dict just records which pilots have a URL
    web_gs = {}
    for pilot in pilot_objs:
        if pilot.url:
            web_gs[pilot.id] = g
    pilots_list = []
    for pilot in pilot_objs:
        opts_str = '' # create team-options string for each pilot, with current team selected
        for name in TEAM_NAMES_LIST:
            opts_str += '<option value="' + name + '"'
            if name == pilot.team:
                opts_str += ' selected'
            opts_str += '>' + name + '</option>'
        # pilots with saved races cannot be deleted
        locked = RHDATA.savedPilotRaces_has_pilot(pilot.id)
        pilot_data = {
            'pilot_id': pilot.id,
            'name': pilot.name,
            'callsign': pilot.callsign,
            'url': pilot.url if pilot.url else '',
            'team': pilot.team,
            'phonetic': pilot.phonetic,
            'team_options': opts_str,
            'locked': locked,
        }
        if pilot.url:
            # block until the batch web fetch completes, then merge
            gevent.wait([web_gs[pilot.id]])
            web_data = web_gs[pilot.id].value
            pilot_web_data = web_data[pilot.url]
            # persist any name/callsign changes coming from the web source
            changed_pilot_data = {}
            if 'name' in pilot_web_data and pilot_web_data['name'] != pilot_data['name']:
                changed_pilot_data['name'] = pilot_web_data['name']
            if 'callsign' in pilot_web_data and pilot_web_data['callsign'] != pilot_data['callsign']:
                changed_pilot_data['callsign'] = pilot_web_data['callsign']
            if changed_pilot_data:
                changed_pilot_data['pilot_id'] = pilot.id
                RHDATA.alter_pilot(changed_pilot_data)
            pilot_data.update(pilot_web_data)
            # local overrides
            if not pilot.name.startswith('~'):
                pilot_data['name'] = pilot.name
            if not pilot.callsign.startswith('~'):
                pilot_data['callsign'] = pilot.callsign
        if led_manager.isEnabled():
            pilot_data['color'] = pilot.color
        pilots_list.append(pilot_data)
    if RHDATA.get_option('pilotSort') == 'callsign':
        pilots_list.sort(key=lambda x: (x['callsign'], x['name']))
    else:
        pilots_list.sort(key=lambda x: (x['name'], x['callsign']))
    emit_payload = {
        'pilots': pilots_list
    }
    if ('nobroadcast' in params):
        emit('pilot_data', emit_payload)
    elif ('noself' in params):
        emit('pilot_data', emit_payload, broadcast=True, include_self=False)
    else:
        SOCKET_IO.emit('pilot_data', emit_payload)
def emit_current_heat(**params):
    '''Emits the current heat.'''
    heat_data = RHDATA.get_heat(RACE.current_heat)
    heatNode_data = {}
    for heatNode in RHDATA.get_heatNodes_by_heat(RACE.current_heat):
        entry = {
            'pilot_id': heatNode.pilot_id,
            'callsign': None,
            'heatNodeColor': heatNode.color,
            'pilotColor': None,
            'activeColor': None
        }
        pilot = RHDATA.get_pilot(heatNode.pilot_id)
        if pilot:
            entry['callsign'] = pilot.callsign
            entry['pilotColor'] = pilot.color
        if led_manager.isEnabled():
            entry['activeColor'] = led_manager.getDisplayColor(heatNode.node_index)
        heatNode_data[heatNode.node_index] = entry
    # format comes from the heat's class, if one is assigned
    heat_format = None
    if heat_data.class_id != RHUtils.CLASS_ID_NONE:
        heat_format = RHDATA.get_raceClass(heat_data.class_id).format_id
    emit_payload = {
        'current_heat': RACE.current_heat,
        'heatNodes': heatNode_data,
        'callsign': [],   # legacy key, no longer populated
        'pilot_ids': [],  # legacy key, no longer populated
        'heat_note': heat_data.note,
        'heat_format': heat_format,
        'heat_class': heat_data.class_id
    }
    if 'nobroadcast' in params:
        emit('current_heat', emit_payload)
    else:
        SOCKET_IO.emit('current_heat', emit_payload)
def emit_race_status_message(**params):
    '''Emits given team-racing status info.'''
    logger.info("Race status message: {}".format(RACE.status_message))
    payload = {'team_laps_str': RACE.status_message}
    if 'nobroadcast' in params:
        emit('race_status_message', payload)
    else:
        SOCKET_IO.emit('race_status_message', payload)
def emit_phonetic_data(pilot_id, lap_id, lap_time, lap_time_stamp, team_name, team_laps, leader_flag=False, **params):
    '''Emits phonetic data.'''
    spoken_time = RHUtils.phonetictime_format(lap_time, RHDATA.get_option('timeFormatPhonetic'))
    pilot = RHDATA.get_pilot(pilot_id)
    payload = {
        'pilot': pilot.phonetic,
        'callsign': pilot.callsign,
        'pilot_id': pilot.id,
        'lap': lap_id,
        'raw_time': lap_time,
        'raw_time_stamp': lap_time_stamp,
        'phonetic': spoken_time,
        'team_name' : team_name,
        'team_laps' : team_laps,
        'leader_flag' : leader_flag
    }
    if 'nobroadcast' in params:
        emit('phonetic_data', payload)
    else:
        SOCKET_IO.emit('phonetic_data', payload)
def emit_first_pass_registered(node_idx, **params):
    '''Emits when first pass (lap 0) is registered during a race.'''
    payload = {
        'node_index': node_idx,
    }
    # fire the event before notifying clients
    Events.trigger(Evt.RACE_FIRST_PASS, {
        'node_index': node_idx,
    })
    if 'nobroadcast' in params:
        emit('first_pass_registered', payload)
    else:
        SOCKET_IO.emit('first_pass_registered', payload)
def emit_phonetic_text(text_str, domain=False, winner_flag=False, **params):
    '''Emits given phonetic text.'''
    payload = {
        'text': text_str,
        'domain': domain,
        'winner_flag': winner_flag
    }
    if 'nobroadcast' in params:
        emit('phonetic_text', payload)
    else:
        SOCKET_IO.emit('phonetic_text', payload)
def emit_phonetic_split(pilot_id, split_id, split_time, **params):
    '''Emits phonetic split-pass data.'''
    pilot = RHDATA.get_pilot(pilot_id)
    # fall back to callsign when no phonetic name is set
    spoken_name = pilot.phonetic or pilot.callsign
    spoken_time = RHUtils.phonetictime_format(split_time, RHDATA.get_option('timeFormatPhonetic'))
    payload = {
        'pilot_name': spoken_name,
        'split_id': str(split_id+1),
        'split_time': spoken_time
    }
    if 'nobroadcast' in params:
        emit('phonetic_split_call', payload)
    else:
        SOCKET_IO.emit('phonetic_split_call', payload)
def emit_split_pass_info(pilot_id, split_id, split_time):
    '''Refresh race-page laps and announce the split pass.'''
    emit_current_laps()  # update all laps on the race page
    emit_phonetic_split(pilot_id, split_id, split_time)
def emit_enter_at_level(node, **params):
    '''Emits enter-at level for given node.'''
    payload = {
        'node_index': node.index,
        'level': node.enter_at_level
    }
    if 'nobroadcast' in params:
        emit('node_enter_at_level', payload)
    else:
        SOCKET_IO.emit('node_enter_at_level', payload)
def emit_exit_at_level(node, **params):
    '''Emits exit-at level for given node.'''
    payload = {
        'node_index': node.index,
        'level': node.exit_at_level
    }
    if 'nobroadcast' in params:
        emit('node_exit_at_level', payload)
    else:
        SOCKET_IO.emit('node_exit_at_level', payload)
def emit_node_crossing_change(node, crossing_flag, ts, rssi, **params):
    '''Emits crossing-flag change for given node.'''
    payload = {
        'node_index': node.index,
        'crossing_flag': crossing_flag,
        'timestamp': ts,
        'rssi': rssi
    }
    if 'nobroadcast' in params:
        emit('node_crossing_change', payload)
    else:
        SOCKET_IO.emit('node_crossing_change', payload)
def emit_cluster_connect_change(connect_flag, **params):
    '''Emits connect/disconnect tone for cluster timer.'''
    payload = {
        'connect_flag': connect_flag
    }
    if 'nobroadcast' in params:
        emit('cluster_connect_change', payload)
    else:
        SOCKET_IO.emit('cluster_connect_change', payload)
def emit_callouts():
    '''Emit configured voice callouts (if any) to the requesting client.'''
    callouts = RHDATA.get_option('voiceCallouts')
    if callouts:
        emit('callouts', json.loads(callouts))
def emit_imdtabler_page(**params):
    '''Emits IMDTabler page, using current profile frequencies.'''
    if Use_imdtabler_jar_flag:
        try:  # get IMDTabler version string
            # list-form invocation avoids shell parsing of the command string
            imdtabler_ver = subprocess.check_output(
                ['java', '-jar', IMDTABLER_JAR_NAME, '-v']).decode("utf-8").rstrip()
            profile_freqs = json.loads(getCurrentProfile().frequencies)
            fi_list = list(OrderedDict.fromkeys(profile_freqs['f'][:RACE.num_nodes]))  # remove duplicates
            # convert list of integers to list of strings, dropping zero entries
            fs_list = [str(val) for val in fi_list if val > 0]
            emit_imdtabler_data(fs_list, imdtabler_ver)
        except Exception:
            logger.exception('emit_imdtabler_page exception')
def emit_imdtabler_data(fs_list, imdtabler_ver=None, **params):
    '''Emits IMDTabler data for given frequencies.'''
    try:
        imdtabler_data = None
        if len(fs_list) > 2:  # if 3+ then invoke jar; get response
            # list-form invocation avoids shell parsing of the command string
            imdtabler_data = subprocess.check_output(
                ['java', '-jar', IMDTABLER_JAR_NAME, '-t'] + list(fs_list)).decode("utf-8")
    except Exception:
        imdtabler_data = None
        logger.exception('emit_imdtabler_data exception')
    emit_payload = {
        'freq_list': ' '.join(fs_list),
        'table_data': imdtabler_data,
        'version_str': imdtabler_ver
    }
    if ('nobroadcast' in params):
        emit('imdtabler_data', emit_payload)
    else:
        SOCKET_IO.emit('imdtabler_data', emit_payload)
def emit_imdtabler_rating():
    '''Emits IMDTabler rating for current profile frequencies.'''
    try:
        profile_freqs = json.loads(getCurrentProfile().frequencies)
        imd_val = None
        fi_list = list(OrderedDict.fromkeys(profile_freqs['f'][:RACE.num_nodes]))  # remove duplicates
        # convert list of integers to list of strings, dropping zero entries
        fs_list = [str(val) for val in fi_list if val > 0]
        if len(fs_list) > 2:
            # invoke jar; get response (list form avoids shell parsing)
            imd_val = subprocess.check_output(
                ['java', '-jar', IMDTABLER_JAR_NAME, '-r'] + fs_list).decode("utf-8").rstrip()
    except Exception:
        imd_val = None
        logger.exception('emit_imdtabler_rating exception')
    emit_payload = {
        'imd_rating': imd_val
    }
    SOCKET_IO.emit('imdtabler_rating', emit_payload)
def emit_vrx_list(*args, **params):
    '''Emit the list of connected VRx devices (or a disabled payload).'''
    if vrx_controller:
        payload = {
            'enabled': True,
            'connection': True,
            'vrx': {key: vrx_controller.rx_data[key] for key in vrx_controller.rx_data}
        }
    else:
        payload = {
            'enabled': False,
            'connection': False
        }
    if 'nobroadcast' in params:
        emit('vrx_list', payload)
    else:
        SOCKET_IO.emit('vrx_list', payload)
@SOCKET_IO.on('check_bpillfw_file')
@catchLogExceptionsWrapper
def check_bpillfw_file(data):
    '''Socket.IO handler: inspect a node-firmware '.bin' file and report its
    version/processor/build info to the update page; enables the update
    button on success.'''
    fileStr = data['src_file_str']
    logger.debug("Checking node firmware file: " + fileStr)
    dataStr = None
    try:
        dataStr = stm32loader.load_source_file(fileStr, False)
    except Exception as ex:
        SOCKET_IO.emit('upd_set_info_text', "Error reading firmware file: {}<br><br><br><br>".format(ex))
        logger.debug("Error reading file '{}' in 'check_bpillfw_file()': {}".format(fileStr, ex))
        return
    try:  # find version, processor-type and build-timestamp strings in firmware '.bin' file
        rStr = RHUtils.findPrefixedSubstring(dataStr, INTERFACE.FW_VERSION_PREFIXSTR, \
                                             INTERFACE.FW_TEXT_BLOCK_SIZE)
        fwVerStr = rStr if rStr else "(unknown)"
        fwRTypStr = RHUtils.findPrefixedSubstring(dataStr, INTERFACE.FW_PROCTYPE_PREFIXSTR, \
                                             INTERFACE.FW_TEXT_BLOCK_SIZE)
        fwTypStr = (fwRTypStr + ", ") if fwRTypStr else ""
        rStr = RHUtils.findPrefixedSubstring(dataStr, INTERFACE.FW_BUILDDATE_PREFIXSTR, \
                                             INTERFACE.FW_TEXT_BLOCK_SIZE)
        if rStr:
            fwTimStr = rStr
            # append build time to build date, if present
            rStr = RHUtils.findPrefixedSubstring(dataStr, INTERFACE.FW_BUILDTIME_PREFIXSTR, \
                                                 INTERFACE.FW_TEXT_BLOCK_SIZE)
            if rStr:
                fwTimStr += " " + rStr
        else:
            fwTimStr = "unknown"
        fileSize = len(dataStr)
        logger.debug("Node update firmware file size={}, version={}, {}build timestamp: {}".\
                     format(fileSize, fwVerStr, fwTypStr, fwTimStr))
        infoStr = "Firmware update file size = {}<br>".format(fileSize) + \
                  "Firmware update version: {} ({}Build timestamp: {})<br><br>".\
                  format(fwVerStr, fwTypStr, fwTimStr)
        # compare against the firmware currently reported by the first node manager
        curNodeStr = INTERFACE.node_managers[0].firmware_version_str if len(INTERFACE.node_managers) else None
        if curNodeStr:
            tsStr = INTERFACE.node_managers[0].firmware_timestamp_str
            if tsStr:
                curRTypStr = INTERFACE.node_managers[0].firmware_proctype_str
                ptStr = (curRTypStr + ", ") if curRTypStr else ""
                curNodeStr += " ({}Build timestamp: {})".format(ptStr, tsStr)
        else:
            curRTypStr = None
            curNodeStr = "(unknown)"
        infoStr += "Current firmware version: " + curNodeStr
        # warn (but still allow) when processor types differ
        if fwRTypStr and curRTypStr and fwRTypStr != curRTypStr:
            infoStr += "<br><br><b>Warning</b>: Firmware file processor type ({}) does not match current ({})".\
                       format(fwRTypStr, curRTypStr)
        SOCKET_IO.emit('upd_set_info_text', infoStr)
        SOCKET_IO.emit('upd_enable_update_button')
    except Exception as ex:
        SOCKET_IO.emit('upd_set_info_text', "Error processing firmware file: {}<br><br><br><br>".format(ex))
        logger.exception("Error processing file '{}' in 'check_bpillfw_file()'".format(fileStr))
@SOCKET_IO.on('do_bpillfw_update')
@catchLogExceptionsWrapper
def do_bpillfw_update(data):
    '''Socket.IO handler: flash the S32_BPill node firmware, then restart the
    hardware interface and background threads regardless of outcome.'''
    srcStr = data['src_file_str']
    portStr = INTERFACE.fwupd_serial_port
    msgStr = "Performing S32_BPill update, port='{}', file: {}".format(portStr, srcStr)
    logger.info(msgStr)
    SOCKET_IO.emit('upd_messages_init', (msgStr + "\n"))
    stop_background_threads()
    gevent.sleep(0.1)
    try:
        jump_to_node_bootloader()
        INTERFACE.close()
        s32Logger = logging.getLogger("stm32loader")
        def doS32Log(msgStr):  # send message to update-messages window and log file
            SOCKET_IO.emit('upd_messages_append', msgStr)
            gevent.idle()  # do thread yield to allow display updates
            s32Logger.info(msgStr)
            gevent.idle()  # do thread yield to allow display updates
            log.wait_for_queue_empty()
        stm32loader.set_console_output_fn(doS32Log)
        successFlag = stm32loader.flash_file_to_stm32(portStr, srcStr)
        msgStr = "Node update " + ("succeeded; restarting interface" \
                                   if successFlag else "failed")
        logger.info(msgStr)
        SOCKET_IO.emit('upd_messages_append', ("\n" + msgStr))
    except Exception:
        # fix: was a bare 'except:', which also swallowed KeyboardInterrupt/
        # SystemExit (elsewhere in this file those are deliberately re-raised)
        logger.exception("Error in 'do_bpillfw_update()'")
    # always restore normal console output and bring the interface back up
    stm32loader.set_console_output_fn(None)
    gevent.sleep(0.2)
    logger.info("Reinitializing hardware interface")
    ui_server_messages.clear()
    initialize_hardware_interface()
    if RACE.num_nodes == 0:
        logger.warning('*** WARNING: NO RECEIVER NODES FOUND ***')
        set_ui_message(
            'node',
            "No receiver nodes found",
            header='Warning',
            subclass='none'
            )
        SOCKET_IO.emit('upd_messages_append', "\nWarning: No receiver nodes found")
    buildServerInfo()
    reportServerInfo()
    init_race_state()
    start_background_threads(True)
    SOCKET_IO.emit('upd_messages_finish')  # show 'Close' button
@SOCKET_IO.on('set_vrx_node')
@catchLogExceptionsWrapper
def set_vrx_node(data):
    '''Socket.IO handler: assign a VRx (by serial number) to a seat/node.'''
    vrx_id, node = data['vrx_id'], data['node']
    if not vrx_controller:
        logger.error("Can't set VRx {0} to node {1}: Controller unavailable".format(vrx_id, node))
        return
    vrx_controller.set_seat_number(serial_num=vrx_id, desired_seat_num=node)
    logger.info("Set VRx {0} to node {1}".format(vrx_id, node))
@catchLogExceptionsWrapper
def emit_pass_record(node, lap_number: int, lap_time_stamp: int):
    '''Emits 'pass_record' message (will be consumed by primary timer in cluster, livetime, etc).'''
    if CHORUS_API:
        CHORUS_API.emit_pass_record(node, lap_number, lap_time_stamp)
    # timestamp is converted to epoch time for the primary timer
    emit_cluster_msg_to_primary('pass_record', {
        'node': node.index,
        'frequency': node.frequency,
        'timestamp': lap_time_stamp + RACE.start_time_epoch_ms
    })
def emit_exporter_list():
    '''List Database Exporters'''
    exporters = [
        {'name': name, 'label': exp.label}
        for name, exp in export_manager.getExporters().items()
    ]
    emit('exporter_list', {'exporters': exporters})
#
# Program Functions
#
# declare/initialize variables for heartbeat functions
heartbeat_iter_tracker = 0  # counts heartbeat loop iterations; drives the modulo-based scheduling of periodic sub-tasks
heartbeat_imdtabler_flag = False  # when set, the heartbeat loop refreshes the displayed IMD rating
heartbeat_last_error_rep_time = millis_to_secs(ms_counter())  # last time interface comm errors were reported (seconds)
def heartbeat_thread_function():
    '''Emits current rssi data, etc.

    Runs forever as a greenlet; each iteration emits node heartbeat data and,
    on modulo-scheduled intervals (offset so they don't all fire at once),
    emits node/cluster/VRx/environment updates, starts a scheduled race, and
    reports interface errors.
    '''
    global heartbeat_iter_tracker, heartbeat_imdtabler_flag, heartbeat_last_error_rep_time
    gevent.sleep(0.010)  # allow time for connection handshake to terminate before emitting data
    while True:
        try:
            if CHORUS_API:
                CHORUS_API.emit_rssi('*')
            node_data = INTERFACE.get_heartbeat_json()
            SOCKET_IO.emit('heartbeat', node_data)
            heartbeat_iter_tracker += 1
            # update displayed IMD rating after freqs changed:
            if heartbeat_imdtabler_flag and \
                    (heartbeat_iter_tracker % HEARTBEAT_DATA_RATE_FACTOR) == 0:
                heartbeat_imdtabler_flag = False
                emit_imdtabler_rating()
            # round-robin scan data emission across nodes with scanning enabled
            scanners = [node for node in INTERFACE.nodes if node.scan_enabled]
            if scanners:
                SCANNER_UPDATE_FACTOR = 2
                scan_counter = heartbeat_iter_tracker % (SCANNER_UPDATE_FACTOR*len(scanners))
                if (scan_counter % SCANNER_UPDATE_FACTOR) == 1:
                    emit_scan_data(scanners[scan_counter//SCANNER_UPDATE_FACTOR])
            # emit rest of node data, but less often:
            if (heartbeat_iter_tracker % (4*HEARTBEAT_DATA_RATE_FACTOR)) == 0:
                emit_node_data()
            # emit cluster status less often:
            if (heartbeat_iter_tracker % (4*HEARTBEAT_DATA_RATE_FACTOR)) == (2*HEARTBEAT_DATA_RATE_FACTOR):
                emit_cluster_status()
            # collect vrx lock status
            if (heartbeat_iter_tracker % (10*HEARTBEAT_DATA_RATE_FACTOR)) == 1:
                if vrx_controller:
                    # if vrx_controller.has_connection
                    vrx_controller.get_seat_lock_status()
                    vrx_controller.request_variable_status()
            if (heartbeat_iter_tracker % (10*HEARTBEAT_DATA_RATE_FACTOR)) == 5:
                # emit display status with offset
                if vrx_controller:
                    emit_vrx_list()
            # emit environment data less often:
            if (heartbeat_iter_tracker % (20*HEARTBEAT_DATA_RATE_FACTOR)) == 2:
                SENSORS.update_environmental_data()
                emit_environmental_data()
            time_now = millis_to_secs(ms_counter())
            # check if race is to be started
            if RACE.scheduled:
                if time_now > RACE.scheduled_time:
                    on_stage_race()
                    RACE.scheduled = False
            # if any comm errors then log them (at defined intervals; faster if debug mode)
            if time_now > heartbeat_last_error_rep_time + \
                    (ERROR_REPORT_INTERVAL_SECS if not rhconfig.GENERAL['DEBUG'] \
                    else ERROR_REPORT_INTERVAL_SECS/10):
                heartbeat_last_error_rep_time = time_now
                rep_str = INTERFACE.get_intf_error_report_str()
                if rep_str:
                    logger.info(rep_str)
            gevent.sleep(0.500/HEARTBEAT_DATA_RATE_FACTOR)
        except KeyboardInterrupt:
            logger.info("Heartbeat thread terminated by keyboard interrupt")
            raise
        except SystemExit:
            raise
        except Exception:
            # keep the heartbeat alive on unexpected errors
            logger.exception('Exception in Heartbeat thread loop')
            gevent.sleep(0.500)
@catchLogExceptionsWrapper
def clock_check_thread_function():
    ''' Monitor system clock and adjust PROGRAM_START if significant jump detected.
        (This can happen if NTP synchronization occurs after server starts up.) '''
    global PROGRAM_START
    global serverInfoItems
    try:
        while True:
            gevent.sleep(10)
            if RACE.any_races_started:  # stop monitoring after any race started
                break
            sync_now = RHTimeFns.MonotonicEpochSync()
            diff_ms = sync_now.diff(PROGRAM_START)
            # only react to clock shifts larger than 30 seconds
            if abs(diff_ms) > 30000:
                PROGRAM_START.adjustBy(sync_now, diff_ms)
                logger.info("Adjusting PROGRAM_START for shift in system clock ({0} millis) to: {1}".\
                            format(diff_ms, PROGRAM_START.epoch_ms))
                # update values that will be reported if running as cluster timer
                serverInfoItems['prog_start_epoch'] = "{}".format(PROGRAM_START.epoch_ms)
                serverInfoItems['prog_start_time'] = str(datetime.fromtimestamp(millis_to_secs(PROGRAM_START.epoch_ms), tz=timezone.utc))
                if has_joined_cluster():
                    # notify the primary timer of the corrected start epoch
                    logger.debug("Emitting 'join_cluster_response' message with updated 'prog_start_epoch'")
                    emit_join_cluster_response()
    except KeyboardInterrupt:
        logger.info("clock_check_thread terminated by keyboard interrupt")
        raise
    except SystemExit:
        raise
    except Exception:
        logger.exception('Exception in clock_check_thread')
def ms_from_race_start():
    '''Return the number of milliseconds elapsed since the race started.'''
    return ms_counter() - RACE.start_time_ms
def ms_to_race_start():
    '''Return milliseconds relative to the scheduled race start, or None.

    Negative while the scheduled start is still in the future; None when no
    race is scheduled. (Docstring corrected: previously a copy-paste of
    ms_from_race_start's.)
    '''
    if RACE.scheduled:
        delta_time = millis_to_secs(ms_counter()) - RACE.scheduled_time
        milli_sec = delta_time * 1000.0
        return milli_sec
    else:
        return None
def ms_from_program_start():
    '''Return the elapsed milliseconds since the program started.'''
    return ms_counter() - PROGRAM_START.monotonic_ms
@catchLogExcDBCloseWrapper
def pass_record_callback(node, lap_race_time_ms: int, source):
    '''Handles pass records from the nodes.

    Routes split-timer passes to add_split(); otherwise validates the pass
    (race running, pilot assigned, frequency set, after race start), computes
    the lap time, applies min-lap and grace-period rules, records the lap,
    and triggers announcements / win-condition checks.
    '''
    # lap_race_time is lap timestamp relative to start time
    lap_timestamp_ms = lap_race_time_ms + RACE.start_time_ms
    if logger.getEffectiveLevel() <= logging.DEBUG:
        enter_fmtstr = RHUtils.time_format(node.enter_at_sample.timestamp, \
                                           RHDATA.get_option('timeFormat')) \
                       if node.enter_at_sample is not None else "-"
        exit_fmtstr = RHUtils.time_format(node.exit_at_sample.timestamp, \
                                          RHDATA.get_option('timeFormat')) \
                      if node.exit_at_sample is not None else "-"
        duration_fmtstr = (node.exit_at_sample.timestamp - node.enter_at_sample.timestamp) if node.enter_at_sample is not None and node.exit_at_sample is not None else "_"
        logger.debug('Raw pass record: node: {0}, lap timestamp: {1} (enter: {3}, exit: {4}, duration: {5}), source: {2}' \
                     .format(node.index+1, lap_race_time_ms, source, enter_fmtstr, exit_fmtstr, duration_fmtstr))
    node.pass_crossing_flag = False  # clear the "synchronized" version of the crossing flag
    emit_node_data()  # For updated triggers and peaks
    location_id, seat = get_local_location_id_and_seat(node)
    # location_id > 0 means this node is mapped to a split location, not the start/finish gate
    if location_id > 0:
        track = RHDATA.get_optionJson('trackLayout')
        add_split(location_id, seat, lap_race_time_ms, track)
        return
    profile_freqs = json.loads(getCurrentProfile().frequencies)
    if profile_freqs["f"][seat] != RHUtils.FREQUENCY_ID_NONE:
        # always count laps if race is running, otherwise test if lap should have counted before race end
        if RACE.race_status == RHRace.RaceStatus.RACING \
                or (RACE.race_status == RHRace.RaceStatus.DONE and \
                lap_timestamp_ms < RACE.end_time_ms):
            # Get the current pilot on the node
            pilot = RACE.node_pilots[seat]
            race_format = getCurrentRaceFormat()
            # reject passes before race start and with disabled (no-pilot) nodes
            if pilot or race_format is SECONDARY_RACE_FORMAT:
                if lap_timestamp_ms >= RACE.start_time_ms:
                    # if node EnterAt/ExitAt values need to be restored then do it soon
                    if node.start_thresh_lower_flag:
                        node.start_thresh_lower_time_ms = ms_counter()
                    lap_number = len(RACE.get_valid_laps()[seat])
                    if lap_number:  # This is a normal completed lap
                        # Find the time stamp of the last lap completed (including "late" laps for timing)
                        last_lap = RACE.get_valid_laps(True)[seat][-1]
                        last_lap_time_stamp = last_lap['lap_time_stamp']
                        # New lap time is the difference between the current time stamp and the last
                        lap_time_ms = lap_race_time_ms - last_lap_time_stamp
                        if lap_time_ms < 0:
                            logger.warning("New lap timestamp {} is older than previous {}".format(lap_race_time_ms, last_lap_time_stamp))
                    else:  # No previous laps, this is the first pass
                        # Lap zero represents the time from the launch pad to flying through the gate
                        lap_time_ms = lap_race_time_ms
                        node.first_cross_flag = True  # indicate first crossing completed
                    if race_format is SECONDARY_RACE_FORMAT:
                        min_lap = 0  # don't enforce min-lap time if running as secondary timer
                        min_lap_behavior = 0
                    else:
                        min_lap = RHDATA.get_optionInt("MinLapSec")
                        min_lap_behavior = RHDATA.get_optionInt("MinLapBehavior")
                    node_finished_flag = RACE.get_node_finished_flag(seat)
                    # set next node race status as 'finished' if winner has been declared
                    # or timer mode is count-down race and race-time has expired
                    if RACE.win_status == RHRace.WinStatus.DECLARED or \
                            (race_format.race_mode == RHRace.RaceMode.FIXED_TIME and RACE.timer_running is False):
                        RACE.set_node_finished_flag(seat)
                    lap_time_fmtstr = RHUtils.time_format(lap_time_ms, RHDATA.get_option('timeFormat'))
                    lap_ok_flag = True
                    lap_late_flag = False
                    if lap_number != 0:  # if initial lap then always accept and don't check lap time; else:
                        if lap_time_ms < (min_lap * 1000):  # if lap time less than minimum
                            node.under_min_lap_count += 1
                            logger.info('Pass record under lap minimum ({3}): Seat={0}, Lap={1}, LapTime={2}, Count={4}' \
                                        .format(seat+1, lap_number, lap_time_fmtstr, min_lap, node.under_min_lap_count))
                            if min_lap_behavior == RHRace.MinLapBehavior.DISCARD_SHORT_LAPS:
                                lap_ok_flag = False
                        # lap within the post-race-time grace window marks the pilot finished
                        if race_format.lap_grace_sec and lap_race_time_ms > secs_to_millis(race_format.race_time_sec) and lap_race_time_ms <= secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec):
                            if not node_finished_flag:
                                RACE.set_node_finished_flag(seat)
                                logger.info('Pilot {} done'.format(pilot.callsign))
                            else:
                                lap_ok_flag = False
                    if lap_ok_flag:
                        if RACE.win_status == RHRace.WinStatus.DECLARED and (race_format.team_racing_mode or \
                                node_finished_flag):
                            lap_late_flag = True  # "late" lap pass (after race winner declared)
                            logger.info('Ignoring lap after race winner declared: Seat={}, lap={}, lapTime={}' \
                                        .format(seat+1, lap_number, lap_time_fmtstr))
                        # emit 'pass_record' message (to primary timer in cluster, livetime, etc).
                        emit_pass_record(node, lap_number, lap_race_time_ms)
                        # Add the new lap to the database
                        lap_data = {
                            'lap_number': lap_number,
                            'lap_time_stamp': lap_race_time_ms,
                            'lap_time': lap_time_ms,
                            'lap_time_formatted': lap_time_fmtstr,
                            'source': source,
                            'deleted': lap_late_flag,  # delete if lap pass is after race winner declared
                            'late_lap': lap_late_flag
                        }
                        RACE.add_new_pass(seat, lap_data)
                        Events.trigger(Evt.RACE_LAP_RECORDED, {
                            'node_index': seat,
                            'color': led_manager.getDisplayColor(seat),
                            'lap': lap_data,
                            'results': RACE.results,
                            'location_id': location_id
                        })
                        logger.debug('Pass record: Seat: {0}, Lap: {1}, Lap time: {2}, Late: {3}' \
                                     .format(seat+1, lap_number, lap_time_fmtstr, lap_late_flag))
                        emit_current_laps()  # update all laps on the race page
                        emit_current_leaderboard()  # generate and update leaderboard
                        if lap_number == 0:
                            emit_first_pass_registered(seat)  # play first-pass sound
                        # first-lap start behavior announces lap numbers one higher
                        if race_format.start_behavior == RHRace.StartBehavior.FIRST_LAP:
                            lap_number += 1
                        # announce lap
                        if lap_number > 0:
                            check_leader = race_format.win_condition != RHRace.WinCondition.NONE and \
                                           RACE.win_status != RHRace.WinStatus.DECLARED
                            # announce pilot lap number unless winner declared and pilot has finished final lap
                            lap_id = lap_number if RACE.win_status != RHRace.WinStatus.DECLARED or \
                                     (not node_finished_flag) else None
                            if lap_id:
                                if race_format.team_racing_mode:
                                    team_name = pilot.team if pilot.team else ""
                                    team_laps = RACE.team_results['meta']['teams'][team_name]['laps']
                                    logger.debug('Team {} lap {}'.format(team_name, team_laps))
                                    # if winning team has been declared then don't announce team lap number
                                    if RACE.win_status == RHRace.WinStatus.DECLARED:
                                        team_laps = None
                                    emit_phonetic_data(pilot.id, lap_id, lap_time_ms, lap_race_time_ms, team_name, team_laps, \
                                                       (check_leader and \
                                                        team_name == Results.get_leading_team_name(RACE.team_results)))
                                else:
                                    emit_phonetic_data(pilot.id, lap_id, lap_time_ms, lap_race_time_ms, None, None, \
                                                       (check_leader and \
                                                        pilot.id == Results.get_leading_pilot_id(RACE.results)))
                            check_win_condition()  # check for and announce possible winner
                            if RACE.win_status != RHRace.WinStatus.NONE:
                                emit_current_leaderboard()  # show current race status on leaderboard
                    else:
                        # record lap as 'deleted'
                        RACE.add_new_pass(seat, {
                            'lap_number': lap_number,
                            'lap_time_stamp': lap_race_time_ms,
                            'lap_time': lap_time_ms,
                            'lap_time_formatted': lap_time_fmtstr,
                            'source': source,
                            'deleted': True
                        })
                else:
                    logger.debug('Pass record dismissed: Seat: {0}, Race not started' \
                                 .format(seat+1))
            else:
                logger.debug('Pass record dismissed: Seat: {0}, Pilot not defined' \
                             .format(seat+1))
    else:
        logger.debug('Pass record dismissed: Seat: {0}, Frequency not defined' \
                     .format(seat+1))
def get_local_location_id_and_seat(node):
    """Resolve a local hardware node to its (track location index, seat).

    Reads the current track layout and timer mapping from the options store
    and delegates to get_location_id_and_seat() using this server's TIMER_ID.
    """
    mapping = RHDATA.get_optionJson('timerMapping')
    layout = RHDATA.get_optionJson('trackLayout')
    return get_location_id_and_seat(
        TIMER_ID, node.manager.addr, node.multi_node_index, layout, mapping)
def get_location_id_and_seat(timer_id, nm, n, track, timer_mapping):
    """Map a timer node to its (track location index, seat number).

    Args:
        timer_id: identifier of the timer owning the node.
        nm: node-manager address key within the timer's mapping.
        n: node index within the node manager.
        track: track dict whose 'layout' entries carry a 'name'.
        timer_mapping: nested mapping timer_id -> nm -> list of
            {'location': name, 'seat': seat} entries.

    Returns:
        (location index within track['layout'], seat number).

    Raises:
        ValueError: if the mapped location name is not in the layout.
    """
    entry = timer_mapping[timer_id][nm][n]
    wanted_name = entry['location']
    for location_idx, layout_loc in enumerate(track['layout']):
        if layout_loc['name'] == wanted_name:
            return location_idx, entry['seat']
    raise ValueError('Timer mapping/track layout is inconsistent')
def join_cluster_callback(split_timer_id, nms):
    """Register a newly-joined split timer in the 'timerMapping' option.

    Args:
        split_timer_id: identifier of the joining (secondary) timer.
        nms: mapping of node-manager objects (addr attribute is read) to
            an iterable of their node indices.

    Creates a placeholder entry (empty location name) per node only when
    the timer is not already mapped, so operator-assigned locations in an
    existing mapping are preserved.
    """
    timer_mapping = RHDATA.get_optionJson('timerMapping')
    timer_map_info = timer_mapping.get(split_timer_id)
    if not timer_map_info:
        # default each node to an unassigned location; seat = node index
        timer_mapping[split_timer_id] = {
            nm.addr: [{'location': '', 'seat': node_index} for node_index in nodes]
            for nm,nodes in nms.items()
        }
        RHDATA.set_optionJson('timerMapping', timer_mapping)
def split_record_callback(split_timer_id, nm, n, split_ts_ms):
    """Handle a pass record arriving from a split (secondary) timer.

    Resolves the reporting node to a track location and seat, then records
    the split via add_split().
    """
    layout = RHDATA.get_optionJson('trackLayout')
    mapping = RHDATA.get_optionJson('timerMapping')
    loc_id, seat = get_location_id_and_seat(split_timer_id, nm, n, layout, mapping)
    add_split(loc_id, seat, split_ts_ms, layout)
def add_split(location_id, seat, split_ts, track):
    """Record a split (intermediate gate) pass for a seat during a race.

    Args:
        location_id: index of the gate within track['layout'].
        seat: seat (node) index the pass belongs to.
        split_ts: pass timestamp in ms.
        track: track dict; layout entries carry a 'location' coordinate used
            for the speed estimate.

    Splits that arrive out of order, or before the seat's zero lap, are
    logged and discarded; missing intermediate splits are logged but the
    received split is still recorded.
    """
    split_id = location_id - 1  # split N is the gate after layout location N
    if RACE.race_status == RHRace.RaceStatus.RACING:
        pilot_id = RHDATA.get_pilot_from_heatNode(RACE.current_heat, seat)
        if pilot_id != RHUtils.PILOT_ID_NONE:
            act_laps_list = RACE.get_valid_laps()[seat]
            lap_count = max(0, len(act_laps_list) - 1)
            # get timestamp for last lap pass (including lap 0)
            if len(act_laps_list) > 0:
                last_lap_ts = act_laps_list[-1]['lap_time_stamp']
                lap_splits = list(filter(lambda split: split['lap_id'] == lap_count, RACE.node_splits[seat]))
                if len(lap_splits) == 0: # first split for this lap
                    if split_id > 0:
                        logger.info('Ignoring missing splits before {0} for seat {1}'.format(split_id+1, seat+1))
                    # measure from the last lap pass
                    last_split_ts = last_lap_ts
                else:
                    last_split = lap_splits[-1]
                    last_split_id = last_split['split_id']
                    if split_id > last_split_id:
                        if split_id > last_split_id + 1:
                            logger.info('Ignoring missing splits between {0} and {1} for seat {2}'.format(last_split_id+1, split_id+1, seat+1))
                        last_split_ts = last_split['split_time_stamp']
                    else:
                        # gate already passed this lap -> out-of-order report
                        logger.info('Ignoring out-of-order split {0} for seat {1}'.format(split_id+1, seat+1))
                        last_split_ts = None
            else:
                logger.info('Ignoring split {0} before zero lap for seat {1}'.format(split_id+1, seat+1))
                last_split_ts = None
            if last_split_ts is not None:
                split_time = split_ts - last_split_ts
                # distance between consecutive layout locations; ms -> s gives *1000
                distance = math.dist(track['layout'][split_id+1]['location'], track['layout'][split_id]['location'])
                split_speed = distance*1000.0/float(split_time) if distance else None
                split_time_str = RHUtils.time_format(split_time, RHDATA.get_option('timeFormat'))
                logger.debug('Split pass record: Seat {0}, lap {1}, split {2}, time={3}, speed={4}' \
                    .format(seat+1, lap_count+1, split_id+1, split_time_str, \
                    ('{0:.2f}'.format(split_speed) if split_speed is not None else 'None')))
                RACE.node_splits[seat].append({
                    'lap_id': lap_count,
                    'split_id': split_id,
                    'split_time_stamp': split_ts,
                    'split_time': split_time,
                    'split_time_formatted': split_time_str,
                    'split_speed': split_speed
                })
                Events.trigger(Evt.RACE_SPLIT_RECORDED, {
                    'node_index': seat,
                    'color': led_manager.getDisplayColor(seat),
                    'split': {
                        'lap_number': lap_count,
                        'split_time_stamp': split_ts,
                        'split_time': split_time,
                        'split_time_formatted': split_time_str,
                    },
                    'results': RACE.results,
                    'location_id': location_id
                })
                emit_split_pass_info(pilot_id, split_id, split_time)
        else:
            logger.info('Split pass record dismissed: No pilot in seat {}'.format(seat+1))
    else:
        logger.info('Ignoring split {0} for seat {1} because race not running'.format(split_id+1, seat+1))
def check_win_condition(**kwargs):
    """Evaluate the race's win condition and announce winner/tie/overtime.

    kwargs may include 'deletedLap' (re-check after a lap deletion),
    'forced' (forwarded to the results checker) and 'start_token'
    (identifies the race instance when re-checking after a delay).

    Returns the win-status dict from RESULTS.check_win_condition_result,
    or None if no check was performed.
    """
    previous_win_status = RACE.win_status
    win_not_decl_flag = RACE.win_status in [RHRace.WinStatus.NONE, RHRace.WinStatus.PENDING_CROSSING, RHRace.WinStatus.OVERTIME]
    del_lap_flag = 'deletedLap' in kwargs
    # if winner not yet declared or racer lap was deleted then check win condition
    win_status_dict = RESULTS.check_win_condition_result(RACE, INTERFACE, **kwargs) \
        if win_not_decl_flag or del_lap_flag else None
    if win_status_dict is not None:
        race_format = RACE.format
        RACE.win_status = win_status_dict['status']
        if RACE.win_status != RHRace.WinStatus.NONE and logger.getEffectiveLevel() <= logging.DEBUG:
            logger.debug("Pilot lap counts: " + Results.get_pilot_lap_counts_str(RACE.results))
            if race_format.team_racing_mode:
                logger.debug("Team lap totals: " + Results.get_team_lap_totals_str(RACE.team_results))
        # if racer lap was deleted and result is winner un-declared
        if del_lap_flag and RACE.win_status != previous_win_status and \
                RACE.win_status == RHRace.WinStatus.NONE:
            RACE.win_status = RHRace.WinStatus.NONE
            RACE.status_message = ''
            logger.info("Race status msg: <None>")
            return win_status_dict
        if win_status_dict['status'] == RHRace.WinStatus.DECLARED:
            # announce winner
            win_data = win_status_dict['data']
            if race_format.team_racing_mode:
                win_str = win_data.get('name', '')
                status_msg_str = __('Winner is') + ' ' + __('Team') + ' ' + win_str
                log_msg_str = "Race status msg: Winner is Team " + win_str
                phonetic_str = status_msg_str
            else:
                win_str = win_data.get('callsign', '')
                status_msg_str = __('Winner is') + ' ' + win_str
                log_msg_str = "Race status msg: Winner is " + win_str
                win_phon_name = RHDATA.get_pilot(win_data['pilot_id']).phonetic \
                    if 'pilot_id' in win_data else None
                if (not win_phon_name) or len(win_phon_name) <= 0: # if no phonetic then use callsign
                    win_phon_name = win_data.get('callsign', '')
                phonetic_str = __('Winner is') + ' ' + win_phon_name
            if 'node' in win_data: # make sure winner node race status always set to 'finished'
                RACE.set_node_finished_flag(win_data['node'])
            # if racer lap was deleted then only output if win-status details changed
            if (not del_lap_flag) or RACE.win_status != previous_win_status or \
                    status_msg_str != RACE.status_message:
                RACE.status_message = status_msg_str
                logger.info(log_msg_str)
                emit_phonetic_text(phonetic_str, 'race_winner', True)
                Events.trigger(Evt.RACE_WIN, {
                    'win_status': win_status_dict,
                    'message': RACE.status_message,
                    'node_index': win_data.get('node', None),
                    'color': led_manager.getDisplayColor(win_data['node']) \
                        if 'node' in win_data else None,
                    'results': RACE.results
                })
        elif win_status_dict['status'] == RHRace.WinStatus.TIE:
            # announce tied
            if win_status_dict['status'] != previous_win_status:
                RACE.status_message = __('Race Tied')
                logger.info("Race status msg: Race Tied")
                emit_phonetic_text(RACE.status_message, 'race_winner')
        elif win_status_dict['status'] == RHRace.WinStatus.OVERTIME:
            # announce overtime
            if win_status_dict['status'] != previous_win_status:
                RACE.status_message = __('Race Tied: Overtime')
                logger.info("Race status msg: Race Tied: Overtime")
                emit_phonetic_text(RACE.status_message, 'race_winner')
        if 'max_consideration' in win_status_dict:
            # wait, then re-check if still in the same race (start_token match)
            logger.info("Waiting {0}ms to declare winner.".format(win_status_dict['max_consideration']))
            gevent.sleep(win_status_dict['max_consideration'] / 1000)
            if 'start_token' in kwargs and RACE.start_token == kwargs['start_token']:
                logger.info("Maximum win condition consideration time has expired.")
                check_win_condition(forced=True)
    return win_status_dict
@catchLogExcDBCloseWrapper
def new_enter_at_callback(node, enter_at_level):
    """Apply a node-reported enter-at calibration level and notify clients."""
    gevent.sleep(0.025)  # delay to avoid potential I/O error
    if enter_at_level > 0:
        payload = {
            'node': node.index,
            'enter_at_level': enter_at_level
        }
        on_set_enter_at_level(payload)
    emit_enter_at_level(node)
@catchLogExcDBCloseWrapper
def new_exit_at_callback(node, exit_at_level):
    """Apply a node-reported exit-at calibration level and notify clients."""
    gevent.sleep(0.025)  # delay to avoid potential I/O error
    if exit_at_level > 0:
        payload = {
            'node': node.index,
            'exit_at_level': exit_at_level
        }
        on_set_exit_at_level(payload)
    emit_exit_at_level(node)
@catchLogExcDBCloseWrapper
def node_crossing_callback(node, crossing_flag, ts, rssi):
    """Handle a node gate-crossing state change: notify clients and drive
    the LED crossing indicators while a race is running."""
    emit_node_crossing_change(node, crossing_flag, ts, rssi)
    # handle LED gate-status indicators:
    if RACE.race_status == RHRace.RaceStatus.RACING: # if race is in progress
        # if pilot assigned to node and first crossing is complete
        if getCurrentRaceFormat() is SECONDARY_RACE_FORMAT or (
                node.current_pilot_id != RHUtils.PILOT_ID_NONE and node.first_cross_flag):
            # first crossing has happened; if 'enter' then show indicator,
            # if first event is 'exit' then ignore (because will be end of first crossing)
            if crossing_flag:
                Events.trigger(Evt.CROSSING_ENTER, {
                    'nodeIndex': node.index,
                    'color': led_manager.getDisplayColor(node.index)
                })
                node.show_crossing_flag = True
            else:
                if node.show_crossing_flag:
                    Events.trigger(Evt.CROSSING_EXIT, {
                        'nodeIndex': node.index,
                        'color': led_manager.getDisplayColor(node.index)
                    })
                else:
                    # first observed event was an 'exit'; arm the flag so
                    # subsequent crossings are displayed
                    node.show_crossing_flag = True
def assign_frequencies():
    '''Assign frequencies to nodes'''
    freq_data = json.loads(getCurrentProfile().frequencies)
    for node_idx in range(RACE.num_nodes):
        # pull this node's settings once instead of re-indexing repeatedly
        freq = freq_data["f"][node_idx]
        band = freq_data["b"][node_idx]
        channel = freq_data["c"][node_idx]
        INTERFACE.set_frequency(node_idx, freq, band, channel)
        Events.trigger(Evt.FREQUENCY_SET, {
            'nodeIndex': node_idx,
            'frequency': freq,
            'band': band,
            'channel': channel
        })
        logger.info('Frequency set: Node {0} B:{1} Ch:{2} Freq:{3}'.format(node_idx+1, band, channel, freq))
def emit_current_log_file_to_socket():
    """Send the current log file's contents to connected clients, then start
    forwarding subsequent log output over the socket."""
    if Current_log_path_name:
        try:
            with io.open(Current_log_path_name, 'r') as log_file:
                SOCKET_IO.emit("hardware_log_init", log_file.read())
        except Exception:
            logger.exception("Error sending current log file to socket")
    log.start_socket_forward_handler()
def db_init(nofill=False):
    '''Initialize database.

    Args:
        nofill: when True, skip populating default records (passed through
            to RHDATA.db_init).
    '''
    RHDATA.db_init(nofill)
    reset_current_laps()
    assign_frequencies()  # push profile frequencies to nodes
    Events.trigger(Evt.DATABASE_INITIALIZE)
    logger.info('Database initialized')
def db_reset():
    '''Resets database.

    Clears all stored data, resets the current-race state, and re-applies
    profile frequencies to the nodes.
    '''
    RHDATA.reset_all()
    reset_current_laps()
    assign_frequencies()
    logger.info('Database reset')
def reset_current_laps():
    '''Resets database current laps to default.'''
    RACE.reset()
    logger.debug('Database current laps reset')
def expand_heats():
    ''' ensure loaded data includes enough slots for current nodes '''
    # group occupied node indices by heat
    occupied_by_heat = {}
    for heatNode in RHDATA.get_heatNodes():
        occupied_by_heat.setdefault(heatNode.heat_id, []).append(heatNode.node_index)
    # add a slot for any node index a heat is missing
    for heat_id, occupied in occupied_by_heat.items():
        for node_index in range(RACE.num_nodes):
            if node_index not in occupied:
                RHDATA.add_heatNode(heat_id, node_index)
def init_race_state():
    """Initialize race state from the database at startup: expand heats,
    push profile settings to nodes, select the first heat and the current
    race format, and invalidate the results page cache."""
    expand_heats()
    # Send profile values to nodes
    on_set_profile({'profile': getCurrentProfile().id}, False)
    # Set current heat
    first_heat = RHDATA.get_first_heat()
    if first_heat:
        RACE.current_heat = first_heat.id
        RACE.set_current_pilots(RHDATA)
    # Set race format
    raceformat_id = RHDATA.get_optionInt('currentFormat')
    if raceformat_id > 0:
        race_format = RHDATA.get_raceFormat(raceformat_id)
    else:
        # no stored selection; fall back to the first defined format
        race_format = RHDATA.get_first_raceFormat()
    setCurrentRaceFormat(race_format, silent=True)
    # Normalize results caches
    PAGE_CACHE.set_valid(False)
def init_interface_state(startup=False):
    """Cancel any current race and reset the laps display.

    At startup the race state is reset directly (no stop-race handling is
    needed yet); otherwise the normal stop-race path is used.
    """
    if startup:
        # Cancel current race: return to ready state without on_stop_race()
        RACE.race_status = RHRace.RaceStatus.READY
        Events.trigger(Evt.LAPS_CLEAR)
        RACE.timer_running = False   # indicate race timer not running
        RACE.scheduled = False       # also stop any deferred start
        SOCKET_IO.emit('stop_timer')
    else:
        on_stop_race()
    # Reset laps display
    reset_current_laps()
def init_LED_effects():
    """Bind LED effects to events: built-in defaults, bitmap overrides when a
    multi-row panel is available, then any operator overrides stored in the DB."""
    # start with defaults
    effects = {
        Evt.RACE_STAGE: "stripColor2_1",
        Evt.RACE_START: "stripColorSolid",
        Evt.RACE_FINISH: "stripColor4_4",
        Evt.RACE_STOP: "stripColorSolid",
        Evt.LAPS_CLEAR: "clear",
        Evt.CROSSING_ENTER: "stripSparkle",
        Evt.CROSSING_EXIT: "none",
        Evt.RACE_LAP_RECORDED: "none",
        Evt.RACE_WIN: "none",
        Evt.MESSAGE_STANDARD: "none",
        Evt.MESSAGE_INTERRUPT: "none",
        Evt.STARTUP: "rainbowCycle",
        Evt.SHUTDOWN: "clear",
        LEDEvent.IDLE_DONE: "clear",
        LEDEvent.IDLE_READY: "clear",
        LEDEvent.IDLE_RACING: "clear",
    }
    # bitmap effects when a multi-row panel is present
    if "bitmapRHLogo" in led_manager.getRegisteredEffects() and rhconfig.LED['LED_ROWS'] > 1:
        effects.update({
            Evt.STARTUP: "bitmapRHLogo",
            Evt.RACE_STAGE: "bitmapOrangeEllipsis",
            Evt.RACE_START: "bitmapGreenArrow",
            Evt.RACE_FINISH: "bitmapCheckerboard",
            Evt.RACE_STOP: "bitmapRedX",
        })
    # update with DB values (if any)
    stored_effects = RHDATA.get_option('ledEffects')
    if stored_effects:
        effects.update(json.loads(stored_effects))
    # set effects
    led_manager.setEventEffect("manualColor", "stripColor")
    for event, effect_name in effects.items():
        led_manager.setEventEffect(event, effect_name)
def initVRxController():
    """Create the VRxController if enabled by configuration.

    Returns:
        A VRxController instance, or None when the VRX_CONTROL config
        section is absent, lacks an "ENABLED" key, is disabled, or the
        VRxController module cannot be imported.
    """
    # flattened from nested try blocks: each failure mode is a guard clause,
    # and the redundant re-read of rhconfig.VRX_CONTROL was removed
    try:
        vrx_config = rhconfig.VRX_CONTROL
    except AttributeError:
        logger.info('VRxController disabled: No VRX_CONTROL config option')
        return None
    try:
        vrx_enabled = vrx_config["ENABLED"]
    except KeyError:
        logger.error('VRxController disabled: config needs "ENABLED" key.')
        return None
    if not vrx_enabled:
        logger.debug('VRxController disabled by config option')
        return None
    try:
        from rh.vrx.VRxController import VRxController
    except ImportError as e:
        logger.error("VRxController unable to be imported")
        logger.error(e)
        return None
    # If got through import success, create the VRxController object
    return VRxController(
        RHDATA,
        Events,
        vrx_config,
        RACE,
        INTERFACE.get_node_frequencies(),
        LANGUAGE)
def killVRxController(*args):
    """Disable VRx control by dropping the global controller reference.

    Extra args are accepted (and ignored) so this can be used directly as
    an event callback.
    """
    global vrx_controller
    logger.info('Killing VRxController')
    vrx_controller = None
def determineHostAddress(maxRetrySecs=10):
    ''' Determines local host IP address. Will wait and retry to get valid IP, in
        case system is starting up and needs time to connect to network and DHCP.

        Returns the IP address string; "0.0.0.0" if none could be determined
        within maxRetrySecs. A successful result is cached in the global
        server_ipaddress_str and returned on subsequent calls. '''
    global server_ipaddress_str
    if server_ipaddress_str:
        return server_ipaddress_str  # if previously determined then return value
    sTime = monotonic()
    while True:
        try:
            ipAddrStr = RHUtils.getLocalIPAddress()
            if ipAddrStr and ipAddrStr != "127.0.0.1":  # don't accept default-localhost IP
                server_ipaddress_str = ipAddrStr
                break
            logger.debug("Querying of host IP address returned " + ipAddrStr)
        except Exception as ex:
            logger.debug("Error querying host IP address: " + str(ex))
        # give up with a placeholder address once the retry window expires
        if monotonic() > sTime + maxRetrySecs:
            ipAddrStr = "0.0.0.0"
            logger.warning("Unable to determine IP address for host machine")
            break
        gevent.sleep(1)
    try:
        hNameStr = socket.gethostname()
    except Exception as ex:
        logger.info("Error querying hostname: " + str(ex))
        hNameStr = "UNKNOWN"
    logger.info("Host machine is '{0}' at {1}".format(hNameStr, ipAddrStr))
    return ipAddrStr
def jump_to_node_bootloader():
    """Ask the hardware interface to put the node processor into its
    bootloader (for firmware flashing), if the interface supports it."""
    jump_fn = getattr(INTERFACE, 'jump_to_bootloader', None)
    if jump_fn is None:
        logger.info("No jump-to-bootloader support")
        return
    try:
        jump_fn()
    except Exception:
        logger.error("Error executing jump to node bootloader")
def shutdown_button_thread_fn():
    """Poll the hardware shutdown button every 50ms, invoking its callbacks;
    also sends periodic server-idle messages to the node while the main
    background (heartbeat) thread has not yet started.

    Exits when the button handler is disabled."""
    try:
        logger.debug("Started shutdown-button-handler thread")
        idleCntr = 0
        while True:
            gevent.sleep(0.050)
            if not ShutdownButtonInputHandler.isEnabled():  # if button handler disabled
                break  # then exit thread
            # poll button input and invoke callbacks
            bStatFlg = ShutdownButtonInputHandler.pollProcessInput(monotonic())
            # while background thread not started and button not pressed
            # send periodic server-idle messages to node
            if (HEARTBEAT_THREAD is None) and BACKGROUND_THREADS_ENABLED and INTERFACE:
                idleCntr += 1
                if idleCntr >= 74:
                    if idleCntr >= 80:
                        idleCntr = 0  # show pattern on node LED via messages
                    if (not bStatFlg) and (idleCntr % 2 == 0):
                        INTERFACE.send_server_idle_message()
    except KeyboardInterrupt:
        logger.info("shutdown_button_thread_fn terminated by keyboard interrupt")
        raise
    except SystemExit:
        raise
    except Exception:
        logger.exception("Exception error in 'shutdown_button_thread_fn()'")
    logger.debug("Exited shutdown-button-handler thread")
def start_shutdown_button_thread():
    """Enable the shutdown-button handler and spawn its polling greenlet
    (no-op if no handler is configured or it is already enabled)."""
    if ShutdownButtonInputHandler and not ShutdownButtonInputHandler.isEnabled():
        ShutdownButtonInputHandler.setEnabled(True)
        gevent.spawn(shutdown_button_thread_fn)
def stop_shutdown_button_thread():
    """Disable the shutdown-button handler, causing its polling thread to exit."""
    if ShutdownButtonInputHandler:
        ShutdownButtonInputHandler.setEnabled(False)
def shutdown_button_pressed():
    """Button-press callback: forward pressed state (1) to the node."""
    logger.debug("Detected shutdown button pressed")
    INTERFACE.send_shutdown_button_state(1)
def shutdown_button_released(longPressReachedFlag):
    """Button-release callback; forwards released state (0) to the node
    unless the long-press threshold was reached (shutdown is in progress)."""
    logger.debug("Detected shutdown button released, longPressReachedFlag={}".\
                 format(longPressReachedFlag))
    if longPressReachedFlag:
        return
    INTERFACE.send_shutdown_button_state(0)
def shutdown_button_long_press():
    """Long-press callback: initiate system shutdown."""
    logger.info("Detected shutdown button long press; performing shutdown now")
    on_shutdown_pi()
def initialize_hardware_interface():
    """Create the global INTERFACE hardware-interface object.

    Selects the interface implementation from the RH_INTERFACE environment
    variable or config (LapRF / Chorus / default RHInterface), and falls
    back to a mock interface when no nodes are found and no serial ports
    are configured. Sets RACE.num_nodes on success.

    Returns:
        True on success (possibly with zero or mock nodes), False on failure.
    """
    try:
        global INTERFACE
        # choose which interface module to load
        if 'RH_INTERFACE' in os.environ:
            rh_interface_name = interface_pkg.__name__ + '.' + os.environ.get('RH_INTERFACE') + "Interface"
        elif 'ADDRESS' in rhconfig.LAPRF and rhconfig.LAPRF['ADDRESS']:
            rh_interface_name = interface_pkg.__name__ + '.LapRFInterface'
        elif 'HARDWARE_PORT' in rhconfig.CHORUS and rhconfig.CHORUS['HARDWARE_PORT']:
            rh_interface_name = interface_pkg.__name__ + '.ChorusInterface'
        else:
            rh_interface_name = interface_pkg.__name__ + '.RHInterface'
        try:
            logger.debug("Initializing interface module: " + rh_interface_name)
            interfaceModule = importlib.import_module(rh_interface_name)
            INTERFACE = interfaceModule.get_hardware_interface(config=rhconfig, \
                            isS32BPillFlag=RHGPIO.isS32BPillBoard(), **serviceHelpers)
            # if no nodes detected, system is RPi, not S32_BPill, and no serial port configured
            # then check if problem is 'smbus2' or 'gevent' lib not installed
            if INTERFACE and ((not INTERFACE.nodes) or len(INTERFACE.nodes) <= 0) and \
                    RHUtils.isSysRaspberryPi() and (not RHGPIO.isS32BPillBoard()) and \
                    ((not rhconfig.SERIAL_PORTS) or len(rhconfig.SERIAL_PORTS) <= 0):
                try:
                    importlib.import_module('smbus2')
                    importlib.import_module('gevent')
                except ImportError:
                    logger.warning("Unable to import libraries for I2C nodes; try: " +\
                                   "sudo pip install --upgrade --no-cache-dir -r requirements.txt")
                    set_ui_message(
                        'i2c',
                        "Unable to import libraries for I2C nodes. Try: <code>sudo pip install --upgrade --no-cache-dir -r requirements.txt</code>",
                        header='Warning',
                        subclass='no-library'
                    )
                RACE.num_nodes = 0
                return True
        except (ImportError, RuntimeError, IOError) as ex:
            logger.info('Unable to initialize nodes via ' + rh_interface_name + ': ' + str(ex))
            # if interface was explicitly configured then don't fall back to mock
            if 'RH_INTERFACE' in os.environ or rh_interface_name != 'rh.interface.RHInterface':
                return False
        if (not INTERFACE) or (not INTERFACE.nodes) or len(INTERFACE.nodes) <= 0:
            if (not rhconfig.SERIAL_PORTS) or len(rhconfig.SERIAL_PORTS) <= 0:
                # no nodes and no serial ports configured: use simulated nodes
                interfaceModule = importlib.import_module(interface_pkg.__name__ + '.MockInterface')
                INTERFACE = interfaceModule.get_hardware_interface(config=rhconfig, use_random=True, **serviceHelpers)
                for node_manager in INTERFACE.node_managers: # put mock nodes at latest API level
                    node_manager.api_level = NODE_API_BEST
                set_ui_message(
                    'mock',
                    "Server is using simulated (mock) nodes",
                    header='Notice',
                    subclass='in-use'
                )
            else:
                try:
                    importlib.import_module('serial')
                    logger.info("Unable to initialize specified serial node(s): {0}".format(rhconfig.SERIAL_PORTS))
                    if INTERFACE:
                        logger.info("If an S32_BPill board is connected, its processor may need to be flash-updated")
                        # enter serial port name so it's available for node firmware update
                        if hasattr(INTERFACE, "fwupd_serial_port"):
                            INTERFACE.fwupd_serial_port = rhconfig.SERIAL_PORTS[0]
                        set_ui_message('stm32', \
                            "Server is unable to communicate with node processor. " + \
                            "If an S32_BPill board is connected, you may attempt to" + \
                            " <a href=\"/updatenodes\">" + "flash" + "</a> " + \
                            "its processor.", \
                            header='Warning', subclass='no-comms')
                    else:
                        return False # unable to open serial port
                except ImportError:
                    logger.info("Unable to import library for serial node(s) - is 'pyserial' installed?")
                    return False
        RACE.num_nodes = len(INTERFACE.nodes) # save number of nodes found
        return True
    except:
        logger.exception("Error initializing hardware interface")
        return False
# Create and save server/node information
def buildServerInfo():
    """Assemble the global serverInfo dict (release/server/node API versions,
    node firmware versions) plus an 'about_html' HTML summary, and a
    serverInfoItems copy (without the HTML) used for logging.

    Returns serverInfo, or None if an exception occurred (logged).
    """
    global serverInfo
    global serverInfoItems
    try:
        serverInfo = {}
        serverInfo['about_html'] = "<ul>"
        # Release Version
        serverInfo['release_version'] = RELEASE_VERSION
        serverInfo['about_html'] += "<li>" + __sys("Version") + ": " + str(RELEASE_VERSION) + "</li>"
        # Server API
        serverInfo['server_api'] = SERVER_API
        serverInfo['about_html'] += "<li>" + __sys("Server API") + ": " + str(SERVER_API) + "</li>"
        # Server API
        serverInfo['json_api'] = JSON_API
        # node API/firmware details only apply to RHInterface-based hardware
        if isinstance(INTERFACE, RHInterface):
            # Node API levels
            node_api_level = 0
            serverInfo['node_api_match'] = True
            serverInfo['node_api_lowest'] = 0
            serverInfo['node_api_levels'] = [None]
            if len(INTERFACE.node_managers):
                if INTERFACE.node_managers[0].api_level:
                    node_api_level = INTERFACE.node_managers[0].api_level
                    serverInfo['node_api_lowest'] = node_api_level
                    serverInfo['node_api_levels'] = []
                    for node_manager in INTERFACE.node_managers:
                        serverInfo['node_api_levels'].append(node_manager.api_level)
                        if node_manager.api_level != node_api_level:
                            serverInfo['node_api_match'] = False
                        if node_manager.api_level < serverInfo['node_api_lowest']:
                            serverInfo['node_api_lowest'] = node_manager.api_level
                    # if multi-node and all api levels same then only include one entry
                    if serverInfo['node_api_match'] and INTERFACE.node_managers[0].is_multi_node():
                        serverInfo['node_api_levels'] = serverInfo['node_api_levels'][0:1]
            serverInfo['about_html'] += "<li>" + __sys("Node API") + ": "
            if node_api_level:
                if serverInfo['node_api_match']:
                    serverInfo['about_html'] += str(node_api_level)
                else:
                    # mixed levels: list each node's level
                    serverInfo['about_html'] += "[ "
                    for idx, level in enumerate(serverInfo['node_api_levels']):
                        serverInfo['about_html'] += str(idx+1) + ":" + str(level) + " "
                    serverInfo['about_html'] += "]"
            else:
                serverInfo['about_html'] += "None (Delta5)"
            serverInfo['about_html'] += "</li>"
            # Node firmware versions
            node_fw_version = None
            serverInfo['node_version_match'] = True
            serverInfo['node_fw_versions'] = [None]
            if len(INTERFACE.node_managers):
                if hasattr(INTERFACE.node_managers[0], 'firmware_version_str') and INTERFACE.node_managers[0].firmware_version_str:
                    node_fw_version = INTERFACE.node_managers[0].firmware_version_str
                    serverInfo['node_fw_versions'] = []
                    for node_manager in INTERFACE.node_managers:
                        serverInfo['node_fw_versions'].append(\
                            node_manager.firmware_version_str if node_manager.firmware_version_str else "0")
                        if node_manager.firmware_version_str != node_fw_version:
                            serverInfo['node_version_match'] = False
                    # if multi-node and all versions same then only include one entry
                    if serverInfo['node_version_match'] and INTERFACE.node_managers[0].is_multi_node():
                        serverInfo['node_fw_versions'] = serverInfo['node_fw_versions'][0:1]
            if node_fw_version:
                serverInfo['about_html'] += "<li>" + __sys("Node Version") + ": "
                if serverInfo['node_version_match']:
                    serverInfo['about_html'] += str(node_fw_version)
                else:
                    serverInfo['about_html'] += "[ "
                    for idx, ver in enumerate(serverInfo['node_fw_versions']):
                        serverInfo['about_html'] += str(idx+1) + ":" + str(ver) + " "
                    serverInfo['about_html'] += "]"
                serverInfo['about_html'] += "</li>"
            serverInfo['node_api_best'] = NODE_API_BEST
            if serverInfo['node_api_match'] is False or node_api_level < NODE_API_BEST:
                # Show Recommended API notice
                serverInfo['about_html'] += "<li><strong>" + __sys("Node Update Available") + ": " + str(NODE_API_BEST) + "</strong></li>"
        serverInfo['about_html'] += "</ul>"
        # create version of 'serverInfo' without 'about_html' entry
        serverInfoItems = serverInfo.copy()
        serverInfoItems.pop('about_html', None)
        serverInfoItems['prog_start_epoch'] = "{}".format(PROGRAM_START.epoch_ms)
        serverInfoItems['prog_start_time'] = str(datetime.fromtimestamp(millis_to_secs(PROGRAM_START.epoch_ms), tz=timezone.utc))
        return serverInfo
    except:
        logger.exception("Error in 'buildServerInfo()'")
# Log server/node information
def reportServerInfo():
    """Log serverInfo and surface UI warnings/notices for node API or
    firmware mismatches relative to what this server supports."""
    logger.debug("Server info: " + json.dumps(serverInfoItems))
    if 'node_api_match' in serverInfo and serverInfo['node_api_match'] is False:
        logger.info('** WARNING: Node API mismatch **')
        set_ui_message('node-match',
                       "Node versions do not match and may not function similarly", header='Warning')
    if 'node_api_lowest' in serverInfo and RACE.num_nodes > 0:
        if serverInfo['node_api_lowest'] < NODE_API_SUPPORTED:
            # firmware below the minimum supported API level
            logger.info('** WARNING: Node firmware is out of date and may not function properly **')
            msgStr = "Node firmware is out of date and may not function properly."
            if hasattr(INTERFACE, 'fwupd_serial_port') and INTERFACE.fwupd_serial_port is not None:
                msgStr += " " + "If an S32_BPill board is connected, you should" + \
                    " <a href=\"/updatenodes\">" + "flash" + "</a> " + \
                    "its processor."
            set_ui_message('node-obs', msgStr, header='Warning', subclass='api-not-supported')
        elif serverInfo['node_api_lowest'] < NODE_API_BEST:
            # supported but not latest: offer an update notice
            logger.info('** NOTICE: Node firmware update is available **')
            msgStr = "Node firmware update is available."
            if hasattr(INTERFACE, 'fwupd_serial_port') and INTERFACE.fwupd_serial_port is not None:
                msgStr += " " + "If an S32_BPill board is connected, you should" + \
                    " <a href=\"/updatenodes\">" + "flash" + "</a> " + \
                    "its processor."
            set_ui_message('node-old', msgStr, header='Notice', subclass='api-low')
        elif serverInfo['node_api_lowest'] > NODE_API_BEST:
            logger.warning('** WARNING: Node firmware is newer than this server version supports **')
            set_ui_message('node-newer',
                           "Node firmware is newer than this server version and may not function properly",
                           header='Warning', subclass='api-high')
#
# Program Initialize
#

# log version/startup info and system identification
logger.info('Release: {0} / Server API: {1} / Latest Node API: {2}'.format(RELEASE_VERSION, SERVER_API, NODE_API_BEST))
logger.debug('Program started at {}'.format(PROGRAM_START.epoch_ms))
RHUtils.idAndLogSystemInfo()

# warn if running under (unsupported) Python 2
if RHUtils.isVersionPython2():
    logger.warning("Python version is obsolete: " + RHUtils.getPythonVersionStr())
    set_ui_message('python',
                   ("Python version" + " (" + RHUtils.getPythonVersionStr() + ") " + \
                    "is obsolete and no longer supported; see" + \
                    " <a href=\"docs?d=Software Setup.md#python\">Software Settings</a> " + \
                    "doc for upgrade instructions"),
                   header='Warning', subclass='old-python')

determineHostAddress(2)  # attempt to determine IP address, but don't wait too long for it

# allow config to force S32_BPill board handling even if not auto-detected
if (not RHGPIO.isS32BPillBoard()) and rhconfig.GENERAL['FORCE_S32_BPILL_FLAG']:
    RHGPIO.setS32BPillBoardFlag()
    logger.info("Set S32BPillBoardFlag in response to FORCE_S32_BPILL_FLAG in config")

logger.debug("isRPi={}, isRealGPIO={}, isS32BPill={}".format(RHUtils.isSysRaspberryPi(), \
             RHGPIO.isRealRPiGPIO(), RHGPIO.isS32BPillBoard()))
if RHUtils.isSysRaspberryPi() and not RHGPIO.isRealRPiGPIO():
    logger.warning("Unable to access real GPIO on Pi; try: sudo pip install RPi.GPIO")
    set_ui_message(
        'gpio',
        "Unable to access real GPIO on Pi. Try: <code>sudo pip install RPi.GPIO</code>",
        header='Warning',
        subclass='no-access'
    )

# check if current log file owned by 'root' and change owner to 'pi' user if so
if Current_log_path_name and RHUtils.checkSetFileOwnerPi(Current_log_path_name):
    logger.debug("Changed log file owner from 'root' to 'pi' (file: '{0}')".format(Current_log_path_name))
    RHUtils.checkSetFileOwnerPi(log.LOG_DIR_NAME)  # also make sure 'log' dir not owned by 'root'
logger.info("Using log file: {0}".format(Current_log_path_name))

# on a Pi with an S32_BPill board: set up the shutdown button and reset the
# node processor so it starts running its firmware
if RHUtils.isSysRaspberryPi() and RHGPIO.isS32BPillBoard():
    try:
        if rhconfig.GENERAL['SHUTDOWN_BUTTON_GPIOPIN']:
            logger.debug("Configuring shutdown-button handler, pin={}, delayMs={}".format(\
                rhconfig.GENERAL['SHUTDOWN_BUTTON_GPIOPIN'], \
                rhconfig.GENERAL['SHUTDOWN_BUTTON_DELAYMS']))
            ShutdownButtonInputHandler = ButtonInputHandler(
                rhconfig.GENERAL['SHUTDOWN_BUTTON_GPIOPIN'], logger, \
                shutdown_button_pressed, shutdown_button_released, \
                shutdown_button_long_press,
                rhconfig.GENERAL['SHUTDOWN_BUTTON_DELAYMS'], False)
            start_shutdown_button_thread()
    except Exception:
        logger.exception("Error setting up shutdown-button handler")
    logger.debug("Resetting S32_BPill processor")
    s32logger = logging.getLogger("stm32loader")
    stm32loader.set_console_output_fn(s32logger.info)
    stm32loader.reset_to_run()
    stm32loader.set_console_output_fn(None)
# discover and instantiate service helpers (e.g. i2c/mqtt helpers)
serviceHelpers = {}
for helper in search_modules(helper_pkg, suffix='helper'):
    helper_key = helper.__name__[len(helper_pkg.__name__)+1:]
    try:
        serviceHelpers[helper_key] = helper.create(rhconfig)
    except Exception as ex:
        logger.warning("Unable to create service helper '{0}': {1}".format(helper.__name__, ex))

initRhResultFlag = initialize_hardware_interface()
if not initRhResultFlag:
    log.wait_for_queue_empty()
    sys.exit(1)

# command-line maintenance modes: jump node to bootloader and optionally flash
if args.jumptobl:
    stop_background_threads()
    jump_to_node_bootloader()
    if args.flashbpill:
        portStr = rhconfig.SERIAL_PORTS[0] if rhconfig.SERIAL_PORTS and \
                  len(rhconfig.SERIAL_PORTS) > 0 else None
        srcStr = args.flashbpill
        successFlag = stm32loader.flash_file_to_stm32(portStr, srcStr)
        sys.exit(0 if successFlag else 1)
    sys.exit(0)

logger.info('Number of nodes found: {0}'.format(RACE.num_nodes))

if INTERFACE is not None:
    # if I2C nodes then only report comm errors if > 1.0%
    for node_manager in INTERFACE.node_managers:
        if hasattr(node_manager, 'i2c_addr'):
            INTERFACE.set_intf_error_report_percent_limit(1.0)
            break

# Delay to get I2C addresses through interface class initialization
gevent.sleep(0.500)

if INTERFACE is not None and hasattr(INTERFACE, 'sensors'):
    SENSORS.extend(INTERFACE.sensors)
try:
    SENSORS.discover(sensor_pkg, config=rhconfig.SENSORS, **serviceHelpers)
except Exception:
    logger.exception("Exception while discovering sensors")

# MQTT is required in this fork: announce the timer interface over MQTT
mqtt_clients = serviceHelpers.get('mqtt_helper')
if mqtt_clients:
    mqtt_interface_class = get_mqtt_interface_for(INTERFACE.__class__)
    mqtt_interface = mqtt_interface_class(mqtt_clients['timer'],
                                          rhconfig.MQTT['TIMER_ANN_TOPIC'],
                                          rhconfig.MQTT['TIMER_CTRL_TOPIC'],
                                          TIMER_ID,
                                          INTERFACE)
    STARTABLES.append(mqtt_interface)
else:
    logger.error("MQTT not available")
    sys.exit(1)
# if no DB file then create it now (before "__()" fn used in 'buildServerInfo()')
db_inited_flag = False
if not os.path.exists(DB_FILE_NAME):
    logger.info("No '{0}' file found; creating initial database".format(DB_FILE_NAME))
    db_init()
    db_inited_flag = True
RHDATA.primeCache() # Ready the Options cache

# check if DB file owned by 'root' and change owner to 'pi' user if so
if RHUtils.checkSetFileOwnerPi(DB_FILE_NAME):
    logger.debug("Changed DB-file owner from 'root' to 'pi' (file: '{0}')".format(DB_FILE_NAME))

# check if directories owned by 'root' and change owner to 'pi' user if so
if RHUtils.checkSetFileOwnerPi(DB_BKP_DIR_NAME):
    logger.info("Changed '{0}' dir owner from 'root' to 'pi'".format(DB_BKP_DIR_NAME))
if RHUtils.checkSetFileOwnerPi(log.LOGZIP_DIR_NAME):
    logger.info("Changed '{0}' dir owner from 'root' to 'pi'".format(log.LOGZIP_DIR_NAME))

# collect server info for About panel, etc
buildServerInfo()
reportServerInfo()

# Do data consistency checks (skip when the DB was just created above)
if not db_inited_flag:
    try:
        RHDATA.primeCache() # Ready the Options cache
        if not RHDATA.check_integrity():
            RHDATA.recover_database(Database.db_uri(BASEDIR, DB_FILE_NAME), startup=True)
            clean_results_cache()
    except Exception as ex:
        logger.warning('Clearing all data after recovery failure: ' + str(ex))
        db_reset()

# Initialize internal state with database
# DB session commit needed to prevent 'application context' errors
try:
    init_race_state()
except Exception:
    logger.exception("Exception in 'init_race_state()'")
    log.wait_for_queue_empty()
    sys.exit(1)
# internal secondary race format for LiveTime (needs to be created after initial DB setup)
SECONDARY_RACE_FORMAT = RHRaceFormat(name="Secondary",
                                     race_mode=RHRace.RaceMode.NO_TIME_LIMIT,
                                     race_time_sec=0,
                                     lap_grace_sec=0,
                                     start_delay_min=0,
                                     start_delay_max=0,
                                     staging_tones=0,
                                     number_laps_win=0,
                                     win_condition=RHRace.WinCondition.NONE,
                                     team_racing_mode=False,
                                     start_behavior=RHRace.StartBehavior.HOLESHOT)

# Import IMDTabler
if os.path.exists(IMDTABLER_JAR_NAME): # if 'IMDTabler.jar' is available
    java_ver: Optional[str] = None
    try:
        # probe for a usable java runtime
        java_ver = subprocess.check_output('java -version', stderr=subprocess.STDOUT, shell=True).decode("utf-8")
        logger.debug('Found installed: ' + java_ver.split('\n')[0].strip())
    except:
        logger.info('Unable to find java; for IMDTabler functionality try:')
        logger.info('sudo apt install default-jdk-headless')
    if java_ver:
        try:
            # verify the jar itself runs
            chk_imdtabler_ver = subprocess.check_output( \
                'java -jar ' + IMDTABLER_JAR_NAME + ' -v', \
                stderr=subprocess.STDOUT, shell=True).decode("utf-8").rstrip()
            Use_imdtabler_jar_flag = True # indicate IMDTabler.jar available
            logger.debug('Found installed: ' + chk_imdtabler_ver)
        except Exception:
            logger.exception('Error checking IMDTabler: ')
else:
    logger.info('IMDTabler lib not found at: ' + IMDTABLER_JAR_NAME)
# Create LED object with appropriate configuration
strip = None
if rhconfig.LED['LED_COUNT'] > 0:
    led_type = os.environ.get('RH_LEDS', 'ws281x')
    # note: any calls to 'RHDATA.get_option()' need to happen after the DB initialization,
    # otherwise it causes problems when run with no existing DB file
    led_brightness = RHDATA.get_optionInt("ledBrightness")
    led_pkg_prefix = led_pkg.__name__ + '.'
    try:
        ledModule = importlib.import_module(led_pkg_prefix + led_type + '_leds')
        strip = ledModule.get_pixel_interface(config=rhconfig.LED, brightness=led_brightness)
    except ImportError:
        # No hardware LED handler, try the OpenCV emulation
        try:
            ledModule = importlib.import_module(led_pkg_prefix + 'cv2_leds')
            strip = ledModule.get_pixel_interface(config=rhconfig.LED, brightness=led_brightness)
        except ImportError:
            # No OpenCV emulation, try console output
            try:
                ledModule = importlib.import_module(led_pkg_prefix + 'ANSI_leds')
                strip = ledModule.get_pixel_interface(config=rhconfig.LED, brightness=led_brightness)
            except ImportError:
                logger.info('LED: disabled (no modules available)')
else:
    logger.debug('LED: disabled (configured LED_COUNT is <= 0)')
led_manager: Any = None
if strip:
    # Initialize the library (must be called once before other functions).
    try:
        strip.begin()
        led_manager = LEDEventManager(Events, strip, RHDATA, RACE, LANGUAGE, INTERFACE)
    except:
        logger.exception("Error initializing LED support")
elif CLUSTER and CLUSTER.hasRecEventsSecondaries():
    # NOTE(review): CLUSTER is re-assigned later (ClusterNodeSet) — presumably
    # it is pre-declared earlier in the file; verify ordering
    led_manager = ClusterLEDManager()
if led_manager:
    # register all discovered LED effect handlers, then bind events to effects
    led_effects = Plugins(prefix='led_handler')
    led_effects.discover(led_pkg, config=rhconfig.LED)
    for led_effect in led_effects:
        led_manager.registerEffect(led_effect)
    init_LED_effects()
else:
    led_manager = NoLEDManager()
# start up VRx Control
vrx_controller = initVRxController()
if vrx_controller:
    # shut down VRx control if this timer joins a cluster as a secondary
    Events.on(Evt.CLUSTER_JOIN, 'VRx', killVRxController)
# audio and MQTT event-driven effects
audio_manager = AudioEventManager(Events, RHDATA, RACE, rhconfig.AUDIO)
audio_manager.install_default_effects()
mqtt_manager = MqttEventManager(Events, RHDATA, RACE, rhconfig.MQTT, mqtt_clients['race'] if mqtt_clients else None)
mqtt_manager.install_default_messages()
# data exporters
export_manager = DataExportManager(RHDATA, PAGE_CACHE, LANGUAGE)
export_manager.discover(export_pkg)
# register endpoints
from rh.endpoints import json_endpoints, ota_endpoints, race_explorer_endpoints, heat_generator_endpoints, rssi_endpoints
APP.register_blueprint(json_endpoints.createBlueprint(RESULTS, RACE, serverInfo, getCurrentProfile))
APP.register_blueprint(ota_endpoints.createBlueprint())
APP.register_blueprint(race_explorer_endpoints.createBlueprint(rhconfig, TIMER_ID, INTERFACE, RHDATA, APP.rhserver))
APP.register_blueprint(heat_generator_endpoints.createBlueprint(RHDATA))
# optional Chorus-protocol serial API
if 'API_PORT' in rhconfig.CHORUS and rhconfig.CHORUS['API_PORT']:
    from rh.apis.chorus_api import ChorusAPI
    import serial
    chorusPort = rhconfig.CHORUS['API_PORT']
    chorusSerial = serial.Serial(port=chorusPort, baudrate=115200, timeout=0.1)
    CHORUS_API = ChorusAPI(chorusSerial, INTERFACE, SENSORS, connect_handler, on_stop_race, lambda : on_reset_auto_calibration({}))
    STARTABLES.append(CHORUS_API)
# optional MQTT timer API (also feeds the RSSI-sample endpoint)
if mqtt_clients:
    from rh.apis import RHListener, RssiSampleListener
    from rh.apis.mqtt_api import MqttAPI
    mqtt_listener = RHListener(
        node_crossing_callback,
        pass_record_callback,
        split_record_callback,
        on_set_frequency,
        on_set_enter_at_level,
        on_set_exit_at_level)
    sample_listener = RssiSampleListener()
    APP.register_blueprint(rssi_endpoints.createBlueprint(sample_listener))
    assert INTERFACE is not None
    MQTT_API = MqttAPI(mqtt_clients['timer'],
                       rhconfig.MQTT['TIMER_ANN_TOPIC'],
                       TIMER_ID,
                       INTERFACE, BaseHardwareInterfaceEventBroadcaster([mqtt_listener, sample_listener]))
    STARTABLES.append(MQTT_API)
# Build the cluster of secondary timers (split timing and/or mirroring)
# from the 'SECONDARIES' config, and grow a default track layout with one
# gate per split secondary.
CLUSTER = ClusterNodeSet(LANGUAGE, Events)
hasMirrors = False
DEFAULT_TRACK: Dict[str,Any] = {
    'crs': 'Local grid',
    'units': 'm',
    'layout': [{'name': 'Start/finish', 'type': 'Arch gate', 'location': [0,0]}],
    'locationTypes': {"Arch gate": {}, "Square gate": {"description": "1.5m square"}, "Flag": {}}
}
try:
    for sec_idx, secondary_info in enumerate(rhconfig.GENERAL['SECONDARIES']):
        # a bare address string is shorthand for a split-mode secondary
        if isinstance(secondary_info, string_types):
            secondary_info = {'address': secondary_info, 'mode': SecondaryNode.SPLIT_MODE}
        if 'address' not in secondary_info:
            raise RuntimeError("Secondary 'address' item not specified")
        # substitute asterisks in given address with values from host IP address
        secondary_info['address'] = RHUtils.substituteAddrWildcards(determineHostAddress, \
                                                                    secondary_info['address'])
        if not '://' in secondary_info['address']:
            secondary_info['address'] = 'http://' + secondary_info['address']
        if 'timeout' not in secondary_info:
            secondary_info['timeout'] = rhconfig.GENERAL['SECONDARY_TIMEOUT']
        if 'mode' in secondary_info and str(secondary_info['mode']) == SecondaryNode.MIRROR_MODE:
            hasMirrors = True
        elif hasMirrors:
            # split secondaries are not allowed after a mirror entry
            logger.warning('** Mirror secondaries must be last - ignoring remaining secondary config **')
            set_ui_message(
                'secondary',
                "Mirror secondaries must be last; ignoring part of secondary configuration",
                header='Notice',
                subclass='mirror'
            )
            break
        secondary = SecondaryNode(sec_idx, secondary_info, RACE, getCurrentProfile, \
                                  split_record_callback, join_cluster_callback,
                                  PROGRAM_START, \
                                  emit_cluster_connect_change, RELEASE_VERSION)
        CLUSTER.addSecondary(secondary)
        # extend the default track layout with a gate for this split, offset
        # along the x-axis by the configured 'distance' (if any).
        # Fix: original code concatenated str + int ('Split '+(secondary.id+1)),
        # which raises TypeError and was silently swallowed by the except below.
        track_location = 'Split ' + str(secondary.id+1)
        prev_loc_coord = DEFAULT_TRACK['layout'][-1]['location']
        dist = secondary_info['distance'] if 'distance' in secondary_info else 0
        track_coord = [prev_loc_coord[0] + dist, prev_loc_coord[1]]
        track_loc_info = {'name': track_location, 'type': 'Arch gate', 'location': track_coord}
        DEFAULT_TRACK['layout'].append(track_loc_info)
except:
    logger.exception("Error adding secondary to cluster")
    set_ui_message(
        'secondary',
        'Secondary configuration is invalid.',
        header='Error',
        subclass='error'
    )
if CLUSTER and CLUSTER.hasRecEventsSecondaries():
    CLUSTER.init_repeater()
# seed the persisted track layout on first run only
if RHDATA.get_optionJson('trackLayout') is None:
    RHDATA.set_optionJson('trackLayout', DEFAULT_TRACK)
def update_timer_mapping():
    """Ensure the persisted 'timerMapping' option has an entry for this timer
    covering every node manager and node known to the hardware interface;
    new nodes default to the 'Start/finish' location."""
    timer_mapping = RHDATA.get_optionJson('timerMapping')
    if not timer_mapping:
        timer_mapping = {}
    mapped_nms = timer_mapping.get(TIMER_ID, None)
    if not mapped_nms:
        mapped_nms = {}
        timer_mapping[TIMER_ID] = mapped_nms
    for nm in INTERFACE.node_managers:
        mapped_nm = mapped_nms.get(nm.addr, None)
        if not mapped_nm:
            # no mapping yet for this node manager: map every node to Start/finish
            mapped_nm = [{'location': 'Start/finish', 'seat': node.index} for node in nm.nodes]
            mapped_nms[nm.addr] = mapped_nm
        # append default entries for any nodes added since the mapping was stored
        for node in nm.nodes[len(mapped_nm):]:
            mapped_nm.append({'location': 'Start/finish', 'seat': node.index})
    RHDATA.set_optionJson('timerMapping', timer_mapping)
update_timer_mapping()
def start():
    """Start background services: the clock monitor, all registered startables,
    and the hardware interface; then fire the STARTUP event."""
    gevent.spawn(clock_check_thread_function) # start thread to monitor system clock
    for startable in STARTABLES:
        startable.start()
    init_interface_state(startup=True)
    # announce startup to event listeners (LED/audio effects etc.)
    Events.trigger(Evt.STARTUP, {
        'color': ColorVal.ORANGE,
        'message': 'RotorHazard ' + RELEASE_VERSION
    })
def stop():
    """Shut down in reverse of startup: fire SHUTDOWN, report interface errors,
    stop startables and background threads, close interface/services/DB, flush logs."""
    Events.trigger(Evt.SHUTDOWN, {
        'color': ColorVal.RED
    })
    # log accumulated interface error report, if any (INFO only when errors occurred)
    rep_str = INTERFACE.get_intf_error_report_str(True)
    if rep_str:
        logger.log((logging.INFO if INTERFACE.get_intf_total_error_count() else logging.DEBUG), rep_str)
    for startable in STARTABLES:
        startable.stop()
    stop_background_threads()
    INTERFACE.close()
    # close any helper services that support it
    for service in serviceHelpers.values():
        if hasattr(service, 'close'):
            service.close()
    RHDATA.close()
    log.wait_for_queue_empty()
    gevent.sleep(2) # allow system shutdown command to run before program exit
    log.close_logging()
def run(port_val = rhconfig.GENERAL['HTTP_PORT']):
    """Run the HTTP/socketio server; blocks until the server shuts down.

    port_val: TCP port to serve on. NOTE: the default is evaluated once,
    at definition time, from the loaded config.
    """
    if not RHDATA.get_option("secret_key"):
        # Fix: generate the Flask session secret with the 'secrets' module
        # (cryptographically strong) instead of the predictable 'random' PRNG.
        import secrets
        RHDATA.set_option("secret_key", ''.join(secrets.choice(string.ascii_letters) for i in range(50)))
    APP.config['SECRET_KEY'] = RHDATA.get_option("secret_key")
    logger.info("Running http server at port " + str(port_val))
    start()
    if args.autostart:
        gevent.spawn(on_stage_race)
    try:
        # the following fn does not return until the server is shutting down
        SOCKET_IO.run(APP, host='0.0.0.0', port=port_val, debug=True, use_reloader=False)
        logger.info("Server is shutting down")
    except KeyboardInterrupt:
        logger.info("Server terminated by keyboard interrupt")
    except SystemExit:
        logger.info("Server terminated by system exit")
    except Exception:
        logger.exception("Server exception")
    stop()
# Start HTTP server
if __name__ == '__main__':
    # script entry point: serve on the configured default HTTP port
    run()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,096
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/RHTimeFns.py
|
# RHTimeFns: Helpers for datetime and timezone functions.
from datetime import datetime, timedelta, timezone
from . import ms_counter
def getEpochStartTime():
    """Return the Unix epoch (1970-01-01) as an aware UTC datetime."""
    return datetime(1970, 1, 1, tzinfo=timezone.utc)

def getUtcDateTimeNow():
    """Return the current wall-clock time as an aware UTC datetime."""
    return datetime.now(timezone.utc)

# cached epoch origin used by getEpochTimeNow()
EPOCH_START = getEpochStartTime()

def getEpochTimeNow():
    '''
    Returns the current time in milliseconds since 1970-01-01.
    '''
    return round((getUtcDateTimeNow() - EPOCH_START) / timedelta(milliseconds=1))
class MonotonicEpochSync:
    """Paired reading of the epoch clock and the monotonic millisecond counter,
    used to convert monotonic timestamps into epoch milliseconds."""
    def __init__(self):
        now_epoch = getEpochTimeNow()
        now_mono = ms_counter()
        self.epoch_ms = now_epoch
        self.monotonic_ms = now_mono
        # offset for converting 'monotonic' time to epoch milliseconds since 1970-01-01
        self.offset_ms = now_epoch - now_mono
    def monotonic_to_epoch_millis(self, clock_ms: int) -> int:
        '''
        Converts millisecond 'monotonic' time to epoch milliseconds since 1970-01-01.
        '''
        return self.offset_ms + clock_ms
    def diff(self, other):
        """Return how far ahead (ms) this sync's epoch reading is of the other
        sync's view of the same monotonic instant."""
        return self.epoch_ms - other.monotonic_to_epoch_millis(self.monotonic_ms)
    def adjustBy(self, other, diff_ms):
        """Shift this sync's epoch reading by diff_ms and adopt the other's offset."""
        self.epoch_ms += diff_ms
        self.offset_ms = other.offset_ms
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,097
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/__init__.py
|
import gevent
import bisect
from collections import namedtuple
from typing import List, Tuple
# immutable (timestamp, value) sample records; defaults yield a zeroed sample
RssiSample = namedtuple('RssiSample', ['timestamp', 'rssi'], defaults=[0,0])
LifetimeSample = namedtuple('LifetimeSample', ['timestamp', 'lifetime'], defaults=[0,0])
def unpack_8(data: bytes) -> int:
    """Return the unsigned 8-bit value held in the first byte of data."""
    return data[0] & 0xFF
def unpack_8_signed(data: bytes) -> int:
    """Return the first byte of data as a signed (two's-complement) 8-bit value."""
    raw = data[0]
    # values 0x80..0xFF map to -128..-1
    return raw - 256 if raw >= 0x80 else raw
def pack_8(data: int):
    """Return the low 8 bits of data as a single-element list."""
    return [int(data) & 0xFF]
def unpack_16(data: bytes) -> int:
    '''Returns the full variable from 2 bytes input.'''
    # big-endian: first byte is the high-order byte
    return (data[0] << 8) | data[1]
def pack_16(data: int):
    '''Returns a 2 part array from the full variable.'''
    # big-endian: [high byte, low byte]
    return [int((data >> 8) & 0xFF), int(data & 0xFF)]
def unpack_32(data: bytes) -> int:
    '''Returns the full variable from 4 bytes input.'''
    # big-endian: byte 0 is the most significant
    return (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3]
def pack_32(data: int):
    '''Returns a 4 part array from the full variable.'''
    # big-endian byte order, most significant first
    return [int((data >> shift) & 0xFF) for shift in (24, 16, 8, 0)]
def calculate_checksum(data: bytearray):
    """Return the low byte of the sum of all bytes in data."""
    return sum(data) % 256
def ensure_iter(l):
    """Return l unchanged if it is a non-string iterable; otherwise wrap it
    in a single-element list. Strings are deliberately treated as scalars
    (never iterated character-wise)."""
    # Fix: use isinstance instead of exact type comparison so str subclasses
    # are also treated as scalars.
    if isinstance(l, str) or not hasattr(l, '__iter__'):
        l = [l]
    return l
class SampleHistory:
    """Time-ordered history of integer samples, guarded by a re-entrant lock.

    Consecutive runs of the same value are compressed: once two trailing
    entries share a value, further equal samples just extend the last
    entry's timestamp instead of appending duplicates.
    """
    def __init__(self):
        # timestamps (ms)
        self._times: List[int] = []
        self._values: List[int] = []
        self.lock = gevent.lock.RLock()
    def __len__(self):
        assert len(self._times) == len(self._values)
        return len(self._times)
    def append(self, ts: int, value: int):
        """Record a sample taken at 'ts' (assumed non-decreasing; not enforced)."""
        with self.lock:
            n = len(self._times)
            # if previous two entries have same value then just extend time on last entry
            if n >= 2 and self._values[n-1] == value and self._values[n-2] == value:
                self._times[n-1] = ts
            else:
                self._times.append(ts)
                self._values.append(value)
    def merge(self, new_entries: List[Tuple[int, int]]):
        """Insert (ts, value) pairs preserving timestamp order; an entry with a
        timestamp already present overwrites that entry's value."""
        with self.lock:
            for ts_value in new_entries:
                idx = bisect.bisect_left(self._times, ts_value[0])
                if idx < len(self._times) and ts_value[0] == self._times[idx]:
                    # replace existing value
                    self._values[idx] = ts_value[1]
                else:
                    self._times.insert(idx, ts_value[0])
                    self._values.insert(idx, ts_value[1])
    def set(self, times, values):
        """Replace the whole history (the given lists are adopted, not copied)."""
        if len(times) != len(values):
            raise ValueError("History time and value lists must have the same length")
        with self.lock:
            self._times = times
            self._values = values
    def get(self):
        """Return shallow copies of the (times, values) lists."""
        with self.lock:
            return self._times.copy(), self._values.copy()
    def prune(self, keep_after):
        """Drop all entries with timestamp < 'keep_after'."""
        with self.lock:
            prune_idx = bisect.bisect_left(self._times, keep_after)
            del self._values[:prune_idx]
            del self._times[:prune_idx]
class ExtremumFilter:
    """Filters a stream of (t, x) samples down to its turning points.

    Each filter() call reports the *previous* sample: (prev_t, prev_x) if it
    was a local extremum or inflexion point, otherwise (prev_t, None).
    The very first call yields (None, None).
    """
    def __init__(self):
        self.prev_t = None
        self.prev_x = None
        self.delta = None

    def filter(self, t: int, x: int):
        '''Includes inflexion points'''
        prior_t, prior_x = self.prev_t, self.prev_x
        if prior_x is None:
            # first sample: nothing to compare against yet
            slope = 0
            emit_x = prior_x
        else:
            slope = x - prior_x
            rising_ended = self.delta > 0 and slope <= 0
            falling_ended = self.delta < 0 and slope >= 0
            plateau_ended = self.delta == 0 and slope != 0
            emit_x = prior_x if (rising_ended or falling_ended or plateau_ended) else None
        self.prev_t = t
        self.prev_x = x
        self.delta = slope
        return prior_t, emit_x
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,098
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/cluster/ClusterNodeSet.py
|
# ClusterNodeSet: Manages a set of secondary nodes
import logging
import gevent
import json
import socketio
from rh.app.RHRace import RaceStatus
from rh.events.eventmanager import Evt
from rh.util.RunningMedian import RunningMedian
from rh.util import Averager, ms_counter, millis_to_secs
logger = logging.getLogger(__name__)
class SecondaryNode:
    """Proxy for a secondary (split or mirror) timer in the cluster.

    Maintains the socketio connection to the secondary, sends heartbeat
    queries, tracks latency/clock-difference statistics, and forwards
    pass records back to the server.
    """
    SPLIT_MODE = 'split'    # secondary reports split (intermediate) crossings
    MIRROR_MODE = 'mirror'  # secondary mirrors this timer's events (e.g. LEDs)
    LATENCY_AVG_SIZE = 30          # samples kept in the network-latency running average
    TIMEDIFF_MEDIAN_SIZE = 30      # samples kept in the clock-difference running median
    TIMEDIFF_CORRECTION_THRESH_MS = 250 # correct split times if secondary clock more off than this
    def __init__(self, idVal, info, RACE, getCurrentProfile, \
                 split_record_callback, join_cluster_callback,
                 PROGRAM_START, \
                 emit_cluster_connect_change, server_release_version):
        """Create the secondary-node proxy and spawn its connection worker.

        idVal: zero-based index of this secondary
        info: config dict ('address' required; 'mode', 'timeout',
              'queryInterval', 'recEventsFlag' optional)
        RACE: shared race-state object
        getCurrentProfile: callable returning the active frequency profile
        split_record_callback / join_cluster_callback: server callbacks
        PROGRAM_START: MonotonicEpochSync for clock conversions (may be falsy)
        emit_cluster_connect_change: UI notifier for connect/disconnect (may be falsy)
        server_release_version: this server's version, compared with the secondary's
        """
        self.id = idVal
        self.info = info
        self.RACE = RACE
        self.getCurrentProfile = getCurrentProfile
        self.split_record_callback = split_record_callback
        self.join_cluster_callback = join_cluster_callback
        self.PROGRAM_START = PROGRAM_START
        self.emit_cluster_connect_change = emit_cluster_connect_change
        self.server_release_version = server_release_version
        self.address = info['address']
        # node-manager mapping reported by the secondary on join
        self.node_managers = {}
        self.isMirrorMode = (str(info.get('mode', SecondaryNode.SPLIT_MODE)) == SecondaryNode.MIRROR_MODE)
        self.secondaryModeStr = SecondaryNode.MIRROR_MODE if self.isMirrorMode else SecondaryNode.SPLIT_MODE
        self.recEventsFlag = info.get('recEventsFlag', self.isMirrorMode)
        # heartbeat-query interval (ms); defaults to 10s, first query sent sooner
        self.queryInterval_ms = 1000*info['queryInterval'] if 'queryInterval' in info else 0
        if self.queryInterval_ms <= 0:
            self.queryInterval_ms = 10000
        self.firstQueryInterval_ms = 3000 if self.queryInterval_ms >= 3000 else 1000
        self.startConnectTime_ms = 0
        self.lastContactTime_ms = -1  # <= 0 means "not connected"
        self.firstContactTime_ms = 0
        self.lastCheckQueryTime_ms = 0
        self.msSinceDisconnect = 0
        self.freqsSentFlag = False
        self.numDisconnects = 0
        self.numDisconnsDuringRace = 0
        self.numContacts = 0
        self.latencyAveragerObj = Averager(self.LATENCY_AVG_SIZE)
        self.totalUpTimeSecs = 0
        self.totalDownTimeSecs = 0
        self.timeDiffMedianObj = RunningMedian(self.TIMEDIFF_MEDIAN_SIZE)
        self.timeDiffMedianMs = 0
        self.timeCorrectionMs = 0
        self.progStartEpoch = 0
        self.runningFlag = True
        # reconnection is handled by our worker thread, not by socketio itself
        self.sio = socketio.Client(reconnection=False, request_timeout=1)
        self.sio.on('connect', self.on_connect)
        self.sio.on('disconnect', self.on_disconnect)
        self.sio.on('pass_record', self.on_pass_record)
        self.sio.on('check_secondary_response', self.on_check_secondary_response)
        self.sio.on('join_cluster_response', self.join_cluster_response)
        gevent.spawn(self.secondary_worker_thread)
    def secondary_worker_thread(self):
        """Background greenlet: keeps the connection to the secondary alive and
        sends periodic heartbeat ('check_secondary_query') messages.

        Loops once per second while 'runningFlag' is set; reconnects when
        disconnected and gives up only if a never-connected secondary exceeds
        its configured timeout.
        """
        self.startConnectTime_ms = ms_counter()
        gevent.sleep(0.1)
        while self.runningFlag:
            try:
                gevent.sleep(1)
                if self.lastContactTime_ms <= 0: # if current status is not connected
                    oldMsSinceDis = self.msSinceDisconnect
                    self.msSinceDisconnect = ms_counter() - self.startConnectTime_ms
                    # NOTE(review): value is in ms, so '>= 1.0' (rather than >= 1000)
                    # means effectively no wait — confirm intended threshold
                    if self.msSinceDisconnect >= 1.0: # if disconnect just happened then wait a second before reconnect
                        # if never connected then only retry if race not in progress
                        if self.numDisconnects > 0 or (self.RACE.race_status != RaceStatus.STAGING and \
                                                       self.RACE.race_status != RaceStatus.RACING):
                            # if first-ever attempt or was previously connected then show log msg
                            if oldMsSinceDis == 0 or self.numDisconnects > 0:
                                # NOTE(review): 'timeout' is compared against ms here — confirm units
                                logger.log((logging.INFO if self.msSinceDisconnect <= self.info['timeout'] else logging.DEBUG), \
                                           "Attempting to connect to secondary {0} at {1}...".format(self.id+1, self.address))
                            try:
                                self.sio.connect(self.address)
                            except socketio.exceptions.ConnectionError as ex:
                                if self.lastContactTime_ms > 0: # if current status is connected
                                    logger.info("Error connecting to secondary {0} at {1}: {2}".format(self.id+1, self.address, ex))
                                    if not self.sio.connected: # if not connected then
                                        self.on_disconnect() # invoke disconnect function to update status
                                else:
                                    err_msg = "Unable to connect to secondary {0} at {1}: {2}".format(self.id+1, self.address, ex)
                                    if ms_counter() <= self.startConnectTime_ms + self.info['timeout']:
                                        if self.numDisconnects > 0: # if previously connected then always log failure
                                            logger.info(err_msg)
                                        elif oldMsSinceDis == 0: # if not previously connected then only log once
                                            err_msg += " (will continue attempts until timeout)"
                                            logger.info(err_msg)
                                    else: # if beyond timeout period
                                        if self.numDisconnects > 0: # if was previously connected then keep trying
                                            logger.debug(err_msg) # log at debug level and
                                            gevent.sleep(29) # increase delay between attempts
                                        else:
                                            logger.warning(err_msg) # if never connected then give up
                                            logger.warning("Reached timeout; no longer trying to connect to secondary {0} at {1}".\
                                                format(self.id+1, self.address))
                                            if self.runningFlag and self.emit_cluster_connect_change:
                                                self.emit_cluster_connect_change(False) # play one disconnect tone
                                            return # exit worker thread
                else: # if current status is connected
                    now_ms = ms_counter()
                    if not self.freqsSentFlag:
                        # first pass after (re)connect: push node frequencies to split secondaries
                        try:
                            self.freqsSentFlag = True
                            if (not self.isMirrorMode) and self.getCurrentProfile:
                                logger.info("Sending node frequencies to secondary {0} at {1}".format(self.id+1, self.address))
                                for idx, freq in enumerate(json.loads(self.getCurrentProfile().frequencies)["f"]):
                                    data = { 'node':idx, 'frequency':freq }
                                    self.emit('set_frequency', data)
                                    gevent.sleep(0.001)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except Exception as ex:
                            logger.error("Error sending node frequencies to secondary {0} at {1}: {2}".format(self.id+1, self.address, ex))
                    else:
                        try:
                            if self.sio.connected:
                                # send heartbeat-query every 'queryInterval' seconds, or that long since last contact
                                if (now_ms > self.lastContactTime_ms + self.queryInterval_ms and \
                                    now_ms > self.lastCheckQueryTime_ms + self.queryInterval_ms) or \
                                    (self.lastCheckQueryTime_ms == 0 and \
                                     now_ms > self.lastContactTime_ms + self.firstQueryInterval_ms): # if first query do it sooner
                                    self.lastCheckQueryTime_ms = now_ms
                                    # timestamp not actually used by secondary, but send to make query and response symmetrical
                                    payload = {
                                        'timestamp': self.PROGRAM_START.monotonic_to_epoch_millis(now_ms) \
                                                     if self.PROGRAM_START else 0
                                    }
                                    # don't update 'lastContactTime' value until response received
                                    self.sio.emit('check_secondary_query', payload)
                                # if there was no response to last query then disconnect (and reconnect next loop)
                                elif self.lastCheckQueryTime_ms > self.lastContactTime_ms:
                                    if self.lastCheckQueryTime_ms - self.lastContactTime_ms > 3900:
                                        if len(self.timeDiffMedianObj.sorted_) > 0:
                                            logger.warning("Disconnecting after no response for 'check_secondary_query'" \
                                                           " received for secondary {0} at {1}".format(self.id+1, self.address))
                                            # calling 'disconnect()' will usually invoke 'on_disconnect()', but
                                            # 'disconnect()' can be slow to return, so force-update status if needed
                                            gevent.spawn(self.do_sio_disconnect)
                                            if self.wait_for_sio_disconnect(1.0):
                                                logger.info("Forcing 'disconnected' status for stuck connection on" \
                                                            " secondary {0} at {1}".format(self.id+1, self.address))
                                                self.on_disconnect()
                                        else: # if never any responses then may be old server version on secondary timer
                                            logger.warning("No response for 'check_secondary_query'" \
                                                           " received for secondary {0} at {1} (may need upgrade)".\
                                                           format(self.id+1, self.address))
                                            self.lastCheckQueryTime_ms = self.lastContactTime_ms = now_ms
                                    else:
                                        logger.debug("No response for 'check_secondary_query' received "\
                                                     "after {0:.1f} secs for secondary {1} at {2}".\
                                                     format(self.lastCheckQueryTime_ms - self.lastContactTime_ms, \
                                                            self.id+1, self.address))
                            else:
                                logger.info("Invoking 'on_disconnect()' fn for secondary {0} at {1}".\
                                    format(self.id+1, self.address))
                                self.on_disconnect()
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except Exception as ex:
                            logger.error("Error sending check-query to secondary {0} at {1}: {2}".format(self.id+1, self.address, ex))
            except KeyboardInterrupt:
                logger.info("SecondaryNode worker thread terminated by keyboard interrupt")
                raise
            except SystemExit:
                raise
            except Exception as ex:
                if type(ex) is ValueError and "connected" in str(ex): # if error was because already connected
                    if self.lastContactTime_ms <= 0: # if current tracked status is not connected
                        logger.debug("Ignoring connect error from sio-already-connected on secondary {0}".\
                            format(self.id+1))
                    else:
                        logger.info("Forcing 'disconnected' status after sio-already-connected error on" \
                            " secondary {0}".format(self.id+1))
                        self.on_disconnect()
                else:
                    logger.exception("Exception in SecondaryNode worker thread for secondary {} (sio.conn={})".\
                        format(self.id+1, self.sio.connected))
                    gevent.sleep(9)
def emit(self, event, data = None):
try:
if self.lastContactTime_ms > 0:
self.sio.emit(event, data)
self.lastContactTime_ms = ms_counter()
self.numContacts += 1
elif self.numDisconnects > 0: # only warn if previously connected
logger.warning("Unable to emit to disconnected secondary {0} at {1}, event='{2}'".\
format(self.id+1, self.address, event))
except Exception:
logger.exception("Error emitting to secondary {0} at {1}, event='{2}'".\
format(self.id+1, self.address, event))
if self.sio.connected:
logger.warning("Disconnecting after error emitting to secondary {0} at {1}".\
format(self.id+1, self.address))
self.sio.disconnect()
    def on_connect(self):
        """socketio handler: mark connected, send the join request, and resync
        race state; duplicate connect events just refresh the contact time."""
        try:
            if self.lastContactTime_ms <= 0:
                self.lastContactTime_ms = ms_counter()
                self.firstContactTime_ms = self.lastContactTime_ms
                if self.numDisconnects <= 0:
                    logger.info("Connected to secondary {0} at {1} (mode: {2})".format(\
                        self.id+1, self.address, self.secondaryModeStr))
                else:
                    # reconnect: fold the outage into the downtime total
                    downSecs = round(millis_to_secs(self.lastContactTime_ms - self.startConnectTime_ms)) if self.startConnectTime_ms > 0 else 0
                    logger.info("Reconnected to " + self.get_log_str(downSecs, False))
                    self.totalDownTimeSecs += downSecs
                payload = {
                    'mode': self.secondaryModeStr
                }
                self.emit('join_cluster_ex', payload)
                # if a race is underway, make sure the split secondary is also running
                if (not self.isMirrorMode) and \
                    (self.RACE.race_status == RaceStatus.STAGING or self.RACE.race_status == RaceStatus.RACING):
                    self.emit('stage_race') # if race in progress then make sure running on secondary
                if self.runningFlag and self.emit_cluster_connect_change:
                    self.emit_cluster_connect_change(True)
            else:
                self.lastContactTime_ms = ms_counter()
                logger.debug("Received extra 'on_connect' event for secondary {0} at {1}".format(self.id+1, self.address))
        except Exception:
            logger.exception("Error handling Cluster 'on_connect' for secondary {0} at {1}".\
                format(self.id+1, self.address))
    def on_disconnect(self):
        """socketio handler: mark disconnected and update uptime/disconnect
        statistics; ignores duplicate disconnect events."""
        try:
            if self.lastContactTime_ms > 0:
                self.startConnectTime_ms = ms_counter()
                self.lastContactTime_ms = -1  # flag as disconnected
                self.numDisconnects += 1
                self.numDisconnsDuringRace += 1
                # fold the completed connection interval into the uptime total
                upSecs = round(millis_to_secs(self.startConnectTime_ms - self.firstContactTime_ms)) if self.firstContactTime_ms > 0 else 0
                logger.warning("Disconnected from " + self.get_log_str(upSecs))
                self.totalUpTimeSecs += upSecs
                if self.runningFlag and self.emit_cluster_connect_change:
                    self.emit_cluster_connect_change(False)
            else:
                logger.debug("Received extra 'on_disconnect' event for secondary {0} at {1}".format(self.id+1, self.address))
        except Exception:
            logger.exception("Error handling Cluster 'on_disconnect' for secondary {0} at {1}".\
                format(self.id+1, self.address))
def do_sio_disconnect(self):
try:
if self.sio.connected:
self.sio.disconnect()
logger.debug("Returned from 'sio.disconnect()' call for secondary {0} at {1}".\
format(self.id+1, self.address))
except Exception:
logger.exception("Error calling 'sio.disconnect()' for secondary {0} at {1}".\
format(self.id+1, self.address))
def wait_for_sio_disconnect(self, maxWaitSecs):
dly = maxWaitSecs / 10
cnt = 10 if dly > 0 else 0
while True:
if not self.sio.connected:
return False
if cnt <= 0:
return True
cnt -= 1
gevent.sleep(dly)
    def get_log_str(self, timeSecs=None, upTimeFlag=True, stoppedRaceFlag=False):
        """Build a one-line status/statistics summary for log messages.

        timeSecs: interval (secs) to fold into the totals; defaults to the
                  time since first contact when currently connected, else 0
        upTimeFlag: True counts 'timeSecs' as uptime, False as downtime
        stoppedRaceFlag: True forces inclusion of the disconnects-during-race count
        """
        if timeSecs is None:
            timeSecs = round(millis_to_secs(ms_counter() - self.firstContactTime_ms)) if self.lastContactTime_ms > 0 else 0
        totUpSecs = self.totalUpTimeSecs
        totDownSecs = self.totalDownTimeSecs
        if upTimeFlag:
            totUpSecs += timeSecs
            upDownStr = "upTime"
        else:
            totDownSecs += timeSecs
            upDownStr = "downTime"
        upDownTotal = totUpSecs + totDownSecs
        # 'avail' is the uptime fraction; race-disconnect count only shown when relevant
        return "secondary {0} at {1} (latency: min={2} avg={3} max={4} last={5} ms, disconns={6}, contacts={7}, " \
               "timeDiff={8}ms, {9}={10}, totalUp={11}, totalDown={12}, avail={13:.1%}{14})".\
               format(self.id+1, self.address, self.latencyAveragerObj.min, \
                      int(round(self.latencyAveragerObj.mean)), self.latencyAveragerObj.max, \
                      self.latencyAveragerObj.last, self.numDisconnects, self.numContacts, \
                      self.timeDiffMedianMs, upDownStr, timeSecs, totUpSecs, totDownSecs, \
                      (float(totUpSecs)/upDownTotal if upDownTotal > 0 else 0),
                      ((", numDisconnsDuringRace=" + str(self.numDisconnsDuringRace)) if \
                       (self.numDisconnsDuringRace > 0 and \
                        (stoppedRaceFlag or self.RACE.race_status == RaceStatus.RACING)) else ""))
def _lookup_node(self, node_index):
for nm, ns in self.node_managers.items():
for n, n_idx in ns.items():
if n_idx == node_index:
return nm, n
return None
    def on_pass_record(self, data):
        """socketio handler: process a pass/split record reported by the
        secondary and acknowledge it back."""
        try:
            self.lastContactTime_ms = ms_counter()
            self.numContacts += 1
            # if secondary-timer clock was detected as not synchronized then apply correction
            if self.timeCorrectionMs != 0:
                data['timestamp'] -= self.timeCorrectionMs
            node_index = data['node']
            ts = data['timestamp']
            # NOTE(review): _lookup_node may return None for an unmapped node,
            # which would raise here and be caught/logged below — confirm intent
            nm, n = self._lookup_node(node_index)
            # convert split timestamp (epoch ms since 1970-01-01) to ms relative to race start
            split_ts = ts - self.RACE.start_time_epoch_ms
            self.split_record_callback(self.address, nm, n, split_ts)
        except Exception:
            logger.exception("Error processing pass record from secondary {0} at {1}".format(self.id+1, self.address))
        try:
            # send message-ack back to secondary (but don't update 'lastContactTime' value)
            payload = {
                'messageType': 'pass_record',
                'messagePayload': data
            }
            self.sio.emit('cluster_message_ack', payload)
        except Exception:
            logger.exception("Error sending pass-record message acknowledgement to secondary {0} at {1}".\
                format(self.id+1, self.address))
    def on_check_secondary_response(self, data):
        """socketio handler: heartbeat-query response; updates the latency
        running average and the secondary-clock-difference running median."""
        try:
            if self.lastContactTime_ms > 0:
                now_ms = ms_counter()
                self.lastContactTime_ms = now_ms
                self.numContacts += 1
                # round-trip time of the heartbeat query
                transitTime = now_ms - self.lastCheckQueryTime_ms if self.lastCheckQueryTime_ms > 0 else 0
                if transitTime > 0:
                    self.latencyAveragerObj.append(transitTime)
                    if data:
                        secondaryTimestamp = data.get('timestamp', 0)
                        if secondaryTimestamp:
                            # calculate local-time value midway between before and after network query
                            localTimestamp = self.PROGRAM_START.monotonic_to_epoch_millis(\
                                (self.lastCheckQueryTime_ms + transitTime/2)) \
                                if self.PROGRAM_START else 0
                            # calculate clock-time difference in ms and add to running median
                            self.timeDiffMedianObj.insert(int(round(secondaryTimestamp - localTimestamp)))
                            self.timeDiffMedianMs = self.timeDiffMedianObj.median()
                            return
                    logger.debug("Received check_secondary_response with no timestamp from secondary {0} at {1}".\
                        format(self.id+1, self.address))
            else:
                logger.debug("Received check_secondary_response while disconnected from secondary {0} at {1}".\
                    format(self.id+1, self.address))
        except Exception:
            logger.exception("Error processing check-response from secondary {0} at {1}".\
                format(self.id+1, self.address))
def join_cluster_response(self, data):
    """Process the join-cluster response received from this secondary timer.

    Records the secondary's node managers, tracks its reported program-start
    epoch (a changed value implies the secondary restarted, so the running
    clock-difference median is reset), warns on program-version mismatch, and
    acknowledges the message so the secondary knows to expect future acks.
    """
    self.node_managers = data['node_managers']
    self.join_cluster_callback(self.address, self.node_managers)
    try:
        infoStr = data.get('server_info')
        logger.debug("Server info from secondary {0} at {1}: {2}".\
                     format(self.id+1, self.address, infoStr))
        infoDict = json.loads(infoStr)
        prgStrtEpchStr = infoDict.get('prog_start_epoch')
        newPrgStrtEpch = False
        try:
            prgStrtEpch = int(float(prgStrtEpchStr))
            if self.progStartEpoch == 0:
                # first epoch value seen for this secondary
                self.progStartEpoch = prgStrtEpch
                newPrgStrtEpch = True
                logger.debug("Initial 'prog_start_epoch' value for secondary {0}: {1}".\
                             format(self.id+1, prgStrtEpch))
            elif prgStrtEpch != self.progStartEpoch:
                # changed epoch implies the secondary restarted; stale clock-diff samples no longer apply
                self.progStartEpoch = prgStrtEpch
                newPrgStrtEpch = True
                logger.info("New 'prog_start_epoch' value for secondary {0}: {1}; resetting 'timeDiff' median".\
                            format(self.id+1, prgStrtEpch))
                self.timeDiffMedianObj = RunningMedian(self.TIMEDIFF_MEDIAN_SIZE)
        except (ValueError, TypeError) as ex:
            # TypeError covers a missing 'prog_start_epoch' entry (.get() returned
            # None, which float() rejects); previously only ValueError was caught
            logger.warning("Error parsing 'prog_start_epoch' value from secondary {0}: {1}".\
                           format(self.id+1, ex))
        # if first time connecting (or possible secondary restart) then check/warn about program version
        if newPrgStrtEpch or self.numDisconnects == 0:
            secondaryVerStr = infoDict.get('release_version')
            if secondaryVerStr:
                if secondaryVerStr != self.server_release_version:
                    logger.warning("Different program version ('{0}') running on secondary {1} at {2}".\
                                   format(secondaryVerStr, self.id+1, self.address))
            else:
                logger.warning("Unable to parse 'release_version' from secondary {0} at {1}".\
                               format(self.id+1, self.address))
    except Exception:
        logger.exception("Error processing join-cluster response from secondary {0} at {1}".\
                         format(self.id+1, self.address))
    try:
        # send message-ack back to secondary (but don't update 'lastContactTime' value)
        # this tells secondary timer to expect future message-acks in response to 'pass_record' emits
        payload = { 'messageType': 'join_cluster_response' }
        self.sio.emit('cluster_message_ack', payload)
    except Exception:
        logger.exception("Error sending join-cluster message acknowledgement to secondary {0} at {1}".\
                         format(self.id+1, self.address))
class ClusterNodeSet:
    '''Holds all configured secondary (mirror/split) timers and fans messages out to them.'''

    def __init__(self, Language, eventmanager):
        '''
        Language: translation object (provides __())
        eventmanager: event manager used to repeat events to secondaries
        '''
        self._Language = Language
        self.secondaries = []           # all secondary timers
        self.splitSecondaries = []      # the subset acting as split timers
        self.recEventsSecondaries = []  # the subset receiving repeated events
        self.Events = eventmanager

    def init_repeater(self):
        '''Install an all-events hook that repeats events to interested secondaries.'''
        self.Events.on(Evt.ALL, 'cluster', self.event_repeater, priority=75, unique=True)

    def event_repeater(self, args):
        '''Repeat a triggered event to all secondaries that requested event notifications.'''
        try:
            # if there are cluster timers interested in events then emit it out to them
            if self.hasRecEventsSecondaries():
                payload = { 'evt_name': args['_eventName'] }
                del args['_eventName']
                payload['evt_args'] = json.dumps(args, default=lambda x: '<not serializiable>')
                self.emitEventTrigger(payload)
        except Exception as ex:
            # bug fix: concatenating the exception object itself ("..." + ex)
            # raised TypeError inside the handler; convert to str first
            logger.exception("Exception in 'Events.trigger()': " + str(ex))

    def shutdown(self):
        '''Signal all secondary worker loops to stop.'''
        for secondary in self.secondaries:
            secondary.runningFlag = False

    def addSecondary(self, secondary):
        '''Track a secondary, also indexing it by mode (split) and event interest.'''
        self.secondaries.append(secondary)
        if not secondary.isMirrorMode:
            self.splitSecondaries.append(secondary)
        if secondary.recEventsFlag:
            self.recEventsSecondaries.append(secondary)

    def hasSecondaries(self):
        return (len(self.secondaries) > 0)

    def hasRecEventsSecondaries(self):
        return (len(self.recEventsSecondaries) > 0)

    # return True if secondary is 'split' mode and is or has been connected
    def isSplitSecondaryAvailable(self, secondary_index):
        return (secondary_index < len(self.secondaries)) and \
               (not self.secondaries[secondary_index].isMirrorMode) and \
               (self.secondaries[secondary_index].lastContactTime_ms > 0 or \
                self.secondaries[secondary_index].numDisconnects > 0)

    def emit(self, event, data = None):
        '''Emit an event to every secondary (each in its own greenlet).'''
        for secondary in self.secondaries:
            gevent.spawn(secondary.emit, event, data)

    def emitToSplits(self, event, data = None):
        '''Emit an event to split-mode secondaries only.'''
        for secondary in self.splitSecondaries:
            gevent.spawn(secondary.emit, event, data)

    def emitEventTrigger(self, data = None):
        '''Forward a repeated event to secondaries that registered for events.'''
        for secondary in self.recEventsSecondaries:
            gevent.spawn(secondary.emit, 'cluster_event_trigger', data)

    def getClusterStatusInfo(self):
        '''Build the cluster-status payload (latency, clock diff, up/down time) per secondary.'''
        now_ms = ms_counter()
        payload = []
        for secondary in self.secondaries:
            upTimeSecs = round(millis_to_secs(now_ms - secondary.firstContactTime_ms)) if secondary.lastContactTime_ms > 0 else 0
            downTimeSecs = int(round(secondary.msSinceDisconnect)) if secondary.lastContactTime_ms <= 0 else 0
            totalUpSecs = secondary.totalUpTimeSecs + upTimeSecs
            totalDownSecs = secondary.totalDownTimeSecs + downTimeSecs
            payload.append(
                {'address': secondary.address,
                 'modeIndicator': ('M' if secondary.isMirrorMode else 'S'),
                 'minLatencyMs': secondary.latencyAveragerObj.min,
                 # bug fix: was 'self.latencyAveragerObj' -- ClusterNodeSet has no
                 # such attribute; the averager lives on each secondary
                 'avgLatencyMs': int(round(secondary.latencyAveragerObj.mean)),
                 'maxLatencyMs': secondary.latencyAveragerObj.max,
                 'lastLatencyMs': secondary.latencyAveragerObj.last,
                 'numDisconnects': secondary.numDisconnects,
                 'numContacts': secondary.numContacts,
                 'timeDiffMs': secondary.timeDiffMedianMs,
                 'upTimeSecs': upTimeSecs,
                 'downTimeSecs': downTimeSecs,
                 'availability': round((100.0*totalUpSecs/(totalUpSecs+totalDownSecs) \
                                 if totalUpSecs+totalDownSecs > 0 else 0), 1),
                 # NOTE(review): '>= 0' here vs '> 0' in the checks above --
                 # confirm the sentinel values used for lastContactTime_ms
                 'last_contact': int(now_ms-secondary.lastContactTime_ms) if secondary.lastContactTime_ms >= 0 else \
                                 (self.__("connection lost") if secondary.numDisconnects > 0 else self.__("never connected"))
                })
        return {'secondaries': payload}

    def doClusterRaceStart(self):
        '''At race start: log connection state and latch clock corrections per secondary.'''
        for secondary in self.secondaries:
            secondary.numDisconnsDuringRace = 0
            if secondary.lastContactTime_ms > 0:
                logger.info("Connected at race start to " + secondary.get_log_str())
                if abs(secondary.timeDiffMedianMs) > SecondaryNode.TIMEDIFF_CORRECTION_THRESH_MS:
                    # clock skew beyond threshold: correct incoming timestamps by the median diff
                    secondary.timeCorrectionMs = secondary.timeDiffMedianMs
                    logger.info("Secondary {0} clock not synchronized with primary, timeDiff={1}ms".\
                                format(secondary.id+1, secondary.timeDiffMedianMs))
                else:
                    secondary.timeCorrectionMs = 0
                    logger.debug("Secondary {0} clock synchronized OK with primary, timeDiff={1}ms".\
                                 format(secondary.id+1, secondary.timeDiffMedianMs))
            elif secondary.numDisconnects > 0:
                logger.warning("Secondary {0} not connected at race start".format(secondary.id+1))

    def doClusterRaceStop(self):
        '''At race stop: log connection state for each secondary.'''
        for secondary in self.secondaries:
            if secondary.lastContactTime_ms > 0:
                logger.info("Connected at race stop to " + secondary.get_log_str(stoppedRaceFlag=True))
            elif secondary.numDisconnects > 0:
                logger.warning("Not connected at race stop to " + secondary.get_log_str(stoppedRaceFlag=True))

    def __(self, *args, **kwargs):
        '''Translate text via the configured Language object.'''
        return self._Language.__(*args, **kwargs)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,099
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/data_export/data_export_json.py
|
'''JSON data exporter'''
import logging
logger = logging.getLogger(__name__)
from rh.util import RHUtils
import json
from sqlalchemy.ext.declarative import DeclarativeMeta
from . import DataExporter
def write_json(data):
    '''Render *data* as tab-indented JSON and wrap it with output metadata.'''
    return {
        'data': json.dumps(data, indent='\t', cls=AlchemyEncoder),
        'encoding': 'application/json',
        'ext': 'json',
    }
def assemble_all(RHData, PageCache, Language):
    '''Collect every friendly-format section into a single export payload.'''
    sections = (
        ('Pilots', assemble_pilots),
        ('Heats', assemble_heats),
        ('Classes', assemble_classes),
        ('Formats', assemble_formats),
        ('Results', assemble_results),
    )
    return {name: build(RHData, PageCache, Language) for name, build in sections}
def assemble_pilots(RHData, PageCache, Language):
    '''Export each pilot's callsign, name, and team.'''
    return [
        {
            'Callsign': entry.callsign,
            'Name': entry.name,
            'Team': entry.team,
        }
        for entry in RHData.get_pilots()
    ]
def assemble_heats(RHData, PageCache, Language):
    '''Export heats keyed by heat id: name, class name, and per-node pilot callsigns.'''
    payload = {}
    for heat in RHData.get_heats():
        # expand the class id to its name (None when unassigned)
        race_class = (RHData.get_raceClass(heat.class_id).name
                      if heat.class_id != RHUtils.CLASS_ID_NONE else None)
        heatnodes = RHData.get_heatNodes(filter_by={'heat_id': heat.id})
        # map each node index to its pilot's callsign (None for empty slots)
        pilots = {
            heatnode.node_index: (RHData.get_pilot(heatnode.pilot_id).callsign
                                  if heatnode.pilot_id != RHUtils.PILOT_ID_NONE else None)
            for heatnode in heatnodes
        }
        payload[heat.id] = {
            'Name': heat.note,
            'Class': race_class,
            'Pilots': pilots,
        }
    return payload
def assemble_classes(RHData, PageCache, Language):
    '''Export race classes with the race-format id expanded to its name.'''
    payload = []
    for race_class in RHData.get_raceClasses():
        # expand format id to name (None when no format is assigned)
        format_name = (RHData.get_raceFormat(race_class.format_id).name
                       if race_class.format_id else None)
        payload.append({
            'Name': race_class.name,
            'Description': race_class.description,
            'Race Format': format_name,
        })
    return payload
def assemble_formats(RHData, PageCache, Language):
    '''Export race formats with enumerated fields expanded to translated display names.

    The lookup lists below are indexed by the integer codes stored on each
    RaceFormat record, so their order must match those encodings.
    '''
    timer_modes = [
        Language.__('Fixed Time'),
        Language.__('No Time Limit'),
    ]
    tones = [
        Language.__('None'),
        Language.__('One'),
        Language.__('Each Second')
    ]
    win_conditions = [
        Language.__('None'),
        Language.__('Most Laps in Fastest Time'),
        Language.__('First to X Laps'),
        Language.__('Fastest Lap'),
        Language.__('Fastest 3 Consecutive Laps'),
        Language.__('Most Laps Only'),
        Language.__('Most Laps Only with Overtime')
    ]
    start_behaviors = [
        Language.__('Hole Shot'),
        Language.__('First Lap'),
        Language.__('Staggered Start'),
    ]
    formats = RHData.get_raceFormats()
    payload = []
    for race_format in formats:
        payload.append({
            'Name': race_format.name,
            'Mode': timer_modes[race_format.race_mode],
            'Duration (seconds)': race_format.race_time_sec,
            'Minimum Start Delay': race_format.start_delay_min,
            'Maximum Start Delay': race_format.start_delay_max,
            'Staging Tones': tones[race_format.staging_tones],
            # bug fix: the win_conditions list was built but never used; expand
            # the stored index to its translated name like the other enumerations
            'Win Condition': win_conditions[race_format.win_condition],
            'Laps to Win': race_format.number_laps_win,
            'Team Racing': race_format.team_racing_mode,
            'First Crossing': start_behaviors[race_format.start_behavior],
        })
    return payload
def assemble_results(RHData, PageCache, Language):
    '''Export the cached results page data.'''
    # TODO: Make results friendly
    return PageCache.get_cache()
def assemble_complete(RHData, PageCache, Language):
    '''Collect every raw (complete) section into a single export payload.'''
    sections = (
        ('Pilot', assemble_pilots_complete),
        ('Heat', assemble_heats_complete),
        ('HeatNode', assemble_heatnodes_complete),
        ('RaceClass', assemble_classes_complete),
        ('RaceFormat', assemble_formats_complete),
        ('SavedRaceMeta', assemble_racemeta_complete),
        ('SavedPilotRace', assemble_pilotrace_complete),
        ('SavedRaceLap', assemble_racelap_complete),
        ('SavedRaceLapSplit', assemble_split_complete),
        ('Profiles', assemble_profiles_complete),
        ('GlobalSettings', assemble_settings_complete),
    )
    return {name: build(RHData, PageCache, Language) for name, build in sections}
def assemble_results_raw(RHData, PageCache, Language):
    '''Export the cached results page data without further processing.'''
    return PageCache.get_cache()
def assemble_pilots_complete(RHData, PageCache, Language):
    '''Export the raw Pilot records.'''
    return RHData.get_pilots()

def assemble_heats_complete(RHData, PageCache, Language):
    '''Export the raw Heat records.'''
    return RHData.get_heats()

def assemble_heatnodes_complete(RHData, PageCache, Language):
    '''Export the raw HeatNode records.'''
    return RHData.get_heatNodes()

def assemble_classes_complete(RHData, PageCache, Language):
    '''Export the raw RaceClass records.'''
    return RHData.get_raceClasses()

def assemble_formats_complete(RHData, PageCache, Language):
    '''Export the raw RaceFormat records.'''
    return RHData.get_raceFormats()

def assemble_split_complete(RHData, PageCache, Language):
    '''Export the raw lap-split records.'''
    return RHData.get_lapSplits()

def assemble_racemeta_complete(RHData, PageCache, Language):
    '''Export the raw SavedRaceMeta records.'''
    return RHData.get_savedRaceMetas()

def assemble_pilotrace_complete(RHData, PageCache, Language):
    '''Export the raw SavedPilotRace records.'''
    return RHData.get_savedPilotRaces()

def assemble_racelap_complete(RHData, PageCache, Language):
    '''Export the raw SavedRaceLap records.'''
    return RHData.get_savedRaceLaps()

def assemble_profiles_complete(RHData, PageCache, Language):
    '''Export the raw frequency-profile records.'''
    return RHData.get_profiles()

def assemble_settings_complete(RHData, PageCache, Language):
    '''Export the raw global-settings records.'''
    return RHData.get_options()
class AlchemyEncoder(json.JSONEncoder):
    '''JSON encoder that flattens SQLAlchemy model instances into plain dicts.'''

    # these fields hold JSON stored as text and are re-parsed into structures
    _JSON_TEXT_FIELDS = ("frequencies", "enter_ats", "exit_ats")

    def default(self, obj):
        if not isinstance(obj.__class__, DeclarativeMeta):
            # not an SQLAlchemy class; defer to the standard encoder
            return json.JSONEncoder.default(self, obj)
        fields = {}
        for name in dir(obj):
            # skip private/internal attributes and SQLAlchemy machinery
            if name.startswith('_') or name == 'metadata':
                continue
            if name == "query" or name == "query_class":
                continue
            value = obj.__getattribute__(name)
            try:
                json.dumps(value)  # this will fail on non-encodable values, like other classes
                if name in self._JSON_TEXT_FIELDS:
                    fields[name] = json.loads(value)
                else:
                    fields[name] = value
            except TypeError:
                fields[name] = None
        # a json-encodable dict
        return fields
def discover(*args, **kwargs):
    '''Return the JSON exporters, each pairing write_json with an assembler (default args).'''
    specs = [
        ('json_pilots', 'JSON (Friendly) / Pilots', assemble_pilots),
        ('json_heats', 'JSON (Friendly) / Heats', assemble_heats),
        ('json_classes', 'JSON (Friendly) / Classes', assemble_classes),
        ('json_formats', 'JSON (Friendly) / Formats', assemble_formats),
        ('json_results', 'JSON (Friendly) / Results', assemble_results),
        ('json_all', 'JSON (Friendly) / All', assemble_all),
        ('json_complete_all', 'JSON (Complete) / All', assemble_complete),
        ('json_complete_results', 'JSON (Complete) / Results', assemble_results_raw),
    ]
    return [DataExporter(name, label, write_json, assembler)
            for name, label, assembler in specs]
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,100
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/apis/chorus_api.py
|
'''Chorus API serial endpoints.'''
import logging
import gevent
from rh.util.RHUtils import FREQS
logger = logging.getLogger(__name__)
class ChorusAPI():
    '''Serves the Chorus laptimer serial protocol on top of the RotorHazard hardware interface.

    Commands arrive as single lines on serial_io; responses are written back
    as 'S<node><cmd><value>' lines.
    '''

    def __init__(self, serial_io, hwInterface, sensors, on_start, on_stop_race, on_reset_race):
        '''
        serial_io: stream providing read_until()/write() for the Chorus link
        hwInterface: hardware interface (nodes, frequencies, RSSI)
        sensors: iterable of sensors, scanned for a voltage reading
        on_start / on_stop_race / on_reset_race: race lifecycle callbacks
        '''
        self.serial_io = serial_io
        self.INTERFACE = hwInterface
        self.SENSORS = sensors
        self.on_start = on_start
        self.on_stop_race = on_stop_race
        self.on_reset_race = on_reset_race
        self.rssi_push_interval_ms = 0  # 0 disables unsolicited RSSI pushes
        self.thread = None

    def start(self):
        '''Spawn the serial reader greenlet (no-op while already running).'''
        logger.info('Chorus API started')
        if self.thread is None:
            self.thread = gevent.spawn(self.chorus_api_thread_function)

    def stop(self):
        '''Kill the reader greenlet if running.'''
        if self.thread:
            self.thread.kill(block=True, timeout=0.5)
            self.thread = None
        logger.info('Chorus API stopped')

    def emit_pass_record(self, node, lap_number: int, lap_time_stamp: int):
        '''Push a lap pass record ('S<node>L<lap:02x><timestamp:08x>') out the serial link.'''
        self.serial_io.write("S{0}L{1:02x}{2:08x}\n".format(node.index, lap_number, lap_time_stamp).encode("UTF-8"))

    def emit_rssi(self, node_addr):
        '''Push current RSSI values for the addressed node(s) ('*' = all).'''
        self.serial_io.write(self._getRssiResponse(node_addr).encode("UTF-8"))

    def _getRssiResponse(self, node_addr):
        '''Build 'S<node>r<rssi:04x>' lines for the addressed node(s).'''
        node_data = self.INTERFACE.get_heartbeat_json()
        response = ''
        for i, rssi in enumerate(node_data['current_rssi']):
            if node_addr == '*' or int(node_addr) == i:
                response += 'S{0}r{1:04x}\n'.format(i, rssi)
        return response

    def _getVoltageResponse(self):
        '''Report the first sensor reading measured in volts, or '' when none exists.'''
        for sensor in self.SENSORS:
            for sensorReading in sensor.getReadings().values():
                if sensorReading['units'] == 'V':
                    # scale to the Chorus 0..1024 range (55V full scale)
                    return 'S{0}v{1:04x}\n'.format(0, int(sensorReading['value']*1024.0/55.0))
        return ''

    def _process_message(self, data):
        '''Decode one Chorus command line; return the response string, or None if unsupported.'''
        num_nodes = len(self.INTERFACE.nodes)
        response = None
        if data:
            if data == 'N0':
                # handshake: start and report node count
                self.on_start()
                response = 'N{0}\n'.format(num_nodes)
            elif data[0] == 'R' and len(data) >= 3:
                node_addr = data[1]  # node index digit or '*' for all
                cmd = data[2]
                is_setter = len(data) > 3
                if cmd == 'r':
                    response = self._getRssiResponse(node_addr)
                elif cmd == 't':
                    # time adjustment query (unsupported; report zero)
                    response = ''
                    for i in range(num_nodes):
                        if node_addr == '*' or int(node_addr) == i:
                            response += 'S{0}t{1:04x}\n'.format(i, 0)
                elif cmd == 'v':
                    response = self._getVoltageResponse()
                elif cmd == 'y':
                    response = ''
                    for i in range(num_nodes):
                        response += 'S{0}y0\n'.format(i)
                elif cmd == '#':
                    # API version query
                    response = ''
                    for i in range(num_nodes):
                        response += 'S{0}#0004\n'.format(i)
                elif cmd == 'B':
                    # band get/set
                    node_index = int(node_addr)
                    bandChannel = self.INTERFACE.nodes[node_index].bandChannel
                    if is_setter:
                        band = data[3]
                        chan = int(bandChannel[1]) if bandChannel is not None else 0
                        freq = self.INTERFACE.nodes[node_index].frequency
                        if chan > 0:
                            bandChannel = band + str(chan)
                            if bandChannel in FREQS:
                                freq = FREQS[bandChannel]
                                self.INTERFACE.set_frequency(node_index, freq, band, chan)
                        response = 'S{0}B{1}\nS{0}F{2:04x}\n'.format(node_index, band, freq)
                    else:
                        band = bandChannel[0] if bandChannel is not None else 0
                        # bug fix: getter response was missing its trailing newline
                        response = 'S{0}B{1}\n'.format(node_index, band)
                elif cmd == 'C':
                    # channel get/set
                    node_index = int(node_addr)
                    bandChannel = self.INTERFACE.nodes[node_index].bandChannel
                    if is_setter:
                        band = bandChannel[0] if bandChannel is not None else ''
                        chan = data[3]
                        freq = self.INTERFACE.nodes[node_index].frequency
                        if band:
                            bandChannel = band + str(chan)
                            if bandChannel in FREQS:
                                freq = FREQS[bandChannel]
                                self.INTERFACE.set_frequency(node_index, freq, band, chan)
                        response = 'S{0}C{1}\nS{0}F{2:04x}\n'.format(node_index, chan, freq)
                    else:
                        chan = int(bandChannel[1]) if bandChannel is not None else 0
                        # bug fix: getter response was missing its trailing newline
                        response = 'S{0}C{1}\n'.format(node_index, chan)
                elif cmd == 'F':
                    # frequency get/set
                    node_index = int(node_addr)
                    if is_setter:
                        freq = int(data[3:7], 16)
                        self.INTERFACE.set_frequency(node_index, freq)
                    else:
                        freq = self.INTERFACE.nodes[node_index].frequency
                    response = 'S{0}F{1:04x}\n'.format(node_index, freq)
                elif cmd == 'I':
                    # RSSI push interval get/set
                    if is_setter:
                        self.rssi_push_interval_ms = int(data[3:7], 16)
                    response = ''
                    for i in range(num_nodes):
                        response += 'S{0}I{1:04x}\n'.format(i, int(self.rssi_push_interval_ms))
                elif cmd == '1':
                    node_index = int(node_addr)
                    if is_setter:
                        flag = data[3]
                        response = 'S{0}1{1}\n'.format(node_index, flag)
                elif cmd == 'J':
                    # time adjustment (echoed only)
                    node_index = int(node_addr)
                    time_adjust = int(data[3:11], 16)
                    response = 'S{0}J{1:08x}\n'.format(node_index, time_adjust)
                elif cmd == 'M':
                    # minimum lap time (echoed only)
                    node_index = int(node_addr)
                    min_lap_time = int(data[3:5], 16)
                    response = 'S{0}M{1:02x}\n'.format(node_index, min_lap_time)
                elif cmd == 'S':
                    if is_setter:
                        flag = data[3]
                        response = ''
                        for i in range(num_nodes):
                            if node_addr == '*' or int(node_addr) == i:
                                response += 'S{0}S{1}\n'.format(i, flag)
                elif cmd == 'T':
                    # threshold: apply as both enter-at and exit-at level
                    node_index = int(node_addr)
                    level = int(data[3:7], 16)
                    self.INTERFACE.set_enter_at_level(node_index, level)
                    self.INTERFACE.set_exit_at_level(node_index, level)
                    response = 'S{0}T{1:04x}\n'.format(node_index, level)
                elif cmd == 'R':
                    # race control: '0' = stop, '2' = reset
                    if data[3] == '0':
                        self.on_stop_race()
                        response = ''
                        for i in range(num_nodes):
                            response += 'S{0}R0\n'.format(i)
                    elif data[3] == '2':
                        self.on_reset_race()
                        response = ''
                        for i in range(num_nodes):
                            response += 'S{0}R2\n'.format(i)
        elif self.rssi_push_interval_ms > 0:
            # bug fix: the interval is in milliseconds but gevent.sleep takes seconds
            gevent.sleep(self.rssi_push_interval_ms / 1000.0)
            response = self._getRssiResponse('*')
        else:
            gevent.sleep(0.1)
        return response

    def chorus_api_thread_function(self):
        '''Reader loop: one command line in, one response line (or lines) out.'''
        while True:
            # NOTE(review): read_until() likely yields bytes while _process_message
            # compares against str values -- confirm serial_io decodes to text
            data = self.serial_io.read_until()[:-1]
            response = self._process_message(data)
            if response:
                self.serial_io.write(response.encode('UTF-8'))
            elif data and response is None:
                # bug fix: logging uses %-style lazy args; '{0}' was never interpolated
                logger.info('Not yet supported: %s', data)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,101
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/Language.py
|
#
# Translation functions
#
import logging
import json
import io
logger = logging.getLogger(__name__)
class Language():
    '''Loads the translation table and translates UI text strings.'''

    LANGUAGE_FILE_NAME = 'rh/language.json'

    def __init__(self, RHData):
        '''RHData: data layer, queried for the 'currentLanguage' option.'''
        # maps language id -> {'name': display name, 'values': {source: translation}}
        self._Languages = {}
        self._RHData = RHData
        # Load language file
        try:
            with io.open(self.LANGUAGE_FILE_NAME, 'r', encoding="utf8") as f:
                self._Languages = json.load(f)
            logger.debug('Language file imported')
        except IOError:
            # a missing file is not fatal; text is then returned untranslated
            logger.warning('No language file found, using defaults')  # bug fix: warn() is deprecated
        except ValueError:
            logger.error('Language file invalid, using defaults')

    def __(self, text, lang=None):
        '''Return *text* translated into *lang* (defaults to the configured language).'''
        if not lang:
            lang = self._RHData.get_option('currentLanguage')
        if lang in self._Languages:
            # fall back to the untranslated text when no entry exists
            return self._Languages[lang]['values'].get(text, text)
        else:
            return text

    def getLanguages(self):
        '''Return a list of {'id', 'name'} dicts for the available languages.'''
        return [{'id': lang_id, 'name': lang_def['name']}
                for lang_id, lang_def in self._Languages.items()]

    def getLanguageTags(self):
        '''Return the list of available language ids.'''
        return [lang for lang in self._Languages]

    def getAllLanguages(self):
        '''Return the full language dictionary.'''
        return self._Languages
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,102
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/data_export/__init__.py
|
#
# Data export handlers
#
#
# Data exporters first collect data via their assembler function,
# then pass that data through the formatter function before output.
# PageCache is updated before export() is called and can be assumed accurate.
#
# name should be unique and acts as an identifier
# label becomes visible in the UI and becomes translatable
# formatter(data) should be used for final handling of file format
# assembler(RHData, PageCache, Language) collects relevant data from timer
# before formatting.
#
import logging
from typing import Dict
from rh.util.Plugins import Plugins
logger = logging.getLogger(__name__)
class DataExportManager():
    '''Registry of data exporters; runs a selected exporter against current timer data.'''

    def __init__(self, RHData, PageCache, Language):
        self._RHData = RHData
        self._PageCache = PageCache
        self._Language = Language
        self.exporters: Dict[str,"DataExporter"] = {}

    def discover(self, export_pkg):
        '''Find and register all exporter plugins within *export_pkg*.'''
        exporter_plugins = Plugins(prefix='data_export')
        exporter_plugins.discover(export_pkg)
        for exporter in exporter_plugins:
            self.registerExporter(exporter)

    def registerExporter(self, exporter):
        '''Register *exporter* under its name; warn when replacing an existing entry.'''
        if hasattr(exporter, 'name'):
            if exporter.name in self.exporters:
                # bug fix: exporter is an object, not a dict -- exporter['name']
                # raised TypeError; use attribute access like everywhere else
                logger.warning('Overwriting data exporter "{0}"'.format(exporter.name))
            self.exporters[exporter.name] = exporter
        else:
            logger.warning('Invalid exporter')

    def hasExporter(self, exporter_id):
        '''Return True when an exporter is registered under *exporter_id*.'''
        return exporter_id in self.exporters

    def getExporters(self):
        '''Return the mapping of exporter id -> exporter.'''
        return self.exporters

    def export(self, exporter_id):
        '''Run the named exporter against the current timer data.'''
        return self.exporters[exporter_id].export(self._RHData, self._PageCache, self._Language)
class DataExporter():
    '''Pairs an assembler (collects timer data) with a formatter (renders the output).'''

    def __init__(self, name, label, formatterFn, assemblerFn):
        '''
        name: unique identifier
        label: UI-visible (translatable) label
        formatterFn: final file-format handler, called with the assembled data
        assemblerFn: collects relevant data from the timer before formatting
        '''
        self.name = name
        self.label = label
        self.formatter = formatterFn
        self.assembler = assemblerFn

    def export(self, RHData, PageCache, Language):
        '''Assemble the data, then format it for output.'''
        return self.formatter(self.assembler(RHData, PageCache, Language))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,103
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/RHData.py
|
#
# RaceData
# Provides abstraction for database and results page caches
#
import logging
logger = logging.getLogger(__name__)
from sqlalchemy import create_engine, MetaData, Table
from datetime import datetime
import os
import shutil
import json
import jsonschema
import glob
from uuid import uuid4
from types import MappingProxyType
from typing import Any, Dict, Mapping
from rh.util import RHUtils
from rh.events.eventmanager import Evt
from .RHRace import RaceStatus, RaceMode, WinCondition, StagingTones, StartBehavior
# Stage names used when auto-assigning migrated heats to a stage.
QUALIFYING_STAGE = 'Qualifying'
MAINS_STAGE = 'Mains'
# Shared immutable default for mapping parameters (avoids the mutable-default pitfall).
EMPTY_DICT: Mapping[Any,Any] = MappingProxyType({})
def unique_id():
    """Return a fresh 32-character hexadecimal identifier."""
    return uuid4().hex
class RHData():
_OptionsCache: Dict[str,Any] = {} # Local Python cache for global settings
def __init__(self, Database, Events, RACE, SERVER_API, DB_FILE_NAME, DB_BKP_DIR_NAME, RESULTS_CACHE):
    """Capture collaborators; PageCache/Language are injected later via late_init()."""
    self._Database = Database          # SQLAlchemy models/session wrapper
    self._Events = Events              # event manager (trigger() side effects)
    self._RACE = RACE                  # current-race state object
    self._SERVER_API = SERVER_API      # server DB-API version number
    self._DB_FILE_NAME = DB_FILE_NAME
    self._DB_BKP_DIR_NAME = DB_BKP_DIR_NAME
    self._RESULTS_CACHE = RESULTS_CACHE  # per-race/heat/class/event results cache
    self._PageCache = None  # set in late_init
    self._Language = None   # set in late_init
def late_init(self, PageCache, Language):
    """Attach collaborators that are constructed after this object."""
    self._PageCache, self._Language = PageCache, Language
def __(self, *args, **kwargs):
    """Translate text by delegating to the Language service."""
    translate = self._Language.__
    return translate(*args, **kwargs)
# Integrity Checking
def check_integrity(self):
    """Sanity-check the database; False means the caller should recover it.

    Returns False when the stored DB-API version is older than the server's,
    when any essential table (Heat/Profiles/RaceFormat) is empty, or when
    Heat rows cannot be read. A newer-than-server DB only logs a warning.
    """
    try:
        if self.get_optionInt('server_api') < self._SERVER_API:
            logger.info('Old server API version; recovering database')
            return False
        if not self._Database.Heat.query.count():
            logger.info('Heats are empty; recovering database')
            return False
        if not self._Database.Profiles.query.count():
            logger.info('Profiles are empty; recovering database')
            return False
        if not self._Database.RaceFormat.query.count():
            logger.info('Formats are empty; recovering database')
            return False
        try: # make sure no problems reading 'Heat' table data
            self._Database.Heat.query.all()
        except Exception as ex:
            logger.warning('Error reading Heat data; recovering database; err: ' + str(ex))
            return False
        if self.get_optionInt('server_api') > self._SERVER_API:
            # a newer DB is tolerated (no recovery), but flagged
            logger.warning('Database API version ({}) is newer than server version ({})'.\
                format(self.get_optionInt('server_api'), self._SERVER_API))
        return True
    except Exception as ex:
        logger.error('Error checking database integrity; err: ' + str(ex))
        return False
# Caching
def primeCache(self):
    """Rebuild the in-memory options cache from the GlobalSettings table."""
    rows = self._Database.GlobalSettings.query.all()
    self._OptionsCache = {row.option_name: row.option_value for row in rows}
# General
def db_init(self, nofill=False):
    """Create all tables and populate defaults; True on success."""
    try:
        self._Database.DB.create_all()
        self.reset_all(nofill) # Fill with defaults
    except Exception as ex:
        logger.error('Error creating database', exc_info=ex)
        return False
    return True
def reset_all(self, nofill=False):
    """Reset every table to its default contents.

    nofill skips creating default rows where supported. Stages are reset
    before heats because reset_heats creates a heat in a named stage.
    """
    self.reset_pilots()
    self.reset_stages(nofill=nofill)
    self.reset_heats(nofill=nofill)
    self.clear_race_data()
    self.reset_profiles()
    self.reset_raceFormats(nofill=nofill)
    self.reset_raceClasses()
    self.reset_options()
def commit(self):
    """Commit the DB session; True on success, False (logged) on failure."""
    try:
        self._Database.DB.session.commit()
    except Exception as ex:
        logger.error('Error writing to database', exc_info=ex)
        return False
    return True
def close(self):
    """Close the DB session; True on success, False (logged) on failure."""
    try:
        self._Database.DB.session.close()
        return True
    except Exception as ex:
        # BUGFIX: message read "Error closing to database"
        logger.error('Error closing database', exc_info=ex)
        return False
# File Handling
def backup_db_file(self, copy_flag, prefix_str=''):
    """Copy (copy_flag=True) or move the DB file into the backup directory.

    The backup name carries the DB file's last-modified timestamp, falling
    back to 'now'; a second 'now' fallback avoids clobbering an existing
    backup. Returns the backup path (None if it was never determined).
    """
    self.close()
    # BUGFIX: pre-set so an exception before assignment cannot raise
    # UnboundLocalError at the return statement
    bkp_name = None
    try: # generate timestamp from last-modified time of database file
        time_str = datetime.fromtimestamp(os.stat(self._DB_FILE_NAME).st_mtime).strftime('%Y%m%d_%H%M%S')
    except Exception: # if error then use 'now' timestamp (was a bare except)
        time_str = datetime.now().strftime('%Y%m%d_%H%M%S')
    try:
        (dbname, dbext) = os.path.splitext(self._DB_FILE_NAME)
        dbname = prefix_str + dbname
        bkp_name = self._DB_BKP_DIR_NAME + '/' + dbname + '_' + time_str + dbext
        if not os.path.exists(self._DB_BKP_DIR_NAME):
            os.makedirs(self._DB_BKP_DIR_NAME)
            RHUtils.checkSetFileOwnerPi(self._DB_BKP_DIR_NAME)
        if os.path.isfile(bkp_name): # if target file exists then use 'now' timestamp
            time_str = datetime.now().strftime('%Y%m%d_%H%M%S')
            bkp_name = self._DB_BKP_DIR_NAME + '/' + dbname + '_' + time_str + dbext
        if copy_flag:
            shutil.copy2(self._DB_FILE_NAME, bkp_name)
            logger.info('Copied database file to: ' + bkp_name)
        else:
            os.renames(self._DB_FILE_NAME, bkp_name)
            logger.info('Moved old database file to: ' + bkp_name)
        RHUtils.checkSetFileOwnerPi(bkp_name)
    except Exception as ex:
        logger.exception("Error backing up database file: {}".format(ex))
    return bkp_name
def delete_old_db_autoBkp_files(self, num_keep_val, prefix_str=''):
    """Prune auto-backup DB files, keeping only the newest num_keep_val."""
    try:
        if num_keep_val <= 0:
            return
        dbname, dbext = os.path.splitext(self._DB_FILE_NAME)
        pattern = self._DB_BKP_DIR_NAME + '/' + prefix_str + dbname + '*' + dbext
        backups = [p for p in glob.glob(pattern) if os.path.isfile(p)]
        backups.sort(key=os.path.getmtime)  # oldest first
        doomed = backups[:-num_keep_val] if len(backups) > num_keep_val else []
        for del_path in doomed:
            os.remove(del_path)
        logger.info("Removed {} old DB-autoBkp file(s)".format(len(doomed)))
    except Exception as ex:
        logger.error("Error removing old DB-autoBkp files: {}".format(ex))
# Migration
def get_legacy_table_data(self, metadata, table_name, filter_crit=None, filter_value=None):
    """Read rows from a table in the legacy database.

    Returns a list of dicts (one per row), or None (logged) when the table
    cannot be read. When filter_crit is given, only rows with
    filter_crit == filter_value are returned.
    """
    try:
        table = Table(table_name, metadata, autoload=True)
        query = table.select()
        if filter_crit is not None:
            # BUGFIX: the filter must be applied in SQL via where(); the old
            # code called .filter() on an already-executed result, which has
            # no such method and raised on every filtered call
            query = query.where(filter_crit == filter_value)
        rows = query.execute().fetchall()
        return [dict(row.items()) for row in rows]
    except Exception as ex:
        logger.warning('Unable to read "{0}" table from previous database'.format(table_name), exc_info=ex)
def restore_table(self, class_type, table_query_data, defaults=EMPTY_DICT):
    """Restore rows (list of dicts from a legacy DB) into a model table.

    Rows whose 'id' matches an existing record update it in place; all other
    rows insert a new record. Columns missing from a row are filled from
    `defaults` (values may be callables receiving the target object).
    Placeholder pilot rows (callsign '-' and name '-None-') are skipped.
    """
    if not table_query_data:
        logger.debug('Error restoring "{0}" table: no data'.format(class_type.__name__))
        return
    try:
        for row_data in table_query_data:
            # BUGFIX: rows are plain dicts, so getattr(row_data, ...) always
            # returned the default and the placeholder-pilot filter never
            # fired; use dict access instead
            if (class_type is not self._Database.Pilot) or row_data.get('callsign', '') != '-' or \
                    row_data.get('name', '') != '-None-':
                if 'id' in class_type.__table__.columns.keys() and \
                        'id' in row_data.keys():
                    # update existing record
                    obj_to_update = class_type.query.filter(getattr(class_type, 'id') == row_data['id']).first()
                else:
                    # insert new record
                    obj_to_update = None
                if obj_to_update is None:
                    new_data = class_type()
                    for col in class_type.__table__.columns:
                        colName = col.name
                        if colName in row_data.keys():
                            setattr(new_data, colName, row_data[colName])
                        elif colName in defaults:
                            defaultValue = defaults[colName]
                            setattr(new_data, colName, defaultValue(new_data) if callable(defaultValue) else defaultValue)
                    self._Database.DB.session.add(new_data)
                else:
                    for col in class_type.__table__.columns:
                        colName = col.name
                        if colName in row_data.keys():
                            setattr(obj_to_update, colName, row_data[colName])
                        elif colName in defaults:
                            if colName != 'id': # never default the key of an existing record
                                defaultValue = defaults[colName]
                                setattr(obj_to_update, colName, defaultValue(obj_to_update) if callable(defaultValue) else defaultValue)
                self._Database.DB.session.flush()
        logger.info('Database table "{0}" restored'.format(class_type.__name__))
    except Exception as ex:
        logger.warning('Error restoring "{0}" table from previous database'.format(class_type.__name__), exc_info=ex)
def recover_database(self, db_uri, **kwargs):
    """Rebuild the current database from a previous database file.

    Three stages, each gated on the previous one succeeding:
      stage 0 -- read all legacy tables into memory, applying
                 version-dependent data migrations in place;
      stage 1 -- restore pilots/stages/heats/formats/profiles/classes and
                 carry over selected options;
      stage 2 -- restore saved race results (skipped for pre-2.0 data).
    The live DB file is backed up (when called at startup) and re-created
    empty before stage 1. Returns a dict of per-stage success flags.
    """
    recover_status = {
        'stage_0': False,
        'stage_1': False,
        'stage_2': False,
    }
    # stage 0: collect data from file
    try:
        logger.info('Recovering data from previous database')
        # load file directly
        engine = create_engine(db_uri, convert_unicode=True)
        metadata = MetaData(bind=engine)
        options_query_data = self.get_legacy_table_data(metadata, 'global_settings')
        migrate_db_api = 0 # delta5 or very old RH versions
        if options_query_data:
            for row in options_query_data:
                if row['option_name'] == 'server_api':
                    migrate_db_api = int(row['option_value'])
                    break
        if migrate_db_api > self._SERVER_API:
            raise ValueError('Database version is newer than server version')
        pilot_query_data = self.get_legacy_table_data(metadata, 'pilot')
        heat_query_data = self.get_legacy_table_data(metadata, 'heat')
        heatNode_query_data = self.get_legacy_table_data(metadata, 'heat_node')
        raceFormat_query_data = self.get_legacy_table_data(metadata, 'race_format')
        profiles_query_data = self.get_legacy_table_data(metadata, 'profiles')
        raceClass_query_data = self.get_legacy_table_data(metadata, 'race_class')
        raceMeta_query_data = self.get_legacy_table_data(metadata, 'saved_race_meta')
        racePilot_query_data = self.get_legacy_table_data(metadata, 'saved_pilot_race')
        raceLap_query_data = self.get_legacy_table_data(metadata, 'saved_race_lap')
        stage_query_data = self.get_legacy_table_data(metadata, 'stage')
        engine.dispose() # close connection after loading
        # option names preserved across recovery
        # NOTE(review): 'eventDecription' is misspelled -- presumably it matches
        # the stored option key used elsewhere; confirm before "fixing"
        carryoverOpts = [
            "timerName",
            "timerLogo",
            "hue_0",
            "sat_0",
            "lum_0_low",
            "lum_0_high",
            "contrast_0_low",
            "contrast_0_high",
            "hue_1",
            "sat_1",
            "lum_1_low",
            "lum_1_high",
            "contrast_1_low",
            "contrast_1_high",
            "currentLanguage",
            "timeFormat",
            "timeFormatPhonetic",
            "currentProfile",
            "currentFormat",
            "calibrationMode",
            "MinLapSec",
            "MinLapBehavior",
            "ledEffects",
            "ledBrightness",
            "ledColorNodes",
            "ledColorFreqs",
            "osd_lapHeader",
            "osd_positionHeader",
            "startThreshLowerAmount",
            "startThreshLowerDuration",
            "nextHeatBehavior",
            "eventName",
            "eventDecription",
            "eventURL",
            "trackLayout",
            "timerMapping"
        ]
        # RSSI reduced by half for 2.0.0
        if migrate_db_api < 23:
            for profile in profiles_query_data:
                if 'enter_ats' in profile and profile['enter_ats']:
                    enter_ats = json.loads(profile['enter_ats'])
                    enter_ats["v"] = [(val/2 if val else None) for val in enter_ats["v"]]
                    profile['enter_ats'] = json.dumps(enter_ats)
                if 'exit_ats' in profile and profile['exit_ats']:
                    exit_ats = json.loads(profile['exit_ats'])
                    exit_ats["v"] = [(val/2 if val else None) for val in exit_ats["v"]]
                    profile['exit_ats'] = json.dumps(exit_ats)
        # Convert frequencies
        if migrate_db_api < 30:
            for profile in profiles_query_data:
                if 'frequencies' in profile and profile['frequencies']:
                    freqs = json.loads(profile['frequencies'])
                    freqs["b"] = [None for _i in range(self._RACE.num_nodes)]
                    freqs["c"] = [None for _i in range(self._RACE.num_nodes)]
                    profile['frequencies'] = json.dumps(freqs)
        recover_status['stage_0'] = True
    except Exception as ex:
        logger.warning('Error reading data from previous database (stage 0)', exc_info=ex)
    if "startup" in kwargs:
        self.backup_db_file(False) # rename and move DB file
    # re-create an empty schema to restore into
    self.db_init(nofill=True)
    # stage 1: recover pilots, heats, heatnodes, format, profile, class, options
    if recover_status['stage_0'] == True:
        try:
            if pilot_query_data:
                self._Database.DB.session.query(self._Database.Pilot).delete()
                self.restore_table(self._Database.Pilot, pilot_query_data, defaults={
                    'name': 'New Pilot',
                    'callsign': lambda data: 'New Callsign ' + unique_id(),
                    'team': RHUtils.DEF_TEAM_NAME,
                    'phonetic': '',
                    'color': None
                })
                # ensure every restored pilot has a display color
                for pilot in self._Database.Pilot.query.all():
                    if not pilot.color:
                        pilot.color = RHUtils.hslToHex(False, 100, 50)
            else:
                self.reset_pilots()
            if stage_query_data:
                self._Database.DB.session.query(self._Database.Stage).delete()
                self.restore_table(self._Database.Stage, stage_query_data)
            else:
                self.reset_stages()
            if migrate_db_api < 27:
                # old heat DB structure; migrate node 0 to heat table
                # build list of heat meta
                heat_extracted_meta = []
                if len(heat_query_data):
                    for row in heat_query_data:
                        if 'node_index' in row:
                            if row['node_index'] == 0:
                                new_row = {}
                                new_row['id'] = row['heat_id']
                                if 'note' in row:
                                    new_row['note'] = row['note']
                                if 'class_id' in row:
                                    new_row['class_id'] = row['class_id']
                                heat_extracted_meta.append(new_row)
                    self.restore_table(self._Database.Heat, heat_extracted_meta, defaults={
                        'note': None,
                        'class_id': RHUtils.CLASS_ID_NONE,
                        'stage_id': self.get_default_stage_id
                    })
                    # extract pilots from heats and load into heatnode
                    heatnode_extracted_data = []
                    heatnode_dummy_id = 0
                    for row in heat_query_data:
                        heatnode_row = {}
                        heatnode_row['id'] = heatnode_dummy_id
                        heatnode_row['heat_id'] = int(row['heat_id'])
                        heatnode_row['node_index'] = int(row['node_index'])
                        heatnode_row['pilot_id'] = int(row['pilot_id'])
                        heatnode_extracted_data.append(heatnode_row)
                        heatnode_dummy_id += 1
                    self._Database.DB.session.query(self._Database.HeatNode).delete()
                    self.restore_table(self._Database.HeatNode, heatnode_extracted_data, defaults={
                        'pilot_id': RHUtils.PILOT_ID_NONE,
                        'color': None
                    })
                else:
                    self.reset_heats()
            else:
                # current heat structure; use basic migration
                if heat_query_data:
                    self.restore_table(self._Database.Heat, heat_query_data, defaults={
                        'class_id': RHUtils.CLASS_ID_NONE,
                        'stage_id': self.get_default_stage_id
                    })
                    self.restore_table(self._Database.HeatNode, heatNode_query_data, defaults={
                        'pilot_id': RHUtils.PILOT_ID_NONE,
                        'color': None
                    })
                    self._RACE.current_heat = self.get_first_heat().id
                else:
                    self.reset_heats()
            if raceFormat_query_data:
                self.restore_table(self._Database.RaceFormat, raceFormat_query_data, defaults={
                    'name': lambda data: self.__("Migrated Format") + ' ' + unique_id(),
                    'race_mode': RaceMode.FIXED_TIME.value,
                    'race_time_sec': 120,
                    'lap_grace_sec': 0,
                    'start_delay_min': 2,
                    'start_delay_max': 5,
                    'staging_tones': StagingTones.TONES_ALL.value,
                    'number_laps_win': 0,
                    'win_condition': WinCondition.MOST_LAPS.value,
                    'team_racing_mode': False,
                    'start_behavior': StartBehavior.HOLESHOT.value
                })
            else:
                self.reset_raceFormats()
            if profiles_query_data:
                self.restore_table(self._Database.Profiles, profiles_query_data, defaults={
                    'name': lambda data: self.__("Migrated Profile") + ' ' + unique_id(),
                    'frequencies': json.dumps(self.default_frequencies()),
                    'enter_ats': json.dumps({'v': [None for _i in range(self._RACE.num_nodes)]}),
                    'exit_ats': json.dumps({'v': [None for _i in range(self._RACE.num_nodes)]}),
                    'f_ratio': None
                })
            else:
                self.reset_profiles()
            self.restore_table(self._Database.RaceClass, raceClass_query_data, defaults={
                'name': lambda data: 'New class ' + unique_id(),
                'format_id': 0
            })
            # options: start from defaults, then carry over the whitelist
            self.reset_options()
            if options_query_data:
                for opt in options_query_data:
                    if opt['option_name'] in carryoverOpts:
                        self.set_option(opt['option_name'], opt['option_value'])
                logger.info('UI Options restored')
            recover_status['stage_1'] = True
        except Exception as ex:
            logger.warning('Error while writing data from previous database (stage 1)', exc_info=ex)
            # failed recovery, db reset
            self.reset_all()
            self.commit()
            self.primeCache() # refresh Options cache
            self._Events.trigger(Evt.DATABASE_RECOVER)
            return recover_status
    # stage 2: recover race result data
    if recover_status['stage_1'] == True:
        try:
            if migrate_db_api < 23:
                # don't attempt to migrate race data older than 2.0
                logger.warning('Race data older than v2.0; skipping results migration')
            else:
                self.restore_table(self._Database.SavedRaceMeta, raceMeta_query_data)
                self.restore_table(self._Database.SavedPilotRace, racePilot_query_data, defaults={
                    'history_values': None,
                    'history_times': None,
                    'lifetime_values': None,
                    'lifetime_times': None,
                    'penalty_time': None,
                    'penalty_desc': None,
                    'enter_at': None,
                    'exit_at': None
                })
                self.restore_table(self._Database.SavedRaceLap, raceLap_query_data, defaults={
                    'source': None,
                    'deleted': False
                })
            recover_status['stage_2'] = True
        except Exception as ex:
            logger.warning('Error while writing data from previous database (stage 2)', exc_info=ex)
    self.commit()
    self.primeCache() # refresh Options cache
    self.install_new_raceFormats()
    self._Events.trigger(Evt.DATABASE_RECOVER)
    return recover_status
def default_frequencies(self):
    '''Set node frequencies, R1367 for 4, IMD6C+ for 5+.'''
    if self._RACE.num_nodes < 5:
        freqs = {
            'b': ['R', 'R', 'R', 'R'],
            'c': [1, 3, 6, 7],
            'f': [5658, 5732, 5843, 5880]
        }
    else:
        freqs = {
            'b': ['R', 'R', 'F', 'F', 'R', 'R'],
            'c': [1, 2, 2, 4, 7, 8],
            'f': [5658, 5695, 5760, 5800, 5880, 5917]
        }
    # pad out with blank entries when there are more nodes than presets
    extra = self._RACE.num_nodes - len(freqs['f'])
    if extra > 0:
        freqs['b'].extend([None] * extra)
        freqs['c'].extend([None] * extra)
        freqs['f'].extend([RHUtils.FREQUENCY_ID_NONE] * extra)
    return freqs
# Pilots
def get_pilot(self, pilot_id):
    """Fetch a Pilot by primary key."""
    query = self._Database.Pilot.query
    return query.get(pilot_id)
def get_pilots(self):
    """Return every pilot row."""
    query = self._Database.Pilot.query
    return query.all()
def add_pilot(self, init=EMPTY_DICT):
    """Insert a new pilot; empty name/callsign are auto-numbered from its id."""
    new_pilot = self._Database.Pilot(
        name=init.get('name', ''),
        callsign=init.get('callsign', ''),
        team=init.get('team', RHUtils.DEF_TEAM_NAME),
        phonetic=init.get('phonetic', ''),
        color=init.get('color', RHUtils.hslToHex(False, 100, 50)),
        url=init.get('url'),
        data=init.get('data'))
    self._Database.DB.session.add(new_pilot)
    self._Database.DB.session.flush()  # obtain the generated id
    if not new_pilot.name:
        new_pilot.name = self.__('~Pilot %d Name') % (new_pilot.id)
    if not new_pilot.callsign:
        new_pilot.callsign = self.__('~Callsign %d') % (new_pilot.id)
    self.commit()
    self._Events.trigger(Evt.PILOT_ADD, {
        'pilot_id': new_pilot.id,
    })
    logger.info('Pilot added: Pilot {0}'.format(new_pilot.id))
    return new_pilot
def alter_pilot(self, data):
    """Update a pilot from `data` (pilot_id required) and evict stale caches.

    Callsign/team changes invalidate results for every heat, class and saved
    race the pilot appears in. Returns (pilot, affected_race_list).
    """
    pilot_id = data['pilot_id']
    pilot = self._Database.Pilot.query.get(pilot_id)
    if 'callsign' in data:
        pilot.callsign = data['callsign']
    if 'team_name' in data:
        pilot.team = data['team_name']
    if 'phonetic' in data:
        pilot.phonetic = data['phonetic']
    if 'name' in data:
        pilot.name = data['name']
    if 'color' in data:
        pilot.color = data['color']
    if 'url' in data:
        pilot.url = data['url']
    self.commit()
    self._Events.trigger(Evt.PILOT_ALTER, {
        'pilot_id': pilot_id,
    })
    logger.info('Altered pilot {0} to {1}'.format(pilot_id, data))
    race_list = []
    # only name-visible changes require results-cache invalidation
    if 'callsign' in data or 'team_name' in data:
        heatnodes = self._Database.HeatNode.query.filter_by(pilot_id=pilot_id).all()
        if heatnodes:
            for heatnode in heatnodes:
                heat = self.get_heat(heatnode.heat_id)
                self._RESULTS_CACHE.evict_heat(heat.id)
                if heat.class_id != RHUtils.CLASS_ID_NONE:
                    self._RESULTS_CACHE.evict_class(heat.class_id)
                for race in self._Database.SavedRaceMeta.query.filter_by(heat_id=heatnode.heat_id).all():
                    race_list.append(race)
            if len(race_list):
                self._PageCache.set_valid(False)
                self._RESULTS_CACHE.evict_event()
                for race in race_list:
                    self._RESULTS_CACHE.evict_race(race.id)
                self.commit()
    return pilot, race_list
def delete_pilot(self, pilot_id):
    """Delete a pilot unless saved races reference them; True if deleted.

    Any heat seats still holding the pilot are reset to PILOT_ID_NONE.
    """
    pilot = self._Database.Pilot.query.get(pilot_id)
    if self.savedPilotRaces_has_pilot(pilot.id):
        logger.info('Refusing to delete pilot {0}: is in use'.format(pilot.id))
        return False
    self._Database.DB.session.delete(pilot)
    for seat in self._Database.HeatNode.query.all():
        if seat.pilot_id == pilot.id:
            seat.pilot_id = RHUtils.PILOT_ID_NONE
    self.commit()
    logger.info('Pilot {0} deleted'.format(pilot.id))
    return True
def get_recent_pilot_node(self, pilot_id):
    """Return the most recently created seat assignment for a pilot."""
    query = self._Database.HeatNode.query.filter_by(pilot_id=pilot_id)
    return query.order_by(self._Database.HeatNode.id.desc()).first()
def clear_pilots(self):
    """Delete every pilot row."""
    pilots = self._Database.DB.session.query(self._Database.Pilot)
    pilots.delete()
    self.commit()
    return True
def reset_pilots(self):
    """Clear pilots and seed one placeholder pilot per node."""
    self.clear_pilots()
    for seat in range(1, self._RACE.num_nodes + 1):
        self.add_pilot({
            'callsign': 'Callsign {0}'.format(seat),
            'name': 'Pilot {0} Name'.format(seat),
            # NOTE(review): 'Callsine' looks like a deliberate phonetic
            # spelling of "Callsign" -- confirm before changing
            'phonetic': 'Callsine {0}'.format(seat)
        })
    logger.info('Database pilots reset')
    return True
def get_or_insert_stage(self, name):
    """Look up a stage by name, creating (and committing) it if absent."""
    existing = self._Database.Stage.query.filter_by(name=name).one_or_none()
    if existing:
        return existing
    stage = self._Database.Stage(name=name)
    self._Database.DB.session.add(stage)
    self.commit()
    return stage
def get_default_stage_id(self, heat):
    """Derive a stage id for a migrated heat from its note text.

    Notes ending in ' Main' map to the Mains stage, others to Qualifying;
    a note of three or more words contributes its first word as a prefix.
    Heats without a 'note' attribute default to Mains; an empty note maps
    to Qualifying. The stage is created on demand.
    """
    if not hasattr(heat, 'note'):
        stage_name = MAINS_STAGE
    elif not heat.note:
        stage_name = QUALIFYING_STAGE
    else:
        stage_name = MAINS_STAGE if heat.note.endswith(' Main') else QUALIFYING_STAGE
        parts = heat.note.split(' ')
        if len(parts) >= 3:
            stage_name = parts[0] + ' ' + stage_name
    return self.get_or_insert_stage(stage_name).id
def reset_stages(self, nofill=False):
    """Delete all stages; unless nofill, recreate the two defaults."""
    self._Database.DB.session.query(self._Database.Stage).delete()
    self.commit()
    if not nofill:
        for stage_name in (QUALIFYING_STAGE, MAINS_STAGE):
            exists = self._Database.Stage.query.filter_by(name=stage_name).one_or_none()
            if not exists:
                self._Database.DB.session.add(self._Database.Stage(name=stage_name))
        self.commit()
    return True
# Heats
def get_heat(self, heat_id):
    """Fetch a Heat by primary key."""
    query = self._Database.Heat.query
    return query.get(heat_id)
def get_heat_note(self, heat_id):
    """Return the heat's note text, or None when the heat does not exist."""
    heat = self._Database.Heat.query.get(heat_id)
    return heat.note if heat else None
def get_heats(self):
    """Return all heats ordered by id."""
    query = self._Database.Heat.query
    return query.order_by(self._Database.Heat.id).all()
def get_heats_by_class(self, class_id):
    """Return all heats assigned to the given race class."""
    query = self._Database.Heat.query.filter_by(class_id=class_id)
    return query.all()
def get_first_heat(self):
    """Return the first Heat row of the unordered query (None when empty)."""
    query = self._Database.Heat.query
    return query.first()
def add_heat(self, init=EMPTY_DICT, initPilots=EMPTY_DICT):
    """Create a heat in the stage named init['stage'] (required).

    One HeatNode is created per configured node; initPilots maps
    node_index -> pilot_id for pre-seeded seats. init may also supply
    'class' and 'note'. Returns the new Heat.
    """
    stage_name = init['stage']
    stage = self.get_or_insert_stage(stage_name)
    # Add new heat
    new_heat = self._Database.Heat(
        stage_id=stage.id,
        class_id=RHUtils.CLASS_ID_NONE
    )
    if 'class' in init:
        new_heat.class_id = init['class']
    if 'note' in init:
        new_heat.note = init['note']
    self._Database.DB.session.add(new_heat)
    self._Database.DB.session.flush()
    self._Database.DB.session.refresh(new_heat)
    # Add heatnodes
    for node_index in range(self._RACE.num_nodes):
        new_heatNode = self._Database.HeatNode(
            heat_id=new_heat.id,
            node_index=node_index,
            pilot_id=RHUtils.PILOT_ID_NONE
        )
        if node_index in initPilots:
            new_heatNode.pilot_id = initPilots[node_index]
        self._Database.DB.session.add(new_heatNode)
    self.commit()
    # NOTE(review): fires HEAT_DUPLICATE rather than a HEAT_ADD-style event
    # (duplicate_heat fires the same one) -- confirm this is intentional
    self._Events.trigger(Evt.HEAT_DUPLICATE, {
        'heat_id': new_heat.id,
    })
    logger.info('Heat added: Heat {0}'.format(new_heat.id))
    return new_heat
def duplicate_heat(self, source, **kwargs):
    """Clone an existing heat (seats included) and return the new Heat.

    The note is uniquified against all existing heat notes; dest_class in
    kwargs overrides the class copied from the source heat.
    """
    # Add new heat by duplicating an existing one
    source_heat = self.get_heat(source)
    if source_heat.note:
        all_heat_notes = [heat.note for heat in self.get_heats()]
        new_heat_note = RHUtils.uniqueName(source_heat.note, all_heat_notes)
    else:
        new_heat_note = ''
    if 'dest_class' in kwargs:
        new_class = kwargs['dest_class']
    else:
        new_class = source_heat.class_id
    new_heat = self._Database.Heat(stage_id=source_heat.stage_id,
        note=new_heat_note,
        class_id=new_class)
    self._Database.DB.session.add(new_heat)
    self._Database.DB.session.flush()
    self._Database.DB.session.refresh(new_heat)  # ensure generated id is loaded
    for source_heatnode in self.get_heatNodes_by_heat(source_heat.id):
        new_heatnode = self._Database.HeatNode(heat_id=new_heat.id,
            node_index=source_heatnode.node_index,
            pilot_id=source_heatnode.pilot_id)
        self._Database.DB.session.add(new_heatnode)
    self.commit()
    self._Events.trigger(Evt.HEAT_DUPLICATE, {
        'heat_id': new_heat.id,
    })
    logger.info('Heat {0} duplicated to heat {1}'.format(source, new_heat.id))
    return new_heat
def alter_heat(self, data):
    # Alters heat. Returns heat and list of affected races
    """Update a heat from `data` (key 'heat' required).

    Class and pilot changes are propagated into already-saved races for this
    heat, and every affected results cache is evicted. If the altered heat
    is the current race's heat, the current pilot set is refreshed.
    """
    heat_id = data['heat']
    heat = self._Database.Heat.query.get(heat_id)
    class_changed = False
    pilot_changed = False
    if 'note' in data and heat.note != data['note']:
        self._PageCache.set_valid(False)
        heat.note = data['note']
    # NOTE(review): compares heat.stage (presumably a Stage relationship)
    # against a stage-name string -- verify this comparison does what is
    # intended before relying on it
    if 'stage' in data and heat.stage != data['stage']:
        stage = self.get_or_insert_stage(data['stage'])
        heat.stage_id = stage.id
    if 'class' in data and heat.class_id != data['class']:
        old_class_id = heat.class_id
        heat.class_id = data['class']
        class_changed = True
    if 'pilot' in data:
        node_index = data['node']
        heatnode = self._Database.HeatNode.query.filter_by(
            heat_id=heat_id, node_index=node_index).one()
        if heatnode.pilot_id != data['pilot']:
            heatnode.pilot_id = data['pilot']
            pilot_changed = True
    # alter existing saved races:
    race_list = self._Database.SavedRaceMeta.query.filter_by(heat_id=heat_id).all()
    if class_changed:
        if len(race_list):
            for race_meta in race_list:
                race_meta.class_id = data['class']
            if old_class_id is not RHUtils.CLASS_ID_NONE:
                self._RESULTS_CACHE.evict_class(old_class_id)
    if pilot_changed:
        if len(race_list):
            for race_meta in race_list:
                # rewrite the pilot on the affected seat in each saved race
                for pilot_race in self._Database.SavedPilotRace.query.filter_by(race_id=race_meta.id).all():
                    if pilot_race.node_index == data['node']:
                        pilot_race.pilot_id = data['pilot']
                for race_lap in self._Database.SavedRaceLap.query.filter_by(race_id=race_meta.id):
                    if race_lap.node_index == data['node']:
                        race_lap.pilot_id = data['pilot']
                self._RESULTS_CACHE.evict_race(race_meta.id)
            self._RESULTS_CACHE.evict_heat(heat.id)
    if pilot_changed or class_changed:
        if len(race_list):
            if heat.class_id is not RHUtils.CLASS_ID_NONE:
                self._RESULTS_CACHE.evict_class(heat.class_id)
            self._RESULTS_CACHE.evict_event()
            self._PageCache.set_valid(False)
    self.commit()
    self._Events.trigger(Evt.HEAT_ALTER, {
        'heat_id': heat.id,
    })
    # update current race
    if heat_id == self._RACE.current_heat:
        self._RACE.set_current_pilots(self)
    logger.info('Heat {0} altered with {1}'.format(heat_id, data))
    return heat, race_list
def delete_heat(self, heat_id):
    # Deletes heat. Returns heat-ID if successful, None if not
    """Delete a heat and its seats; refuses when it is the last heat, has
    saved races, or is the heat of a race in progress.

    When exactly one heat remains afterwards (and no race is underway), its
    id is renumbered to 1 and made the current heat.
    """
    heat_count = self._Database.Heat.query.count()
    heat = self._Database.Heat.query.get(heat_id)
    if heat and heat_count > 1: # keep at least one heat
        heatnodes = self._Database.HeatNode.query.filter_by(heat_id=heat.id).order_by(self._Database.HeatNode.node_index).all()
        has_race = self.savedRaceMetas_has_heat(heat.id)
        if has_race or (self._RACE.current_heat == heat.id and self._RACE.race_status != RaceStatus.READY):
            logger.info('Refusing to delete heat {0}: is in use'.format(heat.id))
            return None
        else:
            self._Database.DB.session.delete(heat)
            for heatnode in heatnodes:
                self._Database.DB.session.delete(heatnode)
            self.commit()
            logger.info('Heat {0} deleted'.format(heat.id))
            self._Events.trigger(Evt.HEAT_DELETE, {
                'heat_id': heat_id,
            })
            # if only one heat remaining then set ID to 1
            if heat_count == 2 and self._RACE.race_status == RaceStatus.READY:
                try:
                    heat_obj = self._Database.Heat.query.first()
                    if heat_obj.id != 1:
                        heatnodes = self._Database.HeatNode.query.filter_by(heat_id=heat_obj.id).order_by(self._Database.HeatNode.node_index).all()
                        if not self.savedRaceMetas_has_heat(heat_obj.id):
                            logger.info("Adjusting single remaining heat ({0}) to ID 1".format(heat_obj.id))
                            heat_obj.id = 1
                            for heatnode in heatnodes:
                                heatnode.heat_id = heat_obj.id
                            self.commit()
                            self._RACE.current_heat = 1
                            heat_id = 1 # set value so heat data is updated
                        else:
                            logger.warning("Not changing single remaining heat ID ({0}): is in use".format(heat_obj.id))
                except Exception as ex:
                    logger.warning("Error adjusting single remaining heat ID", exc_info=ex)
            return heat_id
    else:
        logger.info('Refusing to delete only heat')
        return None
def clear_heats(self):
    """Delete all heats and their seat assignments."""
    session = self._Database.DB.session
    session.query(self._Database.Heat).delete()
    session.query(self._Database.HeatNode).delete()
    self.commit()
    return True
def reset_heats(self, nofill=False):
    """Clear all heats; unless nofill, create one default qualifying heat
    and make it the current heat."""
    self.clear_heats()
    if not nofill:
        self.add_heat(init={'stage':QUALIFYING_STAGE})
        self._RACE.current_heat = self.get_first_heat().id
    logger.info('Database heats reset')
# HeatNodes
def get_heatNodes(self):
    """Return every seat assignment across all heats."""
    query = self._Database.HeatNode.query
    return query.all()
def get_heatNodes_by_heat(self, heat_id):
    """Return one heat's seat assignments, ordered by node index."""
    query = self._Database.HeatNode.query.filter_by(heat_id=heat_id)
    return query.order_by(self._Database.HeatNode.node_index).all()
def add_heatNode(self, heat_id, node_index):
    """Add an empty (pilot-less) seat to a heat; not committed here."""
    seat = self._Database.HeatNode(
        heat_id=heat_id,
        node_index=node_index,
        pilot_id=RHUtils.PILOT_ID_NONE
    )
    self._Database.DB.session.add(seat)
    return True
def get_pilot_from_heatNode(self, heat_id, node_index):
    """Return the pilot_id seated at (heat, node), or None if no such seat."""
    seat = self._Database.HeatNode.query.filter_by(heat_id=heat_id, node_index=node_index).one_or_none()
    return seat.pilot_id if seat else None
# Race Classes
def get_raceClass(self, raceClass_id):
    """Fetch a RaceClass by primary key."""
    query = self._Database.RaceClass.query
    return query.get(raceClass_id)
def get_raceClasses(self):
    """Return every race class row."""
    query = self._Database.RaceClass.query
    return query.all()
def add_raceClass(self, init=EMPTY_DICT):
    """Create a race class; a blank name is auto-numbered from its id."""
    new_race_class = self._Database.RaceClass(
        name=init.get('name', ''),
        description=init.get('description', ''),
        format_id=init.get('format_id', RHUtils.FORMAT_ID_NONE),
        parent_id=init.get('parent_id')
    )
    self._Database.DB.session.add(new_race_class)
    self._Database.DB.session.flush()  # obtain the generated id
    if not new_race_class.name:
        new_race_class.name = 'Class {}'.format(new_race_class.id)
    self.commit()
    self._Events.trigger(Evt.CLASS_ADD, {
        'class_id': new_race_class.id,
    })
    # BUGFIX: log the id (consistent with pilot/heat logging); previously the
    # whole model object was interpolated into the message
    logger.info('Class added: Class {0}'.format(new_race_class.id))
    return new_race_class
def duplicate_raceClass(self, source_class_id):
    """Clone a race class (uniquified name) together with all of its heats."""
    source_class = self.get_raceClass(source_class_id)
    if source_class.name:
        all_class_names = [race_class.name for race_class in self.get_raceClasses()]
        new_class_name = RHUtils.uniqueName(source_class.name, all_class_names)
    else:
        new_class_name = ''
    new_class = self._Database.RaceClass(name=new_class_name,
        description=source_class.description,
        format_id=source_class.format_id
    )
    self._Database.DB.session.add(new_class)
    self._Database.DB.session.flush()
    self._Database.DB.session.refresh(new_class)  # ensure generated id is loaded
    # duplicate every heat of the source class into the new class
    for heat in self._Database.Heat.query.filter_by(class_id=source_class_id).all():
        self.duplicate_heat(heat.id, dest_class=new_class.id)
    self.commit()
    self._Events.trigger(Evt.CLASS_DUPLICATE, {
        'class_id': new_class.id,
    })
    logger.info('Class {0} duplicated to class {1}'.format(source_class.id, new_class.id))
    return new_class
    def alter_raceClass(self, data):
        """Update a race class from *data*; return (race_class, affected_races).

        Returns (False, False) if the class id is unknown. Name and format
        changes invalidate the page cache; format changes additionally
        propagate to saved races and evict cached results.
        """
        # alter existing classes
        race_class_id = data['id']
        race_class = self._Database.RaceClass.query.get(race_class_id)
        if not race_class:
            return False, False
        if 'name' in data:
            race_class.name = data['name']
        if 'format_id' in data:
            race_class.format_id = data['format_id']
        if 'description' in data:
            race_class.description = data['description']
        if 'parent_id' in data:
            race_class.parent_id = data['parent_id']
        race_list = self._Database.SavedRaceMeta.query.filter_by(class_id=race_class_id).all()
        if 'name' in data:
            if len(race_list):
                self._PageCache.set_valid(False)
        if 'format_id' in data:
            if len(race_list):
                self._PageCache.set_valid(False)
            self._RESULTS_CACHE.evict_event()
            self._RESULTS_CACHE.evict_class(race_class.id)
            # saved races of this class inherit the new format
            for race_meta in race_list:
                race_meta.format_id = data['format_id']
                self._RESULTS_CACHE.evict_race(race_meta.id)
            heats = self._Database.Heat.query.filter_by(class_id=race_class_id).all()
            for heat in heats:
                self._RESULTS_CACHE.evict_heat(heat.id)
        self.commit()
        self._Events.trigger(Evt.CLASS_ALTER, {
            'class_id': race_class_id,
        })
        logger.info('Altered race class {0} to {1}'.format(race_class_id, data))
        return race_class, race_list
    def delete_raceClass(self, class_id):
        """Delete a race class unless saved races reference it; detach its heats."""
        race_class = self._Database.RaceClass.query.get(class_id)
        has_race = self.savedRaceMetas_has_raceClass(race_class.id)
        if has_race:
            logger.info('Refusing to delete class {0}: is in use'.format(race_class.id))
            return False
        else:
            self._Database.DB.session.delete(race_class)
            # orphaned heats fall back to the unclassified group
            for heat in self._Database.Heat.query.all():
                if heat.class_id == race_class.id:
                    heat.class_id = RHUtils.CLASS_ID_NONE
            self.commit()
            self._Events.trigger(Evt.CLASS_DELETE, {
                'class_id': race_class.id,
            })
            logger.info('Class {0} deleted'.format(race_class.id))
            return True
    def clear_raceClasses(self):
        """Delete all race classes."""
        self._Database.DB.session.query(self._Database.RaceClass).delete()
        self.commit()
        return True
    def reset_raceClasses(self):
        """Clear race classes and log the reset."""
        self.clear_raceClasses()
        logger.info('Database race classes reset')
        return True
    # Profiles
    def get_profile(self, profile_id):
        """Return the Profiles row with the given id, or None."""
        return self._Database.Profiles.query.get(profile_id)
    def get_profiles(self):
        """Return all frequency profiles."""
        return self._Database.Profiles.query.all()
    def get_first_profile(self):
        """Return an arbitrary first profile row."""
        return self._Database.Profiles.query.first()
    def add_profile(self, init=EMPTY_DICT):
        """Create a new frequency profile from optional *init* values; return it."""
        # frequencies/enter_ats/exit_ats are stored JSON-encoded
        new_profile = self._Database.Profiles(
            name = init['profile_name'] if 'profile_name' in init else '',
            frequencies = json.dumps(init['frequencies']) if 'frequencies' in init else '',
            enter_ats = json.dumps(init['enter_ats']) if 'enter_ats' in init else '',
            exit_ats = json.dumps(init['exit_ats']) if 'exit_ats' in init else ''
        )
        self._Database.DB.session.add(new_profile)
        self.commit()
        return new_profile
    def duplicate_profile(self, source_profile_id):
        """Copy a profile under a unique name and return the new row."""
        source_profile = self.get_profile(source_profile_id)
        all_profile_names = [profile.name for profile in self.get_profiles()]
        if source_profile.name:
            new_profile_name = RHUtils.uniqueName(source_profile.name, all_profile_names)
        else:
            new_profile_name = RHUtils.uniqueName(self._Language.__('New Profile'), all_profile_names)
        new_profile = self._Database.Profiles(
            name=new_profile_name,
            description = '',
            frequencies = source_profile.frequencies,
            enter_ats = source_profile.enter_ats,
            exit_ats = source_profile.exit_ats,
            f_ratio = 100)
        self._Database.DB.session.add(new_profile)
        self.commit()
        self._Events.trigger(Evt.PROFILE_ADD, {
            'profile_id': new_profile.id,
        })
        return new_profile
    def alter_profile(self, data):
        """Update fields of an existing profile from *data*; return the row."""
        profile = self._Database.Profiles.query.get(data['profile_id'])
        if 'profile_name' in data:
            profile.name = data['profile_name']
        if 'profile_description' in data:
            profile.description = data['profile_description']
        if 'frequencies' in data:
            profile.frequencies = json.dumps(data['frequencies'])
        if 'enter_ats' in data:
            profile.enter_ats = json.dumps(data['enter_ats'])
        if 'exit_ats' in data:
            profile.exit_ats = json.dumps(data['exit_ats'])
        self.commit()
        self._Events.trigger(Evt.PROFILE_ALTER, {
            'profile_id': profile.id,
        })
        logger.debug('Altered profile {0} to {1}'.format(profile.id, data))
        return profile
    def upsert_profile(self, profile_data):
        """Update the profile matching profile_name, or create it; return it."""
        profile = self._Database.Profiles.query.filter_by(name=profile_data['profile_name']).one_or_none()
        if profile:
            # update existing profile
            profile_data['profile_id'] = profile.id
            profile = self.alter_profile(profile_data)
        else:
            # add new profile
            profile = self.add_profile(profile_data)
        return profile
    def delete_profile(self, profile_id):
        """Delete a profile; refuses to remove the last remaining one."""
        if len(self.get_profiles()) > 1: # keep one profile
            profile = self._Database.Profiles.query.get(profile_id)
            self._Database.DB.session.delete(profile)
            self.commit()
            self._Events.trigger(Evt.PROFILE_DELETE, {
                'profile_id': profile_id,
            })
            return True
        else:
            logger.info('Refusing to delete only profile')
            return False
    def clear_profiles(self):
        """Delete all profiles."""
        self._Database.DB.session.query(self._Database.Profiles).delete()
        self.commit()
        return True
    def reset_profiles(self):
        """Clear profiles and install a single default profile."""
        self.clear_profiles()
        new_freqs = self.default_frequencies()
        # one empty enter/exit value per node
        template = {}
        template["v"] = [None for _i in range(self._RACE.num_nodes)]
        self.add_profile({
            'profile_name': self.__("Default"),
            'frequencies': new_freqs,
            'enter_ats': template,
            'exit_ats': template
        })
        self.set_option("currentProfile", self.get_first_profile().id)
        logger.info("Database set default profiles")
        return True
    # Formats
    def get_raceFormat(self, raceFormat_id):
        """Return the RaceFormat row with the given id, or None."""
        return self._Database.RaceFormat.query.get(raceFormat_id)
    def get_raceFormats(self):
        """Return all race formats."""
        return self._Database.RaceFormat.query.all()
    def get_first_raceFormat(self):
        """Return an arbitrary first race format row."""
        return self._Database.RaceFormat.query.first()
def add_format(self, init=None):
race_format = self._Database.RaceFormat(
name='',
race_mode=RaceMode.FIXED_TIME.value,
race_time_sec=0,
lap_grace_sec=0,
start_delay_min=0,
start_delay_max=0,
staging_tones=StagingTones.TONES_NONE.value,
number_laps_win=0,
win_condition=WinCondition.NONE.value,
team_racing_mode=False,
start_behavior=StartBehavior.HOLESHOT.value)
if init:
if 'format_name' in init:
race_format.name = init['format_name']
else:
race_format.name = 'Format '+unique_id()
if 'race_mode' in init:
race_format.race_mode = init['race_mode']
if 'race_time_sec' in init:
race_format.race_time_sec = init['race_time_sec']
if 'lap_grace_sec' in init:
race_format.lap_grace_sec = init['lap_grace_sec']
if 'start_delay_min' in init:
race_format.start_delay_min = init['start_delay_min']
if 'start_delay_max' in init:
race_format.start_delay_max = init['start_delay_max']
if 'staging_tones' in init:
race_format.staging_tones = init['staging_tones']
if 'number_laps_win' in init:
race_format.number_laps_win = init['number_laps_win']
if 'start_behavior' in init:
race_format.start_behavior = init['start_behavior']
if 'win_condition' in init:
race_format.win_condition = init['win_condition']
if 'team_racing_mode' in init:
race_format.team_racing_mode = (True if init['team_racing_mode'] else False)
self._Database.DB.session.add(race_format)
self.commit()
    def duplicate_raceFormat(self, source_format_id):
        """Copy a race format under a unique name and return the new row."""
        source_format = self.get_raceFormat(source_format_id)
        all_format_names = [raceformat.name for raceformat in self.get_raceFormats()]
        if source_format.name:
            new_format_name = RHUtils.uniqueName(source_format.name, all_format_names)
        else:
            new_format_name = RHUtils.uniqueName(self._Language.__('New Format'), all_format_names)
        new_format = self._Database.RaceFormat(
            name=new_format_name,
            race_mode=source_format.race_mode,
            race_time_sec=source_format.race_time_sec,
            lap_grace_sec=source_format.lap_grace_sec,
            start_delay_min=source_format.start_delay_min,
            start_delay_max=source_format.start_delay_max,
            staging_tones=source_format.staging_tones,
            number_laps_win=source_format.number_laps_win,
            win_condition=source_format.win_condition,
            team_racing_mode=source_format.team_racing_mode,
            start_behavior=source_format.start_behavior)
        self._Database.DB.session.add(new_format)
        self.commit()
        self._Events.trigger(Evt.RACE_FORMAT_ADD, {
            'format_id': new_format.id,
        })
        return new_format
    def alter_raceFormat(self, data):
        """Update a race format from *data*; return (race_format, affected_races).

        Returns (False, False) if the format is currently selected and a
        race is in progress. Win-condition or start-behavior changes evict
        cached results for every saved race, class and heat using the format.
        """
        race_format = self._Database.RaceFormat.query.get(data['format_id'])
        # Prevent active race format change
        if self.get_optionInt('currentFormat') == data['format_id'] and \
            self._RACE.race_status != RaceStatus.READY:
            logger.warning('Preventing race format alteration: race in progress')
            return False, False
        if 'format_name' in data:
            race_format.name = data['format_name']
        if 'race_mode' in data:
            race_format.race_mode = data['race_mode']
        if 'race_time_sec' in data:
            race_format.race_time_sec = data['race_time_sec']
        if 'lap_grace_sec' in data:
            race_format.lap_grace_sec = data['lap_grace_sec']
        if 'start_delay_min' in data:
            race_format.start_delay_min = data['start_delay_min']
        if 'start_delay_max' in data:
            race_format.start_delay_max = data['start_delay_max']
        if 'staging_tones' in data:
            race_format.staging_tones = data['staging_tones']
        if 'number_laps_win' in data:
            race_format.number_laps_win = data['number_laps_win']
        if 'start_behavior' in data:
            race_format.start_behavior = data['start_behavior']
        if 'win_condition' in data:
            race_format.win_condition = data['win_condition']
        if 'team_racing_mode' in data:
            race_format.team_racing_mode = (True if data['team_racing_mode'] else False)
        self.commit()
        race_list = []
        # only these two fields change how existing results are scored
        if 'win_condition' in data or 'start_behavior' in data:
            race_list = self._Database.SavedRaceMeta.query.filter_by(format_id=race_format.id).all()
            if len(race_list):
                self._PageCache.set_valid(False)
                self._RESULTS_CACHE.evict_event()
                for race in race_list:
                    self._RESULTS_CACHE.evict_race(race.id)
                classes = self._Database.RaceClass.query.filter_by(format_id=race_format.id).all()
                for race_class in classes:
                    self._RESULTS_CACHE.evict_class(race_class.id)
                    heats = self._Database.Heat.query.filter_by(class_id=race_class.id).all()
                    for heat in heats:
                        self._RESULTS_CACHE.evict_heat(heat.id)
                self.commit()
        self._Events.trigger(Evt.RACE_FORMAT_ALTER, {
            'race_format': race_format.id,
        })
        logger.info('Altered format {0} to {1}'.format(race_format.id, data))
        return race_format, race_list
    def delete_raceFormat(self, format_id):
        """Delete a race format unless active, in use, or the last one."""
        # Prevent active race format change
        if self.get_optionInt('currentFormat') == format_id and \
            self._RACE.race_status != RaceStatus.READY:
            logger.warning('Preventing race format deletion: race in progress')
            return False
        if self.savedRaceMetas_has_raceFormat(format_id):
            logger.warning('Preventing race format deletion: saved race exists')
            return False
        race_format = self._Database.RaceFormat.query.get(format_id)
        if race_format and len(self.get_raceFormats()) > 1: # keep one format
            self._Database.DB.session.delete(race_format)
            self.commit()
            self._Events.trigger(Evt.RACE_FORMAT_DELETE, {
                'race_format': format_id,
            })
            return True
        else:
            # NOTE(review): also reached when the format id is unknown,
            # in which case this log message is misleading
            logger.info('Refusing to delete only format')
            return False
    def clear_raceFormats(self):
        """Delete all race formats and detach them from race classes."""
        self._Database.DB.session.query(self._Database.RaceFormat).delete()
        for race_class in self.get_raceClasses():
            self.alter_raceClass({
                'id': race_class.id,
                'format_id': RHUtils.FORMAT_ID_NONE
            })
        self.commit()
        return True
    def reset_raceFormats(self, nofill=False):
        """Clear formats and (unless *nofill*) reinstall the defaults."""
        self.clear_raceFormats()
        self.commit()
        if not nofill:
            self.install_new_raceFormats()
        logger.info("Database reset race formats")
        return True
    def install_new_raceFormats(self):
        """Load default race formats from race_formats.json, validated
        against its JSON schema; formats whose name already exists are
        skipped (enum names in the file map to stored enum values)."""
        with open('race_formats.schema.json', 'r') as f:
            schema = json.load(f)
        with open('race_formats.json', 'r') as f:
            default_race_formats = json.load(f)
        jsonschema.validate(instance=default_race_formats, schema=schema)
        for race_format in default_race_formats:
            if not self._Database.RaceFormat.query.filter_by(name=race_format['format_name']).one_or_none():
                self.add_format({
                    'format_name': race_format['format_name'],
                    'race_mode': RaceMode[race_format['race_mode']].value,
                    'race_time_sec': race_format['race_time_sec'],
                    'lap_grace_sec': race_format['lap_grace_sec'],
                    'start_delay_min': race_format['start_delay_min'],
                    'start_delay_max': race_format['start_delay_max'],
                    'staging_tones': StagingTones[race_format['staging_tones']].value,
                    'number_laps_win': race_format['number_laps_win'],
                    'win_condition': WinCondition[race_format['win_condition']].value,
                    'team_racing_mode': race_format['team_racing_mode'],
                    'start_behavior': StartBehavior[race_format['start_behavior']].value
                })
        self.commit()
    # Race Meta
    def get_savedRaceMeta(self, race_id):
        """Return the SavedRaceMeta row with the given id, or None."""
        return self._Database.SavedRaceMeta.query.get(race_id)
    def get_savedRaceMeta_by_heat_round(self, heat_id, round_id):
        """Return the single saved race for a heat/round (raises if absent)."""
        return self._Database.SavedRaceMeta.query.filter_by(heat_id=heat_id, round_id=round_id).one()
    def get_savedRaceMetas(self):
        """Return all saved race metas."""
        return self._Database.SavedRaceMeta.query.all()
    def get_savedRaceMetas_by_heat(self, heat_id):
        """Return a heat's saved races ordered by round."""
        return self._Database.SavedRaceMeta.query.filter_by(heat_id=heat_id).order_by(self._Database.SavedRaceMeta.round_id).all()
    def get_savedRaceMetas_by_raceClass(self, class_id):
        """Return a class's saved races ordered by round."""
        return self._Database.SavedRaceMeta.query.filter_by(class_id=class_id).order_by(self._Database.SavedRaceMeta.round_id).all()
    def savedRaceMetas_has_raceFormat(self, race_format_id):
        """True if any saved race uses the given format."""
        return bool(self._Database.SavedRaceMeta.query.filter_by(format_id=race_format_id).count())
    def savedRaceMetas_has_heat(self, heat_id):
        """True if any saved race belongs to the given heat."""
        return bool(self._Database.SavedRaceMeta.query.filter_by(heat_id=heat_id).count())
    def savedRaceMetas_has_raceClass(self, class_id):
        """True if any saved race belongs to the given class."""
        return bool(self._Database.SavedRaceMeta.query.filter_by(class_id=class_id).count())
    def add_savedRaceMeta(self, data):
        """Persist a new saved race meta from *data* and return it."""
        new_race = self._Database.SavedRaceMeta(
            round_id=data['round_id'],
            heat_id=data['heat_id'],
            class_id=data['class_id'],
            format_id=data['format_id'],
            start_time=data['start_time'],
            start_time_formatted=data['start_time_formatted']
        )
        self._Database.DB.session.add(new_race)
        self.commit()
        logger.info('Race added: Race {0}'.format(new_race.id))
        return new_race
def reassign_savedRaceMeta_heat(self, race_id, new_heat_id):
race_meta = self._Database.SavedRaceMeta.query.get(race_id)
old_heat_id = race_meta.heat_id
old_heat = self.get_heat(old_heat_id)
old_class = self.get_raceClass(old_heat.class_id)
old_format_id = old_class.format_id
new_heat = self.get_heat(new_heat_id)
new_class = self.get_raceClass(new_heat.class_id)
new_format_id = new_class.format_id
# clear round ids
heat_races = self._Database.SavedRaceMeta.query.filter_by(heat_id=new_heat_id).order_by(self._Database.SavedRaceMeta.round_id).all()
race_meta.round_id = 0
dummy_round_counter = -1
for race in heat_races:
race.round_id = dummy_round_counter
dummy_round_counter -= 1
# assign new heat
race_meta.heat_id = new_heat_id
race_meta.class_id = new_heat.class_id
race_meta.format_id = new_format_id
# reassign pilots to pilotRaces
new_pilots = self.get_heatNodes_by_heat(new_heat_id)
for np in new_pilots:
for pilot_race in self.get_savedPilotRaces_by_savedRaceMeta(race_id):
if pilot_race.node_index == np.node_index:
pilot_race.pilot_id = np.pilot_id
for lap in self.get_savedRaceLaps_by_savedPilotRace(pilot_race.id):
lap.pilot_id = np.pilot_id
break
if pilot_race.node_index == np.node_index:
pilot_race.pilot_id = np.pilot_id
break
# renumber rounds
self._Database.DB.session.flush()
old_heat_races = self._Database.SavedRaceMeta.query.filter_by(heat_id=old_heat_id) \
.order_by(self._Database.SavedRaceMeta.start_time_formatted).all()
round_counter = 1
for race in old_heat_races:
race.round_id = round_counter
round_counter += 1
new_heat_races = self._Database.SavedRaceMeta.query.filter_by(heat_id=new_heat_id) \
.order_by(self._Database.SavedRaceMeta.start_time_formatted).all()
round_counter = 1
for race in new_heat_races:
race.round_id = round_counter
round_counter += 1
self.commit()
# cache cleaning
self._PageCache.set_valid(False)
self._RESULTS_CACHE.evict_heat(new_heat.id)
self._RESULTS_CACHE.evict_heat(old_heat.id)
if old_heat.class_id != new_heat.class_id:
self._RESULTS_CACHE.evict_class(new_class.id)
self._RESULTS_CACHE.evict_class(old_class.id)
self.commit()
self._Events.trigger(Evt.RACE_ALTER, {
'race_id': race_id,
})
logger.info('Race {0} reaasigned to heat {1}'.format(race_id, new_heat_id))
return race_meta, new_heat
    def get_max_round(self, heat_id):
        """Return the highest round number recorded for a heat (0 if none)."""
        max_round = self._Database.DB.session.query(
            self._Database.DB.func.max(
                self._Database.SavedRaceMeta.round_id
            )).filter_by(heat_id=heat_id).scalar()
        return max_round if max_round is not None else 0
    # Pilot-Races
    def get_savedPilotRace(self, pilotrace_id):
        """Return the SavedPilotRace row with the given id, or None."""
        return self._Database.SavedPilotRace.query.get(pilotrace_id)
    def get_savedPilotRaces(self):
        """Return all saved pilot-race rows."""
        return self._Database.SavedPilotRace.query.all()
    def get_savedPilotRaces_by_savedRaceMeta(self, race_id):
        """Return the pilot-race rows belonging to a saved race."""
        return self._Database.SavedPilotRace.query.filter_by(race_id=race_id).all()
    def alter_savedPilotRace(self, data):
        """Update enter/exit calibration points of a saved pilot-race."""
        pilotrace = self._Database.SavedPilotRace.query.get(data['pilotrace_id'])
        if 'enter_at' in data:
            pilotrace.enter_at = data['enter_at']
        if 'exit_at' in data:
            pilotrace.exit_at = data['exit_at']
        self.commit()
        return True
    def savedPilotRaces_has_pilot(self, pilot_id):
        """True if any saved pilot-race references the given pilot."""
        return bool(self._Database.SavedPilotRace.query.filter_by(pilot_id=pilot_id).count())
    # Race Laps
    def get_savedRaceLaps(self):
        """Return all saved laps."""
        return self._Database.SavedRaceLap.query.all()
    def get_savedRaceLaps_by_savedPilotRace(self, pilotrace_id):
        """Return a pilot-race's laps ordered by timestamp."""
        return self._Database.SavedRaceLap.query.filter_by(pilotrace_id=pilotrace_id).order_by(self._Database.SavedRaceLap.lap_time_stamp).all()
    def get_active_savedRaceLaps(self):
        """Return all laps not marked deleted."""
        return self._Database.SavedRaceLap.query.filter(self._Database.SavedRaceLap.deleted != 1).all()
    def replace_savedRaceLaps(self, data):
        """Replace all laps of one pilot-race with the laps given in *data*."""
        self._Database.SavedRaceLap.query.filter_by(pilotrace_id=data['pilotrace_id']).delete()
        for lap in data['laps']:
            self._Database.DB.session.add(self._Database.SavedRaceLap(
                race_id=data['race_id'],
                pilotrace_id=data['pilotrace_id'],
                node_index=data['node_index'],
                pilot_id=data['pilot_id'],
                lap_time_stamp=lap['lap_time_stamp'],
                lap_time=lap['lap_time'],
                lap_time_formatted=lap['lap_time_formatted'],
                source = lap['source'],
                deleted = lap['deleted']
            ))
        self.commit()
        return True
    # Race general
    def add_race_data(self, data):
        """Persist per-node race results: a pilot-race row plus its laps
        and splits for every node.

        *data* maps node_index -> node result dict.
        """
        for node_index, node_data in data.items():
            new_pilotrace = self._Database.SavedPilotRace(
                race_id=node_data['race_id'],
                node_index=node_index,
                pilot_id=node_data['pilot_id'],
                history_values=node_data['history_values'],
                history_times=node_data['history_times'],
                lifetime_values=node_data.get('lifetime_values'),
                lifetime_times=node_data.get('lifetime_times'),
                penalty_time=0,
                enter_at=node_data['enter_at'],
                exit_at=node_data['exit_at']
            )
            self._Database.DB.session.add(new_pilotrace)
            # flush+refresh to obtain the new pilot-race id for lap rows
            self._Database.DB.session.flush()
            self._Database.DB.session.refresh(new_pilotrace)
            for lap in node_data['laps']:
                self._Database.DB.session.add(self._Database.SavedRaceLap(
                    race_id=node_data['race_id'],
                    pilotrace_id=new_pilotrace.id,
                    node_index=node_index,
                    pilot_id=node_data['pilot_id'],
                    lap_time_stamp=lap['lap_time_stamp'],
                    lap_time=lap['lap_time'],
                    lap_time_formatted=lap['lap_time_formatted'],
                    source=lap['source'],
                    deleted=lap['deleted']
                ))
            for split in node_data['splits']:
                self._Database.DB.session.add(self._Database.SavedRaceLapSplit(
                    race_id=node_data['race_id'],
                    node_index=node_index,
                    pilot_id=node_data['pilot_id'],
                    lap_id=split['lap_id'],
                    split_id=split['split_id'],
                    split_time_stamp=split['split_time_stamp'],
                    split_time=split['split_time'],
                    split_time_formatted=split['split_time_formatted'],
                    split_speed=split['split_speed']
                ))
        self.commit()
        return True
    def clear_race_data(self):
        """Delete all saved race data: metas, pilot-races, laps and splits."""
        self._Database.DB.session.query(self._Database.SavedRaceMeta).delete()
        self._Database.DB.session.query(self._Database.SavedPilotRace).delete()
        self._Database.DB.session.query(self._Database.SavedRaceLap).delete()
        self._Database.DB.session.query(self._Database.SavedRaceLapSplit).delete()
        self.commit()
        logger.info('Database saved races reset')
        return True
    # Splits
    def get_lapSplits(self):
        """Return all lap splits."""
        return self._Database.SavedRaceLapSplit.query.all()
    def get_lapSplits_by_lap(self, race_id, node_index, lap_id):
        """Return the splits recorded for one lap of one node in a race."""
        return self._Database.SavedRaceLapSplit.query.filter_by(
            race_id=race_id,
            node_index=node_index,
            lap_id=lap_id
        ).all()
    def clear_lapSplit(self, lapSplit):
        """Delete a single lap-split row."""
        self._Database.DB.session.delete(lapSplit)
        self.commit()
        return True
    # Options
    def get_options(self):
        """Return all GlobalSettings rows."""
        return self._Database.GlobalSettings.query.all()
    def get_option(self, option, default_value=False):
        """Return a cached option value, or *default_value* if unset.

        An empty string counts as a stored value; other falsy values fall
        back to the default.
        """
        try:
            val = self._OptionsCache[option]
            if val or val == "":
                return val
            else:
                return default_value
        except:  # NOTE(review): bare except — any cache error yields the default
            return default_value
    def set_option(self, option, value):
        """Store an option in the cache and upsert it into GlobalSettings."""
        self._OptionsCache[option] = value
        settings = self._Database.GlobalSettings.query.filter_by(option_name=option).one_or_none()
        if settings:
            settings.option_value = value
        else:
            self._Database.DB.session.add(self._Database.GlobalSettings(option_name=option, option_value=value))
        self.commit()
    def get_optionInt(self, option, default_value=0):
        """Return an option coerced to int, or *default_value* if unset/invalid."""
        try:
            val = self._OptionsCache[option]
            if val:
                return int(val)
            else:
                return default_value
        except:  # NOTE(review): bare except — also hides bad int conversions
            return default_value
    def set_optionJson(self, option, value):
        """Store *value* JSON-encoded under *option*."""
        self.set_option(option, json.dumps(value))
    def get_optionJson(self, option, default_value=None):
        """Return a JSON-decoded option, or *default_value* if unset/invalid."""
        try:
            val = self._OptionsCache[option]
            if val:
                return json.loads(val)
            else:
                return default_value
        except:  # NOTE(review): bare except — also hides JSON decode errors
            return default_value
    def clear_options(self):
        """Delete all stored options."""
        self._Database.DB.session.query(self._Database.GlobalSettings).delete()
        self.commit()
        return True
    def reset_options(self):
        """Clear all options and restore factory defaults."""
        self.clear_options()
        self.set_option("server_api", self._SERVER_API)
        # group identifiers
        self.set_option("timerName", self.__("RotorHazard"))
        self.set_option("timerLogo", "")
        # group colors
        self.set_option("hue_0", "212")
        self.set_option("sat_0", "55")
        self.set_option("lum_0_low", "29.2")
        self.set_option("lum_0_high", "46.7")
        self.set_option("contrast_0_low", "#ffffff")
        self.set_option("contrast_0_high", "#ffffff")
        self.set_option("hue_1", "25")
        self.set_option("sat_1", "85.3")
        self.set_option("lum_1_low", "37.6")
        self.set_option("lum_1_high", "54.5")
        self.set_option("contrast_1_low", "#ffffff")
        self.set_option("contrast_1_high", "#000000")
        # timer state
        self.set_option("currentLanguage", "")
        self.set_option("timeFormat", "{m}:{s}.{d}")
        self.set_option("timeFormatPhonetic", "{m} {s}.{d}")
        self.set_option("currentProfile", "1")
        self.set_option("calibrationMode", "0")
        # minimum lap
        self.set_option("MinLapSec", "10")
        self.set_option("MinLapBehavior", "0")
        # event information
        self.set_option("eventName", self.__("FPV Race"))
        self.set_option("eventDescription", "")
        # LED settings
        self.set_option("ledBrightness", "32")
        self.set_option("startThreshLowerAmount", "0")
        self.set_option("startThreshLowerDuration", "0")
        self.set_option("nextHeatBehavior", "0")
        logger.info("Reset global settings")
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,104
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_web.py
|
import unittest
import json
from rh.app import web
from rh.orgs import ifpv_org
web.init(None)
class WebTest(unittest.TestCase):
    """Tests for pilot-data scraping and IFPV event conversion.

    The network-dependent tests treat connectivity failure as a soft skip
    by returning early.
    """
    def test_ifpv_pilot(self):
        # live scrape of an IFPV league pilot page
        url = 'https://league.ifpv.co.uk/pilots/220'
        data = web.get_pilot_data(url)
        if not data:
            print("Skipping test - could not connect to {}".format(url))
            return
        self.assertEqual(data['name'], 'Jon Totham')
        self.assertEqual(data['callsign'], 'Vaxel')
        self.assertEqual(data['logo'], 'https://league.ifpv.co.uk/storage/images/pilots/1515246538.gif')
    def test_ifpv_event(self):
        # fixture-driven conversion test; no network required
        with open('tests/test_ifpv_event.json') as f:
            ifpv_json = json.loads(f.read())
        ifpv = ifpv_org.Ifpv()
        actual_json = ifpv.convert_ifpv_json(ifpv_json)
        with open('tests/test_converted_ifpv_event.json') as f:
            expected_json = json.loads(f.read())
        self.assertDictEqual(actual_json, expected_json)
    def test_multigp(self):
        # live scrape of a MultiGP pilot profile
        url = 'https://www.multigp.com/pilots/view/?pilot=SeekND'
        data = web.get_pilot_data(url)
        if not data:
            print("Skipping test - could not connect to {}".format(url))
            return
        self.assertEqual(data['logo'], 'https://multigp-storage-new.s3.us-east-2.amazonaws.com/user/1135/profileImage-20.png')
# Allow running this test module directly
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,105
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/__init__.py
|
from collections import deque
import math
from time import perf_counter_ns
def ms_counter() -> int:
    """Current monotonic performance counter, rounded to whole milliseconds."""
    ns = perf_counter_ns()
    return round(ns / 1000000)
def secs_to_millis(secs: float) -> int:
    """Convert seconds to the nearest whole number of milliseconds."""
    return round(secs * 1000)
def millis_to_secs(ms: int) -> float:
    """Convert milliseconds to (possibly fractional) seconds."""
    seconds = ms / 1000
    return seconds
class Averager:
    """Sliding-window running average that also tracks min, max and the
    most recently appended value."""

    def __init__(self, maxNumItems):
        # maximum number of samples retained in the window
        self._n = maxNumItems
        self._reset()

    def _reset(self):
        # window contents plus cached aggregate statistics
        self._data = deque()
        self._minVal = None
        self._maxVal = None
        self._lastVal = None
        self._avgVal = None
        self._totalVal = 0
        self._newAvgFlag = False

    def append(self, value):
        """Add a sample, evicting the oldest once the window is full."""
        self._lastVal = value
        if len(self._data) < self._n:
            self._data.append(value)
            self._totalVal += value
            if len(self._data) == 1:
                # first sample defines every statistic
                self._minVal = self._maxVal = self._avgVal = value
                return
            if value < self._minVal:
                self._minVal = value
            elif value > self._maxVal:
                self._maxVal = value
            self._newAvgFlag = True
            return
        # window full: widen min/max for the incoming sample first, then
        # evict the oldest and repair min/max if it was an extreme
        if value < self._minVal:
            self._minVal = value
        elif value > self._maxVal:
            self._maxVal = value
        evicted = self._data.popleft()
        self._totalVal -= evicted
        self._data.append(value)
        self._totalVal += value
        self._newAvgFlag = True
        if evicted <= self._minVal:
            self._minVal = min(self._data)
        if evicted >= self._maxVal:
            self._maxVal = max(self._data)

    def clear(self):
        """Discard all samples and cached statistics."""
        self._reset()

    @property
    def min(self):
        return self._minVal

    @property
    def max(self):
        return self._maxVal

    @property
    def last(self):
        return self._lastVal

    @property
    def mean(self):
        # lazily recompute only when the window changed since the last read
        if self._newAvgFlag:
            self._newAvgFlag = False
            self._avgVal = self._totalVal / len(self._data)
        return self._avgVal

    @property
    def std(self):
        # population standard deviation over the current window
        count = len(self._data)
        if count == 0:
            return None
        center = self.mean
        variance = sum((sample - center) ** 2 for sample in self._data) / count
        return math.sqrt(variance)

    def formatted(self, decimalplaces=None, units=''):
        """Return a human-readable summary of mean/std/min/max."""
        if decimalplaces is None:
            fmt = lambda x: x
        else:
            fmt = lambda x: x if x is None else round(x, decimalplaces)
        return "mean {}{}, std {}{}, min {}{}, max {}{}".format(
            fmt(self.mean), units,
            fmt(self.std), units,
            fmt(self.min), units,
            fmt(self.max), units
        )

    def __getitem__(self, item):
        return self._data[item]

    def __len__(self):
        return len(self._data)

    def __str__(self):
        return self.formatted()
import flask
import numpy as np
class StrictJsonEncoder(flask.json.JSONEncoder):
    """Flask JSON encoder that rejects NaN/Infinity and unwraps numpy scalars."""
    # NOTE(review): flask.json.JSONEncoder was removed in Flask 2.3 —
    # confirm the pinned Flask version still provides it
    def __init__(self, *args, **kwargs):
        # disallow NaN/Infinity so output is strictly valid JSON
        kwargs['allow_nan'] = False
        super().__init__(*args, **kwargs)
    def default(self, o):
        # numpy scalar types serialize as their native Python equivalents
        if isinstance(o, np.generic):
            return o.item()
        else:
            return super().default(o)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,682,198
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/mail.py
|
from flask import render_template
from flask_mail import Message
from threading import Thread
from email.message import EmailMessage
from sentiport import app, mail
def create_email_message(from_address, to_address, subject,
                         plaintext, html=None):
    """Build an EmailMessage with a plain-text body and optional HTML part.

    The first three parameters become the standard From/To/Subject headers;
    when *html* is given it is attached as a text/html alternative, turning
    the message into multipart/alternative.
    """
    message = EmailMessage()
    for header, value in (('From', from_address),
                          ('To', to_address),
                          ('Subject', subject)):
        message[header] = value
    message.set_content(plaintext)
    if html is not None:
        message.add_alternative(html, subtype='html')
    return message
def get_user_mail(targetmail):
    """Split an email address into (username, domain) at the last '@'.

    Returns a ``(local_part, domain)`` tuple.

    Raises ValueError when *targetmail* contains no '@'.  The previous
    implementation raised an opaque UnboundLocalError in that case, and when
    '@' was the final character it returned the whole address as the domain
    (``targetmail[-0:]`` slices the entire string).
    """
    username, sep, domain = targetmail.rpartition('@')
    if not sep:
        raise ValueError("not a valid email address: %r" % (targetmail,))
    return username, domain
# Got this from pedagogy, later will be modified
def send_async_email(app, msg):
    """Send *msg* via flask_mail inside *app*'s application context.

    Intended to run on a worker thread (see send_email); mail.send requires
    an active application context to read the mail configuration.
    """
    with app.app_context():
        mail.send(msg)
def send_email(subject, sender, recipients, text_body, html_body):
    """Compose a flask_mail Message and dispatch it on a background thread."""
    message = Message(subject, sender=sender, recipients=recipients)
    message.body = text_body
    message.html = html_body
    worker = Thread(target=send_async_email, args=(app, message))
    worker.start()
def send_pw_reset_email(user):
    """Email *user* a password-reset link carrying a freshly issued token."""
    token = user.get_reset_password_token()
    send_email('Reset your password on Pedagogy',
               sender=app.config['ADMINS'][0],  # first configured admin sends
               recipients=[user.email],
               text_body=render_template('email/reset_password.txt',
                                         user=user, token=token),
               html_body=render_template('email/reset_password.html',
                                         user=user, token=token))
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,199
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/config.py
|
"""Flask config."""
from os import environ, path
from dotenv import load_dotenv
# Project root; values from the adjacent .env file become visible via
# os.environ before Config below reads them.
BASE_DIR = path.abspath(path.dirname(__file__))
load_dotenv(path.join(BASE_DIR, '.env'))
class Config:
    """Flask configuration variables (read by app.config.from_object)."""
    # General Config
    FLASK_ASSETS_USE_CDN = True
    PROPAGATE_EXCEPTIONS = True
    FLASK_APP = environ.get('FLASK_APP')
    FLASK_ENV = environ.get('FLASK_ENV')
    SECRET_KEY = environ.get('SECRET_KEY')
    # Cache Config
    CACHE_TYPE = 'simple'  # in-process memory cache
    CACHE_DEFAULT_TIMEOUT = 86400  # 24 hours in seconds
    CACHE_IGNORE_ERRORS = True
    # Static Assets
    STATIC_FOLDER = 'static'
    TEMPLATES_FOLDER = 'templates'
    COMPRESSOR_DEBUG = environ.get('COMPRESSOR_DEBUG')
    # Email (currently disabled; see sentiport/routes.py which uses smtplib
    # directly with ST_EMAIL/ST_PASSWORD instead)
    # MAIL_SERVER = 'smtp.googlemail.com'
    # MAIL_PORT = 587
    # MAIL_USE_TLS = True
    # MY_EMAIL = environ.get('MY_EMAIL')
    # MY_PASSWORD = environ.get('MY_PASSWORD')
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,200
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/routes.py
|
import re
import requests
from email.headerregistry import Address
import smtplib
from email.mime.base import MIMEBase
from email import encoders
from os import environ, mkdir
from shutil import rmtree
from threading import Thread
from flask.helpers import make_response
# importing unit 4's functions
from sentiport.utils.utilities.crawling import *
from sentiport.pdf_generator import create_pdf
from sentiport.mail import create_email_message, get_user_mail
from sentiport.forms import AppForm
from sentiport import app, thread_lock, store
from flask import render_template, url_for, flash, redirect, request, abort, jsonify
from uuid import uuid1
# Error handler
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page with a matching HTTP status code."""
    return render_template('404.html'), 404
@app.route("/", methods=['GET', 'POST'])
@app.route("/iframe", methods=['GET', 'POST'])
def index():
    """Serve the landing page; /iframe gets the embed-friendly template."""
    if "iframe" in request.url_rule.rule:
        return render_template('index-iframe.html')
    return render_template('index.html', form=AppForm())
@app.route("/status/<thread_id>", methods=['GET'])
def status(thread_id):
    """Report the background pipeline status stored in redis for *thread_id*."""
    # Common mistakes: passing byte got from redis into JSON response
    try:
        statuses = store.hmget(
            thread_id,
            "is_running",
            "is_error",
            "error_message",
            "runtime_message"
        )
        # redis returns bytes; decode everything before JSON serialization.
        is_running, is_error, error_message, runtime_message = [
            status.decode("utf-8") for status in statuses]
        is_running = int(is_running)
        is_error = int(is_error)
        return jsonify({
            "status": 200,
            "thread_id": thread_id,
            "task_status": {
                "isRunning": is_running,
                "isError": is_error,
                "errorMessage": error_message,
                "runtimeMessage": runtime_message
            }
        })
    except Exception as e:
        # NOTE(review): body advertises 500 but the HTTP status stays 200;
        # the JS client appears to read the body field — confirm intended.
        return {
            "status": 500,
            "error": str(e)
        }
@app.route("/status/delete", methods=['POST'])
def delete_thread_status():
    """Delete a thread's status hash from redis.

    Assumption: Most of the time, this method is called naturally by
    JavaScript when the thread has stopped running, not manually by a user.
    """
    try:
        thread_id = request.json["thread_id"]
        store.delete(thread_id)
        return {"status": 200, "message": "thread status deleted"}
    except Exception as e:
        # Bug fix: the error path previously reported "status": 200, making
        # failures indistinguishable from success; 500 matches the other
        # handlers in this module.
        return {"status": 500, "error_message": str(e)}
@app.route("/scrape", methods=['POST'])
@app.route("/scrape/iframe", methods=['POST'])
def scrape():
    """Validate the submitted Play Store URL and launch the report pipeline.

    Spawns a background thread running `pipeline`; for the non-iframe flow it
    also seeds a redis status hash and returns a polling status page.
    """
    try:
        rule = request.url_rule
        is_iframe = "iframe" in rule.rule
        form = None
        if is_iframe:
            # Iframe flow posts raw fields and skips WTForms validation.
            APP_URL = request.form['app_id']
            COUNTRY = request.form['country_code']
            targetmail = request.form['email']
            # DEBUG
            print(request.form)
            is_form_valid = True  # Bypass rule
        else:
            form = AppForm()
            APP_URL = form.app_id.data
            COUNTRY = request.form['country_code']
            targetmail = form.email.data
            is_form_valid = form.validate_on_submit()
        if is_form_valid:
            try:
                # Reachability check plus package-id extraction from the URL.
                url_res = requests.get(APP_URL)
                PLAYSTORE_ID = get_id(APP_URL)
            except:
                # NOTE(review): abort(404) raises, which the outer bare
                # except turns into the 500 JSON below — confirm intended.
                abort(404)
            # start thread
            if url_res.status_code == 200:
                thread_id = str(uuid1())
                thread = Thread(
                    target=pipeline,
                    args=(
                        PLAYSTORE_ID,
                        COUNTRY,
                        targetmail,
                        thread_id,
                        is_iframe
                    )
                )
                thread.start()
                # store status to redis
                if not is_iframe:
                    store.hmset(thread_id, {
                        "is_running": int(True),
                        "is_error": int(False),
                        "error_message": "",
                        "runtime_message": "Scraping data",
                    })
                if is_iframe:
                    return redirect(url_for('index'))
                status_url = url_for("status", thread_id=thread_id)
                return make_response(render_template(
                    'status.html',
                    status_url=status_url,
                    thread_id=thread_id,
                    playstore_id=PLAYSTORE_ID,
                    country_code=COUNTRY,
                    user_email=targetmail,
                    form=form
                ))
            flash("""Wrong url or the application doesnt exist""", 'danger')
            return redirect(url_for('index'))
        flash("""Wrong Playstore URL or the app doesnt exist""", 'danger')
        return redirect(url_for('index'))
    except:
        return {"status": 500, "message": "Internal server error"}
def get_id(toParse):
    """Extract the Play Store package id from a URL's ``?id=`` parameter.

    Raises ValueError when no ``?id=`` parameter is present (previously this
    raised an opaque IndexError from ``findall(...)[0]``).  Only the first
    match is needed, so ``re.search`` replaces the full ``findall`` scan.
    """
    match = re.search(r'\?id=([a-zA-Z0-9\.]+)', toParse)
    if match is None:
        raise ValueError("no ?id= parameter found in %r" % (toParse,))
    return match.group(1)
def pipeline(playstore_id, country, targetmail, thread_id, is_iframe):
    """Full report pipeline: crawl reviews, build the PDF, email it.

    Runs on a worker thread.  Progress and errors are mirrored into a redis
    hash keyed by *thread_id* (skipped for the iframe flow).  The per-request
    artifacts directory is always removed afterwards.
    """
    temp_path = f'sentiport/artifacts/{thread_id}'
    mkdir(temp_path)
    try:
        """PREPARING PLOTS AND VALUE"""
        # store status to redis
        if not is_iframe:
            store.hmset(thread_id, {
                "is_running": int(True),
                "is_error": int(False),
                "runtime_message": "Scraping data"
            })
        # crawling
        DATAFRAME = get_crawl_google(playstore_id, country)
        if not is_iframe:
            store.hmset(thread_id, {
                "is_running": int(True),
                "is_error": int(False),
                "runtime_message": "Creating PDF"
            })
        # PDF generation is serialized across worker threads — presumably
        # because it mutates shared plotting state; confirm before removing.
        with thread_lock:
            filename = create_pdf(DATAFRAME, playstore_id, country, thread_id)
        """SEND THE REPORT THROUGH EMAIL"""
        if not is_iframe:
            store.hmset(thread_id, {
                "is_running": int(True),
                "is_error": int(False),
                "runtime_message": "Sending email"
            })
        uname_targetmail, domain_targetmail = get_user_mail(targetmail)
        # Get the account used to send report
        email_address = environ.get('ST_EMAIL')
        print("my email: " + email_address)
        email_password = environ.get('ST_PASSWORD')
        # targeted email
        to_address = (
            Address(
                username=uname_targetmail,
                domain=domain_targetmail
            ),
        )
        # body message
        with open("sentiport/templates/mail.html", "r", encoding='utf-8') as f:
            HTML_MESSAGE = f.read()
        msg = create_email_message(
            from_address=email_address,
            to_address=to_address,
            subject=f'{playstore_id} Review Analysis Report',
            plaintext="Plain text version.",
            html=HTML_MESSAGE
        )
        p = MIMEBase('application', 'octet-stream')
        # attaching the report into email
        # Bug fix: open the PDF create_pdf just produced; the path was
        # previously hard-coded to a file that never exists, while the
        # Content-Disposition header already used `filename`.
        with open(f"{temp_path}/{filename}", "rb") as attachment:
            p.set_payload(attachment.read())
        encoders.encode_base64(p)
        p.add_header('Content-Disposition',
                     "attachment; filename= %s" % filename)
        msg.attach(p)
        with smtplib.SMTP('smtp.gmail.com', port=587) as smtp_server:
            smtp_server.ehlo()
            smtp_server.starttls()
            smtp_server.login(email_address, email_password)
            smtp_server.send_message(msg)
        print('Email sent successfully')
        # store status to redis
        if not is_iframe:
            store.hmset(thread_id, {
                "is_running": int(False),
                "is_error": int(False)
            })
    except Exception as e:
        # store status to redis so the polling client sees the failure
        if not is_iframe:
            store.hmset(thread_id, {
                "is_running": int(False),
                "is_error": int(True),
                "error_message": str(e)
            })
    finally:
        # Always clean up the per-request artifacts directory.
        rmtree(temp_path)
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,201
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/utils/plot_rating/rating.py
|
# Libraries
import altair as alt
import pandas as pd
import re
import regex
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.colors as mcolors
import matplotlib.cm
import matplotlib.font_manager as fm
import matplotlib.ticker as plticker
import seaborn as sns
# -*- coding: utf-8 -*-
"""Matplotlib_02.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1OduaKQslyxiCeuI5shKfxlPfNl7IZqPx
"""
# Bold Helvetica bundled with the project; used for chart titles below.
path = r'sentiport/utils/Helvetica-Font/Helvetica-Bold.ttf'
fontprop = fm.FontProperties(fname=path, size=15)
# @title User Rating Plot Function as "plot_matplotlib_totalrating_3(dataframe)"
def plot_overall_rating(dataframe, temp_dir):
    '''
    Plot the share of reviews per star rating as a horizontal bar chart and
    save it into the per-request artifacts directory.

    Parameters:
    dataframe -- data from get_crawl_data (any frame with 'rating' and
                 'review' columns works)
    temp_dir -- artifacts subdirectory name for this request
    returns:
    path of the saved PNG

    Fixes over the previous version: a `sort_values(..., inplace=False)`
    whose result was discarded (a no-op) is removed, the `min`/`max`
    builtins are no longer shadowed, and the annotation loop no longer
    reuses the name of the Series it iterates.
    '''
    # Aggregate: number of reviews per rating value.
    df = dataframe.copy()
    grouped_multiple = df.groupby(['rating']).agg({'review': ['count']})
    grouped_multiple.columns = ['review']
    grouped_multiple = grouped_multiple.reset_index()
    # Percentage share per rating, truncated to int for display.
    grouped_multiple['value'] = (
        grouped_multiple['review'] / grouped_multiple['review'].sum()) * 100
    grouped_multiple['value'] = grouped_multiple['value'].astype(int)
    grouped_multiple['star'] = grouped_multiple['rating'].astype(str) + '★'
    grouped_multiple = grouped_multiple.sort_values(
        by='rating', ascending=False)
    value = grouped_multiple['value']
    bars = grouped_multiple['star'].astype(str)
    obj = plt.figure(figsize=(3.8377, 2.9858), dpi=100)
    # Custom colormap spanning the observed rating range.
    max_rating = int(grouped_multiple['rating'].max())
    min_rating = int(grouped_multiple['rating'].min())
    cvals = [min_rating, (min_rating + max_rating) / 2, max_rating]
    colors = [(197/255, 186/255, 160/255), (209/255, 200 /
              255, 179/255), (236/255, 233/255, 224/255)]
    norm = plt.Normalize(min_rating, max_rating)
    tuples = list(zip(map(norm, cvals), colors))
    cmap = matplotlib.colors.LinearSegmentedColormap.from_list("", tuples)
    matplotlib.cm.register_cmap("mycolormap", cmap)
    cpal = sns.color_palette("mycolormap", n_colors=64, desat=0.2)
    plt.rcParams.update({
        "figure.facecolor": (1.0, 1.0, 1.0, 1.0),
        "axes.facecolor": (1.0, 1.0, 1.0, 1.0),
        "savefig.facecolor": (1.0, 1.0, 1.0, 1.0),
    })
    plt.grid(b=None)
    ax = sns.barplot(value, bars, palette='mycolormap')
    plt.title("Rating", fontproperties=fontprop)
    plt.ylabel(None)
    plt.yticks(fontsize=15)
    plt.xticks(fontsize=10)
    # Hide the x axis; percentages are annotated on the bars instead.
    ax = plt.gca()
    ax.axes.xaxis.set_visible(False)
    # Annotate each bar with its percentage share.
    for index, pct in enumerate(value):
        plt.text(pct + 2, index + 0.15, str(pct) + '%', fontsize=15)
    sns.despine(left=True, bottom=True, right=True, top=True)
    plt.grid(False)
    plt.savefig(f"sentiport/artifacts/{temp_dir}/fig_overall_rating.png",
                bbox_inches='tight')
    return f"sentiport/artifacts/{temp_dir}/fig_overall_rating.png"
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,202
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/utils/plot_detect_language/detect_language.py
|
import altair as alt
import cmasher as cmr
import pandas as pd
import numpy as np
import re
from pylab import *
import seaborn as sns
import matplotlib.pyplot as plt
from langdetect import detect
import numpy as np
import matplotlib.colors
import seaborn as sns
import matplotlib.cm
import matplotlib.font_manager as fm
from cycler import cycler
def deEmojify(text):
    '''
    Strip common emoji from *text*, flatten newlines to spaces, and collapse
    runs of spaces into one.
    '''
    emoji_pattern = re.compile("["
                               u"\U0001F600-\U0001F64F"  # emoticons
                               u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                               u"\U0001F680-\U0001F6FF"  # transport & map symbols
                               u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                               "]+", flags=re.UNICODE)
    cleaned = emoji_pattern.sub(r'', text)
    cleaned = cleaned.replace('\n', ' ')
    return re.sub(' +', ' ', cleaned)
def get_preprocess_data2(df):
    '''
    Preprocessing 2: return a copy of *df* with emoji removed from the
    'review' column (see deEmojify).
    '''
    cleaned = df.copy()
    cleaned['review'] = cleaned['review'].map(deEmojify)
    return cleaned
def detect_lang(DATAFRAME):
    '''
    Identify the language of every review using langdetect.

    Reviews with two words or fewer are dropped first (detection is
    unreliable on very short text); rows where detection raises are dropped
    as well.  Adds a 'lang' column and returns the reduced frame.
    '''
    list_lang = []
    row_x = []
    row_xx = []
    min_words_other = 2  # min words filtered
    # First pass: collect indices of too-short reviews, then drop them.
    for i in range(len(DATAFRAME)):
        if len(DATAFRAME.review[i].split()) <= min_words_other:
            row_xx.append(i)
    DATAFRAME = DATAFRAME.drop(row_xx).reset_index(drop=True)
    # Second pass: detect language; remember rows where detection failed
    # (those get no entry in list_lang and are dropped below).
    for i in range(len(DATAFRAME)):
        try:
            x = detect(DATAFRAME['review'][i])
            list_lang.append(x)
        except:
            x = 'no'
            row_x.append(i)
    # After reset_index the surviving rows align positionally with
    # list_lang, so the column assignment lines up by index.
    DATAFRAME = DATAFRAME.drop(row_x).reset_index(drop=True)
    DATAFRAME['lang'] = pd.DataFrame(list_lang)
    return DATAFRAME
def lang_checker(dataframe):
    '''
    Collapse minority languages into a single 'ud' ("other") bucket.

    Languages whose share exceeds min_percentage keep their code; everything
    else is relabelled 'ud', and very short 'ud' reviews are dropped.
    '''
    language = 'lang'
    percentage = 'percentage'
    replace_lang = 'ud'  # code used for the "other" bucket
    min_percentage = 10  # in percent(%)
    min_words_other = 2  # min words filtered
    # Share of reviews per language code.
    dataframe_grouped = dataframe.groupby(by=[language]).count()
    dataframe_grouped[percentage] = (
        dataframe_grouped.review/dataframe_grouped.review.sum())*100
    top_lang = dataframe_grouped[dataframe_grouped[percentage]
                                 > min_percentage].reset_index()
    top_lang = top_lang[language].tolist()
    # Everything outside the majority languages becomes 'ud'.
    other_lang = dataframe[~dataframe[language].isin(
        top_lang)][language].tolist()
    dataframe[language] = dataframe[language].replace(other_lang, replace_lang)
    # Drop 'ud' reviews that are too short to be meaningful.
    row_x = []
    for i in range(len(dataframe)):
        if dataframe.lang[i] == replace_lang:
            if len(dataframe.review[i].split()) <= min_words_other:
                row_x.append(i)
    dataframe = dataframe.drop(row_x).reset_index(drop=True)
    return dataframe
############################################################################################################################################################
################################################################PLOT########################################################################################
############################################################################################################################################################
def get_total_language(DATAFRAME):
    '''
    Combine all the language-detection steps into one function.

    Returns (lang, DATAFRAME1): `lang` holds per-language review counts with
    a human-readable 'Language' column, sorted by count descending;
    DATAFRAME1 is the cleaned review frame with its 'lang' column.
    '''
    # langdetect ISO code -> display name; 'ud' is the "other" bucket
    # produced by lang_checker.
    country_list = {"af": "Afrikaans", "ar": "Arabic", "bg": "Bulgarian", "bn": "Bengali", "ca": "Valencian", "cs": "Czech",
                    "cy": "Welsh", "da": "Danish", "de": "German", "el": "Greek", "en": "English", "es": "Spanish",
                    "et": "Estonian", "fa": "Persian", "fi": "Finnish", "fr": "French", "gu": "Gujarati", "he": "Hebrew",
                    "hi": "Hindi", "hr": "Croatian", "hu": "Hungarian", "id": "Indonesian", "it": "Italian", "ja": "Japanese",
                    "kn": "Kannada", "ko": "Korean", "lt": "Lithuanian", "lv": "Latvian", "mk": "Macedonian", "ml": "Malayalam",
                    "mr": "Marathy", "ne": "Nepali", "nl": "Dutch", "no": "Norwegian", "pa": "Punjabi", "pl": "Polish",
                    "pt": "Portuguese", "ro": "Romanian", "ru": "Russian", "sk": "Slovak", "sl": "Slovenian", "so": "Somali",
                    "sq": "Albanian", "sv": "Swedish", "sw": "Swahili", "ta": "Tamil", "te": "Telugu", "th": "Thai", "tl": "Tagalog",
                    "tr": "Turkish", "uk": "Ukranian", "ur": "Urdu", "vi": "Vietnamese", "zh-cn": "Chinese", "zh-tw": "Taiwan", "ud": "Other"}
    DATAFRAME = get_preprocess_data2(DATAFRAME)
    DATAFRAME1 = detect_lang(DATAFRAME)
    DATAFRAME1 = lang_checker(DATAFRAME1)
    # Count reviews per language code, most frequent first.
    lang = DATAFRAME1.groupby(by="lang").count()
    lang = lang.sort_values(by='review', ascending=False).reset_index()
    list_lang = []
    for i in range(len(lang)):
        list_lang.append(country_list[lang.lang[i]])
    lang['Language'] = pd.DataFrame(list_lang)
    return lang, DATAFRAME1
def plot_total_language(DATAFRAME):
    '''
    Function to get plot total language using altair library.

    Returns (altair_chart, cleaned_dataframe).
    '''
    lang, DATAFRAME1 = get_total_language(DATAFRAME)
    most_lang = lang['Language'][0]  # NOTE(review): computed but never used
    short = (lang.Language).tolist()
    plot = alt.Chart(lang).mark_rect().encode(
        alt.X('review:Q'),
        alt.Y('Language:N', sort='-x'),
        alt.Color('Language:N',
                  sort=short, legend=None
                  )
    ).configure_axis(
        grid=False
    )
    return plot, DATAFRAME1
def plot_total_language1(DATAFRAME):
    '''
    Function to get plot total language using matplotlib library.

    Returns ('fig_lang_1.png', most_common_language, cleaned_dataframe).
    NOTE(review): this saves into the current working directory, unlike the
    other plot helpers which write under sentiport/artifacts/ — confirm
    whether this variant is still used.
    '''
    lang, DATAFRAME1 = get_total_language(DATAFRAME)
    most_lang = lang['Language'][0]
    # NOTE(review): shadows the min/max builtins for the rest of the scope.
    max = int(lang['review'].max())
    min = int(lang['review'].min())
    # Custom colormap spanning the observed count range.
    cvals = [min, (min+max)/2, max]
    colors = [(109/255, 0/255, 0/255), (220/255, 211 /
              255, 196/255), (0/255, 0/255, 0/255)]
    norm = plt.Normalize(min, max)
    tuples = list(zip(map(norm, cvals), colors))
    cmap = matplotlib.colors.LinearSegmentedColormap.from_list("", tuples)
    matplotlib.cm.register_cmap("mycolormap", cmap)
    cpal = sns.color_palette("mycolormap", n_colors=64, desat=0.2)
    plt.rcParams.update({
        "figure.facecolor": (1.0, 1.0, 1.0, 1.0),
        "axes.facecolor": (1.0, 1.0, 1.0, 1.0),
        "savefig.facecolor": (1.0, 1.0, 1.0, 1.0),
    })
    obj = plt.figure(figsize=(10, 5))
    plt.grid(b=None)
    sub1 = subplot(111)  # subplot/grid/title/... come from `from pylab import *`
    ax = sns.barplot(x='review', y='Language', data=lang, palette='mycolormap')
    total = lang['review'].sum()
    # Annotate each bar with its percentage of the total review count.
    for p in ax.patches:
        percentage = '{:.1f}%'.format(100 * p.get_width()/total)
        number = '{:.0f}'.format(total)
        x = p.get_x() + p.get_width() + 0.02
        y = p.get_y() + p.get_height()/2
        ax.annotate(percentage, (x, y))
    sns.despine(left=True, bottom=True, right=True, top=True)
    grid(False)
    title('Total Review by Language')
    ylabel('')
    xlabel('')
    obj.savefig('fig_lang_1.png')
    return 'fig_lang_1.png', most_lang, DATAFRAME1
def plot_detect_language2(DATAFRAME, temp_dir):
    '''
    Render a doughnut chart of review counts per detected language.

    Returns (figure_path, most_common_language, cleaned_dataframe).
    '''
    # get the dataframe & prepare for plot
    lang, DATAFRAME1 = get_total_language(DATAFRAME)
    most_lang = lang['Language'][0]
    # Create new column with the annotation text, e.g. "42.0% English".
    anotate_col = 'anotate'
    percent_format = "{0:.1f}%"
    percent_coeff = 100
    lang[anotate_col] = (lang.review/lang.review.sum())
    lang[anotate_col] = pd.Series([percent_format.format(
        val * percent_coeff) for val in lang[anotate_col]], index=lang.index)
    lang[anotate_col] = lang[anotate_col].str.cat(lang.Language, sep=' ')
    # define value for the label
    anotate = lang.anotate.to_list()
    data = lang.review.to_list()
    # Set parameter for plot
    cmap_type = 'cmr.sepia_r'
    color_format = 'hex'
    rcParams = 'axes.prop_cycle'
    aspect = "equal"
    cmap_n = 5
    cmap_range = (0.15, 0.85)
    width = 0.5
    angle = -40
    # Set color map using cmasher
    colors = cmr.take_cmap_colors(
        cmap_type, cmap_n, cmap_range=cmap_range, return_fmt=color_format)
    plt.rcParams[rcParams] = cycler(color=colors)
    # install font for plot
    path = 'sentiport/utils/Helvetica-Font/Helvetica-Bold.ttf'
    path1 = 'sentiport/utils/Helvetica-Font/Helvetica-Oblique.ttf'
    Helvetica_Bold = fm.FontProperties(fname=path, size=18)
    Helvetica = fm.FontProperties(fname=path1, size=15)
    # Create the plot function to generate dougnut chart
    obj, ax = plt.subplots(figsize=(9.03307, 4.7017),
                           dpi=100, subplot_kw=dict(aspect=aspect))
    wedges, texts = ax.pie(data, wedgeprops=dict(
        width=width), startangle=angle)
    bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
    kw = dict(arrowprops=dict(arrowstyle="-"),
              bbox=bbox_props, zorder=0, va="center")
    # Place each label outside its wedge, connected with an elbow arrow;
    # the wedge's mid-angle decides which side the text sits on.
    for i, p in enumerate(wedges):
        ang = (p.theta2 - p.theta1)/2. + p.theta1
        y = np.sin(np.deg2rad(ang))
        x = np.cos(np.deg2rad(ang))
        horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
        connectionstyle = "angle,angleA=0,angleB={}".format(ang)
        kw["arrowprops"].update({"connectionstyle": connectionstyle})
        ax.annotate(anotate[i], xy=(x, y), xytext=(1.35*np.sign(x), 1.4*y),
                    horizontalalignment=horizontalalignment, **kw, fontproperties=Helvetica)
    # Total review count displayed in the doughnut hole.
    sumstr = 'From\n'+str(lang.review.sum())+'\nReviews'
    ax.text(0., 0., sumstr, horizontalalignment='center',
            verticalalignment='center', fontproperties=Helvetica_Bold)
    fig_path = f'sentiport/artifacts/{temp_dir}/fig_lang.png'
    obj.savefig(fig_path)
    return fig_path, most_lang, DATAFRAME1
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,203
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/__init__.py
|
from flask import Flask
from flask_mail import Mail
from threading import Lock
from os import environ, path
import redis
# Flask App
app = Flask(__name__)
app.config.from_object('config.Config')
# Mailing Service
mail = Mail(app)
# Multithreading primitives shared by the routes module
thread_lock = Lock()
threads = {}
# Redis key-val store (use this for heroku deployment without docker)
# store = redis.from_url(environ.get('REDISCLOUD_URL'))
# Redis key-val store (use if you want run it in your local machine or deploy it using docker)
store = redis.Redis(host="redis", port=6379)
# Imported at the bottom so the route decorators can see `app` (avoids the
# circular import between this module and sentiport.routes).
from sentiport import routes
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,204
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/utils/utilities/helper.py
|
def drawMyRuler(pdf):
    """Draw a debug ruler on *pdf* (a reportlab canvas).

    Labelled ticks ('x100'..'x1300' along y=755, 'y100'..'y700' along x=5)
    every 100pt, plus unlabelled 'x'/'y' marks offset by 50pt.
    """
    pdf.setFont("Courier-Bold", 20)
    for x in range(100, 1400, 100):
        pdf.drawString(x, 755, 'x%d' % x)
    for x in range(50, 1400, 100):
        pdf.drawString(x, 755, 'x')
    for y in range(100, 800, 100):
        pdf.drawString(5, y, 'y%d' % y)
    for y in range(50, 800, 100):
        pdf.drawString(5, y, 'y')
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,205
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/pdf_generator.py
|
from reportlab.pdfgen import canvas
from reportlab.platypus import Table
from reportlab.platypus import TableStyle
from reportlab.lib import colors
from datetime import datetime
from dateutil.relativedelta import relativedelta
import pandas as pd
import time
from datetime import date
from sentiport.utils.utilities.crawling import get_crawl_google, app_title, value_overall_rating, value_total_review, image_company_logo
# from sentiport.utils.utilities.helper import *
from sentiport.utils.plot_detect_language.detect_language import plot_detect_language2
from sentiport.utils.plot_rating.rating import *
from sentiport.utils.plot_sentiment_analysis.sentiment_analysis import *
from sentiport.utils.pdf_table_reportlab.bad_good_review import good_bad_table
from sentiport.utils.topic_extractor.topic_extractor import tag_topic, app_info, get_topic_df, get_topic_table
def short_name(app_title_name):
    """Return a short app name from the full Play Store title.

    Titles often look like "Halodoc - Doctors, Medicine" or
    "Grab: Taxi Ride"; the short name is the text before the first
    " - " or ":" separator.  When no separator is present, the first
    word of the title is used.

    Fixes two defects of the original: the ``if ':' ... else`` branch
    clobbered the result of the "-" branch whenever ":" was absent,
    and titles with several ":" characters produced concatenated
    garbage (prefixes for every occurrence were appended).
    """
    words = app_title_name.split()
    if not words:
        # defensive: empty/whitespace-only title
        return app_title_name
    if "-" in words:
        # everything before the first standalone "-" token
        return " ".join(words[:words.index("-")])
    if ":" in app_title_name:
        # everything before the first colon
        return app_title_name.split(":", 1)[0].strip()
    # no separator: fall back to the first word of the title
    return words[0]
def create_pdf(DF_SOURCE, PLAYSTORE_ID, COUNTRY, temp_dir):
    """Build the full review-analysis PDF report for one app.

    Parameters
    ----------
    DF_SOURCE : pandas.DataFrame
        Crawled reviews; must contain an 'at' timestamp column
        (see get_crawl_google).
    PLAYSTORE_ID : str
        Play Store application id.
    COUNTRY : str
        Two-letter store-front country code.
    temp_dir : str
        Per-request artifact directory under sentiport/artifacts/.

    Returns
    -------
    str
        File name (not full path) of the generated PDF; the file is
        written to sentiport/artifacts/{temp_dir}/.

    The function scrapes live Play Store data, renders every chart via
    the plot_* helpers, then lays the images and text out page by page
    on a fixed 1366x768 canvas (y coordinates are expressed as
    768 - offset because reportlab's origin is the bottom-left corner).
    """
    # cutting dataframe into maximum 1 year of data
    one_yr_ago = datetime.now() - relativedelta(years=1)
    DATAFRAME = DF_SOURCE.copy()
    DATAFRAME.index = DATAFRAME['at']
    DATAFRAME = DATAFRAME[DATAFRAME.index.map(pd.to_datetime) > one_yr_ago]
    DATAFRAME.reset_index(drop=True, inplace=True)
    start = time.time()
    # sentiment data preprocessing
    sentiment_dataframe = sentiment_visual_preprocessing(DATAFRAME)
    end = time.time()
    print(
        f"Sentiment pre-processing done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # scrapping current rating
    company_logo = image_company_logo(PLAYSTORE_ID, temp_dir)
    current_rating = value_overall_rating(PLAYSTORE_ID)
    end = time.time()
    print(f"Rating scrapping done! \n processing time: {(end-start)} sec")
    start = time.time()
    # scrapping current total review
    current_review = value_total_review(PLAYSTORE_ID)
    end = time.time()
    print(
        f"Total Review scrapping done! \n processing time: {(end-start)} sec")
    start = time.time()
    # scrapping current rating
    # NOTE(review): this duplicates the image_company_logo call above,
    # re-downloading the same logo; one of the two fetches could be
    # removed.
    company_logo = image_company_logo(PLAYSTORE_ID, temp_dir)
    end = time.time()
    print(
        f"Company logo scrapping done! \n processing time: {(end-start)} sec")
    start = time.time()
    # call detect language plot and most language value
    fig_lang, most_lang, df_lang = plot_detect_language2(DATAFRAME, temp_dir)
    end = time.time()
    print(
        f"Review Language done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call overall rating plot
    fig_overall_rating = plot_overall_rating(DATAFRAME, temp_dir)
    end = time.time()
    print(
        f"Overall Rating done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call total review by time plot and all the value
    fig_totalreview_time, MostReview_Month, MostReview_Month_Value = plot_totalreview_time(
        sentiment_dataframe, temp_dir)
    end = time.time()
    print(
        f"Total review-rating across months done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call total review by version plot and all the value
    fig_totalreview_version, MostReview_Version, MostReview_Version_Value = plot_totalreview_version(
        DATAFRAME, temp_dir)
    end = time.time()
    print(
        f"Total review-rating across version done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call total review sentiment plot and all the value
    fig_totalreview_sentiment, most_sentiment = plot_totalreview_sentiment(
        sentiment_dataframe, temp_dir)
    end = time.time()
    print(
        f"Overall review sentiment done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call sentiment by time plot and all the value
    fig_sentiment_time = plot_sentiment_time(sentiment_dataframe, temp_dir)
    end = time.time()
    print(
        f"Review sentiment across time done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # call sentiment by version plot and all the value
    fig_sentiment_version, MostPos_Version, MostNeg_Version = plot_sentiment_version(
        sentiment_dataframe, temp_dir)
    end = time.time()
    print(
        f"Review sentiment across version done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    start = time.time()
    # prepare good review and bad review table for plot
    negative_table, postive_table = good_bad_table(DATAFRAME)
    end = time.time()
    print(
        f"Good-Bad Review done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    # get the full app title (ex: Halodoc - Doctors, Medicine, & Appiontments)
    app_title_name, app_desc = app_info(PLAYSTORE_ID, COUNTRY)
    app_name = short_name(app_title_name)
    print("APP INFO OK")
    # prepare table for keyword extraction
    # NOTE(review): `start` is not reset here, so the "Keyword
    # Extraction" timing below also includes the app_info call and the
    # good/bad table work since the previous start.
    keyword_df, avg_rating_list, avg_sentiment_list, review_count_list = get_topic_df(
        df_lang, app_desc, app_title_name)
    keyword_df = tag_topic(keyword_df)
    list_of_topic = keyword_df.keyword.unique()
    end = time.time()
    print(
        f"Keyword Extraction done! \n processing time: {(end-start)} sec with {(len(DATAFRAME))} reviews")
    # create the report filename using app name
    fileName = f'{app_name}_review_analysis.pdf'
    # create the document title
    documentTitle = app_title_name
    # define canvas to create the report
    pdf = canvas.Canvas(
        f"sentiport/artifacts/{temp_dir}/{fileName}", pagesize=(1366, 768))
    # get today's date
    today = date.today()
    hari_ini = today.strftime("%B %d, %Y")
    """CREATING THE PDF"""
    print("Creating the PDF")
    start = time.time()
    """ COVER DEPAN """
    # put the opening page template
    pdf.drawInlineImage(
        'sentiport/utils/assets/cover_template.png', 0, 0, width=1366, height=768)
    # set the font, size, and position of date
    pdf.setFont("Helvetica", 18)
    pdf.drawString(1155, 768-63, hari_ini)
    # put logo in front page
    pdf.drawInlineImage(company_logo, 75, 768-350, width=230, height=230)
    # set font, size, and position of app name and report title
    pdf.setFont("Helvetica-Bold", 50)
    pdf.drawString(75, 768-405, app_name)
    pdf.setFont("Helvetica", 44)
    pdf.drawString(75, 768-460, "Application Analysis")
    # set font, size, and position of app id and country id
    pdf.setFont("Helvetica", 19)
    pdf.drawString(75, 768-505, f"App ID: {PLAYSTORE_ID}")
    pdf.drawString(75, 768-525, f"Country ID: {COUNTRY}")
    """ TABLE OF CONTENT """
    # page break
    pdf.showPage()
    # put table of content template
    pdf.drawInlineImage(
        'sentiport/utils/assets/table_of_content.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Table of Content")
    """ EXECUTIVE SUMMARY """
    # page break
    pdf.showPage()
    # put executive summary template
    pdf.drawInlineImage(
        'sentiport/utils/assets/executive_summary.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Executive Summary")
    """ INTRODUCTION """
    # page break
    pdf.showPage()
    # put the introduction template
    pdf.drawInlineImage('sentiport/utils/assets/Introduction.png',
                        0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set font, size, and position of app name, app id, country id, and current date
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(117, 768-180, "App Name")
    pdf.drawString(117, 768-215, "App ID")
    pdf.drawString(117, 768-250, "Country ID")
    pdf.drawString(117, 768-285, "Current Date")
    pdf.setFont("Helvetica-BoldOblique", 20)
    pdf.drawString(268, 768-180, f": {app_title_name}")
    pdf.drawString(268, 768-215, f": {PLAYSTORE_ID}")
    pdf.drawString(268, 768-250, f": {COUNTRY}")
    pdf.drawString(268, 768-285, f": {hari_ini}")
    # set size and position of total rating plot
    pdf.drawInlineImage(fig_overall_rating, 921, 768 -
                        635, width=378, height=293)
    # set font, size, and position of current rating and total review
    pdf.setFont("Helvetica-Bold", 54)
    pdf.drawCentredString(258, 768-500, current_review)
    pdf.drawCentredString(684, 768-500, current_rating)
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Introduction")
    """ REVIEW ANALYSIS BY TIME """
    # page break
    pdf.showPage()
    # put review analysis by time template
    pdf.drawInlineImage(
        'sentiport/utils/assets/review_analysis_by_time.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set size and position of total review by time plot
    pdf.drawInlineImage(fig_totalreview_time, 99, 768 -
                        603, width=1273-99, height=603-125)
    # set font, size, and position of insight summary
    pdf.setFont("Helvetica-BoldOblique", 36)
    pdf.drawCentredString(
        683, 768-665, f"{MostReview_Month} has the highest number of review ({MostReview_Month_Value})")
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Review Analysis by Time")
    """ REVIEW ANALYSIS BY VERSION """
    # page break
    pdf.showPage()
    # put review analysis by time template
    pdf.drawInlineImage(
        'sentiport/utils/assets/review_analysis_by_version.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set size and position of total review by version plot
    pdf.drawInlineImage(fig_totalreview_version, 99, 768 -
                        603, width=1273-99, height=603-125)
    # set font, size, and position of insight summary
    pdf.setFont("Helvetica-BoldOblique", 36)
    pdf.drawCentredString(
        683, 768-665, f"ver. {MostReview_Version} has the highest number of review ({MostReview_Version_Value})")
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Review Analysis by Version")
    """ SENTIMENT ANALYSIS """
    # page break
    pdf.showPage()
    # put sentiment analysis template
    pdf.drawInlineImage(
        'sentiport/utils/assets/sentiment_analysis.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set the size and position of sentiment by version plot
    pdf.drawInlineImage(fig_sentiment_version, 48, 768 -
                        381, width=910-48, height=381-114)
    # set the size and position of sentiment by time plot
    pdf.drawInlineImage(fig_sentiment_time, 48, 768-677,
                        width=910-48, height=677-410)
    # set the size and position of total review sentiment plot
    pdf.drawInlineImage(fig_totalreview_sentiment, 932,
                        768-488, width=1327-932, height=488-113)
    # set font, size and position of insight summary
    pdf.setFont("Helvetica-BoldOblique", 16)
    pdf.drawString(
        935, 768-500, f"\t Most of the Review Sentiment is {most_sentiment}")
    pdf.drawString(
        935, 768-545, f"\t {MostPos_Version} has the highest positive review")
    pdf.drawString(
        935, 768-590, f"\t {MostNeg_Version} has the highest negative review")
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Review Sentiment Analysis")
    """ REVIEW LANGUAGE ANALYSIS """
    # page break
    pdf.showPage()
    # put review analysis template
    pdf.drawInlineImage(
        'sentiport/utils/assets/review_language_analysis.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set size and position of review language plot
    pdf.drawInlineImage(fig_lang, 239, 768-595, width=1131-239, height=595-134)
    # set font, size, and positon of insight summary
    pdf.setFont("Helvetica-BoldOblique", 36)
    pdf.drawCentredString(
        683, 768-665, f"{most_lang} is the most used language in the reviews")
    # set font, size and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Review Language Analysis")
    """ BAD REVIEW """
    # page break
    pdf.showPage()
    # put the bad reviewtemplate
    pdf.drawInlineImage(
        'sentiport/utils/assets/template_negative_reviews.png', 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set the position of bad review table
    # wrap() sizes the flowable; width/height are unused here
    w, h = negative_table.wrap(0, 0)
    negative_table.drawOn(pdf, 40, 768-675)
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Top 5 Negative Review")
    """ GOOD REVIEW """
    # page break
    pdf.showPage()
    # put good review template
    pdf.drawInlineImage(
        "sentiport/utils/assets/positive_review.png", 0, 0, width=1366, height=768)
    # put logo
    pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
    # set position of good review table
    w, h = postive_table.wrap(0, 0)
    postive_table.drawOn(pdf, 40, 768-675)
    # set font, size, and position of footer
    pdf.setFont("Helvetica-Bold", 20)
    pdf.drawString(20, 768-740, app_title_name)
    pdf.setFont("Helvetica-Oblique", 20)
    pdf.drawString(683, 768-740, "| Top 5 Positive Review")
    """ KEYWORD EXTRACTION TABLE """
    # page break
    pdf.showPage()
    # one report page per extracted topic keyword
    for i in range(len(list_of_topic)):
        # background template
        pdf.drawInlineImage(
            'sentiport/utils/assets/topic_extractions_template.png', 0, 0, width=1366, height=768)
        # put logo
        pdf.drawInlineImage(company_logo, 1135, 768-80, width=55, height=55)
        # table making
        df_split = keyword_df[keyword_df['keyword'] == list_of_topic[i]].reset_index(
            drop=True).rename(columns={0: 'polarity_score'})
        avg_rating = avg_rating_list[i]
        avg_sentiment = avg_sentiment_list[i]
        review_count = review_count_list[i]
        review_table = get_topic_table(df_split)
        # set the position of bad review table
        w, h = review_table.wrap(0, 0)
        review_table.drawOn(pdf, 40, 768-675)
        keyword1 = list_of_topic[i]
        average_sentiment = round(avg_sentiment, 2)
        average_rating = round(avg_rating, 2)
        review_counts = review_count
        rev_len = len(df_split)
        pdf.setFont("Helvetica", 40)
        pdf.drawString(40, 768-145, f"Keyword:")
        pdf.setFont("Helvetica-Bold", 40)
        pdf.drawString(215, 768-145, f"{keyword1}")
        pdf.setFont("Helvetica", 23)
        pdf.drawString(40, 768-180, f"Average Sentiment:")
        pdf.setFont("Helvetica-Bold", 23)
        pdf.drawString(250, 768-180, f"{average_sentiment}")
        pdf.setFont("Helvetica", 23)
        pdf.drawString(550, 768-180, f"Average Rating:")
        pdf.setFont("Helvetica-Bold", 23)
        pdf.drawString(720, 768-180, f"{average_rating}")
        pdf.setFont("Helvetica", 23)
        pdf.drawString(900, 768-180, f"Total Reviews:")
        pdf.setFont("Helvetica-Bold", 23)
        pdf.drawString(1060, 768-180, f"{rev_len}")
        pdf.setFont("Helvetica", 23)
        pdf.drawString(1080, 768-180, f"from")
        pdf.setFont("Helvetica-Bold", 23)
        pdf.drawString(1140, 768-180, f"{review_count}")
        # set font, size, and position of footer
        pdf.setFont("Helvetica-Bold", 20)
        pdf.drawString(20, 768-740, app_title_name)
        pdf.setFont("Helvetica-Oblique", 20)
        pdf.drawString(683, 768-740, "| Topics Extraction")
        pdf.showPage()
    """ CLOSING PAGE """
    # put closing page template
    pdf.drawInlineImage(
        'sentiport/utils/assets/get_other_features.png', 0, 0, width=1366, height=768)
    # saving the report into pdf
    pdf.save()
    end = time.time()
    print(f"PDF Report done! \n processing time: {(end-start)} sec")
    return fileName
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,206
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/utils/utilities/crawling.py
|
# prepare the environment
import pandas as pd
from google_play_scraper import app, reviews
from tqdm import tqdm
from urllib.request import urlopen as uReq
from bs4 import BeautifulSoup as soup
def get_crawl_google(id, country_id):
    """Crawl up to MAX_REVIEWS Google Play reviews for an app.

    Parameters
    ----------
    id : str
        Play Store application id (e.g. "com.halodoc.android").
    country_id : str
        Two-letter country code for the store front.

    Returns
    -------
    pandas.DataFrame
        Columns reviewId, review, version, rating, at (datetime),
        de-duplicated by reviewId and restricted to rows whose version
        string looks like a dotted release number.
    """
    BATCH_SIZE = 50
    MAX_REVIEWS = 2000
    appinfo = app(
        id,
        lang='en',
        country=country_id)
    AVAIL_REVIEWS = appinfo.get('reviews')
    TOFETCH_REVIEWS = min(AVAIL_REVIEWS, MAX_REVIEWS)
    ints = list(range(max(TOFETCH_REVIEWS//BATCH_SIZE, 1)))
    result = []
    continuation_token = ""
    t = tqdm(total=TOFETCH_REVIEWS)
    for i in ints:
        if i == 0:
            # first call primes the continuation token.
            # NOTE(review): the first iteration therefore fetches two
            # batches (this call plus the one below); kept as-is to
            # preserve the original crawl size.
            result, continuation_token = reviews(id,
                                                 count=BATCH_SIZE,
                                                 country=country_id
                                                 )
        res, continuation_token = reviews(
            id, count=BATCH_SIZE, continuation_token=continuation_token)
        result.extend(res)
        t.update(BATCH_SIZE)
    t.close()
    dfp = pd.DataFrame(result)
    dfp.drop_duplicates('reviewId', inplace=True)
    data = [dfp['reviewId'], dfp['content'],
            dfp['reviewCreatedVersion'], dfp['score'], dfp['at']]
    headers = ['reviewId', 'review', 'version', 'rating', 'at']
    df_google = pd.concat(data, axis=1, keys=headers)
    # direct assignment avoids the chained-assignment pitfall of
    # calling fillna(..., inplace=True) on the selected column
    df_google['version'] = df_google['version'].fillna("null")
    # back-fill "null" versions with the next row's version string
    for idx in range(len(df_google)-1):
        if df_google['version'][idx] == 'null':
            df_google.loc[idx, 'version'] = df_google['version'][idx+1]
    # keep only rows whose version has a '.' at position 1 or 2
    # (e.g. "1.2.3" or "10.2"); the slice never raises IndexError on
    # short strings, unlike the original per-character indexing
    for i in range(len(df_google)):
        if "." not in df_google['version'][i][1:3]:
            df_google.drop(index=i, inplace=True)
    df_google.reset_index(drop=True, inplace=True)
    df_google['at'] = pd.to_datetime(df_google['at'])
    return df_google
def app_title(id, country_id):
    """Return the Play Store title string for the given app id."""
    details = app(id, lang='en', country=country_id)
    return details['title']
def value_overall_rating(playstore_id):
    """Scrape the overall star rating shown on the app's Play Store page."""
    page_url = 'https://play.google.com/store/apps/details?id=' + playstore_id
    client = uReq(page_url)
    html = client.read()
    client.close()
    parsed = soup(html, "html.parser")
    # the rating value lives in a div with this (obfuscated) class name
    rating_divs = parsed.findAll("div", {"class": "BHMmbe"})
    return rating_divs[0].text
def value_total_review(playstore_id):
    """Scrape the total review count shown on the app's Play Store page."""
    page_url = 'https://play.google.com/store/apps/details?id=' + playstore_id
    client = uReq(page_url)
    html = client.read()
    client.close()
    parsed = soup(html, "html.parser")
    # the count is nested: outer span with this class, inner unclassed span
    count_spans = parsed.findAll("span", {"class": "EymY4b"})
    inner = count_spans[0].findAll("span", {"class": ""})
    return inner[0].text
def image_company_logo(playstore_id, temp_dir):
    """Download the app's logo from its Play Store page.

    The PNG is saved under sentiport/artifacts/{temp_dir}/ and the
    saved path is returned so callers can embed it in the PDF report.
    """
    target_url = 'https://play.google.com/store/apps/details?id='+playstore_id
    # urlopen responses are context managers: guarantees the HTTP
    # connection is released even if read()/parsing fails (the
    # original leaked both responses and used a bare open/close pair)
    with uReq(target_url) as uClient:
        page_html = uClient.read()
    page_soup = soup(page_html, "html.parser")
    total_review = page_soup.findAll("div", {"class": "xSyT2c"})
    link_gambar = total_review[0].find(
        "img", {"class": "T75of sHb2Xb"}).get('src')
    logo_path = f"sentiport/artifacts/{temp_dir}/logo_company.png"
    with uReq(link_gambar) as gambar, open(logo_path, "wb") as output:
        output.write(gambar.read())
    return logo_path
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,207
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py
|
import pandas as pd
import numpy as np
import re
import regex
from google_play_scraper import app, reviews, reviews_all, Sort
from tqdm import tqdm
from tqdm.notebook import tnrange, tqdm_notebook
from textblob import TextBlob
from googletrans import Translator
import matplotlib.colors as mcolors
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
from dateutil.relativedelta import relativedelta
from datetime import datetime
# Helvetica font files shipped with the project; matplotlib needs
# explicit FontProperties because these fonts are not installed
# system-wide.
path_title = 'sentiport/utils/Helvetica-Font/Helvetica-Bold.ttf'
path_label = 'sentiport/utils/Helvetica-Font/Helvetica.ttf'
# shared font properties: bold 15pt for chart titles, regular 12pt
# for axis labels
fontprop = fm.FontProperties(fname=path_title, size=15)
fontprop_label = fm.FontProperties(fname=path_label, size=12)
def translate_dataframe(DATAFRAME):
    """Translate one review text to English via googletrans.

    Despite the name, the argument is a single text value, not a
    DataFrame — it is applied element-wise with Series.apply.
    Returns the translated string, or None on any translation failure
    (network error, unsupported input, ...).
    """
    try:
        translator = Translator()
        return translator.translate(DATAFRAME).text
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed
        return None
def polarity_calc(text):
    """Return the TextBlob sentiment polarity of *text* in [-1, 1].

    Returns None when TextBlob cannot process the input (e.g. None).
    """
    try:
        return TextBlob(text).sentiment.polarity
    except Exception:
        # narrowed from a bare `except:` — don't swallow SystemExit etc.
        return None
def subjectivity_calc(text):
    """Return the TextBlob sentiment subjectivity of *text* in [0, 1].

    Returns None when TextBlob cannot process the input (e.g. None).
    """
    try:
        return TextBlob(text).sentiment.subjectivity
    except Exception:
        # narrowed from a bare `except:` — don't swallow SystemExit etc.
        return None
def preprocessing_weeks(TRANSLATED_DATAFRAME):
    """Label every row with a 'Week N' string derived from its timestamp.

    Converts the 'time' column to datetime, appends a 'week_time'
    column ("Week <ISO week number>"), and drops leftover CSV index
    columns ("Unnamed: 0", "Unnamed: 0.1") if present.  Mutates and
    returns the same frame.
    """
    TRANSLATED_DATAFRAME['time'] = pd.to_datetime(TRANSLATED_DATAFRAME['time'])
    # build the label directly instead of going through an
    # intermediate week-number column
    TRANSLATED_DATAFRAME['week_time'] = [
        'Week ' + str(TRANSLATED_DATAFRAME['time'][row].week)
        for row in range(len(TRANSLATED_DATAFRAME))
    ]
    for junk_col in ("Unnamed: 0", "Unnamed: 0.1"):
        if junk_col in TRANSLATED_DATAFRAME.columns:
            TRANSLATED_DATAFRAME.drop(junk_col, axis=1, inplace=True)
    return TRANSLATED_DATAFRAME
def get_translated_dataframe(DATAFRAME):
    """Build a frame of translated reviews plus version/rating/time.

    Each review is run through translate_dataframe (best-effort English
    translation); version, rating and the 'at' timestamp are copied
    from the source frame unchanged ('at' is renamed to 'time').
    """
    translated = pd.DataFrame(DATAFRAME['review'].apply(translate_dataframe))
    for dst_col, src_col in (('version', 'version'),
                             ('rating', 'rating'),
                             ('time', 'at')):
        translated[dst_col] = DATAFRAME[src_col]
    return translated
def get_sentiment_dataframe(TRANSLATED_DATAFRAME):
    """Attach polarity/subjectivity/sentiment columns and a time bucket.

    Mutates TRANSLATED_DATAFRAME in place: computes TextBlob polarity
    and subjectivity per review, labels each row
    Positive/Neutral/Negative from the polarity sign, then buckets
    time by month — or, when the data spans fewer than 4 distinct
    months, falls back to weekly buckets via preprocessing_weeks.

    Returns either a month-bucketed copy (columns 'time' as "%b %Y"
    string, 'at' as its datetime) or the original frame with a
    'week_time' column added.
    """
    # USE THIS LINE IF YOU WANNA SKIP TRANSLATION
    # (self-assignments are no-ops except 'time', copied from 'at')
    TRANSLATED_DATAFRAME['version'] = TRANSLATED_DATAFRAME['version']
    TRANSLATED_DATAFRAME['rating'] = TRANSLATED_DATAFRAME['rating']
    TRANSLATED_DATAFRAME['time'] = TRANSLATED_DATAFRAME['at']
    # USE THIS AND SKIP ABOVE IF YOU WANNA USE TRANSLATION
    TRANSLATED_DATAFRAME['polarity'] = TRANSLATED_DATAFRAME['review'].apply(
        polarity_calc)
    TRANSLATED_DATAFRAME['subjectivity'] = TRANSLATED_DATAFRAME['review'].apply(
        subjectivity_calc)
    TRANSLATED_DATAFRAME['sentiment'] = np.nan
    # label each row from the sign of its polarity score
    # NOTE(review): assumes a 0..n-1 RangeIndex — `.polarity[i]` is a
    # label lookup while `.iloc[i]` is positional; confirm upstream.
    for i in range(len(TRANSLATED_DATAFRAME)):
        if TRANSLATED_DATAFRAME.polarity[i] > 0:
            # TRANSLATED_DATAFRAME['sentiment'].iloc[i] = 'Positive'
            TRANSLATED_DATAFRAME.iloc[i, TRANSLATED_DATAFRAME.columns.get_loc(
                'sentiment')] = 'Positive'
        elif TRANSLATED_DATAFRAME.polarity[i] == 0:
            # TRANSLATED_DATAFRAME['sentiment'].iloc[i] = 'Neutral'
            TRANSLATED_DATAFRAME.iloc[i, TRANSLATED_DATAFRAME.columns.get_loc(
                'sentiment')] = 'Neutral'
        elif TRANSLATED_DATAFRAME.polarity[i] < 0:
            # TRANSLATED_DATAFRAME['sentiment'].iloc[i] = 'Negative'
            TRANSLATED_DATAFRAME.iloc[i, TRANSLATED_DATAFRAME.columns.get_loc(
                'sentiment')] = 'Negative'
    # normalise timestamps to date resolution
    TRANSLATED_DATAFRAME['time'] = pd.to_datetime(TRANSLATED_DATAFRAME['time'])
    TRANSLATED_DATAFRAME['time'] = TRANSLATED_DATAFRAME['time'].dt.strftime(
        '%Y-%m-%d')
    TRANSLATED_DATAFRAME['time'] = pd.to_datetime(TRANSLATED_DATAFRAME['time'])
    # Checking if time is more than 3 months
    check_month = TRANSLATED_DATAFRAME.copy()
    check_month['time'] = check_month['time'].dt.strftime('%b %Y')
    check_month['at'] = pd.to_datetime(check_month['time'])
    months = check_month['time'].nunique()
    if months >= 4:
        return check_month
    else:
        print("Data less than 4 mos")
        TRANSLATED_DATAFRAME = preprocessing_weeks(TRANSLATED_DATAFRAME)
        return TRANSLATED_DATAFRAME
def plot_totalreview_time(data, temp_dir):
    """Plot total reviews (bars) and average rating (line) per time bucket.

    Groups by 'week_time' when present (weekly fallback from
    get_sentiment_dataframe), otherwise by the monthly 'at' column.
    Saves the chart as a PNG under sentiport/artifacts/{temp_dir}/ and
    returns (png_path, busiest_bucket_label, busiest_bucket_count).
    """
    if "week_time" in data.columns:
        review_by_time = pd.DataFrame(data.groupby('week_time').count()['review']).join(
            data.groupby('week_time').mean()['rating'])
        review_by_time = review_by_time.reset_index()
        review_by_time['time'] = review_by_time['week_time']
    else:
        review_by_time = pd.DataFrame(data.groupby('at').count()['review']).join(
            data.groupby('at').mean()['rating'])
        review_by_time = review_by_time.reset_index()
        review_by_time['time'] = pd.to_datetime(review_by_time['at'])
        review_by_time['time'] = review_by_time['time'].dt.strftime("%b %Y")
    # brown gradient scaled by the bar's share of the maximum count
    cmap = mcolors.LinearSegmentedColormap.from_list(
        "", ["#bba68a", "#957347", "#8b6636"])
    # Plot graph with 2 y axes
    fig, ax1 = plt.subplots(figsize=(11.8726, 4.9648), dpi=100)
    # Plot bars
    ax1.bar(review_by_time['time'], review_by_time['review'], color=cmap(
        review_by_time['review'].values/review_by_time['review'].values.max()))
    # Make the y-axis label and tick labels match the line color.
    ax1.set_ylabel('Total Review', color="#8b6636",
                   fontproperties=fontprop_label)
    # Set up ax2 to be the second y axis with x shared
    ax2 = ax1.twinx()
    # Plot a line
    ax2.plot(review_by_time['time'], review_by_time['rating'],
             marker='o', linestyle='dashed', color="#6d0000")
    # Make the y-axis label and tick labels match the line color.
    ax2.set_ylabel('Average Rating', color="#6d0000",
                   fontproperties=fontprop_label)
    ax2.set_ylim(0, 5)
    # strip all chart borders for a clean look
    ax1.spines['top'].set_visible(False)
    ax1.spines['right'].set_visible(False)
    ax1.spines['left'].set_visible(False)
    ax1.spines['bottom'].set_visible(False)
    ax2.spines['top'].set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.spines['left'].set_visible(False)
    ax2.spines['bottom'].set_visible(False)
    ax1.patch.set_facecolor('white')
    ax2.patch.set_facecolor('white')
    plt.title('Total Review and Average Rating across Months',
              fontproperties=fontprop)
    plt.box(False)
    plt.savefig(f'sentiport/artifacts/{temp_dir}/fig_review_rating_time.png',
                bbox_inches='tight')
    # review_by_time = data.groupby('time').review.nunique()
    # review_by_time = pd.DataFrame(review_by_time)
    # review_by_time = review_by_time.reset_index()
    idmax = review_by_time['review'].idxmax()
    idmin = review_by_time['review'].idxmin()
    max_time = review_by_time['time'][idmax]
    max_value = review_by_time['review'][idmax]
    # NOTE(review): min_time/min_value are computed but never returned
    min_time = review_by_time['time'][idmin]
    min_value = review_by_time['review'][idmin]
    return f'sentiport/artifacts/{temp_dir}/fig_review_rating_time.png', max_time, max_value
def plot_totalreview_version(data, temp_dir):
    """Plot total reviews (bars) and average rating (line) per app version.

    Keeps only the most recent versions covering ~90% of all reviews
    (capped at 20 bars), saves the chart as a PNG under
    sentiport/artifacts/{temp_dir}/ and returns
    (png_path, most_reviewed_version, its_review_count).
    """
    review_by_version = pd.DataFrame(data.groupby('version').count(
    )['review']).join(data.groupby('version').mean()['rating'])
    review_by_version = review_by_version.reset_index()
    # per-version share of the total review count
    percent = []
    for i in range(len(review_by_version)):
        persen = review_by_version['review'][i] / \
            sum(review_by_version['review'])
        percent.insert(i, persen)
    review_by_version['percent'] = percent
    # walk versions from newest to oldest until 90% of reviews covered
    value = 0
    panjang_data = []
    for i in reversed(review_by_version.index):
        value = value + review_by_version['percent'][i]
        if value < 0.9:
            panjang_data.append(i)
    # NOTE(review): when panjang_data is empty, [-0:] keeps the whole
    # frame rather than dropping everything — confirm that is intended
    review_by_version = review_by_version[-len(panjang_data):]
    review_by_version.reset_index(inplace=True)
    version = review_by_version['version'].nunique()
    if version > 20:
        review_by_version = review_by_version[:20]
    # red gradient scaled by the bar's share of the maximum count
    cmap = mcolors.LinearSegmentedColormap.from_list(
        "", ["#aa6a6a", "#791515", "#6d0000"])
    # Plot graph with 2 y axes
    fig, ax1 = plt.subplots(figsize=(11.8726, 4.9648), dpi=100)
    # Plot bars
    ax1.bar(review_by_version['version'], review_by_version['review'], color=cmap(
        review_by_version['review'].values/review_by_version['review'].values.max()))
    ax1.set_xticklabels(review_by_version['version'], rotation=90)
    # Make the y-axis label and tick labels match the line color.
    ax1.set_ylabel('Total Review', color="#6d0000",
                   fontproperties=fontprop_label)
    # Set up ax2 to be the second y axis with x shared
    ax2 = ax1.twinx()
    # Plot a line
    ax2.plot(review_by_version['version'], review_by_version['rating'],
             marker='o', markersize=5, linestyle='dashed', color="#8b6636")
    # Make the y-axis label and tick labels match the line color.
    ax2.set_ylabel('Average Rating', color="#8b6636",
                   fontproperties=fontprop_label)
    ax2.set_ylim(0, 5.2)
    # strip all chart borders for a clean look
    ax1.spines['top'].set_visible(False)
    ax1.spines['right'].set_visible(False)
    ax1.spines['left'].set_visible(False)
    ax1.spines['bottom'].set_visible(False)
    ax2.spines['top'].set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.spines['left'].set_visible(False)
    ax2.spines['bottom'].set_visible(False)
    ax1.patch.set_facecolor('white')
    ax2.patch.set_facecolor('white')
    plt.title('Total Review and Average Rating across Versions',
              fontproperties=fontprop)
    plt.box(False)
    plt.savefig(f'sentiport/artifacts/{temp_dir}/fig_review_rating_version.png',
                bbox_inches='tight')
    # review_by_version = data.groupby('version').review.nunique()
    # review_by_version = pd.DataFrame(review_by_version)
    # review_by_version = review_by_version.reset_index()
    idmax = review_by_version['review'].idxmax()
    idmin = review_by_version['review'].idxmin()
    max_version = review_by_version['version'][idmax]
    max_value = review_by_version['review'][idmax]
    # NOTE(review): min_version/min_value are computed but never returned
    min_version = review_by_version['version'][idmin]
    min_value = review_by_version['review'][idmin]
    return f'sentiport/artifacts/{temp_dir}/fig_review_rating_version.png', max_version, max_value
def plot_totalreview_sentiment(data, temp_dir):
    """Render a pie chart of the Positive/Neutral/Negative review split.

    Saves the figure as a PNG under sentiport/artifacts/{temp_dir}/
    and returns (png_path, dominant_sentiment_label).  Sentiment
    classes absent from the data are simply omitted from the chart.
    """
    review_by_sentiment = data['sentiment'].value_counts()
    review_by_sentiment = pd.DataFrame(review_by_sentiment).reset_index()
    review_by_sentiment.rename(
        columns={'index': 'sentiment', 'sentiment': 'total'}, inplace=True)
    review_by_sentiment = review_by_sentiment.sort_values(
        by=['sentiment'], ascending=False)
    sentiment = []
    for i in review_by_sentiment['sentiment']:
        sentiment.append(i)
    total = review_by_sentiment['total'].sum()
    labels = []
    sizes = review_by_sentiment['total']
    colours = {}
    # build a "<label> <pct>%" legend entry and fixed colour for each
    # sentiment class that actually occurs
    if "Positive" in sentiment:
        pos_index = review_by_sentiment[review_by_sentiment['sentiment']
                                        == 'Positive']['total'].index.values[0]
        pos = review_by_sentiment['total'][pos_index]
        pos_percentage = (pos / total) * 100
        labels.append('Positive {:.2f}%'.format(pos_percentage))
        colours['Positive {:.2f}%'.format(pos_percentage)] = '#1B290D'
    if "Neutral" in sentiment:
        neu_index = review_by_sentiment[review_by_sentiment['sentiment']
                                        == 'Neutral']['total'].index.values[0]
        neu = review_by_sentiment['total'][neu_index]
        neu_percentage = (neu / total) * 100
        labels.append('Neutral {:.2f}%'.format(neu_percentage))
        colours['Neutral {:.2f}%'.format(neu_percentage)] = '#8b6636'
    if "Negative" in sentiment:
        neg_index = review_by_sentiment[review_by_sentiment['sentiment']
                                        == 'Negative']['total'].index.values[0]
        neg = review_by_sentiment['total'][neg_index]
        neg_percentage = (neg / total) * 100
        labels.append('Negative {:.2f}%'.format(neg_percentage))
        colours['Negative {:.2f}%'.format(neg_percentage)] = '#6d0000'
    obj = plt.figure(figsize=(3.95, 3.75), dpi=100)
    patches, texts = plt.pie(
        sizes, colors=[colours[key] for key in labels], startangle=90)
    obj = plt.legend(patches, labels, loc='center left',
                     bbox_to_anchor=(1, 0.5), frameon=False)
    obj = plt.title('Review by Sentiment', fontproperties=fontprop)
    obj = plt.box(False)
    obj = plt.grid(False)
    obj = plt.tick_params(left=False)
    obj = plt.axis('equal')
    obj = plt.tight_layout()
    obj = plt.savefig(
        f'sentiport/artifacts/{temp_dir}/fig_totalreview_sentiment.png', bbox_inches='tight')
    idmax = review_by_sentiment['total'].idxmax()
    max_sentiment = review_by_sentiment['sentiment'][idmax]
    # NOTE(review): max_value is computed but never returned
    max_value = review_by_sentiment['total'][idmax]
    return f'sentiport/artifacts/{temp_dir}/fig_totalreview_sentiment.png', max_sentiment
def plot_sentiment_time(data, temp_dir):
    """Plot a grouped bar chart of review counts per sentiment over time.

    Groups reviews by 'week_time' when that column exists, otherwise by the
    raw 'at' timestamp (formatted "%b %Y"), and draws one bar group per
    sentiment class present in the data.  The figure is saved to
    sentiport/artifacts/<temp_dir>/fig_sentiment_time.png and its path is
    returned.
    """
    # Count reviews per (period, sentiment) and pivot so each sentiment
    # becomes its own column.
    if "week_time" in data.columns:
        sentiment_summary = pd.DataFrame(
            data['sentiment'].groupby(data['week_time']).value_counts())
        sentiment_summary.rename(columns={'sentiment': 'total'}, inplace=True)
        sentiment_summary = sentiment_summary.reset_index()
        pivot = pd.pivot_table(
            sentiment_summary, index='week_time', columns='sentiment', values='total')
        pivot = pivot.reset_index()
        pivot = pivot.rename(columns={'sentiment': 'no'})
        pivot['time'] = pivot['week_time']
    else:
        sentiment_summary = pd.DataFrame(
            data['sentiment'].groupby(data['at']).value_counts())
        sentiment_summary.rename(columns={'sentiment': 'total'}, inplace=True)
        sentiment_summary = sentiment_summary.reset_index()
        pivot = pd.pivot_table(sentiment_summary, index='at',
                               columns='sentiment', values='total')
        pivot = pivot.reset_index()
        pivot = pivot.rename(columns={'sentiment': 'no'})
        pivot['time'] = pivot['at'].dt.strftime("%b %Y")
    labels = pivot['time']
    x_vals = range(0, len(pivot['time']))
    x = np.arange(len(labels))
    width = 0.35
    # FIX: the original also created an empty figure with plt.figure() right
    # before plt.subplots(), leaking one matplotlib figure per call; removed.
    fig, ax = plt.subplots(figsize=(8.7084, 2.7394), dpi=100)
    # Draw only the sentiment classes actually present.  The original also
    # computed per-class idxmax/idxmin statistics that were never used; that
    # dead code has been removed.
    if "Negative" in pivot.columns:
        ax.bar(x - width, pivot['Negative'], width, label='Negative', color='#6d0000')
    if "Neutral" in pivot.columns:
        ax.bar(x, pivot['Neutral'], width, label='Neutral', color='#8b6636')
    if "Positive" in pivot.columns:
        ax.bar(x + width, pivot['Positive'], width, label='Positive', color='#1B290D')
    ax.set_title('Review Sentiment Across Time', fontproperties=fontprop)
    plt.xticks(x_vals, pivot['time'], fontsize=7)
    plt.ylabel("Number of Review", fontproperties=fontprop_label)
    plt.box(False)
    plt.grid(False)
    plt.savefig(
        f'sentiport/artifacts/{temp_dir}/fig_sentiment_time.png', bbox_inches='tight')
    return f'sentiport/artifacts/{temp_dir}/fig_sentiment_time.png'
def plot_sentiment_version(data, temp_dir):
    """Plot review sentiment counts per app version as grouped bars.

    Keeps only the trailing versions whose cumulative review share is below
    90%, caps the chart at 20 versions, saves the figure to
    sentiport/artifacts/<temp_dir>/fig_sentiment_version.png, and returns
    (figure_path, version_with_most_positive, version_with_most_negative).

    NOTE(review): unlike plot_sentiment_time, this function assumes the
    'Positive', 'Neutral' and 'Negative' columns all exist and raises
    KeyError otherwise — confirm inputs always contain all three classes.
    """
    sentiment_summary = pd.DataFrame(
        data['sentiment'].groupby(data['version']).value_counts())
    sentiment_summary.rename(columns={'sentiment': 'total'}, inplace=True)
    sentiment_summary = sentiment_summary.reset_index()
    # FIX: the original recomputed sum(...) over the whole column once per
    # row inside a loop; hoist the grand total and vectorise the division.
    grand_total = sum(sentiment_summary['total'])
    sentiment_summary['percent'] = sentiment_summary['total'] / grand_total
    # Keep the trailing (highest-index) rows whose cumulative share of all
    # reviews stays below 90%.
    value = 0
    panjang_data = []
    for i in reversed(sentiment_summary.index):
        value = value + sentiment_summary['percent'][i]
        if value < 0.9:
            panjang_data.append(i)
    # NOTE(review): when panjang_data is empty this slice is [-0:] == [0:]
    # and keeps *all* rows; preserved as-is, but verify that is intended.
    sentiment_summary = sentiment_summary[-len(panjang_data):]
    sentiment_summary.reset_index(inplace=True)
    pivot = pd.pivot_table(sentiment_summary, index='version',
                           columns='sentiment', values='total')
    pivot = pivot.reset_index()
    pivot = pivot.rename(columns={'sentiment': 'no'})
    # Cap the chart at 20 versions to keep the x axis readable.
    if pivot['version'].nunique() > 20:
        pivot = pivot[:20]
    # Versions with the most positive / most negative reviews, returned for
    # the report text.  (Unused min/max bookkeeping from the original removed.)
    pos_max_version = pivot['version'][pivot['Positive'].idxmax()]
    neg_max_version = pivot['version'][pivot['Negative'].idxmax()]
    labels = pivot['version']
    x = np.arange(len(labels))
    width = 0.35
    # FIX: removed the stray plt.figure() that leaked an empty figure.
    fig, ax = plt.subplots(figsize=(8.7084, 2.7394), dpi=100)
    ax.bar(x - width, pivot['Negative'], width, label='Negative', color='#6d0000')
    ax.bar(x, pivot['Neutral'], width, label='Neutral', color='#8b6636')
    ax.bar(x + width, pivot['Positive'], width, label='Positive', color='#1B290D')
    ax.set_title('Review Sentiment Across Version',
                 fontproperties=fontprop)
    plt.xticks(range(0, len(pivot['version'])), pivot['version'], rotation=90, fontsize=7)
    plt.ylabel("Number of Review", fontproperties=fontprop_label)
    plt.box(False)
    plt.grid(False)
    plt.savefig(f'sentiport/artifacts/{temp_dir}/fig_sentiment_version.png',
                bbox_inches='tight')
    return f'sentiport/artifacts/{temp_dir}/fig_sentiment_version.png', pos_max_version, neg_max_version
def sentiment_visual_preprocessing(DATAFRAME):
    """Prepare a review dataframe for sentiment plotting.

    Translation is currently skipped; the raw dataframe is scored directly.
    To re-enable translation, run get_translated_dataframe first and feed
    its output to get_sentiment_dataframe instead.
    """
    # TRANSLATED_DATAFRAME = get_translated_dataframe(DATAFRAME)
    # return get_sentiment_dataframe(TRANSLATED_DATAFRAME)
    return get_sentiment_dataframe(DATAFRAME)
# Trial and Error Lines
# DATAFRAME = pd.read_csv('D:\Stuff\Supertype Program\GitHub\data-analyst-github\data_analyst\data-echo-5k.csv')
# one_yr_ago = datetime.now() - relativedelta(years=1)
# DATAFRAME.index = DATAFRAME['at']
# DATAFRAME = DATAFRAME[DATAFRAME.index.map(pd.to_datetime)>one_yr_ago]
# DATAFRAME.reset_index(drop=True, inplace=True)
# DATAFRAME = get_sentiment_dataframe(DATAFRAME)
# # data.to_csv('data-ready-18k.csv')
# print(data.head(10))
# print(data.tail(10))
# print(cek)
# plot_totalreview_time(DATAFRAME)
# plot_totalreview_version(DATAFRAME)
# plot_totalreview_sentiment(DATAFRAME)
# print(b)
# plot_sentiment_time(DATAFRAME)
# plot_sentiment_version(DATAFRAME)
# # PLOTTING DATA
# total_review_by_time(data)
# total_review_by_version(data)
# total_review_by_sentiment(data)
# sentiment_by_time(data)
# sentiment_by_version(data)
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,682,208
|
Nadiantara/sentiport_heroku
|
refs/heads/main
|
/sentiport/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired, Email, URL
class AppForm(FlaskForm):
    """Report-request form: a Play Store app link plus a recipient email."""
    # NOTE(review): despite the name, app_id is validated as a URL — callers
    # appear to submit the full store page link rather than a bare id.
    app_id = StringField('App id', validators=[DataRequired(), URL()])
    email = StringField('Email', validators=[DataRequired(), Email()])
|
{"/sentiport/routes.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/pdf_generator.py", "/sentiport/mail.py", "/sentiport/forms.py", "/sentiport/__init__.py"], "/app_report.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/utilities/helper.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"], "/sentiport/mail.py": ["/sentiport/__init__.py"], "/sentiport/pdf_generator.py": ["/sentiport/utils/utilities/crawling.py", "/sentiport/utils/plot_detect_language/detect_language.py", "/sentiport/utils/plot_rating/rating.py", "/sentiport/utils/plot_sentiment_analysis/sentiment_analysis.py", "/sentiport/utils/pdf_table_reportlab/bad_good_review.py"]}
|
34,764,092
|
JosephVC/tutorial_backend2
|
refs/heads/main
|
/ocr/views.py
|
from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response
from rest_framework import status
import ocrmypdf
from subprocess import Popen
from .serializers import FileSerializer
from .models import Post
# Create your views here.
class PostViews(APIView):
    """List uploaded posts and accept new file uploads, OCR-ing each upload."""
    parser_classes = (MultiPartParser, FormParser)

    def get(self, request, *args, **kwargs):
        """Return serialized data for every stored Post."""
        posts = Post.objects.all()
        serializer = FileSerializer(posts, many=True)
        return Response(serializer.data)

    def post(self, request, *args, **kwargs):
        """Validate and store the uploaded file, then run ocrmypdf on it.

        Returns 201 with the serialized post on success, 400 with the
        serializer errors otherwise.
        """
        posts_serializer = FileSerializer(data=request.data)
        if posts_serializer.is_valid():
            uploaded = posts_serializer.save()
            process = Popen(['ocrmypdf', uploaded.file.path, 'output.pdf'],
                            cwd="../media/post_files")
            # BUG FIX: the original called posts_serializer.save(process),
            # passing the Popen handle positionally — DRF's save() accepts
            # only keyword arguments, so that raised TypeError.  Instead,
            # wait for ocrmypdf so the response is not returned while the
            # output file is still being written.
            process.wait()
            return Response(posts_serializer.data, status=status.HTTP_201_CREATED)
        else:
            print('error', posts_serializer.errors)
            return Response(posts_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
{"/ocr/urls.py": ["/ocr/views.py"]}
|
34,764,093
|
JosephVC/tutorial_backend2
|
refs/heads/main
|
/tutorial_backend/urls.py
|
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    # Decoy admin served by admin_honeypot at the conventional /admin/ path.
    path('admin/', include('admin_honeypot.urls', namespace='admin_honeypot')),
    # The real Django admin is hidden at /banana/.
    path('banana/', admin.site.urls),
    # OCR app routes mounted at the site root.
    path('', include('ocr.urls', namespace='ocr')),
]
|
{"/ocr/urls.py": ["/ocr/views.py"]}
|
34,764,094
|
JosephVC/tutorial_backend2
|
refs/heads/main
|
/ocr/urls.py
|
from django.urls import path
from .views import PostViews
# Namespace used when reversing these routes (e.g. 'django_ocr_tutorial:file-upload').
app_name = 'django_ocr_tutorial'
# Single route: the app root maps to the upload/list API view.
urlpatterns = [
    path('', PostViews.as_view(), name='file-upload')
]
|
{"/ocr/urls.py": ["/ocr/views.py"]}
|
34,893,939
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Cube.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find volume and surface area of a cube
#
####################################################
import math
def surfaceArea():
    # NOTE(review): unimplemented placeholder — main() computes the surface
    # area inline instead of calling this; implement or remove.
    pass
def volume(lengthOfCube):
    """Return the volume of a cube with side length ``lengthOfCube``."""
    # Cube volume is simply the side length cubed.
    return lengthOfCube ** 3
def main():
    """Prompt for a cube's side length and print its volume and surface areas."""
    print("-------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE VOLUME AND SURFACE AREA OF A CUBE")
    print("------------------------------------------------------------- ")
    # NOTE(review): eval() executes arbitrary user input; float(input(...))
    # would be safer.  Kept for behavioural parity.
    side = eval(input("Please enter the Length of any Side of a Cube: "))
    print()
    # Derived quantities for a cube of the given side length.
    cube_volume = side**3
    total_surface = 6 * side**2
    lateral_surface = 4 * side**2
    print(" Surface Area of Cube = ", float(total_surface))
    print(" Volume of cube = ", float(cube_volume))
    print(" Lateral Surface Area of Cube = ", float(lateral_surface))
    print("-------------------------------------------------------------")


if __name__ == '__main__':
    main()
def volume(lengthOfCube):
    # NOTE(review): exact duplicate of volume() defined earlier in this file;
    # this redefinition silently shadows the first one at import time and
    # looks like an accidental leftover — candidate for removal.
    volume = lengthOfCube**3
    return volume
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,940
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/geometryCalculator.py
|
import cylinder, cube, cuboid, triangle, trapezoid, sphere,equilateralTriangle, cone
def main():
    """Run the interactive geometry menu loop until the user enters 0."""
    # Menu number -> shape module entry point.  Unknown numbers simply
    # re-display the menu, exactly like the original if-chain did.
    handlers = {
        1: sphere.main,
        2: cylinder.main,
        3: cone.main,
        4: cube.main,
        5: triangle.main,
        6: trapezoid.main,
        7: cuboid.main,
        8: equilateralTriangle.main,
    }
    while True:
        print("\nWelcome to my Geometry Calculator")
        print("1. Sphere \n2. Cylinder\n3. Cone\n4. Cube\n5. Triangle\n6. Trapezoid\n7. Cuboid\n8. Equilateral triangle\n0. Quit")
        selection = int(input("Please enter your selection: "))
        if selection == 0:
            break
        action = handlers.get(selection)
        if action is not None:
            action()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,941
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Sphere.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find volume and surface area of a sphere
#
####################################################
import math
def surfaceArea():
    # NOTE(review): unimplemented placeholder — main() computes the surface
    # area inline instead of calling this; implement or remove.
    pass
def volume(radius):
    """Return the volume of a sphere with the given ``radius``."""
    # V = (4/3) * pi * r^3
    return 4/3 * math.pi * radius**3
def main():
    """Prompt for a radius and print the sphere's surface area and volume."""
    print("----------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE VOLUME AND SURFACE AREA OF A SHPHERE")
    print("----------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    radius = eval(input("Please enter the radius: "))
    # Sphere formulas: A = 4*pi*r^2, V = (4/3)*pi*r^3.
    surface = 4 * math.pi * radius**2
    sphere_volume = 4/3 * math.pi * radius**3
    print("The surface area of the sphere = ", surface)
    print("The volume of a sphere = ", sphere_volume)


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,942
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/sphereTest.py
|
import unittest
import sphere
class sphereTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(sphere.volume(5) == 523.5987755982989)
def test_volume2(self):
assert(sphere.volume(69) == 1376055.2813841724)
#test that fails
def test_volume3(self):
assert(sphere.volume(9) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,943
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/CylinderTest.py
|
import unittest
import cylinder
class cylinderTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(cylinder.volume(5,10) == 785.3981633974483)
def test_volume2(self):
assert(cylinder.volume(7,69) == 10621.724761787089)
#test that fails
def test_volume3(self):
assert(cylinder.volume(7,69) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,944
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Triangle.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find the area of a triangle
#
####################################################
import math
def perimeter():
    # NOTE(review): unimplemented placeholder — main() computes the perimeter
    # inline instead of calling this; implement or remove.
    pass
def area(firstSide, secondSide, thirdSide):
    """Return the area of a triangle from its three side lengths.

    Uses Heron's formula.  The radicand is rounded to two decimals before
    the square root, matching the original behaviour (this also absorbs
    tiny negative values from float error on near-degenerate triangles).
    """
    s = (firstSide + secondSide + thirdSide) / 2
    radicand = round(s * (s - firstSide) * (s - secondSide) * (s - thirdSide), 2)
    return math.sqrt(radicand)
def main():
    """Prompt for three sides; print perimeter, semi-perimeter and area."""
    print("-------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE AREA OF A TRIANGLE")
    print("-------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    side_a = eval(input("Please Enter the First Side of a Triangle: "))
    side_b = eval(input("Please Enter the Second Side of a Triangle: "))
    side_c = eval(input("Please Enter the Third Side of a Triangle: "))
    # Heron's formula via the semi-perimeter; radicand rounded to 2 dp.
    perim = side_a + side_b + side_c
    semi = perim/2
    radicand = round(semi*(semi - side_a)*(semi - side_b)*(semi - side_c), 2)
    tri_area = math.sqrt(radicand)
    print()
    print(" The Perimeter of Triangle = ", perim)
    print(" The Semi perimeter of Triangle = ", semi)
    print(" The Area of a Triangle is ", tri_area)
    print("-------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,945
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/geometryCalculatorTest.py
|
import unittest
import geometryCalculator
class geometryCalculatorTest(unittest.TestCase):
    """Smoke test for the geometryCalculator module."""
    #passing tests
    def test_volume1(self):
        # NOTE(review): geometryCalculator defines no pick() function (only
        # main()), so this test errors with AttributeError rather than
        # passing — confirm intent or update to an existing function.
        assert(geometryCalculator.pick(8))

if __name__ == '__main__':
    unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,946
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Cuboid.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find the area of a trapezoid
#
####################################################
import math
def surfaceArea():
    # NOTE(review): unimplemented placeholder — main() computes the surface
    # area inline instead of calling this; implement or remove.
    pass
def volume(length, width, height):
    """Return the volume of a cuboid from its three dimensions."""
    # Cuboid volume is the product of its three edge lengths.
    return length * width * height
def main():
    """Prompt for cuboid dimensions; print surface area, volume, lateral area."""
    print("---------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE VOLUME AND SURFACE AREA OF A CUBOID")
    print("---------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    dim_l = eval(input("Please Enter the length :"))
    dim_w = eval(input("Please Enter the the width :"))
    dim_h = eval(input("Please Enter the height :"))
    # Standard cuboid formulas.
    total_surface = (2*dim_l*dim_w)+(2*dim_l*dim_h)+(2*dim_h*dim_w)
    box_volume = dim_l*dim_w*dim_h
    lateral_surface = 2*(dim_l + dim_w)* dim_h
    print()
    print("The Surface Area of a Cuboid = ", float(total_surface))
    print("The Volume of a Cuboid = ", float(box_volume))
    print("The lateral Surface Area of a Cuboid = ", float(lateral_surface))
    print("---------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,947
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/trapezoidTest.py
|
import unittest
import trapezoid
class trapezoidTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(trapezoid.area(5,10,15) == 112.5)
def test_volume2(self):
assert(trapezoid.area(7,69,100) == 3800)
#test that fails
def test_volume3(self):
assert(trapezoid.area(3,6,9) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,948
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/cubeTest.py
|
import unittest
import cube
class cubeTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(cube.volume(5) == 125)
def test_volume2(self):
assert(cube.volume(69) == 328509)
#test that fails
def test_volume3(self):
assert(cube.volume(9) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,949
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Cone.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find volume and surface area of a cone
#
####################################################
import math
def surfaceArea():
    # NOTE(review): unimplemented placeholder — main() computes the surface
    # area inline instead of calling this; implement or remove.
    pass
def volume(radius, height):
    """Return the volume of a cone with the given base ``radius`` and ``height``."""
    # V = pi * r^2 * h / 3
    return math.pi * radius**2 * height/3
def main():
    """Prompt for a cone's radius and height; print slant, areas and volume."""
    print("-------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE VOLUME AND SURFACE AREA OF A CONE")
    print("-------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    radius = eval(input("Please enter the radius of a cone: "))
    height = eval(input("Please enter the height of a cone: "))
    print()
    # Standard cone formulas; slant length l = sqrt(r^2 + h^2).
    surface = math.pi * radius * (radius + math.sqrt (height**2 + radius**2))
    cone_volume = math.pi * radius**2 * height/3
    lateral = math.pi * radius * math.sqrt (height**2 + radius**2)
    slant = math.sqrt (radius**2 + height**2)
    print("Length of side (slant) of the cone = ", slant)
    print("The surface Area of the cone = ", surface)
    print("The volume of the cone = ", cone_volume)
    print("Lateral surface area of the cube = ", lateral)


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,950
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Cylinder.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find volume and surface area of a cylinder
#
####################################################
import math
def surfaceArea():
    # NOTE(review): unimplemented placeholder — main() computes the surface
    # area inline instead of calling this; implement or remove.
    pass
def volume(rad, hi):
    """Return the volume of a cylinder with radius ``rad`` and height ``hi``."""
    # V = pi * r * r * h (kept as repeated multiplication to match the
    # original's exact floating-point result).
    return math.pi * rad * rad * hi
def main():
    """Prompt for a cylinder's radius and height; print areas and volume."""
    print("-----------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE VOLUME AND SURFACE AREA OF A CYLINDER")
    print("-----------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    radius = eval(input("Please enter the radius: "))
    height = eval(input("Please enter the height: "))
    # Standard cylinder formulas.
    surface = 2 * math.pi * radius * height + 2* math.pi * radius**2
    cyl_volume = math.pi * radius**2 * height
    lateral = 2 * math.pi * radius * height
    end_cap = math.pi * radius**2
    print("The surface area of the cylinder = ", surface)
    print("The volume of the cylinder =", cyl_volume)
    print("Lateral surface are of the cylinder = ", lateral)
    print("Top OR bottom surface area of the cylinder = ", end_cap)
    print("-------------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,951
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/coneTest.py
|
import unittest
import cone
class coneTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(cone.volume(5,10) == 261.79938779914943)
def test_volume2(self):
assert(cone.volume(7,69) == 3540.5749205956963)
#test that fails
def test_volume3(self):
assert(cone.volume(2,3) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,952
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/equilateralTriangleTest.py
|
import unittest
import equilateralTriangle
class equilatealTriangleTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(equilateralTriangle.area(5) == 10.825317547305483)
def test_volume2(self):
assert(equilateralTriangle.area(69) == 2061.573473708856)
#test that fails
def test_volume3(self):
assert(equilateralTriangle.area(6) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,953
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/Trapezoid.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find the area of a trapezoid
#
####################################################
import math
def median():
    # NOTE(review): unimplemented placeholder — area() and main() compute the
    # median inline instead of calling this; implement or remove.
    pass
def area(base1, base2, height):
    """Return the area of a trapezoid: median (average of bases) times height."""
    midline = (base1 + base2) / 2
    return midline * height
def main():
    """Prompt for the two bases and the height; print area and median."""
    print("-------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE AREA OF A TRAPEZOID")
    print("-------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    base1 = eval(input("Please Enter the base1 :"))
    base2 = eval(input("Please Enter the the base2 :"))
    height = eval(input("Please Enter the height :"))
    # The median is the average of the two bases; area = median * height.
    midline = (base1 + base2) / 2
    trap_area = (midline) * height
    print()
    print(" Area of a Trapezoid = ", trap_area)
    print(" Median of a trapezoid = ", midline)
    print("--------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,954
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/triangleTest.py
|
import unittest
import triangle
class triangleTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(triangle.area(5,10,15) == 0)
def test_volume2(self):
assert(triangle.area(20,40,60) == 0)
#test that fails
def test_volume3(self):
assert(triangle.area(8,8,8) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,955
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/cuboidTest.py
|
import unittest
import cuboid
class cuboidTest(unittest.TestCase):
#tests that pass
def test_volume1(self):
assert(cuboid.volume(5,10,15) == 750)
def test_volume2(self):
assert(cuboid.volume(7,69,100) == 48300)
#test that fails
def test_volume3(self):
assert(cuboid.volume(5,4,3) == 0)
if __name__ == '__main__':
unittest.main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,893,956
|
keegangunkel/SoftwareTestingLab2
|
refs/heads/master
|
/EquilateralTriangle.py
|
####################################################
#
# Keegan Gunkel
#
# Purpose to find the area of an equilateral triangle
#
####################################################
import math
def perimeter():
    # NOTE(review): unimplemented placeholder — main() computes the perimeter
    # inline instead of calling this; implement or remove.
    pass
def area(length):
    """Return the area of an equilateral triangle with the given side length."""
    # A = (sqrt(3)/4) * s^2
    return (math.sqrt(3)/4)*length**2
def main():
    """Prompt for a side length; print area, perimeter, semi-perimeter, altitude."""
    print("---------------------------------------------------------------")
    print("PYTHON PROGRAM TO FIND THE AREA OF AN EQUILATERAL TRIANGLE")
    print("---------------------------------------------------------------")
    # NOTE(review): eval() executes arbitrary user input; kept for parity.
    side = eval(input("Please Enter Length of any Equilateral Triangle: "))
    # Standard equilateral-triangle formulas.
    tri_area = (math.sqrt(3)/4)*side**2
    perim = side*3
    semi = perim/2
    alt = side*math.sqrt(3)*.5
    print()
    print(" Area of Equilateral triangle = ", tri_area)
    print(" Perimeter of Equilateral Triangle = ", perim)
    print(" Semi Perimeter of Equilateral Triangle = ",semi)
    print(" Altitude of Equilateral Triangle = ", alt)
    print("----------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
{"/mainprogram.py": ["/problem6.py", "/problem8.py"], "/geometryCalculatorTest.py": ["/geometryCalculator.py"]}
|
34,907,705
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/utils/client_utils.py
|
"""This module contains utility functions for using client instances under
tests. Client is started in docker during acceptance, cucumber and performance
tests.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.utils.user_utils import User
from tests.utils.docker_utils import run_cmd
from tests.utils.path_utils import escape_path
from tests.utils.cucumber_utils import repeat_until
from tests.utils.utils import set_dns, get_token, get_oz_cookie
import os
import pytest
import subprocess
class Client:
    """Handle to an oneclient instance running inside a docker container."""
    def __init__(self, docker_id, mount_path):
        # Default timeout (seconds) for operations on this client; stays 0
        # until set_timeout() is called during mounting.
        self.timeout = 0
        self.docker_id = docker_id
        self.mount_path = mount_path

    def set_timeout(self, timeout):
        # Record the per-client default timeout (read from the env description).
        self.timeout = timeout
def mount_users(request, onedata_environment, context, client_ids, env_description_file,
users=[], client_instances=[], mount_paths=[],
client_hosts=[], tokens=[], check=True):
# current version is for environment with one OZ
oz_node = onedata_environment['oz_worker_nodes'][0]
set_dns(onedata_environment)
client_data = onedata_environment['client_data']
clients = create_clients(users, client_hosts, mount_paths, client_ids)
def fin():
params = zip(users, clients)
for user, client in params:
clean_mount_path(user, client)
request.addfinalizer(fin)
parameters = zip(users, clients, client_instances, mount_paths,
client_hosts, tokens)
for user_name, client, client_instance, mount_path, client_host, token_arg in parameters:
data = client_data[client_host][client_instance]
oz_domain = data['zone_domain']
# get OZ cookie from env description file
cookie = get_oz_cookie(env_description_file, oz_domain, node_name=False)
user = context.get_user(user_name)
if not user:
user = User(user_name,
id=user_name,
oz_domain=oz_domain)
# get token for user
if token_arg != 'bad_token':
token = get_token(token_arg, user.id, oz_node, cookie)
client.set_timeout(data.get('default_timeout', 0))
print "User {user} mounts oneclient using token: {token}"\
.format(user=user_name, token=token)
# /root has to be accessible for gdb to access /root/bin/oneclient
assert run_cmd('root', client, 'chmod +x /root') == 0
token_path = "/tmp/token"
cmd = ('mkdir -p {mount_path}'
' && export GLOBAL_REGISTRY_URL={gr_domain}'
' && export PROVIDER_HOSTNAME={op_domain}'
' && export X509_USER_CERT={user_cert}'
' && export X509_USER_KEY={user_key}'
' && echo {token} > {token_path}'
' && gdb oneclient -batch -return-child-result -ex \'run --authentication token --no_check_certificate {mount_path} < {token_path}\' -ex \'bt\' 2>&1'
).format(mount_path=mount_path,
gr_domain=oz_domain,
op_domain=data['op_domain'],
user_cert=data['user_cert'],
user_key=data['user_key'],
user=user_name,
token=token,
token_path=token_path)
ret = run_cmd(user_name, client, cmd)
if ret != 0 and check and token_arg != "bad token":
# if token was different than "bad token" and mounting failed
clean_mount_path(user_name, client)
pytest.skip("Error mounting oneclient")
user.update_clients(client_instance, client)
if not context.has_user(user):
context.add_user(user)
# remove accessToken to mount many clients on one docker
rm(client, recursive=True, force=True,
path=os.path.join(os.path.dirname(mount_path), ".local"))
rm(client, recursive=True, force=True, path=token_path)
if check and token != 'bad_token':
if not clean_spaces_safe(user_name, client):
pytest.skip("Test skipped beacause of failing to clean spaces")
save_op_code(context, user_name, ret)
def ls(client, user="root", path=".", output=True):
    """CAUTION: this function returns list of paths not string"""
    cmd = "ls {path}".format(path=escape_path(path))
    # sometimes paths are separated with 2 spaces, '\t' or '\n'
    # NOTE(review): as a consequence, file names that themselves contain
    # spaces will be split into multiple entries by the replace/split below.
    return run_cmd(user, client, cmd, output=output).strip()\
        .replace(' ', '\n').replace('\t', '\n').split('\n')
def mv(client, src, dest, user="root", output=False):
    """Move/rename ``src`` to ``dest`` inside the client's container."""
    command = "mv {src} {dest}".format(src=escape_path(src),
                                       dest=escape_path(dest))
    return run_cmd(user, client, command, output=output)
def chmod(client, mode, file, user="root", output=False):
    """Change ``file``'s permissions to ``mode`` inside the client's container."""
    command = "chmod {mode} {file}".format(mode=mode, file=escape_path(file))
    return run_cmd(user, client, command, output=output)
def stat(client, path, format=None, user="root", output=True):
    """Run stat on ``path``; ``format`` (if given) becomes --format='...'."""
    fmt_opt = "--format='{0}'".format(format) if format else ""
    command = "stat {path} {format}".format(path=escape_path(path),
                                            format=fmt_opt)
    return run_cmd(user, client, command, output=output)
def rm(client, path, recursive=False, force=False, user="root", output=False):
    """Remove ``path``; ``recursive``/``force`` map to rm's -r/-f flags."""
    command = "rm {0} {1} {2}".format("-r" if recursive else "",
                                      "-f" if force else "",
                                      escape_path(path))
    return run_cmd(user, client, command, output=output)
def rmdir(client, dir_path, recursive=False, from_path=None, user="root",
          output=False):
    """rmdir ``dir_path``, optionally after cd-ing to ``from_path``; -p when recursive."""
    prefix = "cd {0} &&".format(escape_path(from_path)) if from_path else ""
    command = "{0}rmdir {1} {2}".format(prefix,
                                        "-p" if recursive else "",
                                        escape_path(dir_path))
    return run_cmd(user, client, command, output=output)
def mkdir(client, dir_path, recursive=False, user="root", output=False):
    """Create ``dir_path`` inside the client's container; -p when recursive."""
    command = "mkdir {0} {1}".format("-p" if recursive else "",
                                     escape_path(dir_path))
    return run_cmd(user, client, command, output=output)
def touch(client, file_path, user="root", output=False):
    """touch ``file_path`` inside the client's container."""
    return run_cmd(user, client,
                   "touch {0}".format(escape_path(file_path)),
                   output=output)
def cp(client, src, dest, recursive=False, user="root", output=False):
    """Copy *src* to *dest* on the client; -r for directory trees."""
    recursive_flag = "-r" if recursive else ""
    command = "cp {0} {1} {2}".format(recursive_flag,
                                      escape_path(src),
                                      escape_path(dest))
    return run_cmd(user, client, command, output=output)
def truncate(client, file_path, size, user="root", output=False):
    """Set *file_path* to exactly *size* bytes via `truncate --size`."""
    command = "truncate --size={0} {1}".format(size,
                                               escape_path(file_path))
    return run_cmd(user, client, command, output=output)
def dd(client, block_size, count, output_file, unit='M', input_file="/dev/zero",
       user="root", output=False, error=True):
    """Run `dd` copying *count* blocks of *block_size* *unit* from
    *input_file* to *output_file*.

    Fix: the *error* flag was accepted but silently ignored — run_cmd was
    always called with error=True.  It is now forwarded; the default is
    True so every existing caller keeps its previous effective behaviour.
    """
    cmd = "dd {input} {output} {bs} {count}".format(
        input="if={0}".format(escape_path(input_file)),
        output="of={0}".format(escape_path(output_file)),
        bs="bs={0}{1}".format(block_size, unit),
        count="count={0}".format(count))
    return run_cmd(user, client, cmd, output=output, error=error)
def echo_to_file(client, text, file_path, new_line=False, escape=False,
                 user="root", overwrite=True, output=False):
    """Write *text* into *file_path* using shell `echo`.

    -n suppresses the trailing newline (the default here); -e enables
    backslash escapes; overwrite=False appends instead of truncating.
    NOTE(review): *text* is placed inside single quotes unescaped, so a
    text containing "'" breaks the command — confirm callers avoid it.
    """
    redirect_op = ">" if overwrite else ">>"
    command = "echo {0} {1} '{2}' {3} {4}".format(
        "-n" if not new_line else "",
        "-e" if escape else "",
        text,
        redirect_op,
        escape_path(file_path))
    return run_cmd(user, client, command, output=output)
def cat(client, file_path, user="root", output=True):
    """Return the contents of *file_path* as read by `cat`."""
    return run_cmd(user, client,
                   "cat {0}".format(escape_path(file_path)),
                   output=output)
def md5sum(client, file_path, user="root", output=True):
    """Return the raw `md5sum` output for *file_path* ("<hash>  <path>")."""
    return run_cmd(user, client,
                   "md5sum {0}".format(escape_path(file_path)),
                   output=output)
def mktemp(client, path=None, dir=False, user="root", output=True):
    """Create a temporary file (or directory with dir=True) on the client
    and return its path, stripped of surrounding whitespace.

    Fix: *output* is now passed to run_cmd by keyword, as every other
    helper in this module does; the previous positional call relied on
    run_cmd's parameter order.
    """
    cmd = "mktemp {dir} {path}".format(
        dir="--directory" if dir else "",
        path="--tmpdir={0}".format(escape_path(path)) if path else "")
    return run_cmd(user, client, cmd, output=output).strip()
def replace_pattern(client, file_path, pattern, new_text, user='root',
                    output=False):
    """Replace every occurrence of *pattern* with *new_text* in
    *file_path*, in place, via `sed -i`.

    NOTE(review): *pattern* and *new_text* are interpolated verbatim, so
    '/' or sed metacharacters in them will break the expression.
    """
    command = "sed -i 's/{0}/{1}/g' {2}".format(pattern, new_text,
                                                escape_path(file_path))
    return run_cmd(user, client, command, output=output)
def fusermount(client, path, user='root', unmount=False, lazy=False,
               quiet=False, output=False):
    """Run `fusermount` on *path* (-u unmount, -z lazy, -q quiet).

    Fix: *output* is now passed to run_cmd by keyword, consistent with
    the other helpers; the previous positional call relied on run_cmd's
    parameter order.
    """
    cmd = "fusermount {unmount} {lazy} {quiet} {path}".format(
        unmount="-u" if unmount else "",
        lazy="-z" if lazy else "",
        quiet="-q" if quiet else "",
        path=escape_path(path)
    )
    return run_cmd(user, client, cmd, output=output)
def create_clients(users, client_hosts, mount_paths, client_ids):
    """Build one Client per (user, host, mount_path) triple.

    The user entry is only consumed to keep the three sequences zipped;
    the Client itself is keyed by the host's id and mount path.
    """
    return [Client(client_ids[host], mount_path)
            for _user, host, mount_path in zip(users, client_hosts,
                                               mount_paths)]
def clean_spaces_safe(user, client):
    """Retry clean_spaces (up to 5 attempts via repeat_until), treating a
    CalledProcessError as a failed attempt rather than an error."""
    def attempt():
        try:
            clean_spaces(user, client)
        except subprocess.CalledProcessError:
            return False
        return True
    return repeat_until(attempt, 5)
def clean_spaces(user, client):
    """Force-remove the contents of every space listed under the client's
    mount point (the space directories themselves are kept)."""
    for space in ls(client, user=user, path=client.mount_path):
        wildcard = client_mount_path(os.path.join(str(space), '*'), client)
        rm(client, recursive=True, force=True, user=user, path=wildcard)
def clean_mount_path(user, client):
    """Best-effort teardown of the client mount: try to empty the spaces,
    then always kill any running oneclient, unmount, and delete the
    mount path.

    Fix: the bare `except:` (which also swallowed SystemExit and
    KeyboardInterrupt) is narrowed to `except Exception`.
    """
    try:
        clean_spaces(user, client)
    except Exception:
        # best-effort only; cleanup below must run regardless
        pass
    finally:
        # get pid of running oneclient node
        pid = run_cmd('root', client,
                      " | ".join(
                          ["ps aux",
                           "grep './oneclient --authentication token --no_check_certificate '" + client.mount_path,
                           "grep -v 'grep'",
                           "awk '{print $2}'"]),
                      output=True)
        if pid != "":
            # kill oneclient process
            # NOTE(review): if several processes match, pid holds several
            # newline-separated numbers passed as one kill argument —
            # confirm a single match is guaranteed
            run_cmd("root", client, "kill -KILL " + str(pid))
        # unmount onedata
        fusermount(client, client.mount_path, user=user, unmount=True)
        rm(client, recursive=True, force=True, path=client.mount_path)
def client_mount_path(path, client):
    """Return *path* made absolute under the client's mount point."""
    relative = str(path)
    return os.path.join(client.mount_path, relative)
def save_op_code(context, user, op_code):
    """Record the return code of *user*'s last operation on the context."""
    user_record = context.users[user]
    user_record.last_op_ret_code = op_code
def get_client(client_node, user, context):
    """Look up *user*'s client object for the given client node."""
    user_record = context.users[user]
    return user_record.clients[client_node]
def user_home_dir(user="root"):
    """Return the conventional home directory path for *user*."""
    home_root = "/home"
    return os.path.join(home_root, user)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,706
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/multi_reg_file_steps.py
|
"""Module implements pytest-bdd steps for operations on regular files.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
import subprocess
from tests.utils.cucumber_utils import *
from tests.utils.client_utils import cp, truncate, dd, echo_to_file, cat, \
md5sum, replace_pattern, client_mount_path, save_op_code, get_client
from tests.utils.docker_utils import run_cmd
@when(parsers.parse('{user} writes "{data}" at offset {offset} to {file} on {client_node}'))
def write_at_offset(user, data, offset, file, client_node, context):
    """Write *data* into *file* at byte *offset* via an inline `python -c`
    snippet executed on the client; records the exit code on the context.

    NOTE(review): the embedded snippet's line indentation is significant
    to the inner python interpreter — confirm it survived copy/paste.
    """
    client = get_client(client_node, user, context)
    path = client_mount_path(file, client)
    write_command = '''python -c "with open(\\"{path}\\", \\"r+b\\") as file:
file.seek({offset})
file.write(\\"{data}\\")"
'''.format(path=path, offset=offset, data=data)
    ret = run_cmd(user, client, write_command)
    save_op_code(context, user, ret)
@when(parsers.parse('{user} writes {megabytes} MB of random characters to {file} on {client_node} and saves MD5'))
@then(parsers.parse('{user} writes {megabytes} MB of random characters to {file} on {client_node} and saves MD5'))
def write_rand_text(user, megabytes, file, client_node, context):
    """dd *megabytes* MB into *file* and remember its MD5 on the context.

    NOTE(review): dd reads from /dev/zero, so despite the step wording
    the written data is zeros, not random characters.
    """
    client = get_client(client_node, user, context)
    target = client_mount_path(file, client)
    ret = dd(client, megabytes, 1, target, user=user, output=False)
    context.md5 = md5sum(client, target, user=user).split()[0]
    save_op_code(context, user, ret)
@when(parsers.parse('{user} writes "{text}" to {file} on {client_node}'))
@then(parsers.parse('{user} writes "{text}" to {file} on {client_node}'))
def write_text(user, text, file, client_node, context):
    """Overwrite *file* with *text* (echo -e: escapes interpreted, no
    trailing newline) and record the exit code."""
    client = get_client(client_node, user, context)
    target = client_mount_path(file, client)
    ret = echo_to_file(client, str(text), target, escape=True, user=user)
    save_op_code(context, user, ret)
@when(parsers.parse('{user} reads "{text}" from {file} on {client_node}'))
@then(parsers.parse('{user} reads "{text}" from {file} on {client_node}'))
def read(user, text, file, client_node, context):
    """Assert that *file* eventually contains exactly *text*, polling up
    to the client's timeout."""
    client = get_client(client_node, user, context)
    # interpret escape sequences such as \n in the feature text (python 2)
    expected = text.decode('string_escape')
    def file_matches():
        try:
            content = cat(client, client_mount_path(file, client),
                          user=user)
        except subprocess.CalledProcessError:
            return False
        return content == expected
    assert repeat_until(file_matches, client.timeout)
@then(parsers.parse('{user} reads "" from {file} on {client_node}'))
def read_empty(user, file, client_node, context):
    """Special case of `read` asserting *file* is (eventually) empty."""
    read(user, '', file, client_node, context)
@then(parsers.parse('{user} cannot read from {file} on {client_node}'))
def cannot_read(user, file, client_node, context):
    """Assert that `cat` on *file* fails with a non-zero exit code."""
    client = get_client(client_node, user, context)
    ret = cat(client, client_mount_path(file, client), user=user,
              output=False)
    assert ret != 0
@when(parsers.parse('{user} appends "{text}" to {file} on {client_node}'))
def append(user, text, file, client_node, context):
    """Append *text* to *file* (overwrite=False) and record the code."""
    client = get_client(client_node, user, context)
    target = client_mount_path(file, client)
    ret = echo_to_file(client, str(text), target, user=user,
                       overwrite=False)
    save_op_code(context, user, ret)
@when(parsers.parse('{user} replaces "{text1}" with "{text2}" in {file} on {client_node}'))
def replace(user, text1, text2, file, client_node, context):
    """sed-replace every occurrence of *text1* with *text2* in *file*."""
    client = get_client(client_node, user, context)
    target = client_mount_path(file, client)
    ret = replace_pattern(client, target, text1, text2, user)
    save_op_code(context, user, ret)
@when(parsers.parse('{user} executes {file} on {client_node}'))
@then(parsers.parse('{user} executes {file} on {client_node}'))
def execute_script(user, file, client_node, context):
    """Run *file* as a command on the client and record its exit code."""
    client = get_client(client_node, user, context)
    script = client_mount_path(file, client)
    save_op_code(context, user, run_cmd(user, client, script))
@when(parsers.parse('{user} checks MD5 of {file} on {client_node}'))
@then(parsers.parse('{user} checks MD5 of {file} on {client_node}'))
def check_md5(user, file, client_node, context):
    """Assert that *file*'s MD5 eventually matches the hash previously
    stored on context.md5 (see write_rand_text)."""
    client = get_client(client_node, user, context)
    def md5_matches():
        try:
            current = md5sum(client, client_mount_path(file, client),
                             user=user)
        except subprocess.CalledProcessError:
            return False
        return current.split()[0] == context.md5
    assert repeat_until(md5_matches, client.timeout)
@when(parsers.parse('{user} copies regular file {file} to {path} on {client_node}'))
def copy_reg_file(user, file, path, client_node, context):
    """Copy *file* to *path* inside the mount and record cp's exit code."""
    client = get_client(client_node, user, context)
    source = client_mount_path(file, client)
    destination = client_mount_path(path, client)
    save_op_code(context, user, cp(client, source, destination, user=user))
@when(parsers.parse('{user} changes {file} size to {new_size} bytes on {client_node}'))
def do_truncate(user, file, new_size, client_node, context):
    """Truncate (or extend) *file* to *new_size* bytes; record the code."""
    client = get_client(client_node, user, context)
    target = client_mount_path(file, client)
    ret = truncate(client, target, new_size, user=user)
    save_op_code(context, user, ret)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,707
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/packaging/rpm/rpm_install_test_data/rpm_install_script.py
|
from __future__ import print_function
import sys
from subprocess import STDOUT, check_call, check_output
# get packages
packages = check_output(['ls', '/root/pkg']).split()
packages = sorted(packages, reverse=True)
op_panel_package = \
[path for path in packages if path.startswith('op-panel') and
path.endswith('.rpm')][0]
cluster_manager_package = \
[path for path in packages if path.startswith('cluster-manager') and
path.endswith('.rpm')][0]
op_worker_package = \
[path for path in packages if path.startswith('op-worker') and
path.endswith('.rpm')][0]
oneprovider_package = \
[path for path in packages if path.startswith('oneprovider') and
path.endswith('.rpm')][0]
oneclient_package = [path for path in packages
if path.startswith('oneclient') and
not path.startswith('oneclient-debuginfo')][0]
# get couchbase
check_call(['wget', 'http://packages.couchbase.com/releases/4.0.0/couchbase'
'-server-community-4.0.0-centos7.x86_64.rpm'])
# install all
check_call(['dnf', '-y', 'install',
'couchbase-server-community-4.0.0-centos7.x86_64.rpm'],
stderr=STDOUT)
check_call(['dnf', '-y', 'install', '/root/pkg/' + op_panel_package],
stderr=STDOUT)
check_call(['dnf', '-y', 'install', '/root/pkg/' + cluster_manager_package],
stderr=STDOUT)
check_call(['dnf', '-y', 'install', '/root/pkg/' + op_worker_package],
stderr=STDOUT)
check_call(['dnf', '-y', 'install', '/root/pkg/' + oneprovider_package],
stderr=STDOUT)
check_call(['dnf', '-y', 'install', '/root/pkg/' + oneclient_package],
stderr=STDOUT)
# package installation validation
check_call(['service', 'op_panel', 'status'])
check_call(['ls', '/etc/cluster_manager/app.config'])
check_call(['ls', '/etc/op_worker/app.config'])
check_call(['/usr/bin/oneclient', '--help'])
# disable OZ cert verification
check_call(['sed', '-i', 's/{verify_oz_cert, true}/{verify_oz_cert, false}/g',
'/etc/op_panel/app.config'])
check_call(['service', 'op_panel', 'restart'])
# download missing bundle
check_call(['wget', '-O', '/etc/ssl/cert.pem',
'https://raw.githubusercontent.com/bagder/ca-bundle/master/'
'ca-bundle.crt'])
# oneprovider configure and install
check_call(['op_panel_admin', '--install', '/root/data/install.yml'])
# validate oneprovider is running
check_call(['service', 'cluster_manager', 'status'])
check_call(['service', 'op_worker', 'status'])
# uninstall
check_call(['op_panel_admin', '--uninstall'])
sys.exit(0)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,708
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_multiprovider_replication.py
|
"""Test suite for CRUD operations on regular files in onedata.
"""
__author__ = "Tomasz Lichon"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import pytest
from pytest_bdd import scenario
from tests import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.cucumber.steps.multi_dir_steps import *
from tests.cucumber.steps.multi_file_steps import *
from tests.cucumber.steps.multi_reg_file_steps import *
@pytest.fixture(scope="module", params=["multiprovider_directio_env.json",
                                        "multiprovider_env.json"])
def env_description_file(request):
    """Absolute path of the parametrized multiprovider env description."""
    return os.path.join(CUSTOM_CUCUMBER_ENV_DIR, request.param)
@scenario(
    '../features/multiprovider_replication.feature',
    'Create files and see them on external provider'
)
def test_create_and_list(env_description_file):
    """Bind the 'Create files and see them on external provider' scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Create empty file and read it on external provider'
)
def test_create_empty_and_read(env_description_file):
    """Bind the 'Create empty file and read it on external provider' scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Write to file and check size on remote provider'
)
def test_write_and_check_size(env_description_file):
    """Bind the 'Write to file and check size on remote provider' scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Write to file and read on remote provider'
)
def test_write_and_read(env_description_file):
    """Bind the 'Write to file and read on remote provider' scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Big file transfer with MD5 check'
)
def test_big_transfer_and_md5_check(env_description_file):
    """Bind the 'Big file transfer with MD5 check' scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Create nonempty file and override its contents on remote provider'
)
def test_remote_file_override(env_description_file):
    """Bind the remote-override scenario."""
    pass
@scenario(
    '../features/multiprovider_replication.feature',
    'Create nonempty file and remove it on remote provider'
)
def test_remote_file_removal(env_description_file):
    """Bind the remote-removal scenario."""
    pass
@pytest.mark.xfail_env(envs=["multiprovider_directio_env.json",
                             "multiprovider_env.json"],
                       reason="environement synchronization")
@scenario(
    '../features/multiprovider_replication.feature',
    'Create nonempty file, append remotely, append locally and read both'
)
def test_sequential_appends(env_description_file):
    """Bind the sequential-appends scenario (expected failure, see xfail)."""
    pass
# todo fix environment synchronization
@pytest.mark.xfail_env(envs=["multiprovider_directio_env.json",
                             "multiprovider_env.json"],
                       reason="environement synchronization")
@scenario(
    '../features/multiprovider_replication.feature',
    'Concurrently write disjoint ranges and read the same on both providers'
)
def test_conflict_on_disjoint_blocks(env_description_file):
    """Bind the disjoint-blocks conflict scenario (expected failure)."""
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,709
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_directory_CRUD.py
|
"""Test suite for CRUD operations on directories in onedata.
"""
__author__ = "Jakub Kudzia, Piotr Ociepka"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.auth_steps import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.dir_steps import *
from tests.cucumber.steps.file_steps import *
from tests.utils.path_utils import env_file
import pytest
from pytest_bdd import scenario
@pytest.fixture(scope="module",
                params=["singleprovider_singleclient_directio",
                        "singleprovider_singleclient_proxy"])
def env_description_file(request):
    """Parametrized path of the single-provider environment description."""
    return env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)
@scenario(
    '../features/directory_CRUD.feature',
    'Create directory'
)
def test_create(env_description_file):
    """Bind the 'Create directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Create directory in spaces directory'
)
def test_create_spaces_dir(env_description_file):
    """Bind the 'Create directory in spaces directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Create space'
)
def test_create_space(env_description_file):
    """Bind the 'Create space' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Rename directory'
)
def test_rename(env_description_file):
    """Bind the 'Rename directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Delete empty directory'
)
def test_delete(env_description_file):
    """Bind the 'Delete empty directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Delete space'
)
def test_delete_space(env_description_file):
    """Bind the 'Delete space' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Child directories'
)
def test_children(env_description_file):
    """Bind the 'Child directories' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Child directories 2'
)
def test_children2(env_description_file):
    """Bind the 'Child directories 2' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Duplication'
)
def test_duplication(env_description_file):
    """Bind the 'Duplication' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Delete empty directory and parents'
)
def test_delete_parents(env_description_file):
    """Bind the 'Delete empty directory and parents' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Delete non-empty directory in wrong way'
)
def test_delete_non_empty_wrong(env_description_file):
    """Bind the 'Delete non-empty directory in wrong way' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Delete non-empty directory'
)
def test_delete_non_empty(env_description_file):
    """Bind the 'Delete non-empty directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Move directory'
)
def test_move(env_description_file):
    """Bind the 'Move directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Copy directory'
)
def test_copy(env_description_file):
    """Bind the 'Copy directory' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Move directory to itself'
)
def test_move_to_itself(env_description_file):
    """Bind the 'Move directory to itself' scenario."""
    pass
@scenario(
    '../features/directory_CRUD.feature',
    'Move directory to its subtree'
)
def test_move_to_subtree(env_description_file):
    """Bind the 'Move directory to its subtree' scenario."""
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,710
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/gui/steps/common.py
|
"""Common steps used in various GUI testing scenarios
"""
__author__ = "Jakub Liput"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import re
import time
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from tests.utils.cucumber_utils import list_parser
from tests.gui.utils.generic import parse_url
from tests.gui.conftest import WAIT_FRONTEND, WAIT_BACKEND
from pytest_bdd import given, when, then, parsers
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait as Wait
@then(parsers.parse('user should see that a page title contains "{text}"'))
def title_contains(selenium, text):
    """Wait (frontend timeout) until the page title contains *text*."""
    Wait(selenium, WAIT_FRONTEND).until(EC.title_contains(text))
@when(parsers.parse('user types "{text}" on keyboard'))
def type_string_into_active_element(selenium, text):
    """Send *text* as keystrokes to the currently focused element."""
    selenium.switch_to.active_element.send_keys(text)
@when(parsers.parse('user presses enter on keyboard'))
def press_enter_on_active_element(selenium):
    """Send the RETURN key to the currently focused element."""
    selenium.switch_to.active_element.send_keys(Keys.RETURN)
@then(parsers.parse('user should see {links_names} links'))
def link_with_text_present(selenium, links_names):
    """Assert a link element exists for every parsed name.

    find_element_by_link_text raises NoSuchElementException for a missing
    link, which also fails the step.
    """
    for name in list_parser(links_names):
        assert selenium.find_element_by_link_text(name)
def _click_on_link_with_text(selenium, link_name):
    # Click the first link whose visible text equals *link_name*.
    selenium.find_element_by_link_text(link_name).click()
@given(parsers.parse('user clicks on the "{link_name}" link'))
def g_click_on_link_with_text(selenium, link_name):
    """Given-step binding for clicking the named link."""
    _click_on_link_with_text(selenium, link_name)
@when(parsers.parse('user clicks on the "{link_name}" link'))
def w_click_on_link_with_text(selenium, link_name):
    """When-step binding for clicking the named link."""
    _click_on_link_with_text(selenium, link_name)
@when(parsers.parse('user is idle for {seconds:d} seconds'))
def wait_n_seconds(seconds):
    """Block the test for *seconds* seconds (simulates an idle user)."""
    time.sleep(seconds)
@when(parsers.re(r'user changes the relative URL to (?P<path>.+)'))
def visit_relative(selenium, path):
    """Navigate to *path* appended to the current page's base URL."""
    selenium.get(parse_url(selenium.current_url).group('base_url') + path)
@then(parsers.parse('user should see a page with "{text}" header'))
def page_with_header(selenium, text):
    """Wait (backend timeout) until some h1-h5 element's text equals
    *text* exactly; stale elements simply retry."""
    def has_matching_header(driver):
        headers = driver.find_elements_by_css_selector('h1, h2, h3, h4, h5')
        try:
            for header in headers:
                if header.text == text:
                    return True
            return False
        except StaleElementReferenceException:
            return False
    Wait(selenium, WAIT_BACKEND).until(has_matching_header)
@then(parsers.parse('user sees an {notify_type} notify with text matching to: {text_regexp}'))
def notify_visible_with_text(selenium, notify_type, text_regexp):
    """Wait for an ember notification of *notify_type* whose message text
    matches *text_regexp* (a regular expression string).
    """
    pattern = re.compile(text_regexp)

    def _matching_notify_shown(driver):
        try:
            messages = driver.find_elements_by_css_selector(
                '.ember-notify.ember-notify-show.{t} .message'.format(t=notify_type)
            )
            if not messages:
                # no notification rendered yet - keep polling
                return None
            return any(pattern.match(m.text) for m in messages)
        except NoSuchElementException:
            return None

    Wait(selenium, 2*WAIT_BACKEND).until(_matching_notify_shown)
# NOTE: the step below is currently unused and should stay unused,
# because it relies on knowledge of frontend routing internals.
@when(parsers.re(r'user changes application path to (?P<path>.+)'))
def on_ember_path(selenium, path):
    """Jump straight to an internal Ember route (implementation detail)."""
    base = parse_url(selenium.current_url).group('base_url')
    selenium.get(base + '/#' + path)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,711
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/__init__.py
|
"""This package contains python test files to run cucumber-like tests of
onedata. Files contains mapping to .feature files from tests/cucumber/features
that define tests scenarios.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,712
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/packaging/deb/deb_install_test_data/deb_install_script.py
|
"""Install, validate and uninstall oneprovider .deb packages inside a
test container.  Every step runs through check_call, so any failure
raises CalledProcessError and aborts the script; success exits with 0.
"""
from __future__ import print_function
import sys
from subprocess import STDOUT, check_call, check_output
# get packages
# NOTE(review): on Python 3 check_output returns bytes, so the str
# startswith() filters below assume this runs under Python 2 -- confirm
packages = check_output(['ls', '/root/pkg']).split()
# reverse sort so the highest version string is selected first by [0]
packages = sorted(packages, reverse=True)
op_panel_package = \
    [path for path in packages if path.startswith('op-panel')][0]
cluster_manager_package = \
    [path for path in packages if path.startswith('cluster-manager')][0]
op_worker_package = \
    [path for path in packages if path.startswith('op-worker')][0]
oneprovider_package = [path for path in packages
                       if path.startswith('oneprovider')][0]
oneclient_package = [path for path in packages
                     if path.startswith('oneclient') and
                     not path.startswith('oneclient-debuginfo')][0]
# update repositories
check_call(['apt-get', '-y', 'update'])
# add locale
check_call(['locale-gen', 'en_US.UTF-8'])
# install dependencies
check_call(['apt-get', '-y', 'install', 'curl', 'apt-transport-https', 'wget'])
# get couchbase
check_call(['wget', 'http://packages.couchbase.com/releases/4.0.0/couchbase'
            '-server-community_4.0.0-ubuntu14.04_amd64.deb'])
# install
# dpkg may fail on missing dependencies; the follow-up
# 'apt-get -f -y install' pulls them in and completes configuration
check_call(['sh', '-c',
            'dpkg -i couchbase-server-community_4.0.0-ubuntu14.04_amd64.deb '
            '; apt-get -f -y install'
            ], stderr=STDOUT)
check_call(['sh', '-c', 'dpkg -i /root/pkg/{package} ; apt-get -f -y '
            'install'.format(package=op_panel_package)
            ], stderr=STDOUT)
check_call(['sh', '-c', 'dpkg -i /root/pkg/{package} ; apt-get -f -y '
            'install'.format(package=cluster_manager_package)
            ], stderr=STDOUT)
check_call(['sh', '-c', 'dpkg -i /root/pkg/{package} ; apt-get -f -y '
            'install'.format(package=op_worker_package)
            ], stderr=STDOUT)
check_call(['dpkg', '-i', '/root/pkg/{package}'.
            format(package=oneprovider_package)], stderr=STDOUT)
check_call(['sh', '-c', 'dpkg -i /root/pkg/{package} ; apt-get -f -y '
            'install'.format(package=oneclient_package)
            ], stderr=STDOUT)
# package installation validation
check_call(['service', 'op_panel', 'status'])
check_call(['ls', '/etc/cluster_manager/app.config'])
check_call(['ls', '/etc/op_worker/app.config'])
check_call(['/usr/bin/oneclient', '--help'])
# disable OZ cert verification
check_call(['sed', '-i', 's/{verify_oz_cert, true}/{verify_oz_cert, false}/g',
            '/etc/op_panel/app.config'])
check_call(['service', 'op_panel', 'restart'])
# download missing bundle
check_call(['wget', '-O', '/etc/ssl/cert.pem',
            'https://raw.githubusercontent.com/bagder/ca-bundle/master/'
            'ca-bundle.crt'])
# oneprovider configure and install
check_call(['op_panel_admin', '--install', '/root/data/install.yml'])
# validate oneprovider is running
check_call(['service', 'cluster_manager', 'status'])
check_call(['service', 'op_worker', 'status'])
# uninstall
check_call(['op_panel_admin', '--uninstall'])
sys.exit(0)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,713
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/multi_auth_steps.py
|
"""Module implements pytest-bdd steps for authorization and mounting oneclient.
"""
__author__ = "Jakub Kudzia, Piotr Ociepka"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.utils.client_utils import (ls, mount_users, client_mount_path,
get_client)
from tests.utils.cucumber_utils import *
from pytest_bdd import given
import subprocess
@given(parsers.parse('{users} start oneclients {client_instances} in\n' +
                     '{mount_paths} on client_hosts\n' +
                     '{client_hosts} respectively,\n' +
                     'using {tokens}'))
def multi_mount(users, client_instances, mount_paths, client_hosts, tokens,
                request, onedata_environment, context, client_ids,
                env_description_file):
    """Mount a oneclient instance for every user described by the step's
    parallel lists (users, instances, mount paths, hosts, tokens).
    """
    parsed = dict(
        users=list_parser(users),
        client_instances=list_parser(client_instances),
        mount_paths=list_parser(mount_paths),
        client_hosts=list_parser(client_hosts),
        tokens=list_parser(tokens),
    )
    mount_users(request, onedata_environment, context, client_ids,
                env_description_file, **parsed)
@then(parsers.parse('{spaces} are mounted for {user} on {client_nodes}'))
def check_spaces(spaces, user, client_nodes, context):
    """Assert that every listed space appears in an ls of each client
    node's mount point, retrying until the client's timeout elapses.
    """
    expected_spaces = list_parser(spaces)
    user = str(user)
    for node in list_parser(client_nodes):
        client = get_client(node, user, context)

        def _all_spaces_listed():
            try:
                visible = ls(client, path=client.mount_path, user=user)
            except subprocess.CalledProcessError:
                # mount may not be ready yet; treat as "not there yet"
                return False
            return all(space in visible for space in expected_spaces)

        assert repeat_until(_all_spaces_listed, timeout=client.timeout)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,714
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/gui/steps/onezone_logged_in_common.py
|
"""Steps for features of Onezone login page.
"""
__author__ = "Jakub Liput"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import re
from tests.gui.conftest import WAIT_BACKEND, WAIT_FRONTEND
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait as Wait
from selenium.webdriver.support import expected_conditions as EC
from pytest_bdd import given, when, then, parsers
@given(parsers.parse('user expands the "{name}" Onezone sidebar panel'))
def uncollapse_oz_panel(selenium, name):
    """Expand the named accordion group in the Onezone sidebar, clicking
    its toggle only when the panel is not already expanded.
    """
    name_pattern = re.compile(name, re.I)

    def _find_group_and_toggle(driver):
        for group in driver.find_elements_by_css_selector('.main-accordion-group'):
            toggle = group.find_element_by_css_selector('a.main-accordion-toggle')
            if name_pattern.match(toggle.text):
                return group, toggle
        return None

    group, toggle = Wait(selenium, WAIT_FRONTEND).until(_find_group_and_toggle)
    # missing attribute counts the same as an explicit 'false'
    if group.get_attribute('aria-expanded') in (None, 'false'):
        toggle.click()
@given(parsers.parse('user clicks on the "{name}" provider in Onezone providers sidebar panel'))
def click_on_provider_in_sidebar(selenium, name):
    """Click the provider entry with the given name once the providers
    panel has finished expanding.

    Fix: the wait predicate previously ignored the driver instance that
    WebDriverWait passes in and closed over the outer ``selenium`` object
    instead; it now uses its argument, matching the expected-conditions
    callable contract.
    """
    collapse_providers = selenium.find_element_by_css_selector('#collapse-providers')
    Wait(selenium, WAIT_FRONTEND).until(
        lambda s: collapse_providers.get_attribute('aria-expanded') == 'true'
    )

    def the_provider_is_present(s):
        # re-query on every poll so freshly rendered entries are seen
        providers = s.find_elements_by_css_selector('.provider-header')
        named_providers = [e for e in providers if e.text == name]
        return named_providers[0] if named_providers else None

    Wait(selenium, WAIT_FRONTEND).until(the_provider_is_present).click()
@given(parsers.parse('user clicks on the "Go to your files" button in provider popup'))
def click_on_go_to_files_provider(selenium):
    """Press the 'Go to your files' control inside the provider popup."""
    def _files_button(driver):
        candidates = driver.find_elements_by_css_selector(
            '.provider-place-drop a, .provider-place-drop button')
        matches = [e for e in candidates if e.text == 'Go to your files']
        return matches[0] if matches else None

    Wait(selenium, WAIT_FRONTEND).until(_files_button).click()
@when('I click on the user alias edit element')
def click_user_alias_edit(selenium):
    """Open the alias editor and select all text in its input field."""
    Wait(selenium, WAIT_FRONTEND).until(
        EC.visibility_of_element_located(
            (By.CSS_SELECTOR, '.alias-panel a .space-header'))
    ).click()
    # select all text in the now-active input so typing replaces it
    selenium.execute_script('$(".alias-panel a input").select()')
@then('User alias should be changed to "<name>"')
@then(parsers.parse('User alias should be changed to "{name}"'))
def user_alias_equals(selenium, name):
alias_header = selenium.find_element_by_css_selector('.alias-panel .space-header')
Wait(selenium, WAIT_BACKEND).until(lambda s: alias_header.text == name)
# @when('I go to provider {provider}')
# def go_to_provider(selenium, provider):
# providers = selenium.find_elements_by_css_selector('.provider-header')
#
# def the_provider_is_present(s):
# named_providers = [e for e in providers if e.text == provider]
# if len(named_providers) > 0:
# return named_providers[0]
# else:
# return None
#
# Wait(selenium, WAIT_FRONTEND).until(the_provider_is_present).click()
# selenium.find_element_by_css_selector('.provider-place-drop a').click()
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,715
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/gui/steps/oneprovider_common.py
|
"""Common steps for Oneprovider.
"""
__author__ = "Jakub Liput"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.gui.steps import onezone_logged_in_common as onezone_session
from tests.gui.steps import onezone_before_login as onezone_no_session
from pytest_bdd import given, parsers
# @given(parsers.parse('''I'm logged into Oneprovider "{provider}" as development user "{user}"'''))
# def logged_in_dev_to_oneprovider(selenium, base_url, user, provider):
# onezone_no_session.login_dev_onezone_with_url(selenium, base_url, user)
# onezone_session.uncollapse_main_accordion(selenium, 'providers')
# onezone_session.go_to_provider(selenium, provider)
# pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,716
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/gui/steps/oneprovider_data.py
|
"""Steps for features of Onezone login page.
"""
__author__ = "Jakub Liput"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import re
from tests.gui.conftest import WAIT_FRONTEND, WAIT_BACKEND
from tests.gui.utils.generic import upload_file_path
from pytest_bdd import when, then, parsers
from selenium.webdriver.support.ui import WebDriverWait as Wait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
@when(parsers.re(r'user uses spaces select to change data space to "(?P<space_name>.+)"'))
def change_space(selenium, space_name):
    """Switch the data-space selector to the space matching *space_name*
    (regexp, case-insensitive) and wait for the file browser to reload.
    """
    # HACK: because Firefox driver have buggy EC.element_to_be_clickable,
    # we wait for loader to disappear
    Wait(selenium, WAIT_FRONTEND).until(
        EC.invisibility_of_element_located((By.CSS_SELECTOR, '.common-loader-spinner'))
    )
    # open the spaces dropdown
    Wait(selenium, WAIT_FRONTEND).until(
        EC.element_to_be_clickable((By.CSS_SELECTOR, '.data-spaces-select a[data-toggle=dropdown]'))
    ).click()
    # NOTE(review): the dropdown entries are snapshotted once here; if the
    # menu renders lazily the wait below only ever sees this snapshot -- confirm
    spaces = selenium.find_elements_by_css_selector('.data-spaces-select .dropdown-menu a')

    def space_by_name(_):
        named_spaces = [s for s in spaces if re.match(space_name, s.text.strip(), re.I)]
        if len(named_spaces) > 0 and named_spaces[0].is_enabled():
            return named_spaces[0]
        else:
            return None

    Wait(selenium, WAIT_FRONTEND).until(space_by_name).click()

    def file_browser_ready(driver):
        files_table = driver.find_element_by_css_selector('.files-table')
        # the table carries an 'is-loading' class while content is fetched
        return not re.match(r'.*is-loading.*', files_table.get_attribute('class'))

    Wait(selenium, WAIT_BACKEND).until(file_browser_ready)
@when(parsers.parse('user uses upload button in toolbar to upload file "{file_name}" to current dir'))
def upload_file_to_current_dir(selenium, file_name):
    """Upload *file_name* through the hidden toolbar file input.

    Selenium can only feed a local path to an <input type=file>, which
    the frontend keeps hidden, so the input is temporarily unhidden,
    given the path, then hidden again.
    """
    # HACK: Firefox driver refuses to interact with hidden elements
    selenium.execute_script("$('input#toolbar-file-browse').removeClass('hidden')")
    upload_input = selenium.find_element_by_css_selector('input#toolbar-file-browse')
    upload_input.send_keys(upload_file_path(file_name))
    selenium.execute_script("$('input#toolbar-file-browse').addClass('hidden')")
# @when(parsers.parse('The upload of file "{file_name}" fails'))
# @then(parsers.parse('The upload of file "{file_name}" should fail'))
# def upload_fails(selenium, file_name):
# Wait(selenium, 2*WAIT_BACKEND).until(
# lambda s: notify_visible_with_text(s, 'error', re.compile(r'.*' + file_name + r'.*' + 'failed' + r'.*'))
# )
#
#
# @then(parsers.parse('The upload of file "{file_name}" should succeed'))
# def upload_succeeds(selenium, file_name):
# Wait(selenium, 2*WAIT_BACKEND).until(
# lambda s: notify_visible_with_text(s, 'info', re.compile(r'.*' + file_name + r'.*' + 'successfully' + r'.*'))
# )
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,717
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_multi_directory_stat.py
|
"""Test suite for reading/changing metadata of directories in onedata,
in multi-client environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.cucumber.steps.multi_dir_steps import *
from tests.cucumber.steps.multi_file_steps import *
from tests.cucumber.steps.multi_reg_file_steps import *
from pytest_bdd import scenario
import pytest
# Each function below binds one scenario from multi_directory_stat.feature;
# bodies are empty because pytest-bdd drives the steps imported above.
@scenario(
    '../features/multi_directory_stat.feature',
    'Check file type'
)
def test_type(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Check default access permissions'
)
def test_default_access(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Change access permissions'
)
def test_change_access(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Change someone\'s file access permissions'
)
def test_change_access_someone(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Timestamps at creation'
)
def test_timestamp(env_description_file):
    pass


# # TODO VFS-1506
# expected to fail on the listed environments until VFS-1506 is resolved
@pytest.mark.xfail_env(
    envs=["singleprovider_multiclient_directio",
          "singleprovider_multiclient_proxy",
          "multiprovider_proxy",
          "multiprovider_directio"],
    reason="touch on file without write permission should fail, "
           "it will be checked in VFS-1506")
@scenario(
    '../features/multi_directory_stat.feature',
    'Update timestamps without write permission'
)
def test_update_timestamp_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Update timestamps with write permission'
)
def test_update_timestamp_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Access time'
)
def test_access_time(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Modification time'
)
def test_modification_time(env_description_file):
    pass


# TODO VFS-1821
# expected to fail on the listed environments until VFS-1821 is resolved
@pytest.mark.xfail_env(
    envs=["singleprovider_multiclient_directio",
          "singleprovider_multiclient_proxy",
          "multiprovider_proxy",
          "multiprovider_directio"],
    reason="status-change times is equal to access and modification, "
           "it will be checked VFS-1821")
@scenario(
    '../features/multi_directory_stat.feature',
    'Status-change time when changing mode'
)
def test_stat_change_time_chmod(env_description_file):
    pass


@scenario(
    '../features/multi_directory_stat.feature',
    'Status-change time when renaming'
)
def test_stat_change_time_mv(env_description_file):
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,718
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_directory_stat.py
|
"""Test suite for reading/changing metadata of directories in onedata.
"""
__author__ = "Jakub Kudzia, Piotr Ociepka"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.auth_steps import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.dir_steps import *
from tests.cucumber.steps.file_steps import *
from tests.cucumber.steps.reg_file_steps import *
from tests.utils.path_utils import env_file
from pytest_bdd import scenario
import pytest
@pytest.fixture(scope="module",
                params=["singleprovider_singleclient_directio",
                        "singleprovider_singleclient_proxy"])
def env_description_file(request):
    """Yield the env description file for each single-client environment."""
    return env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)


@scenario(
    '../features/directory_stat.feature',
    'Check file type'
)
def test_type(env_description_file):
    """Scenario: stat reports the correct file type for a directory."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Check default access permissions'
)
def test_default_access(env_description_file):
    """Scenario: a newly created directory has the expected default mode."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Change access permissions'
)
def test_change_access(env_description_file):
    """Scenario: the owner can change permissions of their own directory."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Timestamps at creation'
)
def test_timestamp(env_description_file):
    """Scenario: timestamps are set when the directory is created."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Update timestamps'
)
def test_update_timestamp(env_description_file):
    """Scenario: touch updates directory timestamps."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Access time'
)
def test_access_time(env_description_file):
    """Scenario: access time changes on read."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Modification time'
)
def test_modification_time(env_description_file):
    """Scenario: modification time changes on write."""
    pass


# TODO VFS-1821
@pytest.mark.xfail_env(
    envs=["singleprovider_singleclient_directio",
          "singleprovider_singleclient_proxy"],
    reason="status-change times is equal to access and modification")
@scenario(
    '../features/directory_stat.feature',
    'Status-change time when changing mode'
)
def test_stat_change_time_chmod(env_description_file):
    """Scenario: status-change time updates on chmod (known failure)."""
    pass


@scenario(
    '../features/directory_stat.feature',
    'Status-change time when renaming'
)
def test_stat_change_time_mv(env_description_file):
    """Scenario: status-change time updates on rename."""
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,719
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_luma_proxy.py
|
"""Test suite for operations on different storages with proxy luma
"""
__author__ = "Michal Wrona"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.utils.path_utils import env_file
from tests.cucumber.steps.auth_steps import *
from tests.cucumber.steps.file_steps import *
from tests.cucumber.steps.reg_file_steps import *
from tests.cucumber.steps.env_steps import *
from pytest_bdd import scenario
import pytest
@pytest.fixture(scope="module", params=["env_luma_proxy"])
def env_description_file(request):
    """Yield the env description file for the luma-proxy environment."""
    return env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)


@pytest.mark.skip_env(envs=['env_luma_proxy'],
                      reason="Luma cucumber test hangs sometimes")
@scenario(
    '../features/luma_proxy.feature',
    'Operations on POSIX storage'
)
def test_posix_storage_operations(env_description_file):
    """Scenario: file operations on POSIX storage with proxy luma."""
    pass


@pytest.mark.skip_env(envs=['env_luma_proxy'],
                      reason="Luma cucumber test hangs sometimes")
@scenario(
    '../features/luma_proxy.feature',
    'Operations on CEPH storage'
)
def test_ceph_storage_operations(env_description_file):
    """Scenario: file operations on CEPH storage with proxy luma."""
    pass


@pytest.mark.skip_env(envs=['env_luma_proxy'],
                      reason="Luma cucumber test hangs sometimes")
@scenario(
    '../features/luma_proxy.feature',
    'Operations on Amazon S3 storage'
)
def test_s3_storage_operations(env_description_file):
    """Scenario: file operations on Amazon S3 storage with proxy luma."""
    pass


@pytest.mark.skip_env(envs=['env_luma_proxy'],
                      reason="Luma cucumber test hangs sometimes")
@scenario(
    '../features/luma_proxy.feature',
    'Operations on Openstack Swift storage'
)
def test_swift_storage_operations(env_description_file):
    """Scenario: file operations on Openstack Swift storage with proxy luma."""
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,720
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/multi_file_steps.py
|
"""Module implements common steps for operation on files (both regular files
and directories)in multi-client environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import subprocess
from tests.utils.cucumber_utils import *
from tests.utils.client_utils import (ls, mv, chmod, stat, rm, touch,
client_mount_path, save_op_code,
get_client)
@when(parsers.parse('{user} updates {files} timestamps on {client_node}'))
@when(parsers.parse('{user} creates regular files {files} on {client_node}'))
@then(parsers.parse('{user} creates regular files {files} on {client_node}'))
def create_reg_file(user, files, client_node, context):
    """Touch each file in *files* as *user* on *client_node*.

    Also bound to the "updates ... timestamps" step: touching an existing
    file only refreshes its timestamps, so the same implementation serves
    both steps.
    """
    client = get_client(client_node, user, context)
    files = list_parser(files)
    for file in files:
        file_path = client_mount_path(file, client)
        def condition():
            # Retry until touch succeeds; every attempt's return code is
            # recorded so later steps can assert on the operation result.
            return_code = touch(client, file_path, user)
            save_op_code(context, user, return_code)
            return return_code == 0
        assert repeat_until(condition, client.timeout)
@when(parsers.parse('{user} sees {files} in {path} on {client_node}'))
@then(parsers.parse('{user} sees {files} in {path} on {client_node}'))
def ls_present(user, files, path, client_node, context):
    """Assert that every file in *files* eventually appears in *path*."""
    client = get_client(client_node, user, context)
    mounted = client_mount_path(path, client)
    expected = list_parser(files)

    def all_visible():
        try:
            listing = ls(client, user, mounted)
        except subprocess.CalledProcessError:
            return False
        return all(name in listing for name in expected)

    assert repeat_until(all_visible, client.timeout)
@when(parsers.parse('{user} doesn\'t see {files} in {path} on {client_node}'))
@then(parsers.parse('{user} doesn\'t see {files} in {path} on {client_node}'))
def ls_absent(user, files, path, client_node, context):
    """Assert that none of *files* is visible in *path* any more."""
    client = get_client(client_node, user, context)
    mounted = client_mount_path(path, client)
    unexpected = list_parser(files)

    def none_visible():
        try:
            listing = ls(client, user, mounted)
        except subprocess.CalledProcessError:
            return False
        return not any(name in listing for name in unexpected)

    assert repeat_until(none_visible, client.timeout)
@when(parsers.parse('{user} renames {file1} to {file2} on {client_node}'))
def rename(user, file1, file2, client_node, context):
    """Move *file1* to *file2* as *user*, retrying until success or timeout.

    The result is intentionally not asserted here: the return code is
    saved via save_op_code so a later "fails/succeeds" step can check it.
    """
    client = get_client(client_node, user, context)
    src = client_mount_path(file1, client)
    dest = client_mount_path(file2, client)
    def condition():
        cmd_return_code = mv(client, src, dest, user)
        save_op_code(context, user, cmd_return_code)
        return cmd_return_code == 0
    repeat_until(condition, client.timeout)
@when(parsers.parse('{user} deletes files {files} on {client_node}'))
def delete_file(user, files, client_node, context):
    """Remove each file in *files* as *user*, retrying until success or timeout.

    The result is intentionally not asserted: the return code is saved so
    a later step can check whether the deletion succeeded or failed.
    """
    client = get_client(client_node, user, context)
    files = list_parser(files)
    for file in files:
        path = client_mount_path(file, client)
        def condition():
            ret = rm(client, path, user=user)
            save_op_code(context, user, ret)
            return ret == 0
        repeat_until(condition, timeout=client.timeout)
@when(parsers.parse('{user} changes {file} mode to {mode} on {client_node}'))
@then(parsers.parse('{user} changes {file} mode to {mode} on {client_node}'))
def change_mode(user, file, mode, client_node, context):
    """chmod *file* to *mode* as *user*, retrying until success or timeout.

    The result is intentionally not asserted: the return code is saved so
    a later step can check whether chmod succeeded or failed.
    """
    client = get_client(client_node, user, context)
    # parsed step argument may arrive as int; stat/chmod expect a string
    mode = str(mode)
    file_path = client_mount_path(file, client)
    def condition():
        cmd_return_code = chmod(client, mode, file_path, user)
        save_op_code(context, user, cmd_return_code)
        return cmd_return_code == 0
    repeat_until(condition, client.timeout)
@then(parsers.parse('file type of {user}\'s {file} is {file_type} on {client_node}'))
def check_type(user, file, file_type, client_node, context):
    """Assert that stat reports *file_type* (stat %F) for *file*."""
    client = get_client(client_node, user, context)
    file_path = client_mount_path(file, client)
    check_using_stat(user, client, file_path, 'file type', file_type)
@when(parsers.parse('mode of {user}\'s {file} is {mode} on {client_node}'))
@then(parsers.parse('mode of {user}\'s {file} is {mode} on {client_node}'))
def check_mode(user, file, mode, client_node, context):
    """Assert that stat reports octal *mode* (stat %a) for *file*."""
    client = get_client(client_node, user, context)
    # stat output is textual, so compare against the string form
    mode = str(mode)
    file_path = client_mount_path(file, client)
    check_using_stat(user, client, file_path, 'mode', mode)
@when(parsers.parse('size of {user}\'s {file} is {size} bytes on {client_node}'))
@then(parsers.parse('size of {user}\'s {file} is {size} bytes on {client_node}'))
def check_size(user, file, size, client_node, context):
    """Assert that stat reports *size* bytes (stat %s) for *file*."""
    client = get_client(client_node, user, context)
    file_path = client_mount_path(file, client)
    # stat output is textual, so compare against the string form
    size = str(size)
    check_using_stat(user, client, file_path, 'size', size)
@then(parsers.parse('{time1} time of {user}\'s {file} is {comparator} to {time2} time on {client_node}'))
@then(parsers.parse('{time1} time of {user}\'s {file} is {comparator} than {time2} time on {client_node}'))
def check_time(user, time1, time2, comparator, file, client_node, context):
    """Compare two of *file*'s timestamps (access/modification/status-change).

    Retries until the *comparator* relation holds between the two epoch
    timestamps or the client timeout elapses.
    """
    client = get_client(client_node, user, context)
    opt1 = get_stat_option(time1)
    opt2 = get_stat_option(time2)
    file_path = client_mount_path(file, client)
    def condition():
        try:
            # one stat call fetches both timestamps, e.g. format "%X %Y"
            times = stat(client, file_path, user=user,
                         format="{t1} {t2}".format(t1=opt1, t2=opt2))
            times = times.split(" ")
            return compare(int(times[0]), int(times[1]), comparator)
        except subprocess.CalledProcessError:
            return False
    assert repeat_until(condition, client.timeout)
################################################################################
def check_using_stat(user, client, file_path, parameter, expected_value):
    """Assert that stat's *parameter* for *file_path* equals *expected_value*.

    Retries until stat succeeds and matches, or the client timeout elapses.
    """
    opt = get_stat_option(parameter)
    def condition():
        try:
            cmd_output = stat(client, file_path, format=opt, user=user)
            return cmd_output == expected_value
        except subprocess.CalledProcessError:
            # file may not be visible yet (e.g. still replicating) - retry
            return False
    assert repeat_until(condition, client.timeout)
def get_timestamp(user, file, client, time_type):
    """Return *file*'s timestamp of *time_type* as reported by stat.

    *time_type* is one of 'access', 'modification', 'status-change'.
    """
    opt = get_stat_option(time_type)
    file_path = client_mount_path(file, client)
    return stat(client, file_path, format=opt, user=user)
def get_stat_option(parameter):
    """Map a human-readable stat attribute name to a stat(1) format code.

    Raises KeyError for an unknown attribute name.
    """
    return {
        'access': '%X',
        'modification': '%Y',
        'status-change': '%Z',
        'file type': '%F',
        'mode': '%a',
        'size': '%s',
    }[parameter]
def compare(val1, val2, comparator):
    """Evaluate the relation named by *comparator* between *val1* and *val2*.

    Supported names: 'equal', 'not equal', 'greater', 'less',
    'not greater', 'not less'. Raises ValueError for anything else.
    """
    relations = {
        'equal': lambda a, b: a == b,
        'not equal': lambda a, b: a != b,
        'greater': lambda a, b: a > b,
        'less': lambda a, b: a < b,
        'not greater': lambda a, b: a <= b,
        'not less': lambda a, b: a >= b,
    }
    try:
        relation = relations[comparator]
    except KeyError:
        raise ValueError("Wrong argument comparator to function compare")
    return relation(val1, val2)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,721
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_reg_file_CRUD.py
|
"""Test suite for CRUD operations on regular files in onedata.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.auth_steps import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.dir_steps import *
from tests.cucumber.steps.file_steps import *
from tests.cucumber.steps.reg_file_steps import *
from tests.utils.path_utils import env_file
from pytest_bdd import scenario
import pytest
@pytest.fixture(scope="module",
                params=["singleprovider_singleclient_directio",
                        "singleprovider_singleclient_proxy"])
def env_description_file(request):
    """Yield the env description file for each single-client environment."""
    return env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)


@scenario(
    '../features/reg_file_CRUD.feature',
    'Create regular file'
)
def test_create(env_description_file):
    """Scenario: creating a regular file."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Rename regular file'
)
def test_rename(env_description_file):
    """Scenario: renaming a regular file."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Delete regular file'
)
def test_delete(env_description_file):
    """Scenario: deleting a regular file."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Read and write to regular file'
)
def test_read_write(env_description_file):
    """Scenario: reading and writing a regular file."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Append regular file'
)
def test_append(env_description_file):
    """Scenario: appending to a regular file."""
    pass


@pytest.mark.xfail_env(envs=["singleprovider_singleclient_directio",
                             "singleprovider_singleclient_proxy"],
                       reason="File disappears after replace")
@scenario(
    '../features/reg_file_CRUD.feature',
    'Replace word in file'
)
def test_replace(env_description_file):
    """Scenario: replacing a word inside a file (known failure)."""
    pass


@pytest.mark.xfail_env(envs=["singleprovider_singleclient_directio",
                             "singleprovider_singleclient_proxy"],
                       reason="Move fails")
@scenario(
    '../features/reg_file_CRUD.feature',
    'Move regular file and read'
)
def test_move(env_description_file):
    """Scenario: moving a regular file and reading it back (known failure)."""
    pass


@pytest.mark.xfail_env(envs=["singleprovider_singleclient_directio",
                             "singleprovider_singleclient_proxy"],
                       reason="Move fails")
@scenario(
    '../features/reg_file_CRUD.feature',
    'Move big regular file and check MD5'
)
def test_move_big(env_description_file):
    """Scenario: moving a big file and verifying its MD5 (known failure)."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Copy regular file and read'
)
def test_copy(env_description_file):
    """Scenario: copying a regular file and reading the copy."""
    pass


@scenario(
    '../features/reg_file_CRUD.feature',
    'Copy big regular file and check MD5'
)
def test_copy_big(env_description_file):
    """Scenario: copying a big file and verifying its MD5."""
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,722
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/multi_dir_steps.py
|
"""Module implements pytest-bdd steps for operations on directories in multiclient environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.utils.cucumber_utils import *
from tests.utils.client_utils import ls, rm, rmdir, mkdir, cp, client_mount_path, \
save_op_code, get_client
@when(parsers.parse('{user} creates directories {dirs} on {client_node}'))
@when(parsers.parse('{user} creates directories {dirs}\non {client_node}'))
def create(user, dirs, client_node, context):
    """Create each directory in *dirs* (non-recursive mkdir) as *user*.

    The return code is saved so a later step can assert success/failure.
    """
    dirs = list_parser(dirs)
    client = get_client(client_node, user, context)
    for dir in dirs:
        path = client_mount_path(dir, client)
        return_code = mkdir(client, path, user=user)
        save_op_code(context, user, return_code)
@when(parsers.parse('{user} creates directory and parents {paths} on {client_node}'))
@when(parsers.parse('{user} creates directory and parents {paths}\non {client_node}'))
def create_parents(user, paths, client_node, context):
    """Create each path in *paths* with missing parents (mkdir -p) as *user*.

    The return code is saved so a later step can assert success/failure.
    """
    client = get_client(client_node, user, context)
    paths = list_parser(paths)
    for path in paths:
        return_code = mkdir(client, client_mount_path(path, client), recursive=True, user=user)
        save_op_code(context, user, return_code)
@when(parsers.parse('{user} deletes empty directories {dirs} on {client_node}'))
def delete_empty(user, dirs, client_node, context):
    """Remove each (empty) directory in *dirs* with rmdir as *user*.

    The return code is saved so a later step can assert success/failure.
    """
    client = get_client(client_node, user, context)
    dirs = list_parser(dirs)
    for dir in dirs:
        path = client_mount_path(dir, client)
        ret = rmdir(client, path, user=user)
        save_op_code(context, user, ret)
@when(parsers.parse('{user} deletes non-empty directories {dirs} on {client_node}'))
def delete_non_empty(user, dirs, client_node, context):
    """Remove each directory in *dirs* recursively (rm -rf) as *user*.

    The return code is saved so a later step can assert success/failure.
    """
    client = get_client(client_node, user, context)
    dirs = list_parser(dirs)
    for dir in dirs:
        path = client_mount_path(dir, client)
        ret = rm(client, path, recursive=True, force=True, user=user)
        save_op_code(context, user, ret)
@when(parsers.parse('{user} deletes empty directory and parents {paths} on ' +
                    '{client_node}'))
def delete_parents(user, paths, client_node, context):
    """Remove each directory in *paths* along with its (empty) parents.

    NOTE(review): unlike the other steps, the path is passed relative with
    from_path=client.mount_path - presumably rmdir -p semantics run from
    the mount point; confirm against client_utils.rmdir.
    """
    client = get_client(client_node, user, context)
    paths = list_parser(paths)
    for path in paths:
        ret = rmdir(client, str(path), recursive=True,
                    from_path=client.mount_path, user=user)
        save_op_code(context, user, ret)
@when(parsers.parse('{user} copies directory {dir1} to {dir2} on {client_node}'))
def copy_dir(user, dir1, dir2, client_node, context):
    """Copy directory *dir1* to *dir2* recursively (cp -r) as *user*.

    The return code is saved so a later step can assert success/failure.
    """
    client = get_client(client_node, user, context)
    src_path = client_mount_path(dir1, client)
    dest_path = client_mount_path(dir2, client)
    ret = cp(client, src_path, dest_path, recursive=True, user=user)
    save_op_code(context, user, ret)
@when(parsers.parse('{user} can\'t list {dir} on {client_node}'))
@then(parsers.parse('{user} can\'t list {dir} on {client_node}'))
def cannot_list_dir(user, dir, client_node, context):
    """Assert that listing *dir* keeps failing for *user* until timeout."""
    client = get_client(client_node, user, context)
    path = client_mount_path(dir, client)
    def condition():
        try:
            ls(client, user=user, path=path)
            return False
        # Narrowed from a bare ``except:``, which would also swallow
        # KeyboardInterrupt/SystemExit and make the test run unkillable.
        except Exception:
            return True
    assert repeat_until(condition, client.timeout)
@when(parsers.parse('{user} can list {dir} on {client_node}'))
@then(parsers.parse('{user} can list {dir} on {client_node}'))
def list_dir(user, dir, client_node, context):
    """Assert that listing *dir* eventually succeeds for *user*."""
    client = get_client(client_node, user, context)
    path = client_mount_path(dir, client)
    def condition():
        try:
            ls(client, user=user, path=path)
            return True
        # Narrowed from a bare ``except:``, which would also swallow
        # KeyboardInterrupt/SystemExit and make the test run unkillable.
        except Exception:
            return False
    assert repeat_until(condition, client.timeout)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,723
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_multi_authorization.py
|
"""Test suite for authorization and mounting onedata client,
in multi-client environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.cucumber.steps.env_steps import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.utils.path_utils import env_file
from pytest_bdd import scenario
import pytest
@pytest.fixture(scope="module", params=["multiclient_authorization"])
def env_description_file(request):
    """Resolve the environment description file used by this module's scenarios."""
    env_path = env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)
    return env_path
# pytest-bdd scenario bindings: the step implementations are provided by the
# star-imported step modules, so each test body is intentionally empty.
@scenario(
    '../features/multi_authorization.feature',
    'Successful authorization - 1 client per user',
)
def test_successful_authorization1(env_description_file):
    pass


@scenario(
    '../features/multi_authorization.feature',
    'Successful authorization - 2 clients of one user',
)
def test_successful_authorization2(env_description_file):
    pass


@scenario(
    '../features/multi_authorization.feature',
    'Successful authorization - 2 clients of one user on different hosts',
)
def test_successful_authorization3(env_description_file):
    pass


@scenario(
    '../features/multi_authorization.feature',
    'Bad and good authorization',
)
def test_good_and_bad_authorization(env_description_file):
    pass


@scenario(
    '../features/multi_authorization.feature',
    'Bad authorization',
)
def test_bad_authorization(env_description_file):
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,724
|
RoseySoft/onedata
|
refs/heads/master
|
/docker/run.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import os
import time
import re
import shutil
import socket
import subprocess as sp
import sys
# Mount point of the docker volume that keeps state across container restarts.
ROOT = '/volumes/persistency'
# Directories whose contents must survive restarts: on startup they are
# mirrored under ROOT, removed, and replaced by symlinks (see
# copy_missing_files / remove_dirs / link_dirs below).
DIRS = ['/etc/op_panel', '/etc/op_worker', '/etc/cluster_manager',
        '/etc/rc.d/init.d', '/var/lib/op_panel', '/var/lib/op_worker',
        '/var/lib/cluster_manager', '/usr/lib64/op_panel',
        '/opt/couchbase/var/lib/couchbase', '/var/log/op_panel',
        '/var/log/op_worker', '/var/log/cluster_manager']
def log(message, end='\n'):
    """Write *message* followed by *end* to stdout and flush immediately."""
    sys.stdout.write('%s%s' % (message, end))
    sys.stdout.flush()
def replace(file_path, pattern, value):
    """Substitute every regex match of *pattern* in the file with *value*, in place.

    Opens the file for reading and updating with mode 'r+'.  The original
    code used the non-standard mode 'rw+', which Python 3 rejects with a
    ValueError (Python 2 silently treated it as 'r+').
    """
    with open(file_path, 'r+') as f:
        content = re.sub(pattern, value, f.read())
        f.seek(0)
        f.truncate()
        f.write(content)
def copy_missing_files():
    """Seed the persistent volume with any files missing under ROOT.

    Walks every directory in DIRS and mirrors its tree under ROOT, preserving
    the uid/gid of the originals.  Files already present in the volume are
    left untouched, so state from previous runs wins over image defaults.
    """
    for rootdir in DIRS:
        for subdir, _, files in os.walk(rootdir):
            # subdir[1:] drops the leading '/' so the path nests under ROOT
            subdir_path = os.path.join(ROOT, subdir[1:])
            if not os.path.exists(subdir_path):
                stat = os.stat(subdir)
                os.makedirs(subdir_path)
                os.chown(subdir_path, stat.st_uid, stat.st_gid)
            for f in files:
                source_path = os.path.join(subdir, f)
                dest_path = os.path.join(subdir_path, f)
                if not os.path.exists(dest_path):
                    stat = os.stat(source_path)
                    shutil.copy(source_path, dest_path)
                    # shutil.copy does not preserve ownership; restore it
                    os.chown(dest_path, stat.st_uid, stat.st_gid)
def remove_dirs():
    """Delete every original directory that has not yet been replaced by a symlink."""
    for path in DIRS:
        if os.path.islink(path):
            continue
        shutil.rmtree(path)
def link_dirs():
    """Symlink each directory in DIRS to its persisted counterpart under ROOT."""
    for target in DIRS:
        if os.path.islink(target):
            continue
        os.symlink(os.path.join(ROOT, target[1:]), target)
def set_node_name(file_path):
    """Rewrite the Erlang VM '-name' entry in *file_path* to use this host's FQDN."""
    node_name = 'onepanel@{0}'.format(socket.getfqdn())
    replace(file_path, r'-name .*', '-name ' + node_name)
def set_multicast_address(file_path, multicast_address):
    """Point the config's ``{multicast_address, ...}`` tuple at the given address."""
    new_entry = '{{multicast_address, "{0}"}}'.format(multicast_address)
    replace(file_path, r'{multicast_address, .*}', new_entry)
def start_service(service_name, stdout=None):
    """Start a system service via the 'service' command, discarding its stderr."""
    with open(os.devnull, 'w') as devnull:
        sp.check_call(['service', service_name, 'start'],
                      stdout=stdout, stderr=devnull)
def start_services():
    """Bring up couchbase, the cluster manager and the worker, in that order."""
    log('Starting couchbase-server: ', '')
    with open(os.devnull, 'w') as devnull:
        start_service('couchbase-server', devnull)
    log('[ OK ]')
    start_service('cluster_manager')
    # give the cluster manager a moment to come up before the worker connects
    time.sleep(5)
    start_service('op_worker')
    log('\nCongratulations! oneprovider has been successfully started.')
def is_configured():
    """Return True when op_panel_admin reports no 'undefined' config entries."""
    output = sp.check_output(['op_panel_admin', '--config'])
    return 'undefined' not in output
def get_container_id():
    """Extract this container's id from the first /proc/self/cgroup entry."""
    with open('/proc/self/cgroup', 'r') as f:
        first_line = f.readline().rstrip('\n')
    return first_line.split('/')[-1]
def inspect_container(container_id):
    """Ask the local docker daemon for the container's JSON description.

    Returns an empty dict when the docker socket is unreachable or the
    response cannot be parsed.
    """
    url = 'http:/containers/{0}/json'.format(container_id)
    try:
        raw = sp.check_output(['curl', '-s', '--unix-socket',
                               '/var/run/docker.sock', url])
        return json.loads(raw)
    except Exception:
        return {}
def show_ip_address(container_info):
    """Log the container's IP address.

    Prefers the address docker reports for the first network, falling back to
    ``hostname -i`` (or '-') when unavailable.  The original body referenced
    an undefined name ``j`` (its parameter was named ``json``, shadowing the
    module), so the docker-reported address was never actually used -- the
    NameError was silently swallowed by ``except Exception``.
    """
    ip = '-'
    try:
        ip = sp.check_output(['hostname', '-i']).rstrip('\n')
        # first network's address; .items()[0] relies on Python 2 dict views
        ip = container_info['NetworkSettings']['Networks'].items()[0][1]['IPAddress']
    except Exception:
        pass
    log('* IP Address: {0}'.format(ip))
def show_ports(container_info):
    """Log the container's port mappings, one mapping per line.

    The parameter was previously named ``json``, shadowing the imported
    ``json`` module; renamed (the only call site passes it positionally).
    """
    ports = container_info.get('NetworkSettings', {}).get('Ports', {})
    ports_format = []
    for container_port in ports:
        host = ports[container_port]
        if host:
            for host_port in host:
                ports_format.append('{0}:{1} -> {2}'.format(
                    host_port['HostIp'], host_port['HostPort'],
                    container_port))
        else:
            # port exposed by the image but not published to the host
            ports_format.append(container_port)
    ports_str = '\n '.join(ports_format) if ports_format else '-'
    log('* Ports: {0}'.format(ports_str))
def show_details():
    """Log the container's runtime details: IP address and port mappings."""
    log('\nContainer details:')
    # local was previously named 'json', shadowing the imported module
    info = inspect_container(get_container_id())
    show_ip_address(info)
    show_ports(info)
def infinite_loop():
    # Block forever so the container's entrypoint never exits; the services
    # started earlier keep running in the background.
    while True:
        time.sleep(1)
if __name__ == '__main__':
    # Move mutable state into the persistent volume, then symlink it back.
    copy_missing_files()
    remove_dirs()
    link_dirs()
    set_node_name('/etc/op_panel/vm.args')
    multicast_address = os.environ.get('ONEPANEL_MULTICAST_ADDRESS')
    if multicast_address:
        set_multicast_address('/etc/op_panel/app.config', multicast_address)
    start_service('op_panel')
    if is_configured():
        # Already installed on a previous run: just start everything.
        start_services()
    else:
        batch_mode = os.environ.get('ONEPANEL_BATCH_MODE')
        # fixed typo: local was previously named 'batch_cofig'
        batch_config = os.environ.get('ONEPANEL_BATCH_MODE_CONFIG', '')
        if batch_mode and batch_mode.lower() == 'true':
            sp.check_call(['op_panel_admin', '--install', batch_config])
    show_details()
    infinite_loop()
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,725
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_singleprovider_space_management.py
|
"""Test suite for space management in onedata in singleprovider environment
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.cucumber.steps.spaces_steps import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.auth_steps import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.cucumber.steps.user_steps import *
from tests.cucumber.steps.multi_file_steps import *
from tests.cucumber.steps.multi_reg_file_steps import *
from tests.cucumber.steps.multi_dir_steps import *
from tests.utils.cucumber_utils import *
from tests.utils.path_utils import env_file
from pytest_bdd import scenario
from functools import partial
import pytest
# Bind the feature file once, so each @scenario below only names its scenario.
scenario = partial(scenario,
                   '../features/singleprovider_space_management.feature')


@pytest.fixture(scope="module", params=["singleprovider_space_management"])
def env_description_file(request):
    # Environment description consumed by the environment-setup steps.
    return env_file(CUSTOM_CUCUMBER_ENV_DIR, request.param)
# pytest-bdd scenario bindings: step implementations come from the imported
# step modules, so the test bodies are intentionally empty.
@scenario('Create space and don\'t support it')
def test_create_space_no_support(env_description_file):
    pass


@scenario('Create space and support it')
def test_create_space_support(env_description_file):
    pass


@pytest.mark.xfail_env(envs=["singleprovider_space_management"],
                       reason="space owner cannot read what invited user wrote to file")
@scenario('Invite user to unused space')
def test_invite(env_description_file):
    pass


@scenario('Remove user from space')
def test_remove_user(env_description_file):
    pass


@scenario('Delete supported space')
def test_delete_space(env_description_file):
    pass


@scenario('Exceed quota')
def test_exceed_quota(env_description_file):
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,726
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/auth_steps.py
|
"""Module implements pytest-bdd steps for authorization and mounting oneclient.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import multi_auth_steps
from tests.utils.cucumber_utils import *
from tests.utils.client_utils import mount_users
from pytest_bdd import given
@given(parsers.parse('{user} starts oneclient in {mount_path} using {token}'))
def default_mount(user, mount_path, token, request, onedata_environment, context,
                  client_ids, env_description_file):
    """Mount oneclient for one user on the default host ('client-host1')."""
    mount_users(request, onedata_environment, context, client_ids,
                env_description_file, users=[user],
                client_instances=["client1"], mount_paths=[mount_path],
                client_hosts=['client-host1'], tokens=[token])
@when(parsers.parse('{spaces} is mounted for {user}'))
@then(parsers.parse('{spaces} is mounted for {user}'))
@when(parsers.parse('{spaces} are mounted for {user}'))
@then(parsers.parse('{spaces} are mounted for {user}'))
def check_spaces(spaces, user, context):
    """Delegate to the multi-client step, fixed to the 'client1' instance."""
    clients = make_arg_list("client1")
    multi_auth_steps.check_spaces(spaces, user, clients, context)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,727
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/steps/file_steps.py
|
"""Module implements common steps for operation on files (both regular files
and directories).
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
import multi_file_steps
from tests.utils.cucumber_utils import *
# Single-client wrappers: each step delegates to the multi-client
# implementation, always targeting client instance "client1".

# NOTE(review): the 'updates {files} timestamps' phrasing is bound to
# create_reg_file -- presumably the underlying step touches the files;
# confirm against multi_file_steps.create_reg_file.
@when(parsers.parse('{user} updates {files} timestamps'))
@when(parsers.parse('{user} creates regular files {files}'))
@then(parsers.parse('{user} creates regular files {files}'))
def create_reg_file(user, files, context):
    multi_file_steps.create_reg_file(user, files, "client1", context)


@when(parsers.parse('{user} sees {files} in {path}'))
@then(parsers.parse('{user} sees {files} in {path}'))
def ls_present(user, files, path, context):
    multi_file_steps.ls_present(user, files, path, "client1", context)


@when(parsers.parse('{user} doesn\'t see {files} in {path}'))
@then(parsers.parse('{user} doesn\'t see {files} in {path}'))
def ls_absent(user, files, path, context):
    multi_file_steps.ls_absent(user, files, path, "client1", context)


@when(parsers.parse('{user} renames {file1} to {file2}'))
def rename(user, file1, file2, context):
    multi_file_steps.rename(user, file1, file2, "client1", context)


@when(parsers.parse('{user} deletes files {files}'))
def delete_file(user, files, context):
    multi_file_steps.delete_file(user, files, "client1", context)


@then(parsers.parse('file type of {user}\'s {file} is {fileType}'))
def check_type(user, file, fileType, context):
    multi_file_steps.check_type(user, file, fileType, "client1", context)
@then(parsers.parse('mode of {user}\'s {file} is {mode}'))
def check_mode(user, file, mode, context):
    """Verify the file's mode on client1.

    Bug fix: this step previously delegated to multi_file_steps.change_mode,
    so the assertion step silently *changed* the mode instead of checking it.
    Assumes multi_file_steps exposes check_mode (mirroring change_mode) --
    verify against that module.
    """
    multi_file_steps.check_mode(user, file, mode, "client1", context)
# Single-client wrappers delegating to the multi-client steps on "client1".
@then(parsers.parse('{user} changes {file} mode to {mode}'))
@when(parsers.parse('{user} changes {file} mode to {mode}'))
def change_mode(user, file, mode, context):
    multi_file_steps.change_mode(user, file, mode, "client1", context)


@when(parsers.parse('size of {user}\'s {file} is {size} bytes'))
@then(parsers.parse('size of {user}\'s {file} is {size} bytes'))
def check_size(user, file, size, context):
    multi_file_steps.check_size(user, file, size, "client1", context)


@then(parsers.parse('{time1} time of {user}\'s {file} is {comparator} than {time2} time'))
@then(parsers.parse('{time1} time of {user}\'s {file} is {comparator} to {time2} time'))
def check_time(user, time1, time2, comparator, file, context):
    multi_file_steps.check_time(user, time1, time2, comparator, file, "client1",
                                context)
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,728
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_multi_directory_CRUD.py
|
"""Test suite for CRUD operations on directories in onedata,
in multi-client environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.cucumber.steps.env_steps import *
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.cucumber.steps.multi_dir_steps import *
from tests.cucumber.steps.multi_file_steps import *
from pytest_bdd import scenario
import pytest
# pytest-bdd scenario bindings for multi_directory_CRUD.feature; the step
# implementations come from the imported step modules, so bodies are empty.
@scenario(
    '../features/multi_directory_CRUD.feature',
    'Create directory'
)
def test_create(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Rename someone\'s directory without permission'
)
def test_rename_someone_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Rename someone\'s directory with permission'
)
def test_rename_someone_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Rename own directory'
)
def test_rename_own(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete someone\'s empty directory'
)
def test_delete_someone(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete own empty directory'
)
def test_delete_own(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'List directory without read permission'
)
def test_list_dir_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Create file in directory without write permission'
)
def test_create_subfile_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Create file in directory with write permission'
)
def test_create_subfile_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete file in directory without write permission'
)
def test_delete_subfile_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete file in directory with write permission'
)
def test_delete_subfile_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Rename file in directory without write permission'
)
def test_rename_subfile_without_permission(env_description_file):
    pass


# NOTE(review): this binds the same scenario name as the test above
# ('... without write permission') although the function is named
# *with*_permission -- likely a copy-paste error; verify the feature file
# contains a 'Rename file in directory with write permission' scenario
# before changing the string.
@scenario(
    '../features/multi_directory_CRUD.feature',
    'Rename file in directory without write permission'
)
def test_rename_subfile_with_permission(env_description_file):
    pass
# TODO VFS-1824
@pytest.mark.xfail_env(
    envs=["singleprovider_multiclient_directio",
          "singleprovider_multiclient_proxy",
          "multiprovider_proxy",
          "multiprovider_directio"],
    reason="u2 is unable to create direcory with the same name "
           "although first one was deleted")
@scenario(
    '../features/multi_directory_CRUD.feature',
    'Recreate directory deleted by other user'
)
def test_recreate(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Child directories'
)
def test_children(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Child directories 2'
)
def test_children2(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Duplication'
)
def test_duplication(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete empty directory and parents'
)
def test_delete_parents(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete non-empty directory in wrong way'
)
def test_delete_non_empty_wrong(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Delete non-empty directory'
)
def test_delete_non_empty(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Move directory'
)
def test_move(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Copy directory'
)
def test_copy(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Move directory to itself'
)
def test_move_to_itself(env_description_file):
    pass


@scenario(
    '../features/multi_directory_CRUD.feature',
    'Move directory to its subtree'
)
def test_move_to_subtree(env_description_file):
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,907,729
|
RoseySoft/onedata
|
refs/heads/master
|
/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py
|
"""Test suite for CRUD operations on regular files in onedata,
in multi-client environment.
"""
__author__ = "Jakub Kudzia"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests.utils.cucumber_utils import *
from tests.cucumber.steps.env_steps import *
from tests.cucumber.steps.multi_auth_steps import *
from tests.cucumber.steps.multi_dir_steps import *
from tests.cucumber.steps.multi_file_steps import *
from tests.cucumber.steps.multi_reg_file_steps import *
from pytest_bdd import scenario
import pytest
# Scenario bindings: each function below only binds a Gherkin scenario from
# multi_reg_file_CRUD.feature to pytest via pytest-bdd. The steps live in the
# imported step modules, so the function bodies are intentionally empty.
@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Create regular file'
)
def test_create(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Rename regular file without permission'
)
def test_rename_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Rename regular file with permission'
)
def test_rename_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Delete regular file by owner'
)
def test_delete_by_owner(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Delete regular file by other user'
)
def test_delete_by_other_user(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Read and write to regular file'
)
def test_read_write(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Read regular file without read permission'
)
def test_read_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Write to regular file with write permission'
)
def test_write_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Write to regular file without write permission'
)
def test_write_without_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Execute file with execute permission'
)
def test_execute_with_permission(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Execute file without execute permission'
)
def test_execute_without_permission(env_description_file):
    pass


# The move/copy tests below are expected to fail in the listed environments
# while the move operation is being reimplemented (see xfail reasons).
# TODO
@pytest.mark.xfail_env(
    envs=["singleprovider_multiclient_directio",
          "singleprovider_multiclient_proxy",
          "multiprovider_proxy",
          "multiprovider_directio"],
    reason="move is being reimplemented")
@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Move regular file and read'
)
def test_move(env_description_file):
    pass


# TODO
@pytest.mark.xfail_env(
    envs=["singleprovider_multiclient_directio",
          "singleprovider_multiclient_proxy",
          "multiprovider_proxy",
          "multiprovider_directio"],
    reason="move is being reimplemented")
@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Move big regular file and check MD5'
)
def test_move_big(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Copy regular file and read'
)
def test_copy(env_description_file):
    pass


@scenario(
    '../features/multi_reg_file_CRUD.feature',
    'Copy big regular file and check MD5'
)
def test_copy_big(env_description_file):
    pass
|
{"/tests/gui/steps/oneprovider_data.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/oneprovider_spaces.py": ["/tests/gui/steps/common.py"], "/tests/gui/steps/onezone_logged_in_common.py": ["/tests/gui/steps/common.py"], "/tests/cucumber/steps/multi_reg_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multiprovider_replication.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_auth_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_stat.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"], "/tests/cucumber/scenarios/test_directory_stat.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/scenarios/test_luma_proxy.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_file_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_reg_file_CRUD.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/file_steps.py"], "/tests/cucumber/steps/multi_dir_steps.py": ["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_authorization.py": ["/tests/cucumber/steps/multi_auth_steps.py"], "/tests/cucumber/scenarios/test_singleprovider_space_management.py": ["/tests/cucumber/steps/auth_steps.py", "/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py", "/tests/cucumber/steps/multi_dir_steps.py"], "/tests/cucumber/steps/auth_steps.py": 
["/tests/utils/client_utils.py"], "/tests/cucumber/scenarios/test_multi_directory_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py"], "/tests/cucumber/scenarios/test_multi_reg_file_CRUD.py": ["/tests/cucumber/steps/multi_auth_steps.py", "/tests/cucumber/steps/multi_dir_steps.py", "/tests/cucumber/steps/multi_file_steps.py", "/tests/cucumber/steps/multi_reg_file_steps.py"]}
|
34,929,233
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/models/lcn.py
|
import torch
import torch.nn as nn
from torch.nn.modules.utils import _pair
from math import sqrt
from torch.nn import init
from .. import utils
from collections.abc import Iterable
conv_output_shape = utils.conv_output_shape
def activation_func(activation):
    """Return a fresh activation module for *activation* ('relu',
    'leaky_relu' or 'selu'); unknown names raise KeyError."""
    table = nn.ModuleDict({
        'relu': nn.ReLU(inplace=True),
        'leaky_relu': nn.LeakyReLU(negative_slope=0.01, inplace=True),
        'selu': nn.SELU(inplace=True),
    })
    return table[activation]
class LCN(nn.Module):
    """Locally connected network: stacked Conv2d_Local blocks, an optional
    global pooling ("invariance reduction"), and a linear/MLP decoder.

    Args:
        in_channels: channels of the input image.
        out_channels: number of readout units (classes).
        h, w: input height and width.
        nfilters: filters per local layer; an iterable builds one layer per entry.
        hidden: optional decoder hidden width(s); int or iterable.
        kernel_size, stride, padding, bias: Conv2d_Local settings.
        activation: name understood by activation_func.
        readout_activation: optional module applied to the decoder output.
        invar_reduction: None, "max" or "mean" global pooling before the decoder.
    """

    def __init__(self, in_channels=1, out_channels=10, h=280, w=280, nfilters=10, hidden=None,
                 kernel_size=28, stride=1, activation='relu', readout_activation=None,
                 padding=0, bias=True, invar_reduction=None, *args, **kwargs):
        super().__init__()
        self.activation = activation_func(activation)
        self.readout_activation = readout_activation
        self.nfilters = nfilters
        self.out_channels = out_channels
        self.invar_reduction = invar_reduction
        if isinstance(nfilters, Iterable):
            convlayers = []
            # NOTE: `nfilters` is deliberately rebound to the last layer's
            # width so the decoder below is sized from the final feature map.
            for nfilters, channels in zip(nfilters, [in_channels, *nfilters]):
                convlayers.append(Conv2d_Local(channels, nfilters, kernel_size=kernel_size, h=h, w=w,
                                               stride=stride, padding=padding, bias=bias))
                convlayers.append(self.activation)
                h, w = conv_output_shape(h_w=(h, w), kernel_size=kernel_size, stride=stride, padding=padding)
            self.conv_blocks = nn.Sequential(*convlayers)
        else:
            # Fix: this branch previously hard-coded padding=0 and bias=True,
            # silently ignoring the constructor arguments and disagreeing with
            # the conv_output_shape call below (which used `padding`).
            self.conv_blocks = nn.Sequential(Conv2d_Local(in_channels, nfilters, h=h, w=w,
                                                          kernel_size=kernel_size, stride=stride,
                                                          padding=padding, bias=bias),
                                             self.activation)
            h, w = conv_output_shape(h_w=(h, w), kernel_size=kernel_size, stride=stride, padding=padding)
        if invar_reduction == "max":
            self.reduction = torch.nn.AdaptiveMaxPool2d((1, 1))
            h, w = 1, 1
        elif invar_reduction == "mean":
            self.reduction = torch.nn.AdaptiveAvgPool2d((1, 1))
            h, w = 1, 1
        if hidden is not None:
            if not isinstance(hidden, Iterable):
                hidden = [hidden]
            hidden = [h * w * nfilters, *hidden]
            layers = []
            for i in range(len(hidden) - 1):
                layers.append(nn.Linear(hidden[i], hidden[i + 1]))
                layers.append(self.activation)
            self.decoder = nn.Sequential(*layers, nn.Linear(hidden[-1], out_channels))
        else:
            self.decoder = nn.Linear(h * w * nfilters, out_channels)

    def forward(self, x):
        """Run conv blocks, optional pooling, flatten, decode."""
        x = self.conv_blocks(x)
        if self.invar_reduction is not None:
            x = self.reduction(x)
        x = x.view(x.size(0), -1)
        x = self.decoder(x)
        if self.readout_activation is not None:
            x = self.readout_activation(x)
        return x
class Conv2d_Local(nn.Module):
    """2-D locally connected layer: like Conv2d, but with an independent
    (unshared) kernel at every output location.

    weight has shape (H_out*W_out*nfilters, in_channels, kh, kw) — one kernel
    per (location, filter) pair. bias, when present, has shape
    (H_out*W_out*nfilters, in_channels).
    """
    def __init__(self, in_channels=1, nfilters=10, h=280, w=280, kernel_size=28, stride=28, padding=0, bias=True):
        super().__init__()
        # Output spatial extent. NOTE(review): computed WITHOUT the padding
        # argument, although forward() does pad — confirm this mismatch is
        # intended (it is consistent only for padding == 0).
        self.height_span, self.width_span = conv_output_shape(
            h_w=(h, w), kernel_size=kernel_size, stride=stride)
        self.weight = nn.Parameter(
            torch.Tensor(self.width_span*self.height_span*nfilters,
                         in_channels, kernel_size, kernel_size)
        )
        if bias:
            self.bias = nn.Parameter(
                torch.Tensor(self.width_span*self.height_span *
                             nfilters, in_channels)
            )
        else:
            self.register_parameter('bias', None)
        self.kernel_size = _pair(kernel_size)
        self.stride = _pair(stride)
        self.in_channels = in_channels
        self.nfilters = nfilters
        self.pad = padding
        # Same fan-in based initialisation scheme nn.Conv2d uses.
        init.kaiming_uniform_(self.weight, a=sqrt(5))
        if self.bias is not None:
            fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
            bound = 1 / sqrt(fan_in)
            init.uniform_(self.bias, -bound, bound)

    def forward(self, x):
        # x: (batch, in_channels, h, w)
        _, c, h, w = x.size()
        x = nn.functional.pad(
            x, (self.pad, self.pad, self.pad, self.pad), 'constant', 0)
        kh, kw = self.kernel_size
        dh, dw = self.stride
        # Extract sliding patches -> (batch, C, H_out, W_out, kh, kw).
        x = x.unfold(2, kh, dh)
        x = x.unfold(3, kw, dw)
        x = x.reshape(x.size(0), -1, self.in_channels, kh, kw)
        # Tile the patches once per filter so every (location, filter) kernel
        # multiplies its own copy, then contract over the kernel window.
        x = x.repeat(1, self.nfilters, 1, 1, 1)
        x = (x * self.weight).sum([-1, -2])
        if self.bias is not None:
            x += self.bias
        # NOTE(review): the in_channels dimension is never summed, so this
        # view only yields (batch, nfilters, H_out, W_out) cleanly when
        # in_channels == 1 — confirm intended behaviour for multi-channel input.
        x = x.view(-1, self.nfilters, self.height_span, self.width_span)
        return x
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,234
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/__init__.py
|
# Public package API: re-export model classes and training utilities so
# callers can write e.g. `from sym import LCN`.
# Fix: FCN is exported by sym.models but was missing here — added for
# consistency with sym/models/__init__.py (backward compatible).
from .models import LCN, CNN, FCN, Model
from .utils import Augment, ProgressBar, Classification_report, Logger

__all__ = ["LCN", "CNN", "FCN", "Model", "Augment",
           "ProgressBar", "Classification_report", "Logger"]
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,235
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/utils/utils.py
|
import torch
import pytorch_lightning as pl
from tqdm import tqdm
import sys
from sklearn.metrics import classification_report
from pytorch_lightning.utilities import rank_zero_only
from torch.utils.tensorboard.summary import hparams
from typing import Any, Dict, Optional, Union
from math import floor
import numpy as np
def conv_output_shape(h_w, kernel_size=1, stride=1, padding=0, dilation=1):
    """Return the (h, w) output size of a 2-D convolution.

    Uses the standard torch.nn.Conv2d formula. Generalized (backward
    compatibly) so that stride, padding and dilation may also be given as
    (h, w) tuples, matching the existing tuple support for kernel_size.
    """
    def as_pair(v):
        # int -> same value on both axes; tuple passed through unchanged.
        return v if isinstance(v, tuple) else (v, v)

    kh, kw = as_pair(kernel_size)
    sh, sw = as_pair(stride)
    ph, pw = as_pair(padding)
    dh, dw = as_pair(dilation)
    h = floor((h_w[0] + 2 * ph - dh * (kh - 1) - 1) / sh + 1)
    w = floor((h_w[1] + 2 * pw - dw * (kw - 1) - 1) / sw + 1)
    return h, w
@torch.jit.script
def Augment(imgs,diag:bool=False):
    """Place each image at a random 28x28 cell of a blank 280x280 canvas.

    A random row block i and column block j are drawn from [0, 10); when
    ``diag`` is True the column block is forced equal to the row block, so
    images are translated only along the main diagonal. Returns the stacked
    (N, 280, 280) tensor. Assumes each img is 28x28 — TODO confirm.
    """
    nums = []
    for img in imgs:
        i = torch.randint(0, 10, (1,)).item()
        j = torch.randint(0, 10, (1,)).item()
        istart = i * 28
        iend = (i+1) * 28
        if diag:
            # Diagonal-only translation: reuse the row offsets.
            jstart = istart
            jend = iend
        else:
            jstart = j * 28
            jend = (j+1) * 28
        zeros = torch.zeros(280, 280)
        zeros[istart:iend, jstart:jend] = img
        nums.append(zeros)
    return torch.stack(nums)
class ProgressBar(pl.callbacks.ProgressBar):
    """Lightning progress bar whose tqdm bars all force ascii=True so output
    stays readable in plain-text logs; layout otherwise mirrors the defaults."""

    def init_validation_tqdm(self):
        """Override this to customize the tqdm bar for validation."""
        bar = tqdm(
            desc='Validation ...',
            position=(2 * self.process_position),
            disable=self.is_disabled,
            leave=False,
            dynamic_ncols=True,
            file=sys.stdout,
            ascii=True)
        return bar

    def init_train_tqdm(self) -> tqdm:
        """Override this to customize the tqdm bar for training."""
        bar = tqdm(
            desc='Training',
            initial=self.train_batch_idx,
            position=(2 * self.process_position),
            disable=self.is_disabled,
            leave=True,
            dynamic_ncols=True,
            file=sys.stdout,
            smoothing=0,
            ascii=True)
        return bar

    def init_test_tqdm(self) -> tqdm:
        """ Override this to customize the tqdm bar for testing. """
        bar = tqdm(
            desc='Testing',
            position=(2 * self.process_position),
            disable=self.is_disabled,
            leave=True,
            dynamic_ncols=True,
            file=sys.stdout,
            ascii=True)
        return bar

    def init_sanity_tqdm(self) -> tqdm:
        """ Override this to customize the tqdm bar for the validation sanity run. """
        bar = tqdm(
            desc='Validation sanity check',
            position=(2 * self.process_position),
            disable=self.is_disabled,
            leave=False,
            dynamic_ncols=True,
            file=sys.stdout,
            ascii=True)
        return bar
def Classification_report(model):
    """Print an sklearn classification report for *model* on its own
    validation dataloader.

    The model is evaluated under torch.no_grad(); predictions are taken as
    the argmax over the model's output dimension.
    """
    with torch.no_grad():
        model.eval()
        # Fix: `device` was previously an undefined global name (NameError at
        # runtime). Infer the device from the model's own parameters instead.
        device = next(model.parameters()).device
        model.to(device)
        pred = []
        target = []
        for x, y in model.val_dataloader():
            x = x.to(device)
            pred.append(model(x).cpu().numpy())
            target.append(y.numpy())
        pred = np.concatenate(pred)
        target = np.concatenate(target)
        out = classification_report(target, pred.argmax(axis=1))
        print(out)
class Logger(pl.loggers.TensorBoardLogger):
    """TensorBoardLogger variant that also keeps the raw hyperparameters on
    ``self.hparams`` and always emits the TensorBoard hparams summaries."""

    def __init__(self, save_dir: str,
                 name: Union[str, None] = 'default',
                 version: Union[int, str, None] = None,
                 log_graph: bool = False,
                 default_hp_metric: bool = True,
                 **kwargs):
        super().__init__(save_dir, name, version, log_graph, default_hp_metric, **kwargs)

    @rank_zero_only
    def log_hyperparams(self, params, metrics=None):
        """Record *params* (and optional *metrics*) in TensorBoard.

        Runs on rank zero only. NOTE(review): relies on Lightning's private
        ``_flatten_dict`` / ``_sanitize_params`` helpers — may break across
        pytorch_lightning versions; confirm against the pinned version.
        """
        # store params to output
        self.hparams.update(params)
        # format params into the suitable for tensorboard
        params = self._flatten_dict(params)
        params = self._sanitize_params(params)
        if metrics is None:
            # TensorBoard's hparams plugin needs at least one metric to show
            # anything; fall back to the standard placeholder.
            if self._default_hp_metric:
                metrics = {"hp_metric": -1}
        elif not isinstance(metrics, dict):
            metrics = {"hp_metric": metrics}
        if metrics:
            # Write the three summary protos the hparams plugin expects.
            exp, ssi, sei = hparams(params, metrics)
            writer = self.experiment._get_file_writer()
            writer.add_summary(exp)
            writer.add_summary(ssi)
            writer.add_summary(sei)
def projBv(B, v):
    """Project the matrix B onto the direction of vector v:
    (v v^T) B / (v . v)."""
    outer_vv = torch.outer(v, v)
    return torch.matmul(outer_vv, B) / torch.dot(v, v)
def Misalignment(A, B):
    """Misalignment of B relative to the eigenbasis of A.

    For each eigenvector v of A, accumulates
    sqrt(trace(P_v B) * trace(P_v B^-1)) with P_v = (v v^T)/(v.v),
    and returns the sum minus dim(A); 0 when B is diagonal in A's eigenbasis.

    Fixes vs. the original:
    - `torch.eig` was removed from PyTorch; uses `torch.linalg.eig`.
    - eigenvectors are the *columns* of the returned matrix; the original
      iterated rows.
    """
    d = A.size(0)
    _, evecs = torch.linalg.eig(A)
    # assumes A has a real eigenbasis (e.g. symmetric A) — TODO confirm
    evecs = evecs.real
    Binv = torch.inverse(B)
    M = 0
    for k in range(d):
        v = evecs[:, k]
        denom = torch.dot(v, v)
        tr1 = torch.trace(torch.matmul(torch.outer(v, v), B)) / denom
        tr2 = torch.trace(torch.matmul(torch.outer(v, v), Binv)) / denom
        M += torch.sqrt(tr1 * tr2)
    return M - d
def Misalignment2(A, B):
    """Alternative misalignment measure of B w.r.t. the eigenbasis of A.

    For each (unit-norm) eigenvector v of A, accumulates
    sqrt((v.Bv)(v.B^-1 v)) - 1; zero when B is diagonal in A's eigenbasis.

    Fixes vs. the original:
    - `torch.eig` was removed from PyTorch; uses `torch.linalg.eig`.
    - eigenvectors are the *columns* of the returned matrix; the original
      iterated rows.
    """
    _, evecs = torch.linalg.eig(A)
    # assumes A has a real eigenbasis (e.g. symmetric A) — TODO confirm
    evecs = evecs.real
    Binv = torch.inverse(B)
    M = 0
    for k in range(A.size(0)):
        v = evecs[:, k]
        a = torch.dot(v, torch.matmul(B, v))
        b = torch.dot(v, torch.matmul(Binv, v))
        M += torch.sqrt(a * b) - 1
    return M
def cov_matrix(filters):
    """Return the Gram matrix F^T F of a filter bank.

    Inputs with more than two dimensions are flattened to
    (n_filters, features) first. Accepts numpy arrays or torch tensors.
    """
    if filters.ndim != 2:
        filters = filters.reshape(filters.shape[0], -1)
    # isinstance (instead of type() ==) also accepts subclasses such as
    # nn.Parameter for the torch branch.
    if isinstance(filters, np.ndarray):
        return np.matmul(filters.T, filters)
    if isinstance(filters, torch.Tensor):
        return torch.matmul(filters.T, filters)
    # Fix: this was misspelled `ValuerError`, which raised NameError instead
    # of the intended ValueError.
    raise ValueError("Input must be tensor or ndarray")
def minmaxnorm(x):
    """Linearly rescale x so its minimum maps to 0 and its maximum to 1."""
    lo = x.min()
    hi = x.max()
    return (x - lo) / (hi - lo)
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,236
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/models/fcn.py
|
import torch
import torch.nn as nn
from .. import utils
from collections.abc import Iterable
conv_output_shape = utils.conv_output_shape
def activation_func(activation):
    """Look up a fresh activation module by name; valid names are 'relu',
    'leaky_relu' and 'selu' (KeyError otherwise)."""
    modules = nn.ModuleDict({
        'relu': nn.ReLU(inplace=True),
        'leaky_relu': nn.LeakyReLU(negative_slope=0.01, inplace=True),
        'selu': nn.SELU(inplace=True),
    })
    return modules[activation]
class FCN(nn.Module):
    """Fully connected baseline with the same constructor interface as LCN.

    The input is flattened and fed through Linear+activation "main blocks",
    then an optional MLP decoder. kernel_size, stride and padding exist only
    for interface parity with the convolutional models and are unused.
    """
    def __init__(self, in_channels=1, out_channels=10, h=280, w=280, nfilters=10,hidden=None,
                 kernel_size=28, stride=1, activation='relu', readout_activation=None,
                 padding=0,bias=True, invar_reduction = None, *args, **kwargs):
        super().__init__()
        self.activation = activation_func(activation)
        self.readout_activation = readout_activation
        self.nfilters = nfilters
        self.out_channels = out_channels
        if invar_reduction is not None:
            raise ValueError("invariance reductions is not implemented yet for FCN. Set invar_reduction to None.")
        self.invar_reduction = invar_reduction
        if isinstance(nfilters, Iterable):
            mainlayers = []
            # `nfilters` is rebound to the last layer's width so the decoder
            # below is sized from the final features.
            for nfilters,channels in zip(nfilters,[in_channels*h*w,*nfilters]):
                mainlayers.append(nn.Linear(channels, nfilters))
                #mainlayers.append(nn.BatchNorm2d(nfilters))
                mainlayers.append(self.activation)
                # Would be used if truly wanted FCN embedding of a CNN
                #h,w = conv_output_shape(h_w=(h, w), kernel_size=kernel_size, stride=stride,padding=padding)
            self.main_blocks = nn.Sequential(*mainlayers)
        else:
            self.main_blocks = nn.Sequential(nn.Linear(in_channels*h*w, nfilters),
                                             #nn.BatchNorm2d(nfilters),
                                             self.activation)
            # h,w = conv_output_shape(h_w=(h, w), kernel_size=kernel_size, stride=stride,padding=padding)
        # NOTE(review): both branches below are unreachable — any non-None
        # invar_reduction already raised ValueError above. Kept for parity
        # with LCN until the reduction is actually implemented for FCN.
        if invar_reduction == "max":
            self.reduction = torch.nn.AdaptiveMaxPool2d((1,1))
            h,w = 1,1
        elif invar_reduction == "mean":
            self.reduction = torch.nn.AdaptiveAvgPool2d((1,1))
            h,w = 1,1
        if hidden is not None:
            if not isinstance(hidden, Iterable): hidden = [hidden]
            hidden = [nfilters, *hidden]
            layers = []
            for i in range(len(hidden)-1):
                layers.append(nn.Linear(hidden[i], hidden[i+1]))
                layers.append(self.activation)
            self.decoder = nn.Sequential(*layers, nn.Linear(hidden[-1], out_channels))
        else:
            self.decoder = nn.Linear(nfilters, out_channels)

    def forward(self, x):
        # Flatten all but the batch dimension before the dense stack.
        x = x.flatten(1)
        x = self.main_blocks(x)
        if self.invar_reduction is not None:
            x = self.reduction(x)
        x = x.view(x.size(0), -1)
        x = self.decoder(x)
        if self.readout_activation is not None:
            x = self.readout_activation(x)
        return x
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,237
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/utils/__init__.py
|
from .utils import Augment, ProgressBar, Classification_report, Logger, conv_output_shape
__all__ = ["Augment", "ProgressBar",
"Classification_report", "Logger", "conv_output_shape"]
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,238
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/models/__init__.py
|
from .lcn import LCN
from .cnn import CNN
from .fcn import FCN
from .lightningbase import Model
__all__ = ["LCN", "CNN","FCN", "Model"]
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
34,929,239
|
okitouni/Learning-symmetries
|
refs/heads/main
|
/sym/models/perm_models.py
|
import math
from itertools import permutations

import numpy as np
import torch
import torch.nn as nn
class PIN1(nn.Module):
    """Deep-sets style permutation-invariant network: a shared encoder phi is
    applied to each scalar feature, the encodings are summed (the invariant
    pooling), and a linear readout rho1 maps the pooled vector to a scalar."""

    def __init__(self, N, width=10, ):
        super(PIN1, self).__init__()
        # Per-element encoder: scalar -> N-dim embedding.
        self.phi = nn.Sequential(nn.Linear(1, N),
                                 nn.ReLU())
        # Readouts; rho2 is kept for interface compatibility but unused.
        self.rho1 = nn.Linear(N, 1)
        self.rho2 = nn.Linear(width, 1)
        self.N = N

    def forward(self, x):
        # Sum-pool the per-feature embeddings (order independent).
        pooled = torch.zeros((x.size(0), self.N))
        for col in range(x.size(1)):
            pooled = pooled + self.phi(x[:, col].view(-1, 1))
        return self.rho1(pooled)
class SNN1(nn.Module):
    """Plain (non-invariant) MLP baseline: Linear(N, N) -> ReLU -> Linear(N, 1).
    `width` is accepted for interface compatibility but unused."""

    def __init__(self, N, width=10):
        super(SNN1, self).__init__()
        layers = [torch.nn.Linear(N, N),
                  torch.nn.ReLU(),
                  torch.nn.Linear(N, 1)]
        self.mlp = torch.nn.Sequential(*layers)

    def forward(self, x):
        return self.mlp(x)
class PIN2(nn.Module):
    """Permutation-equivariant stack followed by an invariant PIN1 readout.
    Each level mixes features with lambda*I + gamma*J (J = all-ones), which
    commutes with permutations, then applies a ReLU."""

    def __init__(self, N, depth=3):
        super(PIN2, self).__init__()
        # One (lambda, gamma) coefficient pair per depth level.
        self.eqvL = nn.Parameter(torch.abs(torch.randn(depth)))
        self.eqvG = nn.Parameter(torch.abs(torch.randn(depth)))
        self.N = N
        self.depth = depth
        self.linear = nn.Linear(N, 1)
        self.pin1 = PIN1(N)

    def forward(self, x):
        for level in range(self.depth):
            mixer = self.eqvL[level] * torch.eye(self.N) \
                + self.eqvG[level] * torch.ones((self.N, self.N))
            x = torch.nn.ReLU()(torch.matmul(x, mixer))
        return self.pin1(x)
class SNN2(nn.Module):
    """Stack of Linear+ReLU blocks followed by an SNN1 readout (baseline for
    PIN2)."""

    def __init__(self, N, depth=3):
        super(SNN2, self).__init__()
        # Fix: a plain Python list hides submodules from PyTorch, so their
        # parameters were never registered — invisible to .parameters()
        # (hence never trained) and to .to(device)/state_dict(). Use
        # nn.ModuleList so the layers are registered properly.
        self.layerList = nn.ModuleList(
            nn.Sequential(nn.Linear(N, N), nn.ReLU()) for _ in range(depth)
        )
        self.snn1 = SNN1(N)

    def forward(self, x):
        for layer in self.layerList:
            x = layer(x)
        out = self.snn1(x)
        return out
class PIN3(nn.Module):
    """Permutation-invariant layer built by explicitly enumerating all N!
    permutations of a shared weight vector, followed by a PIN1 readout."""

    def __init__(self, N):
        super(PIN3, self).__init__()
        self.params = nn.Parameter(torch.randn(N))
        self.N = N
        # Fix: np.math was a deprecated alias removed in NumPy 2.0 — use the
        # stdlib math.factorial (imported at file top).
        self.pin1 = PIN1(math.factorial(N))

    def forward(self, x):
        # Build the (N!, N) matrix whose rows are all permutations of params.
        W = torch.zeros(math.factorial(self.N), self.N)
        # Fix: `permutations` was used without ever being imported, raising
        # NameError at runtime; it is now imported from itertools at file top.
        # Also removed a stray debugging print(W).
        for i, perm in enumerate(permutations(range(self.N))):
            W[i] = self.params[torch.LongTensor(perm)]
        x = torch.matmul(x, torch.transpose(W, 0, 1))
        out = self.pin1(x)
        return out
class SNN3(nn.Module):
    """Baseline for PIN3: a dense layer lifting N inputs to N! features,
    followed by the invariant PIN1 readout."""

    def __init__(self, N):
        super(SNN3, self).__init__()
        # Fix: np.math was a deprecated alias removed in NumPy 2.0 — use the
        # stdlib math.factorial (imported at file top).
        self.layer1 = nn.Linear(N, math.factorial(N))
        self.pin1 = PIN1(math.factorial(N))

    def forward(self, x):
        x = self.layer1(x)
        out = self.pin1(x)
        return out
|
{"/flow/__init__.py": ["/flow/models/__init__.py", "/flow/utils/__init__.py"], "/flow/models/__init__.py": ["/flow/models/efn.py"], "/flow/utils/__init__.py": ["/flow/utils/utils.py"], "/sym/models/lcn.py": ["/sym/__init__.py"], "/sym/__init__.py": ["/sym/models/__init__.py", "/sym/utils/__init__.py"], "/sym/models/fcn.py": ["/sym/__init__.py"], "/sym/utils/__init__.py": ["/sym/utils/utils.py"], "/sym/models/__init__.py": ["/sym/models/lcn.py", "/sym/models/fcn.py"]}
|
35,003,680
|
chrisylb/masterarbeit2
|
refs/heads/master
|
/dataprocess.py
|
import pandas as pd
import numpy as np
import math

# Replay-buffer style arrays, pre-sized for the whole extraction run.
# Layout: observations[t][0] is a 4x4 block — row 0 is the ego (merging) car
# [x, y, vx, vy]; rows 1..3 hold up to three nearby cars in the same frame.
actions = np.zeros((33000, 1, 2))
rewards = np.zeros((33000, 1, 1))
observations = np.zeros((33000, 1, 4, 4))
pos = 0
pos1 = 0
pos2 = 0
next_observations = np.zeros((33000, 1, 4, 4))
# Indices of the last sample of each trajectory (episode boundaries).
next_observations_id = [0]
# Fix: this list is appended to inside the loop below but was never
# initialised, which raised NameError on the first track.
studycar_describe = []

ogdata = pd.read_csv('/content/drive/MyDrive/Colab Notebooks/dataprocess/vehicle_tracks_011.csv')

# Rotate the raw coordinates so the merge lane is axis-aligned, then mirror
# and shift them into a local frame.
translate_angle_deger = 3.064 * 180 / np.pi
yaws = (180 - translate_angle_deger) * np.pi / 180
R = np.array([[np.cos(yaws), -np.sin(yaws)], [np.sin(yaws), np.cos(yaws)]])
ogdata.loc[:, 'x_trans'] = R[0][0] * ogdata.loc[:, 'x'] + R[0][1] * ogdata.loc[:, 'y']
ogdata.loc[:, 'y_trans'] = R[1][0] * ogdata.loc[:, 'x'] + R[1][1] * ogdata.loc[:, 'y']
ogdata.loc[:, 'vx_trans'] = R[0][0] * ogdata.loc[:, 'vx'] + R[0][1] * ogdata.loc[:, 'vy']
ogdata.loc[:, 'vy_trans'] = R[1][0] * ogdata.loc[:, 'vx'] + R[1][1] * ogdata.loc[:, 'vy']
studydata = ogdata.drop(['x', 'y', 'vx', 'vy'], axis=1)
studydata['x_trans'] = studydata['x_trans'].map(lambda x: 1000 - x)
studydata['vx_trans'] = studydata['vx_trans'].map(lambda x: -x)
studydata['y_trans'] = studydata['y_trans'].map(lambda y: 1100 - y)
studydata['vy_trans'] = studydata['vy_trans'].map(lambda y: -y)


def merge_y(a):
    """Blank out rows whose transformed y exceeds 20 (off the merge lane)."""
    if a.y_trans > 20:
        a.y_trans = None
    return a


merge = studydata.apply(merge_y, axis='columns')
Mergeid = merge.dropna(subset=['y_trans'])
# Renamed from `id` to stop shadowing the builtin.
track_ids = Mergeid['track_id'].unique()

for xyz in range(0, len(track_ids)):
    def find_frame(d):
        # Keep only rows belonging to the current track.
        if not d.track_id == track_ids[xyz]:
            d.track_id = None
        return d

    studycar = studydata.apply(find_frame, axis='columns')
    studycar = studycar.dropna(subset=['track_id'])
    # Restrict to the merge section of the road.
    studycar = studycar[studycar.x_trans > 80]
    studycar = studycar[studycar.x_trans < 156]
    # Finite-difference accelerations at 10 Hz (frames are 0.1 s apart).
    studycar.loc[:, 'x_acclearation'] = 10 * (studycar.loc[:, 'vx_trans'].shift(-1) - studycar.loc[:, 'vx_trans'])
    studycar.loc[:, 'y_acclearation'] = 10 * (studycar.loc[:, 'vy_trans'].shift(-1) - studycar.loc[:, 'vy_trans'])
    studycar.loc[:, 'speed'] = np.sqrt(
        (studycar.loc[:, 'vx_trans'] ** 2) + (studycar.loc[:, 'vy_trans'] ** 2))
    studycar.loc[:, 'accleration'] = 10 * (studycar.loc[:, 'speed'].shift(-1) - studycar.loc[:, 'speed'])
    studycar = studycar.fillna(method='ffill')
    studycar.loc[:, 'heading'] = 0.00
    studycar.loc[:, 'steering_angle'] = 0.00
    studycar.loc[:, 'beta'] = 0.00
    studycar = studycar.reset_index(drop=True)
    # Initial heading from the first displacement vector.
    y_1 = studycar.iloc[1].y_trans - studycar.iloc[0].y_trans
    x_1 = studycar.iloc[1].x_trans - studycar.iloc[0].x_trans
    studycar.loc[0, 'heading'] = np.arctan(y_1 / x_1)
    # Kinematic-bicycle reconstruction of steering angle and heading
    # (wheelbase 4.28 m, time step 0.1 s).
    for h in range(1, len(studycar)):
        if studycar.loc[h - 1, 'vy_trans'] == 0:
            studycar.loc[h, 'total_angle'] = studycar.loc[h - 1, 'heading']
        else:
            studycar.loc[h, 'total_angle'] = np.arctan(
                studycar.loc[h - 1, 'vy_trans'] / studycar.loc[h - 1, 'vx_trans'])
        beta = studycar.loc[h, 'total_angle'] - studycar.loc[h - 1, 'heading']
        studycar.loc[h, 'steering_angle'] = np.arctan(2 * np.tan(beta))
        studycar.loc[h, 'heading'] = studycar.loc[h - 1, 'speed'] * np.sin(beta) / (4.28 / 2) * 0.1 + studycar.loc[
            h - 1, 'heading']
    # studycar = studycar.fillna(method='ffill')
    # Cumulative reward: +0.01 per step, plus a one-time +10 once x > 155.
    studycar.loc[:, 'reward'] = 0
    goal = False
    for g in range(0, len(studycar) - 1):
        if studycar.loc[g + 1, 'x_trans'] > 155 and goal == False:
            studycar.loc[g + 1, 'reward'] = 10 + studycar.loc[g, 'reward']
            goal = True
        else:
            studycar.loc[g + 1, 'reward'] = 0.01 + studycar.loc[g, 'reward']
    # studycar is the merge car and frame_car is the car which is driving
    # with the merge_ego_car (same frames, other track ids).
    frame_id = studycar.frame_id.unique()
    frame_car = studydata.set_index(['frame_id'])
    frame_car = frame_car.loc[frame_id]

    def find_otherframe(c):
        # Drop the ego car itself from the per-frame neighbour table.
        if c.track_id == track_ids[xyz]:
            c.track_id = None
        return c

    frame_car = frame_car.apply(find_otherframe, axis='columns')
    frame_car = frame_car.dropna(subset=['track_id'])
    #####
    study = studycar.set_index(['frame_id'])
    # Row 0 of each observation: the ego car's state.
    for index, row in studycar.iterrows():
        observations[pos][0][0][0] = studycar.loc[index, 'x_trans']
        observations[pos][0][0][1] = studycar.loc[index, 'y_trans']
        observations[pos][0][0][2] = studycar.loc[index, 'vx_trans']
        observations[pos][0][0][3] = studycar.loc[index, 'vy_trans']
        pos = pos + 1
    # Rows 1..3: up to three neighbours within 10 m longitudinal distance
    # (10 m safe-distance threshold).
    for k in range(0, len(studycar)):
        bd = 0
        if frame_id[k] in frame_car.index:
            if not isinstance(frame_car.loc[frame_id[k], 'x_trans'], np.float64):
                for v in range(0, min(len(frame_car.loc[frame_id[k]]), 3)):
                    if np.abs(frame_car.loc[frame_id[k], 'x_trans'].iloc[v] - study.loc[frame_id[k], 'x_trans']) < 10:
                        observations[k + pos1][0][bd + 1][0] = frame_car.loc[frame_id[k], 'x_trans'].iloc[v]
                        observations[k + pos1][0][bd + 1][1] = frame_car.loc[frame_id[k], 'y_trans'].iloc[v]
                        observations[k + pos1][0][bd + 1][2] = frame_car.loc[frame_id[k], 'vx_trans'].iloc[v]
                        observations[k + pos1][0][bd + 1][3] = frame_car.loc[frame_id[k], 'vy_trans'].iloc[v]
                        bd = bd + 1
            else:
                # Single neighbour in the frame: .loc returns scalars here.
                observations[k + pos1][0][bd + 1][0] = frame_car.loc[frame_id[k], 'x_trans']
                observations[k + pos1][0][bd + 1][1] = frame_car.loc[frame_id[k], 'y_trans']
                observations[k + pos1][0][bd + 1][2] = frame_car.loc[frame_id[k], 'vx_trans']
                observations[k + pos1][0][bd + 1][3] = frame_car.loc[frame_id[k], 'vy_trans']
    pos1 = pos1 + len(studycar)
    for p in range(0, len(studycar)):
        actions[p + pos2] = [studycar.loc[p, 'accleration'], studycar.loc[p, 'steering_angle']]
    for j in range(0, len(studycar)):
        rewards[j + pos2] = [studycar.loc[j, 'reward']]
    pos2 = pos2 + len(studycar)
    next_observations_id.append(pos2 - 1)
    studycar_describe.append(studycar.y_trans.min())
    print(pos1)

np.save('actions_interaction_4', actions)
np.save('obs_interaction_4', observations)
np.save('rewards_interaction_4', rewards)
# NOTE(review): 29999 is smaller than the 33000 buffer — presumably a known
# upper bound on the number of filled samples; confirm against the dataset.
for i in range(0, 29999):
    if i not in next_observations_id:
        next_observations[i] = observations[i + 1]
    else:
        # Episode boundary: the "next" observation is the terminal one itself.
        next_observations[i] = observations[i]
# Fix: previously saved `observations` again, so the next-observation file
# never contained the shifted data computed above.
np.save('next_obs_interaction_4', next_observations)
|
{"/highway_env/envs/merge_ego.py": ["/highway_env/vehicle/concontroller.py"]}
|
35,059,237
|
hendrix/hendrix
|
refs/heads/main
|
/hendrix/facilities/services.py
|
from OpenSSL import SSL
from OpenSSL.crypto import get_elliptic_curve
from twisted.application import internet, service
from twisted.internet import reactor
from twisted.internet import ssl
from twisted.logger import Logger
from twisted.python.threadpool import ThreadPool
from twisted.web import server
from hendrix.facilities.resources import HendrixResource
def get_size_limiting_request(max_upload_bytes):
    """Build a twisted Request subclass that rejects request bodies larger
    than *max_upload_bytes* with a 413 and drops the connection."""
    class SizeLimitingRequest(server.Request):
        size_limit_on_post_data = max_upload_bytes

        def handleContentChunk(self, data):
            if self.content.tell() + len(data) > self.size_limit_on_post_data:
                self.transport.write(b"HTTP/1.1 413 Request Entity Too Large\r\n\r\n")
                self.transport.loseConnection()
                # Fix: stop here — previously execution fell through and the
                # oversized chunk was still appended to the request body after
                # the connection had been dropped.
                return
            return super().handleContentChunk(data)

    return SizeLimitingRequest
class HendrixService(service.MultiService):
    """
    HendrixService is a constructor that facilitates the collection of services
    and the extension of resources on the website by subclassing MultiService.

    'application' refers to a WSGI application object: likely a
    django.core.handlers.wsgi.WSGIHandler
    'resources' refers to a list of Resources with a namespace attribute
    'services' refers to a list of (name, service) pairs to add to the
    collection.
    """
    log = Logger()

    def __init__(
            self,
            application,
            threadpool=None,
            resources=None,
            services=None,
            loud=False):
        service.MultiService.__init__(self)
        # Registry used by add_server(); was previously never initialised,
        # which made the first add_server() call raise AttributeError.
        self.servers = {}
        # Create, start and add a thread pool service, which is made available
        # to our WSGIResource within HendrixResource
        if not threadpool:
            self.threadpool = ThreadPool(name="HendrixService")
        else:
            self.threadpool = threadpool
        reactor.addSystemEventTrigger('after', 'shutdown', self.threadpool.stop)
        ThreadPoolService(self.threadpool).setServiceParent(self)
        # create the base resource and add any additional static resources
        resource = HendrixResource(reactor, self.threadpool, application, loud=loud)
        if resources:
            # deterministic mount order by namespace
            resources = sorted(resources, key=lambda r: r.namespace)
            for res in resources:
                if hasattr(res, 'get_resources'):
                    for sub_res in res.get_resources():
                        resource.putNamedChild(sub_res)
                else:
                    resource.putNamedChild(res)
        self.site = server.Site(resource)
        # BUG FIX: the `services` argument was documented but silently ignored;
        # attach any supplied (name, service) pairs, mirroring
        # spawn_new_server()'s handling of additional_services.
        if services:
            for srv_name, srv in services:
                srv.setName(srv_name)
                srv.setServiceParent(self)

    def spawn_new_server(self, port, server_class, additional_services=None, *args, **kwargs):
        """Create the main TCP server (plus optional extra services) under this collection."""
        main_web_tcp = server_class(port, self.site, *args, **kwargs)
        main_web_tcp.setName('main_web_tcp')
        # to get this at runtime use
        # hendrix_service.getServiceNamed('main_web_tcp')
        main_web_tcp.setServiceParent(self)
        # add any additional services
        if additional_services:
            for srv_name, srv in additional_services:
                srv.setName(srv_name)
                srv.setServiceParent(self)

    def get_port(self, name):
        """Return the port object associated to our tcp server."""
        # Local renamed from `service`, which shadowed the imported
        # twisted.application.service module.
        named_service = self.getServiceNamed(name)
        return named_service._port

    def add_server(self, name, protocol, server):
        """Register a server object under a (name, protocol) key."""
        self.servers[(name, protocol)] = server
class ThreadPoolService(service.Service):
    """Wrap a twisted ThreadPool so it starts and stops together with the
    service hierarchy it is attached to."""

    def __init__(self, pool):
        """Store the ThreadPool instance; reject anything that is not one.

        self.pool holds the twisted.python.threadpool.ThreadPool() instance.
        """
        if not isinstance(pool, ThreadPool):
            raise TypeError(
                '%s must be initialised with a ThreadPool instance'
                % self.__class__.__name__
            )
        self.pool = pool

    def startService(self):
        # Mark the service started, then spin up the pool's worker threads.
        service.Service.startService(self)
        self.pool.start()

    def stopService(self):
        # Mark the service stopped, then shut the worker threads down.
        service.Service.stopService(self)
        self.pool.stop()
from twisted.internet.ssl import DefaultOpenSSLContextFactory
class ContextWithECC(SSL.Context):
    """SSL.Context variant whose use_privatekey accepts an already-loaded
    in-memory private key object instead of reading a key file."""

    def use_privatekey(self, _private_key):
        # At some point, we hope to use PyOpenSSL tooling to do this. See #144.
        # NOTE(review): reaches into pyOpenSSL private internals (_util.lib and
        # the key's _evp_pkey handle) — fragile across pyOpenSSL releases.
        from OpenSSL._util import lib as _OpenSSLlib
        use_result = _OpenSSLlib.SSL_CTX_use_PrivateKey(self._context, _private_key._evp_pkey)
        if not use_result:
            # A zero result means OpenSSL rejected the key; surface the error
            # through pyOpenSSL's standard key/passphrase failure path.
            self._raise_passphrase_exception()
class SpecifiedCurveContextFactory(DefaultOpenSSLContextFactory):
    """A DefaultOpenSSLContextFactory that also pins the ephemeral ECDH curve
    to the curve named by *curve_name*."""

    def __init__(self, private_key, cert, curve_name=None, *args, **kwargs):
        DefaultOpenSSLContextFactory.__init__(self, private_key, cert, *args, **kwargs)
        self.set_curve(curve_name)

    def set_curve(self, curve_name):
        # NOTE(review): curve_name defaults to None, but get_elliptic_curve(None)
        # will fail — callers are effectively required to pass a curve name.
        self._context.set_tmp_ecdh(get_elliptic_curve(curve_name))
class ExistingKeyTLSContextFactory(SpecifiedCurveContextFactory):
    """TLS context factory for a private key object that already exists in
    memory, rather than the key-file path DefaultOpenSSLContextFactory expects.

    Uses ContextWithECC by default so the in-memory key can be installed via
    its custom use_privatekey().
    """
    # Class-level default; cacheContext() lazily builds and stores the real
    # context on the instance.
    _context = None

    def __init__(self, private_key, cert, curve_name=None,
                 sslmethod=SSL.SSLv23_METHOD, _contextFactory=ContextWithECC):
        # Deliberately does NOT call super().__init__ — the parent chain would
        # try to load the key from disk; the attributes are wired by hand.
        self._private_key = private_key
        self.curve_name = curve_name
        self.certificate = cert
        self.sslmethod = sslmethod
        self._contextFactory = _contextFactory
        # Build the context first, then pin the curve on it.
        self.cacheContext()
        self.set_curve(curve_name)

    def cacheContext(self):
        # Build the SSL context once; subsequent calls are no-ops.
        if self._context is None:
            ctx = self._contextFactory(self.sslmethod)
            ctx.set_options(SSL.OP_NO_SSLv2)  # No allow v2. Obviously.
            ctx.use_certificate(self.certificate)
            # _contextFactory defaults to ContextWithECC, whose use_privatekey
            # accepts the in-memory key object.
            ctx.use_privatekey(self._private_key)
            self._context = ctx
class HendrixTCPService(internet.TCPServer):
    """Plain-TCP web service; optionally caps the request body size by
    swapping in a size-limiting request factory."""

    def __init__(self, port, site, *args, **kwargs):
        upload_cap = kwargs.pop('max_upload_bytes', None)
        super().__init__(port, site, *args, **kwargs)
        self.site = site
        if upload_cap:
            self.site.requestFactory = get_size_limiting_request(upload_cap)
class HendrixTCPServiceWithTLS(internet.SSLServer):
    """TLS-enabled web service: builds a TLS context from the given key/cert
    (DefaultOpenSSLContextFactory unless another factory is supplied) and
    optionally caps the request body size."""

    def __init__(self, port, site, private_key, cert,
                 context_factory=None, context_factory_kwargs=None,
                 max_upload_bytes=None):
        factory_cls = context_factory if context_factory else ssl.DefaultOpenSSLContextFactory
        factory_kwargs = context_factory_kwargs if context_factory_kwargs else {}
        self.tls_context = factory_cls(private_key, cert, **factory_kwargs)
        internet.SSLServer.__init__(
            self,
            port,   # integer port
            site,   # our site object, see the web howto
            contextFactory=self.tls_context
        )
        self.site = site
        if max_upload_bytes:
            self.site.requestFactory = get_size_limiting_request(max_upload_bytes)
|
{"/hendrix/facilities/services.py": ["/hendrix/facilities/resources.py"], "/examples/django_hx_chatserver/example_app/chat/views.py": ["/examples/django_hx_chatserver/example_app/chat/models.py"], "/test/debug_test_application.py": ["/hendrix/deploy/base.py"], "/hendrix/mechanics/concurrency/__init__.py": ["/hendrix/mechanics/concurrency/exceptions.py"], "/examples/django_hx_chatserver/example_app/chat/models.py": ["/hendrix/contrib/concurrency/signals.py"], "/hendrix/ux.py": ["/hendrix/contrib/__init__.py", "/hendrix/logger.py", "/hendrix/mechanics/concurrency/exceptions.py", "/hendrix/options.py"], "/hendrix/deploy/base.py": ["/hendrix/__init__.py", "/hendrix/facilities/gather.py", "/hendrix/facilities/protocols.py", "/hendrix/facilities/services.py", "/hendrix/options.py", "/hendrix/utils/__init__.py"], "/hendrix/deploy/tls.py": ["/hendrix/facilities/services.py", "/hendrix/deploy/base.py"], "/test/test_resources.py": ["/hendrix/facilities/resources.py"], "/hendrix/deploy/cache.py": ["/hendrix/contrib/services/cache.py", "/hendrix/deploy/base.py"], "/hendrix/contrib/resources/static.py": ["/hendrix/facilities/resources.py"], "/hendrix/experience/hey_joe.py": ["/hendrix/contrib/concurrency/signals.py", "/hendrix/contrib/concurrency/resources.py"], "/hendrix/contrib/cache/resource.py": ["/hendrix/utils/__init__.py", "/hendrix/contrib/cache/__init__.py", "/hendrix/contrib/cache/backends/memory_cache.py"], "/hendrix/management/commands/hx.py": ["/hendrix/options.py", "/hendrix/ux.py"], "/hendrix/contrib/cache/backends/__init__.py": ["/hendrix/contrib/cache/__init__.py"], "/test/test_testing_utils.py": ["/hendrix/utils/test_utils/__init__.py"], "/hendrix/mechanics/concurrency/decorators.py": ["/hendrix/mechanics/concurrency/__init__.py", "/hendrix/mechanics/concurrency/exceptions.py"], "/examples/django_nyc_demo/run.py": ["/hendrix/deploy/base.py"], "/hendrix/facilities/response.py": ["/hendrix/utils/__init__.py"], "/hendrix/contrib/services/cache.py": 
["/hendrix/contrib/cache/resource.py", "/hendrix/facilities/services.py"], "/test/test_hx_launcher.py": ["/hendrix/options.py", "/hendrix/ux.py"], "/test/test_crosstown_traffic.py": ["/hendrix/facilities/resources.py", "/hendrix/mechanics/concurrency/exceptions.py", "/test/resources.py"], "/test/test_deploy.py": ["/test/utils.py"], "/hendrix/contrib/concurrency/resources.py": ["/hendrix/facilities/resources.py", "/hendrix/contrib/concurrency/messaging.py", "/hendrix/contrib/concurrency/signals.py"], "/examples/django_hx_chatserver/example_app/run.py": ["/hendrix/deploy/base.py"], "/hendrix/deploy/hybrid.py": ["/hendrix/deploy/cache.py", "/hendrix/deploy/tls.py"], "/hendrix/contrib/cache/backends/memory_cache.py": ["/hendrix/contrib/cache/__init__.py", "/hendrix/contrib/cache/backends/__init__.py"], "/hendrix/facilities/resources.py": ["/hendrix/facilities/response.py"], "/setup.py": ["/hendrix/__init__.py"], "/test/test_ux.py": ["/hendrix/__init__.py", "/hendrix/contrib/__init__.py", "/hendrix/deploy/base.py", "/hendrix/options.py", "/test/utils.py"], "/hendrix/options.py": ["/hendrix/__init__.py"], "/hendrix/utils/test_utils/__init__.py": ["/hendrix/mechanics/concurrency/decorators.py"], "/hendrix/logger.py": ["/hendrix/defaults.py"], "/examples/tls_server/run_server.py": ["/hendrix/deploy/tls.py"], "/test/test_request_behavior.py": ["/hendrix/deploy/base.py", "/hendrix/facilities/resources.py", "/test/resources.py"], "/hendrix/experience/crosstown_traffic.py": ["/hendrix/mechanics/concurrency/decorators.py"], "/hendrix/utils/conf.py": ["/hendrix/utils/__init__.py"], "/performance-tools/pyramid_with_crosstown_traffic.py": ["/hendrix/deploy/base.py"], "/test/resources.py": ["/hendrix/mechanics/concurrency/__init__.py"], "/test/test_tls_requests.py": ["/hendrix/deploy/tls.py", "/hendrix/facilities/resources.py", "/hendrix/facilities/services.py", "/test/resources.py"], "/test/utils.py": ["/hendrix/contrib/__init__.py", "/hendrix/defaults.py", 
"/hendrix/deploy/base.py", "/hendrix/utils/__init__.py"], "/performance-tools/simple_delay_server.py": ["/hendrix/deploy/base.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.