code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1
value |
|---|---|---|---|---|---|
import numpy as np
def get_policy_iteration(env, gamma=0.99999999999):
    """Solve *env* with exact policy iteration and return the optimal policy.

    Parameters
    ----------
    env : object
        Environment exposing ``R`` (rewards, shape (nstates, nactions)),
        ``P`` (transition tensor, shape (nactions, nstates, nstates)) and
        ``graph.ngraph.nodes`` (used only to size the initial policy).
    gamma : float
        Discount factor; must be strictly below 1 so the policy-evaluation
        linear system (I - gamma * P_d) is invertible.

    Returns
    -------
    numpy.ndarray
        One-hot integer policy of shape (nstates, nactions); row ``s`` has a
        1 in the column of the optimal action for state ``s``.
    """
    R = env.R
    P = env.P
    nstates, nactions = P.shape[-1], P.shape[0]

    def Pd(d):
        # State-to-state transition matrix induced by (one-hot) policy d.
        return np.einsum('ast,sa->st', P, d)

    def rd(d):
        # Expected immediate reward per state under policy d.
        return np.einsum('sa,sa->s', R, d)

    def bellman_content(v):
        # Q-values: R(s, a) + gamma * E[v(next state)].
        x = np.einsum("ast,t->sa", P, v)
        return R + gamma * x

    def policy_evaluation_pio(d):
        # Solve (I - gamma * P_d) v = r_d exactly for the policy's values.
        gamma_pd = np.identity(nstates) - gamma * Pd(d)
        return np.linalg.solve(gamma_pd, rd(d))

    def bellman_policy(v):
        # Greedy policy w.r.t. v, returned as a one-hot (nstates, nactions)
        # array rather than a vector of action indices.
        d_next = np.zeros((nstates, nactions))
        best_actions = np.argmax(bellman_content(v), axis=1)
        d_next[np.arange(len(best_actions)), best_actions] = 1
        return d_next

    # Start from an arbitrary deterministic policy. NOTE(review): the identity
    # initialisation assumes nactions == nstates ("action i" == "go to node i")
    # — TODO confirm for every env this is used with.
    guess = np.identity(len(env.graph.ngraph.nodes))
    old_guess = None
    # BUG FIX: the previous version assigned the (value, policy) tuple returned
    # by the iteration operator back into ``guess`` and fed that tuple into the
    # next evaluation step, which crashes inside np.einsum; its convergence
    # test also compared a tuple against an array. We now carry only the
    # policy between iterations and stop when it is stable.
    while old_guess is None or not np.array_equal(guess, old_guess):
        old_guess = guess
        v = policy_evaluation_pio(guess)
        guess = bellman_policy(v)
    return guess.astype(int)
def get_backward_induction_actions(fin_env):
    """
    The env must be one created with make_horizon=True

    Runs finite-horizon backward induction over the layered (horizon) MDP and
    returns the greedy action sequence followed from the origin node.
    """
    nb_nodes = fin_env.graph.ngraph.order()
    def bellman_content(v):
        # Undiscounted Q-values: R(s, a) + E[v(next state)].
        x = np.einsum("ast,t->sa", fin_env.P, v)
        return fin_env.R + x
    def bellman_optimality_operator(v):
        # Optimal state values one decision epoch earlier in time.
        return np.max(bellman_content(v), axis=1)
    def bellman_policy_operator(v):
        # Greedy action index per state given the continuation values v.
        return np.argmax(bellman_content(v), axis=1)
    vts = [np.zeros((nb_nodes,))] # start with only the final v, a vector of zeros
    pts = []
    # Backward sweep: one Bellman backup per horizon layer, last layer first.
    for t in range(len(fin_env.horizon_states)):
        v_tm1 = vts[-1]
        v_t = bellman_optimality_operator(v_tm1)
        p_t = bellman_policy_operator(v_tm1)
        pts.append(p_t)
        vts.append(v_t)
    # Forward roll-out: ``pts`` was filled backwards in time, so ``reversed``
    # replays the per-epoch policies chronologically. NOTE(review): the chosen
    # action index is also used as the next position, i.e. action a means
    # "move to node a" — TODO confirm that convention for this env family.
    sequence = []
    position = fin_env.graph.origin
    for epoch in reversed(pts):
        sequence.append(epoch[position])
        position = epoch[position]
    return sequence
import copy
import networkx as nx
import numpy as np
def make_bigram(list):
    """Return the consecutive (current, next) pairs of *list* as 2-tuples."""
    return [pair for pair in zip(list, list[1:])]
def get_correct_adj_mat(ngaph):
    """Dense weighted adjacency matrix of *ngaph* with -1 marking non-edges.

    Rows/columns follow sorted node order. Cells carrying an edge hold the
    edge weight; cells with no edge are set to -1 instead of the ambiguous 0
    that networkx emits (0 is a legal edge weight here).
    """
    ordered_nodes = sorted(ngaph.nodes())
    # Binary adjacency (weight=None counts edges as 1) tells us where the
    # real edges are, independent of their weights.
    edge_mask = np.squeeze(
        np.asarray(
            nx.adjacency_matrix(ngaph, nodelist=ordered_nodes, weight=None).todense()
        )
    )
    weighted = np.squeeze(
        np.asarray(
            nx.adjacency_matrix(ngaph, nodelist=ordered_nodes).todense()
        )
    )
    weighted[edge_mask == 0] = -1  # replace non-edges by -1 instead of 0
    return weighted
class Graph:
    """Wrapper that normalises a networkx graph for route-finding RL envs.

    The constructor forces a weighted digraph with integer node labels
    0..order-1, optionally unrolls it into a fixed-horizon layered DAG, and
    then tracks the agent's position along a path from ``origin`` to
    ``goal`` (with Dijkstra-shortest and longest simple paths precomputed
    as references).
    """
    def __init__(self, networkx_graph, origin, goal, weights=None, random_weights=(0,10), make_horizon=False):
        # Work on a copy so the caller's graph is never mutated.
        networkx_graph = networkx_graph.copy()
        self.was_directed = nx.is_directed(networkx_graph)
        networkx_graph = networkx_graph.to_directed()
        networkx_graph = nx.convert_node_labels_to_integers(networkx_graph, label_attribute="old_name")
        if nx.is_weighted(networkx_graph) and weights is not None:
            print("WARNING: your weights will be ignored.")
        if not nx.is_weighted(networkx_graph): # First, make sure the graph is weighted
            edges = networkx_graph.edges()
            dico = {}
            if weights is not None:
                for e1, e2 in edges:
                    # Accept either weights[e1][e2] or weights[(e1, e2)].
                    try: # Runs once, doesn't have to be optimized
                        dico[(e1, e2)] = weights[e1][e2]
                    except:
                        try:
                            dico[(e1, e2)] = weights[(e1,e2)]
                        except:
                            raise Exception("The weights passed to Graph must either be indexed by (edge one, edge two) tuples or by edge one, and then edge two."
                                            "\n\nSo either weights[(e1,e2)]=value or weights[e1][e2]=value.")
            # Any edge the caller did not weight gets a random integer weight.
            for e1e2 in edges:
                if e1e2 not in dico:
                    dico[e1e2] = np.random.randint(random_weights[0], random_weights[1], size=1)[0]
            nx.set_edge_attributes(networkx_graph, dico, "weight")
        networkx_graph, origin, goal = (networkx_graph, origin, goal) if not make_horizon else self._make_horizons(networkx_graph, origin, goal)
        if sorted(list(networkx_graph.nodes)) != list(range(networkx_graph.order())): # must rename to make nodes from 0 to order-1
            dico = {}
            for i, node in enumerate(networkx_graph.nodes):
                dico[node] = i
                # Keep origin/goal pointing at the same nodes after relabeling.
                if node == goal:
                    goal = i
                elif node == origin:
                    origin = i
            networkx_graph = nx.relabel_nodes(networkx_graph, dico)
        self.made_horizon = make_horizon        # whether horizon unrolling ran
        self.ngraph = networkx_graph            # normalised networkx digraph
        self.adj_mat = get_correct_adj_mat(networkx_graph)  # weights, -1 = no edge
        # Per-node array of neighbour indices (columns where adj_mat != -1).
        self.adjacent_indices = [np.nonzero(self.adj_mat[i] != -1)[0] for i in range(self.adj_mat.shape[0])]
        self._set_problem(origin, goal)

    def _transform_to_horizon(self, ngraph, origin, goal):
        """Unroll *ngraph* into a layered DAG where every origin->goal path has
        the same length (padding short paths with zero-weight dummy nodes)."""
        all_paths = sorted(nx.all_simple_paths(ngraph, origin, goal), key=len, reverse=True)
        max_len = len(all_paths[0])
        adj_mat = get_correct_adj_mat(ngraph)
        # Mutable one-cell counter so the closure below can allocate fresh ids.
        _new_name = [ngraph.order() + 1]
        def get_new_name(_new_name=_new_name, check=True):
            temp = _new_name[0]
            if check:
                # Never hand out the origin/goal labels as fresh node names.
                while temp == goal or temp == origin:
                    temp += 1
            _new_name[0] = temp + 1
            return temp
        # go-go gadgeto extendo paths
        # Pad every shorter path with dummy nodes inserted just before goal.
        for path in all_paths:
            while len(path) < max_len:
                path.insert(-1, get_new_name())
        # rename paths
        # Rebuild each padded path with fresh intermediate node names so the
        # paths only share origin and goal.
        flow_graph = nx.DiGraph()
        flow_graph.add_node(origin)
        flow_graph.add_node(goal)
        for path in all_paths:
            new_u_name = origin
            for i, uv in enumerate(make_bigram(path)):
                u, v = uv
                new_v_name = v
                if v != goal:
                    new_v_name = get_new_name()
                    flow_graph.add_node(new_v_name)
                # Dummy (padding) nodes get zero-weight edges; the weight of
                # the original final hop is charged where the real node ends.
                w = 0
                if u < ngraph.order():
                    if v < ngraph.order():
                        w = adj_mat[u, v]
                    else:
                        w = adj_mat[u, goal]
                flow_graph.add_edge(new_u_name, new_v_name, weight=w)
                new_u_name = new_v_name
        # collapse end
        # Merge the chains of zero-weight padding nodes feeding into goal back
        # into a single spine, walking backwards from goal.
        front = goal
        while True:
            neighs = list(flow_graph.predecessors(front))
            for neigh in neighs.copy():
                if flow_graph[neigh][front]["weight"] != 0:
                    neighs.remove(neigh)
            if len(neighs) <= 1:
                break
            front = neighs[0]
            for neigh in neighs[1:]:
                for pred in flow_graph.predecessors(neigh):
                    flow_graph.add_edge(pred, front, weight=flow_graph[pred][neigh]["weight"])
                flow_graph.remove_node(neigh)
        # final_relabeling
        # Compact node labels to 0..order-1; origin/goal keep their labels
        # when those already fit inside the compact range.
        dont_rename_poi = True
        if origin > flow_graph.order()-1 or goal > flow_graph.order()-1:
            dont_rename_poi = False
        rename_origin = origin
        rename_goal = goal
        _new_name[0] = 0
        for n in list(flow_graph.nodes):
            if dont_rename_poi and n in [origin, goal]:
                continue
            new_name = get_new_name(check=dont_rename_poi)
            if not dont_rename_poi:
                if n == origin:
                    rename_origin = new_name
                if n == goal:
                    rename_goal = new_name
            nx.relabel_nodes(flow_graph, {n: new_name}, copy=False)
        return flow_graph, rename_origin, rename_goal

    def _make_horizons(self, ngraph, origin, goal):
        """Ensure the graph is a layered DAG and record the per-epoch state
        and action lists (``horizon_states`` / ``horizon_acts``)."""
        def test_if_already_ok():
            # BFS from origin; if any successor was already visited, the graph
            # is not layered and must be transformed. As a side effect, prunes
            # nodes unreachable from origin when the graph is already layered.
            visited = {origin}
            front = [origin]
            while True:
                if len(front) == 0:
                    break
                sucs = []
                for f in front:
                    sucs += list(ngraph.successors(f))
                if len(sucs) == 0:
                    break
                suc_set = set(sucs)
                if len(suc_set.intersection(visited)) > 0:
                    return False
                front = list(suc_set)
                visited = visited.union(suc_set)
            # true
            if ngraph.order() > len(visited):
                not_visited = set(ngraph.nodes) - visited
                for nv in not_visited:
                    ngraph.remove_node(nv)
            return True
        if not test_if_already_ok():
            ngraph, origin, goal = self._transform_to_horizon(ngraph, origin, goal)
        # Layer-by-layer sweep recording, for each epoch, the frontier states
        # and the successor (action) lists of each frontier state.
        self.horizon_acts = []
        self.horizon_states = []
        front = [origin]
        while True:
            self.horizon_states.append(front)
            all_sucs = set()
            acts_per_state = []
            self.horizon_acts.append(acts_per_state)
            for node in front:
                sucs = list(ngraph.successors(node))
                acts_per_state.append(sucs)
                all_sucs = all_sucs.union(set(sucs))
            if len(all_sucs) == 0:
                break
            front = list(all_sucs)
        return ngraph, origin, goal

    def _set_problem(self, origin, goal):
        """Reset path-tracking state and precompute the shortest (Dijkstra)
        and longest simple paths from *origin* to *goal* as references."""
        self._set_position(origin)
        self.origin = origin
        self.goal = goal
        self.path = [origin]
        self.path_bigram = []
        self.dijkstra_path = nx.dijkstra_path(self.ngraph, origin, goal)
        self.dijkstra_bigram = make_bigram(self.dijkstra_path)
        self.dijkstra_rew = sum([self.adj_mat[(e1, e2)] for e1, e2 in self.dijkstra_bigram])
        # NOTE(review): enumerating all simple paths is exponential in the
        # worst case — fine for small graphs, confirm acceptable for intended
        # graph sizes.
        all_simple_paths = list(nx.all_simple_paths(self.ngraph, origin, goal))
        ws = []
        for path in all_simple_paths:
            big = make_bigram(path)
            weight = 0
            for e1, e2 in big:
                weight += self.adj_mat[e1,e2]
            ws.append(weight)
        i = np.argmax(np.array(ws))
        self.longest_path_rew = ws[i]
        self.longest_path = all_simple_paths[i]
        self.longest_path_bigram = make_bigram(self.longest_path)

    def reset(self, origin=None, goal=None):
        """Restart the episode, optionally with a new origin and/or goal."""
        if origin is None:
            origin = self.origin
        if goal is None:
            goal = self.goal
        self._set_problem(origin, goal)

    def _set_position(self, pos):
        # Single write point for the agent's current node.
        self.position = pos

    def transition(self, new_pos):
        """Move the agent to *new_pos*; returns (edge reward, done flag), or
        (False, False) when the move is not along an existing edge."""
        self.path.append(new_pos)
        self.path_bigram = make_bigram(self.path)
        if new_pos not in self.adjacent_indices[self.position]:
            # NOTE(review): this print + 100-second sleep looks like a debug
            # leftover for catching illegal moves — consider raising instead.
            print(f"{new_pos} not in {self.adjacent_indices[self.position]}")
            import time
            time.sleep(100)
            return False, False
        reward = self.adj_mat[self.position, new_pos]
        self._set_position(new_pos)
        done = self.position == self.goal
        return reward, done
import os
import logging
import json
import random
import time
from datetime import datetime, timedelta
import copy
from route_planner_common.utils.logger import Logger
class Util:
    """Stateless helper methods shared across the route-planner services:
    value validation, GCS file uploads, Cloud Tasks queueing, Flask request
    parsing and auth-payload accessors. All methods are classmethods; the
    class is never instantiated."""
    @classmethod
    def check_if_integer(cls, key, value):
        # True when ``value`` casts to int; logs the offending key otherwise.
        try:
            convert_to_int = int(value)
        except:
            Logger.error(
                u"Key: {0}, does not contain a valid integer value. Value is: {1}".format(key, value))
            return False
        return True
    @classmethod
    def check_if_float(cls, key, value):
        # True when ``value`` casts to float; logs the offending key otherwise.
        try:
            convert_to_float = float(value)
        except:
            Logger.error(
                u"Key: {0}, does not contain a valid float value. Value is: {1}".format(key, value))
            return False
        return True
    @classmethod
    def check_if_json(cls, key, value):
        # True when ``value`` parses as JSON; logs the offending key otherwise.
        try:
            convert_to_json = json.loads(value)
        except:
            Logger.error(
                u"Key: {0}, does not contain a valid JSON value. Value is: {1}".format(key, value))
            return False
        return True
    @classmethod
    def check_if_string(cls, key, value):
        # True when ``value`` casts to str (almost always succeeds).
        try:
            convert_to_string = str(value)
        except:
            Logger.error(
                u"Key: {0}, does not contain a valid string value. Value is: {1}".format(key, value))
            return False
        return True
    @classmethod
    def check_if_true(cls, key, value):
        # Accepts bool/bytes/str; only a case-insensitive "true" counts.
        # NOTE(review): for bytes input ``new_value`` stays bytes, so the
        # ``== 'true'`` comparison is always False on Python 3 — confirm
        # whether bytes payloads are expected here.
        new_value = None
        if isinstance(value, bool):
            new_value = str(value).lower()
        elif isinstance(value, bytes):
            new_value = value.lower()
        elif isinstance(value, str):
            new_value = value.lower()
        if new_value and new_value == 'true':
            return True
        else:
            Logger.info(
                u"Key: {0}, does not contain a valid TRUE boolean value. Value is: {1}".format(key, value))
            return False
    @classmethod
    def check_if_false(cls, key, value):
        # Mirror of check_if_true for the "false" literal (same bytes caveat).
        new_value = None
        if isinstance(value, bool):
            new_value = str(value).lower()
        elif isinstance(value, bytes):
            new_value = value.lower()
        elif isinstance(value, str):
            new_value = value.lower()
        if new_value and new_value == 'false':
            return True
        else:
            Logger.info(
                u"Key: {0}, does not contain a valid FALSE boolean value. Value is: {1}".format(key, value))
            return False
    @classmethod
    def check_attribute_type(cls, key, value):
        # Coarse type tag for validation; returns None for any other type
        # (int, float, bool, ...) — callers rely on that implicit None.
        if isinstance(value, bytes):
            return 'string'
        elif isinstance(value, str):
            return 'string'
        elif isinstance(value, dict):
            return 'dict'
        elif isinstance(value, list):
            return 'list'
    @classmethod
    def generate_random_string(cls):
        # Alias kept for readability at call sites; a UUID4 string.
        return cls.get_uuid()
    @classmethod
    def get_uuid(cls):
        import uuid
        return str(uuid.uuid4())
    @classmethod
    def upload_file_to_bucket(cls, payload, company_id, unique_id, unique_key):
        # Uploads the local file at path ``payload`` to the configured GCS
        # bucket, makes it public, and returns its public URL (None on error).
        from google.cloud import storage
        if not company_id:
            # Sentinel company id used when none is provided.
            company_id = 9999999999999999
        try:
            GCS_BUCKET_NAME = os.getenv('GCS_BUCKET_NAME')
            GCS_ROUTE_OPTIMIZATION_RESULT_PATH_PREFIX = os.getenv(
                'GCS_ROUTE_OPTIMIZATION_RESULT_PATH_PREFIX')
            bucket_name = GCS_BUCKET_NAME
            # Prefix template is expected to contain {company_id} and
            # {optimization_id} placeholders.
            destination_blob_name = GCS_ROUTE_OPTIMIZATION_RESULT_PATH_PREFIX.format(
                company_id=company_id,
                optimization_id=unique_id)
            storage_client = storage.Client()
            bucket = storage_client.bucket(bucket_name)
            blob = bucket.blob('{}_{}'.format(
                destination_blob_name, unique_key))
            # blob.upload_from_string(data=json.dumps({'payload': payload}), content_type='application/json')
            Logger.info(u'file path is : {} '.format(payload))
            with open(payload, "rb") as my_file:
                blob.upload_from_file(my_file, num_retries=10)
            blob.make_public()
            Logger.info(u"Blob {} is publicly accessible at {}".format(
                blob.name, blob.public_url))
            return blob.public_url
        except Exception as file_upload_exception:
            Logger.info(
                'error uploading data to bucket, optimization will not be initiated')
            Logger.info(file_upload_exception)
            return None
    @classmethod
    def save_and_upload_file_to_bucket(cls, filename, company_id, unique_id, unique_key, data=None, file_to_save=None, file_path_to_save=None):
        # Serialises ``data`` (if given) to a local file and delegates the
        # upload. NOTE(review): writes into the filesystem root ('/<filename>')
        # — presumably fine inside a container; confirm.
        if data is not None:
            file_path_to_save = os.path.join('/', filename)
            file_to_save = open(file_path_to_save, "w")
            file_to_save.writelines([json.dumps(data)])
            file_to_save.close()
        elif not file_to_save or not file_path_to_save:
            return None
        Logger.info('file_path_to_save: {}'.format(file_path_to_save))
        return cls.upload_file_to_bucket(payload=file_path_to_save, company_id=company_id, unique_id=unique_id, unique_key=unique_key)
    @classmethod
    def download_file(cls, path, name=None):
        # Returns the raw response body for ``path`` or None on any failure.
        # ``name`` is accepted but unused here.
        import requests
        try:
            file_download_request = requests.get(path)
            if file_download_request.status_code != 200:
                return None
        except Exception as e:
            Logger.info(e)
            return None
        return file_download_request.content
    @classmethod
    def get_request_content(cls, request, logging_info=None):
        # Extracts the body for POST (form-encoded or JSON) / args for GET,
        # plus the query-string params; logs both and returns the pair.
        request_content = None
        if request.method == 'POST':
            # NOTE(review): compares against the bare 'x-www-form-urlencoded'
            # rather than the standard 'application/x-www-form-urlencoded' —
            # confirm which value the clients actually send.
            if request.headers.get('Content-Type') == 'x-www-form-urlencoded':
                request_content = request.form.to_dict()
            else:
                request_content = request.get_json()
        elif request.method == 'GET':
            request_content = request.args.to_dict()
        query_params = request.args.to_dict()
        Logger.info('query_params : {}' .format(
            query_params), logging_info=logging_info)
        Logger.info('request_content : {}' .format(
            request_content), logging_info=logging_info)
        return request_content, query_params
    @classmethod
    def log(cls, msg):
        # Convenience info-log with a wall-clock timestamp prefix.
        Logger.info('{} {}'.format(datetime.now(), msg))
    @classmethod
    def get_owner_from_auth(cls, auth_info):
        # Owner == company_id from the auth payload; raises when absent.
        from route_planner_common.exceptions import OwnerNotFound
        owner = auth_info.get('user', dict()).get('company_id', None)
        if not owner:
            raise OwnerNotFound()
        return owner
    @classmethod
    def get_user_id_from_auth(cls, auth_info):
        # userId from the auth payload; raises when absent.
        from route_planner_common.exceptions import UserIdNotFound
        userid = auth_info.get('user', dict()).get('userId', None)
        if not userid:
            raise UserIdNotFound()
        return userid
    @classmethod
    def is_valid_uuid(cls, uuid_to_test, version=4):
        # Round-trips through uuid.UUID so only canonically formatted UUID
        # strings (of the requested version) pass.
        from uuid import UUID
        try:
            uuid_obj = UUID(uuid_to_test, version=version)
        except ValueError:
            return False
        return str(uuid_obj) == uuid_to_test
    @classmethod
    def generate_headers_for_arrivy_from_request(cls, request):
        # Forwardable copy of the incoming headers: drops hop-specific ones
        # and promotes the Re-Cookie passthrough header to Cookie.
        headers = dict(request.headers)
        if headers.get('Host'):
            del headers['Host']
        if headers.get('Content-Length'):
            del headers['Content-Length']
        if headers.get('Re-Cookie'):
            headers['Cookie'] = headers.get('Re-Cookie')
        return headers
    @classmethod
    def load_dict(cls, _json):
        # Normalises ``_json`` to a dict: None -> {}, JSON string -> parsed,
        # dict passed through unchanged.
        _json = _json if _json else dict()
        if _json and Util.check_attribute_type('_json', _json) != 'dict':
            return json.loads(_json)
        return _json
    @classmethod
    def is_admin(cls, request):
        # Admin access is granted by matching the Admin-Key header against
        # the ADMIN_KEY environment variable.
        import os
        return request.headers.get('Admin-Key') and request.headers.get('Admin-Key') == os.getenv('ADMIN_KEY')
    @classmethod
    def print_traceback(cls):
        # Logs the current exception traceback (call from an except block).
        import traceback
        Logger.info(traceback.format_exc())
    # [START cloud_tasks_create_queue]
    @classmethod
    def enqueue_http_request(cls, url, payload=None, in_seconds=None):
        # Enqueues a POST task on the configured Cloud Tasks queue, creating
        # the queue on first use. Returns "success"/"error" strings.
        project = os.getenv('PROJECT_ID')
        location = os.getenv('LOCATION')
        queue_name = os.getenv('QUEUE_NAME')
        base_url = os.getenv('BASE_URL')
        if not url.startswith('http'):
            # Relative URLs are resolved against the service base URL.
            url = '{}/{}'.format(base_url, url)
        if not cls.does_queue_exists(project=project, location=location, queue_name=queue_name):
            Logger.info(
                'no queue exists with the name : {}, creating new queue'.format(queue_name))
            create_queue_response = cls.create_queue(
                project=project, queue_name=queue_name, location=location)
            Logger.info('create_queue_response is : {}'.format(
                create_queue_response))
        try:
            create_task_response = cls.create_http_task(project=project, queue=queue_name, location=location,
                                                        task_name='{}_{}'.format(
                                                            os.getenv('TASK_NAME_PREFIX'), cls.generate_random_string()),
                                                        url=url, payload=payload, in_seconds=in_seconds)
            return "success"
        except Exception as e:
            Logger.info('Exception in enqueue_http_request ')
            Logger.info(e)
            return "error"
    @classmethod
    def create_queue(cls, project, queue_name, location):
        """Create a task queue."""
        from google.cloud import tasks_v2
        # Create a client.
        client = tasks_v2.CloudTasksClient()
        # Construct the fully qualified location path.
        parent = f"projects/{project}/locations/{location}"
        # Construct the create queue request.
        queue = {"name": client.queue_path(project, location, queue_name)}
        # Use the client to create the queue.
        response = client.create_queue(
            request={"parent": parent, "queue": queue})
        Logger.info("Created queue {}".format(response.name))
        return response
    # [END cloud_tasks_create_queue]
    @classmethod
    def does_queue_exists(cls, project, location, queue_name):
        # Lists the location's queues and checks for an exact name match.
        from google.cloud import tasks_v2
        # projects/arrivy-sandbox/locations/us-central1/queues/worker-requests
        # Create a client.
        client = tasks_v2.CloudTasksClient()
        # Construct the fully qualified location path.
        parent = f"projects/{project}/locations/{location}"
        queue_full_name = client.queue_path(project, location, queue_name)
        # Use the client to obtain the queues.
        response = client.list_queues(request={"parent": parent})
        return len(list(filter(lambda q: q.name == queue_full_name, response))) > 0
    # [START create_http_task]
    @classmethod
    def create_http_task(cls,
                         project, queue, location, url, payload=None, in_seconds=None, task_name=None
                         ):
        from google.cloud import tasks_v2
        from google.protobuf import timestamp_pb2, duration_pb2
        # [START cloud_tasks_create_http_task]
        """Create a task for a given queue with an arbitrary payload."""
        # Create a client.
        client = tasks_v2.CloudTasksClient()
        # TODO(developer): Uncomment these lines and replace with your values.
        # project = 'my-project-id'
        # queue = 'my-queue'
        # location = 'us-central1'
        # url = 'https://example.com/task_handler'
        # payload = 'hello' or {'param': 'value'} for application/json
        # in_seconds = 180
        # task_name = 'my-unique-task'
        # Construct the fully qualified queue name.
        parent = client.queue_path(project, location, queue)
        # Construct the request body.
        # Per-attempt dispatch deadline of 15 seconds.
        d = duration_pb2.Duration()
        d.FromSeconds(15)
        task = {
            "dispatch_deadline": d,
            "http_request": {  # Specify the type of request.
                "http_method": tasks_v2.HttpMethod.POST,
                "url": url,  # The full url path that the task will be sent to.
            }
        }
        if payload is not None:
            if isinstance(payload, dict):
                # Convert dict to JSON string
                payload = json.dumps(payload)
                # specify http content-type to application/json
                task["http_request"]["headers"] = {
                    "Content-type": "application/json"}
            # The API expects a payload of type bytes.
            converted_payload = payload.encode()
            # Add the payload to the request.
            task["http_request"]["body"] = converted_payload
        if in_seconds is not None:
            # Convert "seconds from now" into an rfc3339 datetime string.
            d = datetime.utcnow() + timedelta(seconds=in_seconds)
            # Create Timestamp protobuf.
            timestamp = timestamp_pb2.Timestamp()
            timestamp.FromDatetime(d)
            # Add the timestamp to the tasks.
            task["schedule_time"] = timestamp
        if task_name is not None:
            # Add the name to tasks.
            task["name"] = client.task_path(
                project, location, queue, task_name)
        # Use the client to build and send the task.
        response = client.create_task(request={"parent": parent, "task": task})
        Logger.info("Created task {}".format(response.name))
        # [END cloud_tasks_create_http_task]
        return response
    # [END create_http_task]
    @classmethod
    def log_exception(cls, e):
        # Reports to Cloud Error Reporting, skipped on dev environments.
        import os
        if not os.getenv('IS_DEV'):
            from google.cloud import error_reporting
            client = error_reporting.Client()
            client.report_exception(e)
    @classmethod
    def create_logging_info(cls, auth_info):
        # Builds the "Owner: X, Session ID: Y" prefix threaded through Logger
        # calls for request correlation.
        owner = auth_info.get('user').get('company_id')
        session_id = cls.get_uuid()
        logging_info = 'Owner: {}, Session ID: {}'.format(owner, session_id)
        return logging_info
# Defaults applied to a newly created task before user-supplied values.
TASK_DEFAULT_ATTRIBUTES = dict(
    template_type='TASK',
    entity_ids=[],
    resource_ids=[],
    file_ids=[],
    worker_ids=[],
    document_ids=[],
    skill_ids=[],
    enable_time_window_display=False,
    unscheduled=False,
    is_linked=False,
    task_without_time=False,
    is_locked=False,
    is_booking=False,
    enable_expected_date_range=False,
)
# Defaults applied to a newly created route.
ROUTE_DEFAULT_ATTRIBUTES = dict(
    entity_ids=[],
    resource_ids=[],
    start_and_end_at_depot=False,
    is_published_once=False,
    padding_between_tasks=0,
)
# Server-managed / derived task fields that validation should skip.
TASK_ATTRIBUTES_TO_EXCLUDE_IN_VALIDATION = [
    'owner', 'status', 'status_id', 'status_title', 'notifications', 'notifications_sent', 'series',
    'pending_review_reminder_time', 'pending_review_reminder_attempts_left',
    'queued_task_name', 'is_archived', 'created', 'updated', 'routes', 'mileage',
    'travel_time', 'task_time', 'total_time', 'do_not_send_webhook_notification', 'entity_confirmation_statuses',
    'structured_entity_confirmation_statuses', 'task_final_confirmation_status', 'series_id', 'id', 'parent_task_external_id',
]
# Full set of recognised task attribute names.
TASK_ATTRIBUTES = ['created_by', 'source', 'source_id', 'template', 'template_type', 'title', 'details',
                   'start_datetime', 'start_datetime_original_iso_str', 'start_datetime_timezone', 'end_datetime',
                   'end_datetime_original_iso_str', 'end_datetime_timezone', 'extra_fields', 'entity_ids', 'resource_ids',
                   'customer_first_name', 'customer_last_name', 'customer_email', 'customer_company_name',
                   'customer_address_line_1', 'customer_address_line_2', 'customer_address', 'customer_city',
                   'customer_state', 'customer_country', 'customer_zipcode', 'customer_exact_location',
                   'customer_phone', 'customer_mobile_number', 'customer_id', 'customer_notes', 'customer_timezone',
                   'enable_time_window_display', 'time_window_start', 'use_assignee_color', 'file_ids', 'unscheduled',
                   'external_id', 'external_url', 'additional_addresses', 'current_destination', 'group_id', 'items',
                   'route_id', 'internal_route_id', 'duration', 'worker_ids', 'all_day', 'number_of_workers_required',
                   'company_id', 'template_extra_fields', 'document_ids', 'external_type', 'is_supply_provided_locked',
                   'is_supply_returned_locked', 'is_linked', 'forms', 'linked_internal_ref', 'linked_external_ref',
                   'customer_type', 'is_customer_address_geo_coded', 'use_lat_lng_address', 'skill_ids', 'task_without_time',
                   'is_locked', 'is_booking', 'booking_id', 'booking_slot_id', 'additional_info', 'external_resource_type',
                   'enable_expected_date_range', 'expected_start_datetime', 'expected_start_datetime_original_iso_str',
                   'expected_end_datetime', 'expected_end_datetime_original_iso_str', 'external_live_track_link',
                   'additional_contacts', 'recurring_tasks_settings_id', 'recurring_tasks_settings_title', 'created_by_user',
                   'updated_by_user', 'updated_by', 'position_in_route', 'owner', 'status', 'status_id', 'status_title',
                   'notifications', 'notifications_sent', 'series', 'pending_review_reminder_time', 'pending_review_reminder_attempts_left',
                   'queued_task_name', 'is_archived', 'routes', 'mileage', 'travel_time', 'task_time', 'total_time',
                   'do_not_send_webhook_notification', 'entity_confirmation_statuses', 'structured_entity_confirmation_statuses',
                   'task_final_confirmation_status', 'id', 'series_id', 'parent_task_external_id', 'due_datetime',
                   'due_datetime_original_iso_str', 'self_scheduling', 'is_without_datetime', 'basic_schedule', 'is_multi_day', 'activity_type',
                   'is_route_activity', 'checklists', 'checklist_items', 'external_integration_info', 'customer_name']
# Full set of recognised route attribute names.
ROUTE_ATTRIBUTES = ['owner', 'created_by', 'start_datetime', 'start_datetime_original_iso_str', 'end_datetime',
                    'end_datetime_original_iso_str', 'title', 'description', 'extra_fields', 'entity_ids', 'resource_ids', 'external_id',
                    'is_disabled', 'total_tasks', 'status', 'type', 'padding_between_tasks', 'start_and_end_at_depot', 'depot_addresses',
                    'is_published_once', 'group_id', 'color']
# Route-draft fields the editor may modify on an existing draft.
ATTRIBUTES_THAT_CAN_BE_UPDATED_IN_ROUTE_DRAFT = ['title', 'start_datetime', 'start_datetime_original_iso_str',
                                                 'end_datetime', 'end_datetime_original_iso_str', 'entity_ids',
                                                 'total_tasks', 'start_and_end_at_depot', 'depot_addresses',
                                                 'padding_between_tasks', 'resource_ids']
# Route-draft fields required when creating a brand new draft.
ATTRIBUTES_REQUIRED_TO_CREATE_NEW_ROUTE_DRAFT = ['title', 'start_datetime', 'start_datetime_original_iso_str',
                                                 'end_datetime', 'end_datetime_original_iso_str', 'entity_ids',
                                                 'total_tasks', 'start_and_end_at_depot', 'depot_addresses',
                                                 'padding_between_tasks', 'is_published_once']
# any change in below list should also be made in route.js file in front-end's helpers directory
ATTRIBUTES_THAT_CAN_BE_UPDATED_IN_TASK_DRAFT = ['start_datetime', 'start_datetime_original_iso_str',
                                                'start_datetime_timezone', 'end_datetime',
                                                'end_datetime_original_iso_str', 'end_datetime_timezone', 'duration',
                                                'entity_ids', 'resource_ids', 'unscheduled',
                                                'enable_time_window_display', 'time_window_start', 'is_locked', 'task_without_time',
                                                'position_in_route']
# Assumption:
# start_datetime attr will always come first and then end_datetime.
# any change in below list should consider this order.
ATTRIBUTES_REQUIRED_TO_CREATE_NEW_TASK_DRAFT = ['start_datetime', 'start_datetime_original_iso_str',
                                                'start_datetime_timezone', 'end_datetime',
                                                'end_datetime_original_iso_str', 'end_datetime_timezone', 'duration',
                                                'entity_ids', 'resource_ids', 'unscheduled',
                                                'internal_route_id', 'enable_time_window_display',
                                                'time_window_start', 'is_locked', 'task_without_time',
                                                'additional_route_ids']
# Draft fields whose change should surface a warning badge in the UI.
TASK_DRAFT_ATTRIBUTES_WHICH_INDICATES_WARNING_ON_UI = ['start_datetime', 'end_datetime', 'duration', 'entity_ids',
                                                       'resource_ids', 'enable_time_window_display', 'time_window_start',
                                                       'task_without_time']
from __future__ import annotations
from dataclasses import dataclass
import re
from typing import Iterator
from typing import Pattern
from typing import Union
from typing import Any
from typing_extensions import TypeGuard
class RouteSequence:
    """Two-letter / four-digit route sequence number (``AA0000`` .. ``ZZ9999``).

    Internally stored as an integer in ``[0, 6760000)`` (26 * 26 * 10000
    combinations). Supports comparison, iteration and integer-like
    arithmetic against ``str``, ``int`` and other ``RouteSequence`` operands.
    """
    __pattern: Pattern = re.compile("^[A-Z]{2}[0-9]{4}$")

    def __init__(self, seed: Union[int, str] = 'AA0000'):
        '''Route Sequence. Seed can be given either by:
        type: str, where the range is between AA0000 and ZZ9999.
        type: int, where the range is between 0 and 6759999.

        Raises ValueError for anything outside those ranges.
        '''
        if self.is_valid_int(seed):
            self.__seed = seed
        elif self.is_valid_str(seed):
            self.__seed = self.to_int(seed)
        else:
            raise ValueError

    @classmethod
    def is_valid_str(cls, seed: Any) -> bool:
        ''' Checks if a given string is a valid sequence number '''
        return isinstance(seed, str) and bool(re.findall(cls.__pattern, seed))

    @classmethod
    def is_valid_int(cls, seed: Any) -> bool:
        ''' Checks if a given integer is a valid sequence number '''
        # BUG FIX: the old range-only check accepted floats (e.g. 3.5), which
        # produced instances that crash later inside to_str(); require a real
        # int (bool still passes, as before, being an int subclass).
        return isinstance(seed, int) and 0 <= seed < 6760000

    @classmethod
    def from_int(cls, seed: int) -> RouteSequence:
        ''' Creates a new class instance using an integer as a seed '''
        if cls.is_valid_int(seed):
            return cls(cls.to_str(seed))
        # Message corrected: the upper bound is exclusive (max is 6759999).
        raise ValueError("Integer range is from 0 to 6759999")

    @classmethod
    def from_str(cls, seed: str) -> RouteSequence:
        ''' Creates a new class instance using an string as a seed '''
        return cls(seed)

    @staticmethod
    def to_str(value: int) -> str:
        ''' Converts a given integer to the corresponding string value '''
        a = chr(((value // 10000) // 26) % 26 + 65)   # first letter
        b = chr((value // 10000) % 26 + 65)           # second letter
        c = str(value % 10000).zfill(4)               # four digits
        return a + b + c

    @staticmethod
    def to_int(value: str) -> int:
        ''' Converts a given string to the corresponding integer value '''
        a = (ord(value[0]) - 65) * 260000
        b = (ord(value[1]) - 65) * 10000
        c = int(value[2:])
        return a + b + c

    @staticmethod
    def _coerce(other: Any):
        """Translate an operand into its integer seed; None if unsupported."""
        if isinstance(other, RouteSequence):
            return other.as_int()
        if isinstance(other, str):
            return RouteSequence.to_int(other)
        if isinstance(other, int):
            return other
        return None

    def as_int(self) -> int:
        return self.__seed

    def as_str(self) -> str:
        return self.to_str(self.__seed)

    def __str__(self) -> str:
        return f"{self.to_str(self.__seed)}"

    def __repr__(self) -> str:
        return f"RouteSequence({self.to_str(self.__seed)})"

    def __int__(self):
        return self.__seed

    def __next__(self) -> RouteSequence:
        # Advances in place and also returns a fresh instance (as before).
        if self.__seed == 6760000 - 1:
            raise StopIteration
        self.__seed += 1
        return type(self)(self.__seed)

    def __iter__(self) -> Iterator[RouteSequence]:
        while True:
            try:
                yield self.__next__()
            except StopIteration:
                break

    def __eq__(self, other: Union[str, int, RouteSequence]) -> bool:  # type: ignore
        if isinstance(other, int):
            return self.as_int() == other
        return self.as_str() == other

    def __gt__(self, other: Union[str, int, RouteSequence]) -> bool:
        return str(self) > str(other)

    def __lt__(self, other: Union[str, int, RouteSequence]) -> bool:
        return str(self) < str(other)

    def __add__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        step = self._coerce(other)
        if step is None:
            # BUG FIX: the old fallback recursed into ``self + other`` forever
            # for unsupported operand types; defer to Python's TypeError.
            return NotImplemented
        new_value = self.__seed + step
        if new_value >= 6760000:
            raise OverflowError("Upper limit of 6759999 / ZZ9999 surpassed")
        return RouteSequence.from_int(new_value)

    def __sub__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        step = self._coerce(other)
        if step is None:
            # BUG FIX: same infinite-recursion fallback as __add__.
            return NotImplemented
        new_value = self.__seed - step
        if new_value < 0:
            raise OverflowError("Lower limit of 0 / AA0000 surpassed")
        return RouteSequence(new_value)

    def __mul__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        factor = self._coerce(other)
        if factor is None:
            # BUG FIX: same infinite-recursion fallback as __add__.
            return NotImplemented
        # Routed through to_str, so products beyond ZZ9999 silently wrap in
        # the letter part — preserved from the original implementation.
        return RouteSequence(self.to_str(self.__seed * factor))

    def __div__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        divisor = self._coerce(other)
        if divisor is None:
            # BUG FIX: the old fallback (``self // other``) bounced between
            # __floordiv__ and __div__ forever for unsupported types.
            return NotImplemented
        return RouteSequence(self.to_str(self.__seed // divisor))

    def __truediv__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        return self.__div__(other)

    def __floordiv__(self, other: Union[str, int, RouteSequence]) -> RouteSequence:
        return self.__div__(other)
import datetime
import json
from typing import Tuple, Dict
import jwt
import requests
import pandas as pd
from .utils import date_range, endpoints
ACCESS_TOKEN_EXPIRY = 3600
def get_apple_data(access_token: str, org_id: int, start_date: "datetime.datetime",
                   end_date: "datetime.datetime") -> "pd.DataFrame":
    """Fetch Apple Search Ads campaign data for one organization.

    Splits the inclusive [start_date, end_date] window into API-sized
    sub-ranges, requests each from the campaign report endpoint, concatenates
    the per-range frames and post-processes the combined result.

    Parameters
    ----------
    access_token : str
        Valid access token with permission to read the organization's account.
    org_id : int
        Organization whose ad account is queried.
    start_date, end_date : datetime.date
        Inclusive reporting window.

    Returns
    -------
    pd.DataFrame
        Search-ads rows covering the requested window.
    """
    frames = []
    for range_start, range_end in date_range.calculate_date_ranges(start_date, end_date):
        response = requests.post(
            url=endpoints.APPLE_CAMPAIGN_API_ENDPOINT,
            json=_post_json_data(start_date=range_start, end_date=range_end),
            headers=_post_request_header(access_token=access_token, org_id=org_id)
        )
        frames.append(_process_resp(json.loads(response.text)))
    return _process_output_df(full_df=pd.concat(frames))
def request_access_token(client_id: str, client_secret: str) -> dict:
    """POST the OAuth2 client-credentials grant and return the raw response.

    Parameters
    ----------
    client_id : str
        Valid client ID
    client_secret : str
        Valid client secret

    Returns
    -------
    Response
        POST request response carrying the refreshed access token
    """
    request_headers = {
        "Host": "appleid.apple.com",
        "Content-Type": "application/x-www-form-urlencoded",
    }
    form_data = {
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
        "scope": "searchadsorg",
    }
    return requests.post(
        url=endpoints.OAUTH2_API_ENDPOINT,
        headers=request_headers,
        data=form_data,
    )
def validate_client_secret(client_secret: str, client_id: str, team_id: str, key_id: str, private_key: str) -> str:
    """Return the client secret, refreshing it first if it has expired.

    Parameters
    ----------
    client_secret : str
        Client secret JWT loaded from file
    client_id : str
        Valid client ID of the user requesting the data
    team_id : str
        Valid team ID from the Apple Search Ads platform
    key_id : str
        Valid key ID from the Apple Search Ads platform
    private_key : str
        EC private key used to sign a replacement secret if needed

    Returns
    -------
    client_secret : str
        Client secret validated and refreshed if necessary
    """
    # Decode without signature verification just to read the "exp" claim.
    # options={"verify_signature": False} replaces the verify= kwarg removed
    # in PyJWT 2.0, and "algorithms" must be a list there.
    token_payload = jwt.decode(
        client_secret,
        options={"verify_signature": False},
        algorithms=["ES256"]
    )
    if _token_expired(token_payload["exp"]):
        client_secret = refresh_client_secret(
            client_id=client_id,
            team_id=team_id,
            key_id=key_id,
            private_key=private_key
        )
    return client_secret
def refresh_client_secret(client_id: str, team_id: str, key_id: str, private_key: str) -> str:
    """Return a refreshed client secret (a signed ES256 JWT)

    Parameters
    ----------
    client_id : str
        Valid client ID of the user requesting the data
    team_id : str
        Valid team ID from Apple Search Ads platform
    key_id : str
        Valid key ID from Apple Search Ads platform
    private_key : str
        EC private key used to sign the JWT

    Returns
    -------
    client_secret : str
        Refreshed, valid client secret
    """
    # Necessary metadata
    AUDIENCE = 'https://appleid.apple.com'
    ALGORITHM = 'ES256'
    # Datetimes associated with JWT; secret is valid for 180 days
    issued_at_timestamp, expiration_timestamp = _calculate_expiration_timestamp(86400*180)
    # JWT payload
    headers = {
        "alg": ALGORITHM,
        "kid": key_id
    }
    payload = {
        "sub": client_id,
        "aud": AUDIENCE,
        "iat": issued_at_timestamp,
        "exp": expiration_timestamp,
        "iss": team_id
    }
    # Encoded secret
    client_secret = jwt.encode(
        payload=payload,
        headers=headers,
        algorithm=ALGORITHM,
        key=private_key
    )
    # PyJWT 1.x returns bytes while 2.x returns str; normalise to str so the
    # function behaves the same under either version.
    if isinstance(client_secret, bytes):
        client_secret = client_secret.decode("utf-8")
    return client_secret
def refresh_access_token(client_id: str, client_secret: str) -> str:
    """Request a new access token and stamp it with issue/expiry timestamps.

    Parameters
    ----------
    client_id : str
        Valid client ID of the user requesting the data
    client_secret : str
        Valid client secret token

    Returns
    -------
    credentials_json : dict
        Token response JSON augmented with "iat" and "exp" UNIX timestamps
    """
    issued_at, expires_at = _calculate_expiration_timestamp(ACCESS_TOKEN_EXPIRY)
    resp = request_access_token(client_id=client_id, client_secret=client_secret)
    # Attach issue and expiration dates so callers can test for expiry later.
    credentials_json = json.loads(resp.text)
    credentials_json["iat"] = issued_at
    credentials_json["exp"] = expires_at
    return credentials_json
def _token_expired(exp_timestamp: int, cushion: int = 0) -> bool:
"""Return boolean checking if current UTC is equal to or greater than
expiration timestamp
Parameters
----------
exp_timestamp : int
UNIX timestamp for expiration
cushion : int, optional
Offset cushion to subtract from expiration date - done to prevent
a token that's going to expire in i.e. 10 seconds that wouldn't be
refreshed but would be invalid by the time it can be used
Returns
-------
expired : bool
True if current time is greater than or equal to expiration else False
"""
expired = datetime.datetime.utcnow() >= datetime.datetime.fromtimestamp(exp_timestamp - int(cushion))
return expired
def _calculate_expiration_timestamp(expiration_offset: "datetime.datetime") -> Tuple[int, int]:
"""Return a UNIX timestamp for the issued time and the expiration time in UTC
Parameters
----------
expiration_offset : int
Time in seconds from issued timestamp to expire
Returns
-------
issued_at_timestamp : int
UNIX timestamp for time issued
expiration_timestmap : int
UNIX timestamp for expiration
"""
issued_at_timestamp = int(datetime.datetime.utcnow().timestamp())
expiration_timestamp = issued_at_timestamp + expiration_offset
return issued_at_timestamp, expiration_timestamp
def _process_output_df(full_df: "pd.DataFrame") -> "pd.DataFrame":
"""Return output DataFrame after processing"""
full_df = full_df.sort_values(["campaign_name", "date"])
full_df = full_df[
["date", "campaign_id", "campaign_name", "impressions", "spend", "taps",
"installs", "new_downloads", "redownloads", "lat_on_installs",
"lat_off_installs", "ttr", "cpa", "cpt", "cpm", "conversion_rate"]
]
return full_df
def _post_json_data(start_date: "datetime.date", end_date: "datetime.date") -> Dict[str, str]:
"""Return dictionary of JSON data to be POSTed to API endpoint"""
return {
"startTime": f"{start_date.strftime('%Y-%m-%d')}",
"endTime": f"{end_date.strftime('%Y-%m-%d')}",
"selector": {
"orderBy": [
{
"field": "countryOrRegion",
"sortOrder": "ASCENDING"
}
],
},
"groupBy": [
"countryOrRegion"
],
"timeZone": "UTC",
"returnRowTotals": False,
"granularity": "DAILY",
"returnGrandTotals": False
}
def _process_resp(resp: dict) -> "pd.DataFrame":
    """Flatten the per-campaign rows of one API response into a single DataFrame."""
    campaign_rows = resp["data"]["reportingDataResponse"]["row"]
    return pd.concat(_process_campaign(campaign) for campaign in campaign_rows)
def _process_campaign(campaign: dict) -> "pd.DataFrame":
"""Return DataFrame containing processed campaign raw API response data"""
processed_results = [_process_result(result) for result in campaign["granularity"]]
campaign_df = pd.DataFrame(processed_results)
campaign_df = campaign_df.assign(
campaign_id=campaign["metadata"]["campaignId"],
campaign_name=campaign["metadata"]["campaignName"]
)
return campaign_df
def _process_result(result: Dict[str, str]) -> Dict[str, str]:
"""Return dictionary of parsed data from raw API response"""
return {
"impressions": result["impressions"],
"taps": result["taps"],
"installs": result["installs"],
"new_downloads": result["newDownloads"],
"redownloads": result["redownloads"],
"lat_on_installs": result["latOnInstalls"],
"lat_off_installs": result["latOffInstalls"],
"ttr": result["ttr"],
"cpa": result["avgCPA"]["amount"],
"cpt": result["avgCPT"]["amount"],
"cpm": result["avgCPM"]["amount"],
"spend": result["localSpend"]["amount"],
"conversion_rate": result["conversionRate"],
"date": result["date"]
}
def _post_request_header(access_token: str, org_id: int) -> Dict[str, str]:
"""Return an authorized header for requesting from API endpoints"""
return {
"Authorization": f"Bearer {access_token}",
"X-AP-Context": f"orgId={org_id}"
} | /route1io-connectors-0.15.3.tar.gz/route1io-connectors-0.15.3/route1io_connectors/apple_search_ads.py | 0.834339 | 0.261072 | apple_search_ads.py | pypi |
import datetime
import json
from typing import Dict, List
from six import string_types
from urllib.parse import urlencode
import requests
import pandas as pd
import numpy as np
from .utils import date_range, endpoints
def get_tiktok_data(
        access_token: str,
        advertiser_id: int,
        data_level: str = "AUCTION_AD",
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: "datetime.datetime" = None,
        end_date: "datetime.datetime" = None,
    ) -> "pd.DataFrame":
    """Return pd.DataFrame of TikTok Marketing API data for an authorized advertiser.

    Parameters
    ----------
    access_token : str
        Valid access token with permissions to access advertiser's ad account
        data via API
        https://ads.tiktok.com/marketing_api/docs?id=1701890912382977
    advertiser_id : int
        Ad account we want to pull data from
    data_level : str
        Level of data to pull from. Campaign ID grouping needs AUCTION_CAMPAIGN,
        Adgroup ID grouping needs ADGROUP_ADGROUP, etc. Default is AUCTION_AD.
    dimensions : List[str]
        List of dimension(s) to group by. Each request can only have one ID dimension
        and one time dimension.
        Default is ['ad_id', 'stat_time_day'].
        https://ads.tiktok.com/marketing_api/docs?id=1707957200780290
    metrics : List[str]
        Metric columns to request. Default is campaign/adgroup names, ad_id,
        spend, impressions, reach, and clicks.
    start_date : datetime.date
        Inclusive datetime object start date to pull data. Default is seven days
        before end_date.
    end_date : datetime.date
        Inclusive datetime object end date to pull data. Default is today.

    Returns
    -------
    df : pd.DataFrame
        DataFrame containing ad data between start and end date for the advertiser
    """
    # None sentinels instead of mutable list defaults (shared-state bug risk).
    if dimensions is None:
        dimensions = ["ad_id", "stat_time_day"]
    if metrics is None:
        metrics = ["campaign_name", "adgroup_name", "ad_id", "spend",
                   "impressions", "reach", "clicks"]
    if end_date is None:
        end_date = datetime.datetime.today()
    if start_date is None:
        start_date = end_date - datetime.timedelta(days=7)
    # One GET request per API-sized date sub-range; loop names avoid shadowing
    # the start_date/end_date parameters.
    date_ranges = date_range.calculate_date_ranges(start_date, end_date)
    date_range_dfs = []
    for range_start, range_end in date_ranges:
        query_param_str = _format_url_query_param_string(
            advertiser_id=advertiser_id,
            data_level=data_level,
            dimensions=dimensions,
            metrics=metrics,
            start_date=range_start,
            end_date=range_end
        )
        url = f"{endpoints.TIKTOK_REPORTING_ENDPOINT}?{query_param_str}"
        resp = requests.get(
            url=url,
            headers={"Access-Token": access_token}
        )
        date_range_dfs.append(_process_response(resp))
    df = pd.concat(date_range_dfs)
    return df
def _format_url_query_param_string(
        advertiser_id: int,
        data_level: str,
        dimensions: List[str],
        metrics: List[str],
        start_date: "datetime.date",
        end_date: "datetime.date"
    ) -> str:
    """Compose and URL-encode the reporting query parameters for a GET request."""
    return _url_encoded_query_param(
        query_param_dict=_format_query_param_dict(
            advertiser_id=advertiser_id,
            data_level=data_level,
            dimensions=dimensions,
            metrics=metrics,
            start_date=start_date,
            end_date=end_date,
        )
    )
def _url_encoded_query_param(query_param_dict: Dict[str, str]) -> str:
"""Return URL encoded query parameters for GET requesting TikTok Marketing
API reporting endpoint
"""
url = urlencode(
{k: v if isinstance(v, string_types) else json.dumps(v)
for k, v in query_param_dict.items()}
)
return url
def _format_query_param_dict(
advertiser_id: int,
data_level: str,
dimensions: List[str],
metrics: List[str],
start_date: "datetime.date",
end_date: "datetime.date"
) -> Dict[str, str]:
"""Return dictionary with data we will request from TikTok Marketing API
reporting endpoint
"""
return {
'advertiser_id': advertiser_id,
'service_type': 'AUCTION',
'report_type': 'BASIC',
'data_level': data_level,
'dimensions': dimensions,
'metrics': metrics,
'start_date': start_date.strftime("%Y-%m-%d"),
'end_date': end_date.strftime("%Y-%m-%d"),
'page': 1,
'page_size': 200
}
def _process_response(resp: Dict[str, str]) -> "pd.DataFrame":
"""Return a DataFrame containing raw API response data"""
resp_json = json.loads(resp.text)
resp_data = resp_json['data']['list']
rows = [{**row["metrics"], **row["dimensions"]} for row in resp_data]
df = pd.DataFrame(rows)
return df | /route1io-connectors-0.15.3.tar.gz/route1io-connectors-0.15.3/route1io_connectors/tiktok.py | 0.803829 | 0.294437 | tiktok.py | pypi |
import json
from typing import Dict, List
import requests
import pandas as pd
from .utils import endpoints
def get_linkedin_data(ad_account_id: str, access_token: str,
                      start_date: "datetime.date") -> "pd.DataFrame":
    """Return daily LinkedIn campaign performance pulled via the Marketing API."""
    id_to_name = get_campaigns(access_token=access_token)
    df = get_ad_analytics(
        ad_account_id=ad_account_id,
        access_token=access_token,
        start_date=start_date,
    )
    # Translate campaign IDs into human-readable names.
    df['campaign'] = df['id'].map(id_to_name)
    return df[["date", "campaign", "impressions", "clicks", "cost"]]
def get_ad_analytics(ad_account_id: str, access_token: str,
                     start_date: "datetime.date") -> "pd.DataFrame":
    """Fetch daily, campaign-pivoted analytics rows from the adAnalyticsV2 endpoint."""
    requested_fields = [
        "impressions",
        "clicks",
        "costInUsd",
        "dateRange",
        "pivotValue",
    ]
    request_url = _format_analytics_request_url(
        ad_account_id=ad_account_id,
        start_date=start_date,
        fields=requested_fields,
    )
    resp = _authorized_request(url=request_url, access_token=access_token)
    return _process_ad_analytics_resp(resp=resp)
def _process_ad_analytics_resp(resp) -> "pd.DataFrame":
json_data = json.loads(resp.text)
date = lambda x: x['dateRange']['start']
parsed_data = [
{
"date": f"{date(row)['year']}-{date(row)['month']}-{date(row)['day']}",
"id": row["pivotValue"].split(":")[-1],
"impressions": row["impressions"],
"cost": row["costInUsd"],
"clicks": row["clicks"]
} for row in json_data['elements']
]
return pd.DataFrame(parsed_data)
def get_campaigns(access_token: str) -> Dict[str, str]:
    """Return a {campaign_id: campaign_name} map for the authorised account."""
    resp = _authorized_request(
        url=f"{endpoints.AD_CAMPAIGNS_ENDPOINT}?q=search",
        access_token=access_token,
    )
    return _process_campaigns_resp(resp=resp)
def _process_campaigns_resp(resp) -> "pd.DataFrame":
"""Return a DataFrame with the processed campaign data"""
json_data = json.loads(resp.text)
parsed_data = {str(campaign["id"]): campaign["name"] for campaign in json_data['elements']}
return parsed_data
def _format_analytics_request_url(ad_account_id: str, start_date: "datetime.date",
                                  fields: List[str]) -> str:
    """Build the adAnalyticsV2 GET URL for daily, campaign-pivoted metrics."""
    query_parts = [
        "q=analytics",
        "pivot=CAMPAIGN",
        f"dateRange.start.day={start_date.day}",
        f"dateRange.start.month={start_date.month}",
        f"dateRange.start.year={start_date.year}",
        "timeGranularity=DAILY",
        f"fields={','.join(fields)}",
        f"accounts=urn:li:sponsoredAccount:{ad_account_id}",
    ]
    return f"{endpoints.AD_ANALYTICS_ENDPOINT}?" + "&".join(query_parts)
def _authorized_request(url: str, access_token: str) -> "requests.models.Response":
    """GET *url* with a Bearer-token Authorization header."""
    auth_header = {"Authorization": f"Bearer {access_token}"}
    return requests.get(url=url, headers=auth_header)
import asyncio
import json
from typing import Dict, List
import aiohttp
import pandas as pd
def get_share_of_clicks_trend(
    api_key: str, domain_id: str, date_start: str, date_end: str, competitors: List[str] = None,
    search_term_groups: List[str] = None, whole_market: bool = False, traffic_type: str = "paid",
    device: str = "desktop"
) -> "pd.DataFrame":
    """Return pandas.DataFrame of share of clicks trend data

    Parameters
    ----------
    api_key : str
        Valid API key for accessing the Adthena API
    domain_id : str
        Domain ID of the account you are accessing data for
    date_start : str
        Start date to pull data for in YYYY-MM-DD
    date_end : str
        End date to pull data for in YYYY-MM-DD
    competitors : List[str] = None
        List of competitors to pull data for
    search_term_groups : List[str] = None
        List of search term groups to pull data for (one request per group)
    whole_market : bool = False
        Pull data from Whole Market or My Terms
    traffic_type : str = "paid"
        Traffic type to pull i.e. paid/organic/total/totalpaid
    device : str = "desktop"
        Device to pull data for

    Returns
    -------
    df : pd.DataFrame
        DataFrame constructed from the processed JSON responses
    """
    def _build_url(groups):
        # All parameters are fixed except the search-term-group filter.
        return _construct_share_of_clicks_trend_url(
            domain_id=domain_id, date_start=date_start, date_end=date_end,
            competitors=competitors, search_term_groups=groups,
            whole_market=whole_market, traffic_type=traffic_type, device=device)

    if search_term_groups is not None:
        urls = [(_build_url([term]), term) for term in search_term_groups]
    else:
        urls = [(_build_url(None), None)]
    return asyncio.run(
        _request_all_urls(
            urls=urls,
            headers=_construct_header(api_key=api_key)
        )
    )
async def _request_all_urls(urls: List[str], headers: Dict[str, str]) -> "pd.DataFrame":
    """Fetch every URL concurrently and concatenate the parsed responses.

    *urls* is a list of ``(url, search_term)`` pairs; each response body is
    parsed with ``_process_response`` and tagged with its search term. The
    previous version awaited each request sequentially, defeating the purpose
    of going async; ``asyncio.gather`` runs them concurrently and preserves
    input order, so bodies line up with ``urls``.
    """
    async with aiohttp.ClientSession() as session:
        async def _fetch(url: str) -> str:
            async with session.get(url, headers=headers) as resp:
                return await resp.text()
        bodies = await asyncio.gather(*(_fetch(url) for url, _ in urls))
    response_dfs = [
        _process_response(body, search_term)
        for body, (_, search_term) in zip(bodies, urls)
    ]
    return pd.concat(response_dfs)
def _construct_share_of_clicks_trend_url(domain_id: str, date_start: str,
                                         date_end: str, competitors: List[str],
                                         search_term_groups: List[str],
                                         whole_market: bool, traffic_type: str, device: str) -> str:
    """Build the full share-of-clicks-trend request URL for the given filters."""
    return "{base}/share-of-clicks-trend/all?{query}".format(
        base=_construct_base_api_url(domain_id),
        query=_construct_api_url_query_params(
            date_start=date_start, date_end=date_end, competitors=competitors,
            search_term_groups=search_term_groups, whole_market=whole_market,
            traffic_type=traffic_type, device=device,
        ),
    )
def _construct_api_url_query_params(date_start: str, date_end: str, competitors: List[str],
                                    search_term_groups: List[str], whole_market: bool,
                                    traffic_type: str, device: str) -> str:
    """Render the user's filters as an Adthena query string."""
    parts = [
        f"periodstart={date_start}&periodend={date_end}",
        f"&traffictype={traffic_type}",
        f"&device={device}",
    ]
    if competitors is not None:
        parts.append(_combine_query_params('competitor', competitors))
    if search_term_groups is not None:
        # Adthena expects spaces inside term-group names encoded as '+'.
        plus_encoded = [term.replace(" ", "+") for term in search_term_groups]
        parts.append(_combine_query_params('kg', plus_encoded))
    if whole_market:
        parts.append("&wholemarket=true")
    return "".join(parts)
def _construct_base_api_url(domain_id: str) -> "str":
"""Return base URL from given domaind ID"""
return f"https://api.adthena.com/wizard/{domain_id}"
def _process_response(resp: str, search_term: str) -> "pd.DataFrame":
"""Return DataFrame of processed response data"""
resp_dict = json.loads(resp)
all_data = []
for competitor_data in resp_dict:
competitor = competitor_data["Competitor"]
data = competitor_data["Data"]
for date_dict in data:
date_dict["Competitor"] = competitor
all_data.append(date_dict)
df = pd.DataFrame(all_data)
df = df.assign(Search_Term=search_term)
return df
def _combine_query_params(key: str, values: List[str]) -> str:
"""Return string of combined query parameters"""
return f"&{key}=" + f"&{key}=".join(values)
def _construct_header(api_key: str) -> Dict[str, str]:
"""Return header dictionary for POST request with API key"""
return {"Adthena-api-key": api_key, "Accept": "application/json"}
if __name__ == "__main__":
    # Manual smoke test: requires Adthena credentials in the environment.
    import os
    # Add Adthena API key and domain ID as environment vars to call function
    API_KEY = os.environ.get("ADTHENA_API_KEY")
    DOMAIN_ID = os.environ.get("ADTHENA_DOMAIN_ID")
    df = get_share_of_clicks_trend(
        api_key=API_KEY,
        domain_id=DOMAIN_ID,
        date_start="2022-03-10",
        date_end="2022-03-23",
        whole_market=True
    )
import tempfile
from typing import Union, Sequence, Dict, Tuple, List
from pathlib import Path
import boto3
from . import onedrive
FilenameVar = Union[str, Sequence[Union[str, None]]]
def get_most_recent_filename(s3, bucket: str, prefix: str = "") -> str:
    """Return the key name as it appears in s3 bucket of the most recently modified
    file in bucket

    Parameters
    ----------
    s3
        Connection to AWS S3 bucket
    bucket : str
        Name of the bucket that contains data we want
    prefix : str, optional
        Prefix to filter data

    Returns
    -------
    key : str
        Name of the most recently modified file as it appears in S3 bucket

    Raises
    ------
    FileNotFoundError
        If no objects exist in the bucket under the given prefix
    """
    # pylint: disable=unsubscriptable-object
    paginator = s3.get_paginator("list_objects_v2")
    page_iterator = paginator.paginate(Bucket=bucket, Prefix=prefix)
    latest = None
    for page in page_iterator:
        # Pages with no matching objects omit the "Contents" key entirely.
        if "Contents" in page:
            page_latest = max(page["Contents"], key=lambda obj: obj["LastModified"])
            if latest is None or page_latest["LastModified"] > latest["LastModified"]:
                latest = page_latest
    if latest is None:
        # Previously this crashed with TypeError on None; fail with a clear error.
        raise FileNotFoundError(
            f"No objects found in bucket {bucket!r} with prefix {prefix!r}"
        )
    return latest["Key"]
def connect_to_s3(aws_access_key_id: str, aws_secret_access_key: str, region_name: str):
    """Return a boto3 S3 client authenticated with the given credentials.

    Parameters
    ----------
    aws_access_key_id : str
        AWS access key
    aws_secret_access_key : str
        AWS secret access key
    region_name : str
        Default region name

    Returns
    -------
    s3
        boto3 S3 client
    """
    return boto3.client(
        "s3",
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region_name,
    )
def upload_to_s3(s3, bucket: str, filename: Union[str, Sequence[str]],
                 key: Union[str, Sequence[str]] = None) -> None:
    """Upload one or more local files to an AWS S3 bucket.

    Parameters
    ----------
    s3
        Connection to s3 bucket
    filename : str
        Local filepath(s) of file(s) to be uploaded
    bucket : str
        Name of s3 bucket to upload file to
    key : str (optional)
        Remote filename(s) to upload as; defaults to each file's basename
    """
    key_map = _create_filename_key_map(filename, key, filename_required=True)
    for s3_key, local_path in key_map.items():
        s3.upload_file(
            Filename=local_path,
            Bucket=bucket,
            Key=s3_key,
        )
def download_from_s3(s3, bucket: str, key: str, filename: str = None) -> List[str]:
    """Download object(s) from an S3 bucket and return the local paths written.

    Parameters
    ----------
    s3
        Connection to s3 bucket
    bucket : str
        Name of s3 bucket to download file from
    key : str
        Remote filename(s) to download from the bucket
    filename : str, optional
        Local path(s) to write to; defaults to each key's basename
    """
    key_map = _create_filename_key_map(filename, key, key_required=True)
    for s3_key, local_path in key_map.items():
        s3.download_file(
            Bucket=bucket,
            Key=s3_key,
            Filename=local_path,
        )
    return list(key_map.values())
def copy_object_to_onedrive(s3, bucket: str, key: str, access_token: str,
                            drive_id: str, remote_fpath: str = None) -> None:
    """Copy one S3 object into OneDrive via a local temporary file.

    Parameters
    ----------
    s3
        Valid S3 connection created using aws.connect_to_s3
    bucket : str
        Existing bucket on AWS
    key : str
        Key name of the file as it appears in S3
    access_token : str
        Valid access token for accessing OneDrive
    drive_id : str
        OneDrive drive ID to upload to
    remote_fpath : str = None
        Remote filepath to upload to; defaults to the S3 key at the drive root
    """
    target_path = f"/{key}" if remote_fpath is None else remote_fpath
    # Stage the object on disk; the temp file is removed when the block exits.
    with tempfile.NamedTemporaryFile("wb+") as tmp:
        download_from_s3(s3=s3, bucket=bucket, key=key, filename=tmp.name)
        onedrive.upload_file(
            access_token=access_token,
            drive_id=drive_id,
            remote_fpath=target_path,
            local_fpath=tmp.name,
        )
def _create_filename_key_map(filename: FilenameVar,
                             key: FilenameVar,
                             filename_required: bool = False,
                             key_required: bool = False) -> Dict[str, str]:
    """Map each S3 key to its local filename, filling gaps from the other side."""
    filenames = _coerce_input_to_list(filename)
    keys = _coerce_input_to_list(key)
    _filenames_and_keys_are_valid_inputs(
        filename=filenames,
        key=keys,
        filename_required=filename_required,
        key_required=key_required,
    )
    # Whichever side is required acts as the source of truth for missing names.
    if filename_required:
        keys = _fill_values(filenames, keys)
    elif key_required:
        filenames = _fill_values(keys, filenames)
    return dict(zip(keys, filenames))
def _fill_values(full_seq, missing_seq) -> List[str]:
"""Fill missing values with names created from the full sequence"""
missing_seq_is_empty = len(missing_seq) == 0
if missing_seq_is_empty:
new_missing_seq = [Path(fpath).name for fpath in full_seq]
else:
new_missing_seq = []
for full_value, missing_value in zip(full_seq, missing_seq):
# If value is "" or None then use good value from assumed full seq
if _bad_seq_value(missing_value):
value = full_value
else:
value = missing_value
new_missing_seq.append(value)
return(new_missing_seq)
def _bad_seq_value(val: Union[str, None]) -> bool:
"""Returns True if string or None"""
return val == "" or val is None
def _filenames_and_keys_are_valid_inputs(filename: Tuple[str],
                                         key: Tuple[str],
                                         filename_required: bool = False,
                                         key_required: bool = False):
    """Run all validation checks on user input, raising ValueError on failure."""
    _validate_lengths(filename, key)
    for seq, required, label in (
        (filename, filename_required, "Filename"),
        (key, key_required, "Key"),
    ):
        _validate_input(seq, required, label)
def _validate_lengths(filename: Tuple[str], key: Tuple[str]) -> None:
"""If lengths of both are greater than zero but do not match raise ValueError"""
filename_num = len(filename)
key_num = len(key)
if filename_num > 0 and key_num > 0:
if filename_num != key_num:
raise ValueError("Filename and key cannot both be greater than zero and unequal length as this means the keys won't map together properly")
def _validate_input(seq: Tuple[str], required: bool, name: str) -> None:
"""Validate input is correct otherwise raise ValueError"""
seq_is_zero = len(seq) == 0
contains_none = _sequence_contains_none(seq)
if required and contains_none or required and seq_is_zero:
raise(ValueError(f"{name} cannot be missing or contain NoneType values!"))
def _sequence_contains_none(seq: Tuple[str]) -> bool:
"""Return True if None in sequence else False"""
return any([val is None for val in seq])
def _coerce_input_to_list(seq: FilenameVar) -> List[str]:
    """Normalise a str / None / sequence argument to a plain list of values.

    (The previous annotation/docstring said "tuple"; the function has always
    returned a list.)
    """
    if seq is None:
        return []
    if isinstance(seq, str):
        # A single bare string becomes a one-element list, not a char list.
        return [seq]
    return list(seq)
import time
import pandas as pd
from facebook_business.api import FacebookAdsApi
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.ad import Ad
def get_insights(
    access_token: str,
    ad_account_id: str,
    fields: list = None,
    params: dict = None
) -> "pd.DataFrame":
    """
    Return a pandas.DataFrame containing insights associated with a Facebook ad
    account.

    Parameters
    ----------
    access_token : str
        Access token with ads_read permission generated from a Facebook app.
    ad_account_id : str
        Valid ad account ID (i.e. act_123456789101112)
    fields : list
        List of valid Facebook Marketing API fields.
        See Fields table at https://developers.facebook.com/docs/marketing-api/insights/parameters/v10.0
        for docs on available fields.
    params : dict
        Dict of valid Facebook Marketing API parameters.
        See Parameters table at https://developers.facebook.com/docs/marketing-api/insights/parameters/v10.0
        for docs on available parameters.

    Returns
    -------
    insights_df : pandas.DataFrame
        pandas.DataFrame containing the requested insights data from the
        Facebook Marketing API
    """
    # Default fields/params
    if fields is None:
        fields = ["campaign_name", "adset_name", "ad_name", "clicks",
                  "impressions", "reach", "ctr", "actions", "spend"]
    if params is None:
        params = {"date_preset": "last_30d", "time_increment": 1}
    # Get insights data from the Facebook Marketing API
    ad_account = _connect_ad_account(
        access_token=access_token, ad_account_id=ad_account_id
    )
    insights = []
    for ad in _get_ads(ad_account=ad_account):
        # Throttle: Facebook rejects bursts of per-ad insight requests made
        # too quickly, so pause briefly between ads.
        time.sleep(.5)
        insights += _get_ad_insights(ad=ad, fields=fields, params=params)
    # Process the returned insights data
    insight_dicts = [_construct_insights_dict(insight) for insight in insights]
    insights_df = pd.DataFrame(insight_dicts)
    insights_df = _wrangle_dataframe(insights_df)
    return insights_df
def get_age_gender_insights(
    access_token: str,
    ad_account_id: str,
) -> "pd.DataFrame":
    """
    Return a pandas.DataFrame of last-30-day insights for a Facebook ad account
    broken down by age and gender.

    Parameters
    ----------
    access_token : str
        Access token with ads_read permission generated from a Facebook app.
    ad_account_id : str
        Valid ad account ID (i.e. act_123456789101112)

    Returns
    -------
    age_gender_df : pandas.DataFrame
        Insights data broken down by age and gender
    """
    breakdown_params = {
        "date_preset": "last_30d",
        "time_increment": 1, "breakdowns": ["age", "gender"]
    }
    return get_insights(
        access_token=access_token,
        ad_account_id=ad_account_id,
        fields=["campaign_name", "adset_name", "ad_name", "clicks",
                "impressions", "reach", "ctr", "actions", "spend"],
        params=breakdown_params,
    )
def get_region_insights(
    access_token: str,
    ad_account_id: str,
) -> "pd.DataFrame":
    """
    Return a pandas.DataFrame of last-30-day insights for a Facebook ad account
    broken down by region.

    Parameters
    ----------
    access_token : str
        Access token with ads_read permission generated from a Facebook app.
    ad_account_id : str
        Valid ad account ID (i.e. act_123456789101112)

    Returns
    -------
    region_df : pandas.DataFrame
        Insights data broken down by region
    """
    breakdown_params = {
        "date_preset": "last_30d",
        "time_increment": 1, "breakdowns": ["region"]
    }
    return get_insights(
        access_token=access_token,
        ad_account_id=ad_account_id,
        fields=["campaign_name", "adset_name", "ad_name", "clicks",
                "impressions", "reach", "ctr", "actions", "spend"],
        params=breakdown_params,
    )
def _connect_ad_account(access_token: str, ad_account_id: str) -> "AdAccount":
    """Initialise the Marketing API session and return the ad-account handle."""
    FacebookAdsApi.init(access_token=access_token)
    return AdAccount(ad_account_id)
def _get_ads(ad_account: "AdAccount") -> list:
"""Return list of Ad instances associated with an ad account"""
ads = ad_account.get_ads()
return list(ads)
def _get_ad_insights(ad: "Ad", fields: list, params: dict) -> list:
"""Return list of insights for a specific ad"""
insights = ad.get_insights(
fields=fields,
params=params
)
return list(insights)
def _construct_insights_dict(insight) -> dict:
"""Return a list of dictionaries of parsed insights data"""
data_dict = {key: val for key,
val in insight.items() if key != "actions"}
try:
actions_dict = {action["action_type"]: action["value"]
for action in insight["actions"]}
except KeyError:
actions_dict = {}
combined_dict = {**data_dict, **actions_dict}
return combined_dict
def _wrangle_dataframe(insights_df: "pd.DataFrame") -> "pd.DataFrame":
"""Return a wrangled DataFrame that is in the final expected format"""
insights_df = insights_df.fillna(0)
try:
insights_df = insights_df.sort_values("date_start")
except KeyError:
insights_df = pd.DataFrame()
return insights_df | /route1io-connectors-0.15.3.tar.gz/route1io-connectors-0.15.3/route1io_connectors/facebook.py | 0.698638 | 0.324556 | facebook.py | pypi |
from typing import List
import pandas as pd
from google.ads.googleads.client import GoogleAdsClient
from google.protobuf import json_format
GOOGLEADS_VERSION = "v13"
def connect_to_google_ads(google_yaml_fpath: str) -> "GoogleAdsClient":
    """Build a GoogleAdsClient from a google-ads YAML credentials file.

    Parameters
    ----------
    google_yaml_fpath : str
        Path to the YAML file containing Google Ads credentials

    Returns
    -------
    GoogleAdsClient
        Authenticated client pinned to GOOGLEADS_VERSION
    """
    return GoogleAdsClient.load_from_storage(
        google_yaml_fpath, version=GOOGLEADS_VERSION
    )
def get_customers_linked_to_manager_account(client) -> List[str]:
    """Return the customer IDs accessible to the authenticated client.

    Additional information available at
    https://developers.google.com/google-ads/api/docs/account-management/listing-accounts

    Parameters
    ----------
    client : GoogleAdsClient
        Authenticated GoogleAdsClient

    Returns
    -------
    List[str]
        Customer IDs linked to the Google Ads client
    """
    customer_service = client.get_service("CustomerService")
    resource_names = customer_service.list_accessible_customers().resource_names
    # Resource names look like "customers/<id>"; keep only the bare ID.
    return [name.replace("customers/", "") for name in resource_names]
def get_google_ads_data(google_ads_client: "GoogleAdsClient", customer_id: str, query: str) -> "pd.DataFrame":
    """Return a DataFrame of Google Ads data pulled via a GAQL query.

    Parameters
    ----------
    google_ads_client : GoogleAdsClient
        Authenticated client for accessing Google Ads API
    customer_id : str
        Customer ID of the customer whose data will be accessed
    query : str
        Valid GAQL query
        (see https://developers.google.com/google-ads/api/fields/v10/overview_query_builder)

    Returns
    -------
    df : pd.DataFrame
        One row per result row; empty DataFrame when the query returns nothing.
    """
    # NOTE: search_stream and the row iteration must stay in the same scope,
    # otherwise the client segfaults
    # (see https://github.com/googleads/google-ads-python/issues/384)
    ga_service = google_ads_client.get_service("GoogleAdsService")
    stream_request = google_ads_client.get_type("SearchGoogleAdsStreamRequest")
    stream_request.customer_id = customer_id
    stream_request.query = query
    frames = []
    for batch in ga_service.search_stream(stream_request):
        for row in batch.results:
            frames.append(pd.json_normalize(json_format.MessageToDict(row._pb)))
    try:
        df = pd.concat(frames)
    except ValueError:
        # pd.concat raises ValueError on an empty list -> no rows returned
        df = pd.DataFrame()
    else:
        df = _convert_dtypes(df)
    return df
def _convert_dtypes(df: "pd.DataFrame") -> "pd.DataFrame":
    """Return DataFrame with attempted conversion of dtypes across columns.

    Each column is run through :func:`pandas.to_numeric`; columns that cannot
    be fully parsed as numeric are left unchanged. The explicit per-column
    fallback replaces ``errors="ignore"``, which is deprecated since
    pandas 2.2, and drops the redundant ``df[df.columns]`` indexing.
    """
    def _to_numeric_or_original(column: "pd.Series") -> "pd.Series":
        # to_numeric raises when any value fails to parse; keep the column as-is.
        try:
            return pd.to_numeric(column)
        except (ValueError, TypeError):
            return column
    return df.apply(_to_numeric_or_original)
import tempfile
import pandas as pd
from googleapiclient.discovery import build
from .. import aws
def upload_gsheets_spreadsheet(gsheets_conn: "googleapiclient.discovery.Resource",
                               filename: str, spreadsheet_id: str,
                               spreadsheet_name: str
                               ) -> None:
    """Clear a Google Sheet and replace its contents with a local CSV file.

    Parameters
    ----------
    gsheets_conn : googleapiclient.discovery.Resource
        Connection to Google Sheets API
    filename : str
        Path of the CSV file to upload
    spreadsheet_id : str
        ID of the Google Sheets spreadsheet
    spreadsheet_name : str
        Name of the specific sheet (tab) to write to
    """
    frame = pd.read_csv(filename).fillna("")
    clear_google_sheet(
        gsheets_conn=gsheets_conn,
        spreadsheet_id=spreadsheet_id,
        spreadsheet_name=spreadsheet_name
    )
    # Transposing, resetting the index, and transposing back prepends the
    # column names as the first row of the value matrix.
    sheet_values = frame.T.reset_index().T.values.tolist()
    request_body = {"majorDimension": "ROWS", "values": sheet_values}
    gsheets_conn.spreadsheets().values().update(
        spreadsheetId=spreadsheet_id,
        valueInputOption='USER_ENTERED',
        range=f"{spreadsheet_name}!A1",
        body=request_body
    ).execute()
def connect_to_gsheets(credentials: "google.oauth2.credentials.Credentials"
                       ) -> "googleapiclient.discovery.Resource":
    """Return a connection to the Google Sheets v4 API.

    Parameters
    ----------
    credentials : google.oauth2.credentials.Credentials
        Valid Credentials object with the necessary authentication

    Returns
    -------
    googleapiclient.discovery.Resource
        Service resource for the Sheets API
    """
    return build('sheets', 'v4', credentials=credentials)
def clear_google_sheet(gsheets_conn: "googleapiclient.discovery.Resource",
                       spreadsheet_id: str, spreadsheet_name: str) -> None:
    """Clear all values from the specified Google Sheet.

    Parameters
    ----------
    gsheets_conn : googleapiclient.discovery.Resource
        Connection to Google Sheets API
    spreadsheet_id : str
        ID of the Google Sheets spreadsheet
    spreadsheet_name : str
        Name of the specific sheet (tab) to be cleared
    """
    # Clearing A1:Z wipes every populated cell in the default column span.
    clear_range = f'{spreadsheet_name}!A1:Z'
    gsheets_conn.spreadsheets().values().clear(
        spreadsheetId=spreadsheet_id,
        range=clear_range,
        body={}
    ).execute()
def download_gsheets_spreadsheet(gsheets_conn: "googleapiclient.discovery.Resource",
                                 filename: str, spreadsheet_id: str,
                                 spreadsheet_name: str) -> None:
    """Download a Google Sheet's contents to a local CSV file.

    Parameters
    ----------
    gsheets_conn : googleapiclient.discovery.Resource
        Connection to Google Sheets API
    filename : str
        Name of the local filename to download to
    spreadsheet_id : str
        ID of the Google Sheet to download from
    spreadsheet_name : str
        Name of the specific sheet (tab) to read from
    """
    range_all = f'{spreadsheet_name}!A1:Z'
    result = gsheets_conn.spreadsheets().values().get(
        spreadsheetId=spreadsheet_id,
        range=range_all).execute()
    values = result.get('values', [])
    if values:
        # First row holds the header; the remainder are data rows.
        df = pd.DataFrame(values[1:], columns=values[0])
    else:
        # An empty sheet omits the "values" key entirely; previously this
        # raised IndexError on values[0]. Write an empty CSV instead.
        df = pd.DataFrame()
    df.to_csv(filename, index=False)
def copy_sheet_to_aws_s3(gsheets_conn: "googleapiclient.discovery.Resource",
                         spreadsheet_id: str, spreadsheet_name: str, s3, bucket: str,
                         key: str = None) -> None:
    """Copy the contents of a Google Sheet into an S3 bucket as a CSV.

    Parameters
    ----------
    gsheets_conn : googleapiclient.discovery.Resource
        Connection to Google Sheets API
    spreadsheet_id : str
        ID of the Google Sheet to download from
    spreadsheet_name : str
        Name of the specific sheet (tab) to read from
    s3
        Valid S3 connection created using aws.connect_to_s3
    bucket : str
        Existing bucket on AWS
    key : str = None
        (Optional) Key name of the file as it will appear in S3; defaults
        to ``"<spreadsheet_name>.csv"``.
    """
    if key is None:
        key = f"{spreadsheet_name}.csv"
    # Stage the sheet in a temporary file so nothing lingers on disk.
    with tempfile.NamedTemporaryFile("wb+") as temp_file:
        download_gsheets_spreadsheet(
            gsheets_conn=gsheets_conn,
            spreadsheet_id=spreadsheet_id,
            spreadsheet_name=spreadsheet_name,
            filename=temp_file.name
        )
        temp_file.seek(0)
        aws.upload_to_s3(s3=s3, bucket=bucket, filename=temp_file.name, key=key)
from typing import List
import warnings
from googleapiclient.discovery import build
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from .utils import _universal_analytics, _ga4
def connect_to_google_analytics(
        credentials: "google.oauth2.credentials.Credentials",
        ga4: bool = False
    ) -> "googleapiclient.discovery.Resource":
    """Return a connection to Google Analytics.

    Parameters
    ----------
    credentials : google.oauth2.credentials.Credentials
        Valid Credentials object with the necessary authentication
    ga4 : bool = False
        When True, connect to the Google Analytics 4 Data API; otherwise
        connect to the (deprecated) Universal Analytics Reporting API.

    Returns
    -------
    googleapiclient.discovery.Resource
        Connection to the selected Google Analytics API
    """
    warnings.warn("In the future ga4=True will become the default behavior for this function. Google is sunsetting Universal Analytics on July 1st, 2023 and is recommending you migrate to Google Analytics 4. More information can be found here: https://support.google.com/analytics/answer/11583528?hl=en", FutureWarning)
    if not ga4:
        google_conn = build('analyticsreporting', 'v4', credentials=credentials)
        warnings.warn("Google is sunsetting Universal Analytics on July 1st, 2023 and is recommending you migrate to Google Analytics 4. More information can be found here: https://support.google.com/analytics/answer/11583528?hl=en", DeprecationWarning)
        return google_conn
    return BetaAnalyticsDataClient(credentials=credentials)
def get_google_analytics_data(
        analytics,
        view_id: str,
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: str = "7daysAgo",
        end_date: str = "today"
    ) -> "pd.DataFrame":
    """Return a DataFrame of Google Analytics data between the requested
    dates for the specified view ID.

    The connection type decides the backend: a ``BetaAnalyticsDataClient``
    is routed to the GA4 processor, anything else to Universal Analytics.

    Parameters
    ----------
    view_id : str
        View ID (UA) or property ID (GA4) to query
    dimensions : List[str]
        List of dimensions
        (https://ga-dev-tools.web.app/dimensions-metrics-explorer/)
    metrics : List[str]
        List of metrics
        (https://ga-dev-tools.web.app/dimensions-metrics-explorer/)
    start_date : str
        Dynamic preset such as 7daysAgo or YYYY-MM-DD
    end_date : str
        Dynamic preset such as today or YYYY-MM-DD

    Returns
    -------
    pd.DataFrame
    """
    if isinstance(analytics, BetaAnalyticsDataClient):
        processing_func = _ga4.process_ga4_data
    else:
        processing_func = _universal_analytics.process_universal_analytics_data
    return processing_func(
        analytics=analytics,
        view_id=view_id,
        dimensions=dimensions,
        metrics=metrics,
        start_date=start_date,
        end_date=end_date
    )
from typing import List
import os
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from ..utils import endpoints
def get_token_from_full_auth_flow(authorized_user_file: str,
                                  client_secrets_file: str, scopes: List[str],
                                  port: int = 0) -> "google.oauth2.credentials.Credentials":
    """Return authorized and authenticated credentials for accessing Google APIs.

    Tries, in order: saved credentials from ``authorized_user_file``, an
    in-place refresh when the saved token is expired but refreshable, and
    finally a full user consent screen (saving the result for next time).
    Reference code sample: https://developers.google.com/docs/api/quickstart/python

    Parameters
    ----------
    authorized_user_file : str
        Filepath to token JSON file. If the file does not exist it becomes
        the path the token is dumped to after the consent screen.
    client_secrets_file : str
        Filepath to client secrets file downloaded from GCP
    scopes : List[str]
        Enabled APIs that we want our app to have access to
    port : int
        Port to open user consent screen on

    Returns
    -------
    creds : google.oauth2.credentials.Credentials
        Authenticated and authorized credentials for accessing Google API
    """
    creds = None
    if os.path.exists(authorized_user_file):
        creds = refresh_token_from_authorized_user_file(authorized_user_file=authorized_user_file)
    if creds is not None and creds.valid:
        return creds
    if creds is not None and creds.expired and creds.refresh_token:
        # Token is stale but refreshable without user interaction.
        _refresh_credentials(creds=creds)
        return creds
    # No usable saved credentials: fall back to the interactive consent flow.
    return get_token_from_user_consent_screen(
        client_secrets_file=client_secrets_file,
        scopes=scopes,
        port=port,
        fpath=authorized_user_file
    )
def get_token_from_user_consent_screen(client_secrets_file: str, scopes: List[str],
                                       port: int = 0, fpath: str = None) -> "google.oauth2.credentials.Credentials":
    """Return valid credentials after walking the user through the consent
    screen that authorizes the app for the requested scopes.

    Parameters
    ----------
    client_secrets_file : str
        Filepath to client secrets file downloaded from OAuth 2.0 Client IDs
        on Google Cloud Platform after generating OAuth credentials
    scopes : List[str]
        Scopes of APIs that have been enabled on Google Cloud Platform
    port : int = 0
        Port to open user consent screen on
    fpath : str = None
        If specified, dumps the token to this filepath as a JSON

    Returns
    -------
    creds : google.oauth2.credentials.Credentials
        Authenticated and authorized credentials for accessing Google API
    """
    consent_flow = InstalledAppFlow.from_client_secrets_file(
        client_secrets_file=client_secrets_file, scopes=scopes
    )
    creds = consent_flow.run_local_server(port=port)
    if fpath is not None:
        # Persist the token so subsequent runs can skip the consent screen.
        _save_credentials(creds=creds, fpath=fpath)
    return creds
def refresh_token_from_authorized_user_file(authorized_user_file: str):
    """Return valid credentials loaded from a previously saved token file,
    refreshing them in place when they have expired.

    Parameters
    ----------
    authorized_user_file : str
        Filepath to a file containing refresh token and various credentials
        acquired after user consented to scopes.

    Returns
    -------
    creds : google.oauth2.credentials.Credentials
        Authenticated and authorized credentials for accessing Google API
    """
    creds = Credentials.from_authorized_user_file(filename=authorized_user_file)
    if creds.expired:
        _refresh_credentials(creds=creds)
    return creds
def refresh_token_from_credentials(refresh_token: str,
                                   client_id: str, client_secret: str,
                                   scopes: List[str] = None) -> "google.oauth2.credentials.Credentials":
    """Return valid credentials rebuilt from an explicit refresh token,
    client ID, and client secret, then refreshed immediately.

    Parameters
    ----------
    refresh_token : str
        Valid refresh token
    client_id : str
        Client ID acquired from creating credentials in APIs & Services on GCP
    client_secret : str
        Client secret acquired from creating credentials in APIs & Services on GCP
    scopes : List[str] = None
        Optional scopes to pass. This has no bearing on the token refresh but
        it's a good idea to explicitly set what scopes we have access to to
        keep track of permissions.

    Returns
    -------
    creds : google.oauth2.credentials.Credentials
        Authenticated and authorized credentials for accessing Google API
    """
    # token=None forces the immediate refresh below to mint an access token.
    creds = Credentials(
        token=None,
        refresh_token=refresh_token,
        client_id=client_id,
        client_secret=client_secret,
        token_uri=endpoints.GOOGLE_TOKEN_ENDPOINT,
        scopes=scopes
    )
    _refresh_credentials(creds=creds)
    return creds
def _save_credentials(creds: "google.oauth2.credentials.Credentials", fpath: str) -> None:
    """Serialize *creds* to JSON and write them to ``fpath``."""
    serialized = creds.to_json()
    with open(fpath, "w") as outjson:
        outjson.write(serialized)
def _refresh_credentials(creds: "google.oauth2.credentials.Credentials") -> None:
    """Refresh *creds* in place via a fresh token request (returns None)."""
    refresh_request = Request()
    creds.refresh(refresh_request)
from typing import List, Dict
import itertools
import pandas as pd
import numpy as np
from google.analytics.data_v1beta.types import (
DateRange,
Dimension,
Metric,
RunReportRequest,
)
def process_ga4_data(
        analytics,
        view_id: str,
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: str = "7daysAgo",
        end_date: str = "today"
    ) -> "pd.DataFrame":
    """Return a DataFrame of GA4 data pulled via the Google Analytics
    Data API: one request, then parse the raw response."""
    raw_response = _request_ga4_data(
        analytics=analytics,
        view_id=view_id,
        dimensions=dimensions,
        metrics=metrics,
        start_date=start_date,
        end_date=end_date
    )
    return _process_raw_ga4_data(resp=raw_response)
def _process_raw_ga4_data(resp) -> "pd.DataFrame":
    """Return a DataFrame built from a raw GA4 report response.

    Column names come from the dimension/metric headers; metric columns are
    cast to the dtypes advertised by the response's metric headers.
    """
    column_names = _build_list_from_resp(
        resp.dimension_headers, resp.metric_headers, attr_name="name"
    )
    metric_dtypes = _build_metric_type_list_from_resp(resp)
    records = [
        dict(zip(column_names,
                 _build_list_from_resp(row.dimension_values, row.metric_values,
                                       attr_name="value")))
        for row in resp.rows
    ]
    return pd.DataFrame(records).astype(metric_dtypes)
def _build_list_from_resp(*args, attr_name: str) -> List[str]:
    """Return a flat list of the ``attr_name`` attribute taken from every
    element across all of the supplied iterables, in order."""
    return [getattr(element, attr_name) for group in args for element in group]
def _build_metric_type_list_from_resp(resp) -> Dict[str, str]:
    """Return a mapping of metric header name to the DataFrame dtype derived
    from the header's declared GA4 type."""
    dtypes = {}
    for header in resp.metric_headers:
        dtypes[header.name] = _lookup_dtype(header.type_.name)
    return dtypes
def _lookup_dtype(resp_type: str) -> str:
    """Return the dtype for a pd.DataFrame column given Google's reported
    metric type; any type other than TYPE_INTEGER falls back to ``str``."""
    if resp_type == "TYPE_INTEGER":
        return np.int32
    return str
def _request_ga4_data(
        analytics,
        view_id: str,
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: str = "7daysAgo",
        end_date: str = "today"
    ):
    """Return the response from a report request to the Google Analytics
    Data API.

    Parameters mirror :func:`process_ga4_data`. Previously, calling this
    with the documented defaults raised ``TypeError`` because ``None`` was
    iterated directly; omitted dimensions/metrics are now sent as empty
    lists instead.
    """
    request = RunReportRequest(
        property=f"properties/{view_id}",
        dimensions=[Dimension(name=dim) for dim in (dimensions or [])],
        metrics=[Metric(name=metric) for metric in (metrics or [])],
        date_ranges=[DateRange(start_date=start_date, end_date=end_date)],
    )
    return analytics.run_report(request)
from typing import List, Union, Dict, Tuple
import itertools
import pandas as pd
import numpy as np
def process_universal_analytics_data(
        analytics,
        view_id: str,
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: str = "7daysAgo",
        end_date: str = "today"
    ) -> "pd.DataFrame":
    """Return a DataFrame of Universal Analytics data pulled via the Google
    Analytics Reporting API, following pagination until exhausted."""
    frames = []
    next_page_token = None
    while True:
        resp = _request_universal_analytics_data(
            analytics=analytics,
            view_id=view_id,
            dimensions=dimensions,
            metrics=metrics,
            start_date=start_date,
            end_date=end_date,
            next_page_token=next_page_token
        )
        frames.append(_process_raw_universal_analytics_data(resp=resp))
        next_page_token = _get_next_page_token(resp=resp)
        if next_page_token is None:
            # No further pages: stitch every page into one frame.
            return pd.concat(frames)
def _get_next_page_token(resp: Dict[str, str]) -> Union[str, None]:
    """Return the next page token from the first report in *resp*, or
    ``None`` when no further pages of data exist."""
    first_report = resp["reports"][0]
    return first_report.get("nextPageToken")
def _request_universal_analytics_data(
        analytics,
        view_id: str,
        dimensions: List[str] = None,
        metrics: List[str] = None,
        start_date: str = "7daysAgo",
        end_date: str = "today",
        next_page_token: Union[str, None] = None
    ) -> Dict[str, Union[str, List, Dict, bool]]:
    """Return the response of one reporting request to the Google Analytics
    Reporting API built from the supplied arguments.

    Parameters
    ----------
    view_id : str
        View ID that we want to view
    dimensions : List[str]
        List of dimensions
        (https://ga-dev-tools.web.app/dimensions-metrics-explorer/)
    metrics : List[str]
        List of metrics
        (https://ga-dev-tools.web.app/dimensions-metrics-explorer/)
    start_date : str
        Dynamic preset such as 7daysAgo or YYYY-MM-DD
    end_date : str
        Dynamic preset such as today or YYYY-MM-DD
    next_page_token : Union[str, None]
        Token of the next page when paginating a previous response

    Returns
    -------
    resp : Dict[str, Union[str, List, Dict, bool]]
    """
    request_body = {
        'reportRequests': _process_report_requests(
            view_id=view_id,
            dimensions=dimensions,
            metrics=metrics,
            start_date=start_date,
            end_date=end_date,
            next_page_token=next_page_token
        )
    }
    return analytics.reports().batchGet(body=request_body).execute()
def _process_raw_universal_analytics_data(resp: Dict[str, Union[str, List, Dict, bool]]) -> "pd.DataFrame":
    """Return a DataFrame parsed from the first report in a raw Google
    Analytics Reporting API response, cast to the advertised dtypes."""
    report = resp['reports'][0]
    columns_metadata = _process_columns(report['columnHeader'])
    frame = pd.DataFrame(
        _process_rows(report['data']),
        columns=list(columns_metadata)
    )
    return frame.astype(columns_metadata)
def _process_rows(values_resp) -> List[List[str]]:
    """Return a list of row value lists (dimension values followed by the
    flattened metric values) parsed from the API response."""
    processed_rows = []
    for row in values_resp['rows']:
        # Dimension-less reports omit the "dimensions" key entirely.
        dimensions = row.get('dimensions', [])
        metric_values = [
            value for metric in row['metrics'] for value in metric['values']
        ]
        processed_rows.append(dimensions + metric_values)
    return processed_rows
def _process_columns(column_header_resp: Dict[str, str]) -> Dict[str, type]:
    """Return a mapping of column name (with the ``ga:`` prefix stripped)
    to the dtype parsed from the Google Analytics column headers.

    The return annotation previously claimed ``List[Tuple[str]]``, but the
    function has always returned a dict — which is what
    ``pd.DataFrame.astype`` expects downstream.
    """
    dimensions_cols = _process_dimensions_columns(column_header_resp=column_header_resp)
    metrics_cols = _process_metrics_columns(column_header_resp=column_header_resp)
    columns_metadata = [*dimensions_cols, *metrics_cols]
    return {key.replace("ga:", ""): val for key, val in columns_metadata}
def _process_metrics_columns(column_header_resp) -> List[Tuple]:
    """Return a list of (metric name, dtype) tuples parsed from the metric
    header entries of the response."""
    header_entries = column_header_resp['metricHeader']['metricHeaderEntries']
    return [
        (entry['name'], _lookup_dtype(entry['type']))
        for entry in header_entries
    ]
def _process_dimensions_columns(column_header_resp) -> List[Tuple[str, str]]:
    """Return a list of (dimension name, dtype) tuples; dimensions are
    always typed as ``str``. Responses without a "dimensions" key yield an
    empty list."""
    return [
        (dimension, str)
        for dimension in column_header_resp.get('dimensions', [])
    ]
def _lookup_dtype(resp_type: str) -> type:
    """Return the dtype for a pd.DataFrame column as determined from the
    column type reported by the API.

    The return annotation previously claimed ``Dict[str, str]``; the
    function returns a single type object (e.g. ``np.int32``).

    Raises
    ------
    KeyError
        If *resp_type* is not a recognized Google Analytics column type.
    """
    dtypes = {
        "INTEGER": np.int32,
        "FLOAT": np.float32,
        "TIME": str,
        "CURRENCY": np.float32
    }
    return dtypes[resp_type]
def _process_report_requests(
        view_id: str,
        dimensions: Union[List[str], None],
        metrics: Union[List[str], None],
        start_date: str,
        end_date: str,
        next_page_token: Union[str, None]
    ) -> List[Dict]:
    """Return a single-element list holding a formatted data request for the
    Google Analytics Reporting API's ``batchGet``.

    Optional pieces (page token, dimensions, metrics) are only included when
    provided. The return annotation previously claimed ``Dict[str, str]``;
    the function returns ``[report_requests]``, a list containing one dict.
    """
    report_requests = {
        "viewId": f"ga:{view_id}",
        "dateRanges": [{"startDate": start_date, "endDate": end_date}],
        # Maximum page size permitted by the Reporting API v4.
        "pageSize": 100_000
    }
    if next_page_token is not None:
        report_requests["pageToken"] = next_page_token
    if dimensions is not None:
        report_requests['dimensions'] = _process_dimensions(dimensions)
    if metrics is not None:
        report_requests['metrics'] = _process_metrics(metrics)
    return [report_requests]
def _process_dimensions(dimensions: List[str]) -> List[Dict[str, str]]:
    """Return the dimensions wrapped in the ``{"name": "ga:<dim>"}`` shape
    the Google Analytics Reporting API expects."""
    return [{"name": "ga:" + dimension} for dimension in dimensions]
def _process_metrics(metrics: List[str]) -> List[Dict[str, str]]:
    """Return the metrics wrapped in the ``{"expression": "ga:<metric>"}``
    shape the Google Analytics Reporting API expects."""
    return [{"expression": "ga:" + metric} for metric in metrics]
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DEVICE_TYPE,
TRAVEL_MODE,
DISTANCE_UNIT,
METRIC,
)
KEY = "11111111111111111111111111111111"
# codebeat:disable[LOC, ABC]
def main():
    """Build and run a single-depot, multiple-driver optimization demo.

    The first stop is the depot; every stop carries 300 seconds of service
    time, and one stop (2014 KENDALL LN) has an explicit delivery window.
    """
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address
    optimization.algorithm_type(ALGORITHM_TYPE.CVRP_TW_SD)
    optimization.share_route(0)
    optimization.store_route(0)
    optimization.route_time(0)
    optimization.parts(20)
    optimization.route_max_duration(86400)
    optimization.vehicle_capacity(1)
    optimization.vehicle_max_distance_mi(10000)
    optimization.route_name('Single Depot, Multiple Driver')
    optimization.optimize(OPTIMIZE.DISTANCE)
    optimization.distance_unit(DISTANCE_UNIT.MI)
    optimization.device_type(DEVICE_TYPE.WEB)
    optimization.travel_mode(TRAVEL_MODE.DRIVING)
    optimization.metric(METRIC.ROUTE4ME_METRIC_GEODESIC)
    # (address, latitude, longitude) for every stop; index 0 is the depot.
    stops = [
        ("455 S 4th St, Louisville, KY 40202", 38.251698, -85.757308),
        ("1604 PARKRIDGE PKWY, Louisville, KY, 40214", 38.141598, -85.793846),
        ("1407 MCCOY, Louisville, KY, 40215", 38.202496, -85.786514),
        ("4805 BELLEVUE AVE, Louisville, KY, 40215", 38.178844, -85.774864),
        ("730 CECIL AVENUE, Louisville, KY, 40211", 38.248684, -85.821121),
        ("650 SOUTH 29TH ST UNIT 315, Louisville, KY, 40211", 38.251923, -85.800034),
        ("4629 HILLSIDE DRIVE, Louisville, KY, 40216", 38.176067, -85.824638),
        ("4738 BELLEVUE AVE, Louisville, KY, 40215", 38.179806, -85.775558),
        ("318 SO. 39TH STREET, Louisville, KY, 40212", 38.259335, -85.815094),
        ("1324 BLUEGRASS AVE, Louisville, KY, 40215", 38.179253, -85.785118),
        ("7305 ROYAL WOODS DR, Louisville, KY, 40214", 38.162472, -85.792854),
        ("1661 W HILL ST, Louisville, KY, 40210", 38.229584, -85.783966),
        ("3222 KINGSWOOD WAY, Louisville, KY, 40216", 38.210606, -85.822594),
        ("1922 PALATKA RD, Louisville, KY, 40214", 38.153767, -85.796783),
        ("1314 SOUTH 26TH STREET, Louisville, KY, 40210", 38.235847, -85.796852),
        ("2135 MCCLOSKEY AVENUE, Louisville, KY, 40210", 38.218662, -85.789032),
        ("1409 PHYLLIS AVE, Louisville, KY, 40215", 38.206154, -85.781387),
        ("4504 SUNFLOWER AVE, Louisville, KY, 40216", 38.187511, -85.839149),
        ("2512 GREENWOOD AVE, Louisville, KY, 40210", 38.241405, -85.795059),
        ("5500 WILKE FARM AVE, Louisville, KY, 40216", 38.166065, -85.863319),
        ("3640 LENTZ AVE, Louisville, KY, 40215", 38.193283, -85.786201),
        ("1020 BLUEGRASS AVE, Louisville, KY, 40215", 38.17952, -85.780037),
        ("123 NORTH 40TH ST, Louisville, KY, 40212", 38.26498, -85.814156),
        ("7315 ST ANDREWS WOODS CIRCLE UNIT 104, Louisville, KY, 40214", 38.151072, -85.802867),
        ("3210 POPLAR VIEW DR, Louisville, KY, 40216", 38.182594, -85.849937),
        ("4519 LOUANE WAY, Louisville, KY, 40216", 38.1754, -85.811447),
        ("6812 MANSLICK RD, Louisville, KY, 40214", 38.161839, -85.798279),
        ("1524 HUNTOON AVENUE, Louisville, KY, 40215", 38.172031, -85.788353),
        ("1307 LARCHMONT AVE, Louisville, KY, 40215", 38.209663, -85.779816),
        ("434 N 26TH STREET #2, Louisville, KY, 40212", 38.26844, -85.791962),
        ("678 WESTLAWN ST, Louisville, KY, 40211", 38.250397, -85.80629),
        ("2308 W BROADWAY, Louisville, KY, 40211", 38.248882, -85.790421),
        ("2332 WOODLAND AVE, Louisville, KY, 40210", 38.233579, -85.794257),
        ("1706 WEST ST. CATHERINE, Louisville, KY, 40210", 38.239697, -85.783928),
        ("1699 WATHEN LN, Louisville, KY, 40216", 38.216465, -85.792397),
        ("2416 SUNSHINE WAY, Louisville, KY, 40216", 38.186245, -85.831787),
        ("6925 MANSLICK RD, Louisville, KY, 40214", 38.158466, -85.798355),
        ("2707 7TH ST, Louisville, KY, 40215", 38.212438, -85.785082),
        ("2014 KENDALL LN, Louisville, KY, 40216", 38.179394, -85.826668),
        ("612 N 39TH ST, Louisville, KY, 40212", 38.273354, -85.812012),
        ("2215 ROWAN ST, Louisville, KY, 40212", 38.261703, -85.786781),
        ("1826 W. KENTUCKY ST, Louisville, KY, 40210", 38.241611, -85.78653),
        ("1810 GREGG AVE, Louisville, KY, 40210", 38.224716, -85.796211),
        ("4103 BURRRELL DRIVE, Louisville, KY, 40216", 38.191753, -85.825836),
        ("359 SOUTHWESTERN PKWY, Louisville, KY, 40212", 38.259903, -85.823463),
        ("2407 W CHESTNUT ST, Louisville, KY, 40211", 38.252781, -85.792109),
        ("225 S 22ND ST, Louisville, KY, 40212", 38.257616, -85.786658),
        ("1404 MCCOY AVE, Louisville, KY, 40215", 38.202122, -85.786072),
        ("117 FOUNT LANDING CT, Louisville, KY, 40212", 38.270061, -85.799438),
        ("5504 SHOREWOOD DRIVE, Louisville, KY, 40214", 38.145851, -85.7798),
    ]
    # Extra keyword arguments for specific stops, keyed by index in `stops`.
    stop_overrides = {
        38: {"time_window_start": 51600, "time_window_end": 52200},
    }
    for index, (stop_address, stop_lat, stop_lng) in enumerate(stops):
        address.add_address(
            address=stop_address,
            lat=stop_lat,
            lng=stop_lng,
            is_depot=1 if index == 0 else 0,
            time=300,
            **stop_overrides.get(index, {})
        )
    response = route4me.run_optimization()
    print('Optimization Link: {}'.format(response.links.view))
    for address in response.addresses:
        print('Route {0} link: {1} route_id: {2}'.format(address.address,
                                                         ROUTE_HOST,
                                                         address.route_id))
# codebeat:enable[LOC, ABC]
# Run the demo only when executed directly as a script.
# (Dataset-extraction junk fused onto the `main()` line — a syntax error —
# has been removed.)
if __name__ == '__main__':
    main()
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DEVICE_TYPE,
TRAVEL_MODE,
DISTANCE_UNIT,
)
KEY = "11111111111111111111111111111111"
def main():
    """Build and run a single-driver round-trip optimization demo.

    The first stop (Bergdorf Goodman) acts as the depot; the remaining
    stops are visited once each with no service time.
    """
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address
    optimization.algorithm_type(ALGORITHM_TYPE.TSP)
    optimization.share_route(0)
    optimization.store_route(0)
    optimization.route_time(0)
    optimization.route_max_duration(86400)
    optimization.vehicle_capacity(1)
    optimization.vehicle_max_distance_mi(10000)
    optimization.route_name('Single Driver Round Trip')
    optimization.optimize(OPTIMIZE.DISTANCE)
    optimization.distance_unit(DISTANCE_UNIT.MI)
    optimization.device_type(DEVICE_TYPE.WEB)
    optimization.travel_mode(TRAVEL_MODE.DRIVING)
    # Keyword arguments for each stop, forwarded to add_address unchanged so
    # the calls match the original one-by-one additions exactly.
    stops = [
        {"address": '754 5th Ave New York, NY 10019', "lat": 40.7636197,
         "lng": -73.9744388, "alias": 'Bergdorf Goodman', "is_depot": 1, "time": 0},
        {"address": '717 5th Ave New York, NY 10022', "lat": 40.7669692,
         "lng": -73.9693864, "alias": 'Giorgio Armani', "time": 0},
        {"address": '888 Madison Ave New York, NY 10014', "lat": 40.7715154,
         "lng": -73.9669241, "alias": "Ralph Lauren Women's and Home", "time": 0},
        {"address": '1011 Madison Ave New York, NY 10075', "lat": 40.7772129,
         "lng": -73.9669, "alias": 'Yigal Azrou\u00ebl', "time": 0},
        {"address": '440 Columbus Ave New York, NY 10024', "lat": 40.7808364,
         "lng": -73.9732729, "alias": 'Frank Stella Clothier', "time": 0},
        {"address": '324 Columbus Ave #1 New York, NY 10023', "lat": 40.7803123,
         "lng": -73.9793079, "alias": 'Liana', "time": 0},
        {"address": '110 W End Ave New York, NY 10023', "lat": 40.7753077,
         "lng": -73.9861529, "alias": 'Toga Bike Shop', "time": 0},
        {"address": '555 W 57th St New York, NY 10019', "lat": 40.7718005,
         "lng": -73.9897716, "alias": 'BMW of Manhattan', "time": 0},
        {"address": '57 W 57th St New York, NY 10019', "lat": 40.7558695,
         "lng": -73.9862019, "alias": 'Verizon Wireless', "time": 0},
    ]
    for stop in stops:
        address.add_address(**stop)
    response = route4me.run_optimization()
    print('Optimization Link: {}'.format(response.links.view))
    for address in response.addresses:
        print('\tRoute {0} link: {1}\troute_id={2}'.format(address.address,
                                                           ROUTE_HOST,
                                                           address.route_id))
# Entry point: run the demo only when executed directly as a script.
if __name__ == '__main__':
    main()
# codebeat:enable[SIMILARITY, LOC, ABC]
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DEVICE_TYPE,
TRAVEL_MODE,
DISTANCE_UNIT,
METRIC,
)
KEY = "11111111111111111111111111111111"
# codebeat:disable[LOC, ABC]
def main():
    """Run a multi-depot / multi-driver optimization with time windows.

    One depot plus 49 stops around Louisville, KY.  Every stop takes
    300 seconds of service time and owns a consecutive 600-second time
    window, the first one opening at 28800 seconds (08:00).
    """
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address

    # Problem-level settings.
    optimization.route_name('Multiple Depot, Multiple Driver, Time window')
    optimization.algorithm_type(ALGORITHM_TYPE.CVRP_TW_MD)
    optimization.share_route(0)
    optimization.store_route(1)
    optimization.device_type(DEVICE_TYPE.WEB)
    optimization.distance_unit(DISTANCE_UNIT.MI)
    optimization.travel_mode(TRAVEL_MODE.DRIVING)
    optimization.metric(METRIC.ROUTE4ME_METRIC_MATRIX)
    optimization.vehicle_capacity(9999)
    optimization.vehicle_max_distance_mi(99999)
    optimization.parts(10)
    optimization.route_time(0)
    optimization.rt(1)
    optimization.route_max_duration(86400)
    optimization.optimize(OPTIMIZE.TIME)

    # (street, latitude, longitude) for every stop.  The first entry is
    # the depot; list position determines each stop's time-window slot.
    stops = [
        ("455 S 4th St, Louisville, KY 40202", 38.251698, -85.757308),
        ("1604 PARKRIDGE PKWY, Louisville, KY, 40214", 38.141598, -85.793846),
        ("1407 MCCOY, Louisville, KY, 40215", 38.202496, -85.786514),
        ("4805 BELLEVUE AVE, Louisville, KY, 40215", 38.178844, -85.774864),
        ("730 CECIL AVENUE, Louisville, KY, 40211", 38.248684, -85.821121),
        ("650 SOUTH 29TH ST UNIT 315, Louisville, KY, 40211", 38.251923, -85.800034),
        ("4629 HILLSIDE DRIVE, Louisville, KY, 40216", 38.176067, -85.824638),
        ("4738 BELLEVUE AVE, Louisville, KY, 40215", 38.179806, -85.775558),
        ("318 SO. 39TH STREET, Louisville, KY, 40212", 38.259335, -85.815094),
        ("1324 BLUEGRASS AVE, Louisville, KY, 40215", 38.179253, -85.785118),
        ("7305 ROYAL WOODS DR, Louisville, KY, 40214", 38.162472, -85.792854),
        ("1661 W HILL ST, Louisville, KY, 40210", 38.229584, -85.783966),
        ("3222 KINGSWOOD WAY, Louisville, KY, 40216", 38.210606, -85.822594),
        ("1922 PALATKA RD, Louisville, KY, 40214", 38.153767, -85.796783),
        ("1314 SOUTH 26TH STREET, Louisville, KY, 40210", 38.235847, -85.796852),
        ("2135 MCCLOSKEY AVENUE, Louisville, KY, 40210", 38.218662, -85.789032),
        ("1409 PHYLLIS AVE, Louisville, KY, 40215", 38.206154, -85.781387),
        ("4504 SUNFLOWER AVE, Louisville, KY, 40216", 38.187511, -85.839149),
        ("2512 GREENWOOD AVE, Louisville, KY, 40210", 38.241405, -85.795059),
        ("5500 WILKE FARM AVE, Louisville, KY, 40216", 38.166065, -85.863319),
        ("3640 LENTZ AVE, Louisville, KY, 40215", 38.193283, -85.786201),
        ("1020 BLUEGRASS AVE, Louisville, KY, 40215", 38.17952, -85.780037),
        ("123 NORTH 40TH ST, Louisville, KY, 40212", 38.26498, -85.814156),
        ("7315 ST ANDREWS WOODS CIRCLE UNIT 104, Louisville, KY, 40214", 38.151072, -85.802867),
        ("3210 POPLAR VIEW DR, Louisville, KY, 40216", 38.182594, -85.849937),
        ("4519 LOUANE WAY, Louisville, KY, 40216", 38.1754, -85.811447),
        ("6812 MANSLICK RD, Louisville, KY, 40214", 38.161839, -85.798279),
        ("1524 HUNTOON AVENUE, Louisville, KY, 40215", 38.172031, -85.788353),
        ("1307 LARCHMONT AVE, Louisville, KY, 40215", 38.209663, -85.779816),
        ("434 N 26TH STREET #2, Louisville, KY, 40212", 38.26844, -85.791962),
        ("678 WESTLAWN ST, Louisville, KY, 40211", 38.250397, -85.80629),
        ("2308 W BROADWAY, Louisville, KY, 40211", 38.248882, -85.790421),
        ("2332 WOODLAND AVE, Louisville, KY, 40210", 38.233579, -85.794257),
        ("1706 WEST ST. CATHERINE, Louisville, KY, 40210", 38.239697, -85.783928),
        ("1699 WATHEN LN, Louisville, KY, 40216", 38.216465, -85.792397),
        ("2416 SUNSHINE WAY, Louisville, KY, 40216", 38.186245, -85.831787),
        ("6925 MANSLICK RD, Louisville, KY, 40214", 38.158466, -85.798355),
        ("2707 7TH ST, Louisville, KY, 40215", 38.212438, -85.785082),
        ("2014 KENDALL LN, Louisville, KY, 40216", 38.179394, -85.826668),
        ("612 N 39TH ST, Louisville, KY, 40212", 38.273354, -85.812012),
        ("2215 ROWAN ST, Louisville, KY, 40212", 38.261703, -85.786781),
        ("1826 W. KENTUCKY ST, Louisville, KY, 40210", 38.241611, -85.78653),
        ("1810 GREGG AVE, Louisville, KY, 40210", 38.224716, -85.796211),
        ("4103 BURRRELL DRIVE, Louisville, KY, 40216", 38.191753, -85.825836),
        ("359 SOUTHWESTERN PKWY, Louisville, KY, 40212", 38.259903, -85.823463),
        ("2407 W CHESTNUT ST, Louisville, KY, 40211", 38.252781, -85.792109),
        ("225 S 22ND ST, Louisville, KY, 40212", 38.257616, -85.786658),
        ("1404 MCCOY AVE, Louisville, KY, 40215", 38.202122, -85.786072),
        ("117 FOUNT LANDING CT, Louisville, KY, 40212", 38.270061, -85.799438),
        ("5504 SHOREWOOD DRIVE, Louisville, KY, 40214", 38.145851, -85.7798),
    ]

    first_window_start = 28800  # the first window opens at 08:00
    window_length = 600         # each stop owns a 10-minute slot
    for position, (street, lat, lng) in enumerate(stops):
        opens = first_window_start + window_length * position
        address.add_address(
            address=street,
            lat=lat,
            lng=lng,
            is_depot=1 if position == 0 else 0,
            time=300,
            time_window_start=opens,
            time_window_end=opens + window_length,
        )

    result = route4me.run_optimization()
    print('Optimization Link: {}'.format(result.links.view))
    for visit in result.addresses:
        print('Route {0} link: {1} route_id: {2}'.format(visit.address,
                                                         ROUTE_HOST,
                                                         visit.route_id))
# codebeat:enable[LOC, ABC]
if __name__ == '__main__':
    main()
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DEVICE_TYPE,
TRAVEL_MODE,
DISTANCE_UNIT,
METRIC,
)
KEY = "11111111111111111111111111111111"
# codebeat:disable[LOC, ABC]
def main():
    """Run a multi-depot / multi-driver optimization without time windows.

    One depot plus 49 stops around Louisville, KY, each with a
    300-second service time.
    """
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address

    # Problem-level settings.
    optimization.algorithm_type(ALGORITHM_TYPE.CVRP_TW_MD)
    optimization.share_route(0)
    optimization.store_route(0)
    optimization.route_time(0)
    optimization.parts(20)
    optimization.route_max_duration(86400)
    optimization.vehicle_capacity(1)
    optimization.vehicle_max_distance_mi(10000)
    optimization.route_name('Multiple Depot, Multiple Driver')
    optimization.optimize(OPTIMIZE.DISTANCE)
    optimization.distance_unit(DISTANCE_UNIT.MI)
    optimization.device_type(DEVICE_TYPE.WEB)
    optimization.travel_mode(TRAVEL_MODE.DRIVING)
    optimization.metric(METRIC.ROUTE4ME_METRIC_GEODESIC)

    # (street, latitude, longitude) for every stop; the first entry is
    # the depot.  The original example attached a time window to a single
    # stop ("2014 KENDALL LN") — an apparent copy/paste leftover in this
    # otherwise window-free example — which is intentionally not
    # reproduced here.
    stops = [
        ("455 S 4th St, Louisville, KY 40202", 38.251698, -85.757308),
        ("1604 PARKRIDGE PKWY, Louisville, KY, 40214", 38.141598, -85.793846),
        ("1407 MCCOY, Louisville, KY, 40215", 38.202496, -85.786514),
        ("4805 BELLEVUE AVE, Louisville, KY, 40215", 38.178844, -85.774864),
        ("730 CECIL AVENUE, Louisville, KY, 40211", 38.248684, -85.821121),
        ("650 SOUTH 29TH ST UNIT 315, Louisville, KY, 40211", 38.251923, -85.800034),
        ("4629 HILLSIDE DRIVE, Louisville, KY, 40216", 38.176067, -85.824638),
        ("4738 BELLEVUE AVE, Louisville, KY, 40215", 38.179806, -85.775558),
        ("318 SO. 39TH STREET, Louisville, KY, 40212", 38.259335, -85.815094),
        ("1324 BLUEGRASS AVE, Louisville, KY, 40215", 38.179253, -85.785118),
        ("7305 ROYAL WOODS DR, Louisville, KY, 40214", 38.162472, -85.792854),
        ("1661 W HILL ST, Louisville, KY, 40210", 38.229584, -85.783966),
        ("3222 KINGSWOOD WAY, Louisville, KY, 40216", 38.210606, -85.822594),
        ("1922 PALATKA RD, Louisville, KY, 40214", 38.153767, -85.796783),
        ("1314 SOUTH 26TH STREET, Louisville, KY, 40210", 38.235847, -85.796852),
        ("2135 MCCLOSKEY AVENUE, Louisville, KY, 40210", 38.218662, -85.789032),
        ("1409 PHYLLIS AVE, Louisville, KY, 40215", 38.206154, -85.781387),
        ("4504 SUNFLOWER AVE, Louisville, KY, 40216", 38.187511, -85.839149),
        ("2512 GREENWOOD AVE, Louisville, KY, 40210", 38.241405, -85.795059),
        ("5500 WILKE FARM AVE, Louisville, KY, 40216", 38.166065, -85.863319),
        ("3640 LENTZ AVE, Louisville, KY, 40215", 38.193283, -85.786201),
        ("1020 BLUEGRASS AVE, Louisville, KY, 40215", 38.17952, -85.780037),
        ("123 NORTH 40TH ST, Louisville, KY, 40212", 38.26498, -85.814156),
        ("7315 ST ANDREWS WOODS CIRCLE UNIT 104, Louisville, KY, 40214", 38.151072, -85.802867),
        ("3210 POPLAR VIEW DR, Louisville, KY, 40216", 38.182594, -85.849937),
        ("4519 LOUANE WAY, Louisville, KY, 40216", 38.1754, -85.811447),
        ("6812 MANSLICK RD, Louisville, KY, 40214", 38.161839, -85.798279),
        ("1524 HUNTOON AVENUE, Louisville, KY, 40215", 38.172031, -85.788353),
        ("1307 LARCHMONT AVE, Louisville, KY, 40215", 38.209663, -85.779816),
        ("434 N 26TH STREET #2, Louisville, KY, 40212", 38.26844, -85.791962),
        ("678 WESTLAWN ST, Louisville, KY, 40211", 38.250397, -85.80629),
        ("2308 W BROADWAY, Louisville, KY, 40211", 38.248882, -85.790421),
        ("2332 WOODLAND AVE, Louisville, KY, 40210", 38.233579, -85.794257),
        ("1706 WEST ST. CATHERINE, Louisville, KY, 40210", 38.239697, -85.783928),
        ("1699 WATHEN LN, Louisville, KY, 40216", 38.216465, -85.792397),
        ("2416 SUNSHINE WAY, Louisville, KY, 40216", 38.186245, -85.831787),
        ("6925 MANSLICK RD, Louisville, KY, 40214", 38.158466, -85.798355),
        ("2707 7TH ST, Louisville, KY, 40215", 38.212438, -85.785082),
        ("2014 KENDALL LN, Louisville, KY, 40216", 38.179394, -85.826668),
        ("612 N 39TH ST, Louisville, KY, 40212", 38.273354, -85.812012),
        ("2215 ROWAN ST, Louisville, KY, 40212", 38.261703, -85.786781),
        ("1826 W. KENTUCKY ST, Louisville, KY, 40210", 38.241611, -85.78653),
        ("1810 GREGG AVE, Louisville, KY, 40210", 38.224716, -85.796211),
        ("4103 BURRRELL DRIVE, Louisville, KY, 40216", 38.191753, -85.825836),
        ("359 SOUTHWESTERN PKWY, Louisville, KY, 40212", 38.259903, -85.823463),
        ("2407 W CHESTNUT ST, Louisville, KY, 40211", 38.252781, -85.792109),
        ("225 S 22ND ST, Louisville, KY, 40212", 38.257616, -85.786658),
        ("1404 MCCOY AVE, Louisville, KY, 40215", 38.202122, -85.786072),
        ("117 FOUNT LANDING CT, Louisville, KY, 40212", 38.270061, -85.799438),
        ("5504 SHOREWOOD DRIVE, Louisville, KY, 40214", 38.145851, -85.7798),
    ]
    for position, (street, lat, lng) in enumerate(stops):
        address.add_address(
            address=street,
            lat=lat,
            lng=lng,
            is_depot=1 if position == 0 else 0,
            time=300,
        )

    response = route4me.run_optimization()
    # Was "'Optimization Link: %s'.format(...)", which mixed %-style with
    # str.format and printed a literal "%s".
    print('Optimization Link: {}'.format(response.links.view))
    for visit in response.addresses:
        print('Route {0}\tlink: {1}\troute_id: {2}'.format(visit.address,
                                                           ROUTE_HOST,
                                                           visit.route_id))
# codebeat:enable[LOC, ABC]
if __name__ == '__main__':
    main()
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DISTANCE_UNIT,
DEVICE_TYPE,
TRAVEL_MODE,
OPTIMIZATION_STATE,
)
KEY = "11111111111111111111111111111111"
# codebeat:disable[LOC, ABC]
def main():
    """Round-trip TSP for a single driver over nine Manhattan stops,
    followed by a re-optimization of the produced problem."""
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address

    # Problem-level settings.
    optimization.algorithm_type(ALGORITHM_TYPE.TSP)
    optimization.share_route(0)
    optimization.store_route(0)
    optimization.route_time(0)
    optimization.route_max_duration(86400)
    optimization.vehicle_capacity(1)
    optimization.vehicle_max_distance_mi(10000)
    optimization.route_name('Single Driver Round Trip')
    optimization.optimize(OPTIMIZE.DISTANCE)
    optimization.distance_unit(DISTANCE_UNIT.MI)
    optimization.device_type(DEVICE_TYPE.WEB)
    optimization.travel_mode(TRAVEL_MODE.DRIVING)

    # (street, latitude, longitude, alias); the first entry is the depot.
    stops = [
        ('754 5th Ave New York, NY 10019', 40.7636197, -73.9744388, 'Bergdorf Goodman'),
        ('717 5th Ave New York, NY 10022', 40.7669692, -73.9693864, 'Giorgio Armani'),
        ('888 Madison Ave New York, NY 10014', 40.7715154, -73.9669241, 'Ralph Lauren Women\'s and Home'),
        ('1011 Madison Ave New York, NY 10075', 40.7772129, -73.9669, 'Yigal Azrou\u00ebl'),
        ('440 Columbus Ave New York, NY 10024', 40.7808364, -73.9732729, 'Frank Stella Clothier'),
        ('324 Columbus Ave #1 New York, NY 10023', 40.7803123, -73.9793079, 'Liana'),
        ('110 W End Ave New York, NY 10023', 40.7753077, -73.9861529, 'Toga Bike Shop'),
        ('555 W 57th St New York, NY 10019', 40.7718005, -73.9897716, 'BMW of Manhattan'),
        ('57 W 57th St New York, NY 10019', 40.7558695, -73.9862019, 'Verizon Wireless'),
    ]
    for position, (street, lat, lng, alias) in enumerate(stops):
        stop = dict(address=street, lat=lat, lng=lng, alias=alias, time=0)
        if position == 0:
            stop['is_depot'] = 1  # only the depot carries the flag
        address.add_address(**stop)

    response = route4me.run_optimization()
    print('Optimization status: {}'.format(
        OPTIMIZATION_STATE.reverse_mapping.get(response.state)
    ))
    print('Optimization Link: {}'.format(response.links.view))
    for visit in response.addresses:
        print('Route {0} link: {1} route_id={2}'.format(
            visit.address,
            ROUTE_HOST,
            visit.route_id
        ))

    print('Re-optimization...')
    response = route4me.reoptimization(response.optimization_problem_id)
    print('Re-optimization status: {}'.format(
        OPTIMIZATION_STATE.reverse_mapping.get(response.state)
    ))
# codebeat:enable[LOC, ABC]
if __name__ == '__main__':
    main()
from route4me import Route4Me
from route4me.api_endpoints import ROUTE_HOST
from route4me.constants import (
ALGORITHM_TYPE,
OPTIMIZE,
DISTANCE_UNIT,
DEVICE_TYPE,
)
KEY = "11111111111111111111111111111111"
# codebeat:disable[LOC, ABC]
def main():
    """Route a single driver through ten stops around Milledgeville, GA."""
    route4me = Route4Me(KEY)
    optimization = route4me.optimization
    address = route4me.address

    # Problem-level settings, supplied as one dict in this example.
    optimization.add({
        'algorithm_type': ALGORITHM_TYPE.TSP,
        'share_route': 0,
        'route_name': 'Single Driver Route 10 Stops',
        'optimize': OPTIMIZE.DISTANCE,
        'distance_unit': DISTANCE_UNIT.MI,
        'device_type': DEVICE_TYPE.WEB,
    })

    # (street, latitude, longitude); the first entry is the depot.
    stops = [
        ('151 Arbor Way Milledgeville GA 31061', 33.132675170898, -83.244743347168),
        ('230 Arbor Way Milledgeville GA 31061', 33.129695892334, -83.24577331543),
        ('148 Bass Rd NE Milledgeville GA 31061', 33.143497, -83.224487),
        ('117 Bill Johnson Rd NE Milledgeville GA 31061', 33.141784667969, -83.237518310547),
        ('119 Bill Johnson Rd NE Milledgeville GA 31061', 33.141086578369, -83.238258361816),
        ('131 Bill Johnson Rd NE Milledgeville GA 31061', 33.142036437988, -83.238845825195),
        ('138 Bill Johnson Rd NE Milledgeville GA 31061', 33.14307, -83.239334),
        ('139 Bill Johnson Rd NE Milledgeville GA 31061', 33.142734527588, -83.237442016602),
        ('145 Bill Johnson Rd NE Milledgeville GA 31061', 33.143871307373, -83.237342834473),
        ('221 Blake Cir Milledgeville GA 31061', 33.081462860107, -83.208511352539),
    ]
    for position, (street, lat, lng) in enumerate(stops):
        stop = dict(address=street, lat=lat, lng=lng, time=0)
        if position == 0:
            stop['is_depot'] = 1  # only the depot carries the flag
        address.add_address(**stop)

    response = route4me.run_optimization()
    print('Optimization Link: {}'.format(response.links.view))
    for visit in response.addresses:
        print('Route {0} link: {1} route_id={2}'.format(visit.address,
                                                        ROUTE_HOST,
                                                        visit.route_id))
# codebeat:enable[LOC, ABC]
if __name__ == '__main__':
    main()
import logging
from time import sleep
from typing import Optional
import boto3
route53 = boto3.client("route53")
logger = logging.getLogger(__name__)
def get_hosted_zone_id(zone_name: str) -> str:
    """Gets the ZoneId of the required zone, by iterating over all listed zones and checking the zone name.

    Args:
        zone_name (str): The DNS name of the zone, like `mydomain.com`

    Returns:
        str: the id of the zone as returned by Route53, like `/hostedzone/XXXXXXXXXXX`

    Raises:
        KeyError: if the account contains no hosted zones at all.
        ValueError: if no hosted zone matches `zone_name`.
    """
    zones = route53.list_hosted_zones()
    logger.debug(f"list_hosted_zones output: {zones}")
    if not zones["HostedZones"]:
        # No f-string needed here: the messages carry no placeholders.
        logger.fatal("No zone found in the account. Please check if you have the right AWS credentials in place.")
        raise KeyError("No zone found in the account")
    for zone in zones["HostedZones"]:
        # NOTE(review): this is a prefix match, so "mydomain.com" also matches
        # e.g. "mydomain.com.something." — kept as-is for backward
        # compatibility; confirm before tightening to an exact match.
        if zone["Name"].startswith(zone_name):
            logger.info(f"Found zone {zone['Id']} with name {zone['Name']} matching the expected {zone_name}")
            return zone["Id"]
    raise ValueError(f"No zone found matching {zone_name}")
def wait_for_change_completion(change_id: str, wait_time: int = 5, max_attempts: Optional[int] = None) -> None:
    """Wait for the change to be propagated.

    This is simply a wrapper around boto3.get_change, calling it every
    `wait_time` seconds until the change results in `INSYNC`.

    Note:
        Route53 API documentation sets the only possible values of a change to be either `PENDING` or `INSYNC`.

    Args:
        change_id (str): the ID of the change to track, returned by any route53 API that returns a `ChangeInfo`
        wait_time (:obj:`int`, optional): the number of seconds between checks of the change status, defaults to 5
        max_attempts (:obj:`int`, optional): maximum number of status checks before
            giving up. The default of None keeps polling forever, which preserves
            the historical behavior (and resolves the old in-code TODO).

    Raises:
        TimeoutError: if the change is still pending after `max_attempts` checks.
    """
    attempts = 0
    while True:
        # Get the current status
        response = route53.get_change(Id=change_id)
        change_status = response["ChangeInfo"]["Status"]
        logger.debug(f"get_change output: {response}")
        if change_status == "INSYNC":
            logger.info(f"Change {change_id} has completed with status {change_status}")
            return
        attempts += 1
        if max_attempts is not None and attempts >= max_attempts:
            raise TimeoutError(f"Change {change_id} is still {change_status} after {attempts} checks")
        logger.info(f"Status of change {change_id} is still pending. Waiting {wait_time} seconds")
        sleep(wait_time)
def get_current_ip(zone_id: str, record_name: str) -> Optional[str]:
    """Return the IP currently targeted by the A record `record_name`, if any.

    Args:
        zone_id (str): the hosted zone id, as returned by `get_hosted_zone_id`
        record_name (str): the fully qualified record name, without trailing dot

    Returns:
        Optional[str]: the single IP the existing A record points to, or None
        when no record with that name exists yet.

    Raises:
        ValueError: if the matching record is not of type A, or if the A record
            holds more than one value.
    """
    logger.info("Checking current records")
    # NOTE(review): list_resource_record_sets returns at most one page of
    # records; very large zones would need pagination handling — TODO confirm.
    zone_records = route53.list_resource_record_sets(HostedZoneId=zone_id)
    matched = None
    for record in zone_records["ResourceRecordSets"]:
        # Alias records carry no "TTL" key, hence .get() to keep the log safe.
        logger.info(f"Found record of type {record['Type']} with ttl {record.get('TTL')} named {record['Name']}")
        # Route53 returns fully qualified names with a trailing dot.
        if record["Name"] == f"{record_name}.":
            matched = record
            break
    if matched is None:
        logger.info(f"No records found matching {record_name}. A new 'A' record would be created.")
        return None
    logger.info(f"Found matching record for {record_name}: {matched}")
    if matched["Type"] != "A":
        raise ValueError(
            f"The current record for {record_name} is of type {matched['Type']}! Use a different record."
        )
    # Fixed: the original read the leaked loop variable `record` here, which
    # only worked because the loop happened to break on the matched entry.
    resource_record = matched["ResourceRecords"]
    if len(resource_record) > 1:
        raise ValueError(
            f"The current A record for {record_name} has {len(resource_record)} entries: "
            f"{', '.join(i['Value'] for i in resource_record)}. "
            f"This operations is unsafe, please use a different record name, or remove the existing entry manually."
        )
    current_dns_ip = resource_record[0]["Value"]
    logger.info(f"The current target for {record_name} is {current_dns_ip}")
    return current_dns_ip
def update_record(zone_name: str, record_name: str, target_ip: str, dryrun: bool = False) -> None:
    """Point `record_name` in hosted zone `zone_name` at `target_ip`.

    Looks up the hosted zone, compares the existing A record (if any) with
    `target_ip`, and UPSERTs a 60-second-TTL A record when they differ,
    blocking until Route53 reports the change as propagated.

    Args:
        zone_name (str): the DNS name of the hosted zone, like `mydomain.com`
        record_name (str): the fully qualified record to update
        target_ip (str): the IPv4 address the record should resolve to
        dryrun (bool): when True, log what would change but do not touch Route53
    """
    zone_id = get_hosted_zone_id(zone_name=zone_name)
    current_ip = get_current_ip(zone_id=zone_id, record_name=record_name)

    # Nothing to do when the record already points at the right address.
    if current_ip == target_ip:
        logger.info(f"The current value of {record_name} matches the current IP, nothing to do.")
        return
    logger.info(f"The current value of {record_name} points to {current_ip}. Will update to {target_ip}")

    if dryrun:
        logger.info("Running in dryrun mode, not updating Route53")
        return

    logger.info(f"Submitting the change for {record_name} to point to {target_ip}")
    change = route53.change_resource_record_sets(
        HostedZoneId=zone_id,
        ChangeBatch={
            "Comment": f"Updating record to {target_ip}",
            "Changes": [
                {
                    "Action": "UPSERT",
                    "ResourceRecordSet": {
                        "Name": record_name,
                        "Type": "A",
                        "TTL": 60,
                        "ResourceRecords": [{"Value": target_ip}],
                    },
                }
            ],
        },
    )
    wait_for_change_completion(change_id=change["ChangeInfo"]["Id"])
    logger.info("Update completed")
import logging
from ipaddress import ip_address
import logging.handlers
import sys
from typing import List
import click
from route53_ddns.ip_utilities import get_ip, verify_propagation, wait_for_propagation
from route53_ddns.route53_interface import update_record
def _setup_logging(verbose: int = 0, log_file: str = None, quiet: bool = False):
    """Setup the logging for route53-ddns.

    Unless `quiet` is set, logs always go to stdout. If a `log_file` is
    provided (and `quiet` is not set), a `RotatingFileHandler` with a 1MB
    maximum and three backups is added as well.

    Any `verbose` value greater than zero raises the log level to DEBUG;
    urllib3 and botocore are pinned at INFO level regardless.

    Args:
        verbose (int): any value > 0 enables DEBUG-level logging
        log_file (:obj:`str`, optional): the path of the log file
        quiet (bool): quiet mode, prevent any log to be emitted
    """
    handlers: List[logging.Handler] = []
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    if quiet:
        # Quiet overrides any other setting: a NullHandler swallows everything.
        handlers.append(logging.NullHandler())
    else:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)
        handlers.append(stdout_handler)
        if log_file:
            file_handler = logging.handlers.RotatingFileHandler(filename=log_file, maxBytes=1_048_576, backupCount=3)
            file_handler.setFormatter(formatter)
            handlers.append(file_handler)
    logging.basicConfig(level=logging.DEBUG if verbose > 0 else logging.INFO, handlers=handlers)
    # urllib and botocore are very verbose. Forcing it to stay at INFO level
    logging.getLogger("urllib3").setLevel(logging.INFO)
    logging.getLogger("botocore").setLevel(logging.INFO)
@click.command()
@click.option("--zone", required=True, type=str, help="The name of the zone to update, for example 'example.net'")
@click.option("--record", required=True, type=str, help="The record to update, for example 'home'")
@click.option("--ip", required=False, help="Provide the IP to set, rather than detect it from ipify.org")
@click.option("-c", "--check-only", is_flag=True, default=False, help="Only preform a check if DNS entry is correct")
@click.option("-d", "--dryrun", is_flag=True, default=False, help="Doesn't update DNS")
@click.option("-v", "--verbose", count=True, help="Increase logging verbosity")
@click.option("-l", "--log-file", required=False, help="Log file. If not provided no logs file will be produced")
@click.option("-q", "--quiet", is_flag=True, help="Quiet mode, suppresses all logs")
def route53_ddns(
    zone: str, record: str, ip: str, check_only: bool, dryrun: bool, verbose: int, log_file: str, quiet: bool
) -> int:
    """Simple CLI that updated an AWS Route53 A record to point to the current IP.

    If you have a Hosted Zone in your AWS account called `example.com` and you want to have
    `home.example.net` pointing to your public IP address, you can do:

    $ route53-ddns --zone example.com --record home

    $ route53-ddns --zone example.com --record home.example.com

    If the record doesn't end with the zone, it will be automatically appended for you.
    """
    _setup_logging(verbose=verbose, log_file=log_file, quiet=quiet)

    # Make the record fully qualified relative to the zone.
    if not record.endswith(zone):
        record = f"{record}.{zone}"
        logging.info(f"Adjusting target record to be {record}")

    # Either validate the explicitly supplied IP, or detect the public one.
    if not ip:
        target_ip = get_ip()
    else:
        try:
            logging.debug(f"Validating ip {ip}")
            ip_address(ip)
        except ValueError as e:
            logging.fatal(f"IP {ip} doesn't appear to be valid: {e}")
            return 1
        target_ip = ip

    if check_only:
        logging.info(f"Running in check-only mode. Validating propagation of {target_ip}")
        verify_propagation(record=record, target_ip=target_ip)
        return 0

    update_record(zone_name=zone, record_name=record, target_ip=target_ip, dryrun=dryrun)
    wait_for_propagation(record=record, target_ip=target_ip)
    return 0
if __name__ == "__main__":
    sys.exit(route53_ddns())
from enum import Enum
from pydantic import Field
from typing import List, Optional
from route53_transfer.hashable_model import HashableModel
class AliasTargetModel(HashableModel):
    """Route53 ``AliasTarget`` block of an alias record set."""

    # NOTE(review): the defaults are None while the annotations are plain
    # str/bool (not Optional). Pydantic does not validate defaults, so this
    # works, but the schema advertises the fields as required — confirm
    # before tightening the types.
    DNSName: str = Field(
        None,
        description="DNS name of the target host",
        examples=["test1.example.com."],
    )
    EvaluateTargetHealth: bool = Field(
        None,
        description="Whether or not to evaluate the health of the target",
        examples=[False, True],
    )
    HostedZoneId: str = Field(
        None,
        description="Hosted zone ID of the target host",
        examples=["Z0992A3F3Q3HY06FU"],
    )
class ResourceRecord(HashableModel):
    """A single value entry inside a record set's ``ResourceRecords`` list."""

    # NOTE(review): default None with a non-Optional annotation — pydantic
    # does not validate defaults, so this works as written.
    Value: str = Field(
        None,
        description="Value of the resource record",
        examples=["test1.example.com."],
    )
class RegionEnum(str, Enum):
    """AWS region identifiers, as used by latency-based routing records.

    NOTE(review): this is a subset of all AWS regions — extend the list when
    records reference regions that are missing here.
    """

    us_east_1 = "us-east-1"
    us_east_2 = "us-east-2"
    us_west_1 = "us-west-1"
    us_west_2 = "us-west-2"
    ca_central_1 = "ca-central-1"
    ap_northeast_1 = "ap-northeast-1"
    ap_northeast_2 = "ap-northeast-2"
    ap_southeast_1 = "ap-southeast-1"
    ap_southeast_2 = "ap-southeast-2"
    ap_south_1 = "ap-south-1"
    eu_central_1 = "eu-central-1"
    eu_west_1 = "eu-west-1"
    eu_west_2 = "eu-west-2"
    eu_west_3 = "eu-west-3"
    sa_east_1 = "sa-east-1"
class ContinentCodeEnum(str, Enum):
    """Two-letter continent codes accepted in geolocation routing records."""

    Africa = "AF"
    Antarctica = "AN"
    Asia = "AS"
    Europe = "EU"
    NorthAmerica = "NA"
    Oceania = "OC"
    SouthAmerica = "SA"
class GeoLocationModel(HashableModel):
    """Geolocation routing constraint of a Route53 record.

    NOTE(review): ``ContinentCode`` is typed ``Optional[str]`` even though
    ``ContinentCodeEnum`` exists and is used in the examples — confirm whether
    the looser type is intentional.
    """

    ContinentCode: Optional[str] = Field(
        default=None,
        description="Continent code of the location",
        examples=[ContinentCodeEnum.Antarctica],
    )
    CountryCode: Optional[str] = Field(
        default=None,
        description="Country code or '*' for default or fallback",
        examples=["US"],
    )
    SubdivisionCode: Optional[str] = Field(
        default=None,
        description="Subdivision code of the location",
        examples=["CA"],
    )
class R53Record(HashableModel):
    """A Route53 resource record set, mirroring the structure returned by the
    AWS Route53 API (field names intentionally keep AWS CamelCase).
    """

    # Defaults are passed as ``default=`` keyword throughout for consistency.
    Name: str = Field(
        default=None,
        description="Name of the DNS record",
        examples=["test1.example.com."],
    )
    Type: str = Field(
        default=None,
        description="Type of DNS record",
        examples=["A"],
    )
    TTL: int = Field(
        default=None,
        # Fixed typo: "Time to leave" -> "Time to live".
        description="Time to live of the DNS record in seconds",
        examples=[60, 300],
    )
    Region: Optional[str] = Field(
        default=None,
        description="If the record has latency routing policy, this field will"
                    " indicate which AWS region is the record pointing to."
                    " Must be a valid AWS region name",
        examples=["eu-west-1", "us-east-2"],
    )
    GeoLocation: Optional[GeoLocationModel] = None
    AliasTarget: Optional[AliasTargetModel] = None
    ResourceRecords: Optional[List[ResourceRecord]] = None
    SetIdentifier: Optional[str] = Field(
        default=None,
        description="Assigns an arbitrary identifier to the record",
        examples=["rp-geo-default", "europe-main"],
    )
    Weight: Optional[int] = Field(
        default=None,
        description="If the record has weighted routing policy, this field will"
                    " indicate the weight of the record.",
        examples=[0, 100],
    )
    Failover: Optional[str] = Field(
        default=None,
        description="Can be either PRIMARY or SECONDARY",
        examples=["PRIMARY", "SECONDARY"],
    )
    MultiValueAnswer: Optional[bool] = Field(
        default=None,
        examples=[False, True],
    )
    HealthCheckId: Optional[str] = Field(
        default=None,
        description="Unique identifier of the associated health check",
        examples=["ff59b681-c8b6-4039-98ed-0e5b77edc1ac"],
    )
    TrafficPolicyInstanceId: Optional[str] = Field(
        default=None,
        examples=["ff59b681-c8b6-4039-98ed-0e5b77edc1ac"],
    )
@staticmethod
def from_dict(record_dict: dict) -> "R53Record":
return R53Record(**record_dict)
    def is_alias(self) -> bool:
        # True when this record delegates to an AliasTarget instead of
        # carrying its own ResourceRecords values.
        return self.AliasTarget is not None
def is_alias_in_zone(self, zone_id: str) -> bool:
return self.is_alias() and self.AliasTarget.HostedZoneId == zone_id
def alias_target(self):
return self.AliasTarget.DNSName if self.is_alias() else None
    def __str__(self):
        # Render only the fields that are actually set.
        # NOTE(review): ``.dict()`` is the pydantic v1 API — confirm the
        # pinned pydantic version (v2 renamed it to ``model_dump()``).
        dict_ = self.dict(exclude_none=True)
return str(dict_) | /route53-transfer-ng-1.0.5.tar.gz/route53-transfer-ng-1.0.5/route53_transfer/models.py | 0.923087 | 0.452294 | models.py | pypi |
import urllib.parse
from typing import Any, Dict, List, NewType
import requests
from routemaster_sdk.types import (
Label,
State,
LabelRef,
Metadata,
LabelName,
StateMachine,
)
from routemaster_sdk.exceptions import (
DeletedLabel,
UnknownLabel,
LabelAlreadyExists,
UnknownStateMachine,
)
Json = NewType('Json', Dict[str, Any])
class RoutemasterAPI:
    """Wrapper around an instance of the routemaster HTTP API."""

    def __init__(self, api_url: str, session: requests.Session) -> None:
        """Create a new api wrapper around a given session and api base url."""
        self._api_url = api_url
        self._session = session
        # Bind the session's HTTP verbs directly onto the wrapper so the
        # endpoint methods below can call e.g. ``self.get(...)``.
        self.delete = session.delete
        self.get = session.get
        self.patch = session.patch
        self.post = session.post
def build_url(self, endpoint: str) -> str:
"""Build the url to the given endpoint for the wrapped API instance."""
return urllib.parse.urljoin(self._api_url, endpoint)
def build_label_url(self, label: LabelRef) -> str:
"""Build the url for a label in the wrapped API instance."""
return self.build_url('state-machines/{0}/labels/{1}'.format(
label.state_machine,
label.name,
))
def build_state_machine_url(self, state_machine: StateMachine) -> str:
"""Build the url for a state machine in the wrapped API instance."""
return self.build_url(
'state-machines/{0}/labels'.format(state_machine),
)
def get_status(self) -> Json:
"""Get the status of the wrapped API instance."""
response = self.get(self.build_url(''))
response.raise_for_status()
return response.json()
def get_state_machines(self) -> List[StateMachine]:
"""Get the state machines known to the wrapped API instance."""
response = self.get(self.build_url('state-machines'))
response.raise_for_status()
return [
StateMachine(data['name'])
for data in response.json()['state-machines']
]
def get_labels(self, state_machine: StateMachine) -> List[LabelRef]:
"""List the labels in the given state machine."""
response = self.get(self.build_state_machine_url(state_machine))
if response.status_code == 404:
raise UnknownStateMachine(state_machine)
response.raise_for_status()
return [
LabelRef(
name=LabelName(data['name']),
state_machine=state_machine,
)
for data in response.json()['labels']
]
def get_label(self, label: LabelRef) -> Label:
"""
Get a label within a given state machine.
Errors:
- ``UnknownLabel`` if the label is not known (HTTP 404).
- ``DeletedLabel`` if the label has been deleted (HTTP 410).
- ``requests.HTTPError`` for other HTTP errors.
"""
response = self.get(self.build_label_url(label))
if response.status_code == 404:
raise UnknownLabel(label)
elif response.status_code == 410:
raise DeletedLabel(label)
response.raise_for_status()
data = response.json()
return Label(
ref=label,
metadata=data['metadata'],
state=State(data['state']),
)
def create_label(self, label: LabelRef, metadata: Metadata) -> Label:
"""
Create a label with a given metadata, and start it in the state machine.
Errors:
- ``UnknownStateMachine`` if the state machine is not known (HTTP 404).
- ``LabelAlreadyExists`` if the label already exists (HTTP 409).
- ``requests.HTTPError`` for other HTTP errors.
"""
response = self.post(
self.build_label_url(label),
json={'metadata': metadata},
)
if response.status_code == 404:
raise UnknownStateMachine(label.state_machine)
elif response.status_code == 409:
raise LabelAlreadyExists(label)
response.raise_for_status()
data = response.json()
return Label(
ref=label,
metadata=data['metadata'],
state=State(data['state']),
)
def update_label(self, label: LabelRef, metadata: Metadata) -> Label:
"""
Update a label in a state machine.
Triggering progression if necessary according to the state machine
configuration. Updates are _merged_ with existing metadata.
Errors:
- ``UnknownLabel`` if the label is not known (HTTP 404).
- ``DeletedLabel`` if the label has been deleted (HTTP 410).
- ``requests.HTTPError`` for other HTTP errors.
"""
response = self.patch(
self.build_label_url(label),
json={'metadata': metadata},
)
if response.status_code == 404:
raise UnknownLabel(label)
elif response.status_code == 410:
raise DeletedLabel(label)
response.raise_for_status()
data = response.json()
return Label(
ref=label,
metadata=data['metadata'],
state=State(data['state']),
)
    def delete_label(self, label: LabelRef) -> None:
        """
        Delete a label in a state machine.
        Marks a label as deleted, but does not remove it from the database.
        Deleted labels cannot be updated and will not move state.
        Errors:
        - ``UnknownStateMachine`` if the state machine is not known (HTTP 404).
        - ``requests.HTTPError`` for other HTTP errors.
        """
        response = self.delete(self.build_label_url(label))
        # NOTE(review): 404 is mapped to an unknown *state machine*, mirroring
        # get_labels — confirm the API does not also 404 for unknown labels.
        if response.status_code == 404:
            raise UnknownStateMachine(label.state_machine)
response.raise_for_status() | /routemaster-sdk-0.0.1.tar.gz/routemaster-sdk-0.0.1/routemaster_sdk/api.py | 0.879315 | 0.368178 | api.py | pypi |
import dataclasses
import enum
import ipaddress
import json
import typing
import asyncio_zabbix_sender
import router_log_preprocessor.domain as domain
import router_log_preprocessor.hooks.zabbix._known_clients as _known_clients
def map_client_message(
    record: domain.LogRecord, message: domain.Message
) -> typing.Generator[asyncio_zabbix_sender.Measurement, None, None]:
    """Yield one Zabbix measurement per field of a client message.

    The item key encodes process, field and client MAC address as
    ``rlp.<process>[<field>,<mac>]``.  The ``mac_address`` field itself is
    part of every key and therefore not emitted as its own measurement.
    """
    assert record.process is not None
    # Zabbix key convention: lower-case, underscores instead of dashes.
    item_process = record.process.lower().replace("-", "_")
    # Whole-second timestamp; for WLC events the nanosecond slot is reused
    # to carry the event code.
    timestamp = int(record.timestamp.timestamp())
    nanoseconds = None
    if isinstance(message, domain.WlcEventModel):
        nanoseconds = message.event.value
    for model_field in dataclasses.fields(message):
        if model_field.name == "mac_address":
            continue
        field_value = getattr(message, model_field.name)
        # Normalise values that Zabbix cannot serialise directly.
        if isinstance(field_value, enum.Enum):
            field_value = field_value.value
        elif isinstance(field_value, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
            field_value = str(field_value)
        yield asyncio_zabbix_sender.Measurement(
            host=record.hostname,
            key=f"rlp.{item_process}[{model_field.name},{message.mac_address}]",
            value=field_value,
            clock=timestamp,
            ns=nanoseconds,
        )
def map_client_discovery(
    record: domain.LogRecord, known_clients: _known_clients.KnownClients
) -> asyncio_zabbix_sender.Measurements:
    # Build a Zabbix low-level-discovery payload: a compact JSON array with
    # one {"mac": ...} row per client currently known for this process.
    assert record.process is not None
    value = json.dumps(
        [{"mac": str(mac)} for mac in known_clients.clients(record.process)],
        indent=None,
        separators=(",", ":"),
    )
    return asyncio_zabbix_sender.Measurements([
        asyncio_zabbix_sender.Measurement(
            host=record.hostname,
            key=f"rlp.client_discovery[{record.process.lower()}]",
            value=value,
            clock=int(record.timestamp.timestamp()),
        )
]) | /router_log_preprocessor-0.1.7-py3-none-any.whl/router_log_preprocessor/hooks/zabbix/_mapper.py | 0.603231 | 0.21626 | _mapper.py | pypi |
import typing
import anyio
import asyncio_zabbix_sender
import router_log_preprocessor.domain as domain
import router_log_preprocessor.hooks.abc as abc
import router_log_preprocessor.hooks.zabbix._known_clients as known_clients
import router_log_preprocessor.hooks.zabbix._mapper as mapper
import router_log_preprocessor.util.logging as logging
class ZabbixTrapper(abc.Hook):
    """Hook that forwards preprocessed log records to Zabbix trapper items."""

    def __init__(
        self, sender, client_discovery_wait_time=50, measurement_bundle_wait_time=10
    ):
        """:param sender: asyncio Zabbix sender used for all outgoing packets.
        :param client_discovery_wait_time: seconds to wait after a discovery
            before trapper values for the newly discovered client are sent.
        :param measurement_bundle_wait_time: seconds measurements are bundled
            before one combined send to Zabbix.
        """
        super().__init__()
        self._sender = sender
        self._client_discovery_wait_time = client_discovery_wait_time
        self._known_clients = known_clients.KnownClients(client_discovery_wait_time)
        self._measurement_bundle_wait_time = measurement_bundle_wait_time
        # True while some task is sleeping inside _start_bundling and owns
        # the responsibility of flushing self._measurements.
        self._is_bundling_measurements = False
        self._measurements = asyncio_zabbix_sender.Measurements()
async def send(
self, record: domain.LogRecord, message: typing.Optional[domain.Message]
) -> None:
"""Send the preprocessed message to the corresponding Zabbix Trapper item(s).
For client messages a low-level discovery will be sent first and the
corresponding Zabbix Trapper item(s) will be delayed until Zabbix have been
given time to synchronize caches.
If the message is None, then this method returns immediately.
:param record: The log record containing hostname, process name and timestamp.
:param message: The message containing the mac address.
"""
if message is None:
return
seconds_until_discovered = await self.discover_client(record, message)
if seconds_until_discovered > 0:
# Allow the Zabbix server(s) to discover and create prototype items
logging.logger.debug(
"Pending discovery event of %s on %s. Waiting %f seconds",
message.mac_address,
record.process,
seconds_until_discovered,
)
await anyio.sleep(seconds_until_discovered)
for measurement in mapper.map_client_message(record, message):
self._measurements.add_measurement(measurement)
await self._start_bundling()
async def discover_client(
self, record: domain.LogRecord, message: domain.Message
) -> float:
"""Discover a new client based on the mac address in the message.
There are three cases of client discovery:
1) The client have not been discovered before
2) The client have recently been discovered
3) The client have been discovered for a long time
A discovery packet will only be sent to Zabbix in the first case and the callee
will be instructed to wait for the full default_wait_time period before sending
the actual data to Zabbix. This ensures that the Zabbix Trapper process is aware
of the (newly created) item prototype(s).
If a client have recently been discovered the callee will be instructed to wait
the remaining time of the default_wait_time before sending the actual data to
Zabbix.
For the last case the callee will be instructed to wait 0 seconds, i.e. they can
send the data to Zabbix immediately.
:param record: The log record containing hostname, process name and timestamp.
:param message: The message containing the mac address.
"""
assert record.process is not None
if self._known_clients.is_client_known(record.process, message.mac_address):
# MAC address is already known, so no need to rediscover it,
# but we might need to wait in the case that the discovery were just sent
return self._known_clients.remaining_wait_time(
record.process, message.mac_address
)
# Mark client as known
self._known_clients.add_client(record.process, message.mac_address)
measurements = mapper.map_client_discovery(record, self._known_clients)
logging.logger.info("Discovering: %r", measurements)
response = await self._sender.send(measurements)
logging.logger.info("Response: %r", response)
assert response.processed == 1, response
return self._client_discovery_wait_time
    async def _start_bundling(self):
        """Ensure that bundling of measurements has started.
        If another process already started the bundling of measurements, then nothing
        further is done. Otherwise, this will take responsibility of both bundling the
        measurements and sending the measurements to Zabbix after the
        `measurement_bundle_wait_time` has elapsed.
        If the sending of measurements fails, then the process is retried indefinitely.
        """
        if self._is_bundling_measurements:
            # This process is done and have handed over the responsibility to send the
            # measurements to another process
            return
        # Allow other processes to add measurements while this process sleeps
        self._is_bundling_measurements = True
        await anyio.sleep(self._measurement_bundle_wait_time)
        # Get the measurements and prepare an empty measurement container
        # Once control is handed back to this process we have control of the
        # measurements until the next await statement
        self._is_bundling_measurements = False
        measurements = self._measurements
        self._measurements = asyncio_zabbix_sender.Measurements()
        try:
            logging.logger.info("Sending data: %r", measurements)
            response = await self._sender.send(measurements)
            logging.logger.info("Response: %r", response)
        except ConnectionError as connection_error:
            logging.logger.warning(
                "Connection error to Zabbix server: %r",
                connection_error
            )
            # Re-queue the failed measurements and retry the whole bundle.
            # NOTE(review): the recursive retry (next line, below) never gives
            # up and deepens the await chain by one frame per failed cycle.
            for measurement in measurements:
                self._measurements.add_measurement(measurement)
await self._start_bundling() | /router_log_preprocessor-0.1.7-py3-none-any.whl/router_log_preprocessor/hooks/zabbix/_trapper.py | 0.637144 | 0.342489 | _trapper.py | pypi |
import collections
import datetime
import typing
import router_log_preprocessor.domain as domain
class KnownClients:
    """Tracks, per process, which client MAC addresses have been discovered
    and when, so callers can honour Zabbix's discovery delay."""

    def __init__(self, client_discovery_wait_time: float) -> None:
        """:param client_discovery_wait_time: seconds a freshly added client
        must wait before it counts as fully discovered."""
        self._total_wait_time = client_discovery_wait_time
        # process name -> {mac address -> time the client was first seen}
        self._known_clients: typing.DefaultDict[
            str, typing.Dict[domain.MAC, datetime.datetime]
        ] = collections.defaultdict(dict)
@staticmethod
def _now() -> datetime.datetime:
return datetime.datetime.utcnow()
def add_client(self, process: str, mac_address: domain.MAC) -> None:
"""Add a client to the repository marking the date and time of the addition.
:param process: The process of the log entry.
:param mac_address: The mac address of the client.
"""
self._known_clients[process][mac_address] = KnownClients._now()
def is_client_known(self, process: str, mac_address: domain.MAC) -> bool:
"""Verify if a client (mac address) is known for a given process.
:param process: The process of the log entry.
:param mac_address: The mac address of the client.
:return: True if the client is already known and False otherwise.
"""
return mac_address in self._known_clients[process]
def remaining_wait_time(self, process: str, mac_address: domain.MAC) -> float:
"""Calculate the remaining wait time before a client is assumed to be
discovered by Zabbix.
:param process: The process of the log entry.
:param mac_address: The mac address of the client.
:return: The remaining wait time. 0 if the client can be assumed to be
discovered.
"""
now = KnownClients._now()
known_at = self._known_clients[process][mac_address]
known_for = (now - known_at).total_seconds()
remaining_wait_time = self._total_wait_time - known_for
if remaining_wait_time < 0:
return 0.0
return remaining_wait_time
    def clients(self, process: str) -> typing.Generator[domain.MAC, None, None]:
        """Generate a list of clients known for the given process.
        :param process: The process of the log entry.
        """
        # Iterating the per-process dict yields the MAC addresses (its keys).
        for key in self._known_clients[process]:
yield key | /router_log_preprocessor-0.1.7-py3-none-any.whl/router_log_preprocessor/hooks/zabbix/_known_clients.py | 0.794943 | 0.336331 | _known_clients.py | pypi |
import urllib
import urlparse
import re
#: Regex for URL definitions.
#: Compiled with re.VERBOSE, so the whitespace and ``#`` comments inside the
#: pattern are ignored.  It matches template placeholders such as ``<name>``,
#: ``<:regex>`` or ``<name:regex>``.
_ROUTE_REGEX = re.compile(r'''
\< # The exact character "<"
(\w*) # The optional variable name (restricted to a-z, 0-9, _)
(?::([^>]*))? # The optional :regex part
\> # The exact character ">"
''', re.VERBOSE)
class BaseRoute(object):
    """Interface for URL routes. Custom routes must implement some or all
    methods and attributes from this class.
    """

    #: Route name, used to build URLs.
    name = None
    #: True if this route is only used for URL generation and never matches.
    build_only = False

    def match(self, request):
        """Matches this route against the current request.
        :param request:
            A ``webob.Request`` instance.
        :returns:
            A tuple ``(handler, args, kwargs)`` if the route matches, or None.
        """
        raise NotImplementedError()

    def build(self, request, args, kwargs):
        """Builds and returns a URL for this route.
        :param request:
            The current ``Request`` object.
        :param args:
            Tuple of positional arguments to build the URL.
        :param kwargs:
            Dictionary of keyword arguments to build the URL.
        :returns:
            An absolute or relative URL.
        """
        raise NotImplementedError()

    def get_routes(self):
        """Generator to get all routes from a route.
        :yields:
            This route or all nested routes that it contains.
        """
        yield self

    def get_match_routes(self):
        """Generator to get all routes that can be matched from a route.
        :yields:
            This route or all nested routes that can be matched.
        """
        # A build-only route without a name can never be used for anything,
        # so registering it is treated as a configuration error.
        if not self.build_only:
            yield self
        elif not self.name:
            raise ValueError("Route %r is build_only but doesn't have a "
                "name" % self)

    def get_build_routes(self):
        """Generator to get all routes that can be built from a route.
        :yields:
            This route or all nested routes that can be built.
        """
        if self.name is not None:
            yield self
class Route(BaseRoute):
    """A URL route definition. A route template contains parts enclosed by
    ``<>`` and is used to match requested URLs. Here are some examples::
        route = Route(r'/article/<id:[\d]+>', ArticleHandler)
        route = Route(r'/wiki/<page_name:\w+>', WikiPageHandler)
        route = Route(r'/blog/<year:\d{4}>/<month:\d{2}>/<day:\d{2}>/<slug:\w+>', BlogItemHandler)
    Based on `Another Do-It-Yourself Framework`_, by Ian Bicking. We added
    URL building, non-keyword variables and other improvements.
    """

    def __init__(self, template, handler=None, name=None, defaults=None,
        build_only=False):
        """Initializes a URL route.
        :param template:
            A route template to be matched, containing parts enclosed by ``<>``
            that can have only a name, only a regular expression or both:
            ============================= ==================================
            Format Example
            ============================= ==================================
            ``<name>`` ``r'/<year>/<month>'``
            ``<:regular expression>`` ``r'/<:\d{4}>/<:\d{2}>'``
            ``<name:regular expression>`` ``r'/<year:\d{4}>/<month:\d{2}>'``
            ============================= ==================================
            If the name is set, the value of the matched regular expression
            is passed as keyword argument to the :class:`RequestHandler`.
            Otherwise it is passed as positional argument.
            The same template can mix parts with name, regular expression or
            both.
        :param handler:
            A :class:`RequestHandler` class or dotted name for a class to be
            lazily imported, e.g., ``my.module.MyHandler``.
        :param name:
            The name of this route, used to build URLs based on it.
        :param defaults:
            Default or extra keywords to be returned by this route. Values
            also present in the route variables are used to build the URL
            when they are missing.
        :param build_only:
            If True, this route never matches and is used only to build URLs.
        """
        self.template = template
        self.handler = handler
        self.name = name
        self.defaults = defaults or {}
        self.build_only = build_only
        # Lazy properties: filled in on first use by _parse_template().
        self.regex = None
        self.variables = None
        self.reverse_template = None

    def _parse_template(self):
        # Walk the template once, building in parallel:
        #  - ``regex``: the pattern that matches incoming request paths,
        #  - ``reverse_template``: a %-format string used to rebuild URLs,
        #  - ``self.variables``: per-variable validation regexes.
        # Nameless placeholders become positional and are keyed ``__N__``.
        self.variables = {}
        last = count = 0
        regex = template = ''
        for match in _ROUTE_REGEX.finditer(self.template):
            part = self.template[last:match.start()]
            name = match.group(1)
            expr = match.group(2) or '[^/]+'
            last = match.end()
            if not name:
                name = '__%d__' % count
                count += 1
            template += '%s%%(%s)s' % (part, name)
            regex += '%s(?P<%s>%s)' % (re.escape(part), name, expr)
            self.variables[name] = re.compile('^%s$' % expr)
        regex = '^%s%s$' % (regex, re.escape(self.template[last:]))
        self.regex = re.compile(regex)
        self.reverse_template = template + self.template[last:]
        self.has_positional_variables = count > 0

    def _regex(self):
        # Lazy accessor: parse the template on first use.
        if self.regex is None:
            self._parse_template()
        return self.regex

    def _variables(self):
        # Lazy accessor: parse the template on first use.
        if self.variables is None:
            self._parse_template()
        return self.variables

    def _reverse_template(self):
        # Lazy accessor: parse the template on first use.
        if self.reverse_template is None:
            self._parse_template()
        return self.reverse_template

    def match(self, request):
        """Matches this route against the current request.
        .. seealso:: :meth:`BaseRoute.match`.
        """
        regex = self.regex or self._regex()
        match = regex.match(request.path)
        if match:
            kwargs = self.defaults.copy()
            kwargs.update(match.groupdict())
            # Positional variables were named ``__N__`` by _parse_template();
            # pull them out of kwargs and order them by N.
            if kwargs and self.has_positional_variables:
                args = tuple(value[1] for value in sorted((int(key[2:-2]), \
                    kwargs.pop(key)) for key in \
                    kwargs.keys() if key.startswith('__')))
            else:
                args = ()
            return self.handler, args, kwargs

    def build(self, request, args, kwargs):
        """Builds a URL for this route.
        .. seealso:: :meth:`Router.build`.
        """
        return self._build(request, args, kwargs)[0]

    def _build(self, request, args, kwargs):
        # Reserved keywords controlling the URL shape are popped first so
        # they never leak into the query string.
        full = kwargs.pop('_full', False)
        scheme = kwargs.pop('_scheme', None)
        netloc = kwargs.pop('_netloc', None)
        anchor = kwargs.pop('_anchor', None)
        if full or scheme or netloc:
            if not netloc:
                netloc = request.host
            if not scheme:
                scheme = 'http'
        path, query = self._build_path(args, kwargs.copy())
        return urlunsplit(scheme, netloc, path, query, anchor), query

    def _build_path(self, args, kwargs):
        """Builds the path for this route.
        :returns:
            A tuple ``(path, query)`` with the built URL path and extra
            keywords to be used as URL query arguments.
        """
        variables = self.variables or self._variables()
        if self.has_positional_variables:
            for index, value in enumerate(args):
                key = '__%d__' % index
                if key in variables:
                    kwargs[key] = value
        values = {}
        for name, regex in variables.iteritems():
            value = kwargs.pop(name, self.defaults.get(name))
            # NOTE(review): ``if not value`` also rejects falsy values such
            # as 0 or '' even when explicitly supplied; ``value is None``
            # was probably intended.
            if not value:
                raise KeyError('Missing argument "%s" to build URL.' % \
                    name.strip('_'))
            if not isinstance(value, basestring):
                value = str(value)
            # NOTE(review): 'buiding' typo in the user-visible message below.
            if not regex.match(value):
                raise ValueError('URL buiding error: Value "%s" is not '
                    'supported for argument "%s".' % (value, name.strip('_')))
            values[name] = value
        return (self.reverse_template % values, kwargs)
class Router(object):
    """A simple URL router used to match the current URL, dispatch the handler
    and build URLs for other resources.
    """

    #: Class used when the route is a tuple.
    route_class = Route

    def __init__(self, routes=None):
        """Initializes the router.
        :param routes:
            A list of :class:`Route` instances to initialize the router.
        """
        # All routes that can be matched.
        self.match_routes = []
        # All routes that can be built, keyed by route name.
        self.build_routes = {}
        if routes:
            for route in routes:
                self.add(route)

    def add(self, route):
        """Adds a route to this router.
        :param route:
            A :class:`Route` instance.
        """
        if isinstance(route, tuple):
            # Default route.
            route = self.route_class(*route)
        for r in route.get_match_routes():
            self.match_routes.append(r)
        for r in route.get_build_routes():
            self.build_routes.setdefault(r.name, []).append(r)

    def match(self, request):
        """Matches all routes against the current request. The first one that
        matches is returned.
        :param request:
            A ``webob.Request`` instance.
        :returns:
            A tuple ``(route, args, kwargs)`` if a route matched, or None.
        """
        for route in self.match_routes:
            match = route.match(request)
            if match:
                return match

    def build(self, name, request, args, kwargs):
        """Builds and returns a URL for a named :class:`Route`.
        :param name:
            The route name.
        :param request:
            The current ``Request`` object.
        :param args:
            Tuple of positional arguments to build the URL.
        :param kwargs:
            Dictionary of keyword arguments to build the URL.
        :returns:
            An absolute or relative URL.
        """
        routes = self.build_routes.get(name)
        if not routes:
            raise KeyError('Route %r is not defined.' % name)
        # Prefer the route that consumes the most arguments: the first one
        # with no leftover query args wins outright.
        best_match = None
        for route in routes:
            try:
                url, query = route._build(request, args, kwargs)
                query_count = len(query)
                if query_count == 0:
                    return url
                if best_match is None or query_count < best_match[0]:
                    best_match = (query_count, url)
            # Python 2 exception syntax; the bound name ``e`` is unused.
            except (KeyError, ValueError), e:
                pass
        if best_match:
            return best_match[1]
        raise ValueError('No routes are suitable to build %r with '
            'arguments %r and keyword arguments %r.' % (name, args, kwargs))
def to_utf8(value):
    """Returns a string encoded using UTF-8.
    This function comes from `Tornado`_.
    :param value:
        A unicode or string to be encoded.
    :returns:
        The encoded string.
    """
    # Byte strings pass through untouched; anything that is neither str nor
    # unicode trips the assertion, exactly as before.
    if not isinstance(value, unicode):
        assert isinstance(value, str)
        return value
    return value.encode('utf-8')
def to_unicode(value):
    """Returns a unicode string from a string, using UTF-8 to decode if needed.
    This function comes from `Tornado`_.
    :param value:
        A unicode or string to be decoded.
    :returns:
        The decoded string.
    """
    # Unicode strings pass through untouched; anything that is neither str
    # nor unicode trips the assertion, exactly as before.
    if not isinstance(value, str):
        assert isinstance(value, unicode)
        return value
    return value.decode('utf-8')
def urlunsplit(scheme=None, netloc=None, path=None, query=None, fragment=None):
    """Similar to ``urlparse.urlunsplit``, but will escape values and
    urlencode and sort query arguments.
    :param scheme:
        URL scheme, e.g., `http` or `https`.
    :param netloc:
        Network location, e.g., `localhost:8080` or `www.google.com`.
    :param path:
        URL path.
    :param query:
        URL query as an escaped string, or a dictionary or list of key-values
        tuples to build a query.
    :param fragment:
        Fragment identifier, also known as "anchor".
    :returns:
        An assembled absolute or relative URL.
    """
    # scheme and netloc are only meaningful together; drop both if either
    # is missing so a relative URL is produced.
    if not scheme or not netloc:
        scheme = None
        netloc = None
    if path:
        # Keep '/' unescaped so path segments stay intact.
        path = urllib.quote_plus(to_utf8(path), '/')
    if query and not isinstance(query, basestring):
        if isinstance(query, dict):
            query = query.items()
        query_args = []
        # Each value may be a single string or a sequence of values.
        for key, values in query:
            if isinstance(values, basestring):
                values = (values,)
            for value in values:
                query_args.append((to_utf8(key), to_utf8(value)))
        # Sorting should be optional? Sorted args are commonly needed to build
        # URL signatures for services.
        query_args.sort()
        query = urllib.urlencode(query_args)
    if fragment:
        fragment = urllib.quote_plus(to_utf8(fragment))
return urlparse.urlunsplit((scheme, netloc, path, query, fragment)) | /router-0.1.zip/router-0.1/router.py | 0.765681 | 0.192634 | router.py | pypi |
from typing import Optional
import click
import nagiosplugin
# Root command group for the RouterOS checks.  Every connection-related
# option is collected here and stashed on the click context so subcommands
# (e.g. ``system.fan``) can build the API connection from ``ctx.obj``.
# NOTE: deliberately no docstring on ``cli`` — click would surface it as
# user-visible ``--help`` text.
@click.group()
@click.option(
    "--host",
    required=True,
    help="Hostname or IP address of the device to connect to",
)
@click.option(
    "--hostname",
    help="Use this hostname to check the SSL certificates",
)
@click.option(
    "--port",
    default=None,
    help="The port to use. Defaults to 8728 for non SSL connections and 8729 for SSL connections",
)
@click.option(
    "--username",
    required=True,
    help="The username of the monitoring user. Do NOT use a user with admin privileges",
)
@click.option(
    "--password",
    required=True,
    help="The password of the monitoring user",
)
@click.option(
    "--routeros-version",
    default="auto",
    help=(
        "Version of RouterOS running on the device. "
        "The value 'auto' is special and if set the check will try to detect the version automatically. "
        "The 'auto' option is recommended. "
        "Examples: '6', '6.48.8', '7', '7.8', 'auto' "
        "(Default: auto)"
    )
)
@click.option(
    "--ssl/--no-ssl",
    "use_ssl",
    default=True,
    help="Use a SSL encrypted connections to communicate with the device",
)
@click.option(
    "--ssl-cafile",
    help="Custom CA file to check SSL certificates",
)
@click.option(
    "--ssl-capath",
    help="Custom path to look for CA files to check SSL certificates",
)
@click.option(
    "--ssl-force-no-certificate",
    is_flag=True,
    default=False,
    help="Force an anonymous connection",
)
@click.option(
    "--ssl-verify/--no-ssl-verify",
    default=True,
    help="Verify the SSL certificate",
)
@click.option("--ssl-verify-hostname/--no-ssl-verify-hostname", default=True)
@click.option("-v", "--verbose", count=True)
@click.pass_context
def cli(ctx, host: str, hostname: Optional[str], port: int, username: str, password: str, routeros_version: str,
        use_ssl: bool, ssl_cafile: Optional[str], ssl_capath: Optional[str], ssl_force_no_certificate: bool,
        ssl_verify: bool, ssl_verify_hostname: bool, verbose: int):
    # Ensure ctx.obj is a dict and copy every option into it verbatim.
    ctx.ensure_object(dict)
    ctx.obj["host"] = host
    ctx.obj["hostname"] = hostname
    ctx.obj["port"] = port
    ctx.obj["username"] = username
    ctx.obj["password"] = password
    ctx.obj["routeros_version"] = routeros_version
    ctx.obj["ssl"] = use_ssl
    ctx.obj["ssl_cafile"] = ssl_cafile
    ctx.obj["ssl_capath"] = ssl_capath
    ctx.obj["ssl_force_no_certificate"] = ssl_force_no_certificate
    ctx.obj["ssl_verify"] = ssl_verify
    ctx.obj["ssl_verify_hostname"] = ssl_verify_hostname
    ctx.obj["verbose"] = verbose
    # Propagate the -v count to nagiosplugin's runtime verbosity.
    runtime = nagiosplugin.Runtime()
runtime.verbose = verbose | /routeros_check-0.9.2-py3-none-any.whl/routeros_check/cli.py | 0.746601 | 0.161717 | cli.py | pypi |
import re
from typing import Dict, List, Set
import click
import nagiosplugin
from ..cli import cli
from ..helper import logger, RouterOSVersion
from ..resource import RouterOSCheckResource
class SystemFanResource(RouterOSCheckResource):
    """Nagios resource reading fan speeds from the RouterOS ``/system/health``
    API and attaching per-fan scalar contexts to the check."""

    name = "FAN"

    def __init__(
        self,
        cmd_options,
        check: nagiosplugin.Check,
        warning_values: List[str],
        critical_values: List[str],
        use_regex: bool
    ):
        """:param cmd_options: shared CLI/connection options (``ctx.obj``).
        :param check: the check the per-fan contexts are added to.
        :param warning_values: ``name:threshold`` strings for warnings.
        :param critical_values: ``name:threshold`` strings for criticals.
        :param use_regex: treat the names in the thresholds as regexes.
        """
        super().__init__(cmd_options=cmd_options)
        self._check = check
        self.fan_names: Set[str] = set()
        self.fan_values: Dict[str, int] = {}
        self.use_regex: bool = use_regex
        self.warning_values: Dict[str, str] = {}
        self.critical_values: Dict[str, str] = {}
        self.warning_regex_values: Dict[re.Pattern, str] = {}
        self.critical_regex_values: Dict[re.Pattern, str] = {}
        if self.use_regex:
            self.warning_regex_values = self.prepare_regex_thresholds(warning_values)
            self.critical_regex_values = self.prepare_regex_thresholds(critical_values)
        else:
            self.warning_values = self.prepare_thresholds(warning_values)
            self.critical_values = self.prepare_thresholds(critical_values)
        self._fetch_data()

    def _fetch_data(self):
        # Query /system/health and normalise the result: RouterOS v6 returns a
        # single row mapping name->value, while v7 returns one row per value
        # with explicit "name"/"value" keys.
        logger.info("Fetching data ...")
        call = self.api.path(
            "/system/health"
        )
        api_results = tuple(call)
        if self.routeros_version < RouterOSVersion("7"):
            # NOTE(review): assumes at least one result row on v6 devices,
            # otherwise api_results[0] raises IndexError — confirm.
            api_result_items = []
            for name, value in api_results[0].items():
                api_result_items.append({
                    "name": name,
                    "value": value,
                })
        else:
            api_result_items = api_results
        # Only fanN-speed style values are of interest here.
        regex_name = re.compile(r"(?P<name>fan\d+)-(?P<type>(speed))")
        for item in api_result_items:
            m = regex_name.match(item["name"])
            if not m:
                continue
            if self.use_regex:
                # Resolve regex thresholds to concrete per-item thresholds;
                # the first matching pattern wins.
                for regex, threshold in self.warning_regex_values.items():
                    if regex.match(item["name"]):
                        self.warning_values[item["name"]] = threshold
                        break
                for regex, threshold in self.critical_regex_values.items():
                    if regex.match(item["name"]):
                        self.critical_values[item["name"]] = threshold
                        break
            if m.group("type") in ("speed",):
                self.fan_values[item["name"]] = int(item["value"])
                self.fan_names.add(m.group("name"))

    def probe(self):
        # One scalar context + metric per collected fan value.
        for name, value in self.fan_values.items():
            self._check.add(nagiosplugin.ScalarContext(
                name=name,
                warning=self.warning_values.get(name),
                critical=self.critical_values.get(name),
            ))
            yield nagiosplugin.Metric(
                name=name,
                value=value,
            )
@cli.command("system.fan")
@click.option(
    "warning_values",
    "--value-warning",
    multiple=True,
    help=(
        "Set a warning threshold for a value. "
        "Example: If fan1-speed should be in the range of 4000 to 5000 you can set "
        "--value-warning fan1-speed:4000:5000 "
        "Can be specified multiple times"
    )
)
@click.option(
    "critical_values",
    "--value-critical",
    multiple=True,
    help=(
        "Set a critical threshold for a value. "
        "Example: If fan1-speed should be in the range of 4000 to 5000 you can set "
        "--value-critical fan1-speed:4000:5000 "
        "Can be specified multiple times"
    )
)
@click.option(
    "--regex",
    "use_regex",
    default=False,
    is_flag=True,
    help="Treat values from --value-warning and --value-critical as regex to find all matching values"
)
@click.option(
    "--no-fan-ok",
    is_flag=True,
    default=False,
    help="The check will be unknown if no fan is available. Provide this option to ignore this."
)
@click.option(
    "expected_names",
    "--expect-fan",
    multiple=True,
    default=[],
    help="Name of the fan to expect. Can be specified multiple times."
)
@click.pass_context
@nagiosplugin.guarded
def system_fan(ctx, warning_values, critical_values, use_regex, no_fan_ok, expected_names):
    # Build the check and attach the fan resource (data is fetched on init).
    check = nagiosplugin.Check()
    fan_resource = SystemFanResource(
        cmd_options=ctx.obj,
        check=check,
        warning_values=warning_values,
        critical_values=critical_values,
        use_regex=use_regex,
    )
    check.add(fan_resource)

    # Baseline OK result; any worse result added below takes precedence.
    check.results.add(
        nagiosplugin.Result(
            nagiosplugin.state.Ok,
            hint=f"Looks like all fans work as expected: {', '.join(sorted(fan_resource.fan_names))}"
        )
    )

    if not fan_resource.fan_names and not no_fan_ok:
        check.results.add(
            nagiosplugin.Result(
                nagiosplugin.state.Unknown,
                hint="No FANs found"
            )
        )

    if expected_names:
        missing = [name for name in expected_names if name not in fan_resource.fan_names]
        if missing:
            check.results.add(
                nagiosplugin.Result(
                    nagiosplugin.state.Warn,
                    hint=f"Expected FAN(s) not found: {', '.join(missing)}"
                )
            )

    check.main(verbose=ctx.obj["verbose"])
from pprint import pformat
import re
from typing import Dict, List
import click
import librouteros
import librouteros.query
import nagiosplugin
from ..cli import cli
from ..helper import logger
from ..resource import RouterOSCheckResource
class SystemCpuResource(RouterOSCheckResource):
    """Collect global and per-core CPU usage values from a RouterOS device."""

    name = "CPU"

    def __init__(
        self,
        cmd_options,
        check: nagiosplugin.Check,
        warning_values: List[str],
        critical_values: List[str],
        use_regex: bool
    ):
        """
        :param cmd_options: Global CLI options (connection parameters etc.)
        :param check: Check instance that contexts are attached to
        :param warning_values: Warning thresholds in ``name:threshold`` format
        :param critical_values: Critical thresholds in ``name:threshold`` format
        :param use_regex: Treat threshold names as regular expressions
        """
        super().__init__(cmd_options=cmd_options)
        self._check = check
        self.values: Dict[str, float] = {}
        self.use_regex: bool = use_regex

        self.warning_values: Dict[str, str] = {}
        self.critical_values: Dict[str, str] = {}
        self.warning_regex_values: Dict[re.Pattern, str] = {}
        self.critical_regex_values: Dict[re.Pattern, str] = {}
        if use_regex:
            self.warning_regex_values = self.prepare_regex_thresholds(warning_values)
            self.critical_regex_values = self.prepare_regex_thresholds(critical_values)
        else:
            self.warning_values = self.prepare_thresholds(warning_values)
            self.critical_values = self.prepare_thresholds(critical_values)

    def _assign_regex_thresholds(self, metric_name: str) -> None:
        """Copy the first matching regex threshold into the per-name maps."""
        for pattern, threshold in self.warning_regex_values.items():
            if pattern.match(metric_name):
                self.warning_values[metric_name] = threshold
                break
        for pattern, threshold in self.critical_regex_values.items():
            if pattern.match(metric_name):
                self.critical_values[metric_name] = threshold
                break

    def probe(self):
        """Yield the global cpu-load metric plus one metric per core value."""
        key_cpu_load = librouteros.query.Key("cpu-load")
        api = self._connect_api()

        logger.info("Fetching global data ...")
        call = api.path(
            "/system/resource"
        ).select(
            key_cpu_load
        )
        results = tuple(call)
        result = results[0]
        logger.debug(f"Extracted values {pformat(result)}")
        yield nagiosplugin.Metric(
            name="cpu-load",
            value=result["cpu-load"],
            uom="%",
            min=0,
            max=100,
        )

        logger.info("Fetching cpu data ...")
        call = api.path(
            "/system/resource/cpu"
        )
        results = tuple(call)
        logger.debug(f"Extracted values {pformat(results)}")
        for cpu in results:
            cpu_name = cpu["cpu"]
            for suffix in ("load", "irq", "disk"):
                metric_name = f"{cpu_name}-{suffix}"
                if self.use_regex:
                    self._assign_regex_thresholds(metric_name)
                self.values[metric_name] = float(cpu[suffix])

        for metric_name, value in self.values.items():
            self._check.add(nagiosplugin.ScalarContext(
                name=metric_name,
                warning=self.warning_values.get(metric_name),
                critical=self.critical_values.get(metric_name),
            ))
            yield nagiosplugin.Metric(
                name=metric_name,
                value=value,
                uom="%",
                min=0,
                max=100,
            )
class SystemCpuSummary(nagiosplugin.Summary):
    """Summary that reports the global CPU load when all results are OK."""

    def ok(self, results: List[nagiosplugin.Result]):
        """Return a short status line based on the global ``cpu-load`` metric."""
        cpu_load_results = (
            result for result in results
            if result.metric and result.metric.name == "cpu-load"
        )
        for result in cpu_load_results:
            return f"System load is {result.metric.value}%"
        # No cpu-load metric found: fall back to an empty summary line.
        return ""
@cli.command("system.cpu")
@click.option(
    "--load-warning",
    help="Warning threshold for global cpu load",
)
@click.option(
    "--load-critical",
    help="Critical threshold for global cpu load",
)
@click.option(
    "warning_values",
    "--value-warning",
    multiple=True,
    help=(
        "Set a warning threshold for a value. "
        "Example: If cpu1-load should be in the range of 10% to 20% you can set "
        "--value-warning cpu-load:10:200 "
        "Can be specified multiple times"
    )
)
@click.option(
    "critical_values",
    "--value-critical",
    multiple=True,
    help=(
        "Set a critical threshold for a value. "
        "Example: If cpu1-load should be in the range of 10% to 20% you can set "
        "--value-critical cpu-load:10:200 "
        "Can be specified multiple times"
    )
)
@click.option(
    "--regex",
    "use_regex",
    default=False,
    is_flag=True,
    help=(
        "Treat values from --value-warning and --value-critical as regex to find all matching values."
        "Example: Warn if cpu load of at least one CPU is above 80%: --value-warning 'cpu\\d+-load:80'"
    )
)
@click.pass_context
@nagiosplugin.guarded
def system_cpu(ctx, load_warning, load_critical, warning_values, critical_values, use_regex):
    """This command reads the information from /system/resource and /system/resource/cpu to extract the required
    information.
    """
    check = nagiosplugin.Check()
    cpu_resource = SystemCpuResource(
        cmd_options=ctx.obj,
        check=check,
        warning_values=warning_values,
        critical_values=critical_values,
        use_regex=use_regex,
    )
    # Register the resource, the context for the global load metric and the
    # summary that renders the OK output line.
    check.add(cpu_resource)
    check.add(
        nagiosplugin.ScalarContext(
            name="cpu-load",
            warning=load_warning,
            critical=load_critical,
        )
    )
    check.add(SystemCpuSummary())
    check.main(verbose=ctx.obj["verbose"])
import re
from typing import Dict, List, Set
import click
import nagiosplugin
from ..cli import cli
from ..helper import logger, RouterOSVersion
from ..resource import RouterOSCheckResource
class SystemTemperatureResource(RouterOSCheckResource):
    """Collect every *temperature* value reported via ``/system/health``."""

    name = "Temperature"

    def __init__(
        self,
        cmd_options,
        check: nagiosplugin.Check,
        warning_values: List[str],
        critical_values: List[str],
        use_regex: bool
    ):
        """
        :param cmd_options: Global CLI options (connection parameters etc.)
        :param check: Check instance that contexts are attached to
        :param warning_values: Warning thresholds in ``name:threshold`` format
        :param critical_values: Critical thresholds in ``name:threshold`` format
        :param use_regex: Treat threshold names as regular expressions
        """
        super().__init__(cmd_options=cmd_options)
        self._check = check
        self.names: Set[str] = set()
        self.values: Dict[str, float] = {}
        self.use_regex: bool = use_regex

        self.warning_values: Dict[str, str] = {}
        self.critical_values: Dict[str, str] = {}
        self.warning_regex_values: Dict[re.Pattern, str] = {}
        self.critical_regex_values: Dict[re.Pattern, str] = {}
        if use_regex:
            self.warning_regex_values = self.prepare_regex_thresholds(warning_values)
            self.critical_regex_values = self.prepare_regex_thresholds(critical_values)
        else:
            self.warning_values = self.prepare_thresholds(warning_values)
            self.critical_values = self.prepare_thresholds(critical_values)

        self._fetch_data()

    def _fetch_data(self):
        """Query ``/system/health`` and record every temperature entry."""
        logger.info("Fetching data ...")
        call = self.api.path(
            "/system/health"
        )
        entries = tuple(call)
        if self.routeros_version < RouterOSVersion("7"):
            # RouterOS v6 returns one flat dict; normalise to the v7 item list.
            entries = self._convert_v6_list_to_v7(entries)

        temperature_pattern = re.compile(r".*temperature.*")
        for entry in entries:
            entry_name = entry["name"]
            if not temperature_pattern.match(entry_name):
                continue

            if self.use_regex:
                # First matching regex threshold wins.
                for pattern, threshold in self.warning_regex_values.items():
                    if pattern.match(entry_name):
                        self.warning_values[entry_name] = threshold
                        break
                for pattern, threshold in self.critical_regex_values.items():
                    if pattern.match(entry_name):
                        self.critical_values[entry_name] = threshold
                        break

            self.names.add(entry_name)
            self.values[entry_name] = float(entry["value"])

    def probe(self):
        """Yield one metric per temperature, registering a matching context first."""
        for entry_name, value in self.values.items():
            self._check.add(nagiosplugin.ScalarContext(
                name=entry_name,
                warning=self.warning_values.get(entry_name),
                critical=self.critical_values.get(entry_name),
            ))
            yield nagiosplugin.Metric(
                name=entry_name,
                value=value,
            )
@cli.command("system.temperature")
@click.option(
    "warning_values",
    "--value-warning",
    multiple=True,
    help=(
        "Set a warning threshold for a value. "
        "Example: If cpu-temperature should be in the range of 40 and 60°C you can set "
        "--value-warning cpu-temperature:40:60 "
        "If cpu-temperature should not be higher than 50.5°C you can set "
        "--value-warning cpu-temperature:50.5 "
        "Can be specified multiple times"
    )
)
@click.option(
    "critical_values",
    "--value-critical",
    multiple=True,
    help=(
        "Set a critical threshold for a value. "
        "Example: If cpu-temperature should be in the range of 40 and 60°C you can set "
        "--value-critical cpu-temperature:40:60 "
        "If cpu-temperature should not be higher than 50.5°C you can set "
        "--value-critical cpu-temperature:50.5 "
        "Can be specified multiple times"
    )
)
@click.option(
    "--regex",
    "use_regex",
    default=False,
    is_flag=True,
    help="Treat values from --value-warning and --value-critical as regex to find all matching values"
)
@click.option(
    "--no-temperature-ok",
    is_flag=True,
    default=False,
    help="The check will be unknown if no temperature is available. Provide this option to ignore this."
)
@click.option(
    "expected_names",
    "--expect-temperature",
    multiple=True,
    default=[],
    help="Name of the temperature to expect. Can be specified multiple times. Example: board-temperature1"
)
@click.pass_context
@nagiosplugin.guarded
def system_temperature(ctx, warning_values, critical_values, use_regex, no_temperature_ok, expected_names):
    """This command reads the information from /system/health and extracts all values containing the
    word temperature in its name. Like 'board-temperature', 'board-temperature1', 'cpu-temperature', ...
    Be aware that not all devices return the same values.
    """
    check = nagiosplugin.Check()
    temperature_resource = SystemTemperatureResource(
        cmd_options=ctx.obj,
        check=check,
        warning_values=warning_values,
        critical_values=critical_values,
        use_regex=use_regex,
    )
    check.add(temperature_resource)

    # Baseline OK result; any worse result added below takes precedence.
    check.results.add(
        nagiosplugin.Result(
            nagiosplugin.state.Ok,
            hint=f"Looks like all temperatures are OK: {', '.join(sorted(temperature_resource.names))}"
        )
    )

    if not temperature_resource.names and not no_temperature_ok:
        check.results.add(
            nagiosplugin.Result(
                nagiosplugin.state.Unknown,
                hint="No temperatures found"
            )
        )

    if expected_names:
        missing = [name for name in expected_names if name not in temperature_resource.names]
        if missing:
            check.results.add(
                nagiosplugin.Result(
                    nagiosplugin.state.Warn,
                    hint=f"Expected temperature(s) not found: {', '.join(missing)}"
                )
            )

    check.main(verbose=ctx.obj["verbose"])
import re
from typing import Dict, List, Set
import click
import nagiosplugin
from ..cli import cli
from ..context import BooleanContext
from ..helper import logger, RouterOSVersion
from ..resource import RouterOSCheckResource
class SystemPsuResource(RouterOSCheckResource):
    """Collect PSU states and values (current/voltage) from ``/system/health``."""

    name = "PSU"

    def __init__(
        self, cmd_options, check: nagiosplugin.Check, warning_values: List[str], critical_values: List[str],
        no_psu_ok: bool,
    ):
        """
        :param cmd_options: Global CLI options (connection parameters etc.)
        :param check: Check instance that contexts/results are attached to
        :param warning_values: Warning thresholds in ``name:threshold`` format
        :param critical_values: Critical thresholds in ``name:threshold`` format
        :param no_psu_ok: Don't report Unknown when no PSU data is found
        """
        super().__init__(cmd_options=cmd_options)
        self._check = check
        # Base names (e.g. "psu1"), string states keyed by base name and
        # numeric values keyed by full value name (e.g. "psu1-voltage").
        self.psu_names: Set[str] = set()
        self.psu_states: Dict[str, str] = {}
        self.psu_values: Dict[str, float] = {}
        self.warning_values = self._prepare_thresholds(warning_values)
        self.critical_values = self._prepare_thresholds(critical_values)
        self.no_psu_ok = no_psu_ok

        self._fetch_data()

    def _fetch_data(self):
        """Read ``/system/health`` and populate PSU names, states and values."""
        logger.info("Fetching data ...")
        call = self.api.path(
            "/system/health"
        )
        api_results = tuple(call)
        if self.routeros_version < RouterOSVersion("7"):
            # RouterOS v6 returns one flat dict; normalise it to the v7
            # list-of-items format. Consistency fix: use the inherited helper
            # (as SystemTemperatureResource does) instead of inlining the loop.
            api_result_items = self._convert_v6_list_to_v7(api_results)
        else:
            api_result_items = api_results

        regex_name = re.compile(r"(?P<name>psu\d+)-(?P<type>(state|current|voltage))")
        for api_result_item in api_result_items:
            m = regex_name.match(api_result_item["name"])
            if not m:
                continue
            self.psu_names.add(m.group("name"))
            if m.group("type") in ("current", "voltage"):
                self.psu_values[api_result_item["name"]] = float(api_result_item["value"])
            if m.group("type") == "state":
                self.psu_states[m.group("name")] = api_result_item["value"]

        if not self.no_psu_ok and len(self.psu_values) == 0 and len(self.psu_states) == 0:
            self._check.results.add(
                nagiosplugin.Result(
                    nagiosplugin.state.Unknown,
                    hint="No PSU values and stats found"
                )
            )

    @staticmethod
    def _prepare_thresholds(thresholds: List[str]) -> Dict[str, str]:
        """Parse ``name:threshold`` strings into a dict.

        Malformed entries (missing threshold value) are logged and skipped.
        """
        results: Dict[str, str] = {}
        for threshold in thresholds:
            name, _, value = threshold.partition(":")
            if not value:
                # Bug fix: previously the unparsable (empty) value was stored
                # in the result dict despite the warning.
                logger.warning(f"Unable to parse threshold for {name}")
                continue
            results[name] = value
        return results

    def probe(self):
        """Yield metrics for all PSU values and a boolean metric per PSU state."""
        for name, value in self.psu_values.items():
            self._check.add(nagiosplugin.ScalarContext(
                name=name,
                warning=self.warning_values.get(name),
                critical=self.critical_values.get(name),
            ))
            yield nagiosplugin.Metric(
                name=name,
                value=value,
            )

        for name, value in self.psu_states.items():
            value_name = f"{name}-state-ok"
            self._check.add(
                BooleanContext(value_name)
            )
            if value != "ok":
                self._check.results.add(
                    nagiosplugin.Result(
                        nagiosplugin.state.Warn,
                        hint=f"PSU: {name} state {value}"
                    )
                )
            yield nagiosplugin.Metric(
                name=value_name,
                value=(value == "ok")
            )
@cli.command("system.psu")
@click.option(
    "warning_values",
    "--value-warning",
    multiple=True,
    help=(
        "Set a warning threshold for a value. "
        "Example: If psu1-voltage should be in the range of 12-12.1V you can set --value-warning psu1-voltage:12:12.1 "
        "Can be specified multiple times"
    )
)
@click.option(
    "critical_values",
    "--value-critical",
    multiple=True,
    help=(
        "Set a critical threshold for a value. "
        "Example: If psu1-voltage should be in the range of 12-12.1V you can set --value-critical psu1-voltage:12:12.1 "
        "Can be specified multiple times"
    )
)
@click.option(
    "--no-psu-ok",
    is_flag=True,
    default=False,
    help="The check will be unknown if not at least one psu stat or value is available. Set this to ignore this."
)
@click.option(
    "expected_psu_names",
    "--expect-psu",
    multiple=True,
    default=[],
    help="Name of the PSU to expect at least one value or state. Can be specified multiple times."
)
@click.pass_context
@nagiosplugin.guarded
def system_psu(ctx, warning_values, critical_values, no_psu_ok, expected_psu_names):
    """Check the power supply units (PSU)"""
    check = nagiosplugin.Check()
    psu_resource = SystemPsuResource(
        cmd_options=ctx.obj,
        check=check,
        warning_values=warning_values,
        critical_values=critical_values,
        no_psu_ok=no_psu_ok,
    )
    check.add(psu_resource)

    # Baseline OK result; any worse result added below takes precedence.
    check.results.add(
        nagiosplugin.Result(
            nagiosplugin.state.Ok,
            hint=f"Looks like all PSU work like expected: {', '.join(psu_resource.psu_names)}"
        )
    )

    if expected_psu_names:
        missing = [psu_name for psu_name in expected_psu_names if psu_name not in psu_resource.psu_names]
        if missing:
            check.results.add(
                nagiosplugin.Result(
                    nagiosplugin.state.Warn,
                    hint=f"Expected PSU(s) not found: {', '.join(missing)}"
                )
            )

    check.main(verbose=ctx.obj["verbose"])
import re
from typing import Any, Dict, List, Optional, Union
import click
import nagiosplugin
from ..cli import cli
from ..context import BooleanContext, ScalarPercentContext
from ..helper import escape_filename, logger
from ..resource import RouterOSCheckResource
class InterfaceResource(RouterOSCheckResource):
    """Check resource for interface state and traffic statistics.

    Fetches ``/interface`` (plus the ``speed`` value from
    ``/interface/ethernet``) and yields metrics for every matched interface.
    Scalar/boolean contexts are registered dynamically on the check while
    probing, based on the declarative ``_routeros_metric_values`` table.
    """

    name = "Interface"

    def __init__(
        self,
        cmd_options: Dict[str, Any],
        check: nagiosplugin.Check,
        names: List[str],
        regex: bool,
        single_interface: bool,
        ignore_disabled: bool,
        cookie_filename: str,
        warning_values: List[str],
        critical_values: List[str],
        override_values: List[str],
    ):
        """
        :param cmd_options: Global CLI options (connection parameters etc.)
        :param check: Check instance that contexts/results are attached to
        :param names: Interface names (or regex patterns) to monitor; empty
            list means all interfaces
        :param regex: Treat ``names`` as regular expressions
        :param single_interface: Expect exactly one matching interface
        :param ignore_disabled: Skip disabled interfaces entirely
        :param cookie_filename: Template for the per-interface rate cookie
            file; ``{name}`` is replaced with the escaped interface name
        :param warning_values: Warning thresholds in ``name:threshold`` format
        :param critical_values: Critical thresholds in ``name:threshold`` format
        :param override_values: ``name:value`` pairs overriding device values
        """
        super().__init__(cmd_options=cmd_options)
        self._check = check
        # Cache for fetch_data(); None until the first fetch.
        self._interface_data: Optional[Dict[str, Any]] = None
        self.names: List[Union[Any]] = names
        self.regex = regex
        if self.regex:
            # Pre-compile the patterns once; self.names then holds re.Pattern
            # objects instead of plain strings.
            regex_names = []
            for name in names:
                regex_names.append(re.compile(name))
            self.names = regex_names
        self.single_interface = single_interface
        self.ignore_disabled = ignore_disabled
        self.cookie_filename = cookie_filename
        self._parsed_warning_values: Dict[str, str] = self.prepare_thresholds(warning_values)
        self._parsed_critical_values: Dict[str, str] = self.prepare_thresholds(critical_values)
        self._parsed_override_values: Dict[str, str] = self.prepare_override_values(override_values)
        # Declarative table of the values extracted per interface. Keys used:
        #   name: RouterOS value name; dst_value_name: store under this name;
        #   type: conversion callable; factor: multiplier applied to the value;
        #   min/uom: metric metadata; missing_ok: value may be absent;
        #   rate: also compute a per-second rate (via the cookie file);
        #   rate_percent_total_name: express the rate as percent of this total;
        #   no_metric: internal value only, no metric is emitted;
        #   context_class: context type to register (None = custom handling).
        self._routeros_metric_values = [
            # Later values depend on the speed
            {
                "name": "speed",
                "missing_ok": True,
                "dst_value_name": "speed-byte",
                "type": self.parse_routeros_speed,
                "factor": 1 / 8,
                "no_metric": True,
            },
            {
                "name": "speed",
                "missing_ok": True,
                "type": self.parse_routeros_speed,
                "min": 0,
            },
            {
                "name": "disabled",
                "type": bool,
                "context_class": None,
            },
            {
                "name": "running",
                "type": bool,
                "context_class": None,
            },
            {
                "name": "actual-mtu",
                "type": int,
                "min": 0,
            },
            {
                "name": "fp-rx-byte",
                "type": int,
                "min": 0,
                "uom": "B",
                "rate": True,
                "rate_percent_total_name": "speed-byte",
            },
            {
                "name": "fp-rx-packet",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "fp-tx-byte",
                "type": int,
                "min": 0,
                "uom": "B",
                "rate": True,
                "rate_percent_total_name": "speed-byte",
            },
            {
                "name": "fp-tx-packet",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "l2mtu",
                "type": int,
                "min": 0,
                # CHR devices don't report l2mtu
                "missing_ok": True,
            },
            {
                "name": "link-downs",
                "type": int,
                "min": 0,
                "uom": "c",
            },
            # {"name": "mtu", "type": int, "min": 0},
            {
                "name": "rx-byte",
                "type": int,
                "min": 0,
                "uom": "B",
                "rate": True,
                "rate_percent_total_name": "speed-byte",
            },
            {
                "name": "rx-drop",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "rx-error",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "rx-packet",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
                "rate_percent_total_name": "speed-byte",
            },
            {
                "name": "tx-byte",
                "type": int,
                "min": 0,
                "uom": "B",
                "rate": True,
            },
            {
                "name": "tx-drop",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "tx-error",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "tx-packet",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True,
            },
            {
                "name": "tx-queue-drop",
                "type": int,
                "min": 0,
                "uom": "c",
                "rate": True
            },
        ]

    def _add_contexts(self, name, values, metric_prefix=""):
        """Register the contexts for one interface on the check.

        :param name: The interface name
        :param values: The interface's value dict (used to look up rate totals)
        :param metric_prefix: Format string (e.g. ``"{name} "``) prefixed to
            metric names when multiple interfaces are monitored
        """
        # "disabled" and "running" get dedicated boolean contexts with
        # interface-specific hints instead of generic scalar contexts.
        self._check.add(
            InterfaceDisabledContext(f"{metric_prefix.format(name=name)}disabled", interface_name=name),
            InterfaceRunningContext(f"{metric_prefix.format(name=name)}running", interface_name=name),
        )
        custom_metric_names = ["disabled", "running"]
        for metric_value in self._routeros_metric_values:
            # NOTE(review): the table above uses the key "dst_value_name",
            # but this lookup reads "dst" — confirm which key is intended.
            metric_value_name = metric_value.get("dst", metric_value["name"])
            if metric_value_name in custom_metric_names:
                continue
            if metric_value.get("no_metric"):
                continue
            context_class = metric_value.get("context_class", nagiosplugin.ScalarContext)
            self._check.add(
                context_class(
                    f"{metric_prefix.format(name=name)}{metric_value_name}",
                    warning=self._parsed_warning_values.get(metric_value["name"]),
                    critical=self._parsed_critical_values.get(metric_value["name"]),
                )
            )
            if metric_value.get("rate"):
                rate_percent_total_name = metric_value.get("rate_percent_total_name")
                rate_total_value = None
                if rate_percent_total_name:
                    rate_total_value = values.get(rate_percent_total_name)
                if rate_total_value is not None:
                    # Rate with a known total (e.g. link speed): percent context.
                    rate_context_class_percent = metric_value.get("context_class", ScalarPercentContext)
                    self._check.add(
                        rate_context_class_percent(
                            name=f"{metric_prefix.format(name=name)}{metric_value_name}_rate",
                            total_value=rate_total_value,
                            warning=self._parsed_warning_values.get(f"{metric_value['name']}_rate"),
                            critical=self._parsed_critical_values.get(f"{metric_value['name']}_rate"),
                        )
                    )
                else:
                    rate_context_class = metric_value.get("context_class", nagiosplugin.ScalarContext)
                    self._check.add(
                        rate_context_class(
                            name=f"{metric_prefix.format(name=name)}{metric_value_name}_rate",
                            # NOTE(review): this branch looks thresholds up by
                            # the plain value name, while the percent branch
                            # above uses the "<name>_rate" key — confirm intent.
                            warning=self._parsed_warning_values.get(metric_value["name"]),
                            critical=self._parsed_critical_values.get(metric_value["name"]),
                        )
                    )

    def fetch_data(self) -> Dict[str, Dict]:
        """Fetch and cache interface data, filtered by name/regex/disabled.

        Returns a dict keyed by interface name; each value is the merged
        ``/interface`` + ``/interface/ethernet`` record with any
        ``--value-override`` values applied on top.
        """
        if self._interface_data:
            return self._interface_data

        api = self._connect_api()

        logger.info("Fetching data ...")
        interface_ethernet_data = {}
        call = api.path(
            "/interface/ethernet"
        )
        call_results = tuple(call)
        for result in call_results:
            interface_ethernet_data[result["name"]] = {
                "speed": result["speed"],
            }

        call = api.path(
            "/interface"
        )
        call_results = tuple(call)
        self._interface_data = {}
        for result in call_results:
            if self.ignore_disabled and result["disabled"]:
                continue
            # Merge the ethernet speed info and apply user overrides last.
            if result["name"] in interface_ethernet_data:
                result.update(interface_ethernet_data[result["name"]])
            result.update(self._parsed_override_values)
            if len(self.names) == 0:
                # No filter given: monitor every interface.
                self._interface_data[result["name"]] = result
            elif self.regex:
                for name in self.names:
                    if name.match(result["name"]):
                        self._interface_data[result["name"]] = result
            elif result["name"] in self.names:
                self._interface_data[result["name"]] = result

        return self._interface_data

    @property
    def interface_names(self):
        """Names of all interfaces that matched the configured filters."""
        return tuple(self.fetch_data().keys())

    def probe(self):
        """Collect metrics for all matched interfaces.

        Uses one rate cookie file per interface and registers the interface's
        contexts as a side effect.
        """
        routeros_metrics = []
        data = self.fetch_data()
        if self.single_interface:
            # Exactly one interface expected; the CLI adds an Unknown result
            # if the count differs, so only probe when it is exactly one.
            if len(self.interface_names) == 1:
                cookie_filename = self.cookie_filename.format(
                    name=escape_filename(self.interface_names[0])
                )
                with nagiosplugin.Cookie(cookie_filename) as cookie:
                    routeros_metrics += self.get_routeros_metric_item(data[self.interface_names[0]], cookie=cookie)
                self._add_contexts(name=self.interface_names[0], values=data[self.interface_names[0]])
        else:
            for name in self.interface_names:
                cookie_filename = self.cookie_filename.format(
                    name=escape_filename(name)
                )
                with nagiosplugin.Cookie(cookie_filename) as cookie:
                    routeros_metrics += self.get_routeros_metric_item(data[name], name_prefix=f"{name} ", cookie=cookie)
                self._add_contexts(name=name, values=data[name], metric_prefix="{name} ")
        return routeros_metrics
class InterfaceDisabledContext(BooleanContext):
    """Context that warns when an interface is administratively disabled."""

    def __init__(self, name, interface_name):
        """
        :param name: Metric/context name
        :param interface_name: Interface name used in the hint text
        """
        super().__init__(name=name)
        self._interface_name = interface_name

    def evaluate(self, metric, resource: InterfaceResource):
        """Return Warn if the interface is disabled, Ok otherwise."""
        if metric.value is True:
            return self.result_cls(
                nagiosplugin.state.Warn,
                # Bug fix: the f-prefix was missing, so the literal text
                # "{self._interface_name}" appeared in the output instead of
                # the interface name (compare InterfaceRunningContext).
                hint=f"Interface '{self._interface_name}' is disabled",
                metric=metric
            )
        return self.result_cls(nagiosplugin.state.Ok)
class InterfaceRunningContext(BooleanContext):
    """Context that warns when an interface is not in the running state."""

    def __init__(self, name, interface_name):
        """
        :param name: Metric/context name
        :param interface_name: Interface name used in the hint text
        """
        super().__init__(name=name)
        self._interface_name = interface_name

    def evaluate(self, metric, resource: InterfaceResource):
        """Return Ok while the interface is running, Warn otherwise."""
        if metric.value is not False:
            return self.result_cls(nagiosplugin.state.Ok)
        return self.result_cls(
            state=nagiosplugin.state.Warn,
            hint=f"Interface '{self._interface_name}' not running",
            metric=metric
        )
@cli.command("interface")
@click.option(
    "--name",
    "names",
    default=[],
    multiple=True,
    # Bug fix: the help text said "GRE interface" (copy/paste leftover from a
    # tunnel-specific check); this command monitors any interface.
    help="The name of the interface to monitor. This can be specified multiple times",
)
@click.option(
    "--regex",
    "regex",
    default=False,
    is_flag=True,
    help="Treat the specified names as regular expressions and try to find all matching interfaces. (Default: not set)",
)
@click.option(
    "--single",
    "single",
    default=False,
    is_flag=True,
    help="If set the check expects the interface to exist",
)
@click.option(
    "--ignore-disabled/--no-ignore-disabled",
    default=True,
    is_flag=True,
    help="Ignore disabled interfaces",
)
@click.option(
    "--cookie-filename",
    "cookie_filename",
    default="/tmp/check_routeros_interface_{name}.data",
    help=(
        "The filename to use to store the information to calculate the rate. '{name}' will be replaced with an "
        "internal uniq id. It Will create one file per interface."
        "(Default: /tmp/check_routeros_interface_{name}.data)"
    ),
)
@click.option(
    "override_values",
    "--value-override",
    multiple=True,
    help=(
        "Override a value read from the RouterOS device. "
        "Format of the value must be compatible with RouterOS values. "
        "Example: Override/Set the speed value for bridges or tunnels: "
        "--value-override speed:10Gbps"
    )
)
@click.option(
    "warning_values",
    "--value-warning",
    multiple=True,
    help=(
        "Set a warning threshold for a value. "
        "Example: If cpu1-load should be in the range of 10% to 20% you can set "
        "--value-warning cpu-load:10:200 "
        "Can be specified multiple times"
    )
)
@click.option(
    "critical_values",
    "--value-critical",
    multiple=True,
    help=(
        "Set a critical threshold for a value. "
        "Example: If cpu1-load should be in the range of 10% to 20% you can set "
        "--value-critical cpu-load:10:200 "
        "Can be specified multiple times"
    )
)
@click.pass_context
# Consistency fix: every other command in this package wraps the handler in
# nagiosplugin.guarded so unexpected errors are reported as UNKNOWN instead
# of a raw traceback; this command was missing the decorator.
@nagiosplugin.guarded
def interface(
    ctx, names, regex, single, ignore_disabled, cookie_filename, warning_values, critical_values, override_values
):
    """Check the state and the stats of interfaces"""
    check = nagiosplugin.Check()
    resource = InterfaceResource(
        cmd_options=ctx.obj,
        check=check,
        names=names,
        regex=regex,
        single_interface=single,
        ignore_disabled=ignore_disabled,
        cookie_filename=cookie_filename,
        warning_values=warning_values,
        critical_values=critical_values,
        override_values=override_values,
    )
    check.add(resource)

    # Baseline OK result; any worse result added below takes precedence.
    check.results.add(
        nagiosplugin.Result(
            nagiosplugin.state.Ok,
            "All interfaces UP"
        )
    )

    if single and len(resource.interface_names) != 1:
        check.results.add(
            nagiosplugin.Result(
                nagiosplugin.state.Unknown,
                f"Only one matching interface is allowed. Found {len(resource.interface_names)}"
            )
        )

    check.main(verbose=ctx.obj["verbose"])
from fnmatch import fnmatch
from typing import Dict, List
class Settings:
    """Parser settings

    You can customise settings in one of two ways.
    The simplest way is to pass settings to RouterOSConfig.parse():

        RouterOSConfig.parse(s=my_config, settings=dict(
            natural_keys={
                "/ip address": "address",
                ...
            },
            no_deletions={
                "/interface ethernet",
                ...
            },
            no_creations={
                "/interface ethernet",
                ...
            },
            expression_order_important={
                "/ip firewall*",
                ...
            },
        ))

    Note that section paths can be specified using '*' wildcards.
    For example, `/ip firewall*`.

    Alternatively, you can extend this class and override its methods.
    This allows you to implement more complex logic should you require.
    In this case, you can pass your customised class to the parser as follows:

        RouterOSConfig.parse(my_config, settings=MyCustomSettings())
    """

    # Natural keys for each section name.
    # 'name' will be used if none is found below
    # (and only if the 'name' value is available)
    natural_keys = {
        "/interface ethernet": "default-name",
        "/interface bridge port": "interface",
        "/ip address": "address",
        "/ipv6 address": "address",
        "/routing ospf interface": "interface",
        "/routing ospf-v3 interface": "interface",
        "/routing ospf network": "network",
        "/routing ospf-v3 network": "network",
        "/mpls ldp interface": "interface",
        "/ip dhcp-server network": "address",
        "/ip dhcp-server lease": "mac-address",
        "/ipv6 nd": "interface",
        "/ipv6 nd prefix": "interface",
        "/ipv6 dhcp-client": "interface",
    }

    # Don't perform deletions in these sections
    no_deletions = {"/interface ethernet", "/interface wireless security-profiles"}

    # Don't perform creations in these sections
    no_creations = {
        "/interface ethernet",
    }

    # Ordering is important in these sections. Ensure
    # entities maintain their order. Natural keys/ids must be
    # present in sections listed here
    expression_order_important = {
        "/ip firewall calea",
        "/ip firewall filter",
        "/ip firewall mangle",
        "/ip firewall nat",
    }

    def __init__(
        self,
        natural_keys: Dict[str, str] = None,
        no_deletions: List[str] = None,
        no_creations: List[str] = None,
    ):
        # Only override the class-level defaults when a value was given,
        # so passing None keeps the built-in behaviour.
        if natural_keys is not None:
            self.natural_keys = natural_keys
        if no_deletions is not None:
            self.no_deletions = no_deletions
        if no_creations is not None:
            self.no_creations = no_creations

    def get_natural_key(self, section_path: str) -> str:
        """Get the natural key for a given section path

        Will default to 'name' if no entry is found in NATURAL_KEYS
        """
        return self.natural_keys.get(section_path, "name")

    def deletion_allowed(self, section_path: str) -> bool:
        """Return True when deletions may be generated for this section."""
        # Bug fix: removed a leftover debug list comprehension ("foo") that
        # eagerly computed all matches and was never used.
        return not any(fnmatch(section_path, pattern) for pattern in self.no_deletions)

    def creation_allowed(self, section_path: str) -> bool:
        """Return True when creations may be generated for this section."""
        return not any(fnmatch(section_path, pattern) for pattern in self.no_creations)

    def is_expression_order_important(self, section_path: str) -> bool:
        """Return True when expression order must be preserved for this section."""
        return any(
            fnmatch(section_path, pattern)
            for pattern in self.expression_order_important
        )
from dataclasses import dataclass
from typing import Union, List, TYPE_CHECKING, Optional
from routeros_diff.settings import Settings
from routeros_diff.utilities import quote, unescape_string
from routeros_diff.exceptions import CannotDiff
if TYPE_CHECKING:
from routeros_diff.expressions import Expression
@dataclass
class AbstractArgValue:
    """Base class for a single value in a RouterOS expression.

    For example, in:

        add name=core router-id=10.127.0.88

    the values are ``core`` and ``10.127.0.88``. Comparisons work against
    both other value objects and plain strings.
    """

    value: Union[str, "Expression"]

    @staticmethod
    def _as_comparable(other):
        """Return the raw value of *other* for use in comparisons."""
        if isinstance(other, AbstractArgValue):
            return other.value
        return str(other)

    def __eq__(self, other):
        return self is other or self.value == self._as_comparable(other)

    def __hash__(self):
        return hash(self.value)

    def __lt__(self, other):
        return str(self.value) < self._as_comparable(other)

    def __gt__(self, other):
        return str(self.value) > self._as_comparable(other)

    def __contains__(self, item):
        return item in self.value

    def __str__(self):
        return str(self.value)

    def __html__(self):
        return str(self)
@dataclass(eq=False, init=False)
class ArgValue(AbstractArgValue):
    """A plain (non-expression) value, e.g. ``core`` or ``10.127.0.88``.

    The stored value is always kept unescaped; quoting/escaping only happens
    when rendering via :meth:`quote`.
    """

    value: str

    def __init__(self, value: str, force_quote: bool = False):
        self.value = unescape_string(value)
        self.force_quote = force_quote

    def quote(self) -> str:
        """Return the value quoted/escaped for use in a RouterOS expression."""
        return quote(self.value, force=self.force_quote)

    def __html__(self):
        return f'<span class="ros-v">{self.quote()}</span>'
@dataclass(eq=False)
class ExpressionArgValue(AbstractArgValue):
    """A value which is itself an expression.

    For example, in:

        add chain=b place-before=[ find where comment~ID:3 ]

    the value ``[ find where comment~ID:3 ]`` is an expression value.
    """

    value: "Expression"

    def quote(self) -> str:
        """Render the inner expression wrapped in ``[ ... ]``."""
        return f"[ {self.value} ]"

    def __html__(self):
        return f'<span class="ros-v ros-vc">{self}</span>'
@dataclass(init=False)
class Arg:
"""A single key=value pair as part of a RouterOS expression
For example:
router-id=100.127.0.1
In the above, router-id is the key, and 100.127.0.1 is the value.
Positional args have a value of None. For example, in this expression:
set core router-id=100.127.0.1
The first argument would have a key of `core` and value of `None`.
"""
key: str
value: Union[ArgValue, ExpressionArgValue, None]
# Common comparators are = or ~
comparator: str = "="
settings: Settings
    def __init__(
        self,
        key: str,
        value: Union[str, "Expression", AbstractArgValue, None] = None,
        comparator: str = "=",
        settings: Optional[Settings] = None,
    ):
        """Create an argument from a key plus an optional value.

        :param key: The argument key (or the whole token for positional args)
        :param value: Raw string, Expression, pre-built value object, or None
            for a positional argument
        :param comparator: Comparison operator between key and value
            (commonly ``=`` or ``~``)
        :param settings: Parser settings; a default Settings() is created
            when omitted
        """
        # Imported locally — presumably to avoid a circular import with
        # routeros_diff.expressions (which imports this module).
        from routeros_diff.expressions import Expression

        self.key = key
        self.comparator = comparator
        self.settings = settings or Settings()

        # Normalise our value into some kind of AbstractArgValue
        if isinstance(value, str):
            # Always quote regex expressions, otherwise RouterOS tends to ignore them
            force_quote = comparator == "~"
            self.value = ArgValue(value, force_quote=force_quote)
        elif isinstance(value, Expression):
            self.value = ExpressionArgValue(value)
        elif value is None:
            self.value = None
        elif isinstance(value, AbstractArgValue):
            self.value = value
        else:
            raise ValueError(f"Invalid arg value: {value}")
def __str__(self):
"""Render this argument as a string"""
if self.value is None:
# Positional argument, so just render the key
return self.key
else:
# Standard key/value pair
return f"{self.key}{self.comparator}{self.value.quote()}"
@staticmethod
def parse(s: str, section_path: str, settings: Settings = None):
"""Parse an argument string
Can be either key/value, or positional
"""
if "=" in s:
key, value = s.split("=", maxsplit=1)
else:
key = s
value = None
# IPv6 addresses need normalising as RouterOS omits any /64 prefix
if section_path == "/ipv6 address" and key == "address":
if "/" not in value:
value = f"{value}/64"
assert (
key != "["
), "Something went wrong, failed to detect find expression correctly"
return Arg(key=key, value=value)
@property
def is_positional(self):
"""A positional argument has no corresponding value
For example, take the expression:
set 0 foo=bar
Here, `0` would be a positional argument
"""
return not self.value
@property
def is_key_value(self):
"""A key-value argument has both a key and a value
For example, take the expression:
set 0 foo=bar
Here, `foo=bar` would be a key-value argument
"""
return not self.is_positional
def __html__(self, natural_key=None):
if self.value is None:
# Positional argument, so just render the key
html = f'<span class="ros-a ros-pa">{self.key}</span>'
else:
# Standard key/value pair
html = (
f'<span class="ros-k">{self.key}</span>'
f'<span class="ros-com">{self.comparator}</span>'
f"{self.value.__html__()}"
)
if natural_key == "comment-id":
natural_key = "comment"
if natural_key and self.key == natural_key:
html = f'<span class="ros-nat">{html}</span>'
return html
class ArgList(list):
"""A list of several arguments"""
def __str__(self):
"""Turn this parsed list of args back into a config string"""
return " ".join([str(a) for a in self])
def __html__(self, natural_key=None):
return " ".join(
[f'<span class="ros-a">{a.__html__(natural_key)}</span>' for a in self]
)
def __getitem__(self, item):
"""Key an item by index or by key"""
if isinstance(item, str):
# By key
for arg in self:
if arg.key == item:
return arg.value
raise KeyError(item)
else:
# By index
return super().__getitem__(item)
def __contains__(self, key):
"""Do these args contain an argument with the given key?"""
return key in self.keys()
def __delitem__(self, key):
"""Delete the arg with the given key"""
if isinstance(key, str):
for i, arg in enumerate(self):
if arg.key == key:
del self[i]
else:
return super().__delitem__(key)
def get(self, key, default=None):
"""Get the arg for the given key"""
try:
return self[key]
except KeyError:
return default
def keys(self) -> List[str]:
"""Get a list of keys for all args"""
return [arg.key for arg in self]
def diff(
self, old: "ArgList", old_verbose: Optional["ArgList"] = None
) -> "ArgList":
"""Diff this list with the given old list, and return a new list of args
This may:
* Return args which are only present in this list
* Return args which which appear in both lists but with different values
* Return args which do not appear in this list, in which case their values will be set to ""
"""
added = []
removed = []
modified = []
old_keys = old.keys()
new_keys = self.keys()
diffed_arg_list = ArgList()
if self[0].is_positional != old[0].is_positional:
raise CannotDiff(
f"Diffing arguments in different formats. One has a positional starting argument "
f"and the other does not:\n"
f" Old: {old}\n"
f" New: {self}\n"
)
if self[0].is_positional:
if self[0].key != old[0].key:
raise CannotDiff(
f"Diffing arguments in different formats. Initial positional arguments "
f"do not match, so they are explicitly trying to modify different things:\n"
f" Old: {old}\n"
f" New: {self}\n"
)
else:
# Make sure we keep the positional arg
diffed_arg_list.append(Arg(key=self[0].key, value=None))
for k in old_keys:
if k not in new_keys:
if k == "disabled" and old[k] == "yes":
# disabled=yes has been removed, so let's enable it
diffed_arg_list.append(Arg("disabled", "no"))
else:
removed.append(k)
for k in new_keys:
if k not in old_keys:
# key is not present in the old list, so it must be new
added.append(k)
for k in set(old_keys).intersection(new_keys):
# key is in both lists, but the value has changed
if self[k] != old[k]:
modified.append(k)
for k in old_keys:
if k in removed:
# Removed keys are given a blank value
diffed_arg_list.append(Arg(key=f"{k}", value=""))
for k in new_keys:
if k in added or k in modified:
# Added keys are added with their value only if
# their value does not match the value in the
# old verbose output
if old_verbose is None or self[k] != old_verbose.get(k):
diffed_arg_list.append(Arg(key=k, value=self[k]))
return diffed_arg_list
def sort(self):
"""Sort the list by key
But still ensure positional arguments appear at the start
"""
positional = [a for a in self if a.is_positional]
key_value = [a for a in self if a.is_key_value]
key_value = sorted(key_value, key=str)
return ArgList(positional + key_value) | /routeros_diff-0.6a1-py3-none-any.whl/routeros_diff/arguments.py | 0.922203 | 0.404566 | arguments.py | pypi |
import itertools
import re
from dataclasses import dataclass, replace
from typing import List, Optional
from routeros_diff.arguments import Arg, ArgList
from routeros_diff.settings import Settings
from routeros_diff.expressions import Expression
from routeros_diff.utilities import find_expression
from routeros_diff.exceptions import CannotDiff
@dataclass
class Section:
"""An entire configuration section, including the path and its various expressions
For example, the following is a section with path `/ip address` and two expressions:
/ip address
add address=1.2.3.4
add address=5.6.7.8
"""
path: str
expressions: List[Expression]
settings: Settings
def __str__(self):
"""Convert this parsed expression into a valid RouterOS configuration"""
s = f"{self.path}\n"
for expression in self.expressions:
s += f"{expression}\n"
return s
def __html__(self):
s = f'<span class="ros-p">{self.path}</span><br>\n'
for expression in self.expressions:
s += f"{expression.__html__()}<br>\n"
return f'<span class="ros-s">{s}</span>'
@classmethod
def parse(cls, s: str, settings: Settings = None):
"""
Parse an input string into a Section instance
Example input:
/routing ospf network
add area=core network=10.100.0.0/24
add area=towers network=100.126.0.0/29
"""
settings = settings or Settings()
s = s.strip()
assert s.startswith("/"), "Was not passed a section block"
# Remove any empty lines that only contain an escape
s = re.sub(r"\n *\\ *\n *", "\n", s)
# Remove any trailing escapes
while s.endswith("\\"):
s = s[:-1]
# Split on any new line which is not preceded by a \
lines = re.split(r"(?<!\\)\n", s)
# Remove blank lines and comments
lines = [
l.strip() for l in lines if l.strip() and not l.strip().startswith("#")
]
path = lines[0]
# Santity checks
assert path.startswith(
"/"
), f"Section path must start with a '/'. It was: {path}"
for l in lines[1:]:
assert not l.startswith(
"/"
), f"Expression must not start with a '/'. It was: {path}"
expressions = [
Expression.parse(l, section_path=path, settings=settings) for l in lines[1:]
]
return cls(
path=path, expressions=[e for e in expressions if e], settings=settings
)
@property
def uses_natural_ids(self):
"""Does this section use natural IDs to identify its entities?"""
return not any(i is None for i in self.natural_ids)
@property
def modifies_default_only(self):
"""Do expressions in this section only find default entities
This normally translates as the section containing a single
expression in the form:
set [ default=yes ] foo=bar
"""
return self.expressions and all(i.finds_by_default for i in self.expressions)
@property
def has_any_default_entry(self):
"""Does any expression in this section find based upon defaults entities"""
return self.expressions and any(i.finds_by_default for i in self.expressions)
@property
def is_single_object_section(self):
"""Some sections do not contain multiple entities to update. Is this one of them?
For example, `/system/identity`
"""
return self.expressions and all(
i.is_single_object_expression for i in self.expressions
)
def expression_index_for_natural_key(self, natural_key, natural_id):
"""Get the position of the expression identified by the given natural key & id"""
for i, expression in enumerate(self.expressions):
if expression.natural_key_and_id == (natural_key, natural_id):
return i
raise KeyError(f"({natural_key}, {natural_id})")
def diff(
self, old: "Section", old_verbose: Optional["Section"] = None
) -> "Section":
"""Compare self to the given old section
Returns a section which will migrate the old section to
be the new section
Note that this is a great place to start debugging
strange diff behaviour.
"""
if self.path != old.path:
raise CannotDiff(f"Section paths do not match")
if self.is_single_object_section or old.is_single_object_section:
# Eg. /system/identity
diff = self._diff_single_object(old, old_verbose)
elif self.modifies_default_only and old.modifies_default_only:
# Both sections only change the default record
diff = self._diff_default_only(old, old_verbose)
elif self.modifies_default_only and not old.expressions:
# The new one sets values on the default entry, but the entry
# isn't mentioned in the old section (probably because it has
# entirely default values)
return self
elif old.modifies_default_only:
if not self.has_any_default_entry:
# Old config modifies default entry, and the new config
# makes no mention of it. We cannot delete default entries,
# so just ignore it. We ignore it by removing it and starting
# the diff process again
diff = self.diff(
Section(old.path, [], settings=self.settings), old_verbose
)
else:
raise CannotDiff(
"Cannot handle section which contain a mix of default setting and non-default setting"
)
elif old.uses_natural_ids and self.uses_natural_ids:
# We have natural keys * ids, so do a diff using those
diff = self._diff_by_id(old, old_verbose)
else:
# Well we lack natural keys/ids, so just compare values and do the
# best we can. This will result in additions/deletions, but no
# modifications.
diff = self._diff_by_value(old, old_verbose)
# Handle ordering if we need to, and if we have changes
if self.settings.is_expression_order_important(self.path) and diff.expressions:
if self.uses_natural_ids and old.uses_natural_ids:
# We can ID each record, so apply the correct ordering
for i, diff_expression in enumerate(diff.expressions):
natural_key, natural_id = diff_expression.natural_key_and_id
try:
new_expression_index = self.expression_index_for_natural_key(
natural_key, natural_id
)
except KeyError:
diff.expressions[i] = diff_expression.as_delete()
break
new_expression = self.expressions[new_expression_index]
# Find the next expression which also appears in the old section
next_expression = None
for expression in self.expressions[new_expression_index + 1 :]:
try:
_, natural_id = expression.natural_key_and_id
next_expression = old[natural_id]
# We will now place our new statement before that expression
break
except KeyError:
pass
# Update with place-before value if next_expression is available.
# Otherwise this is the last expression in the list, so just add
# it as normal (as this will append it to the end, which is what we want)
if next_expression:
new_expression.args.append(
Arg(
key="place-before",
value=find_expression(
*next_expression.natural_key_and_id, self.settings
),
)
)
else:
# Cannot be smart, so do a full wipe and recreate
wipe_expression = Expression(
section_path=self.path,
command="remove",
find_expression=Expression(
"", "find", None, ArgList(), settings=self.settings
),
args=ArgList(),
settings=self.settings,
)
diff = replace(self, expressions=[wipe_expression] + self.expressions)
return diff
def _diff_single_object(
self, old: "Section", old_verbose: Optional["Section"] = None
) -> "Section":
"""Diff for a single object section
Eg. /system/identity
"""
if len(self.expressions) > 1 or len(old.expressions) > 1:
raise CannotDiff(
f"Section {self.path} is a single object section and so must not contain more than expression. "
f"Please condense multiple expressions into a single expression"
)
if not self.expressions:
# No expressions, so return this empty section
# and assume it will not be printed
return self
if not old.expressions:
# No old expressions, so just return this section
# within needing to do any merging
return self
# Ok, we need to do some merging
new_expression = self.expressions[0]
old_verbose_args = old_verbose.expressions[0].args if old_verbose else None
old_expression = old.expressions[0]
diffed_args = new_expression.args.diff(old_expression.args, old_verbose_args)
if not diffed_args:
diff_expressions = []
else:
diff_expressions = [replace(new_expression, args=diffed_args)]
return replace(self, expressions=diff_expressions)
def _diff_default_only(
self, old: "Section", old_verbose: Optional["Section"] = None
) -> "Section":
"""Diff a section based on selection of default entity
I.e. set [ find default=yes ] foo=bar
"""
if len(old.expressions) > 1:
raise CannotDiff(
"Section can only contain one expression if using [ find default=x ]"
)
if len(self.expressions) > 1:
raise CannotDiff(
"Section can only contain one expression if using [ find default=x ]"
)
old_verbose_args = old_verbose.expressions[0].args if old_verbose else None
args_diff = self.expressions[0].args.diff(
old.expressions[0].args, old_verbose_args
)
if args_diff:
expressions = [replace(self.expressions[0], args=args_diff)]
else:
expressions = []
return Section(path=self.path, expressions=expressions, settings=self.settings,)
def _diff_by_id(
self, old: "Section", old_verbose: Optional["Section"] = None
) -> "Section":
"""Diff using natural keys/ids"""
all_natural_ids = sorted(set(self.natural_ids) | set(old.natural_ids))
new_expression: Optional[Expression]
old_expression: Optional[Expression]
remove = []
modify = []
create = []
for natural_id in all_natural_ids:
try:
new_expression = self[natural_id]
except KeyError:
new_expression = None
try:
old_expression = old[natural_id]
except KeyError:
old_expression = None
if old_expression and not new_expression:
# Deletion
remove.append(old_expression.as_delete())
elif new_expression and not old_expression:
# Creation
create.append(new_expression.as_create())
else:
# Modification
old_expression_verbose = (
old_verbose.get(natural_id) if old_verbose else None
)
modify.extend(
new_expression.diff(old_expression, old_expression_verbose)
)
# No point modifying if nothing needs changing
modify = [e for e in modify if e.has_kw_args]
# Note we remove first, as this avoids issue with value conflicts
expressions = remove + modify + create
return Section(
path=self.path,
expressions=[e for e in expressions if e],
settings=self.settings,
)
def _diff_by_value(
self, old: "Section", old_verbose: Optional["Section"] = None
) -> "Section":
"""Diff based on the values of expressions
This is the diff of last resort. It is not possible to
detect modifications in this case. All we can do is delete
and recreate.
"""
remove = []
create = []
old_expressions = {str(e.with_ordered_args()): e for e in old.expressions}
new_expressions = {str(e.with_ordered_args()): e for e in self.expressions}
for old_expression_str, old_expression in old_expressions.items():
if (
old_expression_str not in new_expressions
and old_expression.args.get("disabled") != "yes"
):
remove.append(old_expression.as_delete())
for new_expression_str, new_expression in new_expressions.items():
if new_expression_str not in old_expressions:
create.append(new_expression.as_create())
expressions = remove + create
return Section(
path=self.path,
expressions=[e for e in expressions if e],
settings=self.settings,
)
@property
def natural_ids(self) -> List[str]:
"""Get all the natural IDs for expressions in this section"""
return [e.natural_key_and_id[1] for e in self.expressions]
def __getitem__(self, natural_id):
"""Get an expression by its natural ID"""
for expression in self.expressions:
natural_id_ = expression.natural_key_and_id[-1]
if natural_id == natural_id_:
return expression
raise KeyError(natural_id)
def get(self, natural_id, default=None):
"""Get the expression for the given natural ID"""
try:
return self[natural_id]
except KeyError:
return default
def with_only_removals(self):
"""Return a copy of this section containing only 'remove' expressions"""
return replace(
self, expressions=[e for e in self.expressions if e.command == "remove"]
)
def without_any_removals(self):
"""Return a copy of this section containing everything except 'remove' expressions"""
return replace(
self, expressions=[e for e in self.expressions if e.command != "remove"]
) | /routeros_diff-0.6a1-py3-none-any.whl/routeros_diff/sections.py | 0.814791 | 0.39004 | sections.py | pypi |
import re
from copy import copy
from dataclasses import dataclass
from datetime import datetime
from typing import List, Tuple, Optional, Dict, Union
import dateutil.parser
from routeros_diff.settings import Settings
from routeros_diff.exceptions import CannotDiff
from routeros_diff.sections import Section
@dataclass
class RouterOSConfig:
"""An entire RouterOS config file.
You probably want ot use `RouterOSConfig.parse(config_string)`
to parse your config data.
"""
# Timestamp, as parsed from header comment (if present)
timestamp: Optional[datetime]
# RouterOS version, as parsed from header comment (if present)
router_os_version: Optional[Tuple[int, int, int]]
# All sections parsed from the config file
sections: List[Section]
settings: Settings = None
def __str__(self):
return "\n".join(str(s) for s in self.sections if s.expressions)
def __html__(self):
html = "<br>\n".join(s.__html__() for s in self.sections if s.expressions)
return f'<span class="ros">{html}</span>'
@classmethod
def parse(cls, s: str, settings: Union[Settings, dict] = None):
"""Takes an entire RouterOS configuration blob"""
settings = settings or Settings()
if isinstance(settings, dict):
settings = Settings(**settings)
# Normalise new lines
s = s.strip().replace("\r\n", "\n")
# Parse out version & timestamp
first_line: str
first_line, *_ = s.split("\n", maxsplit=1)
if first_line.startswith("#") and " by RouterOS " in first_line:
first_line = first_line.strip("#").strip()
timestamp, *_ = first_line.split(" by ")
timestamp = dateutil.parser.parse(timestamp)
router_os_version = re.search(r"(\d\.[\d\.]+\d)", first_line).group(1)
router_os_version = tuple([int(x) for x in router_os_version.split(".")])
else:
timestamp = None
router_os_version = None
# Split on lines that start with a slash as these are our sections
sections = ("\n" + s).split("\n/")
# Add the slash back in, and skip off the first comment
sections = ["/" + s for s in sections[1:]]
# Note that this dict will maintain it's ordering
parsed_sections: Dict[str, Section] = {}
for section in sections:
# Parse the section
parsed_section = Section.parse(section, settings=settings)
if parsed_section.path not in parsed_sections:
# Not seen this section, so store it as normal
parsed_sections[parsed_section.path] = parsed_section
else:
# This is a duplicate section, so append its expressions to the existing section
parsed_sections[parsed_section.path].expressions.extend(
parsed_section.expressions
)
return cls(
timestamp=timestamp,
router_os_version=router_os_version,
sections=list(parsed_sections.values()),
settings=settings,
)
def keys(self):
"""Get all section paths in this config file"""
return [section.path for section in self.sections]
def __getitem__(self, path):
"""Get the section at the given path"""
for section in self.sections:
if section.path == path:
return section
raise KeyError(path)
def __contains__(self, path):
"""Is the given section path in this config file?"""
return path in self.keys()
def get(self, path, default=None):
"""Get the section for the given section path"""
try:
return self[path]
except KeyError:
return default
def diff(
self, old: "RouterOSConfig", old_verbose: Optional["RouterOSConfig"] = None
):
"""Diff this config file with an old config file
Will return a new config file which can be used to
migrate from the old config to the new config.
"""
new_sections = self.keys()
old_sections = old.keys()
diffed_sections = []
# Sanity checks
if len(new_sections) != len(set(new_sections)):
raise CannotDiff("Duplicate section names present in new config")
if len(old_sections) != len(set(old_sections)):
raise CannotDiff("Duplicate section names present in old config")
# Create a list of sections paths which are present in
# either config file
section_paths = copy(new_sections)
for section_path in old_sections:
if section_path not in new_sections:
section_paths.append(section_path)
# Diff each section
for section_path in section_paths:
if section_path in new_sections:
new_section = self[section_path]
else:
# Section not found in new config, so just create a dummy empty section
new_section = Section(
path=section_path, expressions=[], settings=self.settings
)
if section_path in old_sections:
old_section = old[section_path]
else:
# Section not found in old config, so just create a dummy empty section
old_section = Section(
path=section_path, expressions=[], settings=self.settings
)
old_section_verbose = old_verbose.get(section_path) if old_verbose else None
diffed_sections.append(
new_section.diff(old_section, old_verbose=old_section_verbose)
)
return RouterOSConfig(
timestamp=None,
router_os_version=None,
sections=[s for s in diffed_sections if s.expressions],
) | /routeros_diff-0.6a1-py3-none-any.whl/routeros_diff/parser.py | 0.82425 | 0.244025 | parser.py | pypi |
import logging
import os
import time
import routeros_api
from routeros_api.exceptions import RouterOsApiConnectionError, RouterOsApiError
from routeros_telegraf_exporter import DEFAULT_MEASUREMNT, CONNECTIONS
from routeros_telegraf_exporter.utils import format_value, format_tag
from routeros_telegraf_exporter.models import JsonData
MEASUREMENT = os.environ.get("ROUTEROS_EXPORTER_MEASUREMENT", DEFAULT_MEASUREMNT)
EXPORT_OUTPUT_LINE = MEASUREMENT + ",{} {} {}"
last_resouce_run_dict = {}
log = logging.getLogger(__name__)
def host_output(args):
"""Aggregates RouterOS-API path into list
Args:
args (object): Parameters object
Returns:
list: Multidimensional aggregated list
"""
connection = CONNECTIONS.get(args.host)
if not connection:
return
try:
api = connection.get_api()
except RouterOsApiError as e:
log.debug(e)
return
if not api:
return
list_adress = api.get_resource(args.resource.get("path"))
res = []
tags_fields = args.resource.get("tags")
values_fields = args.resource.get("values")
values_transform = args.resource.get("values_transform")
try:
values = list_adress.get()
except RouterOsApiError as e:
log.debug(e)
return
for address in values:
extra_values = []
tag_values = [("router_name", args.host)]
# If value key is missing from address
for value_field in values_fields:
if value_field not in address.keys() and values_transform:
transform_values = list(filter(lambda x: x.get(value_field), values_transform))
if transform_values:
address[value_field] = "missing"
for key, value in address.items():
# Tags
if format_tag(value) and key in tags_fields:
tag_values.append((key, format_tag(value)))
# Values
if format_value(value) and key in values_fields:
extra_values.append((key, format_value(value)))
# Transform values
if values_transform:
transform_values = list(filter(lambda x: x.get(key), values_transform))
if transform_values:
transform_dict = transform_values[0].get(key)
default_value = transform_dict.get("default")
name = transform_dict.get("rename", key)
value = transform_dict.get(value, default_value)
extra_values.append((name, value))
if extra_values and tag_values:
if args.output_type == "json":
res.append(JsonData(measurement=MEASUREMENT, tags=dict(tag_values), fields=dict(extra_values)).__dict__)
elif args.output_type == "influx":
res.append(
EXPORT_OUTPUT_LINE.format(','.join(list(map(lambda x: "{v[0]}={v[1]}".format(v=x), tag_values))),
','.join(list(map(lambda x: "{v[0]}={v[1]}".format(v=x), extra_values))),
time.time_ns())
)
return res
def extract_default_resouces(args):
"""Helper function to extracts default resources from config file
Args:
args (object): Arguments object
Returns:
dict: Default section from config or None
"""
res = list(filter(lambda x: x.get("default"), args.hosts_config))
if res:
return res[0]['default']['resources']
return None
def close_connections():
"""Helper function for closing routeros connections
"""
for srv, connection in CONNECTIONS.items():
connection.disconnect()
def get_connections(args):
"""Helper function for building connection pool for routers
Args:
args (object): Arguments object
"""
if not args.hosts:
raise RuntimeError("Missing hosts param")
hosts = args.hosts.split(",")
for host in hosts:
if host == "rte_default_gw":
return
args.host = host
connection = CONNECTIONS.get(args.host)
if not connection:
CONNECTIONS[args.host] = get_connection(args)
def get_connection(args):
connection = None
try:
connection = routeros_api.RouterOsApiPool(args.host,
port=args.port,
username=args.user,
password=args.password,
plaintext_login=True)
except RouterOsApiConnectionError as e:
logging.error("Unable to connect {}: {}".format(args.host, e))
return connection
def get_routers_data(args, hosts, q):
"""Iterates over hosts and returns aggregated values
Args:
args (object): Parameters object
hosts (str): Comma separated hosts
q (Queue): Queue object
Returns:
list: List of agregated routers values
"""
routers_values = []
for host in hosts:
router_value = get_router_data(args, host, q)
routers_values.append(router_value)
return routers_values
def get_router_data(args, host, q):
"""Main RouterOS-API values aggregator
Args:
args (object): Arguments object
host (str): Host string
q (Queue): Queue object
Returns:
list: Agregated list of values
"""
global last_resouce_run_dict
router_values = []
host_config = list(filter(lambda x: x.get(host), args.hosts_config))
if not host_config:
return
default_config_resources = extract_default_resouces(args)
host_config = host_config[0].get(host)
resources = host_config.get('resources')
if not resources:
resources = []
if default_config_resources:
resources.extend(default_config_resources)
for resource in resources:
args.host = host
args.resource = resource
if not args.ignore_interval and args.daemon:
resource_path = resource.get("path")
resource_interval_millis = resource.get("interval", 60) * 1000
last_resource_run_key = "{}_{}".format(host.replace(".", "_"), resource_path.replace("/", "_"))
current_milli_sec = int(round(time.time() * 1000))
last_resouce_run_millis = last_resouce_run_dict.get(last_resource_run_key)
if not last_resouce_run_millis:
last_resouce_run_dict[last_resource_run_key] = current_milli_sec
last_resouce_run_millis = current_milli_sec
if (current_milli_sec - last_resouce_run_millis) < resource_interval_millis:
continue
last_resouce_run_dict[last_resource_run_key] = current_milli_sec
values = host_output(args)
log.debug(values)
if values:
if not q.full():
q.put(values)
router_values.append(values)
return router_values
def worker(args, q, daemon=True):
"""Main worker for cli and web application
Args:
args (object): Arguments object
q (Queue): Queue object where the results is stored
daemon (bool): On True iterates endlessly
Returns:
list: Multidimensional list of agregated values
"""
get_connections(args)
hosts = args.hosts.split(",")
values = []
if not daemon:
values = get_routers_data(args, hosts, q)
while daemon:
values = get_routers_data(args, hosts, q)
return values | /routeros_telegraf_exporter-0.1.13.tar.gz/routeros_telegraf_exporter-0.1.13/routeros_telegraf_exporter/routeros_exporter.py | 0.50293 | 0.151529 | routeros_exporter.py | pypi |
# README
Router Scraper
# Details
This project aims at providing a python package to interact with different
routers.
# Getting started
Import the required class from the `routerscraper` package:
- `fastgate_dn8245f2.py` for the Fastgate Huawei DN8245f2
- `technicolor_tg789vacv2.py` for the Technicolor TG789vac v2
- `tplink_m7000.py` for the TP-Link M7000
The constructor needs the following parameters
- `host`: the hostname (or IP address) of the router
- `user`: the username to log in the router
- `password`: the password to log in the router
Then you can get relevant information with:
- `listDevices()`: get the list of connected devices
- `getSmsList()`: get the list of SMS present on the device
The functions automatically issue a login request if necessary.
## Supported functions
Not all functions are supported by all devices. See table below for supported
functions:
| Function | `listDevices()` | `getSmsList()` |
| :----------------------- | :-------------: | :------------: |
| Fastgate Huawei DN8245f2 | X | |
| Technicolor TG789vac v2 | X | |
| TP-Link M7000 | | X |
## Saving and restoring sessions
It is possible to export the sessions to later restore them; this can be used
to avoid having to login every time.
To do this, you can export the current status through `exportSessionStatus`,
which returns a string (base64 encoded JSON dict) that can be saved. The
`restoreSessionStatus` function restores the status from that string.
# Supported routers
At present the package was tested with the following routers firmwares
- Fastgate Huawei DN8245f2 - software 1.0.1b
- Technicolor TG789vac v2 - software 16.3.7636
- TP-Link M7000 - software 1.0.10 Build 211230 Rel.1026n
# Developer notes
Here are some additional notes for developing the library (not just using it).
## Project layout
- `README.md`: This file
- `README.md.license`: License information for this file
- `pyproject.toml`: Configuration file for build environment
- `setup.py`: Fallback file for editable installs
- `Makefile`: Makefile to help running development scripts
- **src/routerscraper**: Folder with the scraping package
- `basescraper.py`: Contains the base class implementation
- `requestscraper.py`: Contains the base class for scrapers using requests
- `seleniumscraper.py`: Contains the base class for scrapers using Selenium
- `dataTypes.py`: Module to group data types used in the functions
- `fastgate_dn8245f2.py`: Contains the implementation for the Fastgate
Huawei DN8245f2
- `technicolor_tg789vacv2.py`: Contains the implementation for the
Technicolor TG789vac v2
- `tplink_m7000.py`: Contains the implementation for the TP-Link M7000
- **tests**: Folder with the unit tests. Each test file in this folder
implements tests linked to the corresponding file in the
**routerscraper** folder; if necessary, helper files group
functions needed by the corresponding test file. **files_\***
folder contains files needed by the test files.
`helpers_common.py` implements some classes useful for all the
tests.
- **examples**: Folder with example code
- `fastgate_dn8245f2.py`: Contains an example implementation for the
Fastgate Huawei DN8245f2
- `technicolor_tg789vacv2.py`: Contains an example implementation for the
Technicolor TG789vac v2
- `tplink_m7000.py`: Contains an example implementation for the TP-Link
M7000
- **LICENSES**: Folder with the licenses statements
## Examples
All example scripts behave in the same way. They will connect to the router and
print the list of connected devices (or the received SMS, in the case of the
TP-Link).
Call the script with three parameters:
1. URL of the router
2. USERNAME
3. PASSWORD
## Makefile
For development purposes there is a Makefile to automate the different actions.
The available targets are:
- **all**: Build the package (equal to make dist); this is the goal (i.e.
target executed when calling make without targets)
- **clean**: Clean the project (removing all the .pyc files)
- **dist**: Build the package (both .tar.gz and .whl archives)
- **deploy**: Upload the package on PyPI
- *.venv/bin/activate*: Target to create the virtual environment
- **create_venv**: Easier to remember PHONY to create the virtual environment
- **clean_venv**: Remove the virtual environment
- **code_review**: Run the commands to review the code (flake8 and reuse)
- **tests**: Run the tests on the library
- **release-tests**: Execute all the checks for a release; this target is
automatically executed by the other **release-** targets.
- **release-major**: Release the current version bumping the major index. This
target needs that the GIT has no uncommitted changes and
must be run from the main branch only.
- **release-minor**: Release the current version bumping the minor index. This
target needs that the GIT has no uncommitted changes and
must be run from the main branch only.
- **release-patch**: Release the current version bumping the patch index. This
target needs that the GIT has no uncommitted changes and
must be run from the main branch only.
- **check-git-clean**: Helper recipe that tests if GIT repo is clean
- **check-git-on-main**: Helper recipe that tests if GIT repo is on main branch
Note: **bold** targets are PHONY, *italic* ones are files.
All the operations will happen in a virtual environment. The virtual
environment folder is set in environment variable VENV, which defaults to
*.venv*.
NOTE: if you change the prerequisites in the pyproject.toml, remember to run
`make clean_venv` to recreate the virtual environment with the new data.
## Release procedure
In order to release a new version, everything shall be already committed to the
GIT repo (since the process requires a clean GIT repo); in addition, the repo
shall be on main branch. If this is not true, the process will fail.
After having committed the last modifications, issue the following commands:
make clean
make release-XXX
The release target shall be `release-major`, `release-minor` or `release-patch`,
according to which part of the software version shall be increased.
If everything is correct, run `make deploy` to upload the files to PyPI.
## Setup the repository
Clone the repository from
[git@github.com:fra87/RouterScraper.git](git@github.com:fra87/RouterScraper.git)
| /routerscraper-0.3.1.tar.gz/routerscraper-0.3.1/README.md | 0.867162 | 0.711969 | README.md | pypi |
import heapq
from routes1846.boardtile import EastTerminalCity, WestTerminalCity
class Route(object):
    """An ordered sequence of board tiles a train runs over.

    Routes compare and hash as unordered tile sets (a route and its reverse
    are equal), while path order is preserved for terminus/bonus logic.
    """

    @staticmethod
    def create(path):
        """Build a route from an iterable of tiles."""
        return Route(tuple(path))

    @staticmethod
    def empty():
        """Build a zero-length route (falsy)."""
        return Route(tuple())

    @staticmethod
    def single(tile):
        """Build a route containing a single tile."""
        return Route.create((tile, ))

    def __init__(self, path):
        self._path = tuple(path)
        # Undirected edges between consecutive tiles; used for overlap checks.
        self._edges = [{path[k-1], path[k]} for k in range(1, len(path))]

    def merge(self, route):
        """Concatenate this route with another, in order."""
        return Route.create(self._path + route._path)

    def value(self, train, railroad, phase):
        """Return the revenue this route earns for the given train/railroad.

        If both termini are East/West terminal cities, also evaluate the
        east-to-west bonus variant and take whichever is worth more.
        """
        edges = [self._path[0], self._path[-1]]
        east_to_west = not bool({EastTerminalCity, WestTerminalCity} - {type(tile) for tile in edges})
        if east_to_west:
            # The bonus run must include both termini, so only (collect - 2)
            # interior tiles can count toward the train's collect limit.
            with_bonus = sum(heapq.nlargest(train.collect - 2, [tile.value(railroad, phase) for tile in self._path[1:-1]])) + sum([edge.value(railroad, phase, east_to_west) for edge in edges])
            without_bonus = sum(heapq.nlargest(train.collect, [tile.value(railroad, phase) for tile in self]))
            value = max((with_bonus, without_bonus))
        else:
            value = sum(heapq.nlargest(train.collect, [tile.value(railroad, phase) for tile in self]))
        return value

    def overlap(self, other):
        """True if this route shares any track edge with another route."""
        for edge in self._edges:
            if edge in other._edges:
                return True
        return False

    def subroutes(self, start):
        """Return all sub-routes through the tile at cell ``start`` that
        contain at least 2 cities.

        NOTE(review): the backward slice ``self._path[index:start_index]``
        excludes the start tile itself, and the forward slices never include
        the final path tile -- confirm this is the intended behavior.
        """
        if not self.contains_cell(start):
            return Route.empty()
        start_index = [index for index, tile in enumerate(self._path) if tile.cell == start][0]
        backwards_subroutes = {Route.create(self._path[index:start_index]) for index in range(start_index - 1, -1, -1)}
        forwards_subroutes = {Route.create(self._path[start_index:index]) for index in range(start_index + 1, len(self._path))}
        subroutes = backwards_subroutes.union(forwards_subroutes)
        return [subroute for subroute in subroutes if len(subroute.cities) >= 2]

    def contains_cell(self, cell):
        """True if any tile on the route sits on the given cell."""
        return cell in [tile.cell for tile in self]

    @property
    def cities(self):
        """All city tiles on the route, in path order."""
        return [tile for tile in self._path if tile.is_city]

    def __iter__(self):
        return iter(self._path)

    def __bool__(self):
        return bool(self._path)

    def __len__(self):
        return len(self._path)

    def __hash__(self):
        # Bug fix: hash(tuple(set(...))) depends on set iteration order, which
        # is not guaranteed to be stable. frozenset hashes are
        # order-independent, matching the set-based __eq__ below.
        return hash(frozenset(self._path))

    def __eq__(self, other):
        return isinstance(other, Route) and set(other._path) == set(self._path)

    def __str__(self):
        return ", ".join([str(tile.cell) for tile in self])

    def run(self, train, railroad, phase):
        """Run the route with a train, producing a priced _RunRoute."""
        value = self.value(train, railroad, phase)
        return _RunRoute(self, value, train)
class _RunRoute(object):
    """A Route that has been run by a specific train, with its computed value."""

    def __init__(self, route, value, train):
        self._route = route
        self.value = value
        self.train = train
        self._mail_contract = False

    def overlap(self, other):
        """Delegate edge-overlap detection to the underlying routes."""
        return self._route.overlap(other._route)

    def add_mail_contract(self):
        """Apply the mail contract bonus ($10 per city), at most once."""
        if self._mail_contract:
            return
        self._mail_contract = True
        self.value += 10 * len(self.cities)

    @property
    def cities(self):
        """The city tiles of the underlying route."""
        return self._route.cities

    def __str__(self):
        return str(self._route)

    def __iter__(self):
        return iter(self._route)
import itertools
CHICAGO_CELL = None # Defined below
_CELL_DB = {}
class Cell(object):
    """A single hex on the board, identified by a row letter and column number."""

    @staticmethod
    def from_coord(coord):
        """Look up the canonical Cell for a coordinate string such as "D6"."""
        if len(coord) < 2 or len(coord) > 3:
            raise ValueError("Provided invalid coord: {}".format(coord))
        row, col = coord[0], int(coord[1:])
        if row not in _CELL_DB or col not in _CELL_DB[row]:
            raise ValueError("The coordinate provided is not legal: {}".format(coord))
        return _CELL_DB[row][col]

    def __init__(self, row, col):
        self.__row = row
        self.__col = col

    @property
    def neighbors(self):
        """Adjacent cells keyed by hex side (0-5); None for off-map sides."""
        above = _CELL_DB.get(chr(ord(self.__row) + 1), {})
        below = _CELL_DB.get(chr(ord(self.__row) - 1), {})
        same = _CELL_DB.get(self.__row, {})
        return {
            0: above.get(self.__col - 1),
            1: same.get(self.__col - 2),
            2: below.get(self.__col - 1),
            3: below.get(self.__col + 1),
            4: same.get(self.__col + 2),
            5: above.get(self.__col + 1),
        }

    def __hash__(self):
        return hash(str(self))

    def __eq__(self, other):
        return isinstance(other, Cell) and (self.__row, self.__col) == (other.__row, other.__col)

    def __gt__(self, other):
        # Row first, then column -- exactly lexicographic tuple order.
        return (self.__row, self.__col) > (other.__row, other.__col)

    def __lt__(self, other):
        return (self.__row, self.__col) < (other.__row, other.__col)

    def __ge__(self, other):
        return self > other or self == other

    def __le__(self, other):
        return self < other or self == other

    def __str__(self):
        return "{}{}".format(self.__row, self.__col)

    def __repr__(self):
        return str(self)
# The full set of legal hexes on the map, keyed by row letter and then column
# number. Column ranges step by 2 because adjacent columns within a hex row
# differ by 2 in this offset-coordinate scheme.
_CELL_DB = {
    "A": {15: Cell("A", 15)},
    "B": {col: Cell("B", col) for col in range(8, 19, 2)},
    "C": {col: Cell("C", col) for col in itertools.chain([5], range(7, 18, 2), [21])},
    "D": {col: Cell("D", col) for col in itertools.chain(range(6, 15, 2), range(18, 23, 2))},
    "E": {col: Cell("E", col) for col in range(5, 24, 2)},
    "F": {col: Cell("F", col) for col in range(4, 23, 2)},
    "G": {col: Cell("G", col) for col in range(3, 22, 2)},
    "H": {col: Cell("H", col) for col in itertools.chain(range(2, 17, 2), [20])},
    "I": {col: Cell("I", col) for col in itertools.chain(range(1, 12, 2), range(15, 18, 2))},
    "J": {col: Cell("J", col) for col in itertools.chain(range(4, 11, 2))},
    "K": {3: Cell("K", 3)}
}
# Chicago gets special handling throughout the rules; its cell is exposed as a
# module-level constant (declared None at the top of the module, populated
# here once the cell database exists).
CHICAGO_CELL = Cell.from_coord("D6")
def board_cells():
    """Yield every legal cell on the board, in database order."""
    for columns in _CELL_DB.values():
        yield from columns.values()
import collections
import json
from routes1846 import get_data_file
from routes1846.cell import Cell, CHICAGO_CELL
from routes1846.tokens import MeatPackingToken, SeaportToken, Station
BASE_BOARD_FILENAME = "base-board.json"
class BoardSpace(object):
    """Base class for every space on the board: tracks, cities, and termini.

    Holds the attributes and token-bonus behavior shared by all space types.
    """

    def __init__(self, name, cell, phase, paths, is_city=False, is_z=False, is_chicago=False, is_terminal_city=False,
                 port_value=0, meat_value=0):
        # Unnamed spaces fall back to their coordinate string.
        self.name = name or str(cell)
        self.cell = cell
        self.phase = phase
        self._paths = paths
        self.port_value = port_value
        self.port_token = None
        self.meat_value = meat_value
        self.meat_token = None
        self.is_city = is_city
        self.is_z = is_z
        self.is_chicago = is_chicago
        self.is_terminal_city = is_terminal_city

    def paths(self, enter_from=None, railroad=None):
        """Return exit cells reachable from ``enter_from``, or every entry cell."""
        return self._paths[enter_from] if enter_from else tuple(self._paths.keys())

    def place_seaport_token(self, railroad):
        """Attach the seaport token; only legal on spaces with a port value."""
        if self.port_value == 0:
            raise ValueError("It is not legal to place the seaport token on this space ({}).".format(self.cell))
        self.port_token = SeaportToken(self.cell, railroad)

    def place_meat_packing_token(self, railroad):
        """Attach the meat packing token; only legal on spaces with a meat value."""
        if self.meat_value == 0:
            raise ValueError("It is not legal to place the meat packing token on this space ({}).".format(self.cell))
        self.meat_token = MeatPackingToken(self.cell, railroad)

    def port_bonus(self, railroad):
        """Seaport bonus earned here by ``railroad``, if it owns the token."""
        if self.port_token and self.port_token.railroad == railroad:
            return self.port_value
        return 0

    def meat_bonus(self, railroad):
        """Meat packing bonus earned here by ``railroad``, if it owns the token."""
        if self.meat_token and self.meat_token.railroad == railroad:
            return self.meat_value
        return 0
class Track(BoardSpace):
    """Plain track: connects edges, earns no revenue."""

    @staticmethod
    def create(coord, edges, phase=None):
        """Build a Track at ``coord`` whose paths join each pair of edges."""
        cell = Cell.from_coord(coord)
        paths = collections.defaultdict(list)
        for start_edge, end_edge in edges:
            start_cell, end_cell = cell.neighbors[start_edge], cell.neighbors[end_edge]
            # Track is traversable in both directions.
            paths[start_cell].append(end_cell)
            paths[end_cell].append(start_cell)
        return Track(cell, phase, paths)

    def __init__(self, cell, phase, paths):
        super(Track, self).__init__(None, cell, phase, paths)

    def value(self, railroad, phase):
        """Track never contributes revenue."""
        return 0
class City(BoardSpace):
    """A revenue-earning city space that can hold railroad stations."""

    @staticmethod
    def create(coord, name, phase=0, edges=[], value=0, capacity=0, is_z=False, port_value=0, meat_value=0):
        # NOTE(review): mutable default for `edges` is safe only because it is
        # never mutated here.
        cell = Cell.from_coord(coord)
        neighbors = {cell.neighbors[side] for side in edges}
        if cell == CHICAGO_CELL:
            # Chicago is special: each exit is its own dead-end branch, so
            # entering from one exit does not connect to the others.
            paths = {cell.neighbors[side]: [] for side in edges}
            return Chicago(phase, paths, neighbors, value, capacity, port_value=port_value, meat_value=meat_value)
        else:
            # A normal city connects every entrance to every other entrance.
            paths = {neighbor: list(neighbors - {neighbor}) for neighbor in neighbors}
            return City(name, cell, phase, paths, neighbors, value, capacity, is_z, False, port_value=port_value, meat_value=meat_value)

    def __init__(self, name, cell, phase, paths, neighbors, value, capacity, is_z=False, is_chicago=False, port_value=0, meat_value=0):
        super(City, self).__init__(name, cell, phase, paths, True, is_z, is_chicago, port_value=port_value, meat_value=meat_value)
        self.neighbors = neighbors
        self._value = value
        # Maximum number of stations this city can hold.
        self.capacity = capacity
        self._stations = []

    @property
    def stations(self):
        """The stations currently in this city, as an immutable tuple."""
        return tuple(self._stations)

    def value(self, railroad, phase):
        """Base city value plus any token bonuses owned by ``railroad``."""
        return self._value + self.port_bonus(railroad) + self.meat_bonus(railroad)

    def add_station(self, railroad):
        """Place a station for ``railroad``; raises if duplicate or city is full."""
        if self.has_station(railroad.name):
            raise ValueError("{} already has a station in {} ({}).".format(railroad.name, self.name, self.cell))
        if self.capacity <= len(self.stations):
            raise ValueError("{} ({}) cannot hold any more stations.".format(self.name, self.cell))
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return the named railroad's station here, or None."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        """True if the named railroad has a station in this city."""
        return bool(self.get_station(railroad_name))

    def passable(self, railroad):
        """A train may pass through if a slot is open or the railroad has a station here."""
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)
class Chicago(City):
    """The Chicago city space: each station is bound to a specific exit cell."""

    def __init__(self, phase, paths, neighbors, value, capacity, port_value, meat_value):
        super(Chicago, self).__init__("Chicago", CHICAGO_CELL, phase, paths, neighbors, value, capacity, False, True,
                                      port_value=port_value, meat_value=meat_value)
        # Maps each exit cell to the station occupying that branch, if any.
        self.exit_cell_to_station = {}

    def add_station(self, railroad, exit_cell):
        """Place a station for ``railroad`` on the branch facing ``exit_cell``."""
        station = super(Chicago, self).add_station(railroad)
        self.exit_cell_to_station[exit_cell] = station
        return station

    def passable(self, railroad):
        # Chicago is never passable, regardless of open slots.
        return False

    def get_station_exit_cell(self, user_station):
        """Return the exit cell whose branch holds ``user_station``; raises if absent."""
        for exit_cell, station in self.exit_cell_to_station.items():
            if station == user_station:
                return exit_cell
        raise ValueError("The requested station was not found: {}".format(user_station))
class TerminalCity(BoardSpace):
    """An off-board terminal city whose value depends on the game phase."""

    @staticmethod
    def create(coord, name, edges, values, is_east=False, is_west=False, port_value=0, meat_value=0):
        cell = Cell.from_coord(coord)
        # Terminal cities are dead ends: each entrance leads nowhere else.
        paths = {cell.neighbors[side]: [] for side in edges}
        neighbors = set(paths.keys())
        if is_east:
            return EastTerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)
        elif is_west:
            return WestTerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)
        else:
            return TerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)

    def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
        super(TerminalCity, self).__init__(name, cell, None, paths, True, is_terminal_city=True, port_value=port_value, meat_value=meat_value)
        self.neighbors = neighbors
        # Value during phases 1-2, and from phase 3 onward, respectively.
        self.phase1_value = value_dict["phase1"]
        self.phase3_value = value_dict["phase3"]

    def value(self, railroad, phase):
        """Phase-dependent value plus any token bonuses owned by ``railroad``."""
        value = self.phase1_value if phase in (1, 2) else self.phase3_value
        return value + self.port_bonus(railroad) + self.meat_bonus(railroad)

    def passable(self, railroad):
        # Terminal cities are never passable.
        return False
class EastTerminalCity(TerminalCity):
    """An eastern terminus; pays an extra bonus on east-to-west runs."""

    def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
        super(EastTerminalCity, self).__init__(name, cell, paths, neighbors, value_dict, port_value, meat_value)
        self.bonus = value_dict["bonus"]

    def value(self, railroad, phase, east_to_west=False):
        """Terminal value, plus the east-west bonus when the run qualifies."""
        base = super(EastTerminalCity, self).value(railroad, phase)
        return base + self.bonus if east_to_west else base
class WestTerminalCity(TerminalCity):
    """A western terminus; pays an extra bonus on east-to-west runs."""

    def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
        super(WestTerminalCity, self).__init__(name, cell, paths, neighbors, value_dict, port_value, meat_value)
        self.bonus = value_dict["bonus"]

    def value(self, railroad, phase, east_to_west=False):
        """Terminal value, plus the east-west bonus when the run qualifies."""
        base = super(WestTerminalCity, self).value(railroad, phase)
        return base + self.bonus if east_to_west else base
def load():
    """Load all fixed board spaces (tracks, cities, termini) from the base board file."""
    with open(get_data_file(BASE_BOARD_FILENAME)) as board_file:
        board_json = json.load(board_file)
    board_tiles = []
    for coord, track_args in board_json["tracks"].items():
        board_tiles.append(Track.create(coord, **track_args))
    for coord, city_args in board_json["cities"].items():
        board_tiles.append(City.create(coord, **city_args))
    for coord, board_edge_args in board_json["edges"].items():
        board_tiles.append(TerminalCity.create(coord, **board_edge_args))
    return board_tiles
import itertools
from routes1846 import boardtile
from routes1846.cell import Cell, CHICAGO_CELL, board_cells
from routes1846.placedtile import Chicago, PlacedTile
from routes1846.tokens import Station
class Board(object):
    """The game board: immutable base spaces overlaid with player-placed tiles."""

    @staticmethod
    def load():
        """Build a Board from the base board definition file."""
        board_tiles = {board_tile.cell: board_tile for board_tile in boardtile.load()}
        return Board(board_tiles)

    def __init__(self, board_tiles):
        self._board_tiles = board_tiles   # base (printed) spaces, keyed by cell
        self._placed_tiles = {}           # player-laid tiles override base spaces

    def place_tile(self, coord, tile, orientation):
        """Place (or upgrade to) ``tile`` at ``coord`` in ``orientation``.

        Validates orientation range, space-type compatibility (track vs city
        vs Z city), phase progression, and that every path of the replaced
        tile is preserved by the new one.
        """
        cell = Cell.from_coord(coord)
        if cell == CHICAGO_CELL or tile.is_chicago:
            raise ValueError("Since Chicago ({}) is a special tile, please use Board.place_chicago().".format(CHICAGO_CELL))
        old_tile = self.get_space(cell)
        if int(orientation) not in range(0, 6):
            raise ValueError("Orientation out of range. Expected between 0 and 5, inclusive. Got {}.".format(orientation))
        if old_tile and old_tile.is_terminal_city:
            raise ValueError("Cannot upgrade the terminal cities.")
        if not old_tile or not old_tile.is_city:
            if tile.is_city or tile.is_z:
                tile_type = "Z city" if tile.is_z else "city"
                raise ValueError("{} is a track space, but you placed a {} ({}).".format(cell, tile_type, tile.id))
        elif old_tile.is_z:
            if not tile.is_z:
                tile_type = "city" if tile.is_city else "track"
                raise ValueError("{} is a Z city space, but you placed a {} ({}).".format(cell, tile_type, tile.id))
        elif old_tile.is_city:
            if not tile.is_city or tile.is_z:
                tile_type = "Z city" if tile.is_z else "track"
                raise ValueError("{} is a regular city space, but you placed a {} ({}).".format(cell, tile_type, tile.id))
        if old_tile:
            if old_tile.phase is None:
                raise ValueError("{} cannot be upgraded.".format(cell))
            elif old_tile.phase >= tile.phase:
                raise ValueError("{}: Going from phase {} to phase {} is not an upgrade.".format(cell, old_tile.phase, tile.phase))
            new_tile = PlacedTile.place(old_tile.name, cell, tile, orientation, stations=old_tile.stations, port_value=old_tile.port_value, meat_value=old_tile.meat_value)
            # The new tile must preserve every path of the tile it replaces.
            # Fix: new_paths is loop-invariant, so compute it once instead of
            # rebuilding it on every iteration.
            new_paths = tuple([(start, end) for start, ends in new_tile._paths.items() for end in ends])
            for old_start, old_ends in old_tile._paths.items():
                old_paths = tuple([(old_start, end) for end in old_ends])
                if not all(old_path in new_paths for old_path in old_paths):
                    raise ValueError("The new tile placed on {} does not preserve all the old paths.".format(cell))
        else:
            new_tile = PlacedTile.place(None, cell, tile, orientation)
        self._placed_tiles[cell] = new_tile

    def place_station(self, coord, railroad):
        """Place a station for ``railroad`` on the city at ``coord`` (not Chicago)."""
        cell = Cell.from_coord(coord)
        if cell == CHICAGO_CELL:
            raise ValueError("Since Chicago ({}) is a special tile, please use Board.place_chicago_station().".format(CHICAGO_CELL))
        tile = self.get_space(cell)
        if not tile.is_city:
            raise ValueError("{} is not a city, so it cannot have a station.".format(cell))
        tile.add_station(railroad)

    def place_chicago(self, tile):
        """Upgrade the Chicago tile, preserving its exit-to-station mapping."""
        cell = CHICAGO_CELL
        old_tile = self._placed_tiles.get(cell) or self._board_tiles.get(cell)
        if not old_tile.phase or old_tile.phase >= tile.phase:
            raise ValueError("{}: Going from phase {} to phase {} is not an upgrade.".format(cell, old_tile.phase, tile.phase))
        new_tile = Chicago.place(tile, old_tile.exit_cell_to_station, port_value=old_tile.port_value, meat_value=old_tile.meat_value)
        self._placed_tiles[cell] = new_tile

    def place_chicago_station(self, railroad, exit_side):
        """Place a Chicago station for ``railroad`` on the branch facing ``exit_side``."""
        chicago = self.get_space(CHICAGO_CELL)
        exit_cell = CHICAGO_CELL.neighbors[exit_side]
        chicago.add_station(railroad, exit_cell)

    def place_seaport_token(self, coord, railroad):
        """Place the unique seaport token at ``coord``; raises if already elsewhere."""
        current_cell = Cell.from_coord(coord)
        for cell in board_cells():
            space = self.get_space(cell)
            if space and space.port_token and cell != current_cell:
                raise ValueError("Cannot place the seaport token on {}. It's already been placed on {}.".format(current_cell, cell))
        self.get_space(current_cell).place_seaport_token(railroad)

    def place_meat_packing_token(self, coord, railroad):
        """Place the unique meat packing token at ``coord``; raises if already elsewhere."""
        current_cell = Cell.from_coord(coord)
        for cell in board_cells():
            space = self.get_space(cell)
            if space and space.meat_token and cell != current_cell:
                raise ValueError("Cannot place the meat packing token on {}. It's already been placed on {}.".format(current_cell, cell))
        self.get_space(current_cell).place_meat_packing_token(railroad)

    def stations(self, railroad_name=None):
        """All stations on the board, optionally filtered to one railroad."""
        all_tiles = list(self._placed_tiles.values()) + list(self._board_tiles.values())
        all_stations = itertools.chain.from_iterable([tile.stations for tile in all_tiles if isinstance(tile, (boardtile.City, PlacedTile))])
        if railroad_name:
            return tuple([station for station in all_stations if station.railroad.name == railroad_name])
        else:
            return tuple(all_stations)

    def get_space(self, cell):
        """Return the current space at ``cell``: the placed tile if any, else the base space."""
        return self._placed_tiles.get(cell) or self._board_tiles.get(cell)

    def validate(self):
        """Raise if any placed tile has no station and connects to no neighbor."""
        invalid = []
        for cell, placed_tile in self._placed_tiles.items():
            if not placed_tile.stations:
                for neighbor_cell in placed_tile.paths():
                    neighbor = self.get_space(neighbor_cell)
                    if neighbor and cell in neighbor.paths():
                        break
                else:
                    invalid.append(cell)
        if invalid:
            invalid_str = ", ".join([str(cell) for cell in invalid])
            raise ValueError("Tiles at the following spots have no neighbors and no stations: {}".format(invalid_str))
import collections
from routes1846.cell import Cell, CHICAGO_CELL
from routes1846.tokens import MeatPackingToken, SeaportToken, Station
class PlacedTile(object):
    """A tile laid on the board by a player, wrapping the underlying tile.

    Mirrors the BoardSpace interface (name, paths, value, stations, token
    bonuses) so placed tiles and printed board spaces are interchangeable.
    """

    @staticmethod
    def _rotate(side, orientation):
        """Map a tile-local side (0-5) to a board side for the given orientation."""
        # ((side num) + (number of times rotated)) mod (number of sides)
        return (side + int(orientation)) % 6

    @staticmethod
    def get_paths(cell, tile, orientation):
        """Translate the tile's side-to-side paths into cell-to-cell paths.

        Raises ValueError if any rotated path would leave the map.
        """
        paths = {}
        for start, ends in tile.paths.items():
            start_cell = cell.neighbors[PlacedTile._rotate(start, orientation)]
            paths[start_cell] = tuple([cell.neighbors[PlacedTile._rotate(end, orientation)] for end in ends])
        if None in paths:
            raise ValueError("Placing tile {} in orientation {} at {} goes off-map.".format(tile.id, orientation, cell))
        return paths

    @staticmethod
    def place(name, cell, tile, orientation, stations=[], port_value=None, meat_value=None):
        """Create a PlacedTile at ``cell`` in ``orientation``.

        Fix: this previously duplicated get_paths() verbatim; it now calls it.
        """
        # This will cause problems if B&O or PRR use their special station...
        paths = PlacedTile.get_paths(cell, tile, orientation)
        return PlacedTile(name, cell, tile, stations, paths, port_value, meat_value)

    def __init__(self, name, cell, tile, stations=[], paths={}, port_value=None, meat_value=None):
        self.name = name or str(cell)
        self.cell = cell
        self.tile = tile
        self.capacity = tile.capacity
        self._stations = list(stations)
        self._paths = paths
        self.port_value = port_value
        self.port_token = None
        self.meat_value = meat_value
        self.meat_token = None
        # Mirror the wrapped tile's attributes for a BoardSpace-like interface.
        self.phase = self.tile.phase
        self.is_city = self.tile.is_city
        self.is_z = self.tile.is_z
        self.is_terminal_city = False

    def value(self, railroad, phase):
        """Tile value plus any token bonuses owned by ``railroad``."""
        return self.tile.value + self.port_bonus(railroad) + self.meat_bonus(railroad)

    def passable(self, railroad):
        """True if a station slot is open or ``railroad`` has a station here."""
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)

    @property
    def stations(self):
        """The stations currently on this tile, as an immutable tuple."""
        return tuple(self._stations)

    def add_station(self, railroad):
        """Place a station for ``railroad``; raises if duplicate or tile is full."""
        if self.has_station(railroad.name):
            raise ValueError("{} already has a station in {} ({}).".format(railroad.name, self.name, self.cell))
        if self.capacity <= len(self.stations):
            raise ValueError("{} ({}) cannot hold any more stations.".format(self.name, self.cell))
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return the named railroad's station here, or None."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        """True if the named railroad has a station on this tile."""
        return bool(self.get_station(railroad_name))

    def place_seaport_token(self, railroad):
        """Attach the seaport token; only legal on spaces with a port value."""
        if self.port_value == 0:
            raise ValueError("It is not legal to place the seaport token on this space ({}).".format(self.cell))
        self.port_token = SeaportToken(self.cell, railroad)

    def place_meat_packing_token(self, railroad):
        """Attach the meat packing token; only legal on spaces with a meat value."""
        if self.meat_value == 0:
            raise ValueError("It is not legal to place the meat packing token on this space ({}).".format(self.cell))
        self.meat_token = MeatPackingToken(self.cell, railroad)

    def port_bonus(self, railroad):
        """Seaport bonus for ``railroad`` if it owns the token here, else 0."""
        return self.port_value if self.port_token and self.port_token.railroad == railroad else 0

    def meat_bonus(self, railroad):
        """Meat packing bonus for ``railroad`` if it owns the token here, else 0."""
        return self.meat_value if self.meat_token and self.meat_token.railroad == railroad else 0

    def paths(self, enter_from=None, railroad=None):
        """Return exit cells reachable from ``enter_from``, or every entry cell."""
        if enter_from:
            return self._paths[enter_from]
        else:
            return tuple(self._paths.keys())
class Chicago(PlacedTile):
    """The placed (upgraded) Chicago tile, with per-exit station branches."""

    @staticmethod
    def place(tile, exit_cell_to_station={}, port_value=None, meat_value=None):
        # Chicago is always placed in orientation 0.
        # NOTE(review): the mutable default dict would be shared across calls
        # if ever relied on; current callers always pass the old mapping.
        paths = PlacedTile.get_paths(CHICAGO_CELL, tile, 0)
        return Chicago(tile, exit_cell_to_station, paths, port_value, meat_value)

    def __init__(self, tile, exit_cell_to_station={}, paths={}, port_value=None, meat_value=None):
        super(Chicago, self).__init__("Chicago", CHICAGO_CELL, tile, list(exit_cell_to_station.values()), paths, port_value, meat_value)
        # Maps each exit cell to the station occupying that branch.
        self.exit_cell_to_station = exit_cell_to_station

    def paths(self, enter_from=None, railroad=None):
        """Paths through Chicago, filtered by station ownership when ``railroad`` is given."""
        paths = list(super(Chicago, self).paths(enter_from))
        if railroad:
            enter_from_station = self.exit_cell_to_station.get(enter_from)
            if enter_from_station:
                # Entering on a branch owned by another railroad: dead end.
                if enter_from_station.railroad != railroad:
                    paths = []
            else:
                if not enter_from:
                    # Starting in Chicago: the railroad may leave via its own
                    # station branch.
                    # NOTE(review): C5 is hard-coded as an always-available
                    # exit here -- confirm the intent against the 1846 map.
                    station = self.get_station(railroad.name)
                    paths = [self.get_station_exit_cell(station), Cell.from_coord("C5")] if station else []
                else:
                    # Filter out exits on branches owned by other railroads.
                    # NOTE(review): removing from `paths` while iterating it
                    # can skip elements -- verify this cannot drop a valid exit.
                    for exit in paths:
                        station = self.exit_cell_to_station.get(exit)
                        if station and station.railroad != railroad:
                            paths.remove(exit)
        return tuple(paths)

    def add_station(self, railroad, exit_cell):
        """Place a station for ``railroad`` on the branch facing ``exit_cell``."""
        if exit_cell not in self.paths():
            raise ValueError("Illegal exit cell for Chicago")
        station = super(Chicago, self).add_station(railroad)
        self.exit_cell_to_station[exit_cell] = station
        return station

    def get_station_exit_cell(self, user_station):
        """Return the exit cell whose branch holds ``user_station``; raises if absent."""
        for exit_cell, station in self.exit_cell_to_station.items():
            if station == user_station:
                return exit_cell
        raise ValueError("The requested station was not found: {}".format(user_station))
import json
import math
_TRAINS_FILENAME = "trains.json"
class Train:
    """A train, identified by how many stops it collects from and may visit."""

    @staticmethod
    def _get_name(collect, visit):
        """Default display name: "collect", or "collect / visit" when they differ."""
        return str(collect) if collect == visit else f"{collect} / {visit}"

    @staticmethod
    def create(name, collect, visit, phase):
        """Build a Train; a falsy collect means unlimited, visit defaults to collect."""
        collect = collect or math.inf
        visit = visit or collect
        return Train(name or Train._get_name(collect, visit), collect, visit, phase)

    def __init__(self, name, collect, visit, phase):
        self.name = name
        self.collect = collect
        self.visit = visit
        self.phase = phase

    def __str__(self):
        return self.name

    def __hash__(self):
        # Identity is the (collect, visit) pair, matching __eq__.
        return hash((self.collect, self.visit))

    def __eq__(self, other):
        if not isinstance(other, Train):
            return False
        return (self.collect, self.visit) == (other.collect, other.visit)
class TrainContainer(Train):
    """A lightweight stand-in used to match user-entered trains against the catalog.

    Carries only (collect, visit) so Train.__eq__/__hash__ can find the real
    Train; it deliberately skips Train.__init__ (no name or phase).
    """

    @staticmethod
    def from_string(train_str):
        """Parse strings like "4" or "4 / 6"; a non-numeric collect means unlimited."""
        head, *rest = train_str.split("/")
        try:
            collect = int(head.strip())
        except ValueError:
            collect = math.inf
        visit = int(rest[0].strip()) if rest else collect
        return TrainContainer(collect, visit)

    def __init__(self, collect, visit):
        self.collect = collect
        self.visit = visit
def convert(train_info, trains_str):
    """Convert a comma-separated train string into the matching Train objects.

    Entries with no match in ``train_info`` are silently skipped.
    """
    if not trains_str:
        return []
    matched = []
    for piece in trains_str.split(","):
        if not piece:
            continue
        candidate = TrainContainer.from_string(piece)
        if candidate in train_info:
            matched.append(train_info[train_info.index(candidate)])
    return matched
def load_train_info(game):
    """Load the game's full catalog of Train definitions from its trains data file."""
    with open(game.get_data_file(_TRAINS_FILENAME)) as trains_file:
        trains_json = json.load(trains_file)
    return [Train.create(info.get("name"), info["collect"], info.get("visit"), info["phase"]) for info in trains_json["trains"]]
import csv
import json
from routes18xx import trains, train_limits
from routes18xx.tokens import Station
from routes18xx.cell import Cell
from routes18xx import boardtile, placedtile
_RAILROADS_FILENAME = "railroads.json"
FIELDNAMES = ("name", "trains", "stations")
class Railroad(object):
    """An operating railroad company, with its trains and private companies."""

    @staticmethod
    def create(name, railroad_trains):
        """Alternate constructor mirroring the subclasses' create() methods."""
        return Railroad(name, railroad_trains)

    def __init__(self, name, railroad_trains):
        self.name = name
        self.trains = railroad_trains
        self._private_companies = []

    def add_private_company(self, name):
        """Record ownership of the named private company."""
        self._private_companies.append(name)

    def has_private_company(self, name):
        """True if this railroad owns the named private company."""
        return name in self._private_companies

    @property
    def is_removed(self):
        # Only RemovedRailroad (and its subclasses) report True.
        return False
class RemovedRailroad(Railroad):
    """A railroad removed from the game: it owns no trains and cannot hold privates."""

    @staticmethod
    def create(name):
        return RemovedRailroad(name, [])

    def add_private_company(self, name):
        raise ValueError(f"Cannot assign a private company to a removed railroad: {self.name}")

    def has_private_company(self, name):
        # Bug fix: the error message previously read "failroad".
        raise ValueError(f"A removed railroad cannot hold any private companies: {self.name}")

    @property
    def is_removed(self):
        return True
class ClosedRailroad(RemovedRailroad):
    """A railroad that closed during play.

    Behaves like a removed railroad (no trains, no privates, is_removed is
    True); callers distinguish the two by type, e.g. to skip placing a home
    station for a closed railroad.
    """
    @staticmethod
    def create(name):
        return ClosedRailroad(name, [])
def _split_station_entry(station_entry):
    """Split "coord", "coord: branch", or "coord: [b1 b2 ...]" into (coord, branch).

    Returns (coord, None) when no branch is given; otherwise branch is a tuple
    of branch coordinates.
    """
    if ':' not in station_entry:
        return station_entry, None
    coord, branch_str = station_entry.split(':')
    branch_str = branch_str.strip()
    if branch_str.startswith('[') and branch_str.endswith(']'):
        branch = tuple(part.strip() for part in branch_str[1:-1].split())
    else:
        branch = (branch_str,)
    return coord.strip(), branch
def _load_railroad_info(game):
    """Load the game's static railroad metadata (home cells, nicknames, etc.) as a dict."""
    with open(game.get_data_file(_RAILROADS_FILENAME)) as railroads_file:
        return json.load(railroads_file)
def load_from_csv(game, board, railroads_filepath):
    """Load railroads from a semicolon-delimited CSV of (name; trains; stations) rows."""
    with open(railroads_filepath, newline='') as railroads_file:
        # The DictReader is lazy, so load() must run while the file is open.
        return load(game, board, csv.DictReader(railroads_file, fieldnames=FIELDNAMES, delimiter=';', skipinitialspace=True))
def load(game, board, railroads_rows):
    """Build the railroads for a game from row dicts of (name, trains, stations).

    Also places home and listed stations on the board and wires up
    railroad-dependent board state (home/reserved markers, nicknames).
    """
    railroad_info = _load_railroad_info(game)
    train_info = trains.load_train_info(game)
    train_limit_info = train_limits.load_train_limits(game)
    # Materialize the rows: they are iterated twice (creation, then stations).
    railroad_rows_list = list(railroads_rows)
    railroads = {}
    for railroad_args in railroad_rows_list:
        name = railroad_args["name"]
        info = railroad_info.get(name, {})
        if not info:
            raise ValueError(f"Unrecognized railroad name: {name}")
        trains_str = (railroad_args.get("trains") or "").strip().lower()
        if trains_str == "removed":
            if not info.get("is_removable"):
                raise ValueError("Attempted to remove a non-removable railroad.")
            railroad = RemovedRailroad.create(railroad_args["name"])
        elif trains_str == "closed":
            if not game.rules.railroads_can_close:
                raise ValueError(f"Attempted to close a railroad, although railroads cannot close in {game.name}.")
            railroad = ClosedRailroad.create(railroad_args["name"])
        else:
            railroad_trains = trains.convert(train_info, trains_str)
            railroad = Railroad.create(railroad_args["name"], railroad_trains)
        if railroad.name in railroads:
            raise ValueError(f"Found multiple {railroad.name} definitions.")
        railroads[railroad.name] = railroad
    # Capturing the phase allows us to place stations
    game.capture_phase(railroads)
    # Now that we know the phase, check the train limits
    for name, railroad in railroads.items():
        train_limit_info.validate(game, railroad)
    # Place all home stations. This is done before placing other stations to
    # enforce a hierarchy of error messages.
    for name, railroad in railroads.items():
        info = railroad_info.get(name, {})
        if not isinstance(railroad, ClosedRailroad):
            board.place_station(game, info["home"], railroad)
    # Initializing parts of the board that depend on the railroads having been
    # created.
    for name, info in railroad_info.items():
        # Railroads which are in play.
        board.get_space(board.cell(info["home"])).home = name
        if name not in railroads or not isinstance(railroads[name], RemovedRailroad):
            for reserved_coord in info.get("reserved", []):
                board.get_space(board.cell(reserved_coord)).reserved = name
        if name in railroads:
            # Allow referring to the railroads in play by their nicknames.
            for nickname in info.get("nicknames", []):
                railroads[nickname] = railroads[name]
    for railroad_args in railroad_rows_list:
        name = railroad_args["name"]
        info = railroad_info.get(name, {})
        railroad = railroads[name]
        station_entries_str = railroad_args.get("stations")
        if station_entries_str:
            station_entries = [entry.strip() for entry in station_entries_str.split(",")]
            for entry in station_entries:
                coord, branch = _split_station_entry(entry)
                # The home station was already placed above.
                if coord and coord != info["home"]:
                    if isinstance(board.get_space(board.cell(coord)), (placedtile.SplitCity, boardtile.SplitCity)):
                        # Split cities need to know which branch holds the station.
                        if not branch:
                            raise ValueError(f"A split city ({coord}) is listed as a station for {railroad.name}, but no station branch was specified.")
                        board.place_split_station(game, coord, railroad, branch)
                    else:
                        board.place_station(game, coord, railroad)
    return railroads
import json
BASE_BOARD_FILENAME = "base-board.json"
class Cell(object):
    """A hex on the board: row letter plus column number, with lazily-wired neighbors."""

    def __init__(self, row, col):
        self.__row = row
        self.__col = col
        self.__neighbors = {}

    @property
    def neighbors(self):
        """Adjacent cells keyed by hex side; empty until wired by load()."""
        return self.__neighbors

    @neighbors.setter
    def neighbors(self, neighbors_dict):
        # Write-once: assignments after the first non-empty one are ignored,
        # so the wiring done at load time cannot be clobbered.
        if not self.__neighbors:
            self.__neighbors = neighbors_dict

    def __hash__(self):
        return hash(str(self))

    def __eq__(self, other):
        return isinstance(other, Cell) and (self.__row, self.__col) == (other.__row, other.__col)

    def __gt__(self, other):
        # Row first, then column -- exactly lexicographic tuple order.
        return (self.__row, self.__col) > (other.__row, other.__col)

    def __lt__(self, other):
        return (self.__row, self.__col) < (other.__row, other.__col)

    def __ge__(self, other):
        return self > other or self == other

    def __le__(self, other):
        return self < other or self == other

    def __str__(self):
        return f"{self.__row}{self.__col}"

    def __repr__(self):
        return str(self)
def load(game):
    """Build the cell grid for a game from its base board's "boundaries" section.

    Returns a dict of row letter -> {column number -> Cell}, with each cell's
    neighbors dict populated.
    """
    cell_grid = {}
    with open(game.get_data_file(BASE_BOARD_FILENAME)) as board_file:
        boundaries_json = json.load(board_file)["boundaries"]
    for row, col_ranges in boundaries_json.items():
        cell_grid[row] = {}
        for col_range in col_ranges:
            if isinstance(col_range, int):
                # A bare int denotes a single column.
                cell_grid[row][col_range] = Cell(row, col_range)
            elif isinstance(col_range, list):
                # A [start, end] pair is an inclusive range; step 2 because
                # adjacent columns within a hex row differ by 2.
                for col in range(col_range[0], col_range[1] + 1, 2):
                    cell_grid[row][col] = Cell(row, col)
    for row, cols in cell_grid.items():
        for col, cell in cols.items():
            # Sides 0-5: neighbors in adjacent rows are offset by one column;
            # same-row neighbors are offset by two. Off-map sides map to None.
            cell.neighbors = {
                0: cell_grid.get(chr(ord(row) + 1), {}).get(col - 1),
                1: cell_grid.get(row, {}).get(col - 2),
                2: cell_grid.get(chr(ord(row) - 1), {}).get(col - 1),
                3: cell_grid.get(chr(ord(row) - 1), {}).get(col + 1),
                4: cell_grid.get(row, {}).get(col + 2),
                5: cell_grid.get(chr(ord(row) + 1), {}).get(col + 1)
            }
    return cell_grid
import collections
import itertools
import json
from routes18xx.cell import Cell
from routes18xx.tokens import Station
BASE_BOARD_FILENAME = "base-board.json"
class BoardSpace(object):
    """Base class for all spaces on an 18xx board (track, towns, cities, termini)."""

    @staticmethod
    def _calc_paths(cell, edges):
        """Translate an edge spec into a cell-to-cell path dict.

        Each entry in ``edges`` is either a list of sides (all pairwise
        connected in both directions) or a single side (a dead-end entrance).
        """
        paths = collections.defaultdict(list)
        for exits in edges:
            if isinstance(exits, list):
                for path in itertools.permutations(exits, 2):
                    paths[cell.neighbors[path[0]]].append(cell.neighbors[path[1]])
            else:
                paths[cell.neighbors[exits]] = []
        return paths

    def __init__(self, name, nickname, cell, upgrade_level, paths, upgrade_attrs=[], properties={}):
        # NOTE(review): mutable defaults for upgrade_attrs/properties are safe
        # only while they are never mutated; confirm before changing that.
        self.name = name or str(cell)
        self.nickname = nickname or self.name
        self.cell = cell
        self.upgrade_level = None if upgrade_level == 4 else upgrade_level  # A built-in upgrade_level 4 tile is similar to a terminus
        self._paths = paths
        self.tokens = []
        # Convenience type flags derived from the concrete subclass.
        self.is_city = isinstance(self, City)
        self.is_town = isinstance(self, Town)
        self.is_terminus = isinstance(self, Terminus)
        self.is_stop = self.is_city or self.is_terminus or self.is_town
        # Normalize to a sorted list of sorted lists so attr comparisons are
        # order-insensitive; an empty spec becomes [[]].
        self.upgrade_attrs = sorted(sorted(attr) if isinstance(attr, list) else [attr] for attr in upgrade_attrs) or [[]]
        self.properties = properties

    def paths(self, enter_from=None, railroad=None):
        """Return exit cells reachable from ``enter_from``, or every entry cell.

        Raises ValueError when asked on behalf of a removed railroad.
        """
        if railroad and railroad.is_removed:
            raise ValueError(f"A removed railroad cannot run routes: {railroad.name}")
        if enter_from:
            return self._paths[enter_from]
        else:
            return tuple(self._paths.keys())

    def place_token(self, railroad, TokenType):
        """Place a token of ``TokenType`` for ``railroad`` on this space."""
        self.tokens.append(TokenType.place(self.cell, railroad, self.properties))

    def passable(self, enter_cell, railroad):
        # Base spaces never block a route; subclasses may override.
        return True
class Track(BoardSpace):
    """Plain track: connects neighbors, is not a stop, and has no value."""

    @staticmethod
    def create(cell, edges, upgrade_level=None):
        """Build a Track whose paths are derived from the given edge spec."""
        return Track(cell, upgrade_level, BoardSpace._calc_paths(cell, edges))

    def __init__(self, cell, upgrade_level, paths):
        # Track spaces are anonymous: no name or nickname.
        super().__init__(None, None, cell, upgrade_level, paths)

    def value(self, game, railroad, train):
        """Track contributes nothing to a route's value."""
        return 0
class Town(BoardSpace):
    """A town space: a small stop with a fixed value and no station capacity."""

    @staticmethod
    def create(cell, name, nickname=None, upgrade_level=0, edges=None, value=0, upgrade_attrs=None, properties=None):
        # Fix: avoid mutable default arguments; normalize to fresh containers.
        paths = BoardSpace._calc_paths(cell, [] if edges is None else edges)
        return Town(name, nickname, cell, upgrade_level, paths, value,
                    [] if upgrade_attrs is None else upgrade_attrs,
                    {} if properties is None else properties)

    def __init__(self, name, nickname, cell, upgrade_level, paths, value, upgrade_attrs=None, properties=None):
        super().__init__(name, nickname, cell, upgrade_level, paths,
                         [] if upgrade_attrs is None else upgrade_attrs,
                         {} if properties is None else properties)
        self._value = value

    def value(self, game, railroad, train):
        """Base town value plus the value of any tokens placed here."""
        return self._value + sum(token.value(game, railroad) for token in self.tokens)
class City(BoardSpace):
    """A city space: a stop that can hold railroad stations up to its capacity."""

    @staticmethod
    def create(cell, name, nickname=None, upgrade_level=0, edges=None, value=0, capacity=0, upgrade_attrs=None, properties=None):
        # Fix: avoid mutable default arguments; normalize to fresh containers.
        edges = [] if edges is None else edges
        upgrade_attrs = [] if upgrade_attrs is None else upgrade_attrs
        properties = {} if properties is None else properties
        paths = BoardSpace._calc_paths(cell, edges)
        # A dict capacity means the city is split into separately-capped branches.
        if isinstance(capacity, dict):
            return SplitCity.create(name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs, properties)
        else:
            return City(name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs, properties)

    def __init__(self, name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs=None, properties=None):
        super().__init__(name, nickname, cell, upgrade_level, paths,
                         [] if upgrade_attrs is None else upgrade_attrs,
                         {} if properties is None else properties)
        self._value = value
        self.capacity = capacity
        self._stations = []
        self.home = None      # name of the railroad whose home city this is, if any
        self.reserved = None  # name of the railroad holding a reserved slot, if any

    @property
    def stations(self):
        return tuple(self._stations)

    def value(self, game, railroad, train):
        """Base city value plus the value of any tokens placed here."""
        return self._value + sum(token.value(game, railroad) for token in self.tokens)

    def add_station(self, game, railroad):
        """Place a station for railroad, enforcing capacity, home, and reservation rules.

        Raises:
            ValueError: if the railroad already has a station here, the city is
                full, or the last open slot is held for the home/reserved railroad.
        """
        if self.has_station(railroad.name):
            raise ValueError(f"{railroad.name} already has a station in {self.name} ({self.cell}).")
        if len(self.stations) >= self.capacity:
            raise ValueError(f"{self.name} ({self.cell}) cannot hold any more stations.")
        if self.home and self.home != railroad.name and not self.has_station(self.home) and len(self.stations) + 1 >= self.capacity:
            raise ValueError(f"{self.name} ({self.cell}) must leave a slot for {self.home}, its home railroad.")
        if game.rules.stations_reserved_until and game.compare_phases(game.rules.stations_reserved_until) < 0:
            if self.reserved and self.reserved != railroad.name and not self.has_station(self.reserved) and len(self.stations) + 1 >= self.capacity:
                raise ValueError(f"{self.name} ({self.cell}) has no open slot for its {self.reserved} reservation.")
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return this city's station for railroad_name, or None."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        return bool(self.get_station(railroad_name))

    def passable(self, enter_cell, railroad):
        # Passable while an open slot remains, or if the railroad has its own station.
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)
class SplitCity(City):
    """A city whose station slots are divided among separately-capped branches."""

    @staticmethod
    def _branches_with_unique_exits(branch_dict):
        # Indicating a branch on a split city can be done by a single unique
        # neighbor, if such a neighbor exists. This determines what they are,
        # then adds them to the branch keys.
        branch_to_sides = {branch_key: tuple(set(itertools.chain.from_iterable(branch_key))) for branch_key in branch_dict}
        unique_exit_sides = {}
        for key, sides in branch_to_sides.items():
            # Get all the neighbors that appear in branches other than the
            # current one, and remove them from the current branch. If any
            # remain, they must be unique.
            unique_exits = set(sides) - set(itertools.chain.from_iterable(set(branch_to_sides.values()) - {sides}))
            unique_exit_sides[key] = {(side, ) for side in unique_exits}
        new_branch_dict = {}
        for old_key, value in branch_dict.items():
            new_key = tuple(set(old_key).union(unique_exit_sides[old_key]))
            new_branch_dict[new_key] = value
        return new_branch_dict

    @staticmethod
    def create(name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs, properties):
        """Build a SplitCity, expanding the JSON capacity keys into path tuples."""
        split_city_capacity = {}
        for branch_paths_str, branch_capacity in capacity.items():
            branch_path_dict = City._calc_paths(cell, json.loads(branch_paths_str))
            branch_path_list = []
            for entrance, exits in branch_path_dict.items():
                if not exits:
                    branch_paths = [(entrance, )]
                else:
                    branch_paths = [(entrance, exit) for exit in exits]
                branch_path_list.extend(tuple(branch_paths))
            split_city_capacity[tuple(branch_path_list)] = branch_capacity
        split_city_capacity = SplitCity._branches_with_unique_exits(split_city_capacity)
        return SplitCity(name, nickname, cell, upgrade_level, paths, value, split_city_capacity, upgrade_attrs, properties)

    def __init__(self, name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs, properties):
        super().__init__(name, nickname, cell, upgrade_level, paths, value, capacity, upgrade_attrs, properties)
        # Track which stations sit on which branch; capacity is per-branch here.
        self.branch_to_station = {key: [] for key in self.capacity.keys()}

    def add_station(self, game, railroad, branch):
        """Place a station on the branch containing the given path.

        Raises:
            ValueError: if the railroad already has a station here, the branch
                does not exist, or the branch is full.
        """
        if self.has_station(railroad.name):
            raise ValueError(f"{railroad.name} already has a station in {self.name} ({self.cell}).")
        split_branch = tuple()
        # Fix: iterate keys only; the capacity values are not needed to find the branch.
        for branch_key in self.capacity:
            if branch in branch_key:
                split_branch = branch_key
                break
        else:
            raise ValueError(f"Attempted to add a station to a non-existant branch of a split city: {branch}")
        if self.capacity[split_branch] <= len(self.branch_to_station[split_branch]):
            raise ValueError(f"The {branch} branch of {self.name} ({self.cell}) cannot hold any more stations.")
        station = Station(self.cell, railroad)
        self._stations.append(station)
        self.branch_to_station[split_branch].append(station)
        return station

    def passable(self, enter_cell, railroad):
        """Passable only via a branch with an open slot or the railroad's own station."""
        for branch, stations in self.branch_to_station.items():
            for path in branch:
                if enter_cell in path:
                    if len(stations) < self.capacity[branch]:
                        return True
                    for station in stations:
                        if station.railroad == railroad:
                            return True
        return False

    def get_station_branch(self, user_station):
        """Return the branch holding user_station.

        Raises:
            ValueError: if the station is not on any branch.
        """
        for branch, stations in self.branch_to_station.items():
            if user_station in stations:
                return branch
        raise ValueError(f"The requested station was not found: {user_station}")
class Terminus(BoardSpace):
    """An off-board terminal space: routes may end here but never pass through."""

    @staticmethod
    def create(cell, name, edges, values, nickname=None, is_east=False, is_west=False, properties=None):
        # Fix: avoid a shared mutable default for properties.
        properties = {} if properties is None else properties
        # Termini are entrances only: each listed side maps to no exits.
        paths = {cell.neighbors[side]: [] for side in edges}
        if is_east:
            return EasternTerminus(name, nickname, cell, paths, values, properties)
        elif is_west:
            return WesternTerminus(name, nickname, cell, paths, values, properties)
        else:
            return Terminus(name, nickname, cell, paths, values, properties)

    def __init__(self, name, nickname, cell, paths, value_dict, properties):
        super().__init__(name, nickname, cell, None, paths, properties=properties)
        # Value may be keyed by game phase, with optional per-train overrides.
        self.phase_value = dict(value_dict.get("phase", {}))
        self.train_value = dict(value_dict.get("train", {}))

    def value(self, game, railroad, train):
        """Look up the terminus value: per-train override first, else best phase match.

        Raises:
            ValueError: if no phase value applies to the current game phase.
        """
        if train.name in self.train_value:
            base_value = self.train_value[train.name]
        else:
            # Walk phases from highest to lowest and take the first one reached.
            for phase, value in sorted(self.phase_value.items(), reverse=True):
                if game.compare_phases(phase) >= 0:
                    base_value = value
                    break
            else:
                raise ValueError(f"No value could be found for the provided phase: {game.current_phase}")
        return base_value + sum(token.value(game, railroad) for token in self.tokens)

    def passable(self, enter_cell, railroad):
        # Routes always terminate at a terminus.
        return False
class EasternTerminus(Terminus):
    """An east-edge terminus that pays a bonus on an east-to-west run."""

    def __init__(self, name, nickname, cell, paths, value_dict, properties):
        super().__init__(name, nickname, cell, paths, value_dict, properties)
        self.e2w_bonus = value_dict["e2w-bonus"]

    def value(self, game, railroad, train, east_to_west=False):
        """Base terminus value, plus the east-to-west bonus when applicable."""
        base_value = super().value(game, railroad, train)
        if east_to_west:
            base_value += self.e2w_bonus
        return base_value
class WesternTerminus(Terminus):
    """A west-edge terminus that pays a bonus on an east-to-west run."""

    def __init__(self, name, nickname, cell, paths, value_dict, properties):
        super().__init__(name, nickname, cell, paths, value_dict, properties)
        self.e2w_bonus = value_dict["e2w-bonus"]

    def value(self, game, railroad, train, east_to_west=False):
        """Base terminus value, plus the east-to-west bonus when applicable."""
        base_value = super().value(game, railroad, train)
        if east_to_west:
            base_value += self.e2w_bonus
        return base_value
def load(game, board):
    """Instantiate every pre-printed board space described in the base board file."""
    with open(game.get_data_file(BASE_BOARD_FILENAME)) as board_file:
        board_json = json.load(board_file)

    # Preserve the original ordering: tracks, then towns, cities, and termini.
    space_sections = (("tracks", Track), ("towns", Town), ("cities", City), ("termini", Terminus))
    board_tiles = []
    for section, space_class in space_sections:
        for coord, space_args in board_json.get(section, {}).items():
            board_tiles.append(space_class.create(board.cell(coord), **space_args))
    return board_tiles
import collections
import itertools
from routes18xx import boardtile, cell, games
from routes18xx.placedtile import PlacedTile, SplitCity
from routes18xx.tokens import Station
class Board(object):
    """The 18xx game board: fixed cells, pre-printed spaces, and tiles placed in play."""

    @staticmethod
    def load(game):
        """Create a Board for the game, loading its cells and pre-printed spaces."""
        cells = cell.load(game)
        board = Board(game, cells)
        board._board_tiles = {board_tile.cell: board_tile for board_tile in boardtile.load(game, board)}
        return board

    def __init__(self, game, cells):
        self.game = game
        self._cells = cells        # {row letter: {column number: Cell}}
        self._board_tiles = {}     # pre-printed spaces, keyed by Cell
        self._placed_tiles = {}    # tiles placed during play, keyed by Cell

    def cell(self, coord):
        """Return the Cell at a coordinate such as "D6".

        Raises:
            ValueError: if the coordinate is malformed or not on the board.
        """
        if len(coord) < 2 or len(coord) > 3:
            raise ValueError(f"Provided invalid coord: {coord}")
        row, col = coord[0], int(coord[1:])
        if row not in self._cells or col not in self._cells[row]:
            raise ValueError(f"The coordinate provided is not legal: {coord}")
        return self._cells[row][col]

    @property
    def cells(self):
        """Yield every cell on the board."""
        for row, columns in self._cells.items():
            for column, board_cell in columns.items():
                yield board_cell

    def place_tile(self, coord, tile, orientation):
        """Place (or upgrade to) a tile at coord in the given orientation (0-5).

        Raises:
            ValueError: if the orientation is out of range or any placement
                validation fails.
        """
        cell = self.cell(coord)
        if int(orientation) not in range(0, 6):
            raise ValueError(f"Orientation out of range. Expected between 0 and 5, inclusive. Got {orientation}.")
        old_tile = self.get_space(cell)
        self._validate_place_tile_space_type(tile, old_tile)
        self._validate_place_tile_neighbors(cell, tile, orientation)
        if old_tile:
            self._validate_place_tile_upgrade(old_tile, cell, tile, orientation)
        self._placed_tiles[cell] = PlacedTile.place(cell, tile, orientation, old_tile)

    def place_station(self, game, coord, railroad):
        """Place a railroad station on the (non-split) city at coord."""
        cell = self.cell(coord)
        tile = self.get_space(cell)
        if not tile.is_city:
            raise ValueError(f"{cell} is not a city, so it cannot have a station.")
        if isinstance(tile, (boardtile.SplitCity, SplitCity)):
            raise ValueError(f"Since {coord} is a split city tile, please use Board.place_split_station().")
        tile.add_station(game, railroad)

    def place_split_station(self, game, coord, railroad, branch):
        """Place a station on a specific branch of the split city at coord."""
        cell = self.cell(coord)
        space = self.get_space(cell)
        if not space.is_city:
            raise ValueError(f"{cell} is not a city, so it cannot have a station.")
        branch_cells = tuple([self.cell(coord) for coord in branch])
        space.add_station(game, railroad, branch_cells)

    def place_token(self, coord, railroad, TokenType):
        """Place a token of TokenType for railroad on the space at coord."""
        if railroad.is_removed:
            raise ValueError(f"A removed railroad cannot place a token: {railroad.name}")
        current_cell = self.cell(coord)
        self.get_space(current_cell).place_token(railroad, TokenType)

    def stations(self, railroad_name=None):
        """Return all stations on the board, optionally filtered by railroad name."""
        all_tiles = list(self._placed_tiles.values()) + list(self._board_tiles.values())
        all_stations = itertools.chain.from_iterable([tile.stations for tile in all_tiles if isinstance(tile, (boardtile.City, PlacedTile))])
        if railroad_name:
            return tuple([station for station in all_stations if station.railroad.name == railroad_name])
        else:
            return tuple(all_stations)

    def get_space(self, cell):
        """Return the active space at cell: a placed tile shadows the pre-printed one."""
        return self._placed_tiles.get(cell) or self._board_tiles.get(cell)

    def validate(self):
        """Run all board-wide consistency checks; raises ValueError on failure."""
        self._validate_tiles_connected()
        self._validate_tiles_upgrade_level()
        self._validate_tiles_quantity()

    def _validate_tiles_connected(self):
        """Every placed tile must connect to a neighbor or hold a station."""
        invalid = []
        for cell, placed_tile in sorted(self._placed_tiles.items()):
            if not placed_tile.stations:
                for neighbor_cell in placed_tile.paths():
                    neighbor = self.get_space(neighbor_cell)
                    if neighbor and cell in neighbor.paths():
                        break
                else:
                    invalid.append(str(cell))
        if invalid:
            raise ValueError(f"Tiles at the following spots have no neighbors and no stations: {', '.join(invalid)}")

    def _validate_tiles_upgrade_level(self):
        """Every placed tile's upgrade level must be allowed in the current phase."""
        invalid = []
        for cell, placed_tile in sorted(self._placed_tiles.items()):
            if self.game.compare_phases(self.game.upgrade_phases[placed_tile.upgrade_level]) < 0:
                invalid.append(str(cell))
        if invalid:
            raise ValueError(f"Tiles at the following spots cannot be placed until a later phase: {', '.join(invalid)}")

    def _validate_tiles_quantity(self):
        """No tile may appear on the board more times than its supply allows."""
        tile_count = collections.Counter([placed.tile for placed in self._placed_tiles.values()])
        invalid = []
        for tile, count in tile_count.items():
            if tile.quantity and tile.quantity < count:
                # Bug fix: collect every over-used tile; the original assigned
                # `invalid = [tile.id]`, overwriting earlier offenders.
                invalid.append(tile.id)
        if invalid:
            raise ValueError(f"Found too many of the following tiles on the board: {', '.join(invalid)}")

    def _validate_place_tile_space_type(self, tile, old_tile):
        """The new tile's type (city/town/track) must match the space it covers."""
        if old_tile:
            if old_tile.is_terminus:
                raise ValueError("Cannot upgrade the terminus.")
            elif old_tile.is_city != tile.is_city:
                raise ValueError("A city tile must be placed on a city board space.")
            elif old_tile.is_town != tile.is_town:
                raise ValueError("A town tile must be placed on a town board space.")
            elif tile.upgrade_attrs not in old_tile.upgrade_attrs:
                old_tile_type = " OR ".join(str(upgrade_attr) for upgrade_attr in old_tile.upgrade_attrs)
                raise ValueError(f"Tried to upgrade to a mismatched type. Expected: {old_tile_type}. Got: {tile.upgrade_attrs}.")
        else:
            if tile.is_stop:
                raise ValueError("Tried to place a non-track tile on a track space.")

    def _validate_place_tile_neighbors(self, cell, tile, orientation):
        """The tile must not run into the closed side of a fixed (gray/terminus) neighbor."""
        for neighbor in PlacedTile.get_paths(cell, tile, orientation):
            neighbor_space = self.get_space(neighbor)
            if neighbor_space and neighbor_space.upgrade_level is None and cell not in neighbor_space.paths():
                tile_type = "terminus" if neighbor_space.is_terminus else "pre-printed gray tile"
                raise ValueError(
                    f"Placing tile {tile.id} on {cell} in orientation {orientation} runs into the side of the {tile_type} at {neighbor_space.cell}.")

    def _validate_place_tile_upgrade(self, old_tile, cell, new_tile, orientation):
        """An upgrade must increase the level and preserve every existing path."""
        if old_tile:
            if old_tile.upgrade_level is None:
                raise ValueError(f"{cell} cannot be upgraded.")
            elif old_tile.upgrade_level >= new_tile.upgrade_level:
                raise ValueError(f"{cell}: Going from upgrade level {old_tile.upgrade_level} to {new_tile.upgrade_level} is not an upgrade.")
        # Hoisted out of the loop: the new tile's paths do not depend on old_start.
        new_paths = tuple([(start, end) for start, ends in PlacedTile.get_paths(cell, new_tile, orientation).items() for end in ends])
        for old_start, old_ends in old_tile._paths.items():
            old_paths = tuple([(old_start, end) for end in old_ends])
            if not all(old_path in new_paths for old_path in old_paths):
                raise ValueError(f"The new tile placed on {cell} does not preserve all the old paths.")
import collections
import itertools
from routes18xx import boardtile
from routes18xx.cell import Cell
from routes18xx.tokens import Station
class PlacedTile(object):
    """A tile laid on the board during play, with its rotation applied."""

    @staticmethod
    def _rotate(side, orientation):
        """Map a tile-relative side number to a board side, given the rotation."""
        # ((side num) + (number of times rotated)) mod (number of sides)
        return (side + int(orientation)) % 6

    @staticmethod
    def get_paths(cell, tile, orientation):
        """Translate the tile's side-number paths into cell-to-cell paths.

        Raises:
            ValueError: if any rotated path points off the map.
        """
        paths = {}
        for start, ends in tile.paths.items():
            start_cell = cell.neighbors[PlacedTile._rotate(start, orientation)]
            paths[start_cell] = tuple([cell.neighbors[PlacedTile._rotate(end, orientation)] for end in ends])
        if None in paths:
            raise ValueError(f"Placing tile {tile.id} in orientation {orientation} at {cell} goes off-map.")
        return paths

    @staticmethod
    def place(cell, tile, orientation, old_space=None):
        """Create a placed tile, preserving the covered space's identity, if any."""
        if isinstance(old_space, (boardtile.SplitCity, SplitCity)):
            return SplitCity.place(cell, tile, orientation, old_space)
        name = old_space.name if old_space else None
        nickname = old_space.nickname if old_space else None
        properties = old_space.properties if old_space else {}
        paths = PlacedTile.get_paths(cell, tile, orientation)
        return PlacedTile(name, nickname, cell, tile, paths, properties)

    def __init__(self, name, nickname, cell, tile, paths=None, properties=None):
        self.name = name or str(cell)
        self.nickname = nickname or self.name
        self.cell = cell
        self.tile = tile
        self.capacity = tile.capacity
        # Fix: avoid mutable default arguments; use a fresh container per instance.
        self._paths = paths if paths is not None else {}
        self.properties = properties if properties is not None else {}
        self._stations = []
        self.tokens = []
        self.upgrade_level = self.tile.upgrade_level
        self.is_city = self.tile.is_city
        self.is_town = self.tile.is_town
        self.is_terminus = self.tile.is_terminus
        self.is_stop = self.tile.is_stop
        self.upgrade_attrs = self.tile.upgrade_attrs
        self.home = None      # name of the railroad whose home city this is, if any
        self.reserved = None  # name of the railroad holding a reserved slot, if any

    def value(self, game, railroad, train):
        """Tile value plus the value of any tokens placed here."""
        return self.tile.value + sum(token.value(game, railroad) for token in self.tokens)

    def passable(self, enter_cell, railroad):
        """Whether a route may continue through this tile."""
        if not self.is_stop or self.is_town:
            return True
        if self.is_terminus:
            return False
        # Cities: passable while a slot is open, or if railroad has a station here.
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)

    @property
    def stations(self):
        return tuple(self._stations)

    def add_station(self, game, railroad):
        """Place a station, enforcing capacity, home, and reservation rules.

        Raises:
            ValueError: if the railroad already has a station here, the city is
                full, or the last open slot is held for the home/reserved railroad.
        """
        if self.has_station(railroad.name):
            raise ValueError(f"{railroad.name} already has a station in {self.name} ({self.cell}).")
        if len(self.stations) >= self.capacity:
            raise ValueError(f"{self.name} ({self.cell}) cannot hold any more stations.")
        if self.home and self.home != railroad.name and not self.has_station(self.home) and len(self.stations) + 1 >= self.capacity:
            raise ValueError(f"{self.name} ({self.cell}) must leave a slot for {self.home}, its home railroad.")
        if game.rules.stations_reserved_until and game.compare_phases(game.rules.stations_reserved_until) < 0:
            if self.reserved and self.reserved != railroad.name and not self.has_station(self.reserved) and len(self.stations) + 1 >= self.capacity:
                raise ValueError(f"{self.name} ({self.cell}) has no open slot for its {self.reserved} reservation.")
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return this tile's station for railroad_name, or None."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        return bool(self.get_station(railroad_name))

    def place_token(self, railroad, TokenType):
        """Place a token of TokenType for railroad on this tile."""
        self.tokens.append(TokenType.place(self.cell, railroad, self.properties))

    def paths(self, enter_from=None, railroad=None):
        """Return exit cells reachable from enter_from, or all entry cells.

        Raises:
            ValueError: if a removed railroad attempts to run routes.
        """
        if railroad and railroad.is_removed:
            raise ValueError(f"A removed railroad cannot run routes: {railroad.name}")
        if enter_from:
            return self._paths[enter_from]
        else:
            return tuple(self._paths.keys())
class SplitCity(PlacedTile):
    """A placed city tile whose station slots are split among separate branches."""

    @staticmethod
    def _branches_with_unique_exits(branch_dict):
        # Indicating a branch on a split city can be done by a single unique
        # neighbor, if such a neighbor exists. This determines what they are,
        # then adds them to the branch keys.
        branch_to_cells = {branch_key: tuple(set(itertools.chain.from_iterable(branch_key))) for branch_key in branch_dict}
        unique_exit_cells = {}
        for key, cells in branch_to_cells.items():
            # Get all the neighbors that appear in branches other than the
            # current one, and remove them from the current branch. If any
            # remain, they must be unique.
            unique_exits = set(cells) - set(itertools.chain.from_iterable(set(branch_to_cells.values()) - {cells}))
            unique_exit_cells[key] = {(cell, ) for cell in unique_exits}
        new_branch_dict = {}
        for old_key, value in branch_dict.items():
            new_key = tuple(set(old_key).union(unique_exit_cells[old_key]))
            new_branch_dict[new_key] = value
        return new_branch_dict

    @staticmethod
    def _map_branches_to_cells(cell, orientation, raw_branch_dict):
        """Convert tile-relative branch side numbers into neighbor-cell tuples."""
        branch_dict = {}
        # Tiles indicate their neighbors by side number relative to upright.
        # Once placed, given the placement orientation, we need to know their
        # neighboring coordinates.
        for raw_branch, value in raw_branch_dict.items():
            branch_paths = []
            for path in raw_branch:
                path_cells = []
                for side in path:
                    rotated_side = int(orientation) if isinstance(side, Cell) else PlacedTile._rotate(side, orientation)
                    path_cells.append(cell.neighbors[rotated_side])
                branch_paths.append(tuple(path_cells))
            branch_dict[tuple(branch_paths)] = value
        return SplitCity._branches_with_unique_exits(branch_dict)

    @staticmethod
    def place(cell, tile, orientation, old_space=None):
        """Create a placed split-city tile, preserving the old space's identity."""
        name = old_space.name if old_space else None
        nickname = old_space.nickname if old_space else None
        properties = old_space.properties if old_space else {}
        paths = PlacedTile.get_paths(cell, tile, orientation)
        return SplitCity(name, nickname, cell, tile, orientation, paths, properties)

    def __init__(self, name, nickname, cell, tile, orientation, paths=None, properties=None):
        # Fix: avoid mutable default arguments; normalize before delegating.
        super().__init__(name, nickname, cell, tile,
                         paths if paths is not None else {},
                         properties if properties is not None else {})
        self.capacity = SplitCity._map_branches_to_cells(cell, orientation, self.capacity)
        self.branch_to_station = {key: [] for key in self.capacity.keys()}

    def add_station(self, game, railroad, branch):
        """Place a station on the branch containing the given path.

        Raises:
            ValueError: if the railroad already has a station here, the branch
                does not exist, or the branch is full.
        """
        if self.has_station(railroad.name):
            raise ValueError(f"{railroad.name} already has a station in {self.name} ({self.cell}).")
        split_branch = tuple()
        # Fix: iterate keys only; the capacity values are not needed here.
        for branch_key in self.capacity:
            if branch in branch_key:
                split_branch = branch_key
                break
        else:
            raise ValueError(f"Attempted to add a station to a non-existant branch of a split city: {branch}")
        if self.capacity[split_branch] <= len(self.branch_to_station[split_branch]):
            raise ValueError(f"The {branch} branch of {self.name} ({self.cell}) cannot hold any more stations.")
        station = Station(self.cell, railroad)
        self._stations.append(station)
        self.branch_to_station[split_branch].append(station)
        return station

    def passable(self, enter_cell, railroad):
        """Passable only via a branch with an open slot or the railroad's own station."""
        for branch, stations in self.branch_to_station.items():
            for path in branch:
                if enter_cell in path:
                    if len(stations) < self.capacity[branch]:
                        return True
                    for station in stations:
                        if station.railroad == railroad:
                            return True
        return False

    def get_station_branch(self, user_station):
        """Return the branch holding user_station.

        Raises:
            ValueError: if the station is not on any branch.
        """
        for branch, stations in self.branch_to_station.items():
            if user_station in stations:
                return branch
        raise ValueError(f"The requested station was not found: {user_station}")
import csv
from routes18xx.railroads import Railroad
from routes18xx.games.routes1846.tokens import MeatPackingToken, SteamboatToken
# Column names for the private-companies CSV consumed by load_from_csv.
FIELDNAMES = ("name", "owner", "coord")

# Dispatch table from private company name to its handler. Lambdas are used
# because the _handle_* functions are defined later in this module, so their
# names must be resolved lazily at call time.
COMPANIES = {
    "Steamboat Company": lambda game, board, railroads, kwargs: _handle_steamboat_company(game, board, railroads, kwargs),
    "Meat Packing Company": lambda game, board, railroads, kwargs: _handle_meat_packing_company(game, board, railroads, kwargs),
    "Mail Contract": lambda game, board, railroads, kwargs: _handle_mail_contract(game, board, railroads, kwargs),
    "Big 4": lambda game, board, railroads, kwargs: _handle_independent_railroad(game, board, railroads, "Big 4", kwargs),
    "Michigan Southern": lambda game, board, railroads, kwargs: _handle_independent_railroad(game, board, railroads, "Michigan Southern", kwargs)
}

# Home cities of the two independent railroads.
HOME_CITIES = {
    "Big 4": "G9",
    "Michigan Southern": "C15"
}

# Board coordinates associated with each private company (its token COORDS or
# the independent railroad's home city).
PRIVATE_COMPANY_COORDS = {
    "Steamboat Company": SteamboatToken.COORDS,
    "Meat Packing Company": MeatPackingToken.COORDS,
    "Big 4": [HOME_CITIES["Big 4"]],
    "Michigan Southern": [HOME_CITIES["Michigan Southern"]]
}

# Default coordinate per company, where one exists (the independents' homes).
PRIVATE_COMPANY_DEFAULT_COORDS = {
    "Big 4": HOME_CITIES["Big 4"],
    "Michigan Southern": HOME_CITIES["Michigan Southern"]
}
def _handle_steamboat_company(game, board, railroads, kwargs):
owner = kwargs.get("owner")
coord = kwargs["coord"]
if not owner or not coord:
return
if owner not in railroads:
raise ValueError(f"Assigned the Steamboat Company to an unrecognized or unfounded railroad: {owner}")
board.place_token(coord, railroads[owner], SteamboatToken)
railroads[owner].add_private_company("Steamboat Company")
def _handle_meat_packing_company(game, board, railroads, kwargs):
owner = kwargs.get("owner")
coord = kwargs["coord"]
if not owner or not coord:
return
if owner not in railroads:
raise ValueError(f"Assigned the Meat Packing Company to an unrecognized or unfounded railroad: {owner}")
board.place_token(coord, railroads[owner], MeatPackingToken)
railroads[owner].add_private_company("Meat Packing Company")
def _handle_mail_contract(game, board, railroads, kwargs):
owner = kwargs.get("owner")
if not owner:
return
if owner not in railroads:
raise ValueError(f"Assigned the Mail Contract to an unrecognized or unfounded railroad: {owner}")
railroads[owner].add_private_company("Mail Contract")
def _handle_independent_railroad(game, board, railroads, name, kwargs):
    """Place the independent railroad's home station for its owner, if assigned.

    When unowned, the independent still blocks its home city with a placeholder
    2-train railroad until phase 3.

    Raises:
        ValueError: if the owner is unknown, unfounded, or removed.
    """
    home_city = HOME_CITIES[name]
    owner = kwargs.get("owner")
    if not owner:
        if game.compare_phases("3") < 0:
            board.place_station(game, home_city, Railroad.create(name, "2"))
        return
    if owner not in railroads:
        raise ValueError(f"Assigned {name} to an unrecognized or unfounded railroad: {owner}")
    owner_railroad = railroads[owner]
    if owner_railroad.is_removed:
        raise ValueError(f"Cannot assign {name} to a removed railroad: {owner_railroad.name}")
    railroad_station_coords = [str(station.cell) for station in board.stations(owner)]
    # If the owner already occupies the home city, nothing more to do (note:
    # the private company is not attached in that case, matching prior behavior).
    if home_city in railroad_station_coords:
        return
    board.place_station(game, home_city, owner_railroad)
    railroads[owner].add_private_company(name)
def load_from_csv(game, board, railroads, companies_filepath):
    """Read the private companies CSV (if a path was given) and apply it."""
    if not companies_filepath:
        return None
    with open(companies_filepath, newline='') as companies_file:
        reader = csv.DictReader(companies_file, fieldnames=FIELDNAMES, delimiter=';', skipinitialspace=True)
        return load(game, board, railroads, tuple(reader))
def load(game, board, railroads, companies_rows):
    """Apply each private-company row via its handler in COMPANIES.

    Raises:
        ValueError: on duplicate company rows or an unrecognized company name.
    """
    if not companies_rows:
        return None
    # Reject duplicates up front, before any handler runs.
    private_company_names = [company["name"] for company in companies_rows]
    if len(set(private_company_names)) != len(private_company_names):
        raise ValueError("Each private company should only have a single entry.")
    for company_kwargs in companies_rows:
        name = company_kwargs.get("name")
        if name not in COMPANIES:
            raise ValueError(f"An unrecognized private company was provided: {name}")
        COMPANIES[name](game, board, railroads, company_kwargs)
from routes18xx.cell import Cell
from routes18xx.boardtile import EasternTerminus
from routes18xx.route import Route
# Chicago's hex (D6) and the adjacent connections hex (C5), used by the
# 1846-specific route-validity rules below.
CHICAGO_COORD = "D6"
CHICAGO_CONNECTIONS_COORD = "C5"
def filter_invalid_routes(routes, board, railroad):
    """
    Given a collection of routes, returns a new set containing only valid
    routes, considering features specific to 1846. Invalid routes removed:
    - east to east
    - go through Chicago using an impassable exit
    - only contain Chicago as a station, but don't use the correct exit path
    """
    chicago_cell = board.cell(CHICAGO_COORD)
    chicago_connections_cell = board.cell(CHICAGO_CONNECTIONS_COORD)
    chicago_space = board.get_space(chicago_cell)
    # Chicago's neighbors other than the C5 connections hex, i.e. its exits.
    chicago_neighbor_cells = [cell for cell in chicago_cell.neighbors.values() if cell != chicago_connections_cell]
    stations = board.stations(railroad.name)
    # A sieve style filter. If a condition isn't met, iteration continues to the next item. Items meeting all conditions
    # are added to valid_routes at the end of the loop iteration.
    valid_routes = set()
    for route in routes:
        # A route cannot run from east to east
        if isinstance(route.stops[0], EasternTerminus) and isinstance(route.stops[-1], EasternTerminus):
            continue
        # If the route goes through Chicago and isn't [C5, D6], ensure the path it took either contains its station or is unblocked
        if route.contains_cell(chicago_connections_cell) and len(route.stops) != 2:
            # Finds the subroute which starts at Chicago and is 3 tiles long. That is, it will go [C5, D6, chicago exit]
            all_chicago_subroutes = [subroute for subroute in route.subroutes(chicago_connections_cell) if len(subroute) == 3]
            chicago_subroute = all_chicago_subroutes[0] if all_chicago_subroutes else None
            # for/else: if no exit on the subroute is passable, drop the route.
            for cell in chicago_neighbor_cells:
                chicago_exit = chicago_subroute and chicago_subroute.contains_cell(cell)
                if chicago_exit and chicago_space.passable(cell, railroad):
                    break
            else:
                continue
        stations_on_route = [station for station in stations if route.contains_cell(station.cell)]
        # If the only station is Chicago, the path must be [D6, C5], or exit through the appropriate side.
        if [chicago_cell] == [station.cell for station in stations_on_route]:
            station_branch = board.get_space(chicago_cell).get_station_branch(stations_on_route[0])
            # Build the legal exit routes for the branch holding the station.
            chicago_exit_routes = []
            for paths in station_branch:
                exit_cell = paths[0] if paths[0] != chicago_connections_cell else paths[1]
                chicago_exit_routes.append(Route.create([chicago_space, board.get_space(exit_cell)]))
            if not (len(route) == 2 and route.contains_cell(chicago_connections_cell)) \
                    and not any(route.overlap(chicago_exit_route) for chicago_exit_route in chicago_exit_routes):
                continue
        valid_routes.add(route)
    return valid_routes
def hook_route_set_values(route_set, railroad):
    """Map each route to its value, boosting the longest route for Mail Contract owners."""
    raw_values = {route: route.value for route in route_set}
    if railroad.has_private_company("Mail Contract") and route_set:
        # The Mail Contract bonus applies to the route with the most stops.
        longest_route = max(route_set, key=lambda run_route: len(run_route.stops))
        raw_values[longest_route] = hook_route_max_value(longest_route, railroad)
    return raw_values

def hook_route_max_value(route, railroad):
    """Route value, plus $10 per stop when the railroad owns the Mail Contract."""
    bonus = len(route.stops) * 10 if railroad.has_private_company("Mail Contract") else 0
    return route.value + bonus
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import rv_pb2 as rv__pb2
# NOTE(review): this module appears to be generated by the gRPC Python protoc
# plugin; prefer regenerating from rv.proto over editing it by hand.
class RVStub(object):
    """RV Service definition.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC; serializers convert to/from rv.proto messages.
        self.FileUpload = channel.unary_unary(
                '/rv.proto.RV/FileUpload',
                request_serializer=rv__pb2.FileRequest.SerializeToString,
                response_deserializer=rv__pb2.FileResponse.FromString,
                )
class RVServicer(object):
    """RV Service definition.

    Base servicer class; subclass and override FileUpload to implement the RPC.
    """

    def FileUpload(self, request, context):
        """FileUpload accepts a single file upload request and
        returns a status message to the caller.
        """
        # Default stub behavior: report UNIMPLEMENTED until overridden.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_RVServicer_to_server(servicer, server):
    """Register *servicer*'s RPC method handlers on a grpc.Server."""
    rpc_method_handlers = {
            'FileUpload': grpc.unary_unary_rpc_method_handler(
                    servicer.FileUpload,
                    request_deserializer=rv__pb2.FileRequest.FromString,
                    response_serializer=rv__pb2.FileResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'rv.proto.RV', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class RV(object):
    """RV Service definition.

    Experimental convenience API: each static method performs a one-off RPC
    against *target* without requiring the caller to build a stub first.
    """

    @staticmethod
    def FileUpload(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the FileUpload unary-unary RPC on *target*."""
        return grpc.experimental.unary_unary(request, target, '/rv.proto.RV/FileUpload',
            rv__pb2.FileRequest.SerializeToString,
            rv__pb2.FileResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
This solution could be no more than a 'collection of scripts.'
However, we prefer to try and maintain some design throughout the code base.
> Generally, in this document, design refers to 'software organization conventions'.
Specifically, we will discuss each of the following in some depth:
* [Design Objectives](#design-objectives)
* [General Conventions](#general-conventions)
* [CLI Tool Naming Convention](#cli-tool-naming-convention)
* [Module & Package Conventions](#module--package-conventions)
* [API Integration Conventions](#api-integrationwrapper-conventions)
* [TextFSM Conventions](#textfsm-conventions)
* [Argument Parsing](#argument-parsing)
# CLI Tool Naming Convention
CLI Tools use the following naming convention:
* `routeviews-` prefix
* Automation tools that help with some automated workflow.
* `rvm-` prefix
* Monitoring tools that give some info about our infrastructure.
* **⚠ Important:** These tools MUST **not** make changes.
* Ideally, all these tools support "--influxdb" option to produce InfluxDB Line Protocol.
> InfluxDB Line Protocol enables easy integration with our Telegraf/InfluxDB/Grafana monitoring solution!
# Argument Parsing
We use the [ConfigArgParse package](https://pypi.org/project/ConfigArgParse/) for processing command line arguments.
> **ℹ Tip:** 'Pipeline' solutions are great at handling Environment Variables.
> ConfigArgParse introduces the `env_var` option for any argument, letting it be read automatically from an environment variable (e.g., when running under Jenkins or GitHub Actions).
# TextFSM Conventions
We use [TextFSM](https://github.com/google/textfsm#textfsm) templates when we need to collect data from some CLI command's output.
> **ℹ Tip:** If there is any method for extracting structured data (ex. JSON, NetCONF, XML), **prefer structured data over using TextFSM**.
> ℹ What is TextFSM?
> Best to provide the definition directly from [project page](https://github.com/google/textfsm):
>
> > Python module which implements a template based state machine for parsing semi-formatted text.
> > Originally developed to allow programmatic access to information returned from the command line interface (CLI) of networking devices.
## Usage
We have wired up the `routeviews.parse.template_parse` function to use the "src/routeviews/templates/" folder.
Any template added to the "src/routeviews/templates/" folder can subsequently be invoked using `template_parse()`.
As an example, assume we've just added the template, "src/routeviews/templates/**bgp_neighbors**.tmpl".
This template can be run by calling `template_parse(..., 'bgp_neighbors')`
A full example (leveraging `routeviews.exec`) is provided below:
import routeviews.exec
import routeviews.parse
console_output = routeviews.exec.run('sudo', 'vtysh', '-c', 'show bgp neighbors')
parsed_output = routeviews.parse.template_parse(console_output, template='bgp_neighbors')
# General Conventions
Many conventions are generic, and can apply to *all* Python code.
## Modules as Singletons
The common 'singleton pattern' is trivial to implement in Python.
All we do is consider the module itself to be the 'singleton object.'
*Convention*:
* use a single module (e.g. "my_singleton.py").
* define all 'methods' directly as module functions.
* define all 'properties' directly as module variables.
* retrieve the singleton object via "import my_singleton"
You may be asking, "is this really a proper Singleton?"
There are two important characteristics we care about Singleton objects, which this scheme fulfills:
* Only ONE of the Singleton object will ever exist.
* ✅ Yes: thanks to Python's import logic, any subsequent imports of the 'my_singleton' module will ALWAYS return the same 'my_singleton object.'
* Can we retrieve the same Singleton object from anywhere?
* ✅ Yes: *everything* is an object in Python.*
> *: Even an imported module is an object!
> So, "import my_singleton" results in importing the one-and-only 'my_singleton object.'
## Provide Type Hints
Type hints make any object-oriented package or module much more 'discoverable' by its users.
So, try to provide type hints wherever possible!
### Reference a Type before it is defined
If some type hint is impossible due to circular dependencies, [that definition may be expressed as a string literal, to be resolved later ("forward references")](https://peps.python.org/pep-0484/#forward-references).
Without forward references, the following code does not work.
class Tree:
def __init__(self, left: Tree, right: Tree):
self.left = left
self.right = right
Using the string literal `'Tree'` (instead of `Tree`) results in a forward reference which will be resolved later.
class Tree:
def __init__(self, left: 'Tree', right: 'Tree'):
self.left = left
self.right = right
## Use Data Classes
> Data Classes Require Python >= 3.7
We love [Python Data Classes (PEP 557)](https://peps.python.org/pep-0557/).
What are Data Classes?
Conceptually, any class that will store data is a Data Class.
In Python, this is manifest as the `dataclasses` package.
This package provides the simple `dataclass` decorator/function, which will reduce a lot of 'boilerplate code' from your Data Class.
**Key Point**: The `dataclasses` module reduces a lot of 'boilerplate code.'
As an example, I consider the `__init__()` method to be boilerplate in general.
Using `dataclass` decorator, that method is auto-defined!
from dataclasses import dataclass
@dataclass
class Foo:
foo: str
With this super-readable 'Data Class' definition for `Foo`, we can instantiate Foo using key-value arguments.
# We didn't have to write __init__(foo)..., but it works!
Foo(foo='hello')
### Prefer Immutable Objects (`frozen=True`)
We prefer immutable dataclasses!
> ℹ Why prefer `frozen=True`?
>
> 'Frozen objects' can be used in Python Sets and as Dictionary Keys!
> In addition, 'preferring immutable objects' often results in code that is *much* **easier to test!**
To make a dataclass immutable, use the dataclass decorator with the keyword argument: "`frozen=True`"
An example of an immutable Point data class.
@dataclass(frozen=True)
class Point:
x: int
y: int
point = Point(1,2)
point.x = 3 # Will raise an exception!
### *Warning: mutable default values*
When setting default values, mind that default values are not mutable!
Specifically, this can result in multiple class instances sharing the same copy of that default value!
Let's consider a specific example: if two instances of class `D` are created without specifying a value for `x`, they will share the same copy of `x`.
from dataclasses import dataclass
@dataclass
class D:
x: list = [] # ❌ BAD, never do this!
To fix this, we can simply use the provided `field` function from the dataclasses module.
from dataclasses import dataclass, field
@dataclass
class D:
x: list = field(default_factory=list)
> See more in ["Mutable default values" in Python docs](https://docs.python.org/3/library/dataclasses.html#mutable-default-values)
# Module & Package Conventions
Overall, we use package and module names that follow PEP 8 and 'make sense.'
Given that, these are some of the patterns that have emerged in this code base.
* `routeviews.scripts` package holds CLI tools.
* `routeviews.templates` package holds template resources.
## Defining `__all__`
We maintain `__all__` in our packages' `__init__.py` to help simplify the 'public API' of the package.
This concept is well discussed in the following quote, from [a stack overflow answer](https://stackoverflow.com/a/35710527):
> *What does `__all__` do?*
>
> It declares the semantically "public" names from a module.
> If there is a name in __all__, users are expected to use it, and they can have the expectation that it will not change.
# API Integration/Wrapper Conventions
We integrate with many external platforms and services via API.
Keeping these integrations organized is useful.
We use the following conventions to manage our API integrations, each discussed further below:
* One package per API integration
* Manage `__all__`
* `dataclasses` sub-package
## One Package per Integration
Recall one of our high level design goals is 'package cohesion.'
For API Integrations, we consider primary dimension of cohesion for these to be, "what product's API am I integrating?"
*Convention*:
* Develop ONE single package for each external application that we integrate with.
> This convention is intuitive and aligns with [many platform/services](https://github.com/realpython/list-of-python-api-wrappers).
## Repository, Client, and Unit of Work
TODO
| Layer | Abstractness | Title | Description |
|-------|---------------|---------------|-------------|
| 3 | More | Unit of Work | Plan some work to be done. |
| 2 | Some | Repository | Work with well-typed 'Domain Data Classes'. |
| 1 | None | Client | Work directly with bytes and return primitive dictionaries. |
These are software layers, which implies dependencies between layers.
Specifically:
* Repository: depends on Client.
* Unit of Work: depends on Repository (and transitively depends on the Client as well).
## Domain Data Classes
In general, every different platform that we integrate will have its own *types* of data.
To make working with these different data joyful, we define a *domain Data Class* per data type.
> Relevant Design goal: "* Type hints for everything."
### `dataclasses` package
We use the name "dataclasses" for the package that contains [Python dataclasses](https://docs.python.org/3/library/dataclasses.html).
Further, all dataclasses are then exposed in the API package.
*Convention*:
* "dataclasses" package contains all dataclasses.
* One dataclass per module/file.
## Maintain `__all__`
We have already discussed the convention that [we 'Define `__all__`' for packages](#defining-all).
*Convention*:
* Expose all dataclasses
Specifically, for API wrappers, we want to ensure the following:
- [ ] All dataclasses are available via `routeviews.<API>.<dataclass_name>`
- [ ] Repository & Client classes are available via `routeviews.<API>.<repository_classname>`
# Design Objectives
We have the following objectives in our design.
> These are roughly order-ranked: the top of the list is highest priority.
* Type hints for everything.
* Packages, Modules, and Classes are cohesive*.
* Packages, Modules, and Classes are loosely coupled**.
*: TODO Discuss 'cohesion.'
**: TODO Discuss 'loosely coupled.'
| /routeviews-0.3.6.tar.gz/routeviews-0.3.6/docs/design.md | 0.937053 | 0.756897 | design.md | pypi |
This project follows [Semantic Versioning](https://semver.org/).
> Notice: Major version "Zero" (0.y.z) is for initial development. Today, anything MAY change with each minor release.
## 0.3.6
- Refactor `rvm-bgp-status` to make InfluxDB Tags more useful.
- `state` is now a Tag rather than a Field.
- Why? We would like to "GROUP BY" state in our InfluxDB queries!
## 0.3.5
- Upgrade `rvm-bgp-status` to add "VTY Latency" to the `bgp_status` InfluxDB measurement.
- `vty_latency_sec` field has been added when running `rvm-bgp-status --influxdb`
- Nice to get an idea of FRRouting's performance over time!
- Refactor `rvm-bgp-status` to make InfluxDB Tags more useful.
- `state` is now a Tag rather than a Field.
- Why? We would like to "GROUP BY" state in our InfluxDB queries!
- Remove redundant "collector" tag from InfluxDB measurements.
- InfluxDB automatically tags data with the "host" tag. So, the "collector" tag was redundant.
## 0.3.4
- Fix `rvm-latest-mrt` to always get the *LATEST* MRT files.
- For some reason, sorting MRT archives by 'latest change timestamp (ctime)' seems to be non-deterministic! As a result, this tool was prioritizing MRT files from as old as 2019 for some collectors!
- Solution: Sort alphabetically instead of using ctime.
Route Views' MRT Archives use a consistent "YYYY-MM-DD" naming scheme which works perfectly when sorted alphabetically!
## 0.3.3
- Fix `routeviews-peer-requests` to use consistent vertical whitespace.
## 0.3.2
* Upgrade `routeviews-peer-requests` to print the, "effected Collector's(es) IP Addresses" after updating the Ansible inventory.
* For Maintainers to copy or reference when completing peer requests.
## 0.3.1
* Fix `routeviews-peer-requests` to ignore 'non-operational' Routers/Collectors.
* Some Route Views collectors are non operational today.
## 0.3.0
> **⚠ NOTE:** Renamed `routeviews-build-peer` CLI Tool to `routeviews-peer-request`.
> (Updated throughout this project's documentation)
* Upgrade `routeviews-peer-requests` with full feature set! 🎉
- Add `--show-options` flag that can be used by ANYONE to check their potential peerings (at Internet Exchanges) with Route Views.
- Add `--multihop-index` argument, to create BGP multihop peering config on Route Views' Multihop Collectors.
* `rvm-haproxy-stats` will fallback to `nc` if `socat` unavailable.
## 0.2.6
* Fix `rvm-haproxy-stats` CLI tool.
* InfluxDB line protocol was broken.
* Fixed a typo in the code that printed the InfluxDB line protocol.
## 0.2.5
* Add `rvm-haproxy-stats` CLI tool.
* Get stats from HAProxy Stick Tables on a Route Views collector.
## 0.2.4
* Add `--zipped` flag to `rvm-latest-mrt`.
* Only report files that have the ".bz2" file extension.
* *Why?* Ubuntu seems to continually update the MRT update file.
This had made the 'age_sec' metric in InfluxDB pretty much useless.
## 0.2.3
* Update `rvm-latest-mrt` InfluxDB line protocol to be simpler.
* Updates and RIBs are separate concerns, so send up separate measurements instead of combining them into one line.
## 0.2.2
* Create a 'GitHub Release' after delivering package to PyPI.org
## 0.2.1
* Add many InfluxDB tags to `rvm-latest-mrt`, and remove 2 fields (that were turned to tags).
* Using tags enables more useful and efficient querying in Grafana!
## 0.2.0
* Add a set of `rvm` (Route Views Monitor) CLI tools.
> **ℹ Tip:** The `rvm` tools listed below can run on any FRR-based Route Views collector.
* `rvm-latest-mrt`: Get metrics about the latest MRT Dump files on a Route Views collector.
* `rvm-bgp-status`: Get info about BGP Peerings on a Route Views collector.
* `rvm-bmp-status`: Get info about BMP sessions on a Route Views collector.
* Add `--sudo` flag to CLI tools where appropriate.
* CLI tools that depend on `vtysh` will only use raise privileges when running `vtysh`.
* Extract 'InfluxDB Line Protocol' logic into `routeviews.influx` module.
* Generate InfluxDB Line Protocol -- useful when using CLI tools as [Telegraf Exec Input Plugins](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/exec).
* Extract 'TextFSM Template Parsing' logic into the `routeviews.parse.template_parse` function.
* See additional [discussion of TextFSM in our Design Docs](./design.md#textfsm-conventions)
## 0.1.3
* Fix Bug: `routeviews-peer-request` CLI tool rearranges the 'Route Views Peer Config' in the Ansible Inventory.
* Now we track the 'order' of attributes whenever loading any `routeviews.ansible.NeighborConfig` class from a YAML file.
That 'order' is then used when subsequently dumping the data, thus ensuring that nothing is rearranged unnecessarily!
## 0.1.2
* Bug: `routeviews-peer-request` CLI tool rearranges the 'Route Views Peer Config' in the Ansible Inventory.
* Fix PeeringDB Authentication!
* See the [relevant GitHub Issue](https://github.com/peeringdb/peeringdb/issues/1206#issuecomment-1202550667) where we discovered the following details about PeeringDB API Basic Authentication:
> 1. Do NOT base64 encode
> 2. Username/Password Must be space-separated (e.g., must not be colon ":" separated)
> 3. Username when using API tokens is "Api-Key"
> 4. Ensure "www" is in all API requests!
* Enable using PeeringDB API Key instead of username/password.
* Exposed via `--peeringdb-key` argument in `routeviews-peer-request` CLI tool (or as env var: `PEERINGDB_KEY`).
* Add the filepath to the exception message when `routeviews.yaml` encounters a `ParseError`.
* This enables fixing syntax issues very quickly.
* "Unable to parse `<filepath>`" is the added message, seen below:
```
... omitted traceback for brevity...
routeviews.yaml.ParseError: while parsing a block mapping
in "<unicode string>", line 1, column 1:
short_name: decix
^ (line: 1)
expected <block end>, but found '-'
in "<unicode string>", line 109, column 1:
- peer_as: 8888
^ (line: 109)
Unable to parse <working-tree>/ansible/inventory/host_vars/route-views.decix.routeviews.org
```
* Ensure that PyVCR cassettes do not contain HTTP Basic Authentication secrets.
* Rotated the (randomly generated) Base64 encoded password that was previously exposed via HTTP Basic Authentication Headers.
## 0.1.1
* Fix Bug: Package failed to declare some critical dependencies.
## 0.1.0
> Bug: Package failed to declare some critical dependencies.
> Was missing the `uologging` and `ruamel.yaml` dependency declarations in "setup.py".
The first release of the routeviews package contains some core CLI tools, as well as some functions/classes that might be useful to routeviews maintainers.
### CLI Tools
Provide new CLI tools! 🎉
* [`routeviews-peer-request` CLI tool](./user-guide.md#routeviews-peer-request-cli-tool): automation of updating ["Route Views Ansible inventory"](https://github.com/routeviews/infra), toward 'adding BGP peers to XYZ collectors'.
* [`routeviews-email-peers` CLI tool](./user-guide.md#routeviews-email-peers-cli-tool): get list of email addresses actively peered with a Route Views Collector.
### Libraries
* There is the `routeviews.peeringdb` package that has some great methods for interfacing with the PeeringDB API.
* There is the `routeviews.yaml` module that can load and save YAML config files (without rearranging them).
* Depends on the [`ruamel.yaml` package](https://pypi.org/project/ruamel.yaml/)
* There is the `routeviews.ansible` package, that can load, modify, and save the Route Views Ansible Inventory.
* There is the `routeviews.bgpsummery` module, that defines a `BGPSummary` class as well as functions for retrieving a `BGPSummary` from any collector.
* There is the (start of a) `routeviews.api` module/package, for interfacing with the Route Views API/DB (undocumented).
| /routeviews-0.3.6.tar.gz/routeviews-0.3.6/docs/release-notes.md | 0.921895 | 0.78083 | release-notes.md | pypi |
# Routinemaker
Routinemaker is a Python CLI that generates strength, cardio, and HIIT exercise routines using parametric curves. Routines are saved as Excel spreadsheets. See [routinemaker.me](https://routinemaker.me) for documentation.
## Installation
Routinemaker is a Python 3 package and can be installed through `pip`.
`$ pip install routinemaker`
## Usage
Run Routinemaker from the command line. The command does not take any options. You will be prompted as the generator walks through all the options.
`$ routinemaker`
## Outputs
Routinemaker generates formatted Excel spreadsheets. You can either print out the spreadsheets or upload them to the cloud (ie: Google Drive) to make them easier to use during workouts. Below are screenshots, downloadable files, and a brief usage guide for sample routines.
### Strength ([download](https://raw.githubusercontent.com/kathyqian/routinemaker/master/docs/xlsx/strength.xlsx))
_Usage:_ On Day 1, do 2 sets of 5 reps of dumbbell goblet squats with 50 pounds of weight, then do 2 sets of 3 reps of barbell bench presses with 20 pounds of weight. Mark an "X" in the corresponding white box as you finish each set.

### Cardio ([download](https://raw.githubusercontent.com/kathyqian/routinemaker/master/docs/xlsx/cardio.xlsx))
_Usage:_ On Day 1, run for 2.5 minutes, then rest by walking. Next, run for 1.5 minutes, then rest by walking. Finish all 6 intervals and mark an "X" in the corresponding white box as you progress.

### HIIT ([download](https://raw.githubusercontent.com/kathyqian/routinemaker/master/docs/xlsx/HIIT.xlsx))
_Usage:_ On Day 1, do 2 sets of 9 reps of bodyweight side lunges for each side, then do 2 sets of 5 reps of bodyweight jump squats, then do 2 sets of 10-second bodyweight forearm planks. Mark an "X" in the corresponding white box as you finish each set.

## Customizations
Routinemaker is meant to be customized to your preferences. In particular, you may want to modify the list of exercises or tweak the equations for the curves used to generate the workouts.
### Exercises
You can customize the list of exercises that Routinemaker uses to generate workouts. The exercises are stored in `data/exercises.json`. Each exercise in the json file includes settings for _type_ (ie: strength), muscle _group_ (ie: chest), _variations_ (ie: by equipment), _unit_ of measurement (ie: reps or seconds), _minimum_ units, _maximum_ units, and the _step_ for incrementing the units.
### Curves
You can also customize the curves that Routinemaker uses to generate workouts. The calculation logic is stored in `routine.py`. Routinemaker comes with 3 default curves: _linear_ (constant increase in difficulty), _exponential_ (starts off easy and escalates in difficulty towards the end), and _logarithmic_ (ramps up quickly).
## Workflow
Routinemaker works by prompting for a variety of settings and then feeding the parameters into curves in order to generate workouts for each day. Below are a few sample screenshots of the workflow and a summary of all the prompts and options.
### Splash Screen

### Filtering Equipment for Strength Exercises

### Manually Selecting Strength Exercises

### Choosing Random List of HIIT Exercises

### Editing Cart of Exercises

### Choosing Goals for Strength Exercises

### Configuring Intervals for Cardio Exercises

### Summary of Prompts and Options
```
├── What type of routine do you want to create?
│ ├── STRENGTH
│ │ ├── What type of equipment do you have access to?
│ │ └── How would you like to choose the exercises for your routine?
│ │ ├── START WITH A RANDOM LIST OF EXERCISES
│ │ │ ├── Which muscle groups do you want to train?
│ │ │ └── How many exercises do you want in your routine?
│ │ └── MANUALLY ADD EXERCISES [∞]
│ │ ├── Pick a muscle group to browse exercises to add
│ │ └── Which exercise would you like to add?
│ ├── CARDIO
│ │ └── Which cardio activity would you like to do?
│ │ └── Which specific exercise would you like to work on?
│ └── HIIT
│ └── How would you like to choose the exercises for your routine?
│ ├── START WITH A RANDOM LIST OF EXERCISES
│ │ ├── Which muscle groups do you want to train?
│ │ └── How many exercises do you want in your routine?
│ └── MANUALLY ADD EXERCISES [∞]
│ ├── Pick a muscle group to browse exercises to add
│ └── Which exercise would you like to add?
├── Do you need to edit or reorder the exercises in your routine? [∞]
│ ├── YES
│ │ └── What would you like to do?
│ │ ├── ADD EXERCISE
│ │ │ ├── Pick a muscle group to browse exercises to add
│ │ │ └── Which exercise would you like to add?
│ │ ├── REMOVE EXERCISE
│ │ │ └── Which exercise would you like to remove?
│ │ ├── SWAP EXERCISE
│ │ │ ├── Which exercise would you like to swap?
│ │ │ ├── [STRENGTH/HIIT]
│ │ │ │ ├── Pick a muscle group to browse exercises to add
│ │ │ │ └── Which exercise would you like to add?
│ │ │ └── [CARDIO]
│ │ │ └── Which exercise would you like to add?
│ │ └── REORDER EXERCISE
│ │ ├── Which exercise would you like to move?
│ │ └── Which exercise would you like to move it before?
│ └── NO
├── How many weeks would you like your routine to last?
├── How many days per week are you planning on exercising?
├── [FOR EACH EXERCISE]
│ ├── Which variation of EXERCISE do you plan on doing?
│ ├── How many continuous UNITS of ACTIVITY are you currently comfortable with?
│ ├── How many continuous UNITS of ACTIVITY is your goal?
│ └── [STRENGTH]
│ ├── What weight are you currently using for ACTIVITY?
│ └── What's your goal weight for ACTIVITY?
├── [STRENGTH/HIIT]
│ ├── What's the mininum number of sets you'd like to do for each exercise?
│ └── What's the maximum number of sets you'd like to do for each exercise?
├── [CARDIO]
│ ├── What's the maximum number of intervals you want in your routine?
│ └── Please choose a random number to seed the routine
├── What type of curve do you want to use to create your routine?
│ ├── LINEAR
│ ├── EXPONENTIAL
│ └── LOGARITHMIC
└── What do you want to name the output file?
```
## Disclaimer
Routinemaker is an automated engine that generates workout routines independent of your physical ability. Generated workouts may not be suitable for your level of fitness. Please use common sense. You should see your healthcare provider before starting a new exercise program.
## License
Routinemaker was created and [open-sourced](https://github.com/kathyqian/routinemaker) under the [MIT License](https://github.com/kathyqian/routinemaker/blob/master/LICENSE) by [Kathy Qian](https://kathyqian.com/).
| /routinemaker-0.5.3.tar.gz/routinemaker-0.5.3/README.md | 0.496338 | 0.891434 | README.md | pypi |
from ortools.constraint_solver import pywrapcp
from ortools.constraint_solver.routing_enums_pb2 import (
FirstSolutionStrategy,
LocalSearchMetaheuristic,
)
class VrpSolver:
    """VrpSolver https://developers.google.com/optimization/routing/vrp

    Thin wrapper around OR-Tools' routing solver for the Vehicle Routing
    Problem (VRP): route a fleet of vehicles that all start and end at a
    single depot (node 0), minimizing the longest route.
    """

    def __init__(self, num_vehicles):
        """
        Args:
            num_vehicles: number of vehicles available for the problem.
        """
        self.num_vehicles = num_vehicles

    def create_data_model(self, distance_matrix):
        """Stores the data for the problem."""
        data = {}
        data["distance_matrix"] = distance_matrix
        data["num_vehicles"] = self.num_vehicles
        data["depot"] = 0  # all vehicles start and end at node 0
        return data

    def compute_total_distance(
        self, route, distance_matrix, unite_mesure="m", show_log=False
    ):
        """With a distance matrix compute the distance between nodes for routes

        Args:
            route: ordered list of node indices (as produced by get_routes).
            distance_matrix: square matrix of pairwise distances.
            unite_mesure: unit label, used only in the optional log line.
            show_log: when True, print the route and its distance.

        Returns:
            The accumulated distance along the route.
        """
        # NOTE(review): the leg depot -> route[1] is seeded here, and the
        # loop below adds route[0] -> route[1] again; when route[0] == 0
        # (the depot, as get_routes produces) that first leg is counted
        # twice. Confirm whether this is intentional before changing it.
        distance = distance_matrix[0][route[1]]
        for node in range(len(route) - 1):
            distance += distance_matrix[route[node]][route[node + 1]]
        if show_log:
            print(
                "\nFor route {} -- > distance {:.1f} {}".format(
                    route, distance, unite_mesure
                )
            )
        return distance

    def print_solution(self, manager, routing, solution):
        """Prints solution on console."""
        max_route_distance = 0
        for vehicle_id in range(self.num_vehicles):
            index = routing.Start(vehicle_id)
            plan_output = "Route for vehicle {}:\n".format(vehicle_id)
            route_distance = 0
            while not routing.IsEnd(index):
                plan_output += " {} -> ".format(manager.IndexToNode(index))
                previous_index = index
                index = solution.Value(routing.NextVar(index))
                route_distance += routing.GetArcCostForVehicle(
                    previous_index, index, vehicle_id
                )
            plan_output += "{}\n".format(manager.IndexToNode(index))
            plan_output += "Distance of the route: {} m\n".format(route_distance)
            print(plan_output)
            max_route_distance = max(route_distance, max_route_distance)
        print("Maximum of the route distances: {} m".format(max_route_distance))

    def get_routes(self, manager, routing, solution):
        """Get vehicle routes from a solution and store them in an array."""
        # Get vehicle routes and store them in a two dimensional array whose
        # i,j entry is the jth location visited by vehicle i along its route.
        routes = []
        for route_nbr in range(self.num_vehicles):
            index = routing.Start(route_nbr)
            route = [manager.IndexToNode(index)]
            while not routing.IsEnd(index):
                index = solution.Value(routing.NextVar(index))
                route.append(manager.IndexToNode(index))
            routes.append(route)
        return routes

    def solver_guided_local_search(
        self,
        distance_matrix,
        time_max,
        heuristic_type="FirstSolutionStrategy",
        heuristic="PATH_CHEAPEST_ARC",
        max_travel_distance=False,
        show_log=False,
    ):
        """Solve the VRP with the chosen search heuristic.

        Args:
            distance_matrix: square matrix of pairwise distances.
            time_max: search time limit, in seconds.
            heuristic_type: "FirstSolutionStrategy" or
                "LocalSearchMetaheuristic".
            heuristic: enum value name within the chosen heuristic type.
            max_travel_distance: per-vehicle travel cap; computed from the
                matrix when falsy.
            show_log: when True, print the solution to the console.

        Returns:
            A list of routes (one per vehicle), or False when no solution
            was found.

        Raises:
            AttributeError: if *heuristic* is not a member of the chosen
                heuristic enum.
        """
        # Instantiate the data problem.
        data = self.create_data_model(distance_matrix)
        # Create the routing index manager.
        manager = pywrapcp.RoutingIndexManager(
            len(data["distance_matrix"]), data["num_vehicles"], data["depot"]
        )
        # Create Routing Model.
        routing = pywrapcp.RoutingModel(manager)

        def distance_callback(from_index, to_index):
            """Returns the distance between the two nodes."""
            # Convert from routing variable Index to distance matrix NodeIndex.
            from_node = manager.IndexToNode(from_index)
            to_node = manager.IndexToNode(to_index)
            return data["distance_matrix"][from_node][to_node]

        transit_callback_index = routing.RegisterTransitCallback(distance_callback)
        # Define cost of each arc.
        routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
        if not max_travel_distance:
            # Generous default cap: twice the largest row sum cannot exclude
            # any feasible route.
            max_travel_distance = max(
                [int(sum(line)) * 2 for line in data["distance_matrix"]]
            )
        # Add Distance constraint.
        dimension_name = "Distance"
        routing.AddDimension(
            transit_callback_index,
            0,  # no slack
            max_travel_distance,  # vehicle maximum travel distance
            True,  # start cumul to zero
            dimension_name,
        )
        distance_dimension = routing.GetDimensionOrDie(dimension_name)
        # Large span coefficient pushes the solver to balance route lengths.
        distance_dimension.SetGlobalSpanCostCoefficient(100)
        # Configure the search heuristic.
        search_parameters = pywrapcp.DefaultRoutingSearchParameters()
        try:
            if heuristic_type == "FirstSolutionStrategy":
                # Bug fix: first-solution strategies belong on the
                # `first_solution_strategy` field; previously they were
                # assigned to `local_search_metaheuristic`, which
                # misconfigured the search.
                search_parameters.first_solution_strategy = getattr(
                    FirstSolutionStrategy, heuristic
                )
            elif heuristic_type == "LocalSearchMetaheuristic":
                search_parameters.local_search_metaheuristic = getattr(
                    LocalSearchMetaheuristic, heuristic
                )
        except AttributeError as e:
            print("Heuristic is not in the available heuristic list\n")
            print(e)
            raise
        search_parameters.time_limit.seconds = time_max
        search_parameters.log_search = False
        # Solve the problem.
        assignment = routing.SolveWithParameters(search_parameters)
        route = False
        if assignment:
            route = self.get_routes(manager, routing, assignment)
            if show_log:
                # Print solution on console.
                self.print_solution(manager, routing, assignment)
        return route
from osrm import Point, RequestConfig, simple_route
from pynominatim import Nominatim
import time
class RoutingDataMatrix(object):
    """From classical address create data matrix"""

    def __init__(self, host="localhost:5000"):
        self.host = host
        # Point every subsequent OSRM request at the configured backend.
        RequestConfig.host = self.host

    def coordinate_infos(self, locations):
        """With nominatim convert classic adress to geo_coordinates."""
        nominatim = Nominatim()
        geo_coordinates = []
        geo_points = []
        for place in locations:
            matches = nominatim.query(place)
            if not matches:
                raise ValueError("Address dont match in nominatim", 0, place)
            if len(matches) > 1:
                # Ambiguous address: surface the candidates so the caller
                # can disambiguate.
                candidates = [
                    {
                        "display_name": match.get("display_name"),
                        "type": match.get("type"),
                        "lat": match.get("lat"),
                        "lon": match.get("lon"),
                    }
                    for match in matches
                ]
                raise ValueError("To many address", len(matches), candidates)
            lat = float(matches[0].get("lat"))
            lon = float(matches[0].get("lon"))
            geo_coordinates.append([lat, lon])
            geo_points.append(Point(latitude=lat, longitude=lon))
        return geo_coordinates, geo_points

    def distance_duration_matrix_simple_route(self, points, test_mode=False):
        """Create distance matrix with OSRM point"""
        distance_matrix = []
        duration_matrix = []
        for origin in points:
            distances = []
            durations = []
            for destination in points:
                if test_mode:
                    # to avoid unavaiblility of the service
                    time.sleep(5)
                leg = simple_route(origin, destination)["routes"][0]
                distances.append(leg.get("distance"))
                durations.append(leg.get("duration"))
            distance_matrix.append(distances)
            duration_matrix.append(durations)
        return distance_matrix, duration_matrix
from functools import partial
import torch
import random
from torch import nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pad_sequence
from routing_transformer.routing_transformer import RoutingTransformerLM
from routing_transformer.autopadder import Autopadder
def default(value, default):
    """Return *value* unless it is ``None``, in which case return *default*."""
    if value is None:
        return default
    return value
def top_p(logits, thres = 0.9):
    """Nucleus (top-p) filtering: keep the smallest set of logits whose
    cumulative probability reaches ``1 - thres``; set the rest to -inf."""
    sorted_logits, sorted_indices = torch.sort(logits, descending=True)
    cumulative_probs = F.softmax(sorted_logits, dim=-1).cumsum(dim=-1)
    # Mark positions whose cumulative mass already exceeds the nucleus ...
    drop = cumulative_probs > 1.0 - thres
    # ... then shift right so the position that crosses the boundary is kept.
    drop[:, 1:] = drop[:, :-1].clone()
    drop[:, 0] = False
    filtered = sorted_logits.masked_fill(drop, float('-inf'))
    # Scatter the filtered values back to their original column positions.
    return filtered.scatter(1, sorted_indices, filtered)
def top_k(logits, thres = 0.9):
    """Keep only the top ``(1 - thres)`` fraction of logits; the rest -inf."""
    num_kept = int((1 - thres) * logits.shape[-1])
    values, indices = logits.topk(num_kept)
    filtered = torch.full_like(logits, float('-inf'))
    # Restore the surviving logits at their original positions.
    filtered.scatter_(1, indices, values)
    return filtered
def pad_sequence_right(seqs, value):
    """Right-pad 1-D tensors to a common length and stack them.

    :param seqs: sequence of 1-D tensors (possibly of different lengths)
    :param value: fill value used for the padded tail of shorter tensors
    :return: tensor of shape ``(len(seqs), max_len)``
    """
    max_len = max(len(s) for s in seqs)
    # Bug fix: ``value`` was previously dropped, so sequences were always
    # padded with zeros regardless of the requested pad value.
    return torch.stack([F.pad(s, (0, max_len - len(s)), value=value) for s in seqs])
def truncate_sequence(inputs, mask = None, pad_value=0):
    """Truncate a ``(batch, seq)`` tensor and its mask at a random length.

    The cut point is drawn uniformly from ``[2, seq_len]`` via the global
    ``random`` module. ``pad_value`` is kept for interface compatibility but
    is not needed: truncation never introduces padding.

    :param inputs: token tensor with the sequence on dimension 1
    :param mask: optional boolean mask of the same leading shape; defaults to
        an all-True mask
    :return: the truncated ``(inputs, mask)`` pair
    """
    seq_len = inputs.shape[1]
    if mask is None:
        mask = torch.ones_like(inputs).bool()
    rand_length = random.randint(2, seq_len)
    return inputs[:, :rand_length], mask[:, :rand_length]
class AutoregressiveWrapper(nn.Module):
    """Autoregressive convenience wrapper around ``RoutingTransformerLM``.

    Handles automatic padding, ancestral sampling (:meth:`generate`) and the
    shifted cross-entropy training objective (:meth:`forward`).
    """

    def __init__(self, net, ignore_index = None, pad_value = 0):
        """
        :param net: the language model to wrap; must be a RoutingTransformerLM
        :param ignore_index: target index excluded from the loss; defaults to ``pad_value``
        :param pad_value: value used when padding ragged input sequences
        """
        super().__init__()
        assert isinstance(net, RoutingTransformerLM), 'generative trainer wrapper can only accept RoutingTransformerLM class'
        self.pad_value = pad_value
        self.ignore_index = default(ignore_index, pad_value)
        self.net = Autopadder(net)
        self.max_seq_len = net.max_seq_len
        self.base_net = net

    def update_kmeans(self):
        # Forward k-means routing updates to the wrapped (unpadded) network.
        self.base_net.update_kmeans()

    @torch.no_grad()
    def generate(self, start_tokens, seq_len, eos_token = None, temperature = 1., filter_logits_fn = top_k, filter_thres = 0.9, **kwargs):
        """Sample up to ``seq_len`` new tokens following ``start_tokens``.

        :param start_tokens: prompt of shape ``(seq,)`` or ``(batch, seq)``
        :param seq_len: maximum number of tokens to generate
        :param eos_token: stop early once every sequence sampled this token
        :param temperature: softmax temperature applied after filtering
        :param filter_logits_fn: logit filter, e.g. ``top_k`` or ``top_p``
        :param filter_thres: threshold forwarded to the filter function
        :return: only the newly generated tokens (prompt excluded)
        """
        was_training = self.net.training
        num_dims = len(start_tokens.shape)
        if num_dims == 1:
            start_tokens = start_tokens[None, :]
        b, t = start_tokens.shape
        self.net.eval()
        out = start_tokens
        input_mask = kwargs.pop('input_mask', None)
        if input_mask is None:
            input_mask = torch.full_like(out, True, dtype=torch.bool, device=out.device)
        for _ in range(seq_len):
            # Only feed the trailing window the model can attend to.
            x = out[:, -self.max_seq_len:]
            input_mask = input_mask[:, -self.max_seq_len:]
            logits, _ = self.net(x, input_mask=input_mask, **kwargs)
            logits = logits[:, -1, :]
            filtered_logits = filter_logits_fn(logits, thres = filter_thres)
            probs = F.softmax(filtered_logits / temperature, dim=-1)
            sample = torch.multinomial(probs, 1)
            out = torch.cat((out, sample), dim=-1)
            # Bug fix: the new token is appended on the right, so its mask
            # entry must also be appended on the right. The previous left-pad
            # ((1, 0)) shifted user-provided masks out of alignment with the
            # token sequence.
            input_mask = F.pad(input_mask, (0, 1), value=True)
            if eos_token is not None and (sample == eos_token).all():
                break
        out = out[:, t:]
        if num_dims == 1:
            out = out.squeeze(0)
        self.net.train(was_training)
        return out

    def forward(self, x, return_loss = False, randomly_truncate_sequence = False, **kwargs):
        """Compute logits, or the training loss when ``return_loss`` is set.

        :param x: token tensor of shape ``(batch, seq)`` or a list of 1-D tensors
        :param return_loss: when True, return shifted cross-entropy plus the
            network's auxiliary loss instead of logits
        :param randomly_truncate_sequence: truncate the batch at a random length
        """
        pad = partial(pad_sequence, batch_first = True, padding_value = self.pad_value)
        if not return_loss:
            if not isinstance(x, torch.Tensor):
                x = pad(x)
            return self.net(x, **kwargs)
        m = kwargs.get('input_mask', None)
        if randomly_truncate_sequence:
            x, m = truncate_sequence(x, m, pad_value = self.pad_value)
        # Shift targets by one: predict token i+1 from tokens [0..i].
        if isinstance(x, torch.Tensor):
            xi, xo = x[:, :-1], x[:, 1:]
        else:
            xi = pad(list(map(lambda t: t[:-1], x)))
            xo = pad(list(map(lambda t: t[1:], x)))
        if m is not None:
            assert m.shape == x.shape[0:2], 'input mask must be the same shape as the input of the auto-regressive wrapper to automatically handle'
            kwargs['input_mask'] = m[:, :-1]
        out, aux_loss = self.net(xi, **kwargs)
        loss = F.cross_entropy(out.transpose(1, 2), xo, ignore_index = self.ignore_index)
        loss = loss + aux_loss
        return loss
from typing import List, Iterable, Callable, TypeVar
import routingblocks
from .move_selectors import MoveSelector
class BestInsertionOperator(routingblocks.RepairOperator):
    """
    Repairs a solution by inserting the missing vertices one at a time, guided
    by the cost each insertion incurs.

    A :ref:`routingblocks.operators.MoveSelector[routingblocks.InsertionMove]`
    picks the next insertion. It receives the possible
    :ref:`routingblocks.InsertionMove` objects for a vertex, ordered by cost in
    descending order, so strategies such as cheapest insertion, most expensive
    insertion, or randomized selection can be plugged in without changing the
    operator itself.
    """

    def __init__(self, instance: routingblocks.Instance, move_selector: MoveSelector[routingblocks.InsertionMove]):
        """
        :param instance: The problem instance
        :param move_selector: The move selector used to choose the next insertion position
        """
        routingblocks.RepairOperator.__init__(self)
        self._instance = instance
        # Caches the best insertion positions per vertex between moves.
        self._move_cache = routingblocks.InsertionCache(self._instance)
        # Exposed
        self.move_selector = move_selector

    def apply(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
              vertex_ids: Iterable[int]) -> None:
        # Stations are never (re-)inserted by this operator.
        vertex_ids = [vid for vid in vertex_ids if not self._instance.get_vertex(vid).is_station]
        self._move_cache.rebuild(evaluation, solution, vertex_ids)
        for vid in vertex_ids:
            chosen_move = self.move_selector(self._move_cache.get_best_insertions_for_vertex(vid))
            target_route = chosen_move.after_node.route
            # The vertex is placed now, so stop tracking its candidates.
            self._move_cache.stop_tracking(vid)
            solution.insert_vertex_after(chosen_move.after_node, vid)
            # The modified route invalidates its cached insertion costs.
            self._move_cache.invalidate_route(solution[target_route], target_route)

    def name(self) -> str:
        return "BestInsertionOperator"

    def can_apply_to(self, solution: routingblocks.Solution) -> bool:
        return True
from __future__ import annotations
from bisect import bisect_left
from typing import List, Callable, Iterable, Optional, Tuple, Union, Protocol
import routingblocks
class SeedSelector(Protocol):
    """
    Selects a seed vertex from a solution.

    Implementations are expected to raise ``StopIteration`` when no eligible
    seed remains; ``ClusterRemovalOperator`` catches this to stop early.
    """

    def __call__(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
                 already_selected_vertices: List[routingblocks.NodeLocation]) -> routingblocks.NodeLocation:
        """
        :param evaluation: The evaluation to use
        :param solution: The solution to select from
        :param already_selected_vertices: A list of vertices that have already been selected, i.e., should not be included in the selection
        :return: The location of the selected seed vertex within the solution
        """
        ...
class ClusterMemberSelector(Protocol):
    """
    Selects a cluster of vertices based on a seed vertex.
    """

    def __call__(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
                 seed: routingblocks.NodeLocation) -> List[routingblocks.NodeLocation]:
        """
        :param evaluation: The evaluation to use
        :param solution: The solution to select from
        :param seed: The seed vertex
        :return: The locations of the vertices forming the cluster around the seed
        """
        ...
class ClusterRemovalOperator(routingblocks.DestroyOperator):
    """
    Generic destroy operator that removes clusters of vertices from a solution.

    It repeatedly picks a seed vertex and then a cluster of vertices around it,
    until the requested number of vertices is collected or no further seed can
    be found, and finally removes all collected vertices. Seed vertices
    themselves are only removed when the cluster member selector also returns
    them.

    Seed and cluster selection are delegated to the
    :py:class:`routingblocks.operators.SeedSelector` and
    :py:class:`routingblocks.operators.ClusterMemberSelector`, which makes the
    operator adaptable to different use cases.
    """

    def __init__(self, seed_selector: SeedSelector, cluster_member_selector: ClusterMemberSelector):
        """
        :param seed_selector: The seed selector
        :param cluster_member_selector: The cluster member selector
        """
        # Important: Do not use super()!
        routingblocks.DestroyOperator.__init__(self)
        self._seed_selector = seed_selector
        self._cluster_member_selector = cluster_member_selector

    def can_apply_to(self, _solution: routingblocks.Solution) -> bool:
        return len(_solution) > 0

    def apply(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
              number_of_removed_vertices: int) -> List[int]:
        selected: List[routingblocks.NodeLocation] = []
        while len(solution) > 0 and len(selected) < number_of_removed_vertices:
            try:
                seed = self._seed_selector(evaluation, solution, selected)
            except StopIteration:
                # No further eligible seeds exist in the solution: stop early.
                break
            for member in self._cluster_member_selector(evaluation, solution, seed):
                selected.append(member)
                if len(selected) == number_of_removed_vertices:
                    break
        # Resolve vertex ids before removal invalidates the locations.
        removed_vertex_ids = [solution.lookup(location).vertex_id for location in selected]
        solution.remove_vertices(selected)
        return removed_vertex_ids

    def name(self) -> str:
        return "ClusterRemovalOperator"
class DistanceBasedClusterMemberSelector:
    """
    Clusters vertices according to their distance to the seed vertex.

    On each call a cutoff radius is drawn (see ``_pick_distance``) and every
    vertex whose precomputed distance to the seed lies below that radius is
    selected.
    """

    class DistanceListItem:
        # Pairs a vertex with its precomputed distance to a reference vertex.
        # Items order by distance, so sorted lists of them can be bisected.
        def __init__(self, vertex: routingblocks.Vertex, distance: float):
            self.vertex = vertex
            self.distance = distance

        def __lt__(self, other: Union[DistanceBasedClusterMemberSelector.DistanceListItem, float]):
            # Comparable against both items and raw floats, which lets
            # bisect_left search a sorted item list with a float cutoff key.
            if isinstance(other, DistanceBasedClusterMemberSelector.DistanceListItem):
                return self.distance < other.distance
            else:
                return self.distance < other

    def __init__(self, vertices: List[routingblocks.Vertex],
                 get_distance: Callable[[routingblocks.Vertex, routingblocks.Vertex], float],
                 min_radius_factor: float = 1.0, max_radius_factor: float = 1.0,
                 randgen: Optional[routingblocks.Randgen] = None):
        """
        :param vertices: The vertices in the instance
        :param get_distance: A distance function that takes two vertices and returns their distance to each other
        :param min_radius_factor: The minimum radius of the cluster as a factor of the maximum distance between any two vertices
        :param max_radius_factor: The maximum radius of the cluster as a factor of the maximum distance between any two vertices
        :param randgen: A random number generator
        """
        self._min_radius_factor = min_radius_factor
        self._max_radius_factor = max_radius_factor
        # NOTE(review): both factors default to 1.0, so this "exactly one is
        # None" check only fires when a caller explicitly passes None for one
        # of them — confirm that this is the intended validation.
        if (self._min_radius_factor is not None) + (self._max_radius_factor is not None) == 1:
            raise ValueError("Either both or none of min_radius_factor and max_radius_factor must be set.")
        self._randgen = randgen
        # Cache max distance and radii
        self._max_distance = 0.
        # _distance_list[v] holds all vertices sorted by ascending distance to
        # the vertex with id v; the list is indexed by vertex id, leaving
        # empty slots for ids not present in ``vertices``.
        self._distance_list: List[List[DistanceBasedClusterMemberSelector.DistanceListItem]] = [
            [] for _ in range(0, max(x.vertex_id for x in vertices) + 1)
        ]
        for i in vertices:
            for j in vertices:
                distance = get_distance(i, j)
                self._distance_list[i.vertex_id].append(
                    DistanceBasedClusterMemberSelector.DistanceListItem(j, distance))
                # Track the overall maximum pairwise distance for radius scaling.
                self._max_distance = max(distance, self._max_distance)
            self._distance_list[i.vertex_id].sort()

    def _pick_distance(self):
        # Degenerate interval: the radius is deterministic.
        if self._min_radius_factor == self._max_radius_factor:
            return self._min_radius_factor * self._max_distance
        # Without a generator no sampling is possible; fall back to the
        # maximum distance (i.e., the factors are effectively ignored).
        if self._randgen is None:
            return self._max_distance
        return self._randgen.uniform(self._min_radius_factor, self._max_radius_factor) * self._max_distance

    def _select_vertices(self, seed_vertex: routingblocks.Vertex):
        """Return all vertices within the sampled radius of *seed_vertex*."""
        closest_vertices = self._distance_list[seed_vertex.vertex_id]
        distance_cutoff = self._pick_distance()
        # The list is sorted by distance, so a binary search finds the cutoff.
        cutoff_idx = bisect_left(closest_vertices, distance_cutoff)
        return [closest_vertices[idx].vertex for idx in range(cutoff_idx)]

    def __call__(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
                 seed_vertex: routingblocks.NodeLocation) -> List[routingblocks.NodeLocation]:
        selected_vertices = self._select_vertices(solution.lookup(seed_vertex).vertex)
        # A vertex may occur at several locations; return every occurrence.
        return [x for vertex in selected_vertices for x in solution.find(vertex.vertex_id)]
from typing import Iterable, TypeVar, Protocol, Generic
from collections.abc import Sequence
import routingblocks
T = TypeVar('T')
class MoveSelector(Protocol[T]):
    """
    A move selector selects a move from a sequence of moves.

    The concrete selectors in this module assert that the sequence is
    non-empty; callers should not invoke a selector with zero moves.
    """

    def __call__(self, moves: Iterable[T]) -> T:
        """
        Selects a move from the sequence of moves.

        :param moves: The sequence of moves. May be a lazy iterable.
        :return: The selected move.
        """
        ...
def first_move_selector(moves: Iterable[T]) -> T:
    """
    Selects the first move of the sequence.

    :param moves: The sequence of moves.
    :return: The first move in the sequence.
    """
    iterator = iter(moves)
    selected = next(iterator, None)
    assert selected is not None, "Unable to select a move from an empty sequence"
    return selected
def last_move_selector(moves: Iterable[T]) -> T:
    """
    Selects the last move of the sequence.

    :param moves: The sequence of moves.
    :return: The last move in the sequence.
    """
    # Sequences support direct indexing; no need to iterate.
    if isinstance(moves, Sequence):
        return moves[-1]
    selected = None
    # Lazy iterables have to be exhausted to reach the final element.
    for selected in moves:
        pass
    assert selected is not None, "Unable to select a move from an empty sequence"
    return selected
def nth_move_selector_factory(n: int) -> MoveSelector[T]:
    """
    Creates a move selector which picks the n-th move of the sequence.

    :param n: The (1-based) index of the move to select.
    :return: A move selector which selects the nth move in the sequence.
    """
    assert n > 0

    def select(moves: Iterable[T]) -> T:
        # Sequences support direct indexing.
        if isinstance(moves, Sequence):
            return moves[n - 1]
        countdown = n
        selected = None
        # For lazy iterables, walk forward n steps; if the iterable has fewer
        # than n moves, the last one encountered is returned.
        for selected in moves:
            countdown -= 1
            if countdown == 0:
                break
        assert selected is not None, "Unable to select a move from an empty sequence"
        return selected

    return select
def blink_selector_factory(blink_probability: float, randgen: routingblocks.Random) -> MoveSelector[T]:
    """
    Creates a selector that "blinks" past moves: the first move is chosen with
    probability :math:`(1-p)`, the second with :math:`(1-p)^2`, and so on,
    where :math:`p` is the blink probability.

    :param blink_probability: The probability of blinking (skipping a move).
    :param randgen: The random number generator.
    :return: The configured move selector.
    """
    assert 0 <= blink_probability <= 1

    def select(moves: Iterable[T]) -> T:
        selected = None
        for selected in moves:
            # Accept the current move unless we "blink" past it.
            if randgen.uniform(0, 1) > blink_probability:
                break
        assert selected is not None, "Unable to select a move from an empty sequence"
        return selected

    return select
def random_selector_factory(rangen: routingblocks.Random):
    """
    Creates a move selector which picks a uniformly random move.

    :param rangen: The random number generator.
    :return: The configured move selector.
    """
    def select(moves: Iterable[T]) -> T:
        # TODO Improve
        # Materialize lazy iterables so they can be indexed into.
        if not isinstance(moves, Sequence):
            moves = list(moves)
        chosen_index = rangen.randint(0, len(moves) - 1)
        return moves[chosen_index]

    return select
import routingblocks
from typing import Iterable, List
from .move_selectors import MoveSelector
class WorstRemovalOperator(routingblocks.DestroyOperator):
    """
    Removes vertices one at a time according to the benefit of removing them,
    i.e., the change in solution cost with and without the vertex.

    A :ref:`routingblocks.operators.MoveSelector[routingblocks.RemovalMove]`
    picks the next removal from :ref:`routingblocks.RemovalMove` objects
    ordered by cost improvement, so strategies such as removing the most
    improving vertex, the least improving one, or randomized variants can be
    plugged in without changing the operator itself.
    """

    def __init__(self, instance: routingblocks.Instance, move_selector: MoveSelector[routingblocks.RemovalMove]):
        """
        :param instance: The problem instance
        :param routingblocks.operators.MoveSelector[routingblocks.RemovalMove] move_selector: The move selector used to choose the next vertex to remove
        """
        routingblocks.DestroyOperator.__init__(self)
        self._instance = instance
        # Caches removal moves ordered by cost improvement between removals.
        self._move_cache = routingblocks.RemovalCache(self._instance)
        # Exposed
        self.move_selector = move_selector

    def name(self) -> str:
        return "WorstRemovalOperator"

    def can_apply_to(self, _solution: routingblocks.Solution) -> bool:
        return len(_solution) > 0

    def apply(self, evaluation: routingblocks.Evaluation, _solution: routingblocks.Solution,
              number_of_removed_vertices: int) -> List[int]:
        self._move_cache.rebuild(evaluation, _solution)
        removed_vertices = []
        while len(removed_vertices) < number_of_removed_vertices:
            # Pick the next vertex to remove from the ordered candidates.
            chosen_move = self.move_selector(self._move_cache.moves_in_order)
            _solution.remove_vertex(chosen_move.node_location)
            # The modified route invalidates its cached removal moves.
            route_index = chosen_move.node_location.route
            self._move_cache.invalidate_route(_solution[route_index], route_index)
            removed_vertices.append(chosen_move.vertex_id)
        return removed_vertices
from __future__ import annotations
from typing import List, Callable, Set, Tuple
from .move_selectors import MoveSelector
from dataclasses import dataclass
import routingblocks
@dataclass(frozen=True)
class RelatedVertexRemovalMove:
    """
    A candidate removal: a vertex in the solution together with its relatedness
    to the current seed vertex.

    :ivar vertex_id: The id of the corresponding vertex.
    :ivar relatedness: The relatedness of the vertex to the seed vertex.
    :ivar location: The location of the vertex in the solution.
    """
    vertex_id: int
    relatedness: float
    location: routingblocks.NodeLocation

    # Identity deliberately ignores ``relatedness``: two moves are equal when
    # they refer to the same vertex at the same route position. The
    # dataclass-generated __eq__ is not used because __eq__ is defined here.
    def __hash__(self):
        return hash((self.vertex_id, self.location.route, self.location.position))

    def __eq__(self, other):
        return self.vertex_id == other.vertex_id and \
            self.location.route == other.location.route and \
            self.location.position == other.location.position
def build_relatedness_matrix(instance: routingblocks.Instance,
                             relatedness_computer: Callable[[int, int], float]) -> List[List[float]]:
    """
    Builds a relatedness matrix for the given instance and relatedness function.

    The diagonal (a vertex's relatedness to itself) is left at 0.

    :param instance: The instance.
    :param relatedness_computer: A function that computes the relatedness between two vertices. Takes as input the ids of the two vertices and returns a number that measures the degree of relatedness.
    :return: A matrix of relatedness values.
    """
    n = instance.number_of_vertices
    matrix = [[0] * n for _ in range(n)]
    for i in range(n):
        for j in range(n):
            if i != j:
                matrix[i][j] = relatedness_computer(i, j)
    return matrix
class RelatedRemovalOperator(routingblocks.DestroyOperator):
    """
    Removes related vertices from the solution. The operator first selects an initial seed vertex.
    Then, it selects the n most related vertices to the current seed vertex and adds them to the list of removed vertices.
    It then selects the next seed vertex from the list of removed vertices and repeats the process until the desired number of vertices has been selected.
    Finally, the operator removes the selected vertices from the solution.

    The operator determines related vertices by using a relatedness matrix passed to the constructor.
    This matrix contains a number that measures the degree of the relatedness between each pair of vertices.
    The higher the number, the more related the vertices are.

    (Initial) seed and related vertex selection is done using move selectors.
    """

    def __init__(self, relatedness_matrix: List[List[float]],
                 move_selector: MoveSelector[RelatedVertexRemovalMove],
                 seed_selector: MoveSelector[RelatedVertexRemovalMove],
                 initial_seed_selector: MoveSelector[routingblocks.Node],
                 cluster_size: int = 1):
        """
        :param relatedness_matrix: The relatedness matrix. See :py:func:`build_relatedness_matrix` for a way to build such a matrix.
        :param move_selector: The move selector to use for selecting the vertex to remove. Receives a list of related vertices, ordered by the degree of relatedness in descending order.
        :param seed_selector: The move selector to use for selecting the seed vertex.
        :param initial_seed_selector: The move selector to use for selecting the initial seed vertex.
        :param cluster_size: The number of related vertices to remove for each seed.
        """
        # Important: Do not use super()!
        routingblocks.DestroyOperator.__init__(self)
        self._relatedness_matrix = relatedness_matrix
        self._move_selector = move_selector
        self._seed_selector = seed_selector
        self._initial_seed_selector = initial_seed_selector
        self._cluster_size = cluster_size
        # Per-call cache of (location, node) pairs for every non-depot node;
        # populated at the start of apply() and cleared before it returns.
        self._nodes_in_solution: List[Tuple[routingblocks.NodeLocation, routingblocks.Node]] = []

    def can_apply_to(self, _solution: routingblocks.Solution) -> bool:
        # Requires at least one route/vertex to remove from.
        return len(_solution) > 0

    def _get_sorted_related_vertices(self, related_vertices: List[float],
                                     removed_vertices: Set[RelatedVertexRemovalMove]):
        """Builds candidate moves for all cached nodes not selected yet,
        sorted by descending relatedness to the current seed.

        ``related_vertices`` is the seed's row of the relatedness matrix,
        indexed by vertex id.
        """
        related_vertices_in_solution = []
        # Iterate over solution and add entry for each node
        for node_location, node in self._nodes_in_solution:
            vertex_id = node.vertex_id
            candidate = RelatedVertexRemovalMove(vertex_id, related_vertices[vertex_id], node_location)
            if candidate not in removed_vertices:
                related_vertices_in_solution.append(candidate)
        # Sort by relatedness
        related_vertices_in_solution.sort(key=lambda x: x.relatedness, reverse=True)
        return related_vertices_in_solution

    def _remove_seed_and_related(self, solution: routingblocks.Solution,
                                 removed_vertices: List[RelatedVertexRemovalMove], num_vertices_to_remove: int):
        """Picks a seed from the already-selected moves and appends up to
        ``num_vertices_to_remove`` of its most related candidates in-place."""
        # Pick random node from already removed vertices
        seed_move = self._seed_selector(removed_vertices)
        seed_move_id = seed_move.vertex_id
        # seed_node_id = next(itertools.islice(removed_vertices, self._randgen.randint(0, len(removed_vertices) - 1), None)).vertex_id
        # Get related vertices
        related_vertices = self._get_sorted_related_vertices(self._relatedness_matrix[seed_move_id], removed_vertices)
        # Select related vertices to remove
        for _ in range(num_vertices_to_remove):
            next_vertex = self._move_selector(related_vertices)
            removed_vertices.append(next_vertex)
            related_vertices.remove(next_vertex)
        # Convenience return, actually modifies in-place
        return removed_vertices

    def _select_initial_seed(self, _solution: routingblocks.Solution) -> routingblocks.NodeLocation:
        # Offers every cached node location to the initial seed selector.
        return self._initial_seed_selector(x[0] for x in self._nodes_in_solution)

    def _cache_nodes_in_solution(self, solution: routingblocks.Solution):
        """Snapshots (location, node) pairs of all non-depot nodes."""
        for node_location in solution.non_depot_nodes:
            self._nodes_in_solution.append((node_location, solution[node_location.route][node_location.position]))

    def apply(self, evaluation: routingblocks.Evaluation, _solution: routingblocks.Solution,
              number_of_removed_vertices: int) -> List[
        int]:
        """Selects ``number_of_removed_vertices`` related vertices, removes
        them from the solution, and returns their vertex ids."""
        # Cache nodes in the solution with their locations
        self._cache_nodes_in_solution(_solution)
        # Select seed node
        initial_seed_location = self._select_initial_seed(_solution)
        # seed_node_location = routingblocks.sample_locations(_solution, self._randgen, 1, False)[0]
        seed_node = _solution[initial_seed_location.route][initial_seed_location.position]
        # Initialize removed vertices
        # The initial seed counts towards (and is part of) the removed set.
        removed_vertices = [RelatedVertexRemovalMove(seed_node.vertex_id, 1.0, initial_seed_location)]
        # Remove related
        while len(removed_vertices) < number_of_removed_vertices:
            num_vertices_to_remove = min(number_of_removed_vertices - len(removed_vertices), self._cluster_size)
            removed_vertices = self._remove_seed_and_related(_solution, removed_vertices,
                                                             num_vertices_to_remove)
        # Remove vertices
        _solution.remove_vertices([move.location for move in removed_vertices])
        # Clear the cache
        self._nodes_in_solution.clear()
        return [move.vertex_id for move in removed_vertices]

    def name(self) -> str:
        return self.__class__.__name__
from __future__ import annotations
from typing import List, Callable, Iterable, Optional, Tuple, Union
import routingblocks
from .cluster_removal import ClusterRemovalOperator, DistanceBasedClusterMemberSelector
class StationSeedSelector:
    """Picks a random station location from the solution as the next seed."""

    def __init__(self, stations: List[routingblocks.Vertex], randgen: Optional[routingblocks.Randgen] = None):
        # NOTE(review): randgen is typed Optional but is required for seed
        # selection below — confirm callers always provide a generator.
        self._stations = stations
        self._randgen = randgen

    def _get_station_locations(self, solution: routingblocks.Solution):
        """All locations at which any known station occurs in the solution."""
        locations = []
        for station in self._stations:
            locations.extend(solution.find(station.vertex_id))
        return locations

    def __call__(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
                 removed_vertices: List[routingblocks.NodeLocation]) -> routingblocks.NodeLocation:
        # Candidate stations are those still present and not selected before.
        candidates = [location for location in self._get_station_locations(solution)
                      if location not in removed_vertices]
        if not candidates:
            # Signals the cluster removal driver that no eligible seeds remain.
            raise StopIteration
        return candidates[self._randgen.randint(0, len(candidates) - 1)]
class StationVicinityRemovalOperator(routingblocks.DestroyOperator):
    """
    Specialized cluster removal aimed at reordering customer visits around
    recharging stations. It draws a random radius as a fraction of the maximum
    pairwise vertex distance, picks a random recharging station, and removes
    the station together with the vertices inside the radius, repeating until
    the requested number of vertices has been removed.
    """

    def __init__(self, instance: routingblocks.Instance,
                 get_distance: Callable[[routingblocks.Vertex, routingblocks.Vertex], float],
                 min_radius_factor: float, max_radius_factor: float,
                 randgen: Optional[routingblocks.Randgen]):
        """
        :param instance: Instance the operator will be applied to
        :param get_distance: A function taking two vertices and returning the distance between them. The distance can be arbitrary, i.e., does not have to correspond to the evaluation function.
        :param min_radius_factor: Minimum of the interval the radius is picked from
        :param max_radius_factor: Maximum of the interval the radius is picked from
        :param randgen: Random number generator
        """
        routingblocks.DestroyOperator.__init__(self)
        # Compose the generic cluster removal from station seeds and a
        # distance-based vicinity selector over stations and customers.
        member_selector = DistanceBasedClusterMemberSelector(
            vertices=[*instance.stations, *instance.customers],
            get_distance=get_distance,
            min_radius_factor=min_radius_factor,
            max_radius_factor=max_radius_factor,
            randgen=randgen)
        self._cluster_removal_operator = ClusterRemovalOperator(
            seed_selector=StationSeedSelector(list(instance.stations), randgen),
            cluster_member_selector=member_selector)

    def can_apply_to(self, solution: routingblocks.Solution) -> bool:
        """
        Station vicinity removal can be applied to any solution that contains at least one station.
        """
        for route in solution.routes:
            for node in route:
                if node.vertex.is_station:
                    return True
        return False

    def name(self) -> str:
        return "StationVicinityRemovalOperator"

    def apply(self, evaluation: routingblocks.Evaluation, solution: routingblocks.Solution,
              number_of_removed_vertices: int) -> List[int]:
        # All the heavy lifting is delegated to the composed cluster removal.
        return self._cluster_removal_operator.apply(evaluation, solution, number_of_removed_vertices)
# routor





Simple routing engine for OpenStreetMaps with easy to customize profiles/weight-functions.
## Requirements
* Python 3.6.1 or newer
## Installation
```sh
pip install routor
```
## Usage
### CLI
The CLI offers multiple commands, use `routor --help` to find out more.
#### Download map
Downloads a compatible map from OSM, eg.
```sh
routor download "Bristol, England" ./bristol.graphml
```
By default it only adds a handful of tags ([nodes](https://github.com/gboeing/osmnx/blob/77b2535776b4397ae0deda402398609b3a4694a6/osmnx/settings.py#L5), [edge](https://github.com/gboeing/osmnx/blob/77b2535776b4397ae0deda402398609b3a4694a6/osmnx/settings.py#L49)) to the graph.
Use `-n` or `-e` to add other available tags ([edge](https://github.com/gboeing/osmnx/blob/77b2535776b4397ae0deda402398609b3a4694a6/osmnx/settings.py#L29), [node](https://github.com/gboeing/osmnx/blob/77b2535776b4397ae0deda402398609b3a4694a6/osmnx/settings.py#L28)) as well.
Additionally, you can download multiple regions at once:
```sh
routor download -n junction -n traffic_signals -e surface -e lanes "Bristol, England" "Somerset, England" ./bristol_somerset.graphml
```
By default, each downloaded map is enhanced with
* `street_count` - how many physical segments are connected to a node
* `bearing` - angle of each edge
* `speed_kph` - free-flow travel speed based on `maxspeed`, fallback is set to `30` kph (see [osmnx](https://osmnx.readthedocs.io/en/stable/osmnx.html#osmnx.speed.add_edge_speeds) for more information)
* `travel_time` - Travel time based on `speed_kph` and `length`
If you provide a [Google API key](https://developers.google.com/maps/documentation/javascript/get-api-key) (using `--api-key`), the following additional attributes are available:
* `elevation` - elevation above sea level
* `grade`/`grade_abs` - grade of an edge
#### Calculate route
Determine the optimal route between two points using the given weight function and print the route as `JSON` to `stdout`.
```sh
routor route -- ./bristol.graphml "51.47967237816338,-2.6174926757812496" "51.45422084861252,-2.564105987548828" "routor.weights.length"
```
### Web API
#### Configuration
The configuration is either read from a `.env` file or the environment.
Before you are able to run the server, you have to set the variables mentioned in [routor/api/config.py](routor/api/config.py).
#### Run the API
The api is served using [uvicorn](https://www.uvicorn.org/).
To start the server run
```sh
uvicorn routor.api.main:app
```
The API will be available at http://127.0.0.1:8000 and the docs at http://127.0.0.1:8000/docs.
### As library
You can also use the engine as a library.
To calculate a route from A to B you can do
```python
from pathlib import Path
from routor.engine import Engine
from routor import models, weights
...
map_path = Path(...)
engine = Engine(map_path)
origin = models.Location(latitude=51.47967237816338, longitude=-2.6174926757812496)
destination = models.Location(latitude=51.45422084861252, longitude=-2.564105987548828)
route = engine.route(origin, destination, weight_func=weights.length, travel_time_func=weights.travel_time) # shortest distance
```
## Available weight-functions
### `"length"` / `routor.weights.length`
Calculates the shortest path from A to B, only the length of an edge is taken into account.
### `"travel_time"` / `routor.weight.travel_time`
Calculates the fastest route based on [travel time](https://osmnx.readthedocs.io/en/stable/osmnx.html#osmnx.speed.add_edge_travel_times).
## Plugins
`routor` implements a simple plugin mechanism.
Simply create a new module with the prefix `routor_`, make it available (install it, `sys.path` hack or similar) and it will be automatically discovered and loaded.
Depending on how you structure your module/plugin, you have to do the registration of the additional functionality in either `routor_YOUR_MODULE/__init__.py` or `routor_YOUR_MODULE.py`.
### Register a new weight function
Existing weight functions are defined in [routor/weights.py](routor/weights.py) and can be taken as reference.
To register a new function in your plugin, you have to implement something similar to
```python
# __init__.py
from typing import Optional
from routor.weights import register
from routor import models
def my_weight_func(prev_edge: Optional[models.Edge], edge: models.Edge) -> float:
...
return ...
register(my_weight_func, "weight_func")
```
## Development
This project uses [poetry](https://poetry.eustace.io/) for packaging and
managing all dependencies and [pre-commit](https://pre-commit.com/) to run
[flake8](http://flake8.pycqa.org/), [isort](https://pycqa.github.io/isort/),
[mypy](http://mypy-lang.org/) and [black](https://github.com/python/black).
Additionally, [pdbpp](https://github.com/pdbpp/pdbpp) and [better-exceptions](https://github.com/qix-/better-exceptions) are installed to provide a better debugging experience.
To enable `better-exceptions` you have to run `export BETTER_EXCEPTIONS=1` in your current session/terminal.
Clone this repository and run
```bash
poetry install
poetry run pre-commit install
```
to create a virtual environment containing all dependencies.
Afterwards, you can run the test suite using
```bash
poetry run pytest
```
This repository follows the [Conventional Commits](https://www.conventionalcommits.org/)
style.
### Cookiecutter template
This project was created using [cruft](https://github.com/cruft/cruft) and the
[cookiecutter-pyproject](https://github.com/escaped/cookiecutter-pypackage) template.
In order to update this repository to the latest template version run
```sh
cruft update
```
in the root of this repository.
| /routor-0.7.1.tar.gz/routor-0.7.1/README.md | 0.831177 | 0.888662 | README.md | pypi |
import logging
from collections import defaultdict
class UnionFind:
    """
    Disjoint-set (union-find) structure with union by rank and path
    compression.

    Amortised near-O(1) find: determine which set a node belongs to.
    Amortised near-O(1) union: merge the sets containing two nodes.
    """

    def __init__(self, nodes):
        """
        :param nodes: iterable of hashable node identifiers
        """
        # parent[i] is the parent of node i; initially every node is its own
        # parent, i.e. every node starts as the root of a singleton set.
        self.parent = {node: node for node in nodes}
        # rank[i] is an upper bound on the height of the tree rooted at i.
        # Hanging the lower-rank root under the higher-rank one keeps the
        # trees shallow.
        self.rank = {node: 0 for node in self.parent}

    def find(self, node: int) -> int:
        """
        Return the representative (root) of the set containing *node*,
        compressing the path so subsequent finds are O(1).

        :param node: node whose set representative is wanted
        :return: the root of the set containing *node*
        """
        # Locate the root iteratively (the previous recursive version could
        # hit Python's recursion limit on long, uncompressed chains).
        root = node
        while self.parent[root] != root:
            root = self.parent[root]
        # Path compression: point every traversed node directly at the root.
        while self.parent[node] != root:
            self.parent[node], node = root, self.parent[node]
        return root

    def union(self, node_1: int, node_2: int):
        """
        Merge the sets containing *node_1* and *node_2*.

        :param node_1: any member of the first set
        :param node_2: any member of the second set
        :return: the root that lost its root status (was absorbed), or
                 None (with a warning) if both nodes already share a set
        """
        node_1, node_2 = self.find(node_1), self.find(node_2)
        if node_1 != node_2:
            # Ensure node_1 is the higher-rank root so the shallower tree
            # hangs beneath the deeper one.
            if self.rank[node_1] < self.rank[node_2]:
                node_1, node_2 = node_2, node_1
            self.parent[node_2] = node_1
            if self.rank[node_1] == self.rank[node_2]:
                self.rank[node_1] += 1
            return node_2
        else:
            logging.warning("Something went wrong in union find, attempting union of "
                            "nodes _already_ in the same set.")

    def disjoint_sets(self):
        """Return a dict mapping each root to the list of its set's members."""
        result = defaultdict(list)
        for key in self.parent:
            result[self.find(key)].append(key)
        return result

    def non_roots(self):
        """Return {node: parent} for every node that is not its own parent."""
        return {
            k: v for k, v in self.parent.items() if k != v
        }

    def roots(self):
        """Return {node: node} for every node that is currently a root."""
        return {
            k: v for k, v in self.parent.items() if k == v
        }

    def supernode_size(self):
        """
        Yield the size of every disjoint set (supernode), singletons included.

        Bug fix: the previous implementation keyed its buckets on the raw
        parent pointers of non-root nodes, which raised KeyError for any
        singleton root and miscounted when paths were not yet compressed.
        Grouping by find(k) is always correct.
        """
        sizes = defaultdict(int)
        for k in self.parent:
            sizes[self.find(k)] += 1
        return (size for size in sizes.values())
import time
from contextlib import contextmanager
from collections import defaultdict
from sqlalchemy.orm import Session
from roux.algo.geneweaver.kargers.types import AdjGraph, AdjCallable, GsidSet, EdgeList
from roux.algo.geneweaver.kargers.utils.transform import query_result_to_set
# When True, the query helpers below print wall-clock timings to stdout.
TRACK_TIME = True
def get_all_genesets_by_tier(db: Session, tier: int) -> GsidSet:
    """
    Fetch the ids of every non-deleted geneset at curation tier *tier*.

    :param db: active SQLAlchemy session
    :param tier: curation tier (cur_id) to select
    :return: set of geneset ids
    """
    rows = db.execute("""SELECT gs_id
                          FROM production.geneset
                          WHERE gs_status NOT LIKE 'de%' AND cur_id = :tier;
                          """, {'tier': tier}).fetchall()
    return query_result_to_set(rows)
@contextmanager
def interested_genesets(db: Session, tier: int, limit=10000):
    """
    Context manager that materialises up to *limit* non-deleted genesets of
    the given curation tier into an indexed temp table.

    Yields the temp table name. Bug fix: the table is now dropped in a
    ``finally`` block, so it is cleaned up even when the managed body
    raises (previously an exception leaked the temp table into the
    session, breaking subsequent calls).

    :param db: active SQLAlchemy session
    :param tier: curation tier (cur_id) to select
    :param limit: maximum number of genesets to materialise
    """
    temp_table_name = 'temp_interested_genesets'
    db.execute("""
    CREATE TEMP TABLE temp_interested_genesets AS
    SELECT gs_id FROM production.geneset
    WHERE gs_status NOT LIKE 'de%' AND cur_id = :tier LIMIT :limit;
    """, {'tier': tier, 'limit': limit})
    # Index the temp table so joins against it stay cheap.
    db.execute("""
    CREATE INDEX ON temp_interested_genesets(gs_id);
    """)
    try:
        yield temp_table_name
    finally:
        # Always clean up, even if the managed block raised.
        db.execute("""
        DROP TABLE temp_interested_genesets;
        """)
def get_adjacency_simple(db: Session, gs_id: int) -> GsidSet:
    """
    Return every geneset sharing at least one gene with *gs_id*. No
    exclusions are applied, so the result normally contains *gs_id* itself
    (the self-join matches a geneset's own genes).

    :param db: active SQLAlchemy session
    :param gs_id: geneset whose neighbours are wanted
    :return: set of geneset ids
    """
    rows = db.execute("""SELECT DISTINCT gvv.gs_id
                          FROM production.geneset g
                          INNER JOIN extsrc.geneset_value gv ON g.gs_id = gv.gs_id
                          INNER JOIN extsrc.geneset_value gvv on gv.ode_gene_id = gvv.ode_gene_id
                          WHERE g.gs_id = :gs_id;
                          """, {'gs_id': gs_id}).fetchall()
    return query_result_to_set(rows)
def get_adjacency_full(db: Session, gs_id: int, genesets: GsidSet) -> GsidSet:
    """
    Return genesets adjacent to *gs_id* (sharing at least one gene),
    restricted to the ids in *genesets* and excluding *gs_id* itself.

    :param db: active SQLAlchemy session
    :param gs_id: geneset whose neighbours are wanted
    :param genesets: ids the result is restricted to
    :return: set of adjacent geneset ids
    """
    params = {'gs_id': gs_id, 'genesets': tuple(genesets)}
    query = db.execute("""SELECT DISTINCT gvv.gs_id
                          FROM production.geneset g
                          INNER JOIN extsrc.geneset_value gv ON g.gs_id = gv.gs_id
                          INNER JOIN extsrc.geneset_value gvv on gv.ode_gene_id = gvv.ode_gene_id
                          WHERE g.gs_id = :gs_id AND gvv.gs_id != g.gs_id AND gvv.gs_id IN :genesets;
                          """, params)
    return query_result_to_set(query.fetchall())
def get_adjacency_exclusive(db: Session, gs_id: int, genesets: GsidSet) -> GsidSet:
    """
    Same query as get_adjacency_full — genesets sharing at least one gene
    with *gs_id*, restricted to *genesets* and excluding *gs_id* — but
    prints wall-clock timing for each phase when TRACK_TIME is on.

    :param db: active SQLAlchemy session
    :param gs_id: geneset whose neighbours are wanted
    :param genesets: ids the result is restricted to
    :return: set of adjacent geneset ids
    """
    start = time.time() if TRACK_TIME else 0
    query = db.execute("""SELECT DISTINCT gvv.gs_id
                          FROM production.geneset g
                          INNER JOIN extsrc.geneset_value gv ON g.gs_id = gv.gs_id
                          INNER JOIN extsrc.geneset_value gvv on gv.ode_gene_id = gvv.ode_gene_id
                          WHERE g.gs_id = :gs_id AND gvv.gs_id != g.gs_id AND gvv.gs_id IN :genesets;
                          """, {'gs_id': gs_id, 'genesets': tuple(genesets)})
    if TRACK_TIME:
        print(f"{gs_id} executed in {time.time() - start} s")
    result = query.fetchall()
    if TRACK_TIME:
        print(f"{gs_id} retrieved in {time.time() - start} s")
    result = query_result_to_set(result)
    if TRACK_TIME:
        print(f"{gs_id} formatted in {time.time() - start} s")
    return result
def get_adjacency_exclusive_new(db: Session, tier: int = 2, limit: int = 100) -> AdjGraph:
    """
    Build the full adjacency graph for up to *limit* genesets of *tier* via a
    single server-side join, then page the resulting edge table back in
    chunks. Two genesets are adjacent when they share at least one gene;
    self-edges are excluded in the SQL.

    :param db: active SQLAlchemy session
    :param tier: curation tier to select genesets from
    :param limit: maximum number of genesets to include
    :return: mapping of geneset id -> set of adjacent geneset ids
    """
    start = time.time() if TRACK_TIME else 0
    with interested_genesets(db, tier, limit):
        if TRACK_TIME:
            print(f"Set up in {time.time() - start} s")
        # Materialise the whole edge relation server-side, then page it back.
        query = db.execute("""CREATE TEMP TABLE gs_graph AS
            SELECT DISTINCT igs.gs_id gs_id1, gvv.gs_id gs_id2
            FROM temp_interested_genesets AS igs
            JOIN extsrc.geneset_value gv ON igs.gs_id = gv.gs_id
            JOIN
            (SELECT igs.gs_id, ode_gene_id
            FROM temp_interested_genesets AS igs
            JOIN extsrc.geneset_value gsv
            ON igs.gs_id = gsv.gs_id) gvv
            ON gv.ode_gene_id = gvv.ode_gene_id
            WHERE igs.gs_id != gvv.gs_id;
            """)
        print(f"Query table set up in {time.time() - start} s")
        remaining = True
        q_limit = 500000  # page size when fetching edges back
        # Stupid index is off by 1
        q_offset = -1
        graph: AdjGraph = defaultdict(set)
        # NOTE(review): edge_list is filled below but never returned or used
        # afterwards — looks like dead code; confirm before removing.
        edge_list: EdgeList = []
        print("Starting fetch results calls")
        while remaining:
            if q_offset == -1:
                # First page: plain LIMIT, no OFFSET.
                query = db.execute("""SELECT gs_id1, gs_id2
                    FROM gs_graph
                    LIMIT :q_limit;
                    """, {'q_limit': q_limit})
            else:
                query = db.execute("""SELECT gs_id1, gs_id2
                    FROM gs_graph
                    LIMIT :q_limit OFFSET :q_offset;
                    """, {'q_limit': q_limit, 'q_offset': q_offset})
            if TRACK_TIME:
                print(f"executed in {time.time() - start} s")
            new_rows = query.fetchall()
            print("fetched rows")
            if len(new_rows) == 0:
                remaining = False
            for r in new_rows:
                graph[int(r[0])].add(r[1])
            edge_list.extend([(int(r[0]), int(r[1])) for r in new_rows])
            print("formatted rows")
            # NOTE(review): because q_offset starts at -1, the second page
            # begins at offset q_limit - 1 and re-reads the previous page's
            # last row; the set-based graph makes the duplicate harmless,
            # but confirm the overlap is intentional.
            q_offset += q_limit
            print(f"processed {q_offset + 1} rows so far ...")
            del query
        db.execute("DROP TABLE gs_graph;")
    if TRACK_TIME:
        print(f"formatted in {time.time() - start} s")
    return graph
def build_graph(db: Session,
                genesets: GsidSet,
                select_func: AdjCallable = get_adjacency_full) -> AdjGraph:
    """
    Build an adjacency graph over *genesets*, fetching the neighbours of
    each geneset with *select_func*.

    :param db: active SQLAlchemy session
    :param genesets: geneset ids to include as graph nodes
    :param select_func: callable fetching the neighbour set of one geneset
    :return: mapping of geneset id -> set of adjacent geneset ids
    """
    graph: AdjGraph = {}
    for gs_id in genesets:
        graph[gs_id] = select_func(db, gs_id, genesets)
    return graph
def build_graph_timed(db: Session,
                      genesets: GsidSet,
                      select_func: AdjCallable = get_adjacency_full) -> AdjGraph:
    """
    Same as build_graph, but prints per-node and total wall-clock timings
    while building.

    :param db: active SQLAlchemy session
    :param genesets: geneset ids to include as graph nodes
    :param select_func: callable fetching the neighbour set of one geneset
    :return: mapping of geneset id -> set of adjacent geneset ids
    """
    start_outer = time.time()
    print(f"Building graph with {len(genesets)} nodes.")
    graph: AdjGraph = {}
    for gs_id in genesets:
        start_inner = time.time()
        graph[gs_id] = select_func(db, gs_id, genesets)
        now = time.time()
        print(f"{gs_id} retrieved in {now - start_outer}s, {now - start_inner}s")
    # Bug fix: previously printed start_outer - time.time(), which is always
    # a negative duration; elapsed time is now - start.
    print(f"Total time: {time.time() - start_outer}")
    return graph
def graph_has_edge(to_node: int, graph: AdjGraph):
    """
    Return True iff any adjacency list in *graph* contains *to_node*,
    printing the first (source, edge, target) triple found.

    Bug fix: the previous ``while not found`` wrapper looped forever when
    *to_node* did not appear anywhere in the graph; a single scan with an
    early return terminates in all cases.

    :param to_node: node id to search for on the edge-target side
    :param graph: mapping of node -> iterable of neighbour ids
    :return: True if an edge into *to_node* exists, else False
    """
    for source, edges in graph.items():
        for edge in edges:
            if edge == to_node:
                print(source, edge, to_node)
                return True
    return False
def missing_nodes(graph: AdjGraph):
    """
    Return every node that appears as an edge target but has no adjacency
    entry of its own, printing each one as it is discovered.

    Improvements: the accumulator no longer shadows the function's own name,
    and membership checks use a set instead of repeated O(n) list scans.

    :param graph: mapping of node -> iterable of neighbour ids
    :return: list of missing node ids, in first-seen order
    """
    known = graph.keys()
    seen = set()
    result = []
    for source, edges in graph.items():
        for edge in edges:
            if edge not in known and edge not in seen:
                seen.add(edge)
                result.append(edge)
                print(f"Source {source} has edge to missing node {edge}")
    return result
from typing import Tuple, List, Dict
from roux.algo.geneweaver.kargers.types import GsidSet, AdjGraph, AdjGraphList, \
EdgeList, \
NodeList
def query_result_to_set(result) -> GsidSet:
    """
    Collapse a fetchall()-style result into a set of first-column values.

    :param result: iterable of row tuples
    :return: set of each row's first field
    """
    ids = set()
    for row in result:
        ids.add(row[0])
    return ids
def adj_graph_set_to_list(graph: AdjGraph) -> AdjGraphList:
    """
    Convert adjacency sets to adjacency lists (e.g. for serialisation).

    :param graph: mapping node -> set of neighbours
    :return: mapping node -> list of neighbours
    """
    converted = {}
    for node, neighbours in graph.items():
        converted[node] = list(neighbours)
    return converted
def adj_graph_list_to_set(graph: AdjGraphList) -> AdjGraph:
    """
    Inverse of adj_graph_set_to_list: coerce keys and neighbours to int and
    store each adjacency as a set.

    :param graph: mapping node -> list of neighbours (possibly strings)
    :return: mapping int node -> set of int neighbours
    """
    converted = {}
    for node, neighbours in graph.items():
        converted[int(node)] = set(int(n) for n in neighbours)
    return converted
def adj_graph_to_edge_list(graph: AdjGraph) -> Tuple[NodeList, EdgeList]:
    """
    Flatten an adjacency graph into (nodes, edges), coercing ids to int.

    :param graph: mapping node -> iterable of neighbours
    :return: tuple of (list of node ids, list of (source, dest) pairs)
    """
    nodes = [int(node) for node in graph.keys()]
    edges = []
    for source, neighbours in graph.items():
        for dest in neighbours:
            edges.append((int(source), int(dest)))
    return (nodes, edges)
def deduplicate_edge_list(edge_list: EdgeList) -> EdgeList:
    """
    Drop self-loops and duplicate edges, treating (a, b) and (b, a) as the
    same undirected edge. First occurrence wins; order is preserved.

    :param edge_list: list of (source, dest) tuples
    :return: deduplicated edge list
    """
    unique = []
    seen = set()
    for edge in edge_list:
        if edge[0] == edge[1]:
            continue  # self-loop
        if edge in seen or tuple(reversed(edge)) in seen:
            continue  # already kept in either orientation
        seen.add(edge)
        unique.append(edge)
    return unique
def union_find_to_geneset_list(roots: List[int],
                               non_roots: Dict[int, int]) -> List[List[int]]:
    """
    Group union-find output into member lists, one list per root; each list
    begins with its root.

    :param roots: all root node ids
    :param non_roots: mapping of non-root node -> its root
    :return: list of member lists
    """
    groups = {}
    for root in roots:
        groups[root] = [root]
    for member, root in non_roots.items():
        groups[root].append(member)
    return list(groups.values())
def split_adj_graph(graph: AdjGraph, node_sets: List[List[int]]) -> List[AdjGraph]:
    """
    Project *graph* onto each node set, producing one sub-graph per set.
    Adjacency sets are shared with (not copied from) the original graph.

    :param graph: mapping int node -> set of neighbours
    :param node_sets: groups of node ids (keys are kept as given; lookup
        coerces to int)
    :return: one sub-graph dict per node set
    """
    subgraphs = []
    for node_set in node_sets:
        subgraphs.append({node: graph[int(node)] for node in node_set})
    return subgraphs
def split_adj_graph_and_cut(graph: AdjGraph,
                            node_sets: List[List[int]],
                            cuts: List[Tuple[int, int]]) -> List[AdjGraph]:
    """
    Remove the cut edges from *graph* in place, then split it into one
    sub-graph per node set.

    :param graph: adjacency graph; mutated in place by the edge removal
    :param node_sets: partition of the graph's nodes
    :param cuts: edges to delete before splitting
    :return: list of sub-graphs, one per node set
    """
    remove_edges_adj_graph(graph, cuts)
    return split_adj_graph(graph, node_sets)
def remove_nodes(edges: EdgeList, node):
    """
    Return a new edge list with every edge touching *node* removed.

    :param edges: list of (source, dest) tuples (left untouched)
    :param node: node id to eliminate
    :return: filtered edge list
    """
    return [edge for edge in edges if edge[0] != node and edge[1] != node]
def remove_node_adj_graph(graph: AdjGraph, node):
    """
    Remove *node* from the graph in place: drop its own adjacency entry and
    scrub it from every other node's neighbour set.

    Bug fix: the previous version deleted a key from the dict while
    iterating ``graph.items()`` directly, which raises RuntimeError
    ("dictionary changed size during iteration") whenever *node* is
    actually present as a key. Iterating over a snapshot is safe.

    :param graph: mapping node -> set of neighbours; mutated in place
    :param node: node id to remove
    """
    for key, value_list in list(graph.items()):
        if key == node:
            del graph[key]
        else:
            graph[key] = {v for v in value_list if v != node}
def remove_edge_adj_graph(graph: AdjGraph, edge: Tuple[int, int]):
    """
    Delete an undirected edge from the graph in place, removing it from
    both endpoints' adjacency sets. Missing endpoints or already-absent
    edges are silently ignored.

    :param graph: mapping node -> set of neighbours; mutated in place
    :param edge: (a, b) pair to remove
    """
    a, b = edge[0], edge[1]
    for src, dst in ((a, b), (b, a)):
        try:
            graph[src].remove(dst)
        except KeyError:
            # Either src has no adjacency entry or dst wasn't listed; fine.
            pass
def remove_edges_adj_graph(graph: AdjGraph, edges: List[Tuple[int, int]]):
    """
    Delete every edge in *edges* from the graph in place (both directions,
    via remove_edge_adj_graph).

    :param graph: mapping node -> set of neighbours; mutated in place
    :param edges: list of (a, b) pairs to remove
    """
    for pair in edges:
        remove_edge_adj_graph(graph, pair)
def remove_nodes_adj_graph(graph: AdjGraph, nodes: List[int]):
    """
    Delete every node in *nodes* from the graph in place, and scrub them
    from all remaining adjacency sets.

    Raises KeyError if a node in *nodes* has no entry in *graph* (same as
    before).

    :param graph: mapping node -> set of neighbours; mutated in place
    :param nodes: node ids to remove
    """
    doomed = set(nodes)
    for node in nodes:
        del graph[node]
    for remaining in graph.keys():
        try:
            graph[remaining].difference_update(doomed)
        except KeyError:
            pass
import gym
from gym import error, spaces, utils
from gym.utils import seeding
import os
import pybullet as p
import pybullet_data
import math
import numpy as np
import random
import site
class RoverArmEnvGym(gym.Env):
    """
    Classic-Gym (4-tuple ``step``) PyBullet environment: a steerable,
    wheeled rover carrying an arm must grasp and lift a small object out of
    a tray sitting on a table.

    Action (Box, 6): [throttle, steering, dx, dy, dz, fingers], all in [-1, 1].
    Observation (Box, 14): rover xy position, heading as (cos, sin), rover
    xy velocity, end-effector xyz, two finger joint positions, object xyz.
    Reward: +1 when the object rises above z=0 (episode terminates), -1 for
    driving out of bounds, otherwise a small shaped term for moving the
    end-effector closer to the object.
    """

    def __init__(self, render_mode = 'rgb_array', maxSteps=10_000, isDiscrete=False, urdfRoot = pybullet_data.getDataPath(),
                 width = 480, height = 480, all_views = False):
        """
        :param render_mode: 'human' connects a GUI client; anything else
            runs headless (DIRECT) and render() returns RGB arrays
        :param maxSteps: episode step budget before truncation
        :param isDiscrete: stored but not otherwise used here
        :param urdfRoot: directory containing the pybullet_data assets
        :param width: render image width in pixels
        :param height: render image height in pixels
        :param all_views: if True, render() stacks a second camera view on
            the channel axis
        """
        self.metadata = {'render.modes': ['human' , 'rgb_array']}
        self.render_mode = render_mode
        self._isDiscrete = isDiscrete
        self._timeStep = 1. / 240.
        self._urdfRoot = urdfRoot
        self._maxSteps = maxSteps
        self._width = width
        self._height = height
        self._all_views = all_views
        if self.render_mode == 'human':
            # Reuse an existing shared-memory GUI server if one is running.
            cid = p.connect(p.SHARED_MEMORY)
            if (cid < 0):
                cid = p.connect(p.GUI)
            p.resetDebugVisualizerCamera(1.3, 180, -41, [0.52, -0.2, -0.33])
        else:
            p.connect(p.DIRECT)
        # Default debug/render camera pose.
        self._cam_dist = 3
        self._cam_yaw = -0.35
        self._cam_pitch = -35
        self._cam_target_p = [0.67, -0.35, 0.20]
        p.resetDebugVisualizerCamera(cameraDistance= self._cam_dist , cameraYaw= self._cam_yaw, cameraPitch= self._cam_pitch, cameraTargetPosition=self._cam_target_p)
        self.action_space = spaces.Box(np.array([-1]*6), np.array([1]*6))
        self.boundary = 5  # rover must stay within +/- boundary in x and y
        self.max_vel = 5
        self.observation_space = spaces.Box(np.array([-self.boundary, -self.boundary, -1, -1, -self.max_vel, -self.max_vel, -self.boundary, -self.boundary, -1, 0, 0 , -self.boundary, -self.boundary, -1]), np.array([self.boundary, self.boundary, 1, 1, self.max_vel, self.max_vel, self.boundary, self.boundary, 1, 0.07, 0.07, self.boundary, self.boundary, 1]))
        # Joint indices as found by p.getJointInfo()
        self.steering_joints = [0, 2]
        self.drive_joints = [1, 3, 4, 5]
        # Joint speed
        self.joint_speed = 0
        # Drag constants
        self.c_rolling = 0.3
        self.c_drag = 0.01
        # Throttle constant increases "speed" of the car
        self.c_throttle = 200
        self.MAX_SPEED = 20

    def reset(self, seed = None):
        """
        Rebuild the whole simulation and return the first observation.

        :param seed: optional RNG seed for both random and numpy.random
        :return: tuple of (float32 observation array, info dict)
        """
        random.seed(seed)
        np.random.seed(seed)
        self.step_counter = 0
        p.resetSimulation()
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING,0) # we will enable rendering after we loaded everything
        p.setGravity(0,0,-10)
        planeUid = p.loadURDF(os.path.join(self._urdfRoot,"plane.urdf"), basePosition=[0,0,-0.65])
        rest_poses = [0,-0.215,0,-2.57,0,2.356,2.356,0.08,0.08]
        # Spawn the rover to the left or right of the table at random.
        x_pos = np.random.choice([random.uniform(-1, -0.3), random.uniform(1.25,2)])
        y_pos = random.uniform(-1, 2.5)
        BASE_DIR = site.getsitepackages()[0] + "/rover_arm/data/"
        # BASE_DIR = "./rover_arm/data/"
        self.roverarmUid = p.loadURDF(BASE_DIR + "rover_arm.xml", basePosition=[ x_pos, y_pos ,-0.5])
        # Joints 7-13 are assumed to be the arm joints and 16/17 the two
        # fingers — TODO confirm against the URDF.
        for i in range(7,14):
            p.resetJointState(self.roverarmUid,i, rest_poses[i - 7])
        p.resetJointState(self.roverarmUid, 16, 0.07)
        p.resetJointState(self.roverarmUid, 17, 0.07)
        tableUid = p.loadURDF(os.path.join(self._urdfRoot, "table/table.urdf"),basePosition=[0.5,0,-0.65], globalScaling = 0.5)
        trayUid = p.loadURDF(os.path.join(self._urdfRoot, "tray/traybox.urdf"),basePosition=[0.45,0,-0.335], globalScaling = 0.5)
        state_object= [random.uniform(0.4, 0.5), random.uniform(-0.05, 0.05), -0.2]
        self.objectUid = p.loadURDF(os.path.join(self._urdfRoot, "random_urdfs/000/000.urdf"), basePosition=state_object, globalScaling = 0.8)
        pos, ang = p.getBasePositionAndOrientation(self.roverarmUid, 0)
        ang = p.getEulerFromQuaternion(ang)
        # Encode the heading as (cos, sin) of the yaw angle.
        ori = (math.cos(ang[2]), math.sin(ang[2]))
        pos = pos[:2]
        vel = p.getBaseVelocity(self.roverarmUid, 0)[0][0:2]
        state_rover = pos + ori + vel
        # Link 18 is assumed to be the end-effector — TODO confirm.
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid,16)[0], p.getJointState(self.roverarmUid, 17)[0])
        state_object, _ = p.getBasePositionAndOrientation(self.objectUid)
        self.observation = state_rover + state_arm + state_fingers + tuple(state_object)
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING,1)
        info = {'state_rover_pos': pos, 'state_rover_ang' : ang, 'state_rover_vel' : vel ,'state_arm' : state_arm, 'state_fingers': state_fingers, 'state_object': state_object}
        return (np.array(self.observation).astype(np.float32), info)

    def step(self, action):
        """
        Advance the simulation by one step.

        :param action: [throttle, steering, dx, dy, dz, fingers] in [-1, 1]
        :return: (observation, reward, done, info) — classic Gym 4-tuple
        """
        # p.configureDebugVisualizer(p.COV_ENABLE_SINGLE_STEP_RENDERING)
        # Fixed gripper orientation fed to the IK solver.
        orientation = p.getQuaternionFromEuler([0.,-math.pi,math.pi/2.])
        dv = 0.05  # per-step, per-axis end-effector displacement scale
        dx_a, dy_a, dz_a = [x * dv for x in action[2:5] ]
        fingers = action[5]
        # Map [-1, 1] to the finger joint range [0, 0.07].
        fingers = np.interp(fingers, [-1,1], [0, 0.07])
        currentPose = p.getLinkState(self.roverarmUid, 18)
        currentPosition = currentPose[0]
        newPosition = [currentPosition[0] + dx_a,
                       currentPosition[1] + dy_a,
                       currentPosition[2] + dz_a]
        jointPoses = p.calculateInverseKinematics(self.roverarmUid,18,newPosition, orientation)
        # IK yields targets for all movable joints; indices 6:13 are assumed
        # to correspond to the arm joints — TODO confirm against the URDF.
        jointPoses_rover, jointPoses_arm = jointPoses[:6], jointPoses[6:13]
        p.setJointMotorControlArray(self.roverarmUid, list(range(7,14))+[16,17], p.POSITION_CONTROL, list(jointPoses_arm)+2*[fingers])
        throttle, steering_angle = action[:2]
        # Clip throttle and steering angle to reasonable values
        throttle = min(max(throttle, -1), 1)
        steering_angle = np.interp(steering_angle, [-1,1], [-0.6, 0.6])
        # Set the steering joint positions
        p.setJointMotorControlArray(self.roverarmUid, self.steering_joints,
                                    controlMode=p.POSITION_CONTROL,
                                    targetPositions=[steering_angle] * 2)
        # Calculate drag / mechanical resistance ourselves
        # Using velocity control, as torque control requires precise models
        friction = -self.joint_speed * (self.joint_speed * self.c_drag +
                                        self.c_rolling)
        acceleration = self.c_throttle * throttle + friction
        # Each time step is 1/240 of a second
        # NOTE(review): the integration step used below is 1/30, not the
        # 1/240 the comment above suggests — confirm which is intended.
        self.joint_speed = self.joint_speed + 1 / 30 * acceleration
        self.joint_speed = min(max(self.joint_speed, -self.MAX_SPEED), self.MAX_SPEED)
        # Set the velocity of the wheel joints directly
        p.setJointMotorControlArray(
            bodyUniqueId=self.roverarmUid,
            jointIndices=self.drive_joints,
            controlMode=p.VELOCITY_CONTROL,
            targetVelocities=[self.joint_speed] * 4,
            forces=[10] * 4)
        state_object_prev, _ = p.getBasePositionAndOrientation(self.objectUid)
        p.stepSimulation()
        state_object, _ = p.getBasePositionAndOrientation(self.objectUid)
        pos, ang = p.getBasePositionAndOrientation(self.roverarmUid, 0)
        ang = p.getEulerFromQuaternion(ang)
        ori = (math.cos(ang[2]), math.sin(ang[2]))
        pos = pos[:2]
        vel = p.getBaseVelocity(self.roverarmUid, 0)[0][0:2]
        state_rover = pos + ori + vel
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid,16)[0], p.getJointState(self.roverarmUid, 17)[0])
        terminated , truncated = False, False
        if state_object[2] > 0:
            # Object lifted above z=0: success.
            reward = 1
            terminated = True
        else:
            # Shaped reward: positive when the end-effector's per-axis
            # distance to the object decreased across the physics step.
            x0, y0, z0 = np.abs(np.array(currentPosition) - np.array(state_object_prev))
            x1, y1, z1 = np.abs(np.array(state_arm) - np.array(state_object))
            reward = x0 - x1 + y0 - y1 + z0 - z1
            if abs(reward) > 1e-3:
                reward = reward/ 10
            else:
                reward = 0
        self.step_counter += 1
        def inGame(state_rover):
            # True while the rover's xy position stays inside the square
            # [-boundary, boundary]^2.
            rx, ry = state_rover[:2]
            inBound = rx > -self.boundary and rx < self.boundary
            inBound = inBound and ry > -self.boundary and ry < self.boundary
            return inBound
        if not inGame(state_rover):
            reward = -1
            terminated = True
        if self.step_counter > self._maxSteps:
            reward = 0
            truncated = True
        self.observation = state_rover + state_arm + state_fingers + tuple(state_object)
        info = {'state_rover_pos': pos, 'state_rover_ang' : ang, 'state_rover_vel' : vel ,'state_arm' : state_arm, 'state_fingers': state_fingers, 'state_object': state_object}
        done = terminated or truncated
        return np.array(self.observation).astype(np.float32), reward, done, info

    def render(self, mode = None, width = None, height = None):
        """
        Render the scene to an RGB array.

        :param mode: optionally override self.render_mode for this call
        :param width: image width (defaults to the constructor value)
        :param height: image height (defaults to the constructor value)
        :return: (H, W, 3) uint8 array; (H, W, 6) when all_views=True (two
            views stacked on the channel axis); None outside 'rgb_array' mode
        """
        # cam = p.getDebugVisualizerCamera()
        # xyz = cam[11]
        # x= float(xyz[0]) + 0.125
        # y = xyz[1]
        # z = xyz[2]
        # p.resetDebugVisualizerCamera(cameraYaw = cam[8], cameraPitch= cam[9],cameraDistance = cam[10],cameraTargetPosition=[x,y,z])
        if width == None or height == None:
            width = self._width
            height = self._height
        if mode != None:
            self.render_mode = mode
        if self.render_mode != 'rgb_array':
            return None
        view_matrix1 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=self._cam_target_p,
                                                           distance=self._cam_dist,
                                                           yaw=self._cam_yaw,
                                                           pitch=self._cam_pitch,
                                                           roll=0,
                                                           upAxisIndex=2)
        # Second, close-up view of the tray area.
        view_matrix2 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=[0.7,0,0.05],
                                                           distance=.7,
                                                           yaw=90,
                                                           pitch=-70,
                                                           roll=0,
                                                           upAxisIndex=2)
        proj_matrix = p.computeProjectionMatrixFOV(fov=60,
                                                   aspect=float(width) /height,
                                                   nearVal=0.1,
                                                   farVal=100.0)
        (_, _, px1, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix1,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        rgb_array1 = np.array(px1, dtype=np.uint8)
        rgb_array1 = np.reshape(rgb_array1, (height,width, 4))[:, :, :3]  # drop alpha
        if not self._all_views:
            return rgb_array1
        (_, _, px2, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix2,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        rgb_array2 = np.array(px2, dtype=np.uint8)
        rgb_array2 = np.reshape(rgb_array2, (height,width, 4))[:, :, :3]
        rgb_array = np.concatenate((rgb_array1 , rgb_array2), axis = 2)
        return rgb_array

    def _get_state(self):
        """Return the last observation tuple computed by reset()/step()."""
        return self.observation

    def close(self):
        """Disconnect from the PyBullet physics server."""
        p.disconnect()
import gymnasium as gym
from gymnasium import error, spaces, utils
from gymnasium.utils import seeding
import os
import pybullet as p
import pybullet_data
import math
import numpy as np
import random
import site
class RoverArmEnv(gym.Env):
    """
    Gymnasium (5-tuple ``step``) PyBullet environment: a steerable, wheeled
    rover carrying an arm must grasp and lift a small object out of a tray
    sitting on a table.

    Action (Box, 6): [throttle, steering, dx, dy, dz, fingers], all in [-1, 1].
    Observation (Box, 14): rover xy position, heading as (cos, sin), rover
    xy velocity, end-effector xyz, two finger joint positions, object xyz.
    Reward: +1 when the object rises above z=0 (episode terminates), -1 for
    driving out of bounds, otherwise a small shaped term for moving the
    end-effector closer to the object.

    NOTE(review): reset() seeds the global RNGs directly instead of calling
    super().reset(seed=seed) and accepts no ``options`` kwarg — confirm
    this is acceptable for strict Gymnasium API compliance.
    """

    def __init__(self, render_mode = 'rgb_array', maxSteps=10_000, isDiscrete=False, urdfRoot = pybullet_data.getDataPath(),
                 width = 480, height = 480, all_views = False):
        """
        :param render_mode: 'human' connects a GUI client; anything else
            runs headless (DIRECT) and render() returns RGB arrays
        :param maxSteps: episode step budget before truncation
        :param isDiscrete: stored but not otherwise used here
        :param urdfRoot: directory containing the pybullet_data assets
        :param width: render image width in pixels
        :param height: render image height in pixels
        :param all_views: if True, render() stacks a second camera view on
            the channel axis
        """
        self.metadata = {'render.modes': ['human' , 'rgb_array']}
        self.render_mode = render_mode
        self._isDiscrete = isDiscrete
        self._timeStep = 1. / 240.
        self._urdfRoot = urdfRoot
        self._maxSteps = maxSteps
        self._width = width
        self._height = height
        self._all_views = all_views
        if self.render_mode == 'human':
            # Reuse an existing shared-memory GUI server if one is running.
            cid = p.connect(p.SHARED_MEMORY)
            if (cid < 0):
                cid = p.connect(p.GUI)
            p.resetDebugVisualizerCamera(1.3, 180, -41, [0.52, -0.2, -0.33])
        else:
            p.connect(p.DIRECT)
        # Default debug/render camera pose.
        self._cam_dist = 3
        self._cam_yaw = -0.35
        self._cam_pitch = -35
        self._cam_target_p = [0.67, -0.35, 0.20]
        p.resetDebugVisualizerCamera(cameraDistance= self._cam_dist , cameraYaw= self._cam_yaw, cameraPitch= self._cam_pitch, cameraTargetPosition=self._cam_target_p)
        self.action_space = spaces.Box(np.array([-1]*6), np.array([1]*6))
        self.boundary = 5  # rover must stay within +/- boundary in x and y
        self.max_vel = 5
        self.observation_space = spaces.Box(np.array([-self.boundary, -self.boundary, -1, -1, -self.max_vel, -self.max_vel, -self.boundary, -self.boundary, -1, 0, 0 , -self.boundary, -self.boundary, -1]), np.array([self.boundary, self.boundary, 1, 1, self.max_vel, self.max_vel, self.boundary, self.boundary, 1, 0.07, 0.07, self.boundary, self.boundary, 1]))
        # Joint indices as found by p.getJointInfo()
        self.steering_joints = [0, 2]
        self.drive_joints = [1, 3, 4, 5]
        # Joint speed
        self.joint_speed = 0
        # Drag constants
        self.c_rolling = 0.3
        self.c_drag = 0.01
        # Throttle constant increases "speed" of the car
        self.c_throttle = 200
        self.MAX_SPEED = 20

    def reset(self, seed = None):
        """
        Rebuild the whole simulation and return the first observation.

        :param seed: optional RNG seed for both random and numpy.random
        :return: tuple of (float32 observation array, info dict)
        """
        random.seed(seed)
        np.random.seed(seed)
        self.step_counter = 0
        p.resetSimulation()
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING,0) # we will enable rendering after we loaded everything
        p.setGravity(0,0,-10)
        planeUid = p.loadURDF(os.path.join(self._urdfRoot,"plane.urdf"), basePosition=[0,0,-0.65])
        rest_poses = [0,-0.215,0,-2.57,0,2.356,2.356,0.08,0.08]
        # Spawn the rover to the left or right of the table at random.
        x_pos = np.random.choice([random.uniform(-1, -0.3), random.uniform(1.25,2)])
        y_pos = random.uniform(-1, 2.5)
        BASE_DIR = site.getsitepackages()[0] + "/rover_arm/data/"
        # BASE_DIR = "./rover_arm/data/"
        self.roverarmUid = p.loadURDF(BASE_DIR + "rover_arm.xml", basePosition=[ x_pos, y_pos ,-0.5])
        # Joints 7-13 are assumed to be the arm joints and 16/17 the two
        # fingers — TODO confirm against the URDF.
        for i in range(7,14):
            p.resetJointState(self.roverarmUid,i, rest_poses[i - 7])
        p.resetJointState(self.roverarmUid, 16, 0.07)
        p.resetJointState(self.roverarmUid, 17, 0.07)
        tableUid = p.loadURDF(os.path.join(self._urdfRoot, "table/table.urdf"),basePosition=[0.5,0,-0.65], globalScaling = 0.5)
        trayUid = p.loadURDF(os.path.join(self._urdfRoot, "tray/traybox.urdf"),basePosition=[0.45,0,-0.335], globalScaling = 0.5)
        state_object= [random.uniform(0.4, 0.5), random.uniform(-0.05, 0.05), -0.2]
        self.objectUid = p.loadURDF(os.path.join(self._urdfRoot, "random_urdfs/000/000.urdf"), basePosition=state_object, globalScaling = 0.8)
        pos, ang = p.getBasePositionAndOrientation(self.roverarmUid, 0)
        ang = p.getEulerFromQuaternion(ang)
        # Encode the heading as (cos, sin) of the yaw angle.
        ori = (math.cos(ang[2]), math.sin(ang[2]))
        pos = pos[:2]
        vel = p.getBaseVelocity(self.roverarmUid, 0)[0][0:2]
        state_rover = pos + ori + vel
        # Link 18 is assumed to be the end-effector — TODO confirm.
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid,16)[0], p.getJointState(self.roverarmUid, 17)[0])
        state_object, _ = p.getBasePositionAndOrientation(self.objectUid)
        self.observation = state_rover + state_arm + state_fingers + tuple(state_object)
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING,1)
        info = {'state_rover_pos': pos, 'state_rover_ang' : ang, 'state_rover_vel' : vel ,'state_arm' : state_arm, 'state_fingers': state_fingers, 'state_object': state_object}
        return (np.array(self.observation).astype(np.float32), info)

    def step(self, action):
        """
        Advance the simulation by one step.

        :param action: [throttle, steering, dx, dy, dz, fingers] in [-1, 1]
        :return: (observation, reward, terminated, truncated, info) —
            Gymnasium 5-tuple
        """
        # p.configureDebugVisualizer(p.COV_ENABLE_SINGLE_STEP_RENDERING)
        # Fixed gripper orientation fed to the IK solver.
        orientation = p.getQuaternionFromEuler([0.,-math.pi,math.pi/2.])
        dv = 0.05  # per-step, per-axis end-effector displacement scale
        dx_a, dy_a, dz_a = [x * dv for x in action[2:5] ]
        fingers = action[5]
        # Map [-1, 1] to the finger joint range [0, 0.07].
        fingers = np.interp(fingers, [-1,1], [0, 0.07])
        currentPose = p.getLinkState(self.roverarmUid, 18)
        currentPosition = currentPose[0]
        newPosition = [currentPosition[0] + dx_a,
                       currentPosition[1] + dy_a,
                       currentPosition[2] + dz_a]
        jointPoses = p.calculateInverseKinematics(self.roverarmUid,18,newPosition, orientation)
        # IK yields targets for all movable joints; indices 6:13 are assumed
        # to correspond to the arm joints — TODO confirm against the URDF.
        jointPoses_rover, jointPoses_arm = jointPoses[:6], jointPoses[6:13]
        p.setJointMotorControlArray(self.roverarmUid, list(range(7,14))+[16,17], p.POSITION_CONTROL, list(jointPoses_arm)+2*[fingers])
        throttle, steering_angle = action[:2]
        # Clip throttle and steering angle to reasonable values
        throttle = min(max(throttle, -1), 1)
        steering_angle = np.interp(steering_angle, [-1,1], [-0.6, 0.6])
        # Set the steering joint positions
        p.setJointMotorControlArray(self.roverarmUid, self.steering_joints,
                                    controlMode=p.POSITION_CONTROL,
                                    targetPositions=[steering_angle] * 2)
        # Calculate drag / mechanical resistance ourselves
        # Using velocity control, as torque control requires precise models
        friction = -self.joint_speed * (self.joint_speed * self.c_drag +
                                        self.c_rolling)
        acceleration = self.c_throttle * throttle + friction
        # Each time step is 1/240 of a second
        # NOTE(review): the integration step used below is 1/30, not the
        # 1/240 the comment above suggests — confirm which is intended.
        self.joint_speed = self.joint_speed + 1 / 30 * acceleration
        self.joint_speed = min(max(self.joint_speed, -self.MAX_SPEED), self.MAX_SPEED)
        # Set the velocity of the wheel joints directly
        p.setJointMotorControlArray(
            bodyUniqueId=self.roverarmUid,
            jointIndices=self.drive_joints,
            controlMode=p.VELOCITY_CONTROL,
            targetVelocities=[self.joint_speed] * 4,
            forces=[10] * 4)
        state_object_prev, _ = p.getBasePositionAndOrientation(self.objectUid)
        p.stepSimulation()
        state_object, _ = p.getBasePositionAndOrientation(self.objectUid)
        pos, ang = p.getBasePositionAndOrientation(self.roverarmUid, 0)
        ang = p.getEulerFromQuaternion(ang)
        ori = (math.cos(ang[2]), math.sin(ang[2]))
        pos = pos[:2]
        vel = p.getBaseVelocity(self.roverarmUid, 0)[0][0:2]
        state_rover = pos + ori + vel
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid,16)[0], p.getJointState(self.roverarmUid, 17)[0])
        terminated , truncated = False, False
        if state_object[2] > 0:
            # Object lifted above z=0: success.
            reward = 1
            terminated = True
        else:
            # Shaped reward: positive when the end-effector's per-axis
            # distance to the object decreased across the physics step.
            x0, y0, z0 = np.abs(np.array(currentPosition) - np.array(state_object_prev))
            x1, y1, z1 = np.abs(np.array(state_arm) - np.array(state_object))
            reward = x0 - x1 + y0 - y1 + z0 - z1
            if abs(reward) > 1e-3:
                reward = reward/ 10
            else:
                reward = 0
        self.step_counter += 1
        def inGame(state_rover):
            # True while the rover's xy position stays inside the square
            # [-boundary, boundary]^2.
            rx, ry = state_rover[:2]
            inBound = rx > -self.boundary and rx < self.boundary
            inBound = inBound and ry > -self.boundary and ry < self.boundary
            return inBound
        if not inGame(state_rover):
            reward = -1
            terminated = True
        if self.step_counter > self._maxSteps:
            reward = 0
            truncated = True
        self.observation = state_rover + state_arm + state_fingers + tuple(state_object)
        info = {'state_rover_pos': pos, 'state_rover_ang' : ang, 'state_rover_vel' : vel ,'state_arm' : state_arm, 'state_fingers': state_fingers, 'state_object': state_object}
        return np.array(self.observation).astype(np.float32), reward, terminated, truncated, info

    def render(self, width = None, height = None):
        """
        Render the scene to an RGB array.

        :param width: image width (defaults to the constructor value)
        :param height: image height (defaults to the constructor value)
        :return: (H, W, 3) uint8 array; (H, W, 6) when all_views=True (two
            views stacked on the channel axis); None outside 'rgb_array' mode
        """
        # cam = p.getDebugVisualizerCamera()
        # xyz = cam[11]
        # x= float(xyz[0]) + 0.125
        # y = xyz[1]
        # z = xyz[2]
        # p.resetDebugVisualizerCamera(cameraYaw = cam[8], cameraPitch= cam[9],cameraDistance = cam[10],cameraTargetPosition=[x,y,z])
        if width == None or height == None:
            width = self._width
            height = self._height
        if self.render_mode != 'rgb_array':
            return None
        view_matrix1 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=self._cam_target_p,
                                                           distance=self._cam_dist,
                                                           yaw=self._cam_yaw,
                                                           pitch=self._cam_pitch,
                                                           roll=0,
                                                           upAxisIndex=2)
        # Second, close-up view of the tray area.
        view_matrix2 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=[0.7,0,0.05],
                                                           distance=.7,
                                                           yaw=90,
                                                           pitch=-70,
                                                           roll=0,
                                                           upAxisIndex=2)
        proj_matrix = p.computeProjectionMatrixFOV(fov=60,
                                                   aspect=float(width) /height,
                                                   nearVal=0.1,
                                                   farVal=100.0)
        (_, _, px1, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix1,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        rgb_array1 = np.array(px1, dtype=np.uint8)
        rgb_array1 = np.reshape(rgb_array1, (height,width, 4))[:, :, :3]  # drop alpha
        if not self._all_views:
            return rgb_array1
        (_, _, px2, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix2,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        rgb_array2 = np.array(px2, dtype=np.uint8)
        rgb_array2 = np.reshape(rgb_array2, (height,width, 4))[:, :, :3]
        rgb_array = np.concatenate((rgb_array1 , rgb_array2), axis = 2)
        return rgb_array

    def _get_state(self):
        """Return the last observation tuple computed by reset()/step()."""
        return self.observation

    def close(self):
        """Disconnect from the PyBullet physics server."""
        p.disconnect()
import gymnasium as gym
from gymnasium import error, spaces, utils
from gymnasium.utils import seeding
import os
import pybullet as p
import pybullet_data
import math
import numpy as np
import random
import site
class RoverArmToPlaceEnv(gym.Env):
    """PyBullet environment: a four-wheel steered rover carrying an arm must
    move an object from the tray beside it into a target tray several metres
    away.

    Observation (7 floats): rover x/y, gripper x/y/z, two finger joint angles.
    Action (6 floats): [throttle, steering, dx, dy, dz, fingers].
    """
    metadata = {'render.modes': ['human', 'rgb_array']}

    def __init__(self, render_mode='rgb_array', maxSteps=50 * 1000, isDiscrete=False, urdfRoot=pybullet_data.getDataPath(),
                 width=48, height=48):
        """Connect to PyBullet (GUI for 'human', DIRECT otherwise) and define
        the action/observation spaces and rover drive constants.

        :param render_mode: 'human' or 'rgb_array'
        :param maxSteps: episode length limit before truncation
        :param isDiscrete: stored but not used by the visible code
        :param urdfRoot: directory containing the pybullet_data URDF assets
        :param width: default render image width in pixels
        :param height: default render image height in pixels
        """
        self.render_mode = render_mode
        self._isDiscrete = isDiscrete
        self._timeStep = 1. / 240.
        self._urdfRoot = urdfRoot
        self._maxSteps = maxSteps
        self._width = width
        self._height = height
        if self.render_mode == 'human':
            # Reuse an already-running GUI server if one exists.
            cid = p.connect(p.SHARED_MEMORY)
            if (cid < 0):
                cid = p.connect(p.GUI)
            p.resetDebugVisualizerCamera(1.3, 180, -41, [0.52, -0.2, -0.33])
        else:
            p.connect(p.DIRECT)
            # Chase-camera parameters, reused by render() for view_matrix1.
            self._cam_dist = 3
            self._cam_yaw = -0.35
            self._cam_pitch = -35
            self._cam_target_p = [0.67, -0.35, 0.20]
            p.resetDebugVisualizerCamera(cameraDistance=self._cam_dist, cameraYaw=self._cam_yaw, cameraPitch=self._cam_pitch, cameraTargetPosition=self._cam_target_p)
        # [throttle, steering] + [dx, dy, dz, fingers]
        self.action_space = spaces.Box(np.array([-1, -0.6] + [-1] * 4), np.array([1, 0.6] + [1] * 4))
        self.boundary = 7
        self.observation_space = spaces.Box(np.array([-self.boundary, -self.boundary] + [-1] * 3 + [0, 0]), np.array([self.boundary, self.boundary] + [1] * 3 + [0.07] * 2))
        # Joint indices as found by p.getJointInfo()
        self.steering_joints = [0, 2]
        self.drive_joints = [1, 3, 4, 5]
        # Joint speed
        self.joint_speed = 0
        # Drag constants
        self.c_rolling = 0.3
        self.c_drag = 0.01
        # Throttle constant increases "speed" of the car
        self.c_throttle = 200
        self.MAX_SPEED = 20

    def reset(self):
        """Rebuild the whole simulation scene and return the first observation.

        NOTE(review): returns only the observation array, not the gymnasium
        (obs, info) pair — confirm callers use the legacy gym reset API.
        """
        self.step_counter = 0
        p.resetSimulation()
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 0)  # we will enable rendering after we loaded everything
        p.setGravity(0, 0, -10)
        planeUid = p.loadURDF(os.path.join(self._urdfRoot, "plane.urdf"), basePosition=[0, 0, -0.65])
        rest_poses = [0, -0.215, 0, -2.57, 0, 2.356, 2.356, 0.08, 0.08]
        # Spawn the rover to either side of the tray table, never on top of it.
        x_pos = np.random.choice([random.uniform(-1, -0.3), random.uniform(1.25, 2)])
        y_pos = random.uniform(-1, 2)
        BASE_DIR = site.getsitepackages()[0] + "/rover_arm/data/"
        self.roverarmUid = p.loadURDF(BASE_DIR + "rover_arm.xml", basePosition=[x_pos, y_pos, -0.5])
        # Joints 7-13 are the arm; 16/17 are the gripper fingers (open = 0.07).
        for i in range(7, 14):
            p.resetJointState(self.roverarmUid, i, rest_poses[i - 7])
        p.resetJointState(self.roverarmUid, 16, 0.07)
        p.resetJointState(self.roverarmUid, 17, 0.07)
        tableUid = p.loadURDF(os.path.join(self._urdfRoot, "table/table.urdf"), basePosition=[0.5, 0, -0.65], globalScaling=0.5)
        trayUid = p.loadURDF(os.path.join(self._urdfRoot, "tray/traybox.urdf"), basePosition=[0.45, 0, -0.335], globalScaling=0.5)
        targetTableUid = p.loadURDF(os.path.join(self._urdfRoot, "table/table.urdf"), basePosition=[3.5, 3.5, -0.65], globalScaling=0.5)
        self.ttc = [3.45, 3.5, -0.335]  # target_tray_center
        targetTrayUid = p.loadURDF(os.path.join(self._urdfRoot, "tray/traybox.urdf"), basePosition=self.ttc, globalScaling=0.5)
        state_object = [random.uniform(0.4, 0.5), random.uniform(-0.05, 0.05), -0.2]
        self.objectUid = p.loadURDF(os.path.join(self._urdfRoot, "random_urdfs/000/000.urdf"), basePosition=state_object, globalScaling=0.8)
        # Success region: a 20 cm square around the target tray centre,
        # with the object below tz (i.e. dropped into the tray).
        self.tx_min = self.ttc[0] - 0.1
        self.tx_max = self.ttc[0] + 0.1
        self.ty_min = self.ttc[1] - 0.1
        self.ty_max = self.ttc[1] + 0.1
        self.tz = self.ttc[2] + 0.05
        state_rover = p.getLinkState(self.roverarmUid, 0)[0][:2]
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid, 16)[0], p.getJointState(self.roverarmUid, 17)[0])
        # Tuple concatenation: (rover x, y) + (arm x, y, z) + (finger a, b).
        self.observation = state_rover + state_arm + state_fingers
        p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1)
        return np.array(self.observation).astype(np.float32)

    def step(self, action):
        """Advance the simulation one step.

        ``action`` = [throttle, steering, dx, dy, dz, fingers]: the first two
        drive the rover, the middle three displace the gripper IK target and
        the last sets both finger joints.

        Returns (observation, reward, done, info) — the legacy gym 4-tuple,
        not gymnasium's 5-tuple; NOTE(review): confirm callers expect this.
        """
        # p.configureDebugVisualizer(p.COV_ENABLE_SINGLE_STEP_RENDERING)
        # Keep the gripper pointing straight down while moving.
        orientation = p.getQuaternionFromEuler([0., -math.pi, math.pi / 2.])
        dv = 0.05  # max gripper displacement per step (per axis)
        dx_a, dy_a, dz_a = [x * dv for x in action[2:5]]
        fingers = action[5]
        currentPose = p.getLinkState(self.roverarmUid, 18)
        currentPosition = currentPose[0]
        newPosition = [currentPosition[0] + dx_a,
                       currentPosition[1] + dy_a,
                       currentPosition[2] + dz_a]
        jointPoses = p.calculateInverseKinematics(self.roverarmUid, 18, newPosition, orientation)
        # IK returns one pose per movable joint; [6:13] are the arm joints.
        jointPoses_rover, jointPoses_arm = jointPoses[:6], jointPoses[6:13]
        p.setJointMotorControlArray(self.roverarmUid, list(range(7, 14)) + [16, 17], p.POSITION_CONTROL, list(jointPoses_arm) + 2 * [fingers])
        throttle, steering_angle = action[:2]
        # Clip throttle and steering angle to reasonable values
        throttle = min(max(throttle, -1), 1)
        steering_angle = max(min(steering_angle, 0.6), -0.6)
        # Set the steering joint positions
        p.setJointMotorControlArray(self.roverarmUid, self.steering_joints,
                                    controlMode=p.POSITION_CONTROL,
                                    targetPositions=[steering_angle] * 2)
        # Calculate drag / mechanical resistance ourselves
        # Using velocity control, as torque control requires precise models
        friction = -self.joint_speed * (self.joint_speed * self.c_drag +
                                        self.c_rolling)
        acceleration = self.c_throttle * throttle + friction
        # NOTE(review): the original comment claimed a 1/240 s time step
        # (matching self._timeStep) but the integration below uses 1/30 —
        # confirm which step size is intended.
        self.joint_speed = self.joint_speed + 1 / 30 * acceleration
        self.joint_speed = min(max(self.joint_speed, -self.MAX_SPEED), self.MAX_SPEED)
        # Set the velocity of the wheel joints directly
        p.setJointMotorControlArray(
            bodyUniqueId=self.roverarmUid,
            jointIndices=self.drive_joints,
            controlMode=p.VELOCITY_CONTROL,
            targetVelocities=[self.joint_speed] * 4,
            forces=[10] * 4)
        p.stepSimulation()
        state_object, _ = p.getBasePositionAndOrientation(self.objectUid)
        state_rover = p.getLinkState(self.roverarmUid, 0)[0][:2]
        state_arm = p.getLinkState(self.roverarmUid, 18)[0]
        state_fingers = (p.getJointState(self.roverarmUid, 16)[0], p.getJointState(self.roverarmUid, 17)[0])
        ox, oy, oz = state_object
        # Success: the object is inside the target tray square and below tz.
        if oz < self.tz and ox > self.tx_min and ox < self.tx_max and oy > self.ty_min and oy < self.ty_max:
            reward = 1
            done = True
            self.close()
        else:
            reward = 0
            done = False
        self.step_counter += 1

        def inGame(state_rover):
            # True while the rover is inside the square playing field.
            rx, ry = state_rover
            inBound = rx > -self.boundary and rx < self.boundary
            inBound = inBound and ry > -self.boundary and ry < self.boundary
            return inBound

        if not inGame(state_rover):
            reward = -1
            done = True
            self.close()
        if self.step_counter > self._maxSteps:
            # NOTE(review): this check runs last, so a success (or the
            # out-of-bounds penalty) on the final step is overwritten with
            # reward 0 — confirm this ordering is intended.
            reward = 0
            done = True
            self.close()
        info = {'object_position': state_object}
        self.observation = state_rover + state_arm + state_fingers
        return np.array(self.observation).astype(np.float32), reward, done, info

    def render(self, width=None, height=None):
        """Render three camera views (chase cam, source tray close-up, target
        tray close-up) stacked vertically into one RGB array.

        Returns None unless render_mode is 'rgb_array'.
        """
        # cam = p.getDebugVisualizerCamera()
        # xyz = cam[11]
        # x= float(xyz[0]) + 0.125
        # y = xyz[1]
        # z = xyz[2]
        # p.resetDebugVisualizerCamera(cameraYaw = cam[8], cameraPitch= cam[9],cameraDistance = cam[10],cameraTargetPosition=[x,y,z])
        if width == None or height == None:  # NOTE(review): prefer `is None`
            width = self._width
            height = self._height
        if self.render_mode != 'rgb_array':
            return None
        # Chase view of the whole scene.
        view_matrix1 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=self._cam_target_p,
                                                           distance=self._cam_dist,
                                                           yaw=self._cam_yaw,
                                                           pitch=self._cam_pitch,
                                                           roll=0,
                                                           upAxisIndex=2)
        # Close-up of the source tray.
        view_matrix2 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=[0.7, 0, 0.05],
                                                           distance=.7,
                                                           yaw=90,
                                                           pitch=-70,
                                                           roll=0,
                                                           upAxisIndex=2)
        # Close-up of the target tray.
        view_matrix3 = p.computeViewMatrixFromYawPitchRoll(cameraTargetPosition=[3.7, 3.5, 0.05],
                                                           distance=.7,
                                                           yaw=90,
                                                           pitch=-70,
                                                           roll=0,
                                                           upAxisIndex=2)
        proj_matrix = p.computeProjectionMatrixFOV(fov=60,
                                                   aspect=float(width) / height,
                                                   nearVal=0.1,
                                                   farVal=100.0)
        (_, _, px1, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix1,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        (_, _, px2, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix2,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        (_, _, px3, _, _) = p.getCameraImage(width=width,
                                             height=height,
                                             viewMatrix=view_matrix3,
                                             projectionMatrix=proj_matrix,
                                             renderer=p.ER_BULLET_HARDWARE_OPENGL)
        # Drop the alpha channel and stack the three views top-to-bottom.
        rgb_array1 = np.array(px1, dtype=np.uint8)
        rgb_array1 = np.reshape(rgb_array1, (height, width, 4))[:, :, :3]
        rgb_array2 = np.array(px2, dtype=np.uint8)
        rgb_array2 = np.reshape(rgb_array2, (height, width, 4))[:, :, :3]
        rgb_array3 = np.array(px3, dtype=np.uint8)
        rgb_array3 = np.reshape(rgb_array3, (height, width, 4))[:, :, :3]
        rgb_array = np.concatenate((rgb_array1, rgb_array2, rgb_array3), axis=0)
        return rgb_array

    def _get_state(self):
        """Return the most recent observation tuple (set by reset()/step())."""
        return self.observation

    def close(self):
        """Disconnect from the PyBullet server.

        NOTE(review): step() already calls close() at episode end, so an
        additional explicit close() will raise — confirm callers guard this.
        """
        p.disconnect()
import math
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.calibration.decawave.anchor_ranges_to_positions import AnchorRangesToPositions
def get_leading_diagonal_coord(layout: AnchorRangesToPositions, dist: float):
    """Return the [x, y] point at distance ``dist`` from the origin along the
    leading diagonal (towards anchor 2, i.e. ``layout.position_3()``)."""
    far_corner = layout.position_3()
    diagonal_length = math.sqrt(far_corner.x**2 + far_corner.y**2)
    fraction = dist / diagonal_length
    return [fraction * far_corner.x, fraction * far_corner.y]
def get_trailing_diagonal_coord(layout: AnchorRangesToPositions, dist: float):
    """Return the [x, y] point at distance ``dist`` from anchor 1 along the
    trailing diagonal (anchor 1 -> anchor 3).

    Bug fix: the point is the standard linear blend p1*(1 - t) + p3*t with
    t = dist / |p3 - p1|.  The original computed
    ``x = p3.x + p1.x*(1 - t)`` (only correct at t == 1; at dist == 0 it
    returned p3.x + p1.x instead of p1.x) and dropped the ``p1.y*(1 - t)``
    term from y (only correct when anchor 1 lies on the x axis).
    """
    p_start = layout.position_2()  # anchor 1
    p_end = layout.position_4()    # anchor 3
    delta = p_start - p_end
    diag_2_4 = math.sqrt(delta.x**2 + delta.y**2)
    ratio = dist / diag_2_4
    x = p_start.x * (1 - ratio) + p_end.x * ratio
    y = p_start.y * (1 - ratio) + p_end.y * ratio
    return [x, y]
def get_distances(layout: AnchorRangesToPositions, position: Vector):
    """Return the distances from ``position`` to anchors 0-3, in order."""
    anchors = (layout.position_1(), layout.position_2(),
               layout.position_3(), layout.position_4())
    return [(anchor - position).magnitude() for anchor in anchors]
if __name__ == '__main__':
    # Measured anchor-to-anchor ranges, in millimetres.
    side_0_1 = 3520
    side_1_2 = 3460
    side_2_3 = 3251
    side_3_0 = 3362
    diag_0_2 = 4789
    diag_1_3 = 4818
    height = 420         # anchor height
    rover_height = 250   # height of the tag on the rover
    layout = AnchorRangesToPositions(
        side_0_1, side_1_2, side_2_3, side_3_0,
        diag_0_2, diag_1_3, height
    )
    print('0 - {}'.format(layout.position_1()))
    print('1 - {}'.format(layout.position_2()))
    print('2 - {}'.format(layout.position_3()))
    print('3 - {}'.format(layout.position_4()))
    print('Diag 1/3 error {:.2f}'.format(layout.error_3_4()))
    # Leading diagonal co-ordinates
    print()
    print('Leading Diagonal')
    # Walk along each diagonal in 1 m steps, printing the expected range
    # from every anchor at each point (a reference table for field checks).
    for i in range(1000, int(diag_0_2), 1000):
        diag = get_leading_diagonal_coord(layout, i)
        distances = get_distances(layout, Vector([diag[0], diag[1], rover_height]))
        print('Diag 0->2, {}mm [{:4.0f}, {:4.0f}], Ranges [{:4.0f}, {:4.0f}, {:4.0f}, {:4.0f}]'.format(
            i, diag[0], diag[1], distances[0], distances[1], distances[2], distances[3]))
    print()
    print('Trailing Diagonal')
    for i in range(1000, int(diag_1_3), 1000):
        diag = get_trailing_diagonal_coord(layout, i)
        distances = get_distances(layout, Vector([diag[0], diag[1], rover_height]))
        print('Diag 1->3, {}mm [{:4.0f}, {:4.0f}], Ranges [{:4.0f}, {:4.0f}, {:4.0f}, {:4.0f}]'.format(
            i, diag[0], diag[1], distances[0], distances[1], distances[2], distances[3]))
import re
from typing import List
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class Calibrate:
    """Derives IMU calibration constants (offsets and multipliers) from six
    static readings, one for each axis pointing up and down while the device
    faces south.
    """

    def __init__(self):
        # One averaged reading per orientation; populated via the set_* methods.
        self.x_plus_south: NineDoFData = None
        self.x_minus_south: NineDoFData = None
        self.y_plus_south: NineDoFData = None
        self.y_minus_south: NineDoFData = None
        self.z_plus_south: NineDoFData = None
        self.z_minus_south: NineDoFData = None
        self.offsets = NineDoFData.zero()
        # Temperature conversion constants — presumably the sensor's raw-to-
        # degrees mapping ((raw - (-440)) * 0.0625); TODO confirm datasheet.
        self.offsets.temperature.value = -440
        self.multipliers = NineDoFData.one()
        self.multipliers.temperature.value = 0.0625

    def set_gyro_offset(self):
        """Set the gyro bias to the mean angular velocity over all six
        stationary orientations (a stationary gyro should read zero)."""
        total_gyro = Vector.zero()
        total_gyro += self.x_plus_south.angular_velocity.value
        total_gyro += self.x_minus_south.angular_velocity.value
        total_gyro += self.y_plus_south.angular_velocity.value
        # Bug fix: the original summed y_plus_south twice and never included
        # y_minus_south, skewing the bias towards the y-plus reading.
        total_gyro += self.y_minus_south.angular_velocity.value
        total_gyro += self.z_minus_south.angular_velocity.value
        total_gyro += self.z_plus_south.angular_velocity.value
        self.offsets.angular_velocity.value = total_gyro / 6
        print("Gyro offset: {}".format(self.offsets.angular_velocity.value))

    def set_accelerometer_offset_and_multiplier(self):
        """Set per-axis accelerometer bias (midpoint of the +axis/-axis
        readings) and scale factor (so each axis spans exactly +/-1 g)."""
        g = 1
        offsets = [0, 0, 0]
        multipliers = [0, 0, 0]
        # x
        x_max = self.x_plus_south.acceleration.value
        x_min = self.x_minus_south.acceleration.value
        offsets[0] = self._get_average(x_max.x, x_min.x)
        multipliers[0] = self._get_scaling(x_min.x, x_max.x, g)
        # y
        y_max = self.y_plus_south.acceleration.value
        y_min = self.y_minus_south.acceleration.value
        offsets[1] = self._get_average(y_max.y, y_min.y)
        multipliers[1] = self._get_scaling(y_min.y, y_max.y, g)
        # z
        z_max = self.z_plus_south.acceleration.value
        z_min = self.z_minus_south.acceleration.value
        offsets[2] = self._get_average(z_max.z, z_min.z)
        multipliers[2] = self._get_scaling(z_min.z, z_max.z, g)
        # save results
        print("Acc: {}, {}".format(offsets, multipliers))
        self.offsets.acceleration.value = Vector(offsets)
        self.multipliers.acceleration.value = Vector(multipliers)

    def set_magnetometer_offset_and_multiplier(self):
        """Set per-axis magnetometer bias (midpoint of the +axis/-axis
        readings).  Scaling was disabled in the original code; note that the
        multipliers are therefore stored as zeros (NOTE(review): downstream
        consumers that multiply by these would null the magnetometer —
        confirm intended)."""
        offsets = [0, 0, 0]
        multipliers = [0, 0, 0]
        # x
        x_max = self.x_plus_south.magnetic_field.value
        x_min = self.x_minus_south.magnetic_field.value
        offsets[0] = self._get_average(x_max.x, x_min.x)
        # y
        y_max = self.y_plus_south.magnetic_field.value
        y_min = self.y_minus_south.magnetic_field.value
        offsets[1] = self._get_average(y_max.y, y_min.y)
        # z
        z_max = self.z_plus_south.magnetic_field.value
        z_min = self.z_minus_south.magnetic_field.value
        offsets[2] = self._get_average(z_max.z, z_min.z)
        # save results
        print("Mag: {}, {}".format(offsets, multipliers))
        self.offsets.magnetic_field.value = Vector(offsets)
        self.multipliers.magnetic_field.value = Vector(multipliers)

    def print(self):
        """Print the offsets and multipliers as a JSON snippet suitable for
        the calibration file."""
        data = '{{\n"offset":{},\n"multiplier":{}\n}}'.format(self.offsets.to_json(), self.multipliers.to_json())
        print(data)

    @staticmethod
    def _get_scaling(lo: float, hi: float, value: float) -> float:
        """Scale factor mapping the measured half-range (hi - lo)/2 onto
        ``value``.  (Parameters renamed from min/max to avoid shadowing the
        builtins.)"""
        return (value * 2) / (hi - lo)

    @staticmethod
    def _get_average(a: float, b: float) -> float:
        """Midpoint of two readings."""
        return (a + b) / 2

    def set_x_plus_south(self, value_pair: List[str]):
        self.x_plus_south = Calibrate.pair_to_nine_dof(value_pair)

    def set_x_minus_south(self, value_pair: List[str]):
        self.x_minus_south = Calibrate.pair_to_nine_dof(value_pair)

    def set_y_plus_south(self, value_pair: List[str]):
        self.y_plus_south = Calibrate.pair_to_nine_dof(value_pair)

    def set_y_minus_south(self, value_pair: List[str]):
        self.y_minus_south = Calibrate.pair_to_nine_dof(value_pair)

    def set_z_plus_south(self, value_pair: List[str]):
        self.z_plus_south = Calibrate.pair_to_nine_dof(value_pair)

    def set_z_minus_south(self, value_pair: List[str]):
        self.z_minus_south = Calibrate.pair_to_nine_dof(value_pair)

    @staticmethod
    def pair_to_nine_dof(value_pair: List[str]):
        """Average two console lines into a single NineDoFData reading."""
        a = Calibrate.parse_line(value_pair[0])
        b = Calibrate.parse_line(value_pair[1])
        return (a + b) / 2

    @staticmethod
    def parse_line(line: str) -> NineDoFData:
        """Parse one 'ax ay az | gx gy gz | mx my mz | t' console line into a
        NineDoFData with a dummy timestamp of 1."""
        values = re.findall(r"-?[\d]+", line)
        acc = Calibrate._to_vector(values[0:3])
        gyro = Calibrate._to_vector(values[3:6])
        mag = Calibrate._to_vector(values[6:9])
        temp = int(values[9])
        t = 1
        return NineDoFData(Data(acc, t), Data(gyro, t), Data(mag, t), Data(temp, t))

    @staticmethod
    def _to_vector(values: List[str]) -> Vector:
        """Convert three numeric strings into a Vector of ints."""
        return Vector([int(values[0]), int(values[1]), int(values[2])])
if __name__ == '__main__':
    calibrator = Calibrate()
    # Each set_* call takes two captured console lines for one orientation.
    # The first (commented-out) data set is an earlier calibration run,
    # kept for reference.
    # Min/Max Z
    # calibrator.set_z_plus_south([
    #     "838 155 16276 | -361 -48 176 | 1344 1008 -2584 | -122",
    #     "412 -51 16281 | -361 -46 175 | 1330 3594 -2556 | -122"
    # ])
    # calibrator.set_z_minus_south([
    #     "410 -160 -16410 | -368 -50 178 | 1273 3599 3125 | -123",
    #     "-172 154 -16374 | -373 -55 172 | 1538 1079 3244 | -114"
    # ])
    # # Min/Max X
    # calibrator.set_x_plus_south([
    #     "16764 384 -704 | -370 -52 173 | -1616 3606 561 | -114",
    #     "16695 665 -1545 | -372 -53 174 | -1483 996 737 | -112"
    # ])
    # calibrator.set_x_minus_south([
    #     "-16033 -505 861 | -369 -53 174 | 4388 1042 87 | -112",
    #     "-16045 -721 331 | -370 -39 174 | 4180 3646 234 | -113"
    # ])
    # # Min/Max Y
    # calibrator.set_y_plus_south([
    #     "-471 16359 -515 | -379 -62 173 | 2944 -663 363 | -105",
    #     "-211 16365 345 | -383 -54 171 | 273 -685 467 | -99"
    # ])
    # calibrator.set_y_minus_south([
    #     "1974 -16331 -385 | -394 -55 167 | -298 5178 616 | -85",
    #     "1780 -16314 -1134 | -366 -55 165 | 2164 5383 432 | -91"
    # ])
    calibrator.set_z_plus_south([
        "766 39 16145 | -524 -91 106 | -374 3759 -761 | 101",
        "585 179 16152 | -516 -90 110 | -16 1276 -710 | 91"
    ])
    calibrator.set_z_minus_south([
        "655 -71 -16531 | -532 -92 124 | 21 1014 5465 | 100",
        "543 -111 -16532 | -535 -92 123 | -4 3636 5219 | 101"
    ])
    # Min/Max X
    calibrator.set_x_plus_south([
        "17004 118 -891 | -635 -395 124 | -2832 4455 3866 | 94",
        "16930 251 42 | -516 -110 105 | -2951 2418 3599 | 88"
    ])
    calibrator.set_x_minus_south([
        "-15843 -120 -90 | -456 -90 125 | 3122 1782 5373 | 92",
        "-15854 -176 -294 | -532 -101 122 | 2741 1910 3182 | 102"
    ])
    # Min/Max Y
    calibrator.set_y_plus_south([
        "518 16420 -244 | -515 -93 129 | 244 417 2153 | 82",
        "325 16407 262 | -522 72 141 | 538 324 4616 | 81"
    ])
    calibrator.set_y_minus_south([
        "812 -16340 -69 | -532 -92 125 | 209 5264 3570 | 105",
        "672 -16344 -23 | -515 -135 124 | 362 5395 819 | 99"
    ])
    # Derive the constants and print them as a JSON calibration snippet.
    calibrator.set_gyro_offset()
    calibrator.set_accelerometer_offset_and_multiplier()
    calibrator.set_magnetometer_offset_and_multiplier()
    calibrator.print()
import math
from typing import List, Tuple
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class ImuCalibrator:
    """Refines IMU calibration offsets in place, from measurements taken with
    the device rotated on a plane surface."""

    def __init__(self, offsets: NineDoFData, multipliers: NineDoFData):
        self.offsets = offsets
        self.multipliers = multipliers

    def set_x_y_acceleration_biases(self, measurements: List[Tuple[Vector, Vector]]):
        """
        Calculates the accelerometer x and y biases from pairs of measurements
        taken 180 degrees apart on a plane surface. The rotation must be
        around the z axis; the plane's angle to the horizontal is irrelevant.
        The biases are the averages of the midpoints of each measurement pair.
        :param measurements: pairs of measurements made 180 degrees apart
        """
        centre = self._get_average(measurements)
        unchanged_z = self.offsets.acceleration.value.z
        self.offsets.acceleration.value = Vector([centre.x, centre.y, unchanged_z])

    def set_acceleration_biases_to_1_g(self, measurement: Vector):
        """Adjusts the vertical (z) bias so the calibrated magnitude is exactly
        1 g, avoiding a small residual acceleration once gravity is removed.
        Assumes the scaling is correct and the x/y biases are already set."""
        bias = self.offsets.acceleration.value
        gain = self.multipliers.acceleration.value
        scaled = (measurement - bias).scale(gain)
        m = scaled.magnitude()
        z = scaled.z
        # Positive root of the quadratic equation for the scaled z correction.
        scaled_delta_z = z - math.sqrt(z**2 + 1 - m**2)
        corrected_z = bias.z + scaled_delta_z / gain.z
        self.offsets.acceleration.value = Vector([bias.x, bias.y, corrected_z])

    def set_x_y_magnetic_field_biases(self, measurements: List[Tuple[Vector, Vector]]):
        """
        Calculates the magnetometer x and y biases from pairs of measurements
        taken 180 degrees apart on a plane surface. The rotation must be
        around the z axis; the plane's angle to the horizontal is irrelevant.
        The biases are the averages of the midpoints of each measurement pair.
        :param measurements: pairs of measurements made 180 degrees apart
        """
        centre = self._get_average(measurements)
        unchanged_z = self.offsets.magnetic_field.value.z
        self.offsets.magnetic_field.value = Vector([centre.x, centre.y, unchanged_z])

    @staticmethod
    def _get_average(measurements: List[Tuple[Vector, Vector]]):
        """Mean of all 2*N vectors contained in the measurement pairs."""
        running_total = Vector.zero()
        for first, second in measurements:
            running_total += first + second
        return running_total / (2 * len(measurements))
import curses
from rover_position_rjg.clients.monitor.imu_data_model import ImuDataModel
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.filters.attitude_filter import AttitudeOutput
from rover_position_rjg.position.position.position import Position
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class View:
    """Curses layout for the position monitor: a vertical stack of windows
    showing IMU data, heading, attitude output, beacon data, position output
    and a key-binding legend."""

    def __init__(self, stdscr):
        # curses.newwin(rows, cols, begin_y, begin_x); windows are stacked.
        self.main_window = stdscr
        self.imu_data_window = curses.newwin(6, 80, 0, 0)
        self.heading_window = curses.newwin(3, 80, 6, 0)
        self.attitude_window = curses.newwin(4, 80, 9, 0)
        self.beacon_window = curses.newwin(4, 80, 13, 0)
        self.output_window = curses.newwin(4, 80, 17, 0)

    def display_template(self):
        """Draw the static headings/labels for every window and refresh."""
        self.imu_data_window.addstr(0, 0, '-------------------------------------------------------------------------------')
        self.imu_data_window.addstr(1, 0, 'IMU       Accelerometer        |  Gyroscope            |  Magnetometer      | Temp')
        self.imu_data_window.addstr(2, 0, '          x      y      z      |  x      y      z      |  x      y      z   |')
        self.imu_data_window.addstr(3, 0, 'Raw                            |                       |                    |')
        self.imu_data_window.addstr(4, 0, 'Act                            |                       |                    |')
        self.imu_data_window.addstr(5, 0, 'Mag/Err                        |                       |                    |  NA')
        self.heading_window.addstr(0, 0, '-------------------------------------------------------------------------------')
        self.heading_window.addstr(1, 0, 'Attitude  Roll   Pitch  Yaw    G-Zero   Cal  Madg')
        self.heading_window.addstr(2, 0, 'Heading')
        self.attitude_window.addstr(0, 0, '-------------------------------------------------------------------------------')
        self.attitude_window.addstr(1, 0, 'Attitude Output (Madgwick)     | Roll   Pitch  Yaw')
        self.attitude_window.addstr(2, 0, 'Acceler.                       | Heading ')
        self.attitude_window.addstr(3, 0, 'Quatern.')
        self.beacon_window.addstr(0, 0, '-------------------------------------------------------------------------------')
        self.beacon_window.addstr(1, 0, 'Beacon    x      y      z')
        self.beacon_window.addstr(2, 0, 'Raw')
        self.beacon_window.addstr(3, 0, 'Scaled')
        self.output_window.addstr(0, 0, '-------------------------------------------------------------------------------')
        self.output_window.addstr(1, 0, 'Position Output (Kalman)')
        self.output_window.addstr(2, 0, 'Acceler.                       | Heading ')
        self.output_window.addstr(3, 0, 'Position                       | Velocity')
        # The legend goes directly below the last data window.
        r = self.get_row_after(self.output_window)
        self.main_window.addstr(r, 0, '-------------------------------------------------------------------------------')
        self.main_window.addstr(r+1, 0, 'Monitor: (q)uit')
        self.main_window.addstr(r+2, 0, 'Position: (p)ause imu, (t)rack position, (c)alibrate, e(x)it app')
        self.main_window.addstr(r+3, 0, 'Publish: (i)mu, (a)ttitude, (b)eacon, p(o)sition, (h)eading)')
        self.main_window.addstr(r+4, 0, 'Record: (0)all, (1)imu, (2)attitude out, (3)beacon, (4)position out')
        self.main_window.addstr(r+5, 0, '-------------------------------------------------------------------------------')
        self.main_window.refresh()
        self.imu_data_window.refresh()
        self.heading_window.refresh()
        self.attitude_window.refresh()
        self.beacon_window.refresh()
        self.output_window.refresh()

    def display_imu_data(self, imu_data: ImuDataModel):
        """Refresh the raw/actual IMU rows and the magnitude/error row."""
        self.print_9dof(self.imu_data_window, 3, imu_data.raw, True)
        self.print_9dof(self.imu_data_window, 4, imu_data.actual, False)
        actual_error = imu_data.get_actual_error()
        # Row 5 columns: accel at 8, gyro at 30, magnetometer at 53.
        self.print_mag_error(8, imu_data.actual.acceleration.value.magnitude(), actual_error.x)
        self.print_mag_error(30, imu_data.actual.angular_velocity.value.magnitude(), actual_error.y)
        relative_mag_field = imu_data.get_relative_magnetic_field(imu_data.actual.magnetic_field.value.magnitude())
        relative_mag_error = imu_data.get_relative_magnetic_field(actual_error.z)
        self.print_mag_error(53, relative_mag_field, relative_mag_error)
        self.imu_data_window.refresh()

    def print_mag_error(self, x: int, magnitude: float, error: float):
        """Write a 'magnitude error' pair at column x of the Mag/Err row."""
        self.imu_data_window.addstr(5, x, '{:7.4f} {:7.4f}'.format(magnitude, error))

    def print_9dof(self, window: any, y: int, data: NineDoFData, raw: bool):
        """Write one 9-DoF sample on row y (raw=True uses integer formatting)."""
        self.print_vector(window, y, 4, data.acceleration.value, raw)
        self.print_vector(window, y, 27, data.angular_velocity.value, raw)
        self.print_vector(window, y, 50, data.magnetic_field.value, raw)
        window.addstr(y, 73, '{:5.0f}'.format(data.temperature.value))

    def display_attitude_data(self, data: AttitudeOutput):
        """Show the Madgwick filter output: acceleration, Tait-Bryan angles
        and the raw attitude quaternion."""
        self.print_vector(self.attitude_window, 2, 9, data.acceleration, False)
        a = data.attitude
        self.print_as_tait_bryan_angles(self.attitude_window, 2, 41, a)
        self.attitude_window.addstr(3, 9, '{:9.6f} {:9.6f} {:9.6f} {:9.6f}'.format(a.w, a.i, a.j, a.k))
        self.attitude_window.refresh()

    def display_raw_beacon_data(self, data: Vector):
        """Show the unscaled beacon position."""
        self.print_vector(self.beacon_window, 2, 9, data, False)
        self.beacon_window.refresh()

    def display_scaled_beacon_data(self, data: Vector):
        """Show the scaled beacon position."""
        self.print_vector(self.beacon_window, 3, 9, data, False)
        self.beacon_window.refresh()

    def display_position_data(self, data: Position):
        """Show the Kalman filter output; attitude/position/velocity are
        optional and skipped when absent."""
        self.print_vector(self.output_window, 2, 9, data.acceleration, False)
        a = data.attitude
        if a:
            self.print_as_tait_bryan_angles(self.output_window, 2, 41, a)
        if data.position:
            self.print_vector(self.output_window, 3, 9, data.position, False)
        if data.velocity:
            self.print_vector(self.output_window, 3, 41, data.velocity, False)
        self.output_window.refresh()

    def display_heading_data(self, data: Vector, gyro_in_zero_limit: bool, calibrating: bool, stationary: bool):
        """Show the heading angles plus the gyro-zero/calibrating/stationary
        flags."""
        self.heading_window.addstr(2, 9, '{:6.1f} {:6.1f} {:6.1f} {:>5s} {:>5s} {:>5s}'.format(data.x, data.y, data.z, str(gyro_in_zero_limit), str(calibrating), str(stationary)))
        self.heading_window.refresh()

    @staticmethod
    def get_row_after(window: any) -> int:
        """Screen row immediately below ``window`` (origin row + height)."""
        return window.getbegyx()[0] + window.getmaxyx()[0]

    @staticmethod
    def print_as_tait_bryan_angles(window: any, y: int, x: int, quaternion: Quaternion):
        """Write a quaternion as roll/pitch/yaw at (y, x)."""
        e = quaternion.to_tait_bryan()
        window.addstr(y, x, '{:6.1f} {:6.1f} {:6.1f}'.format(e.x, e.y, e.z))

    @staticmethod
    def print_vector(window: any, y: int, x: int, vector: Vector, raw: bool):
        """Write a vector at (y, x); raw values as integers, else 3 d.p."""
        if raw:
            window.addstr(y, x, '{:6.0f} {:6.0f} {:6.0f}'.format(vector.x, vector.y, vector.z))
        else:
            window.addstr(y, x, '{:6.3f} {:6.3f} {:6.3f}'.format(vector.x, vector.y, vector.z))
from rover_position_rjg.clients.monitor.imu_data_model import ImuDataModel
from rover_position_rjg.clients.monitor.messenger import Messenger
from rover_position_rjg.clients.monitor.presenter import Presenter
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.filters.attitude_filter import AttitudeOutput
from rover_position_rjg.position.position.position import Position
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class App:
    """Monitor application: subscribes to the position service's data streams
    via the messenger and forwards each message to the presenter."""

    def __init__(self, presenter: Presenter, messenger: Messenger):
        # Holds the latest raw + scaled IMU sample pair for display.
        self.imu_data_model = ImuDataModel()
        self._presenter = presenter
        self._messenger = messenger
        # Register one callback per data stream.
        self._messenger.start_publishing_data(
            self._on_raw_data_received,
            self._on_scaled_data_received,
            self._on_attitude_data_received,
            self._on_raw_beacon_data_received,
            self._on_scaled_beacon_data_received,
            self._on_position_data_received,
            self._on_heading_data_received)

    def quit(self):
        """Unsubscribe, process any remaining messages and disconnect."""
        self._messenger.stop_publishing_data()
        self._messenger.check_messages()
        self._messenger.disconnect()

    def _on_raw_data_received(self, data: NineDoFData):
        # Cache only; the display refresh is driven by the scaled-data callback.
        self.imu_data_model.raw = data

    def _on_scaled_data_received(self, data: NineDoFData):
        # Scaled data completes the model, so present raw + scaled together.
        self.imu_data_model.actual = data
        self._presenter.present_imu_data(self.imu_data_model)

    def _on_attitude_data_received(self, data: AttitudeOutput):
        self._presenter.present_attitude_data(data)

    def _on_raw_beacon_data_received(self, data: Vector):
        self._presenter.present_raw_beacon_data(data)

    def _on_scaled_beacon_data_received(self, data: Vector):
        self._presenter.present_scaled_beacon_data(data)

    def _on_position_data_received(self, data: Position):
        self._presenter.present_position_data(data)

    def _on_heading_data_received(self, data: Vector, gyro_in_zero_limit: bool, calibrating: bool, stationary: bool):
        self._presenter.present_heading_data(data, gyro_in_zero_limit, calibrating, stationary)
from rover_position_rjg.data.flags import Flags
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.data.data import Data
from rover_position_rjg.json_aware.json_aware import JsonAware
class NineDoFData(JsonAware['NineDoFData']):
    """A single 9-DoF IMU sample: acceleration, angular velocity and magnetic
    field vectors plus a temperature, each carried in a timestamped Data
    wrapper, together with a status flag word.

    Arithmetic operators combine samples channel by channel; results keep the
    left operand's timestamps and status.
    """

    # The three vector-valued channels, combined uniformly by _combine/_map.
    _VECTOR_FIELDS = ('acceleration', 'angular_velocity', 'magnetic_field')

    @staticmethod
    def zero() -> 'NineDoFData':
        # Build a distinct Vector per channel: sharing a single instance
        # would make an edit to one channel show up in all of them.
        return NineDoFData(Data(Vector.zero(), 0), Data(Vector.zero(), 0), Data(Vector.zero(), 0), Data(0, 0))

    @staticmethod
    def one() -> 'NineDoFData':
        # Distinct Vector per channel for the same aliasing reason as zero().
        return NineDoFData(Data(Vector.one(), 0), Data(Vector.one(), 0), Data(Vector.one(), 0), Data(1, 0))

    def __init__(self,
                 acceleration: Data[Vector],
                 angular_velocity: Data[Vector],
                 magnetic_field: Data[Vector],
                 temperature: Data[float],
                 status: Flags = None):
        if not status:
            status = Flags()
        self.acceleration = acceleration
        self.angular_velocity = angular_velocity
        self.magnetic_field = magnetic_field
        self.temperature = temperature
        self.status = status

    def __eq__(self, other):
        if isinstance(other, NineDoFData):
            return self.acceleration == other.acceleration and \
                   self.angular_velocity == other.angular_velocity and \
                   self.magnetic_field == other.magnetic_field and \
                   self.temperature == other.temperature and \
                   self.status == other.status
        return False

    def _combine(self, other: 'NineDoFData', vector_op, temperature_op=None) -> 'NineDoFData':
        """Combine two samples channel by channel. ``vector_op`` is applied to
        the three vector channels, ``temperature_op`` (defaulting to
        ``vector_op``) to the temperature. Timestamps and status are taken
        from ``self``. Factors out the previously duplicated operator bodies."""
        if temperature_op is None:
            temperature_op = vector_op
        channels = []
        for name in self._VECTOR_FIELDS:
            mine = getattr(self, name)
            theirs = getattr(other, name)
            channels.append(Data(vector_op(mine.value, theirs.value), mine.timestamp))
        temp = Data(temperature_op(self.temperature.value, other.temperature.value), self.temperature.timestamp)
        return NineDoFData(channels[0], channels[1], channels[2], temp, self.status)

    def _map(self, fn) -> 'NineDoFData':
        """Apply ``fn`` to every channel value (vectors and temperature),
        keeping timestamps and status."""
        return NineDoFData(
            Data(fn(self.acceleration.value), self.acceleration.timestamp),
            Data(fn(self.angular_velocity.value), self.angular_velocity.timestamp),
            Data(fn(self.magnetic_field.value), self.magnetic_field.timestamp),
            Data(fn(self.temperature.value), self.temperature.timestamp),
            self.status)

    def __add__(self, other):
        return self._combine(other, lambda a, b: a + b)

    def __sub__(self, other):
        return self._combine(other, lambda a, b: a - b)

    def __mul__(self, other: float):
        return self._map(lambda v: v * other)

    def __truediv__(self, other: float):
        return self._map(lambda v: v / other)

    def scale(self, other):
        """Scales each element of this sample by the corresponding element of
        ``other`` (element-wise for the vectors, plain product for the
        temperature)."""
        return self._combine(other, lambda a, b: a.scale(b), lambda a, b: a * b)

    def to_json(self) -> str:
        """Serialise to the calibration-file JSON layout."""
        return '{{\n "acceleration":{},\n "angular_velocity":{},\n "magnetic_field":{},\n "temperature":{},\n "status":"{}"\n}}'\
            .format(self.acceleration.to_json(), self.angular_velocity.to_json(), self.magnetic_field.to_json(), self.temperature.to_json(), self.status)

    @staticmethod
    def from_json(obj: dict) -> 'NineDoFData':
        """Build a NineDoFData from a parsed JSON dict; 'status' is optional
        and may be any int literal base (the 0 base arg accepts e.g. '0x3')."""
        acc = Data.from_json(obj['acceleration'], Vector.from_json)
        av = Data.from_json(obj['angular_velocity'], Vector.from_json)
        mag = Data.from_json(obj['magnetic_field'], Vector.from_json)
        temp = Data.from_json(obj['temperature'])
        if 'status' in obj:
            status = int(obj['status'], 0)
        else:
            status = 0
        return NineDoFData(acc, av, mag, temp, Flags(status))
from typing import Iterable
from rover_position_rjg.csv_helpers.csv_converter import CsvConverter, TCsvItem
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class NineDoFDataCsvConverter(CsvConverter[NineDoFData]):
    """Converts NineDoFData samples to/from 14-column CSV rows laid out as:
    acc x,y,z,t | gyro x,y,z,t | mag x,y,z,t | temperature, t."""

    def to_object(self, row: Iterable[TCsvItem]) -> NineDoFData:
        """Parse one CSV row into a NineDoFData sample."""
        values = list(row)
        acceleration = self._parse_vector(values[0:4])
        angular_velocity = self._parse_vector(values[4:8])
        magnetic_field = self._parse_vector(values[8:12])
        temperature = self._parse_temp(values[12:14])
        return NineDoFData(acceleration, angular_velocity, magnetic_field, temperature)

    def to_row(self, value: NineDoFData) -> Iterable:
        """Flatten a NineDoFData sample into one 14-item row."""
        return self._write_vector(value.acceleration) + \
               self._write_vector(value.angular_velocity) + \
               self._write_vector(value.magnetic_field) + \
               self._write_temp(value.temperature)

    @staticmethod
    def _parse_vector(values: list) -> Data[Vector]:
        """Parse [x, y, z, timestamp]; returns None only when every field is
        absent. Bug fix: the original used truthiness (``if value or time``),
        which dropped records with a 0 timestamp (and depended on the
        truthiness of a zero Vector); now compares against None explicitly."""
        x = values[0]
        y = values[1]
        z = values[2]
        time = values[3]
        value = None
        if x is not None or y is not None or z is not None:
            value = Vector([x, y, z])
        if value is not None or time is not None:
            return Data[Vector](value, time)
        return None

    @staticmethod
    def _write_vector(data: Data[Vector]):
        """Emit [x, y, z, timestamp], padding absent parts with None.
        Uses explicit None checks rather than truthiness so a falsy Data or
        zero Vector is still written out."""
        if data is None:
            return [None, None, None, None]
        value = data.value
        if value is None:
            return [None, None, None, data.timestamp]
        return [value.x, value.y, value.z, data.timestamp]

    @staticmethod
    def _parse_temp(values: list) -> Data[float]:
        """Parse [temperature, timestamp]. Bug fix: a reading of 0 (or a 0
        timestamp) previously parsed as None because of truthiness checks."""
        temp = values[0]
        time = values[1]
        if temp is not None or time is not None:
            return Data[float](temp, time)
        return None

    @staticmethod
    def _write_temp(data: Data):
        """Emit [temperature, timestamp], or [None, None] when absent."""
        if data is None:
            return [None, None]
        return [data.value, data.timestamp]
import time
import logging
from rover_position_rjg.data.data_pump.data_provider import DataProvider
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
from rover_position_rjg.data.data import Data
from lsm9ds1_rjg import Driver, I2CTransport, SPITransport
class ImuDataProvider(DataProvider[NineDoFData]):
    """Polls an LSM9DS1 IMU and packages its readings as NineDoFData.

    NOTE(review): poll() assumes the magnetometer sample rate is higher
    than the accel/gyro ODR so a fresh mag reading is always available
    when the AG interrupt fires - confirm against driver configuration.
    """
    # GPIO pin wired to the accelerometer/gyro INT1 (data ready) line
    PIN_INT1_AG = 23
    def __init__(self, driver: Driver):
        """
        :param driver: a pre-built LSM9DS1 driver, or None/falsy to
            construct a default SPI-based driver internally
        """
        super().__init__()
        self.driver = driver
        if not self.driver:
            self.driver = Driver(
                # I2CTransport(1, I2CTransport.I2C_AG_ADDRESS, self.PIN_INT1_AG),
                # I2CTransport(1, I2CTransport.I2C_MAG_ADDRESS))
                SPITransport(0, False, self.PIN_INT1_AG),
                SPITransport(1, True),
                high_priority=True)
        self.driver.configure()
        # Captured by poll() the moment the AG interrupt fires; used in
        # get() as the timestamp for the accel/gyro/temperature samples
        self.data_ready_timestamp = 0
    def get(self) -> Data[NineDoFData]:
        """Reads one full 9-DoF sample from the device.

        The accel/gyro/temperature share the interrupt timestamp taken in
        poll(); the magnetometer gets its own timestamp taken just before
        its register read.
        """
        ag_timestamp = self.data_ready_timestamp
        temp, acc, gyro = self.driver.read_ag_data()
        mag_timestamp = self.time()
        mag = self.driver.read_magnetometer()
        nine_dof = NineDoFData(
            Data(Vector(acc), ag_timestamp),
            Data(Vector(gyro), ag_timestamp),
            Data(Vector(mag), mag_timestamp),
            Data(temp, ag_timestamp))
        return Data(nine_dof, self.time())
    def poll(self, timeout: float) -> bool:
        """Blocks until accel/gyro data is ready or the timeout expires.

        :param timeout: maximum wait in seconds
        :returns True if data is ready, False on timeout (also logged)
        """
        # Wait for acceleration and gyro to be ready.
        # Assume magnetometer will be ready at the same time.
        # This requires that the mag is configured to be faster
        # than the ag ODR
        ready = self.driver.ag_data_ready(int(timeout * 1000))
        if ready:
            # Get the timestamp now to eliminate the time to call get()
            self.data_ready_timestamp = self.time()
        else:
            logging.warning("IMU data not ready after {} seconds".format(timeout))
        return ready
    def close(self):
        """Releases the underlying driver and its transports."""
        self.driver.close()
    @staticmethod
    def time():
        # CLOCK_MONOTONIC_RAW: immune to NTP slewing, converted ns -> s
        return time.clock_gettime_ns(time.CLOCK_MONOTONIC_RAW) * 1e-9
import math
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.position.attitude.attitude_algorithm import AttitudeAlgorithm
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
# noinspection PyPep8Naming,SpellCheckingInspection
class BasicMadgwick(AttitudeAlgorithm):
    # Filter tuning: beta trades convergence speed against noise
    # sensitivity; zeta controls how fast the gyro bias estimate adapts.
    gyroMeasError = 3.14159265358979 * (5 / 180.0)  # gyroscope measurement error in rad/s (default 5 deg/s)
    gyroMeasDrift = 3.14159265358979 * (0.2 / 180.0)  # gyroscope measurement error in rad/s/s (default 0.2f deg/s/s)
    # gyroMeasError = 3.14159265358979 * (2 / 180.0)  # gyroscope measurement error in rad/s (default 5 deg/s)
    # gyroMeasDrift = 3.14159265358979 * (0.05 / 180.0)  # gyroscope measurement error in rad/s/s (default 0.2f deg/s/s)
    beta = math.sqrt(3.0 / 4.0) * gyroMeasError  # compute beta
    zeta = math.sqrt(3.0 / 4.0) * gyroMeasDrift  # compute zeta
    # 90 degree rotation about z: converts Madgwick's NWU frame to ENU
    nwu_to_enu = Quaternion(1/math.sqrt(2), 0, 0, 1/math.sqrt(2))
    """An implementation of Madgwick's algorithm for MARG that's as close
    as possible to the version in his paper."""
    def __init__(self):
        super().__init__()
        print('Basic Madgwick beta={:5.3f}, zeta={:5.3f}'.format(BasicMadgwick.beta, BasicMadgwick.zeta))
        self.deltat = 0  # sampling interval in seconds, measured between samples
        self.previous_timestamp = 0
        # estimated orientation quaternion elements with initial conditions
        self.SEq_1, self.SEq_2, self.SEq_3, self.SEq_4 = 1, 0, 0, 0
        # reference direction of flux in earth frame
        self.b_x, self.b_z = 1, 0
        # estimate gyroscope biases error
        self.w_bx, self.w_by, self.w_bz = 0, 0, 0
    def reset(self):
        """Returns the filter to its freshly constructed state."""
        self.__init__()
    def initialise(self, attitude: Quaternion, timestamp: float):
        """Seeds the filter with a known attitude given in the ENU frame."""
        # Rotate from ENU to NWU or we'll start Madgwick out facing the wrong way
        q = (-self.nwu_to_enu) @ attitude
        self.SEq_1, self.SEq_2, self.SEq_3, self.SEq_4 = q.w, q.i, q.j, q.k
        self.previous_timestamp = timestamp
        self.initialised = True
    def step(self, data: NineDoFData) -> Quaternion:
        """Updates the attitude estimate with one IMU sample and returns
        the new attitude as a quaternion in the ENU frame. The first
        sample ever seen is used to seed the estimate instead."""
        acc = data.acceleration.value
        gyro = data.angular_velocity.value
        mag = data.magnetic_field.value
        timestamp = data.angular_velocity.timestamp
        if self.previous_timestamp > 0:
            self.deltat = timestamp - self.previous_timestamp
            self.previous_timestamp = timestamp
            self._filterUpdate(gyro.x, gyro.y, gyro.z, acc.x, acc.y, acc.z, mag.x, mag.y, mag.z)
            # Madgwick uses the axes NWU. We want ENU so rotate the output by 90 degrees
            # There may be a more efficient way to do this by messing with the inputs but
            # that seems to lead to cross talk between the different axes so I'll play
            # it safe and use a Quaternion rotation.
            madgwick_output = Quaternion(self.SEq_1, self.SEq_2, self.SEq_3, self.SEq_4)
            return self.nwu_to_enu @ madgwick_output
        else:
            # Initialise quaternion from magnetic field and gravity
            from_imu = self.quaternion_from_imu(data)
            self.initialise(from_imu, timestamp)
            return from_imu
    # Note that Madgwick has his real world axes in the order North West Up.
    # We want to use East North Up so we need to rotate Madgwick's output by 90 degrees.
    def _filterUpdate(self, w_x: float, w_y: float, w_z: float, a_x: float, a_y: float, a_z: float, m_x: float, m_y: float, m_z: float):
        """One iteration of Madgwick's MARG gradient-descent filter.

        Gyro rates are rad/s; accelerometer and magnetometer vectors are
        normalised internally so any consistent unit works. Updates the
        SEq_* quaternion, the w_b* gyro bias estimates and the b_x/b_z
        earth-frame flux reference in place.
        """
        # local system variables
        norm = 0  # vector norm
        SEqDot_omega_1, SEqDot_omega_2, SEqDot_omega_3, SEqDot_omega_4 = 0, 0, 0, 0  # quaternion rate from gyroscopes elements
        f_1, f_2, f_3, f_4, f_5, f_6 = 0, 0, 0, 0, 0, 0  # objective function elements
        J_11or24, J_12or23, J_13or22, J_14or21, J_32, J_33 = 0, 0, 0, 0, 0, 0  # objective function Jacobian elements
        J_41, J_42, J_43, J_44, J_51, J_52, J_53, J_54, J_61, J_62, J_63, J_64 = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 , 0
        SEqHatDot_1, SEqHatDot_2, SEqHatDot_3, SEqHatDot_4 = 0, 0, 0, 0  # estimated direction of the gyroscope error
        w_err_x, w_err_y, w_err_z = 0, 0, 0  # estimated direction of the gyroscope error (angular)
        h_x, h_y, h_z = 0, 0, 0  # computed flux in the earth frame
        # auxiliary variables to avoid repeated calculations
        halfSEq_1 = 0.5 * self.SEq_1
        halfSEq_2 = 0.5 * self.SEq_2
        halfSEq_3 = 0.5 * self.SEq_3
        halfSEq_4 = 0.5 * self.SEq_4
        twoSEq_1 = 2.0 * self.SEq_1
        twoSEq_2 = 2.0 * self.SEq_2
        twoSEq_3 = 2.0 * self.SEq_3
        twoSEq_4 = 2.0 * self.SEq_4
        twob_x = 2.0 * self.b_x
        twob_z = 2.0 * self.b_z
        twob_xSEq_1 = 2.0 * self.b_x * self.SEq_1
        twob_xSEq_2 = 2.0 * self.b_x * self.SEq_2
        twob_xSEq_3 = 2.0 * self.b_x * self.SEq_3
        twob_xSEq_4 = 2.0 * self.b_x * self.SEq_4
        twob_zSEq_1 = 2.0 * self.b_z * self.SEq_1
        twob_zSEq_2 = 2.0 * self.b_z * self.SEq_2
        twob_zSEq_3 = 2.0 * self.b_z * self.SEq_3
        twob_zSEq_4 = 2.0 * self.b_z * self.SEq_4
        SEq_1SEq_2 = 0
        SEq_1SEq_3 = self.SEq_1 * self.SEq_3
        SEq_1SEq_4 = 0
        SEq_2SEq_3 = 0
        SEq_2SEq_4 = self.SEq_2 * self.SEq_4
        SEq_3SEq_4 = 0
        twom_x = 2.0 * m_x
        twom_y = 2.0 * m_y
        twom_z = 2.0 * m_z
        # normalise the accelerometer measurement
        norm = math.sqrt(a_x * a_x + a_y * a_y + a_z * a_z)
        if norm != 0:
            a_x /= norm
            a_y /= norm
            a_z /= norm
        # normalise the magnetometer measurement
        norm = math.sqrt(m_x * m_x + m_y * m_y + m_z * m_z)
        if norm != 0:
            m_x /= norm
            m_y /= norm
            m_z /= norm
        # compute the objective function and Jacobian
        f_1 = twoSEq_2 * self.SEq_4 - twoSEq_1 * self.SEq_3 - a_x
        f_2 = twoSEq_1 * self.SEq_2 + twoSEq_3 * self.SEq_4 - a_y
        f_3 = 1.0 - twoSEq_2 * self.SEq_2 - twoSEq_3 * self.SEq_3 - a_z
        f_4 = twob_x * (0.5 - self.SEq_3 * self.SEq_3 - self.SEq_4 * self.SEq_4) + twob_z * (SEq_2SEq_4 - SEq_1SEq_3) - m_x
        f_5 = twob_x * (self.SEq_2 * self.SEq_3 - self.SEq_1 * self.SEq_4) + twob_z * (self.SEq_1 * self.SEq_2 + self.SEq_3 * self.SEq_4) - m_y
        f_6 = twob_x * (SEq_1SEq_3 + SEq_2SEq_4) + twob_z * (0.5 - self.SEq_2 * self.SEq_2 - self.SEq_3 * self.SEq_3) - m_z
        J_11or24 = twoSEq_3  # J_11 negated in matrix multiplication
        J_12or23 = 2.0 * self.SEq_4
        J_13or22 = twoSEq_1  # J_12 negated in matrix multiplication
        J_14or21 = twoSEq_2
        J_32 = 2.0 * J_14or21  # negated in matrix multiplication
        J_33 = 2.0 * J_11or24  # negated in matrix multiplication
        J_41 = twob_zSEq_3  # negated in matrix multiplication
        J_42 = twob_zSEq_4
        J_43 = 2.0 * twob_xSEq_3 + twob_zSEq_1  # negated in matrix multiplication
        J_44 = 2.0 * twob_xSEq_4 - twob_zSEq_2  # negated in matrix multiplication
        J_51 = twob_xSEq_4 - twob_zSEq_2  # negated in matrix multiplication
        J_52 = twob_xSEq_3 + twob_zSEq_1
        J_53 = twob_xSEq_2 + twob_zSEq_4
        J_54 = twob_xSEq_1 - twob_zSEq_3  # negated in matrix multiplication
        J_61 = twob_xSEq_3
        J_62 = twob_xSEq_4 - 2.0 * twob_zSEq_2
        J_63 = twob_xSEq_1 - 2.0 * twob_zSEq_3
        J_64 = twob_xSEq_2
        # compute the gradient (matrix multiplication)
        SEqHatDot_1 = J_14or21 * f_2 - J_11or24 * f_1 - J_41 * f_4 - J_51 * f_5 + J_61 * f_6
        SEqHatDot_2 = J_12or23 * f_1 + J_13or22 * f_2 - J_32 * f_3 + J_42 * f_4 + J_52 * f_5 + J_62 * f_6
        SEqHatDot_3 = J_12or23 * f_2 - J_33 * f_3 - J_13or22 * f_1 - J_43 * f_4 + J_53 * f_5 + J_63 * f_6
        SEqHatDot_4 = J_14or21 * f_1 + J_11or24 * f_2 - J_44 * f_4 - J_54 * f_5 + J_64 * f_6
        # normalise the gradient to estimate direction of the gyroscope error
        norm = math.sqrt(SEqHatDot_1 * SEqHatDot_1 + SEqHatDot_2 * SEqHatDot_2 + SEqHatDot_3 * SEqHatDot_3 + SEqHatDot_4 * SEqHatDot_4)
        if norm != 0:
            SEqHatDot_1 = SEqHatDot_1 / norm
            SEqHatDot_2 = SEqHatDot_2 / norm
            SEqHatDot_3 = SEqHatDot_3 / norm
            SEqHatDot_4 = SEqHatDot_4 / norm
        # compute angular estimated direction of the gyroscope error
        w_err_x = twoSEq_1 * SEqHatDot_2 - twoSEq_2 * SEqHatDot_1 - twoSEq_3 * SEqHatDot_4 + twoSEq_4 * SEqHatDot_3
        w_err_y = twoSEq_1 * SEqHatDot_3 + twoSEq_2 * SEqHatDot_4 - twoSEq_3 * SEqHatDot_1 - twoSEq_4 * SEqHatDot_2
        w_err_z = twoSEq_1 * SEqHatDot_4 - twoSEq_2 * SEqHatDot_3 + twoSEq_3 * SEqHatDot_2 - twoSEq_4 * SEqHatDot_1
        # compute and remove the gyroscope biases
        self.w_bx += w_err_x * self.deltat * BasicMadgwick.zeta
        self.w_by += w_err_y * self.deltat * BasicMadgwick.zeta
        self.w_bz += w_err_z * self.deltat * BasicMadgwick.zeta
        w_x -= self.w_bx
        w_y -= self.w_by
        w_z -= self.w_bz
        # compute the quaternion rate measured by gyroscopes
        SEqDot_omega_1 = -halfSEq_2 * w_x - halfSEq_3 * w_y - halfSEq_4 * w_z
        SEqDot_omega_2 = halfSEq_1 * w_x + halfSEq_3 * w_z - halfSEq_4 * w_y
        SEqDot_omega_3 = halfSEq_1 * w_y - halfSEq_2 * w_z + halfSEq_4 * w_x
        SEqDot_omega_4 = halfSEq_1 * w_z + halfSEq_2 * w_y - halfSEq_3 * w_x
        # compute then integrate the estimated quaternion rate
        self.SEq_1 += (SEqDot_omega_1 - (BasicMadgwick.beta * SEqHatDot_1)) * self.deltat
        self.SEq_2 += (SEqDot_omega_2 - (BasicMadgwick.beta * SEqHatDot_2)) * self.deltat
        self.SEq_3 += (SEqDot_omega_3 - (BasicMadgwick.beta * SEqHatDot_3)) * self.deltat
        self.SEq_4 += (SEqDot_omega_4 - (BasicMadgwick.beta * SEqHatDot_4)) * self.deltat
        # normalise quaternion
        norm = math.sqrt(self.SEq_1 * self.SEq_1 + self.SEq_2 * self.SEq_2 + self.SEq_3 * self.SEq_3 + self.SEq_4 * self.SEq_4)
        if norm != 0:
            self.SEq_1 /= norm
            self.SEq_2 /= norm
            self.SEq_3 /= norm
            self.SEq_4 /= norm
        # compute flux in the earth frame
        SEq_1SEq_2 = self.SEq_1 * self.SEq_2  # recompute auxiliary variables
        SEq_1SEq_3 = self.SEq_1 * self.SEq_3
        SEq_1SEq_4 = self.SEq_1 * self.SEq_4
        SEq_3SEq_4 = self.SEq_3 * self.SEq_4
        SEq_2SEq_3 = self.SEq_2 * self.SEq_3
        SEq_2SEq_4 = self.SEq_2 * self.SEq_4
        h_x = twom_x * (0.5 - self.SEq_3 * self.SEq_3 - self.SEq_4 * self.SEq_4) + twom_y * (SEq_2SEq_3 - SEq_1SEq_4) + twom_z * (SEq_2SEq_4 + SEq_1SEq_3)
        h_y = twom_x * (SEq_2SEq_3 + SEq_1SEq_4) + twom_y * (0.5 - self.SEq_2 * self.SEq_2 - self.SEq_4 * self.SEq_4) + twom_z * (SEq_3SEq_4 - SEq_1SEq_2)
        h_z = twom_x * (SEq_2SEq_4 - SEq_1SEq_3) + twom_y * (SEq_3SEq_4 + SEq_1SEq_2) + twom_z * (0.5 - self.SEq_2 * self.SEq_2 - self.SEq_3 * self.SEq_3)
        # normalise the flux vector to have only components in the x and z
        self.b_x = math.sqrt((h_x * h_x) + (h_y * h_y))
        self.b_z = h_z
import math
from abc import ABC, abstractmethod
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class AttitudeAlgorithm(ABC):
    """A step wise algorithm for determining a device's attitude
    from gyroscope, accelerometer and magnetometer readings"""

    def __init__(self):
        # True once the estimate has been seeded via initialise()
        self.initialised = False

    @abstractmethod
    def step(self, data: NineDoFData) -> Quaternion:
        """
        Updates the attitude estimate with the supplied IMU data
        :param data: the data
        :returns the new attitude
        """
        pass

    @abstractmethod
    def reset(self):
        """Resets the algorithm to its initial state. i.e. it behaves as
        if it had just been created."""
        pass

    @abstractmethod
    def initialise(self, attitude: Quaternion, timestamp: float):
        """
        Sets the current attitude estimate to the supplied value
        :param attitude: the desired attitude estimate
        :param timestamp: the time at which this attitude was calculated
        """
        pass

    @staticmethod
    def quaternion_from_imu(data: NineDoFData) -> Quaternion:
        """Estimates an attitude quaternion from the gravity and magnetic
        field vectors in a single IMU sample."""
        # Roll and pitch come straight from the gravity vector
        grav = data.acceleration.value
        roll = math.atan2(grav.y, grav.z)
        pitch = -math.atan2(grav.x, math.sqrt(grav.y**2 + grav.z**2))
        # Yaw: tilt-compensate the magnetometer reading first
        field = data.magnetic_field.value
        sin_roll = math.sin(roll)
        cos_roll = math.cos(roll)
        vy = field.z * sin_roll - field.y * cos_roll
        vz = field.y * sin_roll + field.z * cos_roll
        vx = field.x * math.cos(pitch) + vz * math.sin(pitch)
        yaw = math.atan2(vx, -vy)
        tait_bryan = Vector([math.degrees(roll), math.degrees(pitch), math.degrees(yaw)])
        return Quaternion.from_tait_bryan(tait_bryan)
import math
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.position.attitude.attitude_algorithm import AttitudeAlgorithm
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
# noinspection PyPep8Naming,SpellCheckingInspection
class ModifiedMadgwick(AttitudeAlgorithm):
    # Below 2 it generates a significant drift up and down in velocity
    gyroMeasError = math.radians(2)  # gyroscope measurement error in rad/s (default 5 deg/s)
    beta = math.sqrt(3.0 / 4.0) * gyroMeasError  # compute beta
    # 90 degree rotation about z: converts Madgwick's NWU frame to ENU
    nwu_to_enu = Quaternion(1/math.sqrt(2), 0, 0, 1/math.sqrt(2))
    """An implementation of Madgwick's algorithm for MARG that's as close
    as possible to the version in his paper."""
    def __init__(self):
        super().__init__()
        # print('Modified Madgwick beta={:5.3f}'.format(ModifiedMadgwick.beta))
        self.previous_timestamp = 0
        # estimated orientation quaternion elements with initial conditions
        self.q0, self.q1, self.q2, self.q3 = 1, 0, 0, 0
    def reset(self):
        """Returns the filter to its freshly constructed state."""
        self.__init__()
    def initialise(self, attitude: Quaternion, timestamp: float):
        """Seeds the filter with a known attitude given in the ENU frame."""
        # Rotate from ENU to NWU or we'll start Madgwick out facing the wrong way
        q = (-self.nwu_to_enu) @ attitude
        self.q0, self.q1, self.q2, self.q3 = q.w, q.i, q.j, q.k
        self.previous_timestamp = timestamp
        self.initialised = True
    def step(self, data: NineDoFData) -> Quaternion:
        """Updates the attitude estimate with one IMU sample and returns
        the new attitude as a quaternion in the ENU frame. The first
        sample ever seen is used to seed the estimate instead."""
        acc = data.acceleration.value
        gyro = data.angular_velocity.value
        mag = data.magnetic_field.value
        timestamp = data.angular_velocity.timestamp
        if self.initialised:
            deltat = timestamp - self.previous_timestamp
            self.previous_timestamp = timestamp
            self._filterUpdate(deltat, gyro.x, gyro.y, gyro.z, acc.x, acc.y, acc.z, mag.x, mag.y, mag.z)
            # Madgwick uses the axes NWU. We want ENU so rotate the output by 90 degrees
            # There may be a more efficient way to do this by messing with the inputs but
            # that seems to lead to cross talk between the different axes so I'll play
            # it safe and use a Quaternion rotation.
            madgwick_output = Quaternion(self.q0, self.q1, self.q2, self.q3)
            return self.nwu_to_enu @ madgwick_output
        else:
            # Initialise quaternion from magnetic field and gravity
            from_imu = self.quaternion_from_imu(data)
            self.initialise(from_imu, timestamp)
            return from_imu
    # Note that Madgwick has his real world axes in the order North West Up.
    # We want to use East North Up so we need to rotate Madgwick's output by 90 degrees.
    def _filterUpdate(self, deltat: float, gx: float, gy: float, gz: float, ax: float, ay: float, az: float, mx: float, my: float, mz: float):
        """One iteration of the later (open-source) form of Madgwick's MARG
        filter. Gyro rates are rad/s; accelerometer and magnetometer
        vectors are normalised internally. Updates q0..q3 in place.
        """
        norm = 0.0
        s0 = s1 = s2 = s3 = 0.0
        qDot1 = qDot2 = qDot3 = qDot4 = 0.0
        hx = hy = 0.0
        _2q0mx = _2q0my = _2q0mz = _2q1mx = 0.0
        _2bx = _2bz = _4bx = _4bz = 0.0
        _2q0 = _2q1 = _2q2 = _2q3 = _2q0q2 = _2q2q3 = 0.0
        q0q0 = q0q1 = q0q2 = q0q3 = 0.0
        q1q1 = q1q2 = q1q3 = q2q2 = q2q3 = q3q3 = 0.0
        # Rate of change of quaternion from gyroscope
        qDot1 = 0.5 * (-self.q1 * gx - self.q2 * gy - self.q3 * gz)
        qDot2 = 0.5 * (self.q0 * gx + self.q2 * gz - self.q3 * gy)
        qDot3 = 0.5 * (self.q0 * gy - self.q1 * gz + self.q3 * gx)
        qDot4 = 0.5 * (self.q0 * gz + self.q1 * gy - self.q2 * gx)
        # normalise the accelerometer measurement
        norm = math.sqrt(ax * ax + ay * ay + az * az)
        if norm != 0:
            ax /= norm
            ay /= norm
            az /= norm
        # normalise the magnetometer measurement
        norm = math.sqrt(mx * mx + my * my + mz * mz)
        if norm != 0:
            mx /= norm
            my /= norm
            mz /= norm
        # Auxiliary variables to avoid repeated arithmetic
        _2q0mx = 2.0 * self.q0 * mx
        _2q0my = 2.0 * self.q0 * my
        _2q0mz = 2.0 * self.q0 * mz
        _2q1mx = 2.0 * self.q1 * mx
        _2q0 = 2.0 * self.q0
        _2q1 = 2.0 * self.q1
        _2q2 = 2.0 * self.q2
        _2q3 = 2.0 * self.q3
        _2q0q2 = 2.0 * self.q0 * self.q2
        _2q2q3 = 2.0 * self.q2 * self.q3
        q0q0 = self.q0 * self.q0
        q0q1 = self.q0 * self.q1
        q0q2 = self.q0 * self.q2
        q0q3 = self.q0 * self.q3
        q1q1 = self.q1 * self.q1
        q1q2 = self.q1 * self.q2
        q1q3 = self.q1 * self.q3
        q2q2 = self.q2 * self.q2
        q2q3 = self.q2 * self.q3
        q3q3 = self.q3 * self.q3
        # Reference direction of Earth's magnetic field
        hx = mx * q0q0 - _2q0my * self.q3 + _2q0mz * self.q2 + mx * q1q1 + _2q1 * my * self.q2 + _2q1 * mz * self.q3 - mx * q2q2 - mx * q3q3
        hy = _2q0mx * self.q3 + my * q0q0 - _2q0mz * self.q1 + _2q1mx * self.q2 - my * q1q1 + my * q2q2 + _2q2 * mz * self.q3 - my * q3q3
        _2bx = math.sqrt(hx * hx + hy * hy)
        _2bz = -_2q0mx * self.q2 + _2q0my * self.q1 + mz * q0q0 + _2q1mx * self.q3 - mz * q1q1 + _2q2 * my * self.q3 - mz * q2q2 + mz * q3q3
        _4bx = 2.0 * _2bx
        _4bz = 2.0 * _2bz
        _8bx = 2.0 * _4bx
        _8bz = 2.0 * _4bz
        # Gradient decent algorithm corrective step
        s0 = -_2q2 * (2.0 * (q1q3 - q0q2) - ax) + _2q1 * (2.0 * (q0q1 + q2q3) - ay) + -_4bz * self.q2 * (
                _4bx * (0.5 - q2q2 - q3q3) + _4bz * (q1q3 - q0q2) - mx) + (-_4bx * self.q3 + _4bz * self.q1) * (
                     _4bx * (q1q2 - q0q3) + _4bz * (q0q1 + q2q3) - my) + _4bx * self.q2 * (
                     _4bx * (q0q2 + q1q3) + _4bz * (0.5 - q1q1 - q2q2) - mz)
        s1 = _2q3 * (2.0 * (q1q3 - q0q2) - ax) + _2q0 * (2.0 * (q0q1 + q2q3) - ay) + -4.0 * self.q1 * (
                2.0 * (0.5 - q1q1 - q2q2) - az) + _4bz * self.q3 * (
                     _4bx * (0.5 - q2q2 - q3q3) + _4bz * (q1q3 - q0q2) - mx) + (_4bx * self.q2 + _4bz * self.q0) * (
                     _4bx * (q1q2 - q0q3) + _4bz * (q0q1 + q2q3) - my) + (_4bx * self.q3 - _8bz * self.q1) * (
                     _4bx * (q0q2 + q1q3) + _4bz * (0.5 - q1q1 - q2q2) - mz)
        s2 = -_2q0 * (2.0 * (q1q3 - q0q2) - ax) + _2q3 * (2.0 * (q0q1 + q2q3) - ay) + (-4.0 * self.q2) * (
                2.0 * (0.5 - q1q1 - q2q2) - az) + (-_8bx * self.q2 - _4bz * self.q0) * (
                     _4bx * (0.5 - q2q2 - q3q3) + _4bz * (q1q3 - q0q2) - mx) + (_4bx * self.q1 + _4bz * self.q3) * (
                     _4bx * (q1q2 - q0q3) + _4bz * (q0q1 + q2q3) - my) + (_4bx * self.q0 - _8bz * self.q2) * (
                     _4bx * (q0q2 + q1q3) + _4bz * (0.5 - q1q1 - q2q2) - mz)
        s3 = _2q1 * (2.0 * (q1q3 - q0q2) - ax) + _2q2 * (2.0 * (q0q1 + q2q3) - ay) + (
                -_8bx * self.q3 + _4bz * self.q1) * (_4bx * (0.5 - q2q2 - q3q3) + _4bz * (q1q3 - q0q2) - mx) + (
                     -_4bx * self.q0 + _4bz * self.q2) * (_4bx * (q1q2 - q0q3) + _4bz * (q0q1 + q2q3) - my) + (
                     _4bx * self.q1) * (_4bx * (q0q2 + q1q3) + _4bz * (0.5 - q1q1 - q2q2) - mz)
        norm = math.sqrt(s0 * s0 + s1 * s1 + s2 * s2 + s3 * s3)  # normalise step magnitude
        if norm != 0:
            s0 /= norm
            s1 /= norm
            s2 /= norm
            s3 /= norm
        # Apply feedback step
        qDot1 -= ModifiedMadgwick.beta * s0
        qDot2 -= ModifiedMadgwick.beta * s1
        qDot3 -= ModifiedMadgwick.beta * s2
        qDot4 -= ModifiedMadgwick.beta * s3
        # Integrate rate of change of quaternion to yield quaternion
        self.q0 += qDot1 * deltat
        self.q1 += qDot2 * deltat
        self.q2 += qDot3 * deltat
        self.q3 += qDot4 * deltat
        # Normalise quaternion
        norm = math.sqrt(self.q0 * self.q0 + self.q1 * self.q1 + self.q2 * self.q2 + self.q3 * self.q3)
        if norm != 0:
            self.q0 /= norm
            self.q1 /= norm
            self.q2 /= norm
            self.q3 /= norm
import numpy as np
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.kalman.kalman_filter import KalmanFilter
from rover_position_rjg.position.position.position import Position
from rover_position_rjg.position.position.position_algorithm import PositionAlgorithm, ABC
from rover_position_rjg.position.position.position_input import PositionInput
class KalmanPositionAlgorithmConfig:
    """Bundles the tuning parameters for KalmanPositionAlgorithm."""

    def __init__(self,
                 expected_frequency: float,
                 mean_position_error: float,
                 mean_velocity_error: float,
                 mean_acceleration_error: float):
        """
        :param expected_frequency: expected sample rate in Hz
        :param mean_position_error: RMS position measurement error in metres
        :param mean_velocity_error: RMS velocity measurement error in m/s
        :param mean_acceleration_error: RMS acceleration measurement error in m/s/s
        """
        self.expected_frequency = expected_frequency
        self.mean_position_error = mean_position_error
        self.mean_velocity_error = mean_velocity_error
        self.mean_acceleration_error = mean_acceleration_error
# noinspection PyPep8Naming
class KalmanPositionAlgorithm(PositionAlgorithm, ABC):
    """Uses a Kalman filter to determine the rover's position
    and velocity.

    State vector layout (9x1), used consistently by every matrix builder
    and by the hard-coded indices below:
    [pos.x, vel.x, acc.x, pos.y, vel.y, acc.y, pos.z, vel.z, acc.z]
    """
    kalman = ...  # type: KalmanFilter
    def __init__(self,
                 expected_frequency: float,
                 mean_position_error: float,
                 mean_velocity_error: float,
                 mean_acceleration_error: float):
        """
        Constructor. The mean errors help the filter to provide an accurate position
        as soon as it starts. They are replaced by measured values once the filter
        has been running for a while.
        :param expected_frequency: Expected frequency of samples. Used when updating
        the Kalman filter for the first time
        :param mean_position_error: RMS error of position measurement in metres
        :param mean_velocity_error: RMS error of the velocity measurement in m/s
        :param mean_acceleration_error: RMS error of the acceleration measurement in m/s/s
        """
        super().__init__()
        # x0: Initial state
        x0 = self.build_state(Vector.zero(), Vector.zero(), Vector.zero())
        # p0: Initial state covariance. Starts with initial measurement variances
        p0 = self.init_p0(mean_position_error, mean_velocity_error, mean_acceleration_error)
        # A: Transition matrix that calculates new state from old state using equations of motion
        A = self.init_A(expected_frequency)
        # B: Control matrix. Set to None as we're not providing control inputs
        B = None
        # H: Observation matrix: contains ones for each measurement in the step
        H = self.init_H()
        # R: Observation co-variance. Reports cross talk between variables
        # Happens to be identical to p0 in the 3D case
        R = self.init_p0(mean_position_error, mean_velocity_error, mean_acceleration_error)
        # Q: Transition covariance matrix. Adds cross talk between inputs.
        Q = self.init_Q()
        self.kalman = KalmanFilter(x0, p0, A, B, H, R, Q)
        self.previous_timestamp = 0
        # One-shot flags: each axis of the state is seeded directly from the
        # first measurement seen so the filter needn't converge from zero
        self.position_initialised = False
        self.velocity_initialised = False
        self.acceleration_initialised = False
    def step(self, data: PositionInput) -> Position:
        """Runs one predict/update cycle with whichever measurements
        (position, velocity, attitude+acceleration) the input carries and
        returns the fused Position estimate."""
        # Assume all inputs have the same timestamp
        timestamp = 0  # This will be overwritten as PositionInput must have at least one datum
        position = velocity = acceleration = Vector.zero()
        has_position = 0
        if data.position:
            has_position = 1
            timestamp = data.position.timestamp
            position = data.position.value
            self._initialise_position(position)
        has_velocity = 0
        if data.velocity:
            has_velocity = 1
            timestamp = data.velocity.timestamp
            velocity = data.velocity.value
            self._initialise_velocity(velocity)
        has_acceleration = 0
        attitude = None
        if data.attitude:
            has_acceleration = 1
            timestamp = data.attitude.timestamp
            acceleration = data.attitude.value.acceleration
            attitude = data.attitude.value.attitude
        self.update_observation_matrix(has_position, has_velocity, has_acceleration)
        self.update_transition_matrix(timestamp)
        z = self.build_state(position, velocity, acceleration)
        new_position = self.kalman.step(z)
        return self.to_position(new_position, attitude)
    def _initialise_position(self, position: Vector):
        """Sets the position in the Kalman filter directly to
        a new value. This is used to initialise the position
        without having to wait for the filter to converge."""
        if not self.position_initialised:
            # Indices 0/3/6 are pos.x/pos.y/pos.z in the state vector
            self.kalman.x[0] = position.x
            self.kalman.x[3] = position.y
            self.kalman.x[6] = position.z
            self.position_initialised = True
    def _initialise_velocity(self, velocity: Vector):
        """Sets the velocity in the Kalman filter directly to
        a new value. This is used to initialise the velocity
        without having to wait for the filter to converge."""
        if not self.velocity_initialised:
            # Indices 1/4/7 are vel.x/vel.y/vel.z in the state vector
            self.kalman.x[1] = velocity.x
            self.kalman.x[4] = velocity.y
            self.kalman.x[7] = velocity.z
            self.velocity_initialised = True
    def update_transition_matrix(self, timestamp):
        """Rewrites the dt and 0.5*dt^2 terms of the transition matrix from
        the actual interval since the previous sample."""
        if self.previous_timestamp:
            # Calculate the time interval since the last update
            dt = timestamp - self.previous_timestamp
            at = 0.5*(dt**2)
            a = self.kalman.A
            a[0, 1] = a[1, 2] = a[3, 4] = a[4, 5] = a[6, 7] = a[7, 8] = dt
            a[0, 2] = a[3, 5] = a[6, 8] = at
        # else: This is the first iteration. Use the expected frequency
        # which is already encoded in the matrix.
        self.previous_timestamp = timestamp
    def update_observation_matrix(self, has_position: int, has_velocity: int, has_acceleration: int):
        """Changes the transition matrix to 'switch on' whichever measurements
        were actually provided by the PositionInput"""
        h = self.kalman.H
        h[0, 0] = h[3, 3] = h[6, 6] = has_position
        h[1, 1] = h[4, 4] = h[7, 7] = has_velocity
        h[2, 2] = h[5, 5] = h[8, 8] = has_acceleration
    @staticmethod
    def to_position(new_position: np.ndarray, attitude: Quaternion):
        """Unpacks a 9x1 state vector (see class docstring for the layout)
        into a Position object."""
        acceleration = Vector([new_position.item(2), new_position.item(5), new_position.item(8)])
        velocity = Vector([new_position.item(1), new_position.item(4), new_position.item(7)])
        position = Vector([new_position.item(0), new_position.item(3), new_position.item(6)])
        return Position(attitude, acceleration, velocity, position)
    @staticmethod
    def build_state(position: Vector, velocity: Vector, acceleration: Vector):
        """Packs three vectors into the 9x1 state/measurement column used
        by the filter (see class docstring for the layout)."""
        return np.array([
            [position.x],
            [velocity.x],
            [acceleration.x],
            [position.y],
            [velocity.y],
            [acceleration.y],
            [position.z],
            [velocity.z],
            [acceleration.z]
        ], dtype=float)
    @staticmethod
    def init_p0(mean_position_error: float, mean_velocity_error: float, mean_acceleration_error: float):
        """
        Generates the initial state covariance matrix. The initial values will be
        replaced by measured values as the filter runs but the filter will stabilise
        faster if these are estimated correctly.
        :param mean_position_error: RMS error of position measurement in metres
        :param mean_velocity_error: RMS error of the velocity measurement in m/s
        :param mean_acceleration_error: RMS error of the acceleration measurement in m/s/s
        :return: a 9x9 diagonal matrix of variances (RMS errors squared)
        """
        d = mean_position_error**2
        v = mean_velocity_error**2
        a = mean_acceleration_error**2
        return np.array([
            [d, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, v, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, a, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, d, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, v, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, a, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, d, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, v, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, a],
        ], dtype=float)
    @staticmethod
    def init_A(expected_frequency: float):
        """Builds the constant-acceleration transition matrix: per axis,
        p' = p + v*dt + 0.5*a*dt^2; v' = v + a*dt; a' = a."""
        # dt and at will be overridden by _update_transition_matrix
        # when we know the real interval between samples
        dt = 1/expected_frequency
        at = 0.5*(dt**2)
        return np.array([
            [1, dt, at, 0, 0, 0, 0, 0, 0],
            [0, 1, dt, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, dt, at, 0, 0, 0],
            [0, 0, 0, 0, 1, dt, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, dt, at],
            [0, 0, 0, 0, 0, 0, 0, 1, dt],
            [0, 0, 0, 0, 0, 0, 0, 0, 1],
        ], dtype=float)
    @staticmethod
    def init_H():
        # Identity: every state element is directly observable; step()
        # zeroes the rows for measurements that are absent in an update
        return np.identity(9)
    @staticmethod
    def init_Q():
        """Process noise: only the acceleration elements (indices 2/5/8)
        carry transition uncertainty."""
        return np.array([
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 1],
        ], dtype=float)
import math
from rover_position_rjg.data.vector import Vector
class AnchorRangesToPositions:
"""Finds the absolute positions of all 4 anchor tags in their
own frame of reference. Note the Decawave tags use a right hand
rule when auto-calibrating. This class does the same so that
calibrations from this class are directly comparable to the
Decawave calibration."""
def __init__(self,
distance_1_2: float,
distance_2_3: float,
distance_3_4: float,
distance_4_1: float,
distance_1_3: float,
distance_2_4: float,
height: float,
):
"""
Creates a new instance from the 6 distances.
:param distance_1_2: Distance from the initiator anchor to the next anchor going
anticlockwise. This sets the direction of the x axis.
:param distance_2_3: Distance from 2nd anchor to 3rd anchor going anticlockwise
:param distance_3_4: Distance from 3rd anchor to 4th anchor going anticlockwise
:param distance_4_1: Distance from 4th anchor to initiator anchor going anticlockwise
:param distance_1_3: Diagonal from the initiator to anchor 3
:param distance_2_4: Diagonal from the 2nd anchor to the 4th
:param height: height of the anchors above floor level (they must all be the same)
"""
self._distance_1_2 = distance_1_2
self._distance_2_3 = distance_2_3
self._distance_3_4 = distance_3_4
self._distance_4_1 = distance_4_1
self._distance_1_3 = distance_1_3
self._distance_2_4 = distance_2_4
self._height = height
def position_1(self) -> Vector:
"""The co-ordinates of anchor 1 (the initiator)"""
return Vector([0.0, 0.0, self._height])
def position_2(self) -> Vector:
"""The co-ordinates of anchor 2. They y co-ordinate is always 0."""
return Vector([self._distance_1_2, 0.0, self._height])
def position_3(self) -> Vector:
"""The co-ordinates of anchor 3"""
angle_at_1 = self._calculate_angle(self._distance_1_2, self._distance_1_3, self._distance_2_3)
x = math.cos(angle_at_1) * self._distance_1_3
y = math.sin(angle_at_1) * self._distance_1_3
return Vector([x, y, self._height])
def position_4(self) -> Vector:
"""The co-ordinates of anchor 4"""
angle_at_1 = self._calculate_angle(self._distance_1_2, self._distance_4_1, self._distance_2_4)
x = math.cos(angle_at_1) * self._distance_4_1
y = math.sin(angle_at_1) * self._distance_4_1
return Vector([x, y, self._height])
@staticmethod
def _calculate_angle(a: float, b: float, c: float) -> float:
"""
Calculates an angle in a triangle given the lengths of all three sides
:param a: side a, adjacent to the angle to be returned
:param b: side b, also adjacent to the angle to be returned
:param c: side c, opposite the angle to be returned
:returns the angle in radians
"""
# Law of cosines
cos_theta = ((a**2 + b**2) - c**2) / (2 * a * b)
return math.acos(cos_theta)
def error_3_4(self) -> float:
"""Returns the difference between the measured and calculated lengths
for side 3_4 which wasn't used in any calculations"""
vector_3_4 = self.position_4() - self.position_3()
error = vector_3_4.magnitude() - self._distance_3_4
return error | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/calibration/decawave/anchor_ranges_to_positions.py | 0.955506 | 0.811452 | anchor_ranges_to_positions.py | pypi |
from typing import Tuple
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.calibration.simple_scaler import SimpleScaler
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class ImuScaler(SimpleScaler[NineDoFData]):
sensitivity_to_minus_x = Vector([ 0, -0.0135, -0.0155])
sensitivity_to_minus_y = Vector([-0.0035, 0, -0.0094])
sensitivity_to_minus_z = Vector([ 0.082, 0.028, 0])
sensitivity_to_plus_x = Vector([ 0, 0.0210, 0.0155])
sensitivity_to_plus_y = Vector([ 0.030, 0, 0.020])
sensitivity_to_plus_z = Vector([ 0.026, 0.0021, 0])
scale_minus_x = 1/16000
scale_plus_x = 1/16626
scale_minus_y = 1/16423
scale_plus_y = 1/16196
scale_minus_z = 1/16280
scale_plus_z = 1/16425
i_x = 0
i_y = 1
i_z = 2
i_minus = 0
i_plus = 1
"""Provides signal conditioning as well as scaling for an IMU"""
def __init__(self,
offset: NineDoFData = NineDoFData.zero(),
multiplier: NineDoFData = NineDoFData.one(),
gyro_zero_band_height: Vector = Vector([6.5, 8, 5.5]),
gyro_zero_band_num_samples: int = 46,
cross_axis_sensitivities: (Tuple[Vector, Vector, Vector], Tuple[Vector, Vector, Vector])
= ((sensitivity_to_minus_x, sensitivity_to_minus_y, sensitivity_to_minus_z),
(sensitivity_to_plus_x, sensitivity_to_plus_y, sensitivity_to_plus_z))
):
super().__init__(offset, multiplier)
self.gyro_zero_band_num_samples = gyro_zero_band_num_samples
self.gyro_zero_band_height = gyro_zero_band_height
self.gyro_zero_upper_limit: Vector = None
self.gyro_zero_lower_limit: Vector = None
self._set_gyro_zero_limits()
self.cross_axis_sensitivities = cross_axis_sensitivities
def load(self, filename: str) -> None:
super().load(filename)
self._set_gyro_zero_limits()
def scale(self, data: Data[NineDoFData]) -> Data[NineDoFData]:
acc = data.value.acceleration.value
gyro = data.value.angular_velocity.value
mag = data.value.magnetic_field.value
new_gyro = self._scale_gyro(gyro)
new_acc = self._scale_accelerations(acc)
new_mag = self._scale_mag(mag)
return self._build_data(data, new_acc, new_gyro, new_mag)
def _scale_mag(self, raw: Vector) -> Vector:
return (raw - self.offset.magnetic_field.value).scale(self.multiplier.magnetic_field.value)
def _scale_gyro(self, raw: Vector) -> Vector:
self._adjust_gyro_offset(raw)
return (raw - self.offset.angular_velocity.value).scale(self.multiplier.angular_velocity.value)
def _scale_accelerations(self, raw: Vector) -> Vector:
by_dx = by_dy = by_dz = Vector.zero()
scale_x = scale_y = scale_z = 0
raw_x = raw.x
if raw_x < 0:
by_dx = self.cross_axis_sensitivities[self.i_minus][self.i_x] * raw_x
scale_x = self.scale_minus_x
if raw_x > 0:
by_dx = self.cross_axis_sensitivities[self.i_plus][self.i_x] * raw_x
scale_x = self.scale_plus_x
raw_y = raw.y
if raw_y < 0:
by_dy = self.cross_axis_sensitivities[self.i_minus][self.i_y] * raw_y
scale_y = self.scale_minus_y
if raw_y > 0:
by_dy = self.cross_axis_sensitivities[self.i_plus][self.i_y] * raw_y
scale_y = self.scale_plus_y
raw_z = raw.z
if raw_z < 0:
by_dz = self.cross_axis_sensitivities[self.i_minus][self.i_z] * raw_z
scale_z = self.scale_minus_z
if raw_z > 0:
by_dz = self.cross_axis_sensitivities[self.i_plus][self.i_z] * raw_z
scale_z = self.scale_plus_z
cross_axis_offsets = by_dx + by_dy + by_dz
x = (raw_x - cross_axis_offsets.x) * scale_x
y = (raw_y - cross_axis_offsets.y) * scale_y
z = (raw_z - cross_axis_offsets.z) * scale_z
return Vector([x, y, z])
def _adjust_gyro_offset(self, angular_velocity: Vector):
if self.gyro_zero_upper_limit > angular_velocity and angular_velocity > self.gyro_zero_lower_limit:
gyro_average = self.offset.angular_velocity.value
gyro_average = ((gyro_average * (self.gyro_zero_band_num_samples - 1)) + angular_velocity) / self.gyro_zero_band_num_samples
self.offset.angular_velocity.value = gyro_average
self._set_gyro_zero_limits()
# print('Set gyro bias to {}'.format(self.offset.angular_velocity.value))
def _set_gyro_zero_limits(self):
gyro_offset = self.offset.angular_velocity.value
self.gyro_zero_upper_limit = gyro_offset + self.gyro_zero_band_height
self.gyro_zero_lower_limit = gyro_offset - self.gyro_zero_band_height
@staticmethod
def _build_data(data: Data[NineDoFData], acceleration: Vector, angular_velocity: Vector, magnetic_field: Vector) -> Data[NineDoFData]:
acc_data = Data(acceleration, data.value.acceleration.timestamp)
gyro_data = Data(angular_velocity, data.value.angular_velocity.timestamp)
mag_data = Data(magnetic_field, data.value.magnetic_field.timestamp)
nine_dof = NineDoFData(acc_data, gyro_data, mag_data, data.value.temperature)
return Data(nine_dof, data.timestamp) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/calibration/imu/double_sided_imu_scaler.py | 0.863665 | 0.400691 | double_sided_imu_scaler.py | pypi |
import math
from typing import Tuple
import numpy as np
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.flags import Flags
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.calibration.simple_scaler import SimpleScaler
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class ImuScaler(SimpleScaler[NineDoFData]):
INSIDE_ZERO_LIMITS_STATUS_BIT = 0
CALIBRATING_STATUS_BIT = 2
G = 1
sensitivity_to_x = Vector([ 0, 0.012, 0.000])
sensitivity_to_y = Vector([-0.008, 0, -0.015])
sensitivity_to_z = Vector([-0.01, 0.008, 0])
"""Provides signal conditioning as well as scaling for an IMU"""
def __init__(self,
offset: NineDoFData = NineDoFData.zero(),
multiplier: NineDoFData = NineDoFData.one(),
gyro_zero_band_height: Vector = Vector([9.5, 7.0, 5.0]),
gyro_zero_band_num_samples: int = 46,
cross_axis_sensitivities: Tuple[Vector, Vector, Vector] = (sensitivity_to_x, sensitivity_to_y, sensitivity_to_z)
):
super().__init__(offset, multiplier)
self.gyro_zero_band_num_samples = gyro_zero_band_num_samples
self.gyro_zero_band_height = gyro_zero_band_height
self.gyro_zero_upper_limit: Vector = None
self.gyro_zero_lower_limit: Vector = None
self._set_gyro_zero_limits()
self.cross_axis_sensitivities = cross_axis_sensitivities
self._calibration_gyro_samples = []
self._calibration_acc_samples = []
self.calibrating = False
def load(self, filename: str) -> None:
super().load(filename)
self._set_gyro_zero_limits()
def calibrate(self) -> None:
self._calibration_gyro_samples = []
self._calibration_acc_samples = []
self.calibrating = True
def _add_calibration_sample(self, gyro: Vector, acc: Vector) -> None:
self._calibration_gyro_samples.append(gyro.values)
self._calibration_acc_samples.append(acc.values)
if len(self._calibration_gyro_samples) >= self.gyro_zero_band_num_samples:
self._calibrate_gyro_offset()
self._calibration_gyro_samples.clear()
self._calibrate_acceleration()
self._calibration_acc_samples.clear()
self.calibrating = False
def _calibrate_gyro_offset(self):
np_array = np.array(self._calibration_gyro_samples)
self.gyro_zero_band_height = Vector(list(np.std(np_array, axis=0)))
self.offset.angular_velocity.value = Vector(list(np.average(np_array, axis=0)))
self._set_gyro_zero_limits()
def _calibrate_acceleration(self):
"""Modifies the acceleration z-offset to give an acceleration magnitude of 1g
Valid if the rover is within +- 5 degrees of horizontal."""
np_array = np.array(self._calibration_acc_samples)
raw_samples = Vector(list(np.average(np_array, axis=0)))
scaled_samples = (raw_samples - self.offset.acceleration.value).scale(self.multiplier.acceleration.value)
target_scaled_z = math.sqrt(math.fabs(self.G - scaled_samples.x**2 - scaled_samples.y**2))
z_offset = raw_samples.z - (target_scaled_z / self.multiplier.acceleration.value.z)
old = self.offset.acceleration.value
self.offset.acceleration.value = Vector([old.x, old.y, z_offset])
def scale(self, data: Data[NineDoFData]) -> Data[NineDoFData]:
acc = data.value.acceleration.value
gyro = data.value.angular_velocity.value
mag = data.value.magnetic_field.value
cross_axis_offset = self._get_cross_axis_offset(acc)
if self.calibrating:
self._add_calibration_sample(gyro, acc)
else:
in_zero_limits = self._adjust_gyro_offset(gyro, data.timestamp)
data.value.status[self.INSIDE_ZERO_LIMITS_STATUS_BIT] = in_zero_limits
data.value.status[self.CALIBRATING_STATUS_BIT] = self.calibrating
new_acc = Vector([acc.x - cross_axis_offset.x, acc.y - cross_axis_offset.y, acc.z - cross_axis_offset.z])
new_data = self._build_data(data, new_acc, gyro, mag, data.value.status)
return super().scale(new_data)
def _get_cross_axis_offset(self, acceleration: Vector) -> Vector:
by_dx = self.cross_axis_sensitivities[0] * acceleration.x
by_dy = self.cross_axis_sensitivities[1] * acceleration.y
by_dz = self.cross_axis_sensitivities[2] * acceleration.z
return by_dx + by_dy + by_dz
def _adjust_gyro_offset(self, angular_velocity: Vector, timestamp: float) -> bool:
if self.gyro_zero_upper_limit > angular_velocity and angular_velocity > self.gyro_zero_lower_limit:
gyro_average = self.offset.angular_velocity.value
gyro_average = ((gyro_average * (self.gyro_zero_band_num_samples - 1)) + angular_velocity) / self.gyro_zero_band_num_samples
self.offset.angular_velocity.value = gyro_average
self._set_gyro_zero_limits()
# print('Set gyro bias to {} at {}'.format(self.offset.angular_velocity.value, timestamp))
return True
return False
def _set_gyro_zero_limits(self):
gyro_offset = self.offset.angular_velocity.value
self.gyro_zero_upper_limit = gyro_offset + self.gyro_zero_band_height
self.gyro_zero_lower_limit = gyro_offset - self.gyro_zero_band_height
@staticmethod
def _build_data(data: Data[NineDoFData], acceleration: Vector, angular_velocity: Vector, magnetic_field: Vector, status: Flags) -> Data[NineDoFData]:
acc_data = Data(acceleration, data.value.acceleration.timestamp)
gyro_data = Data(angular_velocity, data.value.angular_velocity.timestamp)
mag_data = Data(magnetic_field, data.value.magnetic_field.timestamp)
nine_dof = NineDoFData(acc_data, gyro_data, mag_data, data.value.temperature, status)
return Data(nine_dof, data.timestamp) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/calibration/imu/imu_scaler.py | 0.869963 | 0.407982 | imu_scaler.py | pypi |
import numpy as np
from numpy.core.multiarray import ndarray
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.data_filter import DataFilter
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class FrequencyShiftFilter(DataFilter[NineDoFData, NineDoFData]):
"""Reduces the frequency of NineDoFData. i.e. emits data
less often than it receives it. Each emitted data contains
the average value of the data that was received since the
last emission. Handy for logging etc."""
inputs = ... # type: ndarray
def __init__(self, frequency: float):
super().__init__()
self.period = 1 / frequency
self.start: float = None
self.latest = self.start
self.clear_inputs()
def receive(self, data: Data[NineDoFData]) -> None:
if not self.start:
self.start = data.timestamp
due = self.start + self.period
if data.timestamp > due:
if data.timestamp > due + self.period:
# We've been paused for a while. Re-sync start with timestamps
self.start = data.timestamp
else:
self.start = due
average = self.get_average()
self.send(average)
self.clear_inputs()
acc = data.value.acceleration.value
gyro = data.value.angular_velocity.value
mag = data.value.magnetic_field.value
temp = data.value.temperature.value
self.inputs = np.append(self.inputs, [[acc.x, acc.y, acc.z, gyro.x, gyro.y, gyro.z, mag.x, mag.y, mag.z, temp]], axis=0)
self.latest = data.timestamp
def get_average(self) -> Data[NineDoFData]:
average = np.average(self.inputs, axis=0)
acc = Data(Vector([average[0], average[1], average[2]]), self.latest)
gyro = Data(Vector([average[3], average[4], average[5]]), self.latest)
mag = Data(Vector([average[6], average[7], average[8]]), self.latest)
temp = Data(average[9], self.latest)
nine_dof = NineDoFData(acc, gyro, mag, temp)
return Data(nine_dof, self.latest)
def clear_inputs(self):
self.inputs = np.empty([0, 10])
def close(self) -> None:
self.clear_inputs() | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/filters/frequency_shift_filter.py | 0.733261 | 0.619183 | frequency_shift_filter.py | pypi |
from typing import List
import numpy as np
from decawave_1001_rjg import DwmLocationResponse, DwmDistanceAndPosition, DwmPosition
from scipy import optimize
from rover_position_rjg.data.data import Data
from rover_position_rjg.data.data_filter import DataFilter
from rover_position_rjg.position.calibration.decawave.decawave_range_scaler import DecawaveRangeScaler
class DecawaveTrilaterationFilter(DataFilter[DwmLocationResponse, DwmLocationResponse]):
def __init__(self, range_scaler: DecawaveRangeScaler, name: str = ''):
super().__init__(name)
self.range_scaler = range_scaler
self.tag_temperature = DecawaveRangeScaler.reference_temp
self.anchor_temperatures = DecawaveRangeScaler.reference_temp
def set_temperatures(self, tag_temperature: float, anchor_temperatures: float):
self.tag_temperature = tag_temperature
self.anchor_temperatures = anchor_temperatures
def receive(self, data: Data[DwmLocationResponse]) -> None:
message = data.value
# Scale the measured distances using existing calibration
new_anchors: List[DwmDistanceAndPosition] = []
for dp in message.get_anchor_distances_and_positions():
scaled = self.range_scaler.scale(dp.distance(), self.tag_temperature, self.anchor_temperatures, dp.address())
new_dp = DwmDistanceAndPosition.from_properties(dp.address(), scaled, dp.quality_factor(), dp.position())
new_anchors.append(new_dp)
# Find the best fit solution for the tag position
if len(new_anchors) < 3:
new_tag_coordinate = message.get_tag_position().position()
else:
new_tag_coordinate = self.trilaterate(message.get_tag_position().position(), new_anchors)
# Build the result and send it out
new_tag_position = DwmPosition.from_properties(new_tag_coordinate, message.get_tag_position().quality_factor())
response = DwmLocationResponse.from_properties(new_tag_position , new_anchors)
self.send(Data(response, data.timestamp))
@staticmethod
def trilaterate(initial_guess: List[int], distances_and_positions: List[DwmDistanceAndPosition]) -> List[int]:
anchors = []
measurements = []
for dp in distances_and_positions:
anchor_position = dp.position().position()
anchors.append(anchor_position)
measurements.append(dp.distance())
co_ords = optimize.least_squares(DecawaveTrilaterationFilter._least_squares_error, initial_guess, method='lm', xtol=1e-8, args=(anchors, measurements))
return [int(round(co_ords.x[0])), int(round(co_ords.x[1])), int(round(co_ords.x[2]))]
@staticmethod
def _least_squares_error(guess, anchors: List[List[int]], measurements: List[float]) -> List[float]:
x, y, z = guess
results = []
for i in range(0, len(anchors)):
results.append(DecawaveTrilaterationFilter._measurement_error(x, y, z, anchors[i], measurements[i]))
return results
@staticmethod
def _measurement_error(x: float, y: float, z: float, position: np.array, measurement: float):
return ((x - position[0]) ** 2) + ((y - position[1]) ** 2) + ((z - position[2]) ** 2)*5 - (measurement ** 2) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/filters/decawave_trilateration_filter.py | 0.889643 | 0.399138 | decawave_trilateration_filter.py | pypi |
from rover_position_rjg.data.data_filter import *
from rover_position_rjg.data.quaternion import Quaternion
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.position.attitude.attitude_algorithm import AttitudeAlgorithm
from rover_position_rjg.position.filters.attitude_filter import AttitudeOutput
from rover_position_rjg.sensors.imu.nine_dof_data import NineDoFData
class SwitchingAttitudeFilterConfig:
def __init__(self, acceleration_sensitivity: float = 0.005, cool_down: float = 0.2):
self.acceleration_sensitivity = acceleration_sensitivity
self.cool_down = cool_down
class SwitchingAttitudeFilter(DataFilter[NineDoFData, AttitudeOutput]):
"""Like AttitudeFilter except that it switches between 2 different
attitude algorithms. The 'static' algorithm is assumed to be
sensitive to acceleration and changing magnetic fields. e.g Madgwick
The dynamic algorithm relies almost entirely on gyroscope data
for the heading."""
STATIONARY_STATUS_BIT = 1
def __init__(self,
static_algorithm: AttitudeAlgorithm,
dynamic_algorithm: AttitudeAlgorithm,
acceleration_sensitivity: float = 0.005,
cool_down: float = 0.2,
name: str = ''):
"""
:param static_algorithm: an algorithm that's more accurate when stationary
:param dynamic_algorithm: an algorithm that's more accurate when moving
:param acceleration_sensitivity: acceleration in g that triggers the switch
from rover_position_rjg.the static to the dynamic algorithm.
:param cool_down: time to wait in seconds before switching to the static
algorithm once acceleration has ended
:param name: the filter name
"""
super().__init__(name)
self.g = Vector([0, 0, 1])
self.static_algorithm = static_algorithm
self.dynamic_algorithm = dynamic_algorithm
self.current_algorithm = self.static_algorithm
self.acceleration_sensitivity = acceleration_sensitivity
self.cool_down = cool_down
self.previous_attitude = Quaternion.identity()
self.previous_timestamp = 0
self.previous_in_tolerance = False
self.switch_to_static_time = 0
self.start_timestamp = 0
def reset(self):
"""Switches back to the static algorithm. Useful if the system has been
restarted after having been paused for a while"""
self.static_algorithm.reset()
self.current_algorithm = self.static_algorithm
def receive(self, data: Data[NineDoFData]) -> None:
magnitude = data.value.acceleration.value.magnitude()
in_tolerance = abs(magnitude - 1) < self.acceleration_sensitivity
timestamp = data.value.angular_velocity.timestamp
if not self.start_timestamp:
self.start_timestamp = timestamp
# Switch algorithm if necessary
if self.current_algorithm is self.static_algorithm:
if not in_tolerance:
# print('Switching to Dynamic Attitude Algorithm at {:.4f}g, heading {}, time {}'.format(magnitude, self.previous_attitude.to_tait_bryan(), data.timestamp - self.start_timestamp))
self.dynamic_algorithm.initialise(self.previous_attitude, self.previous_timestamp)
self.current_algorithm = self.dynamic_algorithm
else:
if in_tolerance:
if not self.previous_in_tolerance:
self.previous_in_tolerance = True
self.switch_to_static_time = timestamp + self.cool_down
if timestamp >= self.switch_to_static_time:
# print('Switching to Static Attitude Algorithm after {:.1f}s, heading {}, time {}'.format(self.cool_down, self.previous_attitude.to_tait_bryan(), data.timestamp - self.start_timestamp))
self.static_algorithm.reset()
self.current_algorithm = self.static_algorithm
self.previous_in_tolerance = False
else:
self.previous_in_tolerance = False
self.previous_timestamp = timestamp
# Update the attitude and return
self.previous_attitude = self.current_algorithm.step(data.value)
acceleration = self.previous_attitude.rotate(data.value.acceleration.value) - self.g
data.value.status[self.STATIONARY_STATUS_BIT] = self.current_algorithm == self.static_algorithm
output = AttitudeOutput(acceleration, self.previous_attitude, data.value.status)
self.send(Data(output, data.timestamp)) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/position/filters/switching_attitude_filter.py | 0.836254 | 0.525978 | switching_attitude_filter.py | pypi |
import math
from rover_position_rjg.data.vector import Vector
from rover_position_rjg.json_aware.json_aware import *
class Quaternion(JsonAware['Quaternion']):
"""A quaternion with the four values w, i, j and k"""
_tolerance = 1e-6
_degreesPerRadian = math.degrees(1)
@staticmethod
def identity() -> 'Quaternion':
return Quaternion(1, 0, 0, 0)
def __init__(self, w: float, i: float, j: float, k: float):
self.w = w
self.i = i
self.j = j
self.k = k
def __matmul__(self, other) -> 'Quaternion':
"""Computes the Hamilton product. If other is a 3D vector then the
output includes the rotated vector."""
w = (self.w * other.w) - (self.i * other.i) - (self.j * other.j) - (self.k * other.k)
i = (self.w * other.i) + (self.i * other.w) + (self.j * other.k) - (self.k * other.j)
j = (self.w * other.j) - (self.i * other.k) + (self.j * other.w) + (self.k * other.i)
k = (self.w * other.k) + (self.i * other.j) - (self.j * other.i) + (self.k * other.w)
return Quaternion(w, i, j, k)
def rotate(self, vector: Vector) -> Vector:
v = Quaternion(0, vector.x, vector.y, vector.z)
inv_self = Quaternion(self.w, -self.i, -self.j, -self.k) # Assume self is a unit quaternion
result = self @ v @ inv_self
return Vector([result.i, result.j, result.k])
def magnitude(self) -> float:
return math.sqrt(self.w**2 + self.i**2 + self.j**2 + self.k**2)
def normalise(self) -> 'Quaternion':
"""Scales the quaternion to have a magnitude of 1"""
mag = self.magnitude()
return Quaternion(self.w/mag, self.i/mag, self.j/mag, self.k/mag)
def __neg__(self):
"""Returns a quaternion which is the inverse of this.
i.e. a unit quaternion will undo the rotation."""
mag = self.magnitude()
return Quaternion(self.w/mag, -self.i/mag, -self.j/mag, -self.k/mag)
@staticmethod
# See http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
def from_tait_bryan(tait_bryan_angles: Vector) -> 'Quaternion':
"""Returns a Quaternion representing the given rotation in Tait-Bryan angles.
AKA Cardan angles, aviation angles. The angles must follow the ZYX convention. AKA 321.
:param tait_bryan_angles: Tait Bryan angles in degrees where x=roll, y=pitch and z=yaw.
:return: a unit Quaternion
"""
return Quaternion.from_tait_bryan_radians(tait_bryan_angles / Quaternion._degreesPerRadian)
@staticmethod
# See http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
def from_tait_bryan_radians(tait_bryan_angles: Vector) -> 'Quaternion':
"""Returns a Quaternion representing the given rotation in Tait-Bryan angles.
AKA Cardan angles, aviation angles. The angles must follow the ZYX convention. AKA 321.
:param tait_bryan_angles: Tait Bryan angles in degrees where x=roll, y=pitch and z=yaw.
:return: a unit Quaternion
"""
roll = tait_bryan_angles.x
cr = math.cos(roll * 0.5)
sr = math.sin(roll * 0.5)
pitch = tait_bryan_angles.y
cp = math.cos(pitch * 0.5)
sp = math.sin(pitch * 0.5)
yaw = tait_bryan_angles.z
cy = math.cos(yaw * 0.5)
sy = math.sin(yaw * 0.5)
w = cy * cr * cp + sy * sr * sp
i = cy * sr * cp - sy * cr * sp
j = cy * cr * sp + sy * sr * cp
k = sy * cr * cp - cy * sr * sp
return Quaternion(w, i, j, k)
# See http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
def to_tait_bryan(self) -> Vector:
"""
Converts this Quaternion to Tait-Bryan angles. AKA Cardan or aviation angles
Apply the rotations in the order Yaw, Pitch, Roll to get the correct pose.
Known as the ZYX or 321 ordering.
:return: A vector with x=roll, y=pitch and z=yaw
"""
return self.to_tait_bryan_radians() * Quaternion._degreesPerRadian
# See http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
def to_tait_bryan_radians(self) -> Vector:
"""
Converts this Quaternion to Tait-Bryan angles. AKA Cardan or aviation angles
Apply the rotations in the order Yaw, Pitch, Roll to get the correct pose.
Known as the ZYX or 321 ordering.
:return: A vector with x=roll, y=pitch and z=yaw
"""
# roll (x-axis rotation)
q = Quaternion(self.w, self.i, self.j, self.k)
sinr_cosp = +2.0 * (q.w * q.i + q.j * q.k)
cosr_cosp = +1.0 - 2.0 * (q.i * q.i + q.j * q.j)
roll = math.atan2(sinr_cosp, cosr_cosp)
# pitch (y-axis rotation)
sin_p = +2.0 * (q.w * q.j - q.k * q.i)
if math.fabs(sin_p) >= 1:
pitch = math.copysign(math.pi / 2, sin_p) # use 90 degrees if out of range
else:
pitch = math.asin(sin_p)
# yaw (z-axis rotation)
siny_cosp = +2.0 * (q.w * q.k + q.i * q.j)
cosy_cosp = +1.0 - 2.0 * (q.j * q.j + q.k * q.k)
yaw = math.atan2(siny_cosp, cosy_cosp)
return Vector([roll, pitch, yaw])
@staticmethod
def from_euler(e: Vector) -> 'Quaternion':
"""http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
Produces quaternion in the z-x-z orientation from gamma, beta, alpha.
Angles in vector are in the order alpha, beta, gamma
This doesn't round trip with the to_euler method. I swapped the x and z
and negated the signs to make it round trip."""
alpha = math.radians(e.z)
cr = math.cos(alpha * 0.5)
sr = math.sin(alpha * 0.5)
beta = math.radians(e.y)
cp = math.cos(beta * 0.5)
sp = math.sin(beta * 0.5)
gamma = math.radians(e.x)
cy = math.cos(gamma * 0.5)
sy = math.sin(gamma * 0.5)
w = -(cy*cr - sy*sr) * cp
i = -(cy*cr + sy*sr) * sp
j = -(sy*cr - cy*sr) * sp
k = -(sy*cr + cy*sr) * cp
return Quaternion(w, i, j, k)
def to_euler(self) -> Vector:
"""http://www.sedris.org/wg8home/Documents/WG80485.pdf section 3.4.10
Produces Euler angles of the z-x-z 3-1-3 orientation from gamma, beta, alpha angles.
This formulation of Euler angles is very rare. The Tait Bryan formulation
is much more common.
NOT confirmed to be a valid set of output angles."""
alpha = math.atan2((self.i*self.k + self.w*self.j), -(self.j*self.k - self.w*self.i))
beta = math.acos(1 - 2*(self.i**2 + self.j**2))
gamma = math.atan2((self.i*self.k - self.w*self.j), (self.j*self.k + self.w*self.i))
return Vector([math.degrees(alpha), math.degrees(beta), math.degrees(gamma)])
def __eq__(self, other) -> bool:
if isinstance(other, Quaternion):
return math.isclose(self.w, other.w, rel_tol=Quaternion._tolerance) and \
math.isclose(self.i, other.i, rel_tol=Quaternion._tolerance) and \
math.isclose(self.j, other.j, rel_tol=Quaternion._tolerance) and \
math.isclose(self.k, other.k, rel_tol=Quaternion._tolerance)
return False
def __repr__(self) -> str:
return '[{},{},{},{}]'.format(self.w, self.i, self.j, self.k)
def to_json(self) -> str:
return '{{"w":{}, "i":{}, "j":{}, "k":{}}}'.format(self.w, self.i, self.j, self.k)
@staticmethod
def from_json(obj: dict) -> 'Quaternion':
return Quaternion(obj['w'], obj['i'], obj['j'], obj['k']) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/data/quaternion.py | 0.924304 | 0.544922 | quaternion.py | pypi |
import math
from rover_position_rjg.json_aware.json_aware import JsonAware
class Vector(JsonAware['Vector']):
"""A 3D vector"""
_tolerance = 1e-6
@staticmethod
def zero():
return Vector([0, 0, 0])
@staticmethod
def one():
return Vector([1, 1, 1])
def __init__(self, values: list):
self.values = values
@property
def x(self):
return self.values[0]
@property
def y(self):
return self.values[1]
@property
def z(self):
return self.values[2]
def enu_to_ned(self):
"""Converts from the East North Up axis convention that we use
to the commonly used aircraft convention of North East Down."""
return Vector([self.y, self.x, -self.z])
def ned_to_enu(self):
"""Converts from the aircraft convention of North East Down
to the land vehicle convention of East North Up that we use."""
return self.enu_to_ned()
def __eq__(self, other) -> bool:
if isinstance(other, Vector):
return math.isclose(self.x, other.x, rel_tol=Vector._tolerance) and \
math.isclose(self.y, other.y, rel_tol=Vector._tolerance) and \
math.isclose(self.z, other.z, rel_tol=Vector._tolerance)
return False
def __add__(self, other) -> 'Vector':
return Vector([self.x + other.x, self.y + other.y, self.z + other.z])
def __sub__(self, other) -> 'Vector':
return Vector([self.x - other.x, self.y - other.y, self.z - other.z])
def __mul__(self, other: float) -> 'Vector':
return Vector([self.x * other, self.y * other, self.z * other])
def __truediv__(self, other: float) -> 'Vector':
return Vector([self.x / other, self.y / other, self.z / other])
def scale(self, other) -> 'Vector':
"""Scales each element of this vector by the corresponding element of other."""
return Vector([self.x * other.x, self.y * other.y, self.z * other.z])
def magnitude(self) -> float:
return math.sqrt(self.x**2 + self.y**2 + self.z**2)
def change_hand(self) -> 'Vector':
"""Flips from the left hand rule to the right hand rule or vice versa
by swapping the x and y axes."""
return Vector([self.y, self.x, self.z])
def __repr__(self) -> str:
return '[{},{},{}]'.format(self.x, self.y, self.z)
def __str__(self) -> str:
return '[{:.4f}, {:.4f}, {:.4f}]'.format(self.x, self.y, self.z)
def __lt__(self, other) -> bool:
return self.x < other.x and self.y < other.y and self.z < other.z
def __gt__(self, other) -> bool:
return other < self
def to_json(self) -> str:
return '{{"x":{}, "y":{}, "z":{}}}'.format(self.x, self.y, self.z)
@staticmethod
def from_json(obj: dict) -> 'Vector':
return Vector([obj['x'], obj['y'], obj['z']]) | /rover_position_rjg-0.1.8-py3-none-any.whl/rover_position_rjg/data/vector.py | 0.887205 | 0.723749 | vector.py | pypi |
import logging
import time
from multiprocessing import Pipe, Process, get_context
from rover_position_rjg.data.data import *
from rover_position_rjg.data.data_pump.data_provider import DataProvider
from rover_position_rjg.data.data_pump.data_pump import DataPump
class ProcessDataPump(DataPump[TValue]):
"""A DataPump that spawns a sub-process to read data"""
HALT_COMMAND = 'halt'
PAUSE_COMMAND = 'pause'
RESUME_COMMAND = 'resume'
def __init__(self,
provider_fn: callable(DataProvider[TValue]),
data_ready_timeout: float,
name: str,
initial_samples_to_reject: int = 0,
samples_to_reject_on_resume: int = 1
):
"""
Constructor.
:param provider_fn: Function that creates the DataProvider this pump will use
:param data_ready_timeout: timeout when calling DataProvider.poll()
:param name: displayed in log messages to distinguish this DataPump from any others.
:param initial_samples_to_reject: number of initial samples to reject. Default 0. Used to get rid
of dodgy samples from devices that need a while to warm up
:param samples_to_reject_on_resume: number of samples to reject after resuming from pause
"""
self.provider_fn = provider_fn
self._name = name
self.timeout = data_ready_timeout
self.receive_pipe, self.send_pipe = Pipe(False)
self.receive_control_pipe, self.send_control_pipe = Pipe(False)
self.process: Process = None
self.samples_to_reject = initial_samples_to_reject
self.samples_to_reject_on_resume = samples_to_reject_on_resume
self.logger = logging.getLogger(__name__)
@property
def name(self):
return self._name
def process_loop(self):
logging.basicConfig(format='{} %(message)s (PID %(process)d)'.format(self.name), level=logging.INFO)
self.logger.info("DataPump process started.")
data_provider = self.provider_fn() # type: DataProvider[TValue]
running = True
paused = False
try:
while running:
if self.receive_control_pipe.poll():
command = self.receive_control_pipe.recv()
if command[0] == ProcessDataPump.HALT_COMMAND:
running = False
elif command[0] == ProcessDataPump.PAUSE_COMMAND:
paused = True
elif command[0] == ProcessDataPump.RESUME_COMMAND:
self.samples_to_reject = self.samples_to_reject_on_resume
paused = False
else:
if not paused:
if data_provider.poll(self.timeout):
data = data_provider.get()
if self.samples_to_reject > 0:
self.samples_to_reject -= 1
else:
self.send_pipe.send(data)
# self.logger.debug("Pumped data at time {} on thread {}".format(data.timestamp, threading.get_ident())
else:
# Sleep a while to keep the CPU load down
time.sleep(max(self.timeout, 0.1))
except (KeyboardInterrupt, SystemExit):
pass
finally:
data_provider.close()
self.logger.info("DataPump process stopped.")
return 0
def poll(self, timeout: float) -> bool:
    """Returns True if a sample is available to recv() within timeout seconds."""
    return self.receive_pipe.poll(timeout)
def recv(self) -> Data[TValue]:
    """Blocks until the next pumped sample arrives and returns it."""
    return self.receive_pipe.recv()
def fileno(self) -> int:
    """File descriptor of the receive pipe, for use with select/poll."""
    return self.receive_pipe.fileno()
def run(self):
    """Starts the pump in a separate daemon process.

    Does nothing if the pump is already running.
    """
    if self.process is not None:
        return
    # 'spawn' guarantees the pump runs in a fresh interpreter process,
    # regardless of the platform's default start method.
    spawn_context = get_context('spawn')
    self.process = spawn_context.Process(target=self.process_loop, daemon=True)
    self.process.start()
def halt(self):
    """Asks the pump process to stop and waits briefly for it to exit.

    The process reference is cleared even if the process fails to stop
    in time (it is a daemon, so it cannot outlive the parent).
    """
    if self.process is not None:
        self.send_control_pipe.send([ProcessDataPump.HALT_COMMAND])
        timeout = 2
        self._wait_for_process_to_end(timeout)
        if self.process.is_alive():
            # BUG FIX: the timeout value was never substituted into the
            # log message - the '{}' placeholder was printed literally.
            self.logger.info("DataPump process did not stop after {} seconds.".format(timeout))
        self.process = None
def pause(self):
    """Tells the pump process to stop sending samples.

    Does nothing if the pump is not running.
    """
    if self.process is None:
        return
    self.send_control_pipe.send([ProcessDataPump.PAUSE_COMMAND])
def resume(self):
    """Tells a paused pump process to start sending samples again.

    Does nothing if the pump is not running.
    """
    if self.process is None:
        return
    self.send_control_pipe.send([ProcessDataPump.RESUME_COMMAND])
def _wait_for_process_to_end(self, timeout: float):
    """ Waits for the process to exit.
    :param timeout: timeout in seconds
    """
    # Process.join() blocks until the process exits or the timeout
    # elapses - no need for the previous hand-rolled is_alive()/sleep
    # polling loop (which also only had ~0.1 s resolution).
    self.process.join(timeout)
def set_frequency(self, frequency: float):
    """Sends a 'frequency' command to the pump process.

    :param frequency: the requested sample frequency (units not stated
        here - presumably Hz; confirm against the provider)

    NOTE(review): the process_loop visible in this class does not handle
    a 'frequency' command (unknown commands fall through silently), and
    unlike the other commands this one uses a string literal rather than
    a class-level command constant. Confirm that a subclass or provider
    actually consumes this message.
    """
    if self.process is not None:
        self.send_control_pipe.send(['frequency', frequency])
import numpy as np
import numpy.linalg as lin
# noinspection PyPep8Naming
class KalmanFilter:
    """
    A general purpose Kalman filter implementation based on
    "An Introduction to the Kalman Filter" by Welch and Bishop
    https://www.cs.unc.edu/~welch/media/pdf/kalman_intro.pdf
    Additional inspiration from
    https://towardsdatascience.com/wtf-is-sensor-fusion-part-2-the-good-old-kalman-filter-3642f321440

    Process Model
        x = Ax + Bu + w
        x - system state vector at time k
        A - matrix giving the new state after time T assuming no noise or control input
        w - process noise where w approx = N(0, Q)
        Q - the covariance of w
    Measurement Model
        z = Hx + v
        z - the measurements at time k
        H - matrix predicting the measurement from the state
        v - measurement noise where v approx = N(0, R)
        R - the covariance of v

    BUG FIX (docs): R and Q were previously documented the wrong way
    round here. The code has always used Q as the process noise (added
    to P in the predict phase) and R as the measurement noise (added to
    S in the update phase), matching the __init__ parameter docs.
    """

    def __init__(self,
                 x0: np.ndarray,
                 P0: np.ndarray,
                 A: np.ndarray,
                 B: np.ndarray or None,
                 H: np.ndarray,
                 R: np.ndarray,
                 Q: np.ndarray):
        """
        :param x0: initial state vector (n x 1)
        :param P0: initial covariance matrix (n x n)
        :param A: matrix that updates the next state from the previous state (AKA F)
        :param B: matrix that adjusts the next state by the control inputs u,
            or None if there are no control inputs
        :param H: matrix that converts the current state into an expected measurement
        :param R: measurement noise matrix (m x m)
        :param Q: process noise matrix (n x n)
        :raises ValueError: if any matrix has an unexpected shape
        """
        # Key dimensions
        self.n = np.shape(x0)[0]  # number of dimensions in state vector
        self.m = np.shape(R)[1]   # number of sensor inputs
        # Control inputs are optional
        self.B = None
        self.l = 0  # number of control inputs
        if B is not None:
            self.l = np.shape(B)[1]
            self.B = np.array(B)
            self.assert_shape(self.B, 'B', self.n, self.l)
        self.x = np.array(x0)
        self.assert_shape(self.x, 'x', self.n, 1)
        self.P = np.array(P0)
        self.assert_shape(self.P, 'P', self.n, self.n)
        self.A = np.array(A)
        self.assert_shape(self.A, 'A', self.n, self.n)
        self.H = np.array(H)
        self.assert_shape(self.H, 'H', self.m, self.n)
        self.R = np.array(R)
        self.assert_shape(self.R, 'R', self.m, self.m)
        self.Q = np.array(Q)
        self.assert_shape(self.Q, 'Q', self.n, self.n)

    def step(self, z: np.ndarray, u: np.ndarray = None) -> np.ndarray:
        """
        Updates the filter from the supplied measurement and control inputs.
        Before calling this method, you should update self.A with the time
        since the last call to step unless the time interval is fixed.
        You should adjust the observation matrix, self.H, unless the same
        number of observations are present in every call.
        You should adjust the control matrix, self.B, unless the same number
        of control inputs are present in every call.
        :param z: The measured state of the system (m x 1)
        :param u: Control input (l x 1). None if the control input is unavailable
        :return: The new state estimate (x, n x 1)
        :raises ValueError: if z or u has an unexpected shape
        """
        self.assert_shape(z, 'z', self.m, 1)
        # Predict phase: project the state and covariance forward in time.
        # (The two B checks from the original are merged into one branch.)
        if self.B is not None:
            self.assert_shape(u, 'u', self.l, 1)
            self.x = (self.A @ self.x) + (self.B @ u)
        else:
            self.x = self.A @ self.x
        self.P = (self.A @ self.P @ self.A.T) + self.Q
        # Update phase: fold in the measurement via the Kalman gain K.
        HT = self.H.T
        S = self.H @ self.P @ HT + self.R  # innovation covariance
        self.assert_shape(S, 'S', self.m, self.m)
        K = self.P @ HT @ lin.inv(S)  # Kalman gain
        self.assert_shape(K, 'K', self.n, self.m)
        v = z - (self.H @ self.x)  # innovation (measurement residual)
        self.assert_shape(v, 'v', self.m, 1)
        self.x = self.x + (K @ v)
        self.assert_shape(self.x, 'x', self.n, 1)
        self.P = self.P - ((K @ S) @ K.T)
        self.assert_shape(self.P, 'P', self.n, self.n)
        return self.x

    @staticmethod
    def assert_shape(matrix: np.ndarray, name: str, expected_rows: int, expected_columns: int):
        """
        Raises ValueError unless matrix is 2-D with the expected dimensions.
        Pass None for a dimension to skip checking it.
        (Previously raised the over-broad Exception; ValueError is more
        idiomatic and still caught by any existing 'except Exception'.)
        """
        shape = np.shape(matrix)
        if len(shape) != 2:
            raise ValueError('{} matrix should have 2 dimensions but got {}'.format(name, len(shape)))
        if (expected_rows is not None) and (shape[0] != expected_rows):
            raise ValueError('{} matrix should have {} rows but got {}'.format(name, expected_rows, shape[0]))
        if (expected_columns is not None) and (shape[1] != expected_columns):
            raise ValueError('{} matrix should have {} columns but got {}'.format(name, expected_columns, shape[1]))
<div align="center">
# Rover IO
Rover IO is a suite of tools that traverses your directories and performs IO file operations.
[](https://github.com/Justintime50/roverio/actions)
[](https://coveralls.io/github/Justintime50/roverio?branch=main)
[](https://pypi.org/project/roverio/)
[](LICENSE)
<img src="https://raw.githubusercontent.com/justintime50/assets/main/src/roverio/showcase.png" alt="Showcase">
</div>
Rover IO is the perfect companion to any source control workflow. Find files still containing secrets or search for specific file types or strings of characters you may have forgotten to add to your gitignore. Rename massive collections of files sequentially and recursively (perfect for something like a photo library).
## Install
```bash
# Install tool
pip3 install roverio
# Install locally
make install
```
## Usage
### File Extension
File Extension searches for all files in a path with the specified file extension and returns all the specified results.
```
Usage:
roverio-file-extension --path ~/code/my_project --extension ".py"
Options:
-h, --help show this help message and exit
-p PATH, --path PATH Where File Extension will search for files with the specified file extension.
-e EXTENSION, --extension EXTENSION The file extension to search a path for.
```
### Phone Email Searcher
Phone Email Searcher searches for files that may contain phone numbers or email addresses.
```
Usage:
roverio-phone-email-searcher --path ~/code/my_project --phone --email
Options:
-h, --help show this help message and exit
--path PATH Where Phone/Email Searcher will search.
-p, --phone Search for phone numbers in the directory specified.
-e, --email Search for emails in the directory specified.
```
### Readmy Readmes
Readmy Readmes is a fantastic tool to help find the holes in your project's documentation. Quickly iterate over every project README you have to search for key phrases you know should be there.
Each rule must be on its own line in your `rules` test file. Depending on the path you specify, you can search all your project README's or just a single project.
**Use Cases**
* Ensure you have instructions for installing, usage, and testing your project
* Find long-forgotten TODO's that still need fixing
* Search for a particular phrase when you can't remember where it lived
* Find hiding README's deep in a project's structure
```
Usage
roverio-readmy-readmes -p path/to/git_projects -r path/to/rules.txt -l -c -csv_path path/to/file.csv
Options:
-h, --help show this help message and exit
-p PATH, --path PATH The path where the tool will search for README's.
-r RULES, --rules RULES
The path to your rule text file.
-l, --lazy Match rules lazily (case-insensitive).
-c, --create_csv Save output to a CSV file.
--csv_path CSV_PATH The file path where a CSV file will be saved. By default, it will be saved to the current directory.
```
**Sample Output**
```
| README File | install | usage | test | todo |
| ----------------------- | ------- | ----- | ----- | ----- |
| adventofcode/README.md | True | True | True | False |
| algorithms/README.md | True | True | True | False |
| brew-backup/README.md | True | True | False | False |
| brew-update/README.md | False | True | False | False |
| build-project/README.md | True | True | False | False |
| build-readme/README.md | True | True | True | False |
| burn-notice/README.md | False | True | False | False |
| dad/README.md | True | True | True | False |
| dev-tools/README.md | False | True | True | False |
| diff/README.md | True | True | True | False |
| dotfiles/README.md | True | True | False | False |
...
```
### Scout
Scout searches through a directory for any string of text you specify. Perfect for searching across multiple projects or large code bases.
```
Usage:
roverio-scout --path ~/code/my_project --search "My string of text"
Options:
-h, --help show this help message and exit
-p PATH, --path PATH Where Scout will search for the string specified in each file.
-s SEARCH, --search SEARCH The string to search for in each file of a path.
```
### Secrets
Secrets searches a path for possible secrets in code. Perfect for finding any passwords, API keys, or secrets you were about to commit. This is accomplished through loose searching of strings of a certain length and is not foolproof in determining what an actual secret is vs a long string.
```
Usage:
roverio-secrets --path ~/code/my_project --length 20
Options:
-h, --help show this help message and exit
-p PATH, --path PATH Where Secrets will search for the string specified in each file.
-l LENGTH, --length LENGTH The minimum length of the secrets to search for.
```
### Sequential Renamer
Sequential Renamer recursively renames files in a directory in a sequential manner and prepends the parent folder name. The filename is slugified and lowercased for a uniform naming scheme.
A perfect use case for Sequential Renamer is a large photo library where filenames may be all over the place such as `IMG_1234.JPG` and you want them renamed according to folder. This script has been tested with a library of `10,000` photos.
```
Usage:
roverio-sequential-renamer --path ~/path/to/photos --force
Options:
-h, --help show this help message and exit
-p PATH, --path PATH Where Sequential Renamer will recursively rename files it finds.
-f, --force Force changes which take permanent effect.
```
**Sample Output**
```
/Users/jhammond/Downloads/Justin's Skydive 2019/IMG_2462_proc_592015324.JPG -> justins-skydive-2019-0.jpg
/Users/jhammond/Downloads/Justin's Skydive 2019/IMG_2494_proc_592015326.JPG -> justins-skydive-2019-1.jpg
/Users/jhammond/Downloads/Justin's Skydive 2019/IMG_2514_proc_592015327.JPG -> justins-skydive-2019-2.jpg
```
## Development
```bash
# Get a comprehensive list of development tools
make help
# Run the scripts locally
venv/bin/python roverio/secrets.py --help
```
| /roverio-2.3.1.tar.gz/roverio-2.3.1/README.md | 0.477311 | 0.809991 | README.md | pypi |
# Rover Pro Python Suite
This is the official Python driver for the [Rover Robotics](https://roverrobotics.com/) "Rover Pro" robot. Use this as a starting point to get up and running quickly.
Included in this package are:
1. A Python library for programmatically interfacing with the Rover over USB
2. A command line application "`pitstop`" for upgrading and configuring the Rover firmware
3. A test suite that confirms the Firmware and hardware are operating as expected.

## Setup
To install official releases from PyPi:
```shell script
python3 -m pip install -U pip setuptools
python3 -m pip install -U roverpro --no-cache-dir
```
On Linux, you may not have permission to access USB devices. If this is the case, run the following then restart your computer:
```shell script
sudo usermod -a -G dialout $(whoami)
```
### pitstop
Pitstop is a helper program to bootload your rover and set options. After installing the roverpro package, you can invoke it with `pitstop` or `python3 -m roverpro.pitstop`.
```text
> pitstop --help
usage: pitstop [-h] [-p port] action ...
Rover Pro companion utility to upgrade firmware, configure settings, and test hardware health.
positional arguments:
action
flash write the given firmware hex file onto the rover
checkversion Check the version of firmware installed
test Run tests on the rover
config Update rover persistent settings
optional arguments:
-h, --help show this help message and exit
-p port, --port port Which device to use. If omitted, we will search for a possible rover device
```
## tests
To run tests, first attach the rover via breakout cable then run `pitstop test`.
By default, tests that involve running the motors will be skipped, since you may not want a rover ripping cables out of your computer. If you have made sure running the motors will not damage anything, these tests can be opted in with the flag `--motorok`.
```text
> pitstop test --motorok
Scanning for possible rover devices
Using device /dev/ttyUSB0
========================== test session starts ============================
platform linux -- Python 3.8.2, pytest-5.4.3, py-1.9.0, pluggy-0.13.1
rootdir: /home/dan/Documents/roverpro-python/roverpro
plugins: trio-0.6.0
collected 73 items
tests/test_bootloader.py .s [ 2%]
tests/test_find_device.py ..... [ 9%]
tests/test_roverpro_protocol.py .... [ 15%]
tests/test_rover.py ..................x.x.........x................Xxxx.......... [ 98%]
tests/burnin/test_burnin.py s [100%]
===== 64 passed, 2 skipped, 6 xfailed, 1 xpassed in 83.94s (0:01:23) =====
```
### Development setup
Manual Prerequisites:
* Python3 (recommended to install Python3.6, Python3.7, and Python3.8 if you plan on using tox for all):
* On Ubuntu: `sudo apt install python3 python3-venv python3-pip`
* On another OS: https://www.python.org/downloads/
* [Poetry](https://python-poetry.org/docs/#installation):
* `curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | (python3 || python)`
Then to get and initialize the project:
```
git clone https://github.com/RoverRobotics/roverpro-python.git
cd roverpro-python
poetry install
poetry run githooks setup
```
To run a single command: `poetry run pitstop --help`
#### Useful commands
Note that you haven't called `poetry shell`, you must prepend the following with `poetry run`
<dl>
<dt><code>pytest</code></dt>
<dd>Test on current Python interpreter</dd>
<dt><code>tox</code></dt>
<dd>Test across multiple versions of Python</dd>
<dt><code>black .</code></dt>
<dd>Reformat code to a uniform style</dd>
<dt><code>poetry update</code></dt>
<dd>Update all dependencies to the latest released version</dd>
</dl>
### Caveats
* When running in PyCharm in debug mode, you will get a warning like "RuntimeWarning: You seem to already have a custom sys.excepthook handler installed ..." https://github.com/python-trio/trio/issues/1553
* Note this is a pyproject (PEP-517) project so it will NOT work to `pip install --editable ...` for development. Instead use `poetry install` as above.
| /roverpro-1.0.0rc1.tar.gz/roverpro-1.0.0rc1/README.md | 0.52975 | 0.875255 | README.md | pypi |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.