index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
48,846 | D0cWhats0n/Foodrition_app | refs/heads/master | /foodrition_backend/foodrition_api/migrations/0004_food_ndb_no.py | # Generated by Django 2.1.5 on 2019-02-19 19:46
from django.db import migrations, models
class Migration(migrations.Migration):
# Adds a required, unique integer column `ndb_no` to the Food model
# (presumably a USDA NDB food identifier -- TODO confirm with the model author).
dependencies = [
('foodrition_api', '0003_foodimage'),
]
operations = [
migrations.AddField(
model_name='food',
name='ndb_no',
field=models.IntegerField(default=1, unique=True),
# default=1 only backfills existing rows; preserve_default=False drops it from the schema afterwards
preserve_default=False,
),
]
| {"/foodrition_backend/foodrition_api/tests.py": ["/foodrition_backend/foodrition_api/services/ml_model.py"], "/foodrition_backend/foodrition_api/serializers.py": ["/foodrition_backend/foodrition_api/models.py"]} |
48,847 | D0cWhats0n/Foodrition_app | refs/heads/master | /foodrition_backend/foodrition_api/migrations/0002_auto_20190206_1142.py | # Generated by Django 2.1.5 on 2019-02-06 10:42
from django.db import migrations, models
class Migration(migrations.Migration):
# Relaxes Food.energy_kcal so that rows without a calorie value are allowed
# (blank=True for forms, null=True for the database column).
dependencies = [
('foodrition_api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='food',
name='energy_kcal',
field=models.IntegerField(blank=True, null=True),
),
]
| {"/foodrition_backend/foodrition_api/tests.py": ["/foodrition_backend/foodrition_api/services/ml_model.py"], "/foodrition_backend/foodrition_api/serializers.py": ["/foodrition_backend/foodrition_api/models.py"]} |
48,855 | yakovdabush1/Ex3_python | refs/heads/main | /src/GraphAlgo.py | from typing import List
from src import GraphInterface
import json
import sys
from src.DiGraph import DiGraph
import heapq
import matplotlib.pyplot as plt
class GraphAlgo:
# Graph algorithms (Dijkstra shortest paths, Kosaraju SCCs, plotting) over a DiGraph.
def __init__(self, graph=None):
# :param graph: the DiGraph the algorithms operate on; may stay None until load_from_json is called
self.graph = graph
# two dicts filled by djikstra(): node key -> tentative distance, node key -> predecessor on the path
self.distance = {}
self.parents = {}
def get_graph(self) -> GraphInterface:
    """Return the directed graph this algorithm object currently operates on."""
    return self.graph
def load_from_json(self, file_name: str) -> bool:
    """
    Loads a graph from a json file and installs it as self.graph.
    @param file_name: The path to the json file
    @returns True if the loading was successful, False o.w.
    """
    graph = DiGraph()
    try:
        # 'with' guarantees the handle is closed even when json.load raises
        # (the original open()/close() pair leaked the file on parse errors)
        with open(file_name, 'r') as file:
            json_data = json.load(file)
        for node_json in json_data['Nodes']:
            try:
                # a node may carry a "pos" string of the form "x,y,z"
                s = node_json['pos']
                split_s = s.split(",")
                x, y, z = float(split_s[0]), float(split_s[1]), float(split_s[2])
                graph.add_node(node_json['id'], (x, y, z))
            except (KeyError, IndexError, ValueError, TypeError, AttributeError):
                # missing or malformed position: add the node without one
                graph.add_node(node_json['id'])
        for edge_json in json_data["Edges"]:
            graph.add_edge(edge_json['src'], edge_json['dest'], edge_json['w'])
        self.graph = graph
        return True
    except OSError as err:
        print("error: {}".format(err))
        return False
    except Exception:
        # narrowed from a bare `except:` so Ctrl-C is no longer swallowed;
        # still reports malformed json structure and fails gracefully
        print("unexpected error ", sys.exc_info()[0])
        return False
def save_to_json(self, file_name: str) -> bool:
    """
    Saves the graph in JSON format to a file.
    @param file_name: The path to the out file
    @return: True if the save was successful, False o.w.
    """
    try:
        # build a plain dict mirroring the {"Nodes": [...], "Edges": [...]} layout
        json_dict = {"Nodes": [], "Edges": []}
        node_list = self.graph.get_all_v()
        for node_key, node in node_list.items():
            # nodes without a stored position are serialized at the origin
            if node.pos is None:
                x, y, z = 0.0, 0.0, 0.0
            else:
                x, y, z = node.pos
            json_dict["Nodes"].append({
                "id": node_key,
                "pos": "{},{},{}".format(x, y, z)
            })
            for out, weight in node.out_neighbors.items():
                json_dict["Edges"].append({
                    "src": node_key,
                    "dest": out,
                    "w": weight
                })
        # 'with' guarantees the handle is released even if json.dump raises
        # (the original open()/close() pair leaked the file on errors)
        with open(file_name, 'w') as file:
            json.dump(json_dict, file)
        return True
    except OSError as err:
        print("error: {}".format(err))
        return False
    except Exception:
        # narrowed from a bare `except:`; still best-effort: report and fail
        print("unexpected error ", sys.exc_info()[0])
        return False
def djikstra(self, src_node):
    """
    Dijkstra's single-source shortest paths using a lazy-deletion priority queue.
    Fills self.distance (key -> shortest distance from src_node, inf if unreached)
    and self.parents (key -> predecessor on that path, None for src/unreached).
    :param src_node: key of the source node (assumed present in the graph)
    """
    # (re)initialize both maps for every node
    # (removed a first pass that duplicated exactly this comprehension)
    self.distance = {i: float('inf') for i in self.graph.get_all_v()}
    self.parents = {i: None for i in self.graph.get_all_v()}
    # heap entries are (distance, node_key) tuples; duplicates are allowed
    # and filtered out lazily when popped
    priority_queue = [(0.0, src_node)]
    self.distance[src_node] = 0.0
    while priority_queue:
        min_distance, node_key = heapq.heappop(priority_queue)
        # lazy deletion: skip stale entries superseded by a shorter distance
        if min_distance > self.distance[node_key]:
            continue
        out_edges = self.graph.all_out_edges_of_node(node_key)
        for ni_key, weight in out_edges.items():
            alt = self.distance[node_key] + weight
            if alt < self.distance[ni_key]:
                self.distance[ni_key] = alt
                self.parents[ni_key] = node_key
                heapq.heappush(priority_queue, (alt, ni_key))
def shortest_path(self, id1: int, id2: int) -> (float, list):
    """
    Returns the shortest path from node id1 to node id2 using Dijkstra's Algorithm.
    @param id1: The start node id
    @param id2: The end node id
    @return: (distance, path as a list of node keys from id1 to id2);
             (inf, []) when either node is missing or id2 is unreachable.
    More info:
    https://en.wikipedia.org/wiki/Dijkstra's_algorithm
    """
    node_list = self.graph.get_all_v()
    if id1 not in node_list or id2 not in node_list:
        return float('inf'), []
    # fill self.distance and self.parents
    self.djikstra(id1)
    shortest_dist = self.distance[id2]
    if shortest_dist == float('inf'):
        # id2 is not reachable from id1
        return shortest_dist, []
    # walk the predecessor chain back from id2 to id1, then reverse it;
    # fixed: shortest_path(x, x) now returns (0.0, [x]) instead of (0, [])
    path = [id2]
    runner = id2
    while runner != id1:
        runner = self.parents[runner]
        path.append(runner)
    return shortest_dist, path[::-1]
def connected_component(self, id1: int) -> list:
    """
    Finds the Strongly Connected Component (SCC) that node id1 is a part of.
    @param id1: The node id
    @return: The list of nodes in the SCC; [] when id1 is not in the graph
    """
    if id1 in self.graph.get_all_v():
        # run Kosaraju over the whole graph, then pick the component holding id1
        for component in self.connected_components():
            if id1 in component:
                return component
    # fixed: an unknown node now consistently yields [] instead of falling
    # through and returning None
    return []
def connected_components(self) -> List[list]:
    """
    Finds all the Strongly Connected Components (SCC) in the graph using
    Kosaraju's algorithm (DFS finish order, transpose graph, second DFS).
    NOTE(review): `visited` is indexed by node key, so this assumes node keys
    are exactly 0..v_size()-1 -- confirm against the graph loader.
    @return: list of SCCs, each a list of node keys
    """
    # pass 1 -> DFS over the original graph, pushing nodes in finish order.
    # fixed: the original tested `node_key in visited` (value membership in a
    # list of bools, and 0 == False, so node 0 matched immediately) and then
    # `return stack, visited` aborted mid-algorithm with the wrong type.
    stack = []
    visited = [False] * self.graph.v_size()
    for node_key in self.graph.get_all_v():
        if not visited[node_key]:
            self.dfs_stack(node_key, visited, stack)
    # pass 2 -> build the transpose graph (every edge reversed, same weights)
    transpose_graph = DiGraph()
    for node_key in self.graph.get_all_v():
        transpose_graph.add_node(node_key)
    for node_key in self.graph.get_all_v():
        outs = self.graph.all_out_edges_of_node(node_key)
        for dest_key in outs:
            transpose_graph.add_edge(dest_key, node_key, outs[dest_key])
    # pass 3 -> DFS on the transpose in reverse finish order; each tree is one SCC
    visited = [False] * self.graph.v_size()
    components = []
    while stack:
        key = stack.pop()
        if not visited[key]:
            components.append(self.dfs_scc(key, visited, [], transpose_graph))
    return components
def dfs_stack(self, current_node, visited, stack):
    """Recursive DFS over self.graph: marks every node reachable from
    `current_node` and appends nodes to `stack` in post-order (finish order)."""
    visited[current_node] = True
    for successor in self.graph.all_out_edges_of_node(current_node):
        if not visited[successor]:
            self.dfs_stack(successor, visited, stack)
    stack.append(current_node)
def dfs_scc(self, current_node, visited, scc_list, transpoe_graph):
    """Recursive DFS on the transposed graph collecting one SCC into `scc_list`.
    NOTE: the misspelled parameter name `transpoe_graph` is kept on purpose so
    existing keyword callers are unaffected."""
    visited[current_node] = True
    scc_list.append(current_node)
    for successor in transpoe_graph.all_out_edges_of_node(current_node):
        if not visited[successor]:
            self.dfs_scc(successor, visited, scc_list, transpoe_graph)
    return scc_list
def plot_graph(self) -> None:
"""
Plots the graph.
If the nodes have a position, the nodes will be placed there.
Otherwise, they will be placed in a random but elegant manner.
@return: None
"""
"""
import matplotlib.pyplot as plt
matplotlib -> https://matplotlib.org/
"""
# for n in self.graph.get_all_v():
#
# node = self.graph.nodes[n]
#
# node.update_position(self.graph.v_size())
# axes handle used to draw directed edges as arrows via quiver
ax = plt.axes()
pos_x = []
pos_y = []
for n in self.graph.get_all_v():
node = self.graph.nodes[n]
# positions are generated lazily: update_position only fills missing ones
if node.pos is None: node.update_position(self.graph.v_size())
x, y, z = node.pos
pos_x.append(x)
pos_y.append(y)
# draw an arrow to every out-neighbor (destination may also need a position)
for dest_key in self.graph.all_out_edges_of_node(n):
dest_node = self.graph.nodes[dest_key]
if dest_node.pos is None: dest_node.update_position(self.graph.v_size())
dest_x, dest_y, dest_z = dest_node.pos
# (x,y) -> (x+dx, y+dy)
ax.quiver(x, y, dest_x-x, dest_y-y, angles='xy', scale_units='xy', scale=1)
# nodes as red dots on top of the edge arrows; show() blocks until closed
plt.plot(pos_x, pos_y, 'ro')
plt.show() | {"/src/GraphAlgo.py": ["/src/DiGraph.py"], "/src/DiGraph.py": ["/src/Node_Data.py"]} |
48,856 | yakovdabush1/Ex3_python | refs/heads/main | /src/test.py | import unittest
import time
from GraphAlgo import GraphAlgo
import networkx
import json
class Tests(unittest.TestCase):
def test_networkx_shortest_path(self):
    """Benchmark networkx's shortest_path(0, 10) on each benchmark graph.
    Renamed from test_networkx_load_json: the original name was duplicated by a
    later method, so unittest discovery silently dropped this test."""
    print()
    # same file set as before, in the same order; NOTE(review): the G_20000_*
    # slots point at the G_10000_* files -- looks like a copy-paste slip, kept
    # as-is until the missing files are confirmed to exist
    file_list = ['../test/G_10_80_0.json', '../test/G_100_800_0.json', '../test/G_1000_8000_0.json',
                 '../test/G_10000_80000_0.json', '../test/G_10000_80000_0.json', '../test/G_30000_240000_0.json',
                 '../test/G_10_80_1.json', '../test/G_100_800_1.json', '../test/G_1000_8000_1.json',
                 '../test/G_10000_80000_1.json', '../test/G_10000_80000_1.json', '../test/G_30000_240000_1.json',
                 '../test/G_10_80_2.json', '../test/G_100_800_2.json', '../test/G_1000_8000_2.json',
                 '../test/G_10000_80000_2.json', '../test/G_10000_80000_2.json', '../test/G_30000_240000_2.json']
    for file_name in file_list:
        # build the networkx digraph from the json file (untimed setup)
        networkx_graph = networkx.DiGraph()
        # fixed: the file handle was never closed before
        with open(file_name) as file:
            data = json.load(file)
        nodes = [node['id'] for node in data['Nodes']]
        edges = [(edge['src'], edge['dest'], edge['w']) for edge in data['Edges']]
        networkx_graph.add_nodes_from(nodes)
        networkx_graph.add_weighted_edges_from(edges)
        start = time.perf_counter()
        # fixed: shortest_path requires the graph as its first argument;
        # the original call shortest_path(0, 10) raised at runtime
        networkx.shortest_path(networkx_graph, 0, 10)
        print("time took to networkx shortestpath {}: {}".format(file_name, time.perf_counter()-start))
def test_networkx_load_json(self):
    """Benchmark building a networkx DiGraph from each benchmark json file."""
    print()
    algo = GraphAlgo()
    # same file set as the original per-name constants, in the same order;
    # NOTE(review): the G_20000_* slots point at the G_10000_* files -- TODO confirm
    file_list = ['../test/G_10_80_0.json', '../test/G_100_800_0.json', '../test/G_1000_8000_0.json',
                 '../test/G_10000_80000_0.json', '../test/G_10000_80000_0.json', '../test/G_30000_240000_0.json',
                 '../test/G_10_80_1.json', '../test/G_100_800_1.json', '../test/G_1000_8000_1.json',
                 '../test/G_10000_80000_1.json', '../test/G_10000_80000_1.json', '../test/G_30000_240000_1.json',
                 '../test/G_10_80_2.json', '../test/G_100_800_2.json', '../test/G_1000_8000_2.json',
                 '../test/G_10000_80000_2.json', '../test/G_10000_80000_2.json', '../test/G_30000_240000_2.json']
    start1 = time.time()
    for file_name in file_list:
        start = time.perf_counter()
        # timed section: read the json and build the digraph
        networkx_graph = networkx.DiGraph()
        file = open(file_name)
        data = json.load(file)
        nodes = [node['id'] for node in data['Nodes']]
        edges = [(edge['src'], edge['dest'], edge['w']) for edge in data['Edges']]
        networkx_graph.add_nodes_from(nodes)
        networkx_graph.add_weighted_edges_from(edges)
        print("time took to networkx load {}: {}".format(file_name, time.perf_counter()-start))
        file.close()
    print("time took to load all fiels: {}".format(time.time()-start1))
def test_connected_components(self):
    """Benchmark GraphAlgo.connected_components on each benchmark graph."""
    print()
    algo = GraphAlgo()
    # same file set as the original per-name constants, in the same order;
    # NOTE(review): the G_20000_* slots point at the G_10000_* files -- TODO confirm
    file_list = ['../test/G_10_80_0.json', '../test/G_100_800_0.json', '../test/G_1000_8000_0.json',
                 '../test/G_10000_80000_0.json', '../test/G_10000_80000_0.json', '../test/G_30000_240000_0.json',
                 '../test/G_10_80_1.json', '../test/G_100_800_1.json', '../test/G_1000_8000_1.json',
                 '../test/G_10000_80000_1.json', '../test/G_10000_80000_1.json', '../test/G_30000_240000_1.json',
                 '../test/G_10_80_2.json', '../test/G_100_800_2.json', '../test/G_1000_8000_2.json',
                 '../test/G_10000_80000_2.json', '../test/G_10000_80000_2.json', '../test/G_30000_240000_2.json']
    for file_name in file_list:
        algo.load_from_json(file_name)
        start = time.perf_counter()
        # only the SCC computation itself is timed, not the load
        algo.connected_components()
        print("time took to connected component {}: {}".format(file_name, time.perf_counter()-start))
def test_shortest_path_0_10(self):
    """Benchmark GraphAlgo.shortest_path(0, 10) on each benchmark graph."""
    print()
    algo = GraphAlgo()
    # same file set as the original per-name constants, in the same order;
    # NOTE(review): the G_20000_* slots point at the G_10000_* files -- TODO confirm
    file_list = ['../test/G_10_80_0.json', '../test/G_100_800_0.json', '../test/G_1000_8000_0.json',
                 '../test/G_10000_80000_0.json', '../test/G_10000_80000_0.json', '../test/G_30000_240000_0.json',
                 '../test/G_10_80_1.json', '../test/G_100_800_1.json', '../test/G_1000_8000_1.json',
                 '../test/G_10000_80000_1.json', '../test/G_10000_80000_1.json', '../test/G_30000_240000_1.json',
                 '../test/G_10_80_2.json', '../test/G_100_800_2.json', '../test/G_1000_8000_2.json',
                 '../test/G_10000_80000_2.json', '../test/G_10000_80000_2.json', '../test/G_30000_240000_2.json']
    for file_name in file_list:
        algo.load_from_json(file_name)
        start = time.perf_counter()
        # only the shortest-path query itself is timed, not the load
        algo.shortest_path(0, 10)
        print("time took to shortest_path in from 0-10 {}: {}".format(file_name, time.perf_counter()-start))
# test run time - load json
def test_load_from_json(self):
    """Benchmark GraphAlgo.load_from_json on each benchmark graph."""
    print()
    algo = GraphAlgo()
    # same file set as the original per-name constants, in the same order;
    # NOTE(review): the G_20000_* slots point at the G_10000_* files -- TODO confirm
    file_list = ['../test/G_10_80_0.json', '../test/G_100_800_0.json', '../test/G_1000_8000_0.json',
                 '../test/G_10000_80000_0.json', '../test/G_10000_80000_0.json', '../test/G_30000_240000_0.json',
                 '../test/G_10_80_1.json', '../test/G_100_800_1.json', '../test/G_1000_8000_1.json',
                 '../test/G_10000_80000_1.json', '../test/G_10000_80000_1.json', '../test/G_30000_240000_1.json',
                 '../test/G_10_80_2.json', '../test/G_100_800_2.json', '../test/G_1000_8000_2.json',
                 '../test/G_10000_80000_2.json', '../test/G_10000_80000_2.json', '../test/G_30000_240000_2.json']
    for file_name in file_list:
        start = time.perf_counter()
        algo.load_from_json(file_name)
        print("time took to load {}: {}".format(file_name, time.perf_counter()-start))
if __name__ == '__main__':
unittest.main() | {"/src/GraphAlgo.py": ["/src/DiGraph.py"], "/src/DiGraph.py": ["/src/Node_Data.py"]} |
48,857 | yakovdabush1/Ex3_python | refs/heads/main | /src/Node_Data.py | import random
class Node_Data:
    """A single graph vertex: key, free-form info, tag, optional (x, y, z)
    position, and its incoming/outgoing adjacency maps."""

    def __init__(self, key, info="", tag=0, pos=None):
        self.key = key
        self.info = info
        self.tag = tag
        self.pos = pos
        # adjacency maps: neighbor key -> edge weight
        self.out_neighbors = {}  # edges leaving this node
        self.in_neighbors = {}   # edges entering this node

    def update_position(self, v_size):
        """Assign a random (x, y, 0) position inside [0, v_size]^2, but only
        when no position has been set yet (existing positions are kept)."""
        if self.pos is not None:
            return
        self.pos = (random.uniform(0, v_size), random.uniform(0, v_size), 0)

    def __repr__(self):
        return "{}: |edges out| {} |edges in| {}".format(self.key, len(self.out_neighbors), len(self.in_neighbors))
| {"/src/GraphAlgo.py": ["/src/DiGraph.py"], "/src/DiGraph.py": ["/src/Node_Data.py"]} |
48,858 | yakovdabush1/Ex3_python | refs/heads/main | /src/DiGraph.py | from src.Node_Data import Node_Data
class DiGraph:
# Weighted directed graph backed by Node_Data objects; tracks node/edge counts
# and a modification counter (mc) bumped on every structural change.
def __init__(self):
# node key -> Node_Data
self.nodes = {}
# modification counter
self.mc = 0
self.edge_size = 0
self.node_size = 0
def v_size(self) -> int:
"""
Returns the number of vertices in this graph
@return: The number of vertices in this graph
"""
return self.node_size
def e_size(self) -> int:
"""
Returns the number of edges in this graph
@return: The number of edges in this graph
"""
return self.edge_size
def get_all_v(self) -> dict:
"""return a dictionary of all the nodes in the Graph, each node is represented using apair (key, node_data)
"""
return self.nodes
def all_in_edges_of_node(self, id1: int) -> dict:
"""return a dictionary of all the nodes connected to (into) node_id ,
each node is represented using a pair (key, weight)
"""
if id1 in self.nodes:
return self.nodes[id1].in_neighbors
return {}
def all_out_edges_of_node(self, id1: int) -> dict:
"""return a dictionary of all the nodes connected from node_id , each node is represented using a pair (key,
weight)
"""
if id1 in self.nodes:
return self.nodes[id1].out_neighbors
return {}
def get_mc(self) -> int:
"""
Returns the current version of this graph,
on every change in the graph state - the MC should be increased
@return: The current version of this graph.
"""
return self.mc
def add_edge(self, id1: int, id2: int, weight: float) -> bool:
"""
Adds an edge to the graph.
@param id1: The start node of the edge
@param id2: The end node of the edge
@param weight: The weight of the edge
@return: True if the edge was added successfully, False o.w.
Note: If the edge already exists or one of the nodes dose not exists the functions will do nothing
"""
if id1 is not id2 and id1 in self.nodes and id2 in self.nodes:
node1 = self.nodes[id1]
node2 = self.nodes[id2]
if weight > 0 and id2 not in node1.out_neighbors and id1 not in node2.in_neighbors:
node1.out_neighbors[id2] = weight
node2.in_neighbors[id1] = weight
self.edge_size = self.edge_size + 1
self.mc += 1
return True
return False
def add_node(self, node_id: int, pos: tuple = None) -> bool:
"""
Adds a node to the graph.
@param node_id: The node ID
@param pos: The position of the node
@return: True if the node was added successfully, False o.w.
Note: if the node id already exists the node will not be added
"""
if node_id not in self.nodes:
self.nodes[node_id] = Node_Data(node_id, pos)
self.node_size += 1
return True
return False
def remove_node(self, node_id: int) -> bool:
"""
Removes a node from the graph.
@param node_id: The node ID
@return: True if the node was removed successfully, False o.w.
Note: if the node id does not exists the function will do nothing
"""
if node_id not in self.nodes:
return False
node = self.nodes[node_id]
outs = node.out_neighbors
ins = node.in_neighbors
for n in outs:
del self.nodes[n].in_neighbors[node_id]
self.edge_size -= 1
self.mc += 1
for n in ins:
del self.nodes[n].out_neighbors[node_id]
self.edge_size -= 1
self.mc += 1
self.node_size -= 1
self.mc += 1
del self.nodes[node_id]
return True
def remove_edge(self, node_id1: int, node_id2: int) -> bool:
"""
Removes an edge from the graph.
@param node_id1: The start node of the edge
@param node_id2: The end node of the edge
@return: True if the edge was removed successfully, False o.w.
Note: If such an edge does not exists the function will do nothing
"""
if node_id2 in self.nodes[node_id1].out_neighbors:
del self.nodes[node_id1].out_neighbors[node_id2]
del self.nodes[node_id2].in_neighbors[node_id1]
self.edge_size -= 1
self.mc += 1
return True
return False
# toString() in Java: concise one-line summary of the graph's size
def __repr__(self):
return "|V|={} , |E|={}".format(self.v_size(), self.e_size()) | {"/src/GraphAlgo.py": ["/src/DiGraph.py"], "/src/DiGraph.py": ["/src/Node_Data.py"]} |
48,859 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train/predict.py | from data_scripts.scaler import StandardScaler
from typing import List
import torch
from tqdm import tqdm
import numpy as np
from data_scripts import SynergyDataLoader, SynergyDataset, StandardScaler
from models import MatchMaker
def predict(model: MatchMaker,
            data_loader: SynergyDataLoader,
            disable_progress_bar: bool = False,
            scaler: StandardScaler = None,
            device: torch.device = 'cpu') -> List[float]:
    """
    Makes predictions on a dataset with a trained MatchMaker model.
    :param model: A :class:`MatchMaker` model; it is put into eval mode here.
    :param disable_progress_bar: Whether to disable the progress bar
        (currently unused -- the tqdm loop is commented out; kept for API compatibility).
    :param data_loader: A :class:`SynergyDataLoader` yielding batches.
    :param scaler: A :class:`StandardScaler` fit on the training targets; when
        given, predictions are inverse-transformed back to the target scale.
    :param device: Device the cell-line feature tensor is moved to.
    :return: A flat list of predictions, one per datapoint.
    """
    model.eval()
    preds = []
    for batch in data_loader:
        # each batch bundles molecule graphs, extra features, and cell-line features
        mol_batch, features_batch, cell_batch = batch.batch_graph(), batch.features(), batch.cell_lines()
        cell_batch = torch.FloatTensor(cell_batch).to(device)
        # inference only: no gradient bookkeeping needed
        with torch.no_grad():
            batch_preds = model(mol_batch, cell_batch, features_batch)
        batch_preds = batch_preds.data.cpu().numpy()
        # undo target scaling if the model was trained on scaled targets;
        # fixed: removed a stray no-op `np.expand_dims(batch_preds, 0)` whose
        # result was discarded
        if scaler is not None:
            batch_preds = scaler.inverse_transform(batch_preds).flatten()
        preds.extend(batch_preds.tolist())
    return preds
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,860 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /args.py | from typing import List, Optional, Tuple
from typing_extensions import Literal
import os
import torch
from tap import Tap
import re
class CommonArgs(Tap):
""":class:`CommonArgs` contains arguments that are used in both :class:`TrainArgs` and :class:`PredictArgs`."""
# NOTE: Tap turns the annotated class attributes below into CLI arguments and
# uses the bare-string "docstrings" after them as --help text; keep them intact.
smiles_columns: List[str] = ['drug1', 'drug2']
"""List of names of the columns containing SMILES strings.
By default, uses the first :code:`number_of_molecules` columns."""
number_of_molecules: int = 2
"""Number of molecules in each input to the model.
This must equal the length of :code:`smiles_columns` (if not :code:`None`)."""
result_dir: str = 'results'
"""Result directory"""
checkpoint_path: str = None
"""Path to model checkpoint (:code:`.pt` file)."""
no_cuda: bool = False
"""Turn off cuda (i.e., use CPU instead of GPU)."""
gpu: int = None
"""Which GPU to use."""
features_generator: List[str] = None
"""Method(s) of generating additional features."""
features_path: List[str] = None
"""Path(s) to features to use in FNN (instead of features_generator)."""
no_features_scaling: bool = False
"""Turn off scaling of features."""
no_cell_line_scaling: bool = False
"""Turn off scaling of cell line features."""
num_workers: int = 8
"""Number of workers for the parallel data loading (0 means sequential)."""
batch_size: int = 50
"""Batch size."""
atom_descriptors: Literal['feature', 'descriptor'] = None
"""
Custom extra atom descriptors.
:code:`feature`: used as atom features to featurize a given molecule.
:code:`descriptor`: used as descriptor and concatenated to the machine learned atomic representation.
"""
mode: str = 'common'
no_input_shuffle: bool = False
def __init__(self, *args, **kwargs):
super(CommonArgs, self).__init__(*args, **kwargs)
# cached sizes for atom features/descriptors, set later via the property setters
self._atom_features_size = 0
self._atom_descriptors_size = 0
@property
def device(self) -> torch.device:
"""The :code:`torch.device` on which to load and process data and models."""
if not self.cuda:
return torch.device('cpu')
return torch.device('cuda', self.gpu)
@device.setter
def device(self, device: torch.device) -> None:
# keep the no_cuda/gpu flags in sync with an externally assigned device
self.cuda = device.type == 'cuda'
self.gpu = device.index
@property
def cuda(self) -> bool:
"""Whether to use CUDA (i.e., GPUs) or not."""
return not self.no_cuda and torch.cuda.is_available()
@property
def shuffle_input_drugs(self) -> bool:
# positive-sense view of the no_input_shuffle CLI flag
return not self.no_input_shuffle
@cuda.setter
def cuda(self, cuda: bool) -> None:
self.no_cuda = not cuda
@property
def features_scaling(self) -> bool:
"""Whether to apply normalization with a :class:`~chemprop.data.scaler.StandardScaler` to the additional molecule-level features."""
return not self.no_features_scaling
@property
def cell_line_scaling(self) -> bool:
"""Whether to apply normalization with a :class:`~chemprop.data.scaler.StandardScaler` to the additional molecule-level features."""
return not self.no_cell_line_scaling
@property
def atom_features_size(self) -> int:
"""The size of the atom features."""
return self._atom_features_size
@atom_features_size.setter
def atom_features_size(self, atom_features_size: int) -> None:
self._atom_features_size = atom_features_size
@property
def atom_descriptors_size(self) -> int:
"""The size of the atom descriptors."""
return self._atom_descriptors_size
@atom_descriptors_size.setter
def atom_descriptors_size(self, atom_descriptors_size: int) -> None:
self._atom_descriptors_size = atom_descriptors_size
def configure(self) -> None:
# restrict --gpu choices to the GPUs actually visible to torch
self.add_argument('--gpu', choices=list(range(torch.cuda.device_count())))
def process_args(self) -> None:
# Ensure the result directory exists before run-dir numbering below
if not os.path.exists(self.result_dir):
os.makedirs(self.result_dir)
# Validate features
if self.features_generator is not None and 'rdkit_2d_normalized' in self.features_generator and self.features_scaling:
raise ValueError('When using rdkit_2d_normalized features, --no_features_scaling must be specified.')
if self.smiles_columns is None:
self.smiles_columns = [None] * self.number_of_molecules
elif len(self.smiles_columns) != self.number_of_molecules:
raise ValueError('Length of smiles_columns must match number_of_molecules.')
# # Validate atom descriptors
# if (self.atom_descriptors is None) != (self.atom_descriptors_path is None):
#     raise ValueError('If atom_descriptors is specified, then an atom_descriptors_path must be provided '
#                      'and vice versa.')
if self.atom_descriptors is not None and self.number_of_molecules > 1:
raise NotImplementedError('Atom descriptors are currently only supported with one molecule '
'per input (i.e., number_of_molecules = 1).')
# pick the next run directory index by scanning existing result dirs for numeric prefixes
dir_names = [name for name in os.listdir(self.result_dir)
if os.path.isdir(os.path.join(self.result_dir, name))]
# NOTE(review): `iter` shadows the builtin -- consider renaming
iter = [re.findall(r'\d+', dname)[0] for dname in dir_names if re.findall(r'\d+', dname)]
if len(iter) > 0:
n = max(list(map(int, iter)))+1
else:
n = 0
# resulting layout: <result_dir>/NNN-<mode>, e.g. results/007-train
self.save_dir = os.path.join(self.result_dir, str(n).zfill(3)+'-'+self.mode)
os.makedirs(self.save_dir)
class TrainArgs(CommonArgs):
""":class:`TrainArgs` includes :class:`CommonArgs` along with additional arguments used for training a Chemprop model."""
# General arguments
data_path: str = 'data/small_DrugCombData.csv'
"""Path to data CSV file."""
cell_lines: str = 'data/small_cell_line_gex.csv'
"""Path to the cell line features"""
target_column: List[str] = 'loewe'
"""
Name of the columns containing target values.
By default, uses all columns except the SMILES column and the :code:`ignore_columns`.
"""
separate_val_path: str = None
"""Path to separate val set, optional."""
separate_test_path: str = None
"""Path to separate test set, optional."""
split_type: Literal['random', 'mm-split'] = 'mm-split'
"""Method of splitting the data into train/val/test."""
split_sizes: Tuple[float, float, float] = (0.8, 0.1, 0.1)
"""Split proportions for train/validation/test sets."""
num_folds: int = 1
"""Number of folds when performing cross validation."""
folds_file: str = None
"""Optional file of fold labels."""
train_index: str = None
"""Which fold to use as val for leave-one-out cross val."""
val_index: str = None
"""Which fold to use as val for leave-one-out cross val."""
test_index: str = None
"""Which fold to use as test for leave-one-out cross val."""
crossval_index_dir: str = None
"""Directory in which to find cross validation index files."""
crossval_index_file: str = None
"""Indices of files to use as train/val/test. Overrides :code:`--num_folds` and :code:`--seed`."""
seed: int = 0
"""
Random seed to use when splitting data into train/val/test sets.
When :code`num_folds > 1`, the first fold uses this seed and all subsequent folds add 1 to the seed.
"""
pytorch_seed: int = 0
"""Seed for PyTorch randomness (e.g., random initial weights)."""
save_smiles_splits: bool = False
"""Save smiles for each train/val/test splits for prediction convenience later."""
test: bool = False
"""Whether to skip training and only test the model."""
quiet: bool = False
"""Skip non-essential print statements."""
log_frequency: int = 1000
"""The number of batches between each logging of the training loss."""
show_individual_scores: bool = False
"""Show all scores for individual targets, not just average, at the end."""
cache_cutoff: float = 10000
datapoints_path: str = None
"""
Maximum number of molecules in dataset to allow caching.
Below this number, caching is used and data loading is sequential.
Above this number, caching is not used and data loading is parallel.
Use "inf" to always cache.
"""
save_preds: bool = False
"""Whether to save test split predictions during training."""
# Model arguments
bias: bool = False
"""Whether to add bias to linear layers."""
hidden_size: int = 300
"""Dimensionality of hidden layers in MPN."""
depth: int = 3
"""Number of message passing steps."""
mpn_shared: bool = True
"""Whether to use the same message passing neural network for all input molecules
Only relevant if :code:`number_of_molecules > 1`"""
dropout: float = 0
"""Dropout probability."""
mm_dropout: float = 0.5
mm_in_dropout: float = 0.2
atom_messages: bool = False
"""Centers messages on atoms instead of on bonds."""
undirected: bool = False
"""Undirected edges (always sum the two relevant bond vectors)."""
dsn_architecture: str = '2048-4096-2048'
dsn_output: int = None
spn_architecture: str = '2048-1024'
features_only: bool = False
"""Use only the additional features in an FFN, no graph network."""
separate_val_features_path: List[str] = None
"""Path to file with features for separate val set."""
separate_test_features_path: List[str] = None
"""Path to file with features for separate test set."""
config_path: str = None
"""
Path to a :code:`.json` file containing arguments. Any arguments present in the config file
will override arguments specified via the command line or by the defaults.
"""
ensemble_size: int = 1
"""Number of models in ensemble."""
aggregation: Literal['mean', 'sum', 'norm', 'attention'] = 'mean'
"""Aggregation scheme for atomic vectors into molecular vectors"""
aggregation_norm: int = 100
"""For norm aggregation, number by which to divide summed up atomic features"""
embedding_agg: Literal['concat', 'maxpooling', 'sum'] = 'concat'
# Training arguments
epochs: int = 1000
"""Number of epochs to run."""
warmup_epochs: float = 2.0
"""
Number of epochs during which learning rate increases linearly from :code:`init_lr` to :code:`max_lr`.
Afterwards, learning rate decreases exponentially from :code:`max_lr` to :code:`final_lr`.
"""
init_lr: float = 1e-4
"""Initial learning rate."""
max_lr: float = 1e-3
"""Maximum learning rate."""
final_lr: float = 1e-4
"""Final learning rate."""
grad_clip: float = None
"""Maximum magnitude of gradient during training."""
mode: str = 'train'
def __init__(self, *args, **kwargs) -> None:
    """Initialize TrainArgs and the private caches behind its lazy properties.

    The underscore attributes are populated later (via setters or
    :meth:`process_args`) and surfaced through the read-only properties below.
    """
    super(TrainArgs, self).__init__(*args, **kwargs)
    # Fix: the original assigned self._task_names = None twice; once is enough.
    self._task_names = None
    self._crossval_index_sets = None
    self._num_tasks = None
    self._features_size = None
    self._train_data_size = None
    self._cell_line_size = None
    self._dsn_layers = None
    self._spn_layers = None
@property
def use_input_features(self) -> bool:
    """Whether additional molecule-level features are in use, either via a
    features generator or via a features file."""
    return not (self.features_generator is None and self.features_path is None)
@property
def num_lrs(self) -> int:
    """The number of learning rates to use (currently hard-coded to 1)."""
    # NOTE(review): the scheduler setup appears to assume a single parameter
    # group — confirm before making this configurable.
    return 1
@property
def crossval_index_sets(self) -> List[List[List[int]]]:
    """Index sets used for splitting data into train/validation/test during cross-validation.

    Populated by :meth:`process_args` (unpickled from ``crossval_index_file``)
    for the 'crossval' / 'index_predetermined' split types; ``None`` otherwise.
    """
    return self._crossval_index_sets
@property
def task_names(self) -> List[str]:
    """A list of names of the tasks being trained on (set via the setter below)."""
    return self._task_names
@task_names.setter
def task_names(self, task_names: List[str]) -> None:
    """Sets the names of the tasks being trained on."""
    self._task_names = task_names
@property
def num_tasks(self) -> int:
    """The number of tasks being trained on (0 while task names are unset)."""
    if self.task_names is None:
        return 0
    return len(self.task_names)
@property
def features_size(self) -> int:
    """The dimensionality of the additional molecule-level features (set externally
    once feature data has been loaded; ``None`` until then)."""
    return self._features_size
@features_size.setter
def features_size(self, features_size: int) -> None:
    """Sets the dimensionality of the additional molecule-level features."""
    self._features_size = features_size
@property
def cell_line_size(self) -> int:
    """The dimensionality of the cell-line feature vector."""
    # Docstring fixed: the original was copy-pasted from features_size and
    # wrongly described molecule-level features.
    return self._cell_line_size
@cell_line_size.setter
def cell_line_size(self, features_size: int) -> None:
    """Sets the dimensionality of the cell-line feature vector.

    NOTE(review): the parameter is named ``features_size`` (copy-paste from the
    features_size setter); it is kept for call compatibility but sets the
    cell-line size.
    """
    self._cell_line_size = features_size
@property
def train_data_size(self) -> int:
    """The size of the training data set (``None`` until set by the training loop)."""
    return self._train_data_size
@train_data_size.setter
def train_data_size(self, train_data_size: int) -> None:
    """Sets the size of the training data set."""
    self._train_data_size = train_data_size
def process_args(self) -> None:
    """Post-process parsed arguments: apply config-file overrides, create the
    datapoints output directory, validate MPN/split-type option combinations,
    and load cross-validation index sets when required.

    :raises ValueError: If ``atom_messages`` is combined with ``undirected``,
        or split-type options and their index files/dirs are inconsistent.
    """
    super(TrainArgs, self).process_args()

    global temp_dir  # Prevents the temporary directory from being deleted upon function return

    # Load config file
    if self.config_path is not None:
        with open(self.config_path) as f:
            config = json.load(f)
            # Config values override both CLI values and defaults.
            for key, value in config.items():
                setattr(self, key, value)

    # Ensure the datapoints output directory exists before anything writes to it.
    if self.datapoints_path is not None and not os.path.exists(self.datapoints_path):
        os.makedirs(self.datapoints_path)

    # Handle MPN variants
    if self.atom_messages and self.undirected:
        raise ValueError('Undirected is unnecessary when using atom_messages '
                         'since atom_messages are by their nature undirected.')

    # 'crossval' and crossval_index_dir must be given together (XOR check).
    if not (self.split_type == 'crossval') == (self.crossval_index_dir is not None):
        raise ValueError('When using crossval split type, must provide crossval_index_dir.')

    if not (self.split_type in ['crossval', 'index_predetermined']) == (self.crossval_index_file is not None):
        raise ValueError('When using crossval or index_predetermined split type, must provide crossval_index_file.')

    if self.split_type in ['crossval', 'index_predetermined']:
        # Pre-computed fold index sets drive both the fold count and the seed.
        with open(self.crossval_index_file, 'rb') as rf:
            self._crossval_index_sets = pickle.load(rf)
        self.num_folds = len(self.crossval_index_sets)
        self.seed = 0

    # Test settings
    if self.test:
        # In test mode no training epochs are run.
        self.epochs = 0
class HyperoptArgs(TrainArgs):
    """:class:`TrainArgs` extended with settings for hyperparameter-optimization runs."""

    mode: str = 'hyper-opt'
    # Number of hyperparameter configurations to evaluate.
    num_iters: int = 150
    # NOTE(review): presumably per-trial resources for the tuning scheduler
    # (GPUs may be fractional) — confirm against the hyperopt driver.
    resource_gpu: float = 1
    resource_cpu: int = 3
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,861 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /models/__init__.py | from .model import MatchMaker
from .mpn import MPN, MPNEncoder
from .nn_utils import NoamLR, compute_gnorm, compute_pnorm | {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,862 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train/evaluate.py | from collections import defaultdict
import logging
from typing import Dict, List
from .predict import predict
from data_scripts import SynergyDataLoader, StandardScaler
from models import MatchMaker
from scipy import stats
from sklearn.metrics import mean_squared_error
import numpy as np
import torch
def pearson(y: List[float],
            pred: List[float]) -> float:
    """Return the Pearson correlation coefficient between targets and predictions.

    :param y: Ground-truth values.
    :param pred: Predicted values.
    :return: The Pearson correlation coefficient; the associated p-value
             computed by :func:`scipy.stats.pearsonr` is discarded.
    """
    # Unpack directly; the original kept an unused p-value local and a dead print.
    pear_value, _ = stats.pearsonr(y, pred)
    return pear_value
def spearman(y: List[float],
             pred: List[float]) -> float:
    """Return the Spearman rank correlation between targets and predictions.

    :param y: Ground-truth values.
    :param pred: Predicted values.
    :return: The Spearman correlation coefficient; the associated p-value
             computed by :func:`scipy.stats.spearmanr` is discarded.
    """
    # Unpack directly; the original kept an unused p-value local and a dead print.
    spear_value, _ = stats.spearmanr(y, pred)
    return spear_value
def mse(y: List[float],
        pred: List[float]) -> float:
    """Return the mean squared error between targets and predictions."""
    return mean_squared_error(y, pred)
def squared_error(y: List[float],
                  pred: List[float]) -> np.ndarray:
    """Return the element-wise squared error between targets and predictions.

    Vectorized replacement for the original Python loop; the output is the same
    1-D array with ``(y[i] - pred[i]) ** 2`` at each position. The return
    annotation is corrected from ``float`` to ``np.ndarray``.

    :param y: Ground-truth values.
    :param pred: Predicted values.
    :return: Array of per-sample squared errors.
    """
    diff = np.asarray(y) - np.asarray(pred)
    return diff * diff
def evaluate_predictions(preds: List[float],
                         targets: List[float],
                         logger: logging.Logger = None) -> Dict[str, float]:
    """
    Scores predictions against targets with Pearson/Spearman correlation and MSE.

    The original docstring described parameters (num_tasks, metrics,
    dataset_type) that do not exist in the signature, kept an unused ``info``
    local, and annotated the return as ``Dict[str, List[float]]`` although the
    values are scalars; all three are corrected here.

    :param preds: Model predictions, one per datapoint.
    :param targets: Ground-truth values, one per datapoint.
    :param logger: A logger to record output (currently unused; kept for
                   interface stability with callers).
    :return: A dictionary mapping metric names ('Pearson', 'Spearman', 'MSE')
             to their values.
    """
    return {
        'Pearson': pearson(targets, preds),
        'Spearman': spearman(targets, preds),
        'MSE': mse(targets, preds),
    }
def evaluate(model: MatchMaker,
             data_loader: SynergyDataLoader,
             scaler: StandardScaler = None,
             logger: logging.Logger = None,
             device: torch.device = 'cpu') -> Dict[str, List[float]]:
    """
    Evaluates a model on a dataset: runs prediction, then scores the
    predictions against the loader's targets.

    :param model: A :class:`MatchMaker` model.
    :param data_loader: A :class:`SynergyDataLoader` over the evaluation data.
    :param scaler: A :class:`StandardScaler` fit on the training targets,
                   used to un-scale the predictions.
    :param logger: A logger to record output.
    :param device: Device on which to run prediction.
    :return: A dictionary mapping each metric name to its value.
    """
    predictions = predict(model=model,
                          data_loader=data_loader,
                          scaler=scaler,
                          device=device)
    return evaluate_predictions(preds=predictions,
                                targets=data_loader.targets,
                                logger=logger)
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,863 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train/train.py | import logging
from typing import Callable
from tensorboardX import SummaryWriter
import torch
import torch.nn as nn
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
from tqdm import tqdm
from args import TrainArgs
from data_scripts import SynergyDataLoader, SynergyDataset
from models import MatchMaker
from models import NoamLR, compute_gnorm, compute_pnorm
def train(model: MatchMaker,
          data_loader: SynergyDataLoader,
          loss_func: Callable,
          optimizer: Optimizer,
          scheduler: _LRScheduler,
          args: TrainArgs,
          n_iter: int = 0,
          logger: logging.Logger = None,
          writer: SummaryWriter = None
          ) -> int:
    """
    Trains a model for an epoch.

    Bug fix: the logged learning rates were previously overwritten by a
    hard-coded debug placeholder ``lrs = [0, 1]`` right after calling
    ``scheduler.get_lr()``, so the real learning rate never reached the logs
    or tensorboard. The placeholder is removed. Dead commented-out masking
    code is also dropped.

    :param model: A :class:`MatchMaker` model.
    :param data_loader: A :class:`SynergyDataLoader` over the training data.
    :param loss_func: Per-sample loss function (reduction applied here).
    :param optimizer: An optimizer.
    :param scheduler: A learning rate scheduler.
    :param args: A :class:`TrainArgs` object containing arguments for training the model.
    :param n_iter: The number of iterations (training examples) trained on so far.
    :param logger: A logger for recording output.
    :param writer: A tensorboardX SummaryWriter.
    :return: The total number of iterations (training examples) trained on so far.
    """
    debug = logger.debug if logger is not None else print

    model.train()
    loss_sum = iter_count = 0

    for batch in data_loader:
        # Prepare batch (the collate function yields SynergyDataset batches).
        batch: SynergyDataset
        mol_batch, features_batch, cell_batch, target_batch, weights = \
            batch.batch_graph(), batch.features(), batch.cell_lines(), batch.targets(), batch.loss_weights()
        cell_batch = torch.FloatTensor(cell_batch).to(args.device)
        weights = torch.FloatTensor(weights).to(args.device)

        # Run model
        optimizer.zero_grad()
        preds = model(mol_batch, cell_batch, features_batch)

        # Targets must live on the same device as the predictions.
        targets = torch.Tensor(target_batch).to(preds.device)

        # Per-sample loss, weighted (see SynergyDataset.calculate_weights), then averaged.
        loss = loss_func(preds, targets) * weights
        loss = loss.mean()

        loss_sum += loss.item()
        iter_count += 1

        loss.backward()
        if args.grad_clip:
            nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
        optimizer.step()

        # NoamLR steps per batch; other schedulers are stepped by the caller.
        if isinstance(scheduler, NoamLR):
            scheduler.step()

        n_iter += len(batch)

        # Log and/or add to tensorboard
        if (n_iter // args.batch_size) % args.log_frequency == 0:
            lrs = scheduler.get_lr()  # fixed: no longer clobbered by a [0, 1] placeholder
            pnorm = compute_pnorm(model)
            gnorm = compute_gnorm(model)
            loss_avg = loss_sum / iter_count
            loss_sum = iter_count = 0

            lrs_str = ', '.join(f'lr_{i} = {lr:.4e}' for i, lr in enumerate(lrs))
            debug(f'Loss = {loss_avg}, PNorm = {pnorm:.4f}, GNorm = {gnorm:.4f}, {lrs_str}')

            if writer is not None:
                writer.add_scalar('train_loss', loss_avg, n_iter)
                writer.add_scalar('param_norm', pnorm, n_iter)
                writer.add_scalar('gradient_norm', gnorm, n_iter)
                for i, lr in enumerate(lrs):
                    writer.add_scalar(f'learning_rate_{i}', lr, n_iter)

    return n_iter
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,864 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /models/model.py | from typing import List, Union
from rdkit import Chem
import torch.nn as nn
import torch.optim as optim
import torch
import numpy as np
from args import TrainArgs
from data_scripts import BatchMolGraph
from .nn_utils import initialize_weights
from .mpn import MPN
def concat(embedding1, embedding2):
    """Concatenate two drug embeddings along the feature axis, randomly
    swapping their order with probability 0.5 so the model does not learn a
    fixed drug ordering."""
    if torch.rand(1)[0] < 0.5:
        ordered = (embedding1, embedding2)
    else:
        ordered = (embedding2, embedding1)
    return torch.cat(ordered, 1)
def maxpooling(embedding1, embedding2):
    """Element-wise maximum of the two drug embeddings (order-invariant)."""
    stacked = torch.stack((embedding1, embedding2))
    return stacked.max(0)[0]
def sum(embedding1, embedding2):
    """Element-wise sum of the two drug embeddings (order-invariant).

    NOTE: kept as ``sum`` for compatibility with get_agg_func / saved args,
    even though it shadows the builtin within this module.
    """
    combined = embedding1 + embedding2
    return combined
def get_agg_func(embedding, dsn_layer):
    """Map an aggregation name to ``(aggregation function, SPN input width)``.

    :param embedding: Aggregation name: 'concat', 'maxpooling' or 'sum'.
    :param dsn_layer: Width of the final DSN layer (one drug embedding).
    :return: The aggregation callable and the width of its output
             ('concat' doubles the embedding width; the others preserve it).
    :raises ValueError: For an unknown aggregation name. The original fell
             through and implicitly returned None, which only failed later
             with an opaque unpacking error in MatchMaker.__init__.
    """
    if embedding == 'concat':
        return concat, dsn_layer * 2
    elif embedding == 'maxpooling':
        return maxpooling, dsn_layer
    elif embedding == 'sum':
        return sum, dsn_layer
    raise ValueError(f"Unsupported embedding aggregation: '{embedding}'")
class MatchMaker(nn.Module):
    """Synergy-prediction model: a message-passing drug encoder feeding a
    MatchMaker-style architecture.

    Each drug is encoded by the MPN, concatenated with the cell-line features
    (and optional molecule-level features), passed through a shared
    drug-specific network (DSN); the two resulting embeddings are aggregated
    and scored by the synergy prediction network (SPN).
    """

    def __init__(self, args: TrainArgs):
        """
        :param args: A :class:`TrainArgs` object with model architecture settings.
        """
        super(MatchMaker, self).__init__()
        self.create_encoder(args)

        # Hidden-layer widths of the DSN, e.g. '2048-4096-2048'.
        dsn_layers = list(map(int, args.dsn_architecture.split('-')))
        # Backward compatibility with args/checkpoints predating `embedding_agg`.
        if 'embedding_agg' not in vars(args):
            # NOTE(review): 'deterministic_concat' is not handled by get_agg_func,
            # which would return None here and fail on the unpacking below — confirm
            # the intended legacy default.
            args.embedding_agg = 'deterministic_concat'
        if args.dsn_output is not None:
            dsn_layers.append(args.dsn_output)
        # The SPN input width depends on how the two embeddings are combined.
        self.embedding_agg, spn_input_size = get_agg_func(args.embedding_agg, dsn_layers[-1])
        spn_layers = list(map(int, args.spn_architecture.split('-')))
        spn_layers.insert(0, spn_input_size)
        self.create_dsn(args, dsn_layers)
        self.create_spn(args, spn_layers)

        initialize_weights(self)

    def create_encoder(self, args:TrainArgs) -> None:
        """Builds the message-passing drug encoder."""
        self.encoder = MPN(args)

    def create_dsn(self, args: TrainArgs,
                   dsn_layers: List[int]) -> None:
        """Builds the shared drug-specific network (DSN): an MLP over
        [drug encoding ‖ extra molecule features ‖ cell-line features]."""
        # Input width: cell line + (optional) extra features + (optional) MPN encoding.
        first_layer_dim = args.cell_line_size
        if args.use_input_features:
            first_layer_dim += args.features_size
        if not args.features_only:
            first_layer_dim += args.hidden_size
        dsn_layers.insert(0, first_layer_dim)

        dropout = nn.Dropout(args.mm_dropout)
        in_dropout = nn.Dropout(args.mm_in_dropout)

        dsn = []
        for i, size in enumerate(dsn_layers[1:], 1):
            dsn.append(nn.Linear(dsn_layers[i-1], size))
            # No activation/dropout after the final DSN layer; the first hidden
            # layer gets the dedicated input-dropout rate.
            if i < len(dsn_layers)-1 :
                dsn.append(nn.ReLU())
                if i == 1:
                    dsn.append(in_dropout)
                else:
                    dsn.append(dropout)
        self.dsn = nn.Sequential(*dsn)

    def create_spn(self, args: TrainArgs,
                   spn_layers: List[int]) -> None:
        """Builds the synergy prediction network (SPN), ending in a single-output head."""
        dropout = nn.Dropout(args.mm_dropout)
        spn = []
        for i, size in enumerate(spn_layers[1:], 1):
            spn.extend([nn.Linear(spn_layers[i-1], size),
                        nn.ReLU()])
            if i == len(spn_layers)-1:
                spn.append(nn.Linear(size, 1))  # final scalar synergy score
            else:
                spn.append(dropout)
        self.spn = nn.Sequential(*spn)

    def forward(self,
                batch: Union[List[str], List[Chem.Mol], BatchMolGraph],
                cell_batch: List[np.ndarray],
                features_batch: List[np.ndarray] = None) -> torch.FloatTensor:
        """
        Runs MatchMaker on a batch of drug pairs and cell lines.

        :param batch: Batched molecular input; the encoder yields one encoding
                      per drug position.
        :param cell_batch: Cell-line feature tensor for the batch.
        :param features_batch: Optional extra molecule-level features.
        :return: A 1-D tensor of predicted synergy scores (flattened).
        """
        drug_encodings = self.encoder(batch, features_batch)
        # Both drug embeddings are conditioned on the same cell-line features.
        x1 = torch.cat((drug_encodings[0], cell_batch), 1)
        x2 = torch.cat((drug_encodings[1], cell_batch), 1)
        embedding1 = self.dsn(x1)
        embedding2 = self.dsn(x2)
        combination = self.embedding_agg(embedding1, embedding2)
        out = self.spn(combination)
        return out.flatten()
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,865 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /data_scripts/utils.py | from .data import SynergyDataset
from args import TrainArgs
from random import Random
from typing import List, Optional, Set, Tuple, Union
from logging import Logger
import numpy as np
def split_data(data: 'SynergyDataset',
               split_type: str = 'random',
               sizes: Tuple[float, float, float] = (0.8, 0.1, 0.1),
               seed: int = 0,
               num_folds: int = 1,
               args: 'TrainArgs' = None,
               logger: Logger = None) -> Tuple['SynergyDataset',
                                               'SynergyDataset',
                                               'SynergyDataset']:
    r"""
    Splits data into training, validation, and test splits.

    Fixes over the original:
    * ``np.loadtxt(..., dtype=np.int)`` — the ``np.int`` alias was removed in
      NumPy >= 1.20 and crashed on modern installs; plain ``int`` is used.
    * ``sum(sizes) == 1`` — exact float equality rejected valid triples such
      as ``(0.7, 0.2, 0.1)``; a small tolerance is used instead.
    * 'mm-split' without ``args`` previously died with a ``NameError``; it now
      raises a clear ``ValueError``.

    :param data: A :class:`SynergyDataset`.
    :param split_type: Split type: 'mm-split' (predetermined index files) or 'random'.
    :param sizes: A length-3 tuple with the proportions of data in the train,
                  validation, and test sets (only used for 'random').
    :param seed: The random seed to use before shuffling data.
    :param num_folds: Number of folds to create (unused here; kept for interface stability).
    :param args: A :class:`TrainArgs` object providing the index-file paths for 'mm-split'.
    :param logger: A logger for recording output (currently unused).
    :return: A tuple of :class:`SynergyDataset`\ s containing the train,
             validation, and test splits of the data.
    :raises ValueError: For invalid sizes, an unsupported split type, or
             'mm-split' without ``args``.
    """
    if len(sizes) != 3 or abs(sum(sizes) - 1.0) > 1e-9:
        raise ValueError('Valid split sizes must sum to 1 and must have three sizes: train, validation, and test.')

    random = Random(seed)

    if split_type == 'mm-split':
        if args is None:
            raise ValueError('mm-split requires args providing train/val/test index file paths.')
        # Predetermined MatchMaker splits: one index file per subset.
        train_indices = np.loadtxt(args.train_index, dtype=int)
        val_indices = np.loadtxt(args.val_index, dtype=int)
        test_indices = np.loadtxt(args.test_index, dtype=int)

        train = [data[i] for i in train_indices]
        valid = [data[i] for i in val_indices]
        test = [data[i] for i in test_indices]

        return SynergyDataset(train), SynergyDataset(valid), SynergyDataset(test)

    elif split_type == 'random':
        indices = list(range(len(data)))
        random.shuffle(indices)

        train_size = int(sizes[0] * len(data))
        train_val_size = int((sizes[0] + sizes[1]) * len(data))

        train = [data[i] for i in indices[:train_size]]
        val = [data[i] for i in indices[train_size:train_val_size]]
        test = [data[i] for i in indices[train_val_size:]]

        return SynergyDataset(train), SynergyDataset(val), SynergyDataset(test)

    else:
        raise ValueError(f'split_type "{split_type}" not supported.')
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,866 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /data_scripts/data.py | import threading
from collections import OrderedDict
import csv
from logging import Logger
import pickle
from random import Random
from typing import Dict, Iterator, List, Optional, Union
from torch.utils.data import Dataset, DataLoader, Sampler
import os
from rdkit import Chem
import numpy as np
from tqdm import tqdm
import pickle
import pandas as pd
from .features_generators import get_features_generator, FeaturesGenerator
from .featurization import BatchMolGraph, MolGraph
from .scaler import StandardScaler
# from .utils import load_features
# from .data import MoleculeDatapoint, MoleculeDataset
# from .scaffold import log_scaffold_stats, scaffold_split
from args import TrainArgs
# from chemprop.features import load_features, load_valid_atom_features
# Process-wide memoization caches keyed by SMILES string: RDKit parsing and
# MolGraph featurization are expensive, so results are shared across datapoints.
SMILES_TO_MOL: Dict[str, Chem.Mol] = {}
# NOTE(review): CACHE_GRAPH is not read anywhere in this chunk — confirm it
# still gates graph caching elsewhere before removing.
CACHE_GRAPH = True
SMILES_TO_GRAPH: Dict[str, MolGraph] = {}
class SynergyDatapoint:
    r"""One drug-pair / cell-line combination with its synergy target.

    Holds two :class:`Drug`\ s (optionally shuffled so drug order carries no
    signal), the cell-line feature vector, the synergy score, and raw copies of
    the target/cell-line so scalers can be re-applied later.
    """

    def __init__(self,
                 smiles: List[str],
                 cell_line: np.ndarray,
                 target: float,
                 features: np.ndarray = None,
                 features_generator: List[str] = None,
                 shuffler: Random = None):
        """
        :param smiles: SMILES strings of the drugs in the combination.
        :param cell_line: Feature vector describing the cell line.
        :param target: Synergy score for this combination.
        :param features: Per-drug pre-computed feature vectors.
        :param features_generator: Per-drug lists of feature-generator names.
        :param shuffler: Optional RNG used to randomize drug order.
        """
        # self.smiles = [smiles1, smiles2]
        # NOTE(review): zip() over features/features_generator means the None
        # defaults would raise TypeError — presumably callers always pass both;
        # confirm against the data-loading code.
        self.drugs = [Drug(s, f, fg) for s, f, fg in zip(smiles, features, features_generator)]
        if shuffler is not None:
            shuffler.shuffle(self.drugs)
        # self.drugs = [Drug(smiles1, features, features_generator),
        #               Drug(smiles2, features, features_generator)]
        self.cell_line = cell_line
        self.target = target
        self.weight = 1  # per-sample loss weight; see SynergyDataset.calculate_weights

        # Save a copy of the raw features and targets to enable different scaling later on
        self.raw_target, self.raw_cell_line = self.target, self.cell_line

    @property
    def smiles(self) -> List[str]:
        """The SMILES strings of this datapoint's drugs, in current drug order."""
        return [d.smile for d in self.drugs]

    @property
    def mol(self) -> List[Chem.Mol]:
        """Gets the corresponding list of RDKit molecules for the corresponding SMILES list."""
        mol = [SMILES_TO_MOL.get(s, Chem.MolFromSmiles(s)) for s in self.smiles]

        # Cache any freshly parsed molecules for reuse across datapoints.
        for s, m in zip(self.smiles, mol):
            SMILES_TO_MOL[s] = m

        return mol

    @property
    def features(self) -> List[np.ndarray]:
        """The (possibly scaled) per-drug feature vectors."""
        return [d.features for d in self.drugs]

    @property
    def raw_features(self) -> List[np.ndarray]:
        """The unscaled per-drug feature vectors."""
        return [d.raw_features for d in self.drugs]

    def reset_features_and_targets(self) -> None:
        """Resets the features and targets to their raw values."""
        self.target, self.cell_line = self.raw_target, self.raw_cell_line
        for drug in self.drugs:
            drug.reset_features()

    def set_targets(self, target: float):
        """
        Sets the (synergy) target of this datapoint.

        :param target: The new target value.
        """
        self.target = target

    def set_features(self, features: np.ndarray) -> None:
        """
        Sets the features of the molecules, one vector per drug.

        :param features: A sequence of 1D numpy arrays, aligned with self.drugs.
        """
        for d, f in zip(self.drugs, features):
            d.set_features(f)

    def set_cell_line_features(self, features: np.ndarray) -> None:
        """Replaces the cell-line feature vector (e.g. with a scaled version)."""
        self.cell_line = features
class Drug:
    """A single drug: its SMILES string plus an optional molecule-level feature
    vector, either supplied directly or computed by the named feature generators."""

    def __init__(self,
                 smile: str,
                 features: np.ndarray,
                 features_generator: List[str] = None):
        """
        :param smile: SMILES string of the drug.
        :param features: Pre-computed feature vector (mutually exclusive with generators).
        :param features_generator: Names of feature generators to run on the molecule.
        :raises ValueError: If both features and a features generator are provided.
        """
        self.smile = smile
        self.features_generator = features_generator
        self.features = features

        if features is not None and features_generator is not None:
            raise ValueError('Cannot provide both loaded features and a features generator.')

        # Generate features when generator names were provided instead of vectors.
        if self.features_generator is not None:
            self.features = []

            for fg in self.features_generator:
                features_generator = get_features_generator(fg)
                if self.mol is not None and self.mol.GetNumHeavyAtoms() > 0:
                    self.features.extend(features_generator(self.mol))
                # for H2
                elif self.mol is not None and self.mol.GetNumHeavyAtoms() == 0:
                    # not all features are equally long, so use methane as dummy molecule to determine length
                    self.features.extend(np.zeros(len(features_generator(Chem.MolFromSmiles('C')))))

            self.features = np.array(self.features)

        # Replace NaNs so scalers/models never see them.
        replace_token = 0
        if self.features is not None:
            self.features = np.where(np.isnan(self.features), replace_token, self.features)

        # Keep an unscaled copy so features can be re-scaled later.
        self.raw_features = self.features

    @property
    def mol(self) -> Chem.Mol:
        """The RDKit molecule for this drug's SMILES, memoized in SMILES_TO_MOL.

        (Return annotation corrected: a single Mol is returned, not a list.)
        """
        if self.smile in SMILES_TO_MOL:
            return SMILES_TO_MOL[self.smile]
        m = Chem.MolFromSmiles(self.smile)
        SMILES_TO_MOL[self.smile] = m
        return m

    def set_features(self, features:np.ndarray) -> None:
        """Replaces the current feature vector."""
        self.features = features

    def reset_features(self) -> None:
        """Restores the feature vector to its raw (pre-scaling) values."""
        self.features = self.raw_features
class SynergyDataset(Dataset):
    r"""A :class:`torch.utils.data.Dataset` of :class:`SynergyDatapoint`\ s with a
    lazily cached batch graph and optional scalers for features, cell lines and targets."""

    def __init__(self, data: List[SynergyDatapoint]):
        """
        :param data: The list of :class:`SynergyDatapoint`\ s backing the dataset.
        """
        self._data = data
        self._scaler = None        # StandardScaler for drug features
        self._cell_scaler = None   # StandardScaler for cell-line features
        self._batch_graph = None   # cached List[BatchMolGraph], built on demand
        self._random = Random()
        self._weights = None

    def smiles(self, flatten: bool = False) -> Union[List[str], List[List[str]]]:
        """
        Returns a list containing the SMILES list associated with each :class:`MoleculeDatapoint`.

        :param flatten: Whether to flatten the returned SMILES to a list instead of a list of lists.
        :return: A list of SMILES or a list of lists of SMILES, depending on :code:`flatten`.
        """
        if flatten:
            return [smiles for d in self._data for smiles in d.smiles]

        return [d.smiles for d in self._data]

    def mols(self, flatten: bool = False) -> Union[List[Chem.Mol], List[List[Chem.Mol]]]:
        """
        Returns a list of the RDKit molecules associated with each :class:`MoleculeDatapoint`.

        :param flatten: Whether to flatten the returned RDKit molecules to a list instead of a list of lists.
        :return: A list of SMILES or a list of lists of RDKit molecules, depending on :code:`flatten`.
        """
        if flatten:
            return [mol for d in self._data for mol in d.mol]

        return [d.mol for d in self._data]

    def batch_graph(self) -> List[BatchMolGraph]:
        r"""
        Constructs a :class:`~chemprop.features.BatchMolGraph` with the graph featurization of all the molecules.

        .. note::
           The :class:`~chemprop.features.BatchMolGraph` is cached in after the first time it is computed
           and is simply accessed upon subsequent calls to :meth:`batch_graph`. This means that if the underlying
           set of :class:`MoleculeDatapoint`\ s changes, then the returned :class:`~chemprop.features.BatchMolGraph`
           will be incorrect for the underlying data.

        :return: A list of :class:`~chemprop.features.BatchMolGraph` containing the graph featurization of all the
                 molecules in each :class:`MoleculeDatapoint`.
        """
        if self._batch_graph is None:
            self._batch_graph = []

            mol_graphs = []
            for d in self._data:
                mol_graphs_list = []
                for s, m in zip(d.smiles, d.mol):
                    # Reuse the module-level graph cache where possible.
                    if s in SMILES_TO_GRAPH:
                        mol_graph = SMILES_TO_GRAPH[s]
                    else:
                        mol_graph = MolGraph(m)
                        SMILES_TO_GRAPH[s] = mol_graph
                    mol_graphs_list.append(mol_graph)
                mol_graphs.append(mol_graphs_list)

            # One BatchMolGraph per drug position (transposes the per-datapoint lists).
            self._batch_graph = [BatchMolGraph([g[i] for g in mol_graphs]) for i in range(len(mol_graphs[0]))]

        return self._batch_graph

    def features(self) -> List[np.ndarray]:
        """
        Returns the features associated with each molecule (if they exist).

        :return: A list of 1D numpy arrays containing the features for each molecule or None if there are no features.
        """
        if len(self._data) == 0 or self._data[0].features is None:
            return None

        return [d.features for d in self._data]

    def targets(self) -> List[float]:
        """
        Returns the targets associated with each molecule.

        :return: A list of lists of floats (or None) containing the targets.
        """
        return [d.target for d in self._data]

    def features_size(self) -> int:
        """
        Returns the size of the additional features vector associated with the molecules.

        :return: The size of the additional features vector.
        """
        # NOTE(review): indexes features[1] (the second drug) — presumably both
        # drugs' feature vectors have equal length; confirm.
        return len(self._data[0].features[1]) if len(self._data) > 0 and self._data[0].features is not None else None

    def cell_lines(self) -> List[List[float]]:
        """
        Returns cell lines associated with the datapoint
        """
        return [d.cell_line for d in self._data]

    def cell_line_size(self) -> int:
        """
        Returns the size of the Cell line.
        """
        return len(self._data[0].cell_line) if len(self._data) > 0 else None

    def loss_weights(self) -> List[float]:
        """Returns the per-datapoint loss weights.

        (Return annotation corrected from ``None`` to ``List[float]``.)
        """
        return [d.weight for d in self._data]

    def calculate_weights(self) -> None:
        """Sets each datapoint's loss weight to ``log(target - min_target + e)``,
        which is monotonically increasing in the target, so high-synergy samples
        weigh more. (Return annotation corrected from ``float`` to ``None``.)
        """
        targets = [d.target for d in self._data]
        min_synergy = min(targets)
        for d in self._data:
            d.weight = np.log(d.target - min_synergy + np.e)

    def normalize_targets(self) -> StandardScaler:
        """
        Normalizes the targets of the dataset using a :class:`~chemprop.data.StandardScaler`.

        The :class:`~chemprop.data.StandardScaler` subtracts the mean and divides by the standard deviation
        for each task independently.

        This should only be used for regression datasets.

        :return: A :class:`~chemprop.data.StandardScaler` fitted to the targets.
        """
        targets = [[d.raw_target] for d in self._data]
        scaler = StandardScaler().fit(targets)
        scaled_targets = scaler.transform(targets).tolist()
        self.set_targets(scaled_targets)

        return scaler

    def set_targets(self, targets: List[List[Optional[float]]]) -> None:
        """
        Sets the targets for each molecule in the dataset. Assumes the targets are aligned with the datapoints.

        :param targets: A list of lists of floats (or None) containing targets for each molecule. This must be the
                        same length as the underlying dataset.
        """
        assert len(self._data) == len(targets)
        for i in range(len(self._data)):
            self._data[i].set_targets(targets[i][0])

    def normalize_features(self, scaler: StandardScaler = None, replace_nan_token: int = 0) -> StandardScaler:
        """
        Normalizes the features of the dataset using a :class:`~chemprop.data.StandardScaler`.

        The :class:`~chemprop.data.StandardScaler` subtracts the mean and divides by the standard deviation
        for each feature independently.

        If a :class:`~chemprop.data.StandardScaler` is provided, it is used to perform the normalization.
        Otherwise, a :class:`~chemprop.data.StandardScaler` is first fit to the features in this dataset
        and is then used to perform the normalization.

        :param scaler: A fitted :class:`~chemprop.data.StandardScaler`. If it is provided it is used,
                       otherwise a new :class:`~chemprop.data.StandardScaler` is first fitted to this
                       data and is then used.
        :param replace_nan_token: A token to use to replace NaN entries in the features.
        :return: A fitted :class:`~chemprop.data.StandardScaler`. If a :class:`~chemprop.data.StandardScaler`
                 is provided as a parameter, this is the same :class:`~chemprop.data.StandardScaler`. Otherwise,
                 this is a new :class:`~chemprop.data.StandardScaler` that has been fit on this dataset.
        """
        if len(self._data) == 0 or self._data[0].features is None:
            return None

        if scaler is not None:
            self._scaler = scaler

        elif self._scaler is None:
            # Fit on raw (unscaled) features so repeated calls stay consistent.
            features = np.vstack([d.raw_features for d in self._data])
            self._scaler = StandardScaler(replace_nan_token=replace_nan_token)
            self._scaler.fit(features)

        for d in self._data:
            d.set_features([self._scaler.transform(f) for f in d.features])

        return self._scaler

    def normalize_cell_lines(self, scaler: StandardScaler = None, replace_nan_token: int = 0) -> StandardScaler:
        """Normalizes the cell-line features, analogous to :meth:`normalize_features`."""
        if len(self._data) == 0 or self._data[0].cell_line is None:
            return None

        if scaler is not None:
            self._cell_scaler = scaler

        elif self._cell_scaler is None:
            cell_lines = [d.cell_line for d in self._data]
            self._cell_scaler = StandardScaler(replace_nan_token=replace_nan_token).fit(cell_lines)

        for d in self._data:
            # transform expects 2D input, hence the reshape/unwrap dance.
            d.set_cell_line_features(self._cell_scaler.transform(d.raw_cell_line.reshape(1, -1))[0])

        return self._cell_scaler

    def reset_features_and_targets(self) -> None:
        """Resets the features and targets to their raw values."""
        for d in self._data:
            d.reset_features_and_targets()

    def __len__(self) -> int:
        """
        Returns the length of the dataset (i.e., the number of molecules).

        :return: The length of the dataset.
        """
        return len(self._data)

    def __getitem__(self, item) -> Union[SynergyDatapoint, List[SynergyDatapoint]]:
        r"""
        Gets one or more :class:`MoleculeDatapoint`\ s via an index or slice.

        :param item: An index (int) or a slice object.
        :return: A :class:`MoleculeDatapoint` if an int is provided or a list of :class:`MoleculeDatapoint`\ s
                 if a slice is provided.
        """
        return self._data[item]
class SynergySampler(Sampler):
    """Index sampler for a :class:`SynergyDataset`, optionally reshuffling every epoch."""

    def __init__(self,
                 dataset: SynergyDataset,
                 shuffle: bool = False,
                 seed: int = 0):
        """
        :param dataset: The dataset whose indices are sampled.
        :param shuffle: Whether to shuffle the index order on each iteration.
        :param seed: Random seed. Only relevant when :code:`shuffle` is True.
        """
        # NOTE: super(Sampler, self) deliberately skips torch's Sampler.__init__
        # (kept from the original implementation).
        super(Sampler, self).__init__()

        self.dataset = dataset
        self.shuffle = shuffle
        self._random = Random(seed)
        self.length = len(self.dataset)

    def __iter__(self) -> Iterator[int]:
        """Yield dataset indices, shuffled in place when shuffling is enabled."""
        order = list(range(len(self.dataset)))
        if self.shuffle:
            self._random.shuffle(order)
        return iter(order)

    def __len__(self) -> int:
        """Number of indices produced per full iteration."""
        return self.length
def construct_molecule_batch(data: List[SynergyDatapoint]) -> SynergyDataset:
    r"""Collate a list of :class:`SynergyDatapoint`\ s into one :class:`SynergyDataset`.

    The batch graph is computed eagerly so the expensive featurization happens in
    the DataLoader worker rather than in the training loop.

    :param data: The datapoints belonging to one batch.
    :return: A :class:`SynergyDataset` wrapping the datapoints, with its
             BatchMolGraph already computed and cached.
    """
    batch = SynergyDataset(data)
    batch.batch_graph()  # Force computation and caching of the BatchMolGraph now.
    return batch
class SynergyDataLoader(DataLoader):
    """PyTorch :class:`DataLoader` specialised for :class:`SynergyDataset` batches."""

    def __init__(self,
                 dataset: SynergyDataset,
                 batch_size: int = 50,
                 num_workers: int = 8,
                 shuffle: bool = False,
                 seed: int = 0):
        """
        :param dataset: The :class:`SynergyDataset` to iterate over.
        :param batch_size: Number of datapoints per batch.
        :param num_workers: Worker processes used to build batches.
        :param shuffle: Whether to shuffle the data each epoch.
        :param seed: Random seed. Only relevant when :code:`shuffle` is True.
        """
        self._dataset = dataset
        self._batch_size = batch_size
        self._num_workers = num_workers
        self._shuffle = shuffle
        self._seed = seed
        self._context = None
        self._timeout = 0

        # Off the main thread, fork-based workers can deadlock: switch to the
        # forkserver start method and cap how long a worker may block.
        if self._num_workers > 0 and threading.current_thread() is not threading.main_thread():
            self._context = 'forkserver'  # In order to prevent a hanging
            self._timeout = 3600  # Just for sure that the DataLoader won't hang

        self._sampler = SynergySampler(
            dataset=self._dataset,
            shuffle=self._shuffle,
            seed=self._seed
        )

        super(SynergyDataLoader, self).__init__(
            dataset=self._dataset,
            batch_size=self._batch_size,
            sampler=self._sampler,
            num_workers=self._num_workers,
            collate_fn=construct_molecule_batch,
            multiprocessing_context=self._context,
            timeout=self._timeout
        )

    @property
    def targets(self) -> List[List[Optional[float]]]:
        """Targets of every datapoint, in sampler order.

        :raises ValueError: If the loader was constructed with :code:`shuffle=True`,
            since the sampler order would no longer match the extraction order.
        """
        if self._shuffle:
            raise ValueError('Cannot safely extract targets when class balance or shuffle are enabled.')

        return [self._dataset[index].target for index in self._sampler]

    @property
    def iter_size(self) -> int:
        """Number of datapoints covered by one full pass over this loader."""
        return len(self._sampler)

    def __iter__(self) -> Iterator[SynergyDataset]:
        r"""Iterate, yielding collated :class:`SynergyDataset` batches."""
        return super(SynergyDataLoader, self).__iter__()
def get_data(path: str,
             cell_lines: str,
             args: Union[TrainArgs] = None,
             skip_none_targets: bool = False,
             shuffle_drugs: bool = True,
             seed: int = 0) -> SynergyDataset:
    """Load a drug-synergy dataset from a CSV plus a row-aligned cell-line feature file.

    :param path: CSV with two SMILES columns (``args.smiles_columns``) and a target
                 column (``args.target_column``), one row per drug pair.
    :param cell_lines: CSV of cell-line feature vectors, row-aligned with ``path``.
    :param args: Args supplying column names, optional per-drug feature files
                 (``args.features_path``) and an optional pickle cache directory
                 (``args.datapoints_path``).
    :param skip_none_targets: NOTE(review): accepted but never used in this function.
    :param shuffle_drugs: Whether to pass a seeded ``Random`` to each datapoint
                          (used by the datapoint to shuffle the drug pair order).
    :param seed: Seed for the drug-shuffling RNG.
    :return: A :class:`SynergyDataset` with one datapoint per CSV row.
    :raises ValueError: If the cell-line file length does not match the dataset, or
                        if ``args.features_path`` does not list exactly two files.
    """
    dataset = pd.read_csv(path)
    targets = dataset[args.target_column].to_numpy()
    cell_line_data = np.loadtxt(cell_lines, delimiter=',')
    drug1 = dataset[args.smiles_columns[0]].to_numpy()
    drug2 = dataset[args.smiles_columns[1]].to_numpy()
    # One shared RNG so drug-order shuffling is reproducible for a given seed.
    shuffler = None
    if shuffle_drugs:
        shuffler = Random(seed)
    if targets.shape[0] != cell_line_data.shape[0]:
        raise ValueError('Cell line feature line should be the same size as dataset file')
    if args.features_path is not None:
        # One features file per drug; stacked to shape (n_files, n_rows, n_features).
        features_data = []
        for feat_path in args.features_path:
            features_data.append(np.loadtxt(feat_path, delimiter=',', dtype=np.float32))
        features_data = np.asarray(features_data)
    else:
        features_data = None
    if features_data is not None and len(features_data) != 2:
        raise ValueError('Only 2 files can be provided (one for each drug)')
    features_generator = [args.features_generator] * 2
    datapoints = []
    for i, (smile1, smile2, cell, target) in enumerate(zip(drug1, drug2, cell_line_data, targets)):
        # Completed datapoints are cached one-pickle-per-row when a cache dir is set,
        # presumably because datapoint construction is expensive — confirm.
        # NOTE(review): the cache is never invalidated, so stale pickles silently
        # override changed feature settings; pickle.load also executes arbitrary
        # code, so the cache directory must be trusted.
        if args.datapoints_path is not None and os.path.exists(os.path.join(args.datapoints_path, f'{i}.pickle')):
            with open(os.path.join(args.datapoints_path, f'{i}.pickle'), 'rb') as f:
                dp = pickle.load(f)
        else:
            dp = SynergyDatapoint([smile1, smile2], cell, target,
                                  features=features_data[:,i] if features_data is not None else [None, None],
                                  features_generator=features_generator, shuffler=shuffler)
            if args.datapoints_path is not None:
                with open(os.path.join(args.datapoints_path, f'{i}.pickle'), 'wb') as f:
                    pickle.dump(dp, f)
        datapoints.append(dp)
    data = SynergyDataset(datapoints)
    return data
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,867 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /utils.py | from typing import Any, Callable, List, Tuple, Union
from functools import wraps
from datetime import timedelta
import logging
import os
from time import time
from models import MatchMaker
from argparse import Namespace
import torch
import logging
from data_scripts import StandardScaler
from args import TrainArgs
def timeit(logger_name: str = None) -> Callable[[Callable], Callable]:
    """Create a decorator that wraps a function with a timer printing the elapsed time.

    :param logger_name: Name of the logger used to record output. If ``None``, the
                        elapsed time is written with :code:`print` instead.
    :return: A decorator which wraps a function with an elapsed-time report.
    """
    def timeit_decorator(func: Callable) -> Callable:
        """Wrap *func* so each call reports its (rounded-to-seconds) wall-clock time.

        :param func: The function to wrap with the timer.
        :return: The wrapped function.
        """
        @wraps(func)
        def wrap(*args, **kwargs) -> Any:
            start_time = time()
            result = func(*args, **kwargs)
            delta = timedelta(seconds=round(time() - start_time))
            # Resolve the output sink at call time so loggers configured after
            # decoration are still picked up.
            info = logging.getLogger(logger_name).info if logger_name is not None else print
            info(f'Elapsed time = {delta}')
            return result
        return wrap
    return timeit_decorator
    # Fix: removed an unreachable `pass` statement that followed the return.
def create_logger(name: str, save_dir: str = None, quiet: bool = False) -> logging.Logger:
    """Build (or fetch) a named logger with a console handler and optional file handlers.

    A logger is configured at most once per name: repeat calls return the cached
    instance untouched. New loggers accept everything (DEBUG) and do not propagate
    to the root logger. The console handler emits at DEBUG level, or only INFO and
    above when *quiet* is set. When *save_dir* is given, ``verbose.log`` captures
    all records and ``quiet.log`` captures only the important (INFO+) ones.

    :param name: The name of the logger.
    :param save_dir: Directory for the two log files; no files are written when ``None``.
    :param quiet: Whether the console handler should be quiet (INFO and above only).
    :return: The configured :class:`logging.Logger`.
    """
    # Reuse an existing logger rather than stacking duplicate handlers onto it.
    if name in logging.root.manager.loggerDict:
        return logging.getLogger(name)

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False

    console = logging.StreamHandler()
    console.setLevel(logging.INFO if quiet else logging.DEBUG)
    logger.addHandler(console)

    if save_dir is not None:
        # Verbose file first, then the INFO-filtered one (same order as before).
        for filename, level in (('verbose.log', logging.DEBUG), ('quiet.log', logging.INFO)):
            handler = logging.FileHandler(os.path.join(save_dir, filename))
            handler.setLevel(level)
            logger.addHandler(handler)

    return logger
def save_checkpoint(path: str,
                    model: MatchMaker,
                    scaler: StandardScaler = None,
                    features_scaler: StandardScaler = None,
                    cell_line_scaler: StandardScaler = None,
                    args: TrainArgs = None) -> None:
    """Serialize a model checkpoint (weights, scaler statistics, and args) to *path*.

    :param path: Path where the checkpoint will be saved.
    :param model: The :class:`MatchMaker` model whose weights are saved.
    :param scaler: Optional scaler fitted on the data/targets.
    :param features_scaler: Optional scaler fitted on the molecule features.
    :param cell_line_scaler: Optional scaler fitted on the cell-line features.
    :param args: The :class:`TrainArgs` the model was trained with.
    """
    def _scaler_state(s):
        # Persist only the fitted statistics; absent scalers are stored as None.
        return {'means': s.means, 'stds': s.stds} if s is not None else None

    # Convert args to a plain Namespace for backwards compatibility.
    namespace_args = Namespace(**args.as_dict()) if args is not None else None

    torch.save({
        'args': namespace_args,
        'state_dict': model.state_dict(),
        'data_scaler': _scaler_state(scaler),
        'features_scaler': _scaler_state(features_scaler),
        'cell_line_scaler': _scaler_state(cell_line_scaler),
    }, path)
def load_checkpoint(path: str,
                    device: torch.device = None,
                    logger: logging.Logger = None) -> MatchMaker:
    """Deserialize a :class:`MatchMaker` checkpoint produced by :func:`save_checkpoint`.

    :param path: Path where the checkpoint is saved.
    :param device: Device to move the model to; overrides the device stored in args.
    :param logger: Optional logger for recording output (falls back to :func:`print`).
    :return: The loaded :class:`MatchMaker` with its saved weights restored.
    """
    if logger is not None:
        debug, info = logger.debug, logger.info
    else:
        debug = info = print

    # Load tensors onto CPU first; the model is moved to the target device below.
    state = torch.load(path, map_location=lambda storage, loc: storage)

    # Rehydrate the saved Namespace into a TrainArgs, tolerating unknown fields.
    args = TrainArgs()
    args.from_dict(vars(state['args']), skip_unsettable=True)
    if device is not None:
        args.device = device

    # Rebuild the architecture from the args, then restore the weights verbatim.
    # (A partial, size-mismatch-tolerant loading path existed only as commented-out
    # code in the original and has been dropped: the state dict must match exactly.)
    model = MatchMaker(args)
    model.load_state_dict(state['state_dict'])

    if args.cuda:
        debug('Moving model to cuda')
        model = model.to(args.device)

    return model
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,868 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train/run_training.py | from logging import Logger
import os
from typing import Dict, List
import numpy as np
import pandas as pd
from tensorboardX import SummaryWriter
import torch
# from tqdm import trange
from torch.optim.lr_scheduler import ExponentialLR
import torch.nn as nn
from torch.optim import Adam, Optimizer
from models import MatchMaker
from models import NoamLR
from .train import train
from .evaluate import evaluate, evaluate_predictions
from .predict import predict
from args import TrainArgs
from constant import MODEL_FILE_NAME
from data_scripts import SynergyDataLoader, SynergyDataset, get_data, split_data
from utils import load_checkpoint, save_checkpoint
from ray import tune
# from data_scripts import get_class_sizes, set_cache_graph, split_data
# from chemprop.models import MoleculeModel
# from chemprop.nn_utils import param_count
# from chemprop.utils import build_optimizer, build_lr_scheduler, get_loss_func, load_checkpoint, makedirs, \
# save_checkpoint, save_smiles_splits
def run_training(args: TrainArgs,
                 data: SynergyDataset,
                 logger: Logger = None,
                 tuning: bool = None) -> Dict[str, float]:
    """Train a MatchMaker model on ``data``, early-stop on validation MSE, and
    evaluate the best checkpoint on the held-out test split.

    :param args: Training configuration (splits, scaling flags, optimizer/LR
                 settings, paths, device).
    :param data: The full dataset; it is split into train/val/test here.
    :param logger: Optional logger; falls back to :func:`print`.
    :param tuning: When truthy, checkpoints and metrics are reported to Ray Tune
                   each epoch.
    :return: Test-set metrics (metric name -> score) of the best model.
    """
    if logger is not None:
        debug, info = logger.debug, logger.info
    else:
        debug = info = print

    # Set pytorch seed for random initial weights
    torch.manual_seed(args.pytorch_seed)

    # Split data
    debug(f'Splitting data with seed {args.seed}')
    train_data, val_data, test_data = split_data(data=data, split_type=args.split_type,
                                                 sizes=args.split_sizes, seed=args.seed,
                                                 num_folds=args.num_folds, args=args, logger=logger)
    train_data.calculate_weights()

    # Fit feature / cell-line scalers on the training split only, then apply them
    # unchanged to validation and test.
    if args.features_scaling:
        features_scaler = train_data.normalize_features(replace_nan_token=0)
        val_data.normalize_features(features_scaler)
        test_data.normalize_features(features_scaler)
    else:
        features_scaler = None
    if args.cell_line_scaling:
        cell_scaler = train_data.normalize_cell_lines(replace_nan_token=0)
        val_data.normalize_cell_lines(cell_scaler)
        test_data.normalize_cell_lines(cell_scaler)
    else:
        cell_scaler = None

    args.train_data_size = len(train_data)
    debug(f'Total size = {len(data):,} | '
          f'train size = {len(train_data):,} | val size = {len(val_data):,} | test size = {len(test_data):,}')

    scaler = None  # Target normalization (train_data.normalize_targets()) is currently disabled.
    # Per-element losses so sample weights can be applied downstream.
    loss_func = nn.MSELoss(reduction='none')

    # Create data loaders
    train_data_loader = SynergyDataLoader(
        dataset=train_data,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        shuffle=True,
        seed=args.seed
    )
    val_data_loader = SynergyDataLoader(
        dataset=val_data,
        batch_size=args.batch_size,
        num_workers=args.num_workers
    )
    test_data_loader = SynergyDataLoader(
        dataset=test_data,
        batch_size=args.batch_size,
        num_workers=args.num_workers
    )

    # tensorboardX renamed the constructor kwarg from `logdir` to `log_dir`; catch
    # only the TypeError from the kwarg mismatch (was a bare `except`, which could
    # mask unrelated failures).
    try:
        writer = SummaryWriter(log_dir=args.save_dir)
    except TypeError:
        writer = SummaryWriter(logdir=args.save_dir)

    # Either resume from a checkpoint or build a fresh model.
    if args.checkpoint_path is not None:
        debug(f'Loading model from {args.checkpoint_path}')
        model = load_checkpoint(args.checkpoint_path)
    else:
        debug(f'Building model')
        model = MatchMaker(args)
    model.to(args.device)
    debug(model)

    optimizer = Adam(model.parameters())  # Adam defaults; NoamLR drives the LR per step.
    scheduler = NoamLR(
        optimizer=optimizer,
        warmup_epochs=[args.warmup_epochs],
        total_epochs=[args.epochs] * args.num_lrs,
        steps_per_epoch=args.train_data_size // args.batch_size,
        init_lr=[args.init_lr],
        max_lr=[args.max_lr],
        final_lr=[args.final_lr]
    )

    best_score = float('inf')
    best_epoch, n_iter = 0, 0
    # Early stopping: abort after `patience` epochs without a validation improvement.
    patience = 100
    patience_level = 0
    for epoch in range(args.epochs):
        debug(f'Epoch {epoch}')
        n_iter = train(
            model=model,
            data_loader=train_data_loader,
            loss_func=loss_func,
            optimizer=optimizer,
            scheduler=scheduler,
            args=args,
            n_iter=n_iter,
            logger=logger,
            writer=writer
        )
        val_scores = evaluate(
            model=model,
            data_loader=val_data_loader,
            scaler=scaler,
            logger=logger,
            device=args.device
        )
        for metric, score in val_scores.items():
            debug(f'Validation {metric} = {score:.6f}')
            writer.add_scalar(f'validation_{metric}', score, n_iter)
        patience_level += 1
        if tuning:
            # Report to Ray Tune so the scheduler (e.g. ASHA) can stop weak trials.
            with tune.checkpoint_dir(epoch) as checkpoint_dir:
                path = os.path.join(checkpoint_dir, "checkpoint")
                save_checkpoint(path, model, scaler, features_scaler, cell_scaler, args)
            tune.report(mse=val_scores['MSE'], pearson=val_scores['Pearson'])
        if val_scores['MSE'] < best_score:
            best_score, best_epoch = val_scores['MSE'], epoch
            save_checkpoint(os.path.join(args.save_dir, MODEL_FILE_NAME), model, scaler, features_scaler, cell_scaler, args)
            patience_level = 0
        if patience_level > patience:
            break

    # Evaluate on test set using model with best validation score
    info(f'Model best validation loss = {best_score:.6f} on epoch {best_epoch}')
    model = load_checkpoint(os.path.join(args.save_dir, MODEL_FILE_NAME), device=args.device, logger=logger)
    test_preds = predict(
        model=model,
        data_loader=test_data_loader,
        scaler=scaler,
        device=args.device
    )
    test_scores = evaluate_predictions(
        preds=test_preds,
        targets=test_data.targets(),
        logger=logger
    )
    for metric, scores in test_scores.items():
        info(f'Model test {metric} = {scores:.6f}')
        writer.add_scalar(f'test_{metric}', scores, 0)
    writer.close()

    # Persist raw test predictions next to the ground truth for later analysis.
    test_preds_dataframe = pd.DataFrame(data={
        'Synergy': test_data.targets(),
        'Predictions': test_preds})
    test_preds_dataframe.to_csv(os.path.join(args.save_dir, 'test_preds.csv'), index=False)

    # Fix: the function was annotated to return a Dict but fell off the end,
    # returning None; return the test metrics so callers (e.g. `model_scores`
    # in train.py) actually receive them.
    return test_scores
48,869 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train.py | from args import TrainArgs
from data_scripts.data import get_data
from utils import timeit, create_logger
from constant import TEST_SCORES_FILE_NAME, TRAIN_LOGGER_NAME
import os
import sys
from train import run_training
@timeit(logger_name=TRAIN_LOGGER_NAME)
def train(args: TrainArgs) -> None:
    """End-to-end training entry point: configure logging, load the dataset, run training.

    :param args: Parsed command-line training arguments.
    """
    logger = create_logger(name=TRAIN_LOGGER_NAME, save_dir=args.save_dir, quiet=args.quiet)
    if logger is not None:
        debug, info = logger.debug, logger.info
    else:
        debug = info = print

    # Record the exact invocation and configuration for reproducibility.
    debug('Command line')
    debug(f'python {" ".join(sys.argv)}')
    debug('Args')
    debug(args)
    args.save(os.path.join(args.save_dir, 'args.json'))

    debug('Loading data')
    data = get_data(
        path=args.data_path,
        cell_lines=args.cell_lines,
        args=args,
        skip_none_targets=True,
        shuffle_drugs=args.shuffle_input_drugs,
        seed=args.seed
    )
    args.features_size = data.features_size()
    args.cell_line_size = data.cell_line_size()
    # NOTE(review): the seed is reset to 0 here, so the data split inside
    # run_training always uses seed 0 regardless of args.seed — confirm intended.
    args.seed = 0

    model_scores = run_training(args, data, logger)
if __name__ == '__main__':
train(TrainArgs().parse_args()) | {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,870 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /hyperparameter_optimization.py | from ray import tune
from ray.tune.schedulers.async_hyperband import ASHAScheduler
from args import HyperoptArgs
from utils import timeit, create_logger
from copy import deepcopy
from train import run_training
from ray.tune.schedulers import AsyncHyperBandScheduler
from constant import HYPEROPT_LOGGER_NAME
from data_scripts import get_data
from ax.service.ax_client import AxClient
from ray.tune.suggest.ax import AxSearch
from ax import ParameterType
import os
import shutil
# SPACE = {
# pass
# }
@timeit(logger_name=HYPEROPT_LOGGER_NAME)
def hyperopt(args: HyperoptArgs) -> None:
    """Run Ax-driven hyperparameter search over run_training via Ray Tune.

    Loads the dataset once, then launches ``args.num_iters`` trials; each trial
    deep-copies the args, overwrites them with the sampled config, and reports
    per-epoch MSE/Pearson back to Tune (ASHA scheduler, minimizing MSE). The best
    trial's checkpoint directory is copied to ``<save_dir>/model``.

    :param args: Hyperopt configuration (data paths, trial budget, per-trial resources).
    """
    logger = create_logger(name=HYPEROPT_LOGGER_NAME, save_dir=args.save_dir, quiet=True)
    data = get_data(
        path=args.data_path,
        cell_lines=args.cell_lines,
        args=args,
        skip_none_targets=True
    )
    args.features_size = data.features_size()
    args.cell_line_size = data.cell_line_size()
    def objective(config, data=None):
        # One Tune trial: clone the base args, apply the sampled hyperparameters,
        # and train (run_training reports metrics itself when tuning=True).
        hyper_args = deepcopy(args)
        # Undo any scaling a previous trial applied to the shared dataset object.
        data.reset_features_and_targets()
        # NOTE(review): setattr silently creates attributes that don't exist on the
        # args object, so misspelled parameter names below would go unnoticed.
        for k, v in config.items():
            setattr(hyper_args, k, v)
        run_training(hyper_args, data, tuning=True)
    ax = AxClient(enforce_sequential_optimization=False)
    ax.create_experiment(
        name="hyper_search",
        parameters=[
            {"name": "init_lr", "type": "range", "bounds": [1e-6, 1e-2], "log_scale": True},
            {"name": "depth", "type": "range", "bounds": [2, 6], 'value_type': 'int'},
            {"name": "batch_size", "type": "choice", "values": [2**x for x in range(3, 8)]},
            {"name": "hidden_size", "type": "choice", "values": [x*100 for x in range(3, 25)]},
            # NOTE(review): "dropoud", "mm_dropoud", "mm_in_dropoud" look like typos
            # of "dropout" — combined with the setattr above, the real dropout
            # settings may never actually be tuned. Confirm against TrainArgs.
            {"name": "dropoud", "type": "choice", "values": [x/20 for x in range(0,12)]},
            {"name": "mm_dropoud", "type": "choice", "values": [x/20 for x in range(0,12)]},
            {"name": "mm_in_dropoud", "type": "choice", "values": [x/20 for x in range(0,12)]},
            # NOTE(review): "dns_num_layers" vs "dsn_hidden_size" — one of the two
            # prefixes is presumably a typo; confirm which attribute TrainArgs defines.
            {"name": "dns_num_layers", "type": "range", "bounds": [1, 6], 'value_type': 'int'},
            {"name": "dsn_hidden_size", "type": "choice", "values": [x*100 for x in range(3, 25)]},
            {"name": "spn_num_layers", "type": "range", "bounds": [1, 6], 'value_type': 'int'},
            {"name": "spn_hidden_size", "type": "choice", "values": [x*100 for x in range(3, 25)]},
        ],
        objective_name="mse",
        minimize=True,
    )
    results = tune.run(
        tune.with_parameters(objective, data=data),
        num_samples = args.num_iters,
        search_alg=AxSearch(
            ax_client=ax,
            mode='min'
        ),
        # metric='mse',
        resources_per_trial={'cpu':args.resource_cpu, 'gpu': args.resource_gpu},
        # mode='min',
        scheduler=ASHAScheduler(
            metric='mse',
            mode='min',
            max_t=args.epochs
        )
    )
    best_trial = results.get_best_trial("mse", "min", "last")
    print("Best trial config: {}".format(best_trial.config))
    print("Best trial final validation loss: {}".format(
        best_trial.last_result["mse"]))
    print("Best trial final validation accuracy: {}".format(
        best_trial.last_result["pearson"]))
    logger.info("Best trial config: {}".format(best_trial.config))
    # Preserve the winning trial's checkpoint alongside the hyperopt logs.
    best_checkpoint_dir = best_trial.checkpoint.value
    shutil.copytree(best_checkpoint_dir, os.path.join(args.save_dir, 'model'))
# Script entry point: parse hyperopt CLI arguments and launch the search.
if __name__ == '__main__':
    hyperopt(HyperoptArgs().parse_args())
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,871 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /data_scripts/__init__.py | from .data import (
SynergyDataset,
SynergyDatapoint,
SynergyDataLoader,
SynergySampler,
get_data,
BatchMolGraph,
)
from .featurization import get_atom_fdim, get_bond_fdim, mol2graph
#get_atom_fdim, get_bond_fdim, mol2graph
from .utils import split_data
from .scaler import StandardScaler | {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,872 | NKI-AI/dmpnn-matchmaker | refs/heads/master | /train/__init__.py | from .run_training import run_training
| {"/train/predict.py": ["/data_scripts/__init__.py", "/models/__init__.py"], "/models/__init__.py": ["/models/model.py"], "/train/evaluate.py": ["/train/predict.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/train/train.py": ["/args.py", "/data_scripts/__init__.py", "/models/__init__.py"], "/models/model.py": ["/args.py", "/data_scripts/__init__.py"], "/data_scripts/utils.py": ["/data_scripts/data.py", "/args.py"], "/data_scripts/data.py": ["/args.py"], "/utils.py": ["/models/__init__.py", "/data_scripts/__init__.py", "/args.py"], "/train/run_training.py": ["/models/__init__.py", "/train/train.py", "/train/evaluate.py", "/train/predict.py", "/args.py", "/data_scripts/__init__.py", "/utils.py"], "/train.py": ["/args.py", "/data_scripts/data.py", "/utils.py"], "/hyperparameter_optimization.py": ["/args.py", "/utils.py", "/train.py", "/data_scripts/__init__.py"], "/data_scripts/__init__.py": ["/data_scripts/data.py", "/data_scripts/utils.py"], "/train/__init__.py": ["/train/run_training.py"]} |
48,873 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/navigation.py | import os
# Silence TensorFlow's C++ logging (3 = errors only). Set before the
# tensorflow import below so it takes effect.
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
import sys
import numpy as np
import tensorflow as tf
from rl.distributions.categorical import CategoricalPd
class normc_initializer(tf.keras.initializers.Initializer):
    """Initializer drawing Gaussian weights rescaled so each slice along `axis`
    has L2 norm `std` (the "normc" scheme commonly used for policy networks)."""

    def __init__(self, std=1.0, axis=0):
        self.std = std    # target norm of each slice
        self.axis = axis  # axis along which slices are normalized

    def __call__(self, shape, dtype=None):
        # `dtype` is accepted for Initializer API compatibility but ignored;
        # the result is always float32.
        weights = np.random.randn(*shape).astype(np.float32)
        norms = np.sqrt(np.square(weights).sum(axis=self.axis, keepdims=True))
        weights *= self.std / norms
        return tf.constant(weights)
class Navigation(tf.keras.Model):
    """Actor-critic policy network for navigation.

    A shared MLP trunk feeds two branches: a policy branch with six independent
    21-way categorical heads (x1/x2, y1/y2, w1/w2) and a value branch producing a
    scalar state-value estimate. The head names suggest x, y and yaw components
    for two action slots — TODO confirm the action encoding against the environment.
    """

    def __init__(self, batch_size=1, training=True):
        # batch_size: only used by call_build() to trace the network with dummy input.
        # training: stored but not read anywhere in this class — NOTE(review) confirm
        #           whether external code consumes it or it is dead.
        super(Navigation, self).__init__()
        self.batch_size = batch_size
        self.training = training
        # Helper providing sample / neglogp / entropy over categorical logits.
        self.categoricalPd = CategoricalPd()
        # Shared trunk: five 128-unit tanh layers feeding both branches.
        self.core = tf.keras.Sequential([
            tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
            tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
            tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
            tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
            tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01))
        ])
        with tf.name_scope("xyyaw"):
            # Policy-branch trunk (same shape as the shared trunk).
            self.act_core = tf.keras.Sequential([
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01))
            ])
            # One 21-way logit head per action component.
            self.logits_x1 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_x1")
            self.logits_x2 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_x2")
            self.logits_y1 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_y1")
            self.logits_y2 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_y2")
            self.logits_w1 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_w1")
            self.logits_w2 = tf.keras.layers.Dense(21,
                                                   kernel_initializer=normc_initializer(0.01),
                                                   name="logits_w2")
        with tf.name_scope("value"):
            # Value-branch trunk.
            self.val_core = tf.keras.Sequential([
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01)),
                tf.keras.layers.Dense(128, activation=tf.nn.tanh, kernel_initializer=normc_initializer(0.01))
            ])
            # Scalar state-value head; larger init scale (1.0) than the policy heads.
            self.value = tf.keras.layers.Dense(1, name="value", activation=None, kernel_initializer=normc_initializer(1.0))

    @tf.function
    def call(self, obs):
        """Forward pass.

        :param obs: Batch of flat observations; call_build() traces the model with
                    shape (batch_size, 16) — assumed fixed width, confirm upstream.
        :return: ``(actions, neglogp, entropy, value, logits)``: dicts of sampled
                 actions and raw logits keyed by head name, the negative
                 log-probability and entropy summed over the six heads, and the
                 (batch,) value estimate.
        """
        core_output = self.core(obs)
        with tf.name_scope("xyyaw"):
            act_core = self.act_core(core_output)
            # Per-head logits...
            logit_x1 = self.logits_x1(act_core)
            logit_x2 = self.logits_x2(act_core)
            logit_y1 = self.logits_y1(act_core)
            logit_y2 = self.logits_y2(act_core)
            logit_w1 = self.logits_w1(act_core)
            logit_w2 = self.logits_w2(act_core)
            # ...and one categorical sample from each head.
            sampled_x1 = self.categoricalPd.sample(logit_x1)
            sampled_x2 = self.categoricalPd.sample(logit_x2)
            sampled_y1 = self.categoricalPd.sample(logit_y1)
            sampled_y2 = self.categoricalPd.sample(logit_y2)
            sampled_w1 = self.categoricalPd.sample(logit_w1)
            sampled_w2 = self.categoricalPd.sample(logit_w2)
        with tf.name_scope('value'):
            val_core = self.val_core(core_output)
            value = self.value(val_core)[:, 0] # flatten value otherwise it might broadcast
        actions = {
            'x1': sampled_x1,
            'x2': sampled_x2,
            'y1': sampled_y1,
            'y2': sampled_y2,
            'w1': sampled_w1,
            'w2': sampled_w2
        }
        logits = {
            'x1': logit_x1,
            'x2': logit_x2,
            'y1': logit_y1,
            'y2': logit_y2,
            'w1': logit_w1,
            'w2': logit_w2
        }
        # The heads are sampled independently, so the joint -log p is the sum
        # of the per-head terms.
        neglogp = (
            self.categoricalPd.neglogp(logit_x1, sampled_x1) +
            self.categoricalPd.neglogp(logit_x2, sampled_x2) +
            self.categoricalPd.neglogp(logit_y1, sampled_y1) +
            self.categoricalPd.neglogp(logit_y2, sampled_y2) +
            self.categoricalPd.neglogp(logit_w1, sampled_w1) +
            self.categoricalPd.neglogp(logit_w2, sampled_w2)
        )
        # Likewise, total entropy is the sum of the six head entropies.
        entropy = (
            self.categoricalPd.entropy(logit_x1) +
            self.categoricalPd.entropy(logit_x2) +
            self.categoricalPd.entropy(logit_y1) +
            self.categoricalPd.entropy(logit_y2) +
            self.categoricalPd.entropy(logit_w1) +
            self.categoricalPd.entropy(logit_w2)
        )
        return actions, neglogp, entropy, value, logits

    def call_build(self):
        """
        IMPORTANT: This function has to be edited so that the below input features
        have the same shape as the actual inputs, otherwise the weights would not
        be restored properly.
        """
        # Trace the model once with a dummy (batch_size, 16) observation so all
        # variables exist before checkpoint weights are loaded. The observation
        # width (16) is hard-coded here.
        self(np.zeros([self.batch_size, 16]))
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,874 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/tracker.py | import numpy as np
from copy import deepcopy
class Tracker:
    """Abstract tracker model; concrete subclasses supply the dynamics."""

    def __init__(self):
        # Dimension of the tracker state vector.
        self.size = 6

    def dynamics(self, x, u):
        """Advance state ``x`` under control ``u`` (subclass responsibility)."""
        raise NotImplementedError

    def optControl(self, x, rnext):
        """Optimal control from state ``x`` toward ``rnext`` (subclass responsibility)."""
        raise NotImplementedError
class Planner:
    """Abstract planner model; concrete subclasses supply dynamics and control."""

    def __init__(self):
        # Dimension of the planner state vector.
        self.size = 3

    def dynamics(self, x, u):
        """Advance planner state ``x`` under control ``u`` (subclass responsibility)."""
        raise NotImplementedError

    def control(self, x, goal):
        """Compute a control toward ``goal`` (subclass responsibility)."""
        raise NotImplementedError
class Relative:
    """Abstract relative (tracker-minus-planner) model; subclasses fill in behavior."""

    def __init__(self):
        # Dimension of the relative state vector.
        self.size = 3

    def dynamics(self, x, u):
        """Advance relative state ``x`` under control ``u`` (subclass responsibility)."""
        raise NotImplementedError

    def control(self, x, goal):
        """Compute a control toward ``goal`` (subclass responsibility)."""
        raise NotImplementedError
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,875 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/main.py | import os
# Silence TensorFlow C++ logging before TF is imported.
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
import sys
import time
import datetime
import numpy as np
from mpi4py import MPI
from copy import deepcopy
from env import make_env
from navigation import Navigation
# MPI topology: ranks 0-14 are rollout actors, rank 15 is the learner.
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
actors = [[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]]
learners = [15]
if rank >= 0 and rank <= 14:
    # Actors run on CPU only; hide GPUs before importing TensorFlow.
    os.environ["CUDA_VISIBLE_DEVICES"]="-1"
elif rank == 15:
    pass
# Imported after CUDA_VISIBLE_DEVICES is set so device masking takes effect.
import tensorflow as tf
# Training parameters
batch_size = nsteps = 256  # rollout length per actor == per-actor batch size
model_save_path = os.path.join("data", "nav_model")
optimizer_save_path = os.path.join("data", "optimizer")
record_dir = os.path.join("data", "record")
def actor():
    """Rollout worker.

    Loops forever: receives the latest policy weights from its learner,
    collects an `nsteps`-long trajectory from the environment, computes
    GAE advantages/returns, and sends the batch back over MPI.
    """
    print(f"STARTING ACTOR with rank {rank}")
    sys.stdout.flush()
    # GAE hyper-parameters
    lam = 0.95
    gamma = 0.99
    # Build network architecture (batch of 1, inference mode)
    nav = Navigation(1, training=False)
    nav.call_build()
    # Get agent type: the row of `actors` this rank belongs to
    agent_type = np.where(np.array(actors) == rank)[0][0]
    # Setup environment
    env = make_env()
    obs = env.reset()
    dones = False
    while True:
        # Block until the learner sends fresh weights for this iteration.
        weights = comm.recv(source=learners[agent_type])
        nav.set_weights(weights)
        mb_rewards = np.zeros([nsteps, 1], dtype=np.float32)
        mb_values = np.zeros([nsteps, 1], dtype=np.float32)
        mb_neglogpacs = np.zeros([nsteps, 1], dtype=np.float32)
        mb_dones = np.zeros([nsteps, 1], dtype=np.float32)
        mb_obs = np.zeros([nsteps, 16], dtype=np.float32)
        # One 21-way discrete head per action dimension, two robots (1/2).
        mb_actions = {
            'x1': np.zeros([nsteps, 1], dtype=np.int32),
            'x2': np.zeros([nsteps, 1], dtype=np.int32),
            'y1': np.zeros([nsteps, 1], dtype=np.int32),
            'y2': np.zeros([nsteps, 1], dtype=np.int32),
            'w1': np.zeros([nsteps, 1], dtype=np.int32),
            'w2': np.zeros([nsteps, 1], dtype=np.int32)
        }
        mb_logits = {
            'x1': np.zeros([nsteps, 21], dtype=np.float32),
            'x2': np.zeros([nsteps, 21], dtype=np.float32),
            'y1': np.zeros([nsteps, 21], dtype=np.float32),
            'y2': np.zeros([nsteps, 21], dtype=np.float32),
            'w1': np.zeros([nsteps, 21], dtype=np.float32),
            'w2': np.zeros([nsteps, 21], dtype=np.float32)
        }
        for i in range(nsteps):
            # Get actions of training agent
            actions, neglogp, entropy, value, logits = nav(np.expand_dims(obs, axis=0))
            mb_values[i] = value
            mb_neglogpacs[i] = neglogp
            mb_obs[i] = obs
            for k in actions.keys():
                mb_actions[k][i] = actions[k]
                mb_logits[k][i] = logits[k]
            mb_dones[i] = dones
            # Take actions in env and look at the results.
            # Map the discrete bin index {0..20} to a command in [-1, 1].
            actions = {k: (v[0] - 10) / 10 for k, v in actions.items()}
            agent_actions = np.array([[actions['x1'], actions['y1'], actions['w1']],
                                      [actions['x2'], actions['y2'], actions['w2']]])
            obs, rewards, dones, infos = env.step(agent_actions)
            # Handle rewards
            mb_rewards[i] = rewards
            if dones:
                obs = env.reset()
        # get last value for bootstrap
        _, _, _, last_values, _ = nav(np.expand_dims(obs, axis=0))
        # discount/bootstrap off value fn
        mb_returns = np.zeros_like(mb_rewards)
        mb_advs = np.zeros_like(mb_rewards)
        lastgaelam = 0
        # perform GAE calculation (backward pass over the rollout)
        for t in reversed(range(nsteps)):
            if t == nsteps - 1:
                nextnonterminal = 1.0 - dones
                nextvalues = last_values
            else:
                nextnonterminal = 1.0 - mb_dones[t+1]
                nextvalues = mb_values[t+1]
            delta = mb_rewards[t] + gamma * nextvalues * nextnonterminal - mb_values[t]
            mb_advs[t] = lastgaelam = delta + gamma * lam * nextnonterminal * lastgaelam
        mb_returns = mb_advs + mb_values
        # Send trajectory to learner (drop trailing singleton dims first)
        mb_values = np.squeeze(mb_values, axis=-1)
        mb_rewards = np.squeeze(mb_rewards, axis=-1)
        mb_neglogpacs = np.squeeze(mb_neglogpacs, axis=-1)
        mb_returns = np.squeeze(mb_returns, axis=-1)
        mb_dones = np.squeeze(mb_dones, axis=-1)
        trajectory = {
            'mb_obs': mb_obs,
            'mb_actions': mb_actions,
            'mb_logits': mb_logits,
            'mb_returns': mb_returns,
            'mb_dones': mb_dones,
            'mb_values': mb_values,
            'mb_neglogpacs': mb_neglogpacs,
            'mb_rewards': mb_rewards
        }
        comm.send(trajectory, dest=learners[agent_type])
def learner():
    """Central learner.

    Gathers trajectories from all actors of its agent type, runs several
    epochs of Truly-PPO SGD on the pooled batch, then broadcasts the updated
    weights back to the actors and checkpoints model/optimizer state.
    """
    print(f"STARTING LEARNER with rank {rank}")
    sys.stdout.flush()
    # Truly-ppo hyperparameters (KL-based rollback instead of ratio clipping)
    KLRANGE = 0.03
    slope_rollback = -5
    slope_likelihood = 1
    target_kl = 0.05
    # Learner hyperparameters
    ent_coef = 0.01
    vf_coef = 0.5
    CLIPRANGE = 0.2
    max_grad_norm = 5.0
    noptepochs = 4
    nbatch = batch_size * len(actors[0])
    batch_scale = 2
    nbatch_steps = nbatch // batch_scale  # minibatch size per SGD step
    # Build network architecture
    nav = Navigation(nbatch_steps, training=True)
    nav.call_build()
    optimizer = tf.keras.optimizers.Adam(learning_rate=3e-4, beta_1=0.9, beta_2=0.99, epsilon=1e-5)
    # Resume from checkpoint if one exists; the dummy apply_gradients call
    # creates the optimizer slots so set_weights can restore them.
    if os.path.isfile(model_save_path + ".npy"):
        weights = np.load(model_save_path + ".npy", allow_pickle=True)
        optimizer_weights = np.load(optimizer_save_path + ".npy", allow_pickle=True)
        grad_vars = nav.trainable_weights
        zero_grads = [tf.zeros_like(w) for w in grad_vars]
        optimizer.apply_gradients(zip(zero_grads, grad_vars))
        optimizer.set_weights(optimizer_weights)
        nav.set_weights(weights)
    # Get weights from file
    weights = nav.get_weights()
    # Get agent type
    agent_type = np.where(np.array(learners) == rank)[0][0]
    # Send agent to actor
    for actor in actors[agent_type]:
        comm.send(weights, dest=actor)
    # Truly PPO RL optimization loss function
    @tf.function
    def t_ppo_loss(b_obs, b_actions, b_logits, b_returns, b_dones, b_values, b_neglogpacs):
        # Stochastic selection
        inds = tf.range(nbatch)
        # Buffers for recording
        losses_total = []
        approxkls = []
        entropies = []
        # Start SGD
        for _ in range(noptepochs):
            inds = tf.random.shuffle(inds)
            for start in range(0, nbatch, nbatch_steps):
                end = start + nbatch_steps
                # Gather mini-batch
                mbinds = inds[start:end]
                mb_obs = tf.gather(b_obs, mbinds)
                mb_actions = {k: tf.gather(b_actions[k], mbinds) for k, v in b_actions.items()}
                mb_logits = {k: tf.gather(b_logits[k], mbinds) for k, v in b_logits.items()}
                mb_returns = tf.gather(b_returns, mbinds)
                mb_dones = tf.gather(b_dones, mbinds)
                mb_values = tf.gather(b_values, mbinds)
                mb_neglogpacs = tf.gather(b_neglogpacs, mbinds)
                with tf.GradientTape() as tape:
                    p_actions, p_neglogp, p_entropy, vpred, p_logits = nav(mb_obs)
                    # Batch normalize the advantages
                    advs = mb_returns - mb_values
                    advs = (advs - tf.reduce_mean(advs)) / (tf.math.reduce_std(advs) + 1e-8)
                    # Calculate neglogpac: joint neglogp of the *taken* actions
                    # under the current policy (heads are independent, so sum).
                    #neglogpac = nav.diagguass.neglogp(p_mean, p_logstd, mb_actions)
                    neglogpac = (
                        nav.categoricalPd.neglogp(p_logits['x1'], mb_actions['x1']) +
                        nav.categoricalPd.neglogp(p_logits['x2'], mb_actions['x2']) +
                        nav.categoricalPd.neglogp(p_logits['y1'], mb_actions['y1']) +
                        nav.categoricalPd.neglogp(p_logits['y2'], mb_actions['y2']) +
                        nav.categoricalPd.neglogp(p_logits['w1'], mb_actions['w1']) +
                        nav.categoricalPd.neglogp(p_logits['w2'], mb_actions['w2'])
                    )
                    # Calculate the entropy
                    entropy = tf.reduce_mean(p_entropy)
                    # Get the predicted value
                    vpredclipped = mb_values + tf.clip_by_value(vpred - mb_values, -CLIPRANGE, CLIPRANGE)
                    # Unclipped value
                    vf_losses1 = tf.square(vpred - mb_returns)
                    # Clipped value
                    vf_losses2 = tf.square(vpredclipped - mb_returns)
                    vf_loss = .5 * tf.reduce_mean(tf.maximum(vf_losses1, vf_losses2))
                    # KL between behavior-policy logits and current logits
                    all_logits = tf.concat([p_logits['x1'], p_logits['x2'], p_logits['y1'], p_logits['y2'], p_logits['w1'], p_logits['w2']], axis=-1)
                    all_taken_logits = tf.concat([mb_logits['x1'], mb_logits['x2'], mb_logits['y1'], mb_logits['y2'], mb_logits['w1'], mb_logits['w2']], axis=-1)
                    kl = nav.categoricalPd.kl(all_logits, all_taken_logits)
                    #kl = nav.diagguass.kl(p_mean, p_logstd, mb_mean, mb_logstd)
                    approxkl = tf.reduce_mean(kl)
                    # Early stopping
                    #if approxkl > 1.5 * target_kl:
                    #    break
                    # Calculate ratio (pi current policy / pi old policy)
                    ratio = tf.exp(mb_neglogpacs - neglogpac)
                    # Defining Loss = - J is equivalent to max J.
                    # Truly-PPO: roll the objective back when KL exceeds KLRANGE
                    # and the update would push further in the improving direction.
                    pg_targets = tf.where(
                        tf.logical_and( kl >= KLRANGE, ratio * advs > 1 * advs),
                        slope_likelihood * ratio * advs + slope_rollback * kl,
                        ratio * advs
                    )
                    pg_loss = -tf.reduce_mean(pg_targets)
                    # Total loss
                    loss = pg_loss - entropy * ent_coef + vf_loss * vf_coef
                # 1. Get the model parameters
                var = nav.trainable_variables
                grads = tape.gradient(loss, var)
                # 3. Calculate the gradients (global-norm clipped)
                grads, _grad_norm = tf.clip_by_global_norm(grads, max_grad_norm)
                grads_and_var = zip(grads, var)
                # zip aggregate each gradient with parameters associated
                optimizer.apply_gradients(grads_and_var)
                losses_total.append(loss)
                approxkls.append(approxkl)
                entropies.append(entropy)
        losses_total = tf.reduce_mean(losses_total)
        approxkls = tf.reduce_mean(approxkls)
        entropies = tf.reduce_mean(entropies)
        return losses_total, approxkls, entropies
    # Normal trajectory: pre-allocated pooled batch across all actors
    trajectory = {
        'mb_obs': np.empty((nbatch, 16), dtype=np.float32),
        'mb_actions': {
            'x1': np.empty((nbatch, 1), dtype=np.int32),
            'x2': np.empty((nbatch, 1), dtype=np.int32),
            'y1': np.empty((nbatch, 1), dtype=np.int32),
            'y2': np.empty((nbatch, 1), dtype=np.int32),
            'w1': np.empty((nbatch, 1), dtype=np.int32),
            'w2': np.empty((nbatch, 1), dtype=np.int32),
        },
        'mb_logits': {
            'x1': np.empty((nbatch, 21), dtype=np.float32),
            'x2': np.empty((nbatch, 21), dtype=np.float32),
            'y1': np.empty((nbatch, 21), dtype=np.float32),
            'y2': np.empty((nbatch, 21), dtype=np.float32),
            'w1': np.empty((nbatch, 21), dtype=np.float32),
            'w2': np.empty((nbatch, 21), dtype=np.float32),
        },
        'mb_returns': np.empty((nbatch,), dtype=np.float32),
        'mb_dones': np.empty((nbatch,), dtype=np.float32),
        'mb_values': np.empty((nbatch,), dtype=np.float32),
        'mb_neglogpacs': np.empty((nbatch,), dtype=np.float32),
        'mb_rewards': np.empty((nbatch,), dtype=np.float32)
    }
    # Start learner process loop
    start_time = time.time()
    itrs = 0
    while True:
        # Collect enough rollout to fill batch_size
        traj_size = 0
        for idx, actor in enumerate(actors[agent_type]):
            # Append to trajectory (clamped so the pooled batch never overflows)
            a_trajectory = comm.recv(source=actor)
            a_traj_size = a_trajectory['mb_returns'].shape[0]
            for k, v in trajectory.items():
                if k == 'mb_actions' or k == 'mb_logits':
                    for k2 in trajectory[k].keys():
                        trajectory[k][k2][traj_size:min(traj_size+a_traj_size, nbatch)] = a_trajectory[k][k2][0:max(0, nbatch-traj_size)]
                else:
                    trajectory[k][traj_size:min(traj_size+a_traj_size, nbatch)] = a_trajectory[k][0:max(0, nbatch-traj_size)]
            traj_size += min(a_traj_size, nbatch-traj_size)
        # Update Navigation when conditions are met
        b_obs = trajectory['mb_obs']
        b_actions = trajectory['mb_actions']
        b_logits = trajectory['mb_logits']
        b_dones = trajectory['mb_dones']
        b_values = trajectory['mb_values']
        b_neglogpacs = trajectory['mb_neglogpacs']
        b_returns = trajectory['mb_returns']
        b_rewards = trajectory['mb_rewards']
        # Start SGD and optimize model via Adam
        losses, approxkls, entropies = t_ppo_loss(b_obs, b_actions,
                                                  b_logits,
                                                  b_returns, b_dones,
                                                  b_values, b_neglogpacs)
        # Send agent to actor
        weights = nav.get_weights()
        for actor in actors[agent_type]:
            comm.send(weights, dest=actor)
        # Atomic-ish checkpointing: write to a tmp file, then rename over the old one.
        model_save_path_tmp = model_save_path + "_tmp"
        np.save(model_save_path_tmp, weights)
        if os.path.exists(model_save_path + ".npy"):
            os.remove(model_save_path + ".npy")
        os.rename(model_save_path_tmp + ".npy", model_save_path + ".npy")
        optimizer_save_path_tmp = optimizer_save_path + "_tmp"
        np.save(optimizer_save_path_tmp, optimizer.get_weights())
        if os.path.exists(optimizer_save_path + ".npy"):
            os.remove(optimizer_save_path + ".npy")
        os.rename(optimizer_save_path_tmp + ".npy", optimizer_save_path + ".npy")
        itrs += 1
        # Periodic timestamped snapshot for later evaluation.
        if itrs % 1000 == 0:
            curTime = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
            np.save(os.path.join(record_dir, curTime), weights)
        print(f"{itrs}: approxkl = {approxkls} ; loss = {losses} ; entropy = {entropies} ; reward = {np.mean(b_rewards, axis=0)} ; time = {time.time() - start_time}")
        sys.stdout.flush()
# Role dispatch by MPI rank: 0-14 run rollout actors, 15 runs the learner.
if rank >= 0 and rank <= 14:
    actor()
elif rank >= 15 and rank <= 15:
    learner()
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,876 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/rm_tp.py | import numpy as np
from copy import deepcopy
from scipy.io import loadmat
from reachability.tracker import Tracker, Planner, Relative
def normalize_angles(angles):
    """Wrap every entry of ``angles`` (an ndarray) into [-pi, pi].

    The input array is never mutated; a normalized copy is returned.
    """
    wrapped = angles.copy()
    if wrapped.size > 0:
        wrapped = np.mod(wrapped + np.pi, 2 * np.pi) - np.pi
        # Sanity check with a small tolerance for floating-point edge cases.
        assert -(np.pi + 1e-6) <= wrapped.min() and wrapped.max() <= (np.pi + 1e-6)
    return wrapped
class RmTracker(Tracker):
    """Tracker with state [x, vx, y, vy, theta] and double-integrator + yaw dynamics."""

    def __init__(self):
        # 5-D tracker state; integrated with a fixed Euler step.
        self.size = 5
        self.dt = 0.01

    def dynamics(self, x, u):
        """One Euler step: u = [ax (body), ay (body), yaw rate]."""
        nxt = deepcopy(x)
        cos_t = np.cos(x[4])
        sin_t = np.sin(x[4])
        # Body-frame velocities rotated into the world frame.
        nxt[0] += (x[1] * cos_t - x[3] * sin_t) * self.dt
        nxt[1] += u[0] * self.dt
        nxt[2] += (x[1] * sin_t + x[3] * cos_t) * self.dt
        nxt[3] += u[1] * self.dt
        nxt[4] += u[2] * self.dt
        # NOTE: heading is deliberately left unwrapped here.
        return nxt
class RmPlanner(Planner):
    """Planner with planar [x, y, theta] kinematics."""

    def __init__(self):
        # 3-D planner state.
        self.size = 3

    def dynamics(self, x, u):
        """Apply the body-frame displacement ``u`` to planner state ``x``."""
        nxt = deepcopy(x)
        c = np.cos(x[2])
        s = np.sin(x[2])
        nxt[0] += u[0] * c - u[1] * s
        nxt[1] += u[0] * s + u[1] * c
        nxt[2] += u[2]
        return nxt

    def project(self, s):
        """Project the full tracker state onto planner coordinates [x, y, theta]."""
        return [s[0], s[2], s[4]]

    def control(self, p):
        """Return the next planner waypoint.

        NOTE(review): currently a constant target regardless of ``p`` — the
        commented-out expressions suggest an incremental plan was intended.
        """
        x = 5
        y = 5
        theta = 0.7  # p[2] + 4.16 * 0.01
        # theta = normalize_angles(np.array([theta]))[0]
        return [x, y, theta]
class RmRelative(Relative):
    """Relative (tracker minus planner) system used for error tracking.

    Relative state r = [x_rel, vx, y_rel, vy, theta_rel]; the control vector
    u stacks the tracker controls [ax, ay, w] with the planner controls
    [u3, u4, u5].
    """

    def __init__(self):
        # 5-D relative state.
        self.size = 5
        # Bounds for [ax, ay, w, planner u3, planner u4, planner u5].
        self.uMax = np.array([10.0, 10.0, 6.0, 2.0, 3.0, 4.16])
        self.uMin = -self.uMax

    def clip_state(self, r):
        """Clamp the relative state (in place) into the value-function grid domain."""
        r[0] = np.clip(r[0], -5, 5)
        r[1] = np.clip(r[1], -2, 2)
        r[2] = np.clip(r[2], -5, 5)
        r[3] = np.clip(r[3], -3, 3)
        r[4] = np.clip(r[4], -np.pi, np.pi)
        return r

    def state(self, s, p):
        """Relative state: subtract the planner pose from a copy of tracker state ``s``."""
        r = deepcopy(s)
        r[0] -= p[0]
        r[2] -= p[1]
        r[4] -= p[2]
        r = self.clip_state(r)
        return r

    def dynamics(self, r, u, d, x=None):
        """One step of the relative dynamics under control ``u`` and disturbance ``d``.

        BUGFIX: the original body referenced an undefined name ``x`` and raised
        NameError on every call. The velocity/heading terms now default to the
        relative state ``r`` (whose velocity components equal the tracker's);
        callers that intend the *absolute* tracker heading can pass the tracker
        state explicitly via the new optional ``x`` argument (backward
        compatible: existing 3-argument calls still work).
        """
        if x is None:
            # Default: read velocities/heading from the relative state itself.
            # TODO(review): confirm whether the absolute tracker heading was
            # intended here (r[4] is the heading *offset* from the planner).
            x = r
        rnext = deepcopy(r)
        rnext = np.clip(rnext, [-5, -2, -5, -3, -np.pi], [5, 2, 5, 3, np.pi])
        rnext[0] += x[1] - u[3]*np.cos(x[4]) + u[4]*np.sin(x[4]) + d[0]
        rnext[1] += u[0]
        rnext[2] += x[3] - u[3]*np.sin(x[4]) - u[4]*np.cos(x[4]) + d[1]
        rnext[3] += u[1]
        rnext[4] += u[2] - u[5] + d[2]
        return rnext

    def optControl(self, deriv, r, x):
        """Bang-bang optimal tracker control from the value-function gradient ``deriv``."""
        ax = (deriv[1]>=0)*self.uMin[0] + (deriv[1]<0)*self.uMax[0]
        ay = (deriv[3]>=0)*self.uMin[1] + (deriv[3]<0)*self.uMax[1]
        w = (deriv[4]>=0)*self.uMin[2] + (deriv[4]<0)*self.uMax[2]
        # Calculate opt ctrl
        uopt = [
            ax, ay, w
        ]
        return uopt
class Reach():
    """Lookup helpers for precomputed HJ reachability value functions.

    The value function for the 5-D relative system is stored decomposed into
    four 3-D subsystems (X1..X4) in a MATLAB .mat file, together with their
    spatial derivatives and tracking error bounds (TEB).
    """
    def __init__(self):
        # Precomputed value functions/derivatives exported from MATLAB.
        matlabf = "./reachability/RMAI_g_dt01_t5_medium_quadratic.mat"
        fst = loadmat(matlabf)
        eps = 0.1
        # Tracking error bound: worst case over the four subsystems, padded by eps.
        self.eb = np.max([fst['TEB_X1'], fst['TEB_X2'], fst['TEB_X3'], fst['TEB_X4']]) + eps
        self.vf_X1 = np.array(fst['data0_X1'])
        self.vf_X2 = np.array(fst['data0_X2'])
        self.vf_X3 = np.array(fst['data0_X3'])
        self.vf_X4 = np.array(fst['data0_X4'])
        self.vf_dX1 = fst['derivX1']
        self.vf_dX2 = fst['derivX2']
        self.vf_dX3 = fst['derivX3']
        self.vf_dX4 = fst['derivX4']
    def to_grid_index(self, r):
        """Map a continuous relative state onto integer grid indices.

        Ranges per dim: x,y in [-5,5], vx in [-2,2], vy in [-3,3], theta in
        [-pi,pi], each discretized to 60 intervals.
        NOTE(review): the upper edge maps to index 60 — assumes 61 grid points
        per dimension; verify against the .mat grid to rule out off-by-one.
        """
        r = deepcopy(r)
        r[0] = int(((r[0] + 5) / 10.0) * 60)
        r[1] = int(((r[1] + 2) / 4.0) * 60)
        r[2] = int(((r[2] + 5) / 10.0) * 60)
        r[3] = int(((r[3] + 3) / 6.0) * 60)
        r[4] = int(((r[4] + np.pi) / (2*np.pi)) * 60)
        return r
    def check_on_boundary(self, r):
        """True when the max subsystem value at grid index ``r`` reaches the error bound."""
        r_int = [int(i) for i in r]
        vf_X1_eb = self.vf_X1[r_int[0], r_int[1], r_int[4]]
        vf_X2_eb = self.vf_X2[r_int[0], r_int[3], r_int[4]]
        vf_X3_eb = self.vf_X3[r_int[1], r_int[2], r_int[4]]
        vf_X4_eb = self.vf_X4[r_int[2], r_int[3], r_int[4]]
        # The full value function is the max over the decomposed subsystems.
        vf_eb = np.max([vf_X1_eb, vf_X2_eb, vf_X3_eb, vf_X4_eb])
        if vf_eb >= self.eb:
            return True
        else:
            return False
    def control(self, s, pnext):
        """Simple proportional control from tracker state ``s`` toward planner point ``pnext``."""
        dx = pnext[0] - s[0]
        dy = pnext[1] - s[2]
        dw = pnext[2] - s[4]
        ax = 5.0 * dx
        ay = 5.0 * dy
        # NOTE(review): heading error is returned unscaled, unlike ax/ay.
        return [ax, ay, dw]
    def get_derivs(self, r):
        """Gradient of the decomposed value function at grid index ``r``.

        Each component is taken from whichever subsystem attains the larger
        value there (max-decomposition gradient selection).
        """
        r_int = [int(i) for i in r]
        # All x deriv
        if self.vf_X1[r_int[0], r_int[1], r_int[4]] >= self.vf_X2[r_int[0], r_int[3], r_int[4]]:
            x_deriv = self.vf_dX1[0][0][r_int[0], r_int[1], r_int[4]]
        else:
            x_deriv = self.vf_dX2[0][0][r_int[0], r_int[3], r_int[4]]
        # All vx deriv
        if self.vf_X1[r_int[0], r_int[1], r_int[4]] >= self.vf_X3[r_int[1], r_int[2], r_int[4]]:
            vx_deriv = self.vf_dX1[1][0][r_int[0], r_int[1], r_int[4]]
        else:
            vx_deriv = self.vf_dX3[1][0][r_int[1], r_int[2], r_int[4]]
        # All y deriv
        if self.vf_X3[r_int[1], r_int[2], r_int[4]] >= self.vf_X4[r_int[2], r_int[3], r_int[4]]:
            y_deriv = self.vf_dX3[0][0][r_int[1], r_int[2], r_int[4]]
        else:
            y_deriv = self.vf_dX4[0][0][r_int[2], r_int[3], r_int[4]]
        # All vy deriv
        if self.vf_X2[r_int[0], r_int[3], r_int[4]] >= self.vf_X4[r_int[2], r_int[3], r_int[4]]:
            vy_deriv = self.vf_dX2[1][0][r_int[0], r_int[3], r_int[4]]
        else:
            vy_deriv = self.vf_dX4[1][0][r_int[2], r_int[3], r_int[4]]
        # All theta deriv: pick the subsystem with the max value.
        theta_idx = np.argmax([
            self.vf_X1[r_int[0], r_int[1], r_int[4]],
            self.vf_X2[r_int[0], r_int[3], r_int[4]],
            self.vf_X3[r_int[1], r_int[2], r_int[4]],
            self.vf_X4[r_int[2], r_int[3], r_int[4]]
        ])
        theta_deriv = [
            self.vf_dX1[2][0][r_int[0], r_int[1], r_int[4]],
            self.vf_dX2[2][0][r_int[0], r_int[3], r_int[4]],
            self.vf_dX3[2][0][r_int[1], r_int[2], r_int[4]],
            self.vf_dX4[2][0][r_int[2], r_int[3], r_int[4]]
        ]
        theta_deriv = theta_deriv[theta_idx]
        deriv = [
            x_deriv,
            vx_deriv,
            y_deriv,
            vy_deriv,
            theta_deriv
        ]
        return deriv
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,877 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/env_pts.py | import math
import time
import numpy as np
import cv2
from copy import deepcopy
class Environment():
    """2-vs-2 navigation environment on an 8080 x 4480 (width x height, mm) arena.

    Positions are stored as [row(y), col(x), theta]; the occupancy grid is
    indexed grid[y, x]. See __init__ docstring for cell codes.
    """
    def __init__(self, MAX_timestep=10000, visualize=False):
        """
        Grid cell codes:
        0 = floor
        1 = obstacles
        2 = opponent
        3 = agents
        4 = tracker error bound
        5 = opponent min error bound
        6 = opponent max error bound
        """
        self.MAX_timestep = MAX_timestep
        self.visualize = visualize
        self.safety_bound = 100.0 # tracker error bound
        self.min_opponent_bound = 100 # min opponent error bound
        self.max_opponent_bound = 600 # opponent error bound
        self.agent_width = 450
        self.agent_height = 600
        self.dt = 0.002 # seconds for each timestep
        # Per-agent [vx (m/s), vy (m/s), yaw rate] command limits.
        self.action_range = np.array([[2.0, 3.0, 4.16], [2.0, 3.0, 4.16]])
    def _get_placement_grid(self):
        """Build the static obstacle grid (B1..B9), each padded with the safety bound."""
        placement_grid = np.zeros((4480, 8080))
        eps = round(self.safety_bound)
        # B1
        # NOTE(review): left bound of the second axis omits eps, unlike B4 —
        # the safety padding is asymmetric here; confirm intended.
        pos = [3380, 500]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(500):pos[1]+(500+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(500):pos[1]+(500)] = 1
        # B2
        pos = [2240, 1900]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(400+eps):pos[1]+(400+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(400):pos[1]+(400)] = 1
        # B3
        # NOTE(review): lower bound of the first axis omits eps (asymmetric padding).
        pos = [500, 1600]
        placement_grid[pos[0]-(500):pos[0]+(500+eps), pos[1]-(100+eps):pos[1]+(100+eps)] = 4
        placement_grid[pos[0]-(500):pos[0]+(500), pos[1]-(100):pos[1]+(100)] = 1
        # B4
        pos = [3445, 4040]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(500+eps):pos[1]+(500+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(500):pos[1]+(500)] = 1
        # B5
        pos = [2240, 4040]
        placement_grid[pos[0]-(170+eps):pos[0]+(170+eps), pos[1]-(170+eps):pos[1]+(170+eps)] = 4
        placement_grid[pos[0]-(170):pos[0]+(170), pos[1]-(170):pos[1]+(170)] = 1
        # B6
        pos = [1035, 4040]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(500+eps):pos[1]+(500+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(500):pos[1]+(500)] = 1
        # B7
        pos = [3980, 6480]
        placement_grid[pos[0]-(500+eps):pos[0]+(500+eps), pos[1]-(100+eps):pos[1]+(100+eps)] = 4
        placement_grid[pos[0]-(500):pos[0]+(500), pos[1]-(100):pos[1]+(100)] = 1
        # B8
        pos = [2240, 6180]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(400+eps):pos[1]+(400+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(400):pos[1]+(400)] = 1
        # B9
        pos = [1100, 7580]
        placement_grid[pos[0]-(100+eps):pos[0]+(100+eps), pos[1]-(500+eps):pos[1]+(500+eps)] = 4
        placement_grid[pos[0]-(100):pos[0]+(100), pos[1]-(500):pos[1]+(500)] = 1
        return placement_grid
    def get_random_opponent_position_and_fill(self, grid, width, height, boundary):
        """Rejection-sample a collision-free opponent pose and stamp it (codes 2/5) onto grid."""
        collided = True
        while collided:
            # pos = [y, x, theta]
            pos = [np.random.uniform() * 4480, np.random.uniform() * 8080, np.random.uniform() * 6.28319]
            pts_in_bound = self.points_in_rectangle(width, height, pos[1], pos[0], pos[2])
            collided = False
            for pts in pts_in_bound:
                x = pts[0]
                y = pts[1]
                if x < 0 or y < 0 or x >= 8080 or y >= 4480:
                    collided = True
                    break
                if grid[y, x] == 1 or grid[y, x] == 2:
                    collided = True
                    break
        # Stamp the padded footprint first so the body (2) overwrites its center.
        pts_in_bound2 = self.points_in_rectangle(width+boundary, height+boundary, pos[1], pos[0], pos[2])
        cols, rows = zip(*pts_in_bound2)
        grid[rows, cols] = 5
        cols, rows = zip(*pts_in_bound)
        grid[rows, cols] = 2
        return pos, grid
    def get_random_position_and_fill(self, grid, width, height):
        """Rejection-sample a free agent pose, stamp it (codes 3/4), and return its footprints."""
        collided = True
        while collided:
            pos = [np.random.uniform() * 4480, np.random.uniform() * 8080, np.random.uniform() * 6.28319]
            pts_in_bound = self.points_in_rectangle(width, height, pos[1], pos[0], pos[2])
            collided = False
            for pts in pts_in_bound:
                x = pts[0]
                y = pts[1]
                if x < 0 or y < 0 or x >= 8080 or y >= 4480:
                    collided = True
                    break
                if grid[y, x] != 0:
                    collided = True
                    break
        pts_in_bound2 = self.points_in_rectangle(width+self.safety_bound, height+self.safety_bound, pos[1], pos[0], pos[2])
        cols, rows = zip(*pts_in_bound2)
        grid[rows, cols] = 4
        cols, rows = zip(*pts_in_bound)
        grid[rows, cols] = 3
        return pos, grid, pts_in_bound, pts_in_bound2
    def get_random_position(self, grid):
        """Rejection-sample a free agent pose without modifying the grid."""
        collided = True
        while collided:
            pos = [np.random.uniform() * 4480, np.random.uniform() * 8080, np.random.uniform() * 6.28319]
            pts_in_bound = self.points_in_rectangle(self.agent_width, self.agent_height, pos[1], pos[0], pos[2])
            collided = False
            for pts in pts_in_bound:
                x = pts[0]
                y = pts[1]
                if x < 0 or y < 0 or x >= 8080 or y >= 4480:
                    collided = True
                    break
                if x >= 0 and y >= 0 and grid[y, x] != 0:
                    collided = True
                    break
        return pos
    def points_in_circle(self, radius, x0=0, y0=0):
        """Integer [x, y] points inside the circle, clipped to the arena bounds."""
        x_ = np.arange(x0 - radius - 1, x0 + radius + 1, dtype=int)
        y_ = np.arange(y0 - radius - 1, y0 + radius + 1, dtype=int)
        x, y = np.where((x_[:,np.newaxis] - x0)**2 + (y_ - y0)**2 <= radius**2)
        pts = []
        for x, y in zip(x_[x], y_[y]):
            pts.append([x, y])
        pts = np.array(pts).astype(int)
        inidx = np.all(np.logical_and(np.array([0, 0]) <= pts, pts < np.array([8080, 4480])), axis=1)
        pts = pts[inidx]
        return pts
    def points_in_rectangle(self, width, height, x0=0, y0=0, theta0=0):
        """Integer [x, y] points of a rotated rectangle footprint, clipped to the arena."""
        x = np.arange(-width//2 - 1, width//2 + 1, dtype=int)
        y = np.arange(-height//2 - 1, height//2 + 1, dtype=int)
        px, py = np.meshgrid(x, y)
        px = px.flatten()
        py = py.flatten()
        # Rotate the axis-aligned footprint by theta0, then translate.
        qx = np.cos(theta0) * px - np.sin(theta0) * py
        qy = np.sin(theta0) * px + np.cos(theta0) * py
        qx = qx + x0
        qy = qy + y0
        pts = np.stack([qx, qy], axis=-1).astype(int)
        pts = np.unique(pts, axis=0)
        inidx = np.all(np.logical_and(np.array([0, 0]) <= pts, pts < np.array([8080, 4480])), axis=1)
        pts = pts[inidx]
        return pts
    def check_collision(self, pos, grid, agent_idx):
        """True when the cached footprint of agent ``agent_idx`` overlaps any non-floor cell."""
        #pts_in_bound = self.points_in_rectangle(self.agent_width, self.agent_height, pos[1], pos[0], pos[2])
        pts_in_bound = self.agents_pts[agent_idx]
        inidx = np.all(np.logical_and(np.array([0, 0]) <= pts_in_bound, pts_in_bound < np.array([8080, 4480])), axis=1)
        pts_in_bound = pts_in_bound[inidx]
        pts_in_bound = np.round(pts_in_bound).astype(int)
        # NOTE(review): grid_bool is computed but never used — the Python loop
        # below redoes the same check element-wise; looks like leftover code.
        cols, rows = zip(*pts_in_bound)
        grid_bool = grid[rows, cols]
        grid_bool = grid_bool[grid_bool != 0]
        for pts in pts_in_bound:
            x = int(round(pts[0]))
            y = int(round(pts[1]))
            if grid[y, x] != 0:
                return True
        return False
    def superpose_agents_on_grid(self, grid):
        """Stamp agent footprints (codes 3/4) onto a copy-owned grid; returns the grid."""
        for agent_pos in self.agents_pos:
            pts = self.points_in_rectangle(self.agent_width, self.agent_height, agent_pos[1], agent_pos[0], agent_pos[2])
            pts2 = self.points_in_rectangle(self.agent_width + self.safety_bound, self.agent_height + self.safety_bound,
                                            agent_pos[1], agent_pos[0], agent_pos[2])
            cols, rows = zip(*pts2)
            grid[rows, cols] = 4
            # NOTE(review): index order is swapped here vs. the line above
            # (grid[cols, rows] instead of grid[rows, cols]) — likely a bug
            # that stamps the agent body transposed.
            cols, rows = zip(*pts)
            grid[cols, rows] = 3
        return grid
    def superpose_max_opponent_bound(self, grid):
        """Draw a ring (code 6) at ~90-100% of the max opponent bound around each opponent."""
        for opponent_pos in self.opponents_pos:
            pts = self.points_in_circle(self.max_opponent_bound, opponent_pos[1], opponent_pos[0])
            pts2 = self.points_in_circle(int(self.max_opponent_bound * 0.9), opponent_pos[1], opponent_pos[0])
            # Keep only the annulus: points in the outer disk but not the inner one.
            dims = np.maximum(pts2.max(0),pts.max(0))+1
            pts = pts[~np.in1d(np.ravel_multi_index(pts.T,dims),np.ravel_multi_index(pts2.T,dims))]
            cols, rows = zip(*pts)
            grid[rows, cols] = 6
        return grid
    def observation(self):
        """Flat observation: both agents' [y, x, theta] plus both opponents' [y, x]."""
        obs = np.array([self.agents_pos[0][0], self.agents_pos[0][1], self.agents_pos[0][2],
                        self.agents_pos[1][0], self.agents_pos[1][1], self.agents_pos[1][2],
                        self.opponents_pos[0][0], self.opponents_pos[0][1],
                        self.opponents_pos[1][0], self.opponents_pos[1][1]])
        return obs
    def dynamics(self, pos, action, idx):
        """Integrate agent ``idx`` one dt under body-frame action [vx, vy, yaw rate].

        Also shifts the cached footprint point clouds by the same displacement.
        """
        dx = action[0] * np.cos(pos[2]) + action[1] * np.sin(pos[2])
        dy = action[1] * np.cos(pos[2]) - action[0] * np.sin(pos[2])
        dtheta = action[2]
        dx *= self.dt
        dy *= self.dt
        dtheta *= self.dt
        new_theta = pos[2] + dtheta
        # NOTE(review): for new_theta < 0 this yields 2*pi - new_theta > 2*pi
        # (should presumably be 6.28319 + new_theta) — wrap looks wrong.
        if new_theta < 0:
            new_theta = 6.28319 - new_theta
        elif new_theta > 6.28319:
            new_theta = new_theta - 6.28319
        new_pos = [pos[0] + dy, pos[1] + dx, new_theta]
        self.agents_pts[idx][:, 0] += dx
        self.agents_pts[idx][:, 1] += dy
        self.agents_safety_pts[idx][:, 0] += dx
        self.agents_safety_pts[idx][:, 1] += dy
        return new_pos
    def reward(self):
        """Per-agent reward; returns (reward, done).

        NOTE(review): on collision a scalar -100.0 is returned instead of the
        per-agent list, and done stays False; also the "distance" below squares
        each coordinate before differencing, which is not Euclidean distance —
        confirm both are intended.
        """
        rew = [0.0 for _ in range(2)]
        grid = deepcopy(self.placement_grid)
        #grid = self.superpose_agents_on_grid(grid)
        for idx in range(2):
            # check for collision
            collided = self.check_collision(self.agents_pos[idx], grid, idx)
            if collided:
                return -100.0, False
            # check for distance with opponent agents
            for opponent_pos in self.opponents_pos:
                dist = np.linalg.norm(np.array(self.agents_pos[idx])[0:2]**2 - np.array(opponent_pos)[0:2]**2)
                # penalize for getting into max error-bound of opponent agents
                if dist <= self.max_opponent_bound:
                    rew[idx] += -(1.0 / dist)
            # reward for getting closer to goal
            dist_2_goal = np.linalg.norm(np.array(self.agents_pos[idx])[0:2]**2 - np.array(self.goals[idx])**2)
            rew[idx] += -dist_2_goal
        return rew, False
    def step(self, action):
        """Advance one timestep; returns (obs, reward, done, info).

        Note: reward is computed from the *pre-action* state, then the action
        (clipped and converted m/s -> mm/s) is applied.
        """
        rew, done = self.reward()
        # clip action
        action = np.clip(action, -self.action_range, self.action_range)
        action[:, 0:2] *= 1000.0
        for idx in range(2):
            self.agents_pos[idx] = self.dynamics(self.agents_pos[idx], action[idx], idx)
        self.timestep += 1
        if self.timestep >= self.MAX_timestep:
            done = True
        obs = self.observation()
        return obs, rew, done, dict()
    def reset(self):
        """Re-sample obstacles/agents/opponents/goals; returns the initial observation."""
        # Get placement grid
        grid = self._get_placement_grid()
        # Get agents' initial position; opponents are baked into the persistent
        # grid, agents only into a throwaway copy (so they can move freely).
        opponent1_pos, grid = self.get_random_opponent_position_and_fill(grid, self.agent_width, self.agent_height, self.min_opponent_bound)
        opponent2_pos, grid = self.get_random_opponent_position_and_fill(grid, self.agent_width, self.agent_height, self.min_opponent_bound)
        virtual_grid = deepcopy(grid)
        agent1_pos, virtual_grid, agent1_pts, agent1_safety_pts = self.get_random_position_and_fill(virtual_grid, self.agent_width, self.agent_height)
        agent2_pos, virtual_grid, agent2_pts, agent2_safety_pts = self.get_random_position_and_fill(virtual_grid, self.agent_width, self.agent_height)
        self.agents_pos = [agent1_pos, agent2_pos]
        self.opponents_pos = [opponent1_pos, opponent2_pos]
        self.agents_pts = [agent1_pts.astype(np.float32), agent2_pts.astype(np.float32)]
        self.agents_safety_pts = [agent1_safety_pts.astype(np.float32), agent2_safety_pts.astype(np.float32)]
        self.goals = [[np.random.uniform() * 4480, np.random.uniform() * 8080],
                      [np.random.uniform() * 4480, np.random.uniform() * 8080]]
        self.placement_grid = grid
        self.timestep = 0
        return self.observation()
    def render(self):
        """Draw the grid with OpenCV (downscaled 10x, colored by cell code)."""
        grid = deepcopy(self.placement_grid)
        grid = self.superpose_agents_on_grid(grid)
        grid = self.superpose_max_opponent_bound(grid)
        grid = np.stack([grid, grid, grid], axis=-1)
        grid[np.where((grid == [0, 0, 0]).any(axis=2))] = [169, 169, 169] # floor
        grid[np.where((grid == [1, 1, 1]).any(axis=2))] = [0, 0, 0] # obstacles
        grid[np.where((grid == [2, 2, 2]).any(axis=2))] = [255, 0, 0] # opponents
        grid[np.where((grid == [3, 3, 3]).any(axis=2))] = [0, 0, 255] # agents
        grid[np.where((grid == [4, 4, 4]).any(axis=2))] = [0, 165, 255] # obstacles' tracker bound
        grid[np.where((grid == [5, 5, 5]).any(axis=2))] = [0, 255, 0] # opponents' min bound
        grid[np.where((grid == [6, 6, 6]).any(axis=2))] = [128, 0, 128] # opponents' max bound
        grid = cv2.resize(grid, (808, 448), interpolation=cv2.INTER_AREA).astype(np.uint8)
        grid = cv2.flip(grid, 0) # flip because for opencv x-axis is flipped
        cv2.imshow("Grid", grid)
        cv2.waitKey(5)
def make_env(timesteps=10000, visualize=False):
    """Factory: build a fresh (not yet reset) Environment instance."""
    return Environment(timesteps, visualize=visualize)
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,878 | zy10zm/AI-Motion-Planning | refs/heads/master | /evaluate_rl_planner.py | import numpy as np
from rl.env import make_env
from rl.navigation import Navigation
# Evaluation script: run the trained navigation policy in the environment forever.
env = make_env(visualize=True)
obs = env.reset()
# Policy with batch size 1, inference mode; weights restored from training output.
nav = Navigation(1, False)
nav.call_build()
weights = np.load("./rl/data/nav_model.npy", allow_pickle=True)
nav.set_weights(weights)
while True:
    actions, neglogp, entropy, value, logits = nav(np.expand_dims(obs, axis=0))
    # Map each discrete bin index {0..20} to a command in [-1, 1].
    actions = {k: (v[0] - 10) / 10 for k, v in actions.items()}
    agent_actions = np.array([[actions['x1'], actions['y1'], actions['w1']],
                              [actions['x2'], actions['y2'], actions['w2']]])
    obs, rewards, dones, infos = env.step(agent_actions)
    env.render()
    if dones:
        obs = env.reset()
48,879 | zy10zm/AI-Motion-Planning | refs/heads/master | /simulate.py | import numpy as np
from copy import deepcopy
from reachability.rm_tp import RmPlanner, RmTracker, RmRelative, Reach
def make_planner_env():
    """Create the RL planner environment (import deferred to call time)."""
    from rl.env import make_env
    return make_env()
def make_tracker_env():
    """Create the ICRA simulation environment wrapped in an EnvHandler."""
    from environment.envs.icra import make_env
    from environment.envhandler import EnvHandler
    return EnvHandler(make_env())
class Simulator():
    """Closed-loop FaSTrack-style simulation: planner waypoints are tracked by
    a reachability-based controller inside the physics simulator."""

    def __init__(self):
        # environments
        self.simul_env = make_tracker_env()    # physics simulator
        self.planner_env = make_planner_env()  # lightweight planner env
        self.eps = 1e-5   # goal tolerance [m]
        self.dt = 0.01    # controller step [s]
        # models
        self.planner = RmPlanner()
        self.relative = RmRelative()
        self.reach = Reach()

    def get_goals(self):
        # Two random goals inside the 8080 x 4480 mm arena.
        goals = [
            [np.random.uniform() * 8080, np.random.uniform() * 4480],
            [np.random.uniform() * 8080, np.random.uniform() * 4480]
        ]
        return goals

    def get_planner_control(self, p, goal):
        # TEMPORARY
        # NOTE(review): ignores *goal* and mutates/returns *p* with heading
        # zeroed — placeholder until a real planner control is wired in.
        p[2] = 0
        return p

    def get_relative_state(self, s, pnext):
        # Tracker state expressed relative to the next planner waypoint.
        return self.relative.state(s, pnext)

    def simul_to_states(self, obs, u1, u2):
        """Convert raw simulator observations into 5-D tracker states.

        Positions are scaled mm -> m; velocities are re-derived from the
        last controls (the ``+=`` targets start at 0.0 every call, so it
        integrates a single step only).
        """
        s1 = [0.0 for _ in range(5)]
        s2 = [0.0 for _ in range(5)]
        s1[0] = obs[0] / 1000
        s1[1] += u1[0] * self.dt
        s1[2] = obs[1] / 1000
        s1[3] += u1[1] * self.dt
        s1[4] = obs[2]
        s2[0] = obs[6] / 1000
        s2[1] += u2[0] * self.dt
        s2[2] = obs[7] / 1000
        s2[3] += u2[1] * self.dt
        s2[4] = obs[8]
        return s1, s2

    def get_next_states(self, s1, s2, u1, u2):
        """Step the fine-grained simulator for one controller period and
        return the resulting tracker states."""
        # Rows: the two controlled agents, then two idle opponents.
        agent_actions = {'action_movement': np.array([ [s1[1] / 2.0, s1[3] / 3.0, u1[2] / 6.0],
                                                       [s2[1] / 2.0, s2[3] / 3.0, u2[2] / 6.0],
                                                       [0.0, 0.0, 0.0],
                                                       [0.0, 0.0, 0.0] ])}
        # Simulator ticks at 2 ms; repeat until one controller step elapsed.
        for _ in range(int(self.dt / 0.002)):
            obs, rewards, dones, infos = self.simul_env.step(agent_actions)
        s1, s2 = self.simul_to_states(obs, u1, u2)
        return s1, s2

    def simulate(self):
        """Run episodes forever: reset, then track planner waypoints until
        the exit condition below is met."""
        while True:
            obs = self.simul_env.reset()
            s1, s2 = self.simul_to_states(obs, [0, 0, 0], [0, 0, 0])
            s = [s1, s2]
            p = [self.planner.project(s[0]), self.planner.project(s[1])]
            goals = [
                [obs[18], obs[19]],
                [obs[20], obs[21]]
            ]
            # NOTE(review): joined with ``and``, so the loop exits as soon as
            # ANY single coordinate gets within eps — confirm whether ``or``
            # (all robots at their goals) was intended.
            while (abs(p[0][0] - goals[0][0]) >= self.eps and
                   abs(p[0][1] - goals[0][1]) >= self.eps and
                   abs(p[1][0] - goals[1][0]) >= self.eps and
                   abs(p[1][1] - goals[1][1]) >= self.eps):
                pnext = [self.get_planner_control(p[i], goals[i]) for i in range(2)]
                rnext = [self.get_relative_state(s[i], pnext[i]) for i in range(2)]
                rnext = [self.reach.to_grid_index(rnext[i]) for i in range(2)]
                # On the error-bound boundary use the optimal tracking control,
                # otherwise the simple proportional controller.
                u = [
                    self.relative.optControl(self.reach.get_derivs(rnext[i]), rnext[i], s[i])
                    if self.reach.check_on_boundary(rnext[i]) else self.reach.control(s[i], pnext[i])
                    for i in range(2)
                ]
                """
                if self.reach.check_on_boundary(rnext):
                    deriv = self.reach.get_derivs(rnext)
                    u = self.relative.optControl(deriv, rnext, s)
                else:
                    u = self.reach.control(s, pnext)
                """
                self.simul_env.render('human')
                # New states
                s[0], s[1] = self.get_next_states(s[0], s[1], u[0], u[1])
                p[0], p[1] = self.planner.project(s[0]), self.planner.project(s[1])
# Module entry point: build the simulator and run the closed loop forever.
simulator = Simulator()
simulator.simulate()
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,880 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/test_env.py | import numpy as np
import time
from copy import deepcopy
from env import make_env
def test():
    """Smoke-test the environment: a zero action should leave the pose
    (nearly) unchanged and a unit action should follow the dynamics model."""
    env = make_env()
    obs = env.reset()
    # Test zero action
    zero_action = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
    print(f"Before zero action = {obs}")
    new_obs, rew, done, _ = env.step(zero_action)
    # NOTE: obs is a numpy array, so ``obs == new_obs`` prints an elementwise
    # boolean array rather than a single True/False.
    print(f"After zero action = {new_obs}, is equal = {obs == new_obs}")
    obs = new_obs
    # Test dynamics
    dt = 0.002
    action = np.array([[1.0, 1.0, 1.0], [0.0, 0.0, 0.0]])
    new_pos = deepcopy(obs)
    # Expected next pose from the body-frame holonomic model.
    new_pos[0] = obs[0] + (1000*action[0, 0] * np.cos(obs[2]) + 1000*action[0, 1] * np.sin(obs[2])) * dt
    new_pos[1] = obs[1] + (1000*action[0, 1] * np.cos(obs[2]) - 1000*action[0, 0] * np.sin(obs[2])) * dt
    new_pos[2] = obs[2] + action[0, 2] * dt
    new_obs, rew, done, _ = env.step(action)
    print(f"Is done = {done}, Is dynamics correct = {new_pos == new_obs}")
def test_render():
    """Visual check: spin one robot in place and render until the episode ends."""
    env = make_env(10000, visualize=True)
    obs = env.reset()
    #while True:
    #    env.render()
    zero_action = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
    #start_time = time.time()
    new_obs, rew, done, _ = env.step(zero_action)
    # Pure rotation for agent 0; agent 1 stays idle.
    action = np.array([[0.0, 0.0, 4.0], [0.0, 0.0, 0.0]])
    while True:
        new_obs, rew, done, _ = env.step(action)
        env.render()
        if done:
            break
    #print(time.time() - start_time)
    #while True:
    #    env.render()
# Manual entry point: only the rendering smoke test is enabled.
#test()
test_render()
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,881 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/rm_tp copy.py | import numpy as np
from copy import deepcopy
from scipy.io import loadmat
from tracker import Tracker, Planner, Relative
class RmTracker(Tracker):
    """High-fidelity tracker model: decoupled double integrators in x/y plus
    a directly-driven heading, Euler-integrated with step ``dt``."""

    def __init__(self):
        self.size = 5    # state: [x, vx, y, vy, theta]
        self.dt = 0.01   # integration step [s]

    def dynamics(self, x, u):
        """One Euler step under control u = [ax, ay, wtheta]."""
        nxt = deepcopy(x)
        nxt[0] = nxt[0] + x[1] * self.dt
        nxt[1] = nxt[1] + u[0] * self.dt
        nxt[2] = nxt[2] + x[3] * self.dt
        nxt[3] = nxt[3] + u[1] * self.dt
        nxt[4] = nxt[4] + u[2] * self.dt
        return nxt
class RmPlanner(Planner):
    """Low-dimensional kinematic planner model with state [x, y, theta]."""

    def __init__(self):
        self.size = 3

    def dynamics(self, x, u):
        """Advance the planner state by a body-frame displacement u."""
        nxt = deepcopy(x)
        cos_t = np.cos(x[2])
        sin_t = np.sin(x[2])
        nxt[0] += u[0] * cos_t - u[1] * sin_t
        nxt[1] += u[0] * sin_t + u[1] * cos_t
        nxt[2] += u[2]
        return nxt

    def project(self, s):
        """Project the 5-D tracker state [x, vx, y, vy, theta] onto [x, y, theta]."""
        return [s[0], s[2], s[4]]

    def control(self, p):
        """Open-loop control: constant drift of the planner pose per step."""
        return [p[0] + 2.0 * 0.01, p[1] + 2.0 * 0.01, p[2] + 4.16 * 0.01]
class RmRelative(Relative):
    """Relative (tracker minus planner) system for the RoboMaster model.

    State r = [rx, vx, ry, vy, rtheta]; joint control
    u = [ax, ay, aw, bx, by, bw] where a* are tracker accelerations and
    b* are planner body-frame speeds.
    """

    def __init__(self):
        self.size = 5
        # Bounds per control channel: [ax, ay, aw, bx, by, bw].
        self.uMax = np.array([10.0, 10.0, 8.32, 2.0, 3.0, 4.16])
        self.uMin = -self.uMax

    def state(self, s, p):
        """Relative state: tracker x/y/heading minus planner x/y/heading."""
        r = deepcopy(s)
        r[0] -= p[0]
        r[2] -= p[1]
        r[4] -= p[2]
        return r

    def dynamics(self, r, u, d):
        """One step of the relative dynamics under control u and disturbance d.

        The state is clipped into the grid box before integrating.
        """
        rnext = deepcopy(r)
        rnext = np.clip(rnext, [-5, -2, -5, -3, -np.pi], [5, 2, 5, 3, np.pi])
        # BUG FIX: the original referenced an undefined name ``x`` in these
        # expressions (NameError at runtime); the state parameter is ``r``.
        rnext[0] += r[1] - u[3]*np.cos(r[4]) + u[4]*np.sin(r[4]) + d[0]
        rnext[1] += u[0]
        rnext[2] += r[3] - u[3]*np.sin(r[4]) - u[4]*np.cos(r[4]) + d[1]
        rnext[3] += u[1]
        rnext[4] += u[2] - u[5] + d[2]
        return rnext

    def optControl(self, deriv, r, x):
        """Bang-bang optimal tracking control from the value-function gradient.

        Each channel is pushed to the bound whose sign minimizes (tracker) or
        maximizes (planner) its gradient term; returns the resulting
        per-dimension Hamiltonian contributions.
        """
        # x-subsystem
        ax_deriv = deriv[1]
        # BUG FIX: the original used self.uMin[0] for BOTH branches; mirror
        # the y-subsystem (uOpt_1), which selects uMin/uMax by gradient sign.
        uOpt_0 = (ax_deriv>=0)*self.uMin[0] + (ax_deriv<0)*self.uMax[0]
        bx_x_deriv = -deriv[0]*np.cos(x[4])
        uOpt_3_x = (bx_x_deriv>=0)*self.uMax[3] + (bx_x_deriv<0)*self.uMin[3]
        by_x_deriv = deriv[0]*np.sin(x[4])
        uOpt_4_x = (by_x_deriv>=0)*self.uMax[4] + (by_x_deriv<0)*self.uMin[4]
        # y-subsystem
        ay_deriv = deriv[3]
        uOpt_1 = (ay_deriv>=0)*self.uMin[1] + (ay_deriv<0)*self.uMax[1]
        bx_y_deriv = -deriv[2]*np.sin(x[4])
        uOpt_3_y = (bx_y_deriv>=0)*self.uMax[3] + (bx_y_deriv<0)*self.uMin[3]
        by_y_deriv = -deriv[2]*np.cos(x[4])
        uOpt_4_y = (by_y_deriv>=0)*self.uMax[4] + (by_y_deriv<0)*self.uMin[4]
        # pessimistic system
        w_deriv = deriv[4]
        uOpt_2 = (w_deriv>=0)*self.uMin[2] + (w_deriv<0)*self.uMax[2]
        btheta_deriv = -deriv[4]
        uOpt_5 = (btheta_deriv>=0)*self.uMax[5] + (btheta_deriv<0)*self.uMin[5]
        # Calculate opt ctrl
        uopt = [
            deriv[0] * ( x[1] - uOpt_3_x*np.cos(x[4]) + uOpt_4_x*np.sin(x[4]) ),
            deriv[1] * uOpt_0,
            deriv[2] * ( x[3] - uOpt_3_y*np.sin(x[4]) - uOpt_4_y*np.cos(x[4]) ),
            deriv[3] * uOpt_1,
            deriv[4] * ( uOpt_2 - uOpt_5 )
        ]
        return uopt
class Reach():
    """Lookup helper around a precomputed HJ reachability value function.

    Loads the x/y-subsystem value functions and their gradients from a
    MATLAB .mat file computed offline, and exposes grid indexing, boundary
    checks and gradient queries on a 60-cell-per-dimension grid.
    """

    def __init__(self):
        matlabf = "./RMAI_g_dt01_t5_medium_quadratic.mat"
        fst = loadmat(matlabf)
        eps = 0.1
        # Tracking error bound: worst of the two subsystems, padded by eps.
        self.eb = max(fst['TEB_X'], fst['TEB_Y']) + eps
        self.vf_X = np.array(fst['data_X'])
        self.vf_Y = np.array(fst['data_Y'])
        self.vf_dX = fst['derivX']
        self.vf_dY = fst['derivY']

    def to_grid_index(self, r):
        """Map a continuous relative state onto integer grid coordinates.

        Extents per dimension: x in [-5, 5], vx in [-2, 2], y in [-5, 5],
        vy in [-3, 3], theta in [-pi, pi], each discretized into 60 cells.
        """
        r = deepcopy(r)
        r[0] = int(((r[0] + 5) / 10.0) * 60)
        r[1] = int(((r[1] + 2) / 4.0) * 60)
        r[2] = int(((r[2] + 5) / 10.0) * 60)
        r[3] = int(((r[3] + 3) / 6.0) * 60)
        r[4] = int(((r[4] + np.pi) / (2*np.pi)) * 60)
        return r

    def get_vf_dW(self, r):
        # Angular gradient from whichever subsystem's value dominates at r.
        x_w = self.vf_X[r[0], r[1], r[4]]
        y_w = self.vf_Y[r[2], r[3], r[4]]
        if x_w >= y_w:
            dw = self.vf_dX[2][0][r[0], r[1], r[4]]
        else:
            dw = self.vf_dY[2][0][r[2], r[3], r[4]]
        return dw

    def check_on_boundary(self, r):
        """True when the relative state has reached the tracking error bound."""
        r_int = [int(i) for i in r]
        vf_X_eb = self.vf_X[r_int[0], r_int[1], r_int[4]]
        vf_Y_eb = self.vf_Y[r_int[2], r_int[3], r_int[4]]
        vf_eb = max(vf_X_eb, vf_Y_eb)
        if vf_eb >= self.eb:
            return True
        else:
            return False

    def control(self, s, pnext):
        """Proportional controller used while safely inside the bound.

        NOTE(review): ``s`` appears to be the 5-D tracker state elsewhere
        ([x, vx, y, vy, theta]), which would make s[1]/s[2] vx and y — confirm
        whether indices 2 and 4 (y, heading) were intended here.
        """
        dx = pnext[0] - s[0]
        dy = pnext[1] - s[1]
        dw = pnext[2] - s[2]
        ax = 5.0 * dx
        ay = 5.0 * dy
        return [ax, ay, dw]

    def get_derivs(self, r):
        """Value-function gradient at grid cell r:
        [dX/dx, dX/dvx, dY/dy, dY/dvy, dW/dtheta]."""
        r_int = [int(i) for i in r]
        deriv = [
            self.vf_dX[0][0][r_int[0], r_int[1], r_int[4]],
            self.vf_dX[1][0][r_int[0], r_int[1], r_int[4]],
            self.vf_dY[0][0][r_int[2], r_int[3], r_int[4]],
            self.vf_dY[1][0][r_int[2], r_int[3], r_int[4]],
            self.get_vf_dW(r_int)
        ]
        return deriv
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,882 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/test_architecture.py | import numpy as np
from navigation import Navigation
def test():
    """Build the Navigation net and run one forward pass on a zero observation.

    NOTE(review): evaluate_rl_planner.py constructs Navigation(1, False) and
    unpacks five return values — confirm this file matches the current
    Navigation API before relying on it.
    """
    nav = Navigation()
    actions, neglogp, entropy, value, xyyaw_mean, xyyaw_logstd = nav(np.zeros([1, 10]))
    print(actions)
# Run the smoke test on import/execution.
test()
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,883 | zy10zm/AI-Motion-Planning | refs/heads/master | /rl/env.py | import math
import time
import numpy as np
import cv2
from copy import deepcopy
from rl.sat import separating_axis_theorem
def checkEqual(lst):
    """Return True when every element of *lst* is equal (vacuously True for
    lists shorter than two elements)."""
    dropped_first = lst[1:]
    dropped_last = lst[:-1]
    return dropped_first == dropped_last
def normalize_angles(angles):
    """Wrap every entry of the ndarray *angles* into the [-pi, pi] range."""
    wrapped = angles.copy()
    if wrapped.size > 0:
        wrapped = np.mod(wrapped + np.pi, 2 * np.pi) - np.pi
        assert -(np.pi + 1e-6) <= wrapped.min() and wrapped.max() <= (np.pi + 1e-6)
    return wrapped
class Environment():
    """2-vs-2 holonomic-robot navigation on an 8080 x 4480 mm arena.

    Two controlled agents must reach random goals while avoiding static
    obstacles, each other, and two randomly-placed static opponents.

    Grid legend used by the renderer:
        0 = floor, 1 = obstacles, 2 = opponent, 3 = agents,
        4 = tracker error bound, 5 = opponent min error bound,
        6 = opponent max error bound
    """

    def __init__(self, MAX_timestep=10000, visualize=False):
        self.MAX_timestep = MAX_timestep
        self.safety_bound = 100.0      # tracker error bound (mm)
        self.min_opponent_bound = 100  # min opponent error bound (mm)
        self.max_opponent_bound = 600  # opponent error bound (mm)
        self.agent_width = 450
        self.agent_height = 600
        self.dt = 0.01  # seconds for each timestep
        self.action_range = np.array([[2.0, 3.0, 4.16], [2.0, 3.0, 4.16]])
        self.action_clip_range = np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]])
        self.visualize_obstacles = self._get_obstacles_noBound()
        self.visualize = visualize

    def _build_obstacles(self, eps):
        """Static obstacle rectangles [ul, ur, br, bl], inflated by *eps* mm on
        the sides facing the playable area (arena-wall sides stay uninflated).
        Factored out of the former duplicated _get_obstacles bodies."""
        b1 = [500, 3380]
        b2 = [1900, 2240]
        b3 = [1600, 500]
        b4 = [4040, 3445]
        b5 = [4040, 2240]
        b6 = [4040, 1035]
        b7 = [6480, 3980]
        b8 = [6180, 2240]
        b9 = [7580, 1100]
        obstacles = [
            # ul, ur, br, bl
            [[b1[0]-500, b1[1]+(100+eps)], [b1[0]+(500+eps), b1[1]+(100+eps)], [b1[0]+(500+eps), b1[1]-(100+eps)], [b1[0]-500, b1[1]-(100+eps)]],
            [[b2[0]-(400+eps), b2[1]+(100+eps)], [b2[0]+(400+eps), b2[1]+(100+eps)], [b2[0]+(400+eps), b2[1]-(100+eps)], [b2[0]-(400+eps), b2[1]-(100+eps)]],
            [[b3[0]-(100+eps), b3[1]+(500+eps)], [b3[0]+(100+eps), b3[1]+(500+eps)], [b3[0]+(100+eps), b3[1]-(500)], [b3[0]-(100+eps), b3[1]-(500)]],
            [[b4[0]-(500+eps), b4[1]+(100+eps)], [b4[0]+(500+eps), b4[1]+(100+eps)], [b4[0]+(500+eps), b4[1]-(100+eps)], [b4[0]-(500+eps), b4[1]-(100+eps)]],
            [[b5[0]-(170+eps), b5[1]+(170+eps)], [b5[0]+(170+eps), b5[1]+(170+eps)], [b5[0]+(170+eps), b5[1]-(170+eps)], [b5[0]-(170+eps), b5[1]-(170+eps)]],
            [[b6[0]-(500+eps), b6[1]+(100+eps)], [b6[0]+(500+eps), b6[1]+(100+eps)], [b6[0]+(500+eps), b6[1]-(100+eps)], [b6[0]-(500+eps), b6[1]-(100+eps)]],
            [[b7[0]-(100+eps), b7[1]+(500)], [b7[0]+(100+eps), b7[1]+(500)], [b7[0]+(100+eps), b7[1]-(500+eps)], [b7[0]-(100+eps), b7[1]-(500+eps)]],
            [[b8[0]-(400+eps), b8[1]+(100+eps)], [b8[0]+(400+eps), b8[1]+(100+eps)], [b8[0]+(400+eps), b8[1]-(100+eps)], [b8[0]-(400+eps), b8[1]-(100+eps)]],
            [[b9[0]-(500+eps), b9[1]+(100+eps)], [b9[0]+(500), b9[1]+(100+eps)], [b9[0]+(500), b9[1]-(100+eps)], [b9[0]-(500+eps), b9[1]-(100+eps)]]
        ]
        return obstacles

    def _get_obstacles(self):
        """Obstacles inflated by the tracker error bound (for planning)."""
        return self._build_obstacles(round(self.safety_bound))

    def _get_obstacles_noBound(self):
        """Uninflated obstacles (for visualization)."""
        return self._build_obstacles(0)

    def get_rectangle(self, pos, width, height):
        """Corners [ul, ur, br, bl] of a width x height rectangle centred at
        pos = [x, y, theta], rotated by theta."""
        hwidth = width // 2
        hheight = height // 2
        px = np.array([-hwidth, hwidth, hwidth, -hwidth])
        py = np.array([hheight, hheight, -hheight, -hheight])
        qx = np.cos(pos[2]) * px - np.sin(pos[2]) * py
        qy = np.sin(pos[2]) * px + np.cos(pos[2]) * py
        qx += pos[0]
        qy += pos[1]
        rect = np.stack([qx, qy], axis=-1)
        return rect

    def get_random_position(self, obstacles, width, height, bound):
        """Sample a collision-free pose inside the arena; append the pose's
        *bound*-inflated footprint to *obstacles* (so later samples keep their
        distance) and return (pos, obstacles)."""
        collided = True
        while collided:
            pos = [np.random.uniform() * 8080, np.random.uniform() * 4480, np.random.uniform() * 6.28319]
            rect = self.get_rectangle(pos, width, height)
            # Reject poses whose footprint leaves the arena.
            out_of_bound = False
            for r in rect:
                x = r[0]
                y = r[1]
                if x < 0 or y < 0 or x >= 8080 or y >= 4480:
                    out_of_bound = True
                    break
            if out_of_bound:
                collided = True
                continue
            collided = False
            for obstacle in obstacles:
                separated = self.check_rectangle_intersect(rect, obstacle)
                if not separated:
                    collided = True
                    break
        rect = self.get_rectangle(pos, width+bound, height+bound)
        obstacles.append(rect)
        return pos, obstacles

    def check_rectangle_intersect(self, rect1, rect2):
        """Return ``not separating_axis_theorem(rect1, rect2)``; callers treat
        a truthy result as 'separated / no collision'."""
        rect1 = np.array(rect1)
        rect2 = np.array(rect2)
        return not separating_axis_theorem(rect1, rect2)

    def check_collision(self, pos, obstacles):
        """True when the agent footprint at *pos* leaves the arena or touches
        any rectangle in *obstacles*."""
        rect = self.get_rectangle(pos, self.agent_width, self.agent_height)
        for r in rect:
            x = r[0]
            y = r[1]
            if x < 0 or y < 0 or x >= 8080 or y >= 4480:
                return True
        for obstacle in obstacles:
            separated = self.check_rectangle_intersect(rect, obstacle)
            if not separated:
                return True
        return False

    def observation(self):
        """16-dim observation: agents' then opponents' normalized poses
        (x/8080, y/4480, theta/pi wrapped), followed by both normalized goals."""
        agents_pos = np.array(self.agents_pos)
        agents_pos[:, 0] /= 8080
        agents_pos[:, 1] /= 4480
        agents_pos[:, 2] = normalize_angles(agents_pos[:, 2]) / np.pi
        opponents_pos = np.array(self.opponents_pos)
        opponents_pos[:, 0] /= 8080
        opponents_pos[:, 1] /= 4480
        opponents_pos[:, 2] = normalize_angles(opponents_pos[:, 2]) / np.pi
        goals = np.array(self.goals)
        goals[:, 0] /= 8080
        goals[:, 1] /= 4480
        obs = np.array([agents_pos[0][0], agents_pos[0][1], agents_pos[0][2],
                        agents_pos[1][0], agents_pos[1][1], agents_pos[1][2],
                        opponents_pos[0][0], opponents_pos[0][1], opponents_pos[0][2],
                        opponents_pos[1][0], opponents_pos[1][1], opponents_pos[1][2],
                        goals[0][0], goals[0][1], goals[1][0], goals[1][1]])
        return obs

    def dynamics(self, pos, action, idx):
        """Integrate one holonomic step; action = [vx_body, vy_body, w]."""
        dx = action[0] * np.cos(pos[2]) - action[1] * np.sin(pos[2])
        dy = action[0] * np.sin(pos[2]) + action[1] * np.cos(pos[2])
        dtheta = action[2]
        dx *= self.dt
        dy *= self.dt
        dtheta *= self.dt
        new_theta = pos[2] + dtheta
        # BUG FIX: wrap the heading into [0, 2*pi]. The original computed
        # ``6.28319 - new_theta`` for negative angles, which both reflects the
        # heading and leaves it above 2*pi.
        if new_theta < 0:
            new_theta = 6.28319 + new_theta
        elif new_theta > 6.28319:
            new_theta = new_theta - 6.28319
        new_pos = [pos[0] + dx, pos[1] + dy, new_theta]
        return new_pos

    def reward(self):
        """Shared reward: -10 (non-terminal) on any collision, otherwise a
        shaped penalty for opponent proximity plus distance-to-goal."""
        rew = 0.0
        for idx in range(2):
            # check for collision between agents (other agent inflated by the
            # tracker error bound)
            rect_idx = self.get_rectangle(self.agents_pos[idx], self.agent_width, self.agent_height)
            rect_jdx = self.get_rectangle(self.agents_pos[1 - idx], self.agent_width + self.safety_bound, self.agent_height + self.safety_bound)
            separated = self.check_rectangle_intersect(rect_idx, rect_jdx)
            if not separated:
                return -10.0, False
            # check for collision with obstacles including opponents
            collided = self.check_collision(self.agents_pos[idx], self.obstacles)
            if collided:
                return -10.0, False
            # penalize for getting into max error-bound of opponent agents
            for opponent_pos in self.opponents_pos:
                dist = np.linalg.norm(np.array(self.agents_pos[idx])[0:2] - np.array(opponent_pos)[0:2])
                if dist <= self.max_opponent_bound:
                    rew += -(1.0 / dist)
            # reward for getting closer to goal (normalized by arena diagonal)
            dist_2_goal = np.linalg.norm(np.array(self.agents_pos[idx])[0:2] - np.array(self.goals[idx]))
            rew += -dist_2_goal / np.sqrt(8080**2 + 4480**2)
        return rew, False

    def step(self, action):
        """Advance one control step; *action* is (2, 3), clipped to [-1, 1].

        NOTE: the reward is computed from the state *before* the action is
        applied (behavior kept from the original implementation).
        """
        rew, done = self.reward()
        # clip action, then scale each axis to physical units
        action = np.clip(action, -self.action_clip_range, self.action_clip_range)
        action[:, 0] *= 2000.0
        action[:, 1] *= 3000.0
        action[:, 2] *= 4.16
        for idx in range(2):
            self.agents_pos[idx] = self.dynamics(self.agents_pos[idx], action[idx], idx)
        self.timestep += 1
        if self.timestep >= self.MAX_timestep:
            done = True
        obs = self.observation()
        return obs, rew, done, dict()

    def reset(self):
        """Start a new episode: sample opponent/agent poses and goals."""
        obstacles = self._get_obstacles()
        # Opponents first; their inflated footprints become permanent obstacles.
        opponent1_pos, obstacles = self.get_random_position(obstacles, self.agent_width, self.agent_height, self.min_opponent_bound)
        opponent2_pos, obstacles = self.get_random_position(obstacles, self.agent_width, self.agent_height, self.min_opponent_bound)
        # Agents are placed against a copy so their footprints are NOT kept.
        virtual_obstacles = deepcopy(obstacles)
        agent1_pos, virtual_obstacles = self.get_random_position(virtual_obstacles, self.agent_width, self.agent_height, self.safety_bound)
        agent2_pos, virtual_obstacles = self.get_random_position(virtual_obstacles, self.agent_width, self.agent_height, self.safety_bound)
        self.agents_pos = [agent1_pos, agent2_pos]
        self.opponents_pos = [opponent1_pos, opponent2_pos]
        self.goals = [[np.random.uniform() * 8080, np.random.uniform() * 4480],
                      [np.random.uniform() * 8080, np.random.uniform() * 4480]]
        self.obstacles = obstacles
        self.timestep = 0
        if self.visualize:
            self.image = self.get_background()
        return self.observation()

    def get_background(self):
        """Pre-render the static scene (obstacles, opponents, goals) in BGR."""
        image = np.full((4480, 8080), 169)
        image = np.stack([image, image, image], axis=-1).astype(np.uint8)
        # Drop the last two entries: the opponents' inflated footprints added
        # by get_random_position (opponents are drawn separately below).
        obstacles = deepcopy(self.obstacles)[:-2]
        # Draw tracker bound + obstacles first
        for obstacle in obstacles:
            rect = cv2.minAreaRect(np.array(obstacle).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(0,165,255))
        # Then draw obstacles
        for obstacle in self.visualize_obstacles:
            rect = cv2.minAreaRect(np.array(obstacle).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(0,0,0))
        # Display all opponents
        for opponent_pos in self.opponents_pos:
            # Draw min error bound first
            rect = self.get_rectangle(opponent_pos, self.agent_width + self.min_opponent_bound, self.agent_height + self.min_opponent_bound)
            rect = cv2.minAreaRect(np.array(rect).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(0,255,0))
            # Then draw opponent
            rect = self.get_rectangle(opponent_pos, self.agent_width, self.agent_height)
            rect = cv2.minAreaRect(np.array(rect).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(255,0,0))
            # Draw opponents' max error bound
            cv2.circle(image, (int(opponent_pos[0]), int(opponent_pos[1])), self.max_opponent_bound, (128, 0, 128), thickness=50)
        # Display goals
        cv2.circle(image, (int(self.goals[0][0]), int(self.goals[0][1])), 5, (255, 255, 0), thickness=100)
        cv2.circle(image, (int(self.goals[1][0]), int(self.goals[1][1])), 5, (0, 255, 255), thickness=100)
        return image

    def render(self):
        """Draw the agents over the cached background and show with OpenCV."""
        image = deepcopy(self.image)
        # Display all agents
        for agent_pos in self.agents_pos:
            # Draw tracker bound first
            rect = self.get_rectangle(agent_pos, self.agent_width + self.safety_bound, self.agent_height + self.safety_bound)
            rect = cv2.minAreaRect(np.array(rect).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(0,165,255))
            # Then draw agents
            rect = self.get_rectangle(agent_pos, self.agent_width, self.agent_height)
            rect = cv2.minAreaRect(np.array(rect).astype(np.float32))
            box = cv2.boxPoints(rect)
            box = np.int0(box)
            cv2.fillPoly(image, [box], color=(0,0,255))
        image = cv2.resize(image, (808, 448), interpolation=cv2.INTER_AREA)
        image = cv2.flip(image, 0)  # flip because for opencv x-axis is flipped
        cv2.imshow("image", image)
        cv2.waitKey(5)
def make_env(timesteps=10000, visualize=False):
    """Factory: create an Environment with the given episode length."""
    return Environment(timesteps, visualize=visualize)
#make_env()
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,884 | zy10zm/AI-Motion-Planning | refs/heads/master | /environment/worldgen/rgbviewer.py | import copy
import glfw
import imageio
import numpy as np
import time
import sys
from mujoco_py.builder import cymj
from mujoco_py.generated import const
from mujoco_py.utils import rec_copy, rec_assign
from multiprocessing import Process, Queue
from threading import Lock
class RgbViewerBasic(cymj.MjRenderContextWindow):
    """
    A simple display GUI showing the scene of an :class:`.MjSim` as an RGB array.

    Parameters
    ----------
    sim : :class:`.MjSim`
        The simulator to display.
    """

    def __init__(self, sim):
        sim.model.stat.extent = 15.0  # widen the default camera extent
        super().__init__(sim)
        self._gui_lock = Lock()
        # Framebuffer-to-window pixel ratio (differs on HiDPI displays).
        framebuffer_width, _ = glfw.get_framebuffer_size(self.window)
        window_width, _ = glfw.get_window_size(self.window)
        self._scale = framebuffer_width * 1.0 / window_width

    def render(self):
        """
        Render the current simulation state to the screen or off-screen buffer.
        Call this in your main loop.  Returns the frame as an RGB array, or
        None when no window exists; exits the process when the window closes.
        """
        if self.window is None:
            return
        elif glfw.window_should_close(self.window):
            glfw.terminate()
            sys.exit(0)
        with self._gui_lock:
            super().render()
        glfw.poll_events()
        frame = self._read_pixels_as_in_window()
        return frame

    def _read_pixels_as_in_window(self, resolution=None):
        """Grab the current frame as an RGB array at a fixed 640x480 resolution."""
        if resolution is None:
            resolution = glfw.get_framebuffer_size(self.sim._render_context_window.window)
        resolution = np.array(resolution)
        resolution = np.array([640, 480]) #resolution * min(1000 / np.min(resolution), 1)[2560 1559]
        resolution = resolution.astype(np.int32)
        if self.sim._render_context_offscreen is None:
            # First call creates the offscreen render context.
            self.sim.render(resolution[0], resolution[1])
        img = self.sim.render(*resolution)
        img = img[::-1, :, :]  # Rendered images are upside-down.
        return img
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,885 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/test_ft.py | import numpy as np
from fastrack import FastTrack
from rm_tp import RmPlanner, RmTracker, RmRelative, Reach
# Wire the RoboMaster tracker/planner/relative models to the reachability
# lookup and run the FaSTrack visualization loop.
tracker = RmTracker()
planner = RmPlanner()
relative = RmRelative()
reach = Reach()
ft = FastTrack(tracker, planner, relative, reach)
ft.run(100000)
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,886 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/test_opt_ctrl.py | import numpy as np
import cv2
from copy import deepcopy
from scipy.io import loadmat
# 100x100 RGB display canvas and loop constants.
grid_max_X = 100
grid_max_Y = 100
grid = np.zeros([grid_max_Y, grid_max_X])
grid = np.stack([grid, grid, grid], axis=-1)
eps = 1e-5   # goal tolerance
dt = 0.002   # integration step [s]
# Precomputed HJ value functions / gradients and the tracking error bound.
fst = loadmat('RMAI_FO_dt01_t50_veryHigh_quadratic.mat')
eb = 1.2413
vf_X = fst['data_X']
vf_Y = fst['data_Y']
vf_dX = fst['derivX']
vf_dY = fst['derivY']
def random_goal():
    """Sample a random integer goal in the grid interior ([20, 80) x [20, 80))."""
    return [np.random.randint(20, 80), np.random.randint(20, 80)]
def planner_model(p, goal):
    """One Euler step of a bang-bang planner moving toward *goal* at
    +/-2 (x) and +/-3 (y); uses the module-level time step ``dt``."""
    if goal[0] > p[0]:
        bx = 2.0
    elif goal[0] < p[0]:
        bx = -2.0
    else:
        bx = 0.0
    if goal[1] > p[1]:
        by = 3.0
    elif goal[1] < p[1]:
        by = -3.0
    else:
        by = 0.0
    return [p[0] + bx * dt, p[1] + by * dt]
def relative_model(s, p):
    """Relative state: subtract the planner position from the tracker's
    x (index 0) and y (index 2) coordinates, leaving velocities untouched."""
    rel = deepcopy(s)
    rel[0] = rel[0] - p[0]
    rel[2] = rel[2] - p[1]
    return rel
def pid_ctrl(s, goal):
    """Proportional (gain 1.0) control toward *goal*; angular terms are zero."""
    gain = 1.0
    ax = gain * (goal[0] - s[0])
    ay = gain * (goal[1] - s[1])
    return [ax, ay, 0, 0]
def tracker_model(s, u):
    """One integration step of the 4-D tracker; s = [x, vx, y, vy],
    u = [ax, ay, bx, by].  Uses the module-level time step ``dt``.

    NOTE(review): only ``u[2]`` is scaled by dt while the velocity ``s[1]`` is
    added un-scaled — likely intended as ``(s[1] - u[2]) * dt``; confirm
    against the relative dynamics before reuse.
    """
    new_s = deepcopy(s)
    new_s[0] += s[1] - u[2] * dt
    new_s[1] += u[0] * dt
    new_s[2] += s[3] - u[3] * dt
    new_s[3] += u[1] * dt
    return new_s
def opt_ctrl(s, rnext):
    """Bang-bang control terms from the value-function gradients at grid cell
    *rnext*; returns the four per-dimension gradient-weighted terms."""
    # Gradient components of the x- and y-subsystem value functions.
    deriv1X = vf_dX[0][0][int(rnext[0]), int(rnext[1])]
    deriv2X = vf_dX[1][0][int(rnext[0]), int(rnext[1])]
    deriv1Y = vf_dY[0][0][int(rnext[2]), int(rnext[3])]
    deriv2Y = vf_dY[1][0][int(rnext[2]), int(rnext[3])]
    # Pick each bound (+/-2, +/-10, +/-3, +/-10) by the gradient's sign.
    if deriv1X > 0:
        bx = -2.0
    else:
        bx = 2.0
    if deriv2X > 0:
        ax = 10.0
    else:
        ax = -10.0
    if deriv1Y > 0:
        by = -3.0
    else:
        by = 3.0
    if deriv2Y > 0:
        ay = 10.0
    else:
        ay = -10.0
    g = [s[1] - bx, ax, s[3] - by, ay]
    # Weight each term by its gradient component.
    g[0] *= deriv1X
    g[1] *= deriv2X
    g[2] *= deriv1Y
    g[3] *= deriv2Y
    return g
# Main loop: chase random goals, switching between the optimal controller
# (once the value function reaches the error bound) and a PID controller,
# visualizing planner (blue) and tracker (red) positions with OpenCV.
p = [0, 0]
s = [0, 0, 0, 0]
while True:
    goal = random_goal()
    while abs(p[0] - goal[0]) >= eps and abs(p[1] - goal[1]) >= eps:
        pnext = planner_model(p, goal)
        rnext = relative_model(s, pnext)
        if max(vf_X[int(rnext[0]), int(rnext[1])], vf_Y[int(rnext[2]), int(rnext[3])]) >= eb:
            print("hehhe")
            u = opt_ctrl(s, rnext)
        else:
            u = pid_ctrl(s, goal)
        # visualize
        px = int(pnext[0])
        py = int(pnext[1])
        vgrid = deepcopy(grid)
        #vgrid[py, px, :] = [255, 0, 0]
        cv2.circle(vgrid, (px, py), 1, (255, 0, 0), thickness=1, lineType=8, shift=0)
        sx = int(s[0])
        sy = int(s[2])
        vgrid[sy, sx, :] = [0, 0, 255]
        vgrid = cv2.resize(vgrid, (416, 416)).astype(np.uint8)
        cv2.imshow("Grid", vgrid)
        cv2.waitKey(50)
        # New states
        s = tracker_model(s, u)
        p = [s[0], s[2]]
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,887 | zy10zm/AI-Motion-Planning | refs/heads/master | /reachability/fastrack.py | import numpy as np
import cv2
from copy import deepcopy
class FastTrack():
    """FaSTrack-style closed loop: plan with the low-dimensional planner,
    track with the high-dimensional tracker, and switch to the optimal
    controller on the tracking-error-bound boundary.  Visualizes both
    positions on an OpenCV grid."""

    def __init__(self, tracker, planner, relative, reach):
        self.tracker = tracker
        self.planner = planner
        self.relative = relative
        self.reach = reach
        # 50x50 RGB display canvas.
        grid_max_X = 50
        grid_max_Y = 50
        grid = np.zeros([grid_max_Y, grid_max_X])
        self.grid = np.stack([grid, grid, grid], axis=-1)
        self.eps = 1e-5
        self.dt = 0.01

    def random_goal(self):
        # NOTE(review): bounds 20..80 exceed the 50x50 display grid above —
        # confirm the intended ranges.
        goal = [np.random.randint(20, 80), np.random.randint(20, 80)]
        return goal

    def run(self, its):
        """Run the tracking loop; *its* is currently unused (loops forever)."""
        p = [0 for _ in range(self.planner.size)]
        s = [0 for _ in range(self.tracker.size)]
        while True:
            #goal = self.random_goal()
            #while abs(p[0] - goal[0]) >= self.eps and abs(p[1] - goal[1]) >= self.eps:
            pnext = self.planner.control(p)
            rnext = self.relative.state(s, pnext)
            rnext = self.reach.to_grid_index(rnext)
            if self.reach.check_on_boundary(rnext):
                # On the error bound: apply the optimal tracking control.
                deriv = self.reach.get_derivs(rnext)
                u = self.relative.optControl(deriv, rnext, s)
            else:
                u = self.reach.control(s, pnext)
            # visualize (planner blue, tracker red)
            vgrid = deepcopy(self.grid)
            px = int(pnext[0])
            py = int(pnext[1])
            cv2.circle(vgrid, (px, py), 1, (255, 0, 0), thickness=2, lineType=8, shift=0)
            sx = int(s[0])
            sy = int(s[2])
            vgrid[sy, sx, :] = [0, 0, 255]
            vgrid = cv2.resize(vgrid, (416, 416)).astype(np.uint8)
            cv2.imshow("Grid", vgrid)
            cv2.waitKey(5)
            # New states
            s = self.tracker.dynamics(s, u)
            p = self.planner.project(s)
| {"/reachability/rm_tp.py": ["/reachability/tracker.py"], "/evaluate_rl_planner.py": ["/rl/env.py", "/rl/navigation.py"], "/simulate.py": ["/reachability/rm_tp.py", "/rl/env.py"]} |
48,888 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/worker.py | import os
import time
import mmap
import errno
import ctypes
import signal
import socket
import logging
from . import util
logger = logging.getLogger(__name__)
class Worker(object):
    """Pre-fork worker: accepts connections on the shared listening socket
    and serves the WSGI app.  ``requests`` and ``accepting`` are backed by a
    shared anonymous mmap so they remain visible across fork()."""

    def __init__(self, sock, app, timeout=1, connection_cls=None,
                 handler_cls=None):
        self.sock = sock
        self.app = app  # dotted name; resolved to a callable in run()
        self.timeout = timeout
        self.connection_cls = connection_cls
        self.handler_cls = handler_cls
        self.pid = 0
        self.birth = 0
        self.death = 0
        # One shared page backs the cross-process counters below.
        self._shared = mmap.mmap(-1, mmap.PAGESIZE)
        # NOTE(review): c_int sits at byte offset 1 (unaligned) and c_bool at
        # offset False (== 0); offsets 0 and 4 would be the conventional
        # layout — confirm this is intentional.
        self._requests = ctypes.c_int.from_buffer(self._shared, 1)
        self._accepting = ctypes.c_bool.from_buffer(self._shared, False)

    @property
    def requests(self):
        # Requests served so far, readable from the master process.
        return self._requests.value

    @requests.setter
    def requests(self, value):
        self._requests.value = value

    @property
    def accepting(self):
        # True while the accept loop should keep running.
        return self._accepting.value

    @accepting.setter
    def accepting(self, value):
        self._accepting.value = value

    def stop(self):
        # SIGQUIT handler target: abort the accept loop immediately.
        raise StopIteration

    def stop_gracefully(self):
        # SIGTERM handler target: finish the current request, then exit.
        self.accepting = False

    def reset(self, pid):
        """Reset bookkeeping after fork(); *pid* falls back to os.getpid()."""
        self.pid = pid or os.getpid()
        self.birth = time.time()
        self.death = 0
        self.requests = 0
        self.accepting = False

    def run(self):
        """Worker main loop: install signal handlers, resolve the app, then
        accept and handle connections until told to stop."""
        signal.signal(signal.SIGQUIT, lambda n, f: self.stop())
        signal.signal(signal.SIGTERM, lambda n, f: self.stop_gracefully())
        util.seed()
        util.set_blocking(self.sock)
        # TODO: util.set_process_title('pyuwsgi: worker')
        self.app = util.import_name(self.app)
        self.accepting = True
        logger.info('(worker pid %s) accepting connections', self.pid)
        while self.accepting:
            try:
                client, addr = self.sock.accept()
            except socket.error as e:
                # Retry interrupted/empty accepts; re-raise anything else.
                if e.args[0] in [errno.EINTR, errno.EAGAIN]:
                    continue
                raise
            self.handle(client, addr)
            self.requests += 1

    def handle(self, client, addr):
        """Serve a single connection through the configured WSGI handler."""
        with self.connection_cls(client, self.app) as connection:
            logger.debug(
                '(worker pid %s) %s %s "%s"',
                self.pid,
                addr[0],
                connection.environ['REQUEST_METHOD'],
                connection.environ['REQUEST_URI'],
            )
            handler = self.handler_cls(
                connection.stdin,
                connection.stdout,
                connection.stderr,
                connection.environ,
                multithread=False,
                multiprocess=True,
            )
            handler.run(self.app)
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,889 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/errors.py | STOPPING = 10
RELOADING = 20
APPLICATION_ERROR = 30
UNHANDLED_EXCEPTION = 40
class ApplicationError(Exception):
pass
class ConfigurationError(Exception):
pass
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,890 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/server.py | import os
import time
import errno
import select
import signal
import logging
from . import errors, util
logger = logging.getLogger(__name__)
class Server(object):
mercy = 20
    def __init__(self, sock, app,
                 timeout=1,
                 processes=1,
                 worker_cls=None,
                 connection_cls=None,
                 handler_cls=None,
                 max_requests=None,
                 max_lifetime=None):
        """Master process: forks and supervises *processes* workers that
        accept on the shared listening socket *sock* and serve *app*."""
        self.sock = sock
        self.app = app
        self.timeout = timeout
        self.processes = processes
        self.worker_cls = worker_cls
        self.connection_cls = connection_cls
        self.handler_cls = handler_cls
        self.max_requests = max_requests
        self.max_lifetime = max_lifetime
        self.workers = {}        # worker slot number -> Worker instance
        self.is_stopping = False
        self.chaining = None     # current slot during a chained (rolling) restart
        self.chaining_pid = None
        self._signals = []       # queued signal numbers (bounded, see signal())
        self._selfpipe = []      # self-pipe fds used for signal wakeup
    def find_worker_id(self, pid):
        """Return the slot number of the worker with *pid*, or None."""
        for n, worker in self.workers.items():
            if worker.pid == pid:
                return n
    def stop(self):
        """Hard-stop all live workers with SIGQUIT, granting ``mercy`` seconds
        before their death deadline."""
        logger.info('stopping workers...')
        self.is_stopping = True
        for worker in self.workers.values():
            if worker.pid > 0 and not worker.death:
                worker.death = time.time() + self.mercy
                util.kill(worker.pid, signal.SIGQUIT)
    def stop_gracefully(self):
        """Stop all workers with SIGTERM (finish in-flight requests first)."""
        logger.info('stopping workers gracefully...')
        self.is_stopping = True
        for worker in self.workers.values():
            if worker.pid > 0:
                worker.death = time.time() + self.mercy
                util.kill(worker.pid, signal.SIGTERM)
def reload(self):
logger.info('reloading workers...')
for worker in self.workers.values():
if worker.pid > 0:
worker.death = time.time() + self.mercy
util.kill(worker.pid, signal.SIGTERM)
def chain(self):
logger.info('chaining workers...')
self.chaining = 0
def spawn(self, n):
worker = self.workers[n]
pid = os.fork()
worker.reset(pid)
if pid:
logger.info('spawning worker %s pid %s', n, pid)
return
try:
worker.run()
except StopIteration:
pass
except errors.ApplicationError:
os._exit(errors.APPLICATION_ERROR)
except:
os._exit(errors.UNHANDLED_EXCEPTION)
os._exit(errors.STOPPING)
def signal(self, signum, frame):
if len(self._signals) < 10:
self._signals.append(signum)
def setup_signals(self):
if self._selfpipe:
map(os.close, self._selfpipe)
self._selfpipe = os.pipe()
map(util.set_not_blocking, self._selfpipe)
#map(util.set_close_on_exec, self._signal_pipe)
signal.set_wakeup_fd(self._selfpipe[1])
for signame in ['SIGINT', 'SIGQUIT', 'SIGTERM', 'SIGCHLD', 'SIGWINCH', 'SIGHUP']:
signum = getattr(signal, signame)
signal.signal(signum, self.signal)
def setup_workers(self):
for n in range(self.processes):
self.workers[n] = self.worker_cls(
self.sock,
self.app,
timeout=self.timeout,
connection_cls=self.connection_cls,
handler_cls=self.handler_cls,
)
self.spawn(n)
def run(self):
logger.info('running server pid %s', os.getpid())
self.setup_signals()
self.setup_workers()
# TODO: util.set_process_title('pyuwsgi: server')
try:
while True:
signum = self._signals.pop(0) if len(self._signals) else None
if signum:
if signum == signal.SIGINT:
self.stop()
elif signum == signal.SIGQUIT:
self.stop()
elif signum == signal.SIGTERM:
self.stop_gracefully()
elif signum == signal.SIGWINCH:
self.reload()
elif signum == signal.SIGHUP:
self.chain()
elif signum == signal.SIGCHLD:
pass
else:
logger.warning('ignoring signal %s', signum)
try:
select.select([self._selfpipe[0]], [], [], self.timeout)
while os.read(self._selfpipe[0], 1):
pass
except select.error as e:
if e.args[0] not in [errno.EINTR, errno.EAGAIN]:
raise
except OSError as e:
if e.errno not in [errno.EINTR, errno.EAGAIN]:
raise
self.check_state()
self.check_children()
self.check_deadlines()
self.check_chaining()
except StopIteration:
pass
except:
logger.exception('unhandled exception in server loop')
for worker in self.workers.values():
util.kill(worker.pid, signal.SIGKILL)
os._exit(errors.UNHANDLED_EXCEPTION)
os._exit(errors.STOPPING)
def check_state(self):
if self.is_stopping:
for worker in self.workers.values():
if worker.pid > 0:
return
raise StopIteration
def check_children(self):
try:
pid, status = os.waitpid(-1, os.WNOHANG)
if pid <= 0:
return
wid = self.find_worker_id(pid)
worker = self.workers.get(wid)
if not worker:
logger.warning('unknown process pid %s died', pid)
return
worker.reset(-1)
if self.is_stopping:
logger.info('worker %s pid %s died', wid, pid)
return
if os.WIFEXITED(status):
if os.WEXITSTATUS(status) == errors.APPLICATION_ERROR:
logger.info(
'worker %s pid %s '
'failed to load application, respawning',
wid, pid)
elif os.WEXITSTATUS(status) == errors.UNHANDLED_EXCEPTION:
logger.info(
'worker %s pid %s '
'got unhandled exception, respwaning',
wid, pid)
elif os.WEXITSTATUS(status) == errors.STOPPING:
logger.info(
'worker %s pid %s '
'exited normally, respwaning', wid, pid)
else:
logger.info(
'worker %s pid %s '
'exited with status %s, respwaning',
wid, pid, os.WEXITSTATUS(status))
elif os.WIFSIGNALED(status):
logger.info(
'worker %s pid %s '
'killed by signal %s, respawning',
wid, pid, os.WTERMSIG(status))
else:
logger.warning(
'worker %s pid %s '
'died for unknown reason, respwaning',
wid, pid)
if not self.is_stopping:
self.spawn(wid)
except OSError as e:
if e.errno not in [errno.EINTR, errno.ECHILD]:
raise
def check_deadlines(self):
for wid, worker in self.workers.items():
if worker.pid > 0:
if worker.death and worker.death < time.time():
logger.info(
'worker %s pid %s '
'dying to long, killing',
wid, worker.pid)
util.kill(worker.pid, signal.SIGKILL)
continue
if not worker.death and self.max_requests and \
worker.requests > self.max_requests:
logger.info(
'worker %s pid %s '
'exceeded requests limit, stopping',
wid, worker.pid)
worker.death = time.time() + self.mercy
util.kill(worker.pid, signal.SIGTERM)
continue
if not worker.death and self.max_lifetime and \
worker.lifetime > self.max_lifetime:
logger.info(
'worker %s pid %s '
'exceeded lifetime limit, stopping',
wid, worker.pid)
worker.death = time.time() + self.mercy
util.kill(worker.pid, signal.SIGTERM)
continue
def check_chaining(self):
if self.chaining is not None:
if self.chaining >= self.processes:
self.chaining = None
logger.info('chaining complete')
return
worker = self.workers[self.chaining]
if not self.chaining_pid:
self.chaining_pid = worker.pid
if not worker.death:
worker.death = time.time() + self.mercy
util.kill(worker.pid, signal.SIGTERM)
return
if worker.pid != self.chaining_pid:
if worker.accepting:
self.chaining_pid = None
self.chaining += 1
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,891 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/app.py | import socket
import logging
import argparse
from wsgiref.handlers import BaseCGIHandler
from .server import Server
from .worker import Worker
from .uwsgi import Connection
from .util import parse_address
# Module-level logging setup; DEBUG verbosity is hard-wired here.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
def make_server(bind, wsgi,
                processes=1,
                backlog=socket.SOMAXCONN,
                worker_cls=Worker,
                connection_cls=Connection,
                handler_cls=BaseCGIHandler,
                max_requests=None,
                max_lifetime=None):
    """Bind a listening TCP or unix socket for *bind* and wrap it in a
    pre-forking Server that serves *wsgi*.

    The address family is inferred by parse_address (dotted-quad:port ->
    AF_INET, anything else -> AF_UNIX).  The returned Server has not been
    started; call its run() method to begin serving.
    """
    family, address = parse_address(bind)
    listener = socket.socket(family, socket.SOCK_STREAM)
    # allow quick restarts without waiting out TIME_WAIT
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(address)
    listener.listen(backlog)
    return Server(
        listener,
        wsgi,
        worker_cls=worker_cls,
        connection_cls=connection_cls,
        handler_cls=handler_cls,
        max_requests=max_requests,
        max_lifetime=max_lifetime,
        processes=processes,
    )
if __name__ == '__main__':
    # Command-line entry point: parse bind address, WSGI spec, and worker
    # count, then run a pre-forking server in the foreground.
    parser = argparse.ArgumentParser()
    parser.add_argument('--address', '-a',
                        required=True,
                        action='store',
                        help='unix/tcp address to bind')
    parser.add_argument('--wsgi', '-w',
                        required=True,
                        action='store',
                        help='wsgi application')
    parser.add_argument('--processes', '-p',
                        action='store',
                        default=1,
                        type=int,
                        help='number of worker processes')
    args = parser.parse_args()
    # NOTE(review): args.wsgi is the raw 'module:attr' string and is handed
    # to make_server unresolved; util.import_name looks like the intended
    # resolver -- confirm whether the worker imports it later.
    server = make_server(args.address, args.wsgi, processes=args.processes)
    server.run()
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,892 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/uwsgi.py | import struct
import logging
from . import util
logger = logging.getLogger(__name__)
# Size in bytes of the fixed uwsgi packet header read by Connection.begin():
# modifier1 (1 byte) + little-endian datasize (2 bytes) + modifier2 (1 byte).
HEADER_SIZE = 4
# The only modifier pair this server accepts; anything else makes
# Connection.begin() raise IOError.
MODIFIER1 = 0
MODIFIER2 = 0
def unpack_header(data):
    """Decode a 4-byte uwsgi packet header.

    Returns (modifier1, datasize, modifier2) where datasize is the
    little-endian length of the variables block that follows.
    """
    modifier1, datasize, modifier2 = struct.unpack('<BHB', data)
    return modifier1, datasize, modifier2
def unpack_pair(data, pos=0):
    """Decode one <len><key><len><value> uwsgi pair from *data* at *pos*.

    Lengths are little-endian 16-bit integers.  Returns
    (new_pos, (key, value)) where new_pos points just past the pair.
    """
    key_size = struct.unpack_from('<H', data, pos)[0]
    key_start = pos + 2
    key = data[key_start:key_start + key_size]
    value_size = struct.unpack_from('<H', data, key_start + key_size)[0]
    value_start = key_start + key_size + 2
    value = data[value_start:value_start + value_size]
    return value_start + value_size, (key, value)
def unpack_pairs(data):
    """Yield every (key, value) pair packed into the uwsgi vars block."""
    offset, total = 0, len(data)
    while offset < total:
        offset, pair = unpack_pair(data, offset)
        yield pair
class Connection(object):
    """One accepted uwsgi-protocol client connection.

    Wraps the socket in file objects usable by wsgiref's BaseCGIHandler and
    parses the uwsgi header/vars block into ``environ``.
    """

    def __init__(self, sock, app):
        util.set_blocking(sock)  # XXX: Not sure about this
        self.sock = sock
        self.app = app
        # NOTE(review): makefile('r') yields a text stream on Python 3,
        # which would break struct.unpack in begin(); this appears to rely
        # on Python 2 byte-string semantics -- confirm interpreter version.
        self.stdin = self.sock.makefile('r', 4096)  # XXX: Is it a right way to
        self.stdout = self.sock.makefile('w')       # handle socket I/O?
        self.stderr = self.stdout
        self.environ = {}

    def begin(self):
        """Read and validate the uwsgi packet header, then load the request
        variables into self.environ.

        Raises IOError on a short read or an unexpected modifier pair.
        """
        data = self.stdin.read(HEADER_SIZE)
        if len(data) != HEADER_SIZE:
            raise IOError
        modifier1, size, modifier2 = unpack_header(data)
        if (modifier1, modifier2) != (MODIFIER1, MODIFIER2):
            raise IOError
        if size:
            data = self.stdin.read(size)
            if len(data) != size:
                raise IOError
            pairs = unpack_pairs(data)
            self.environ.update(pairs)

    def close(self):
        """Close both file wrappers and the underlying socket."""
        util.close(self.stdin)
        util.close(self.stdout)
        util.close(self.sock)

    def __enter__(self):
        # context-manager entry parses the request eagerly
        self.begin()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,893 | oas89/pyuwsgi | refs/heads/master | /pyuwsgi/util.py | import os
import re
import fcntl
import errno
import socket
import random
import ctypes
import ctypes.util
from .errors import ApplicationError
# Cached total byte length of the argv buffer; computed once on first use.
_argv_length = 0


def set_process_title(title, use_all_argv=False):
    """Overwrite this process's argv in place so *title* shows up in ps.

    Uses ctypes to obtain the original argc/argv via CPython's
    Py_GetArgcArgv, then strncpy's the (NUL-padded) title over argv[0].
    The title is truncated to the available argv space.

    NOTE(review): the break below stops measuring after the first argument
    when use_all_argv is truthy, which looks inverted relative to the flag
    name -- confirm intended semantics.  Also assumes Python 2 byte-string
    behaviour for the str passed to strncpy.
    """
    global _argv_length
    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    argc = ctypes.c_int()
    argv = ctypes.POINTER(ctypes.c_char_p)()
    ctypes.pythonapi.Py_GetArgcArgv(ctypes.byref(argc), ctypes.byref(argv))
    if not _argv_length:
        for n in range(argc.value):
            _argv_length += len(argv[n]) + 1
            if use_all_argv:
                break
    # reserve room and NUL-pad so strncpy fills the whole region
    title = title[:_argv_length - 2]
    title = title.ljust(_argv_length, '\x00')
    libc.strncpy(argv.contents, title, _argv_length)
def kill(pid, sig):
    """Send signal *sig* to process *pid*, ignoring already-dead targets.

    ESRCH (no such process) is swallowed; any other OSError propagates.
    """
    try:
        os.kill(pid, sig)
    except OSError as exc:
        if exc.errno != errno.ESRCH:
            raise
def close(fd):
    """Close *fd*, which may be a file-like object or a raw descriptor."""
    closer = getattr(fd, 'close', None)
    if closer is not None:
        closer()
    else:
        os.close(fd)
def seed():
    """Re-seed the module-level PRNG from 128 bytes of OS entropy.

    Intended to be called in freshly forked children so they do not share
    the parent's random state.
    """
    entropy = os.urandom(128)
    random.seed(entropy)
def set_blocking(fd):
    """Put *fd* (socket, file-like object or raw descriptor) into
    blocking mode.

    Objects exposing setblocking() (sockets) are handled natively;
    anything else is resolved to a raw descriptor and has O_NONBLOCK
    cleared via fcntl.
    """
    if hasattr(fd, 'setblocking'):
        fd.setblocking(1)
    else:
        if hasattr(fd, 'fileno'):
            fd = fd.fileno()
        # Bug fix: O_NONBLOCK lives in the *status* flags (F_GETFL); the
        # original read F_GETFD (descriptor flags), so the bit was never
        # actually cleared and unrelated status flags were clobbered.
        flags = fcntl.fcntl(fd, fcntl.F_GETFL) & ~os.O_NONBLOCK
        fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def set_not_blocking(fd):
    """Put *fd* (socket, file-like object or raw descriptor) into
    non-blocking mode.

    Objects exposing setblocking() (sockets) are handled natively;
    anything else is resolved to a raw descriptor and has O_NONBLOCK
    set via fcntl.
    """
    if hasattr(fd, 'setblocking'):
        fd.setblocking(0)
    else:
        if hasattr(fd, 'fileno'):
            fd = fd.fileno()
        # Bug fix: O_NONBLOCK lives in the *status* flags (F_GETFL); the
        # original read F_GETFD (descriptor flags), so existing status
        # flags were discarded when writing back with F_SETFL.
        flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
        fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def set_close_on_exec(fd):
    """Set the FD_CLOEXEC flag on *fd* so it is closed across exec().

    Accepts a file-like object (resolved via fileno()) or a raw
    descriptor.
    """
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()
    flags = fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
    # Bug fix: descriptor flags must be written back with F_SETFD; the
    # original used F_SETFL, which writes *status* flags and silently
    # discarded FD_CLOEXEC.
    fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def import_name(import_string):
    """Resolve a 'module.path:attr' spec to the named WSGI application.

    The attribute name defaults to 'application' when no ':' is present.
    Raises ApplicationError when the module cannot be imported, the
    attribute is missing, or the resolved object is not callable.
    """
    import importlib  # local import keeps the module's import surface unchanged
    module_name, sep, attr = import_string.partition(':')
    app_name = attr if sep else 'application'
    try:
        # Bug fix: __import__('a.b') returns the top-level package 'a', so
        # dotted module paths could never resolve; import_module returns
        # the leaf module itself.
        module = importlib.import_module(module_name)
        app = getattr(module, app_name)
        if not callable(app):
            raise ApplicationError('WSGI-application must be callable object.')
    except ImportError:
        raise ApplicationError('Cannot import module: {}.'.format(module_name))
    except AttributeError:
        raise ApplicationError('Cannot find object: {}.'.format(app_name))
    return app
def daemonize(umask=0):
    """Detach from the controlling terminal and run in the background.

    Forks (the parent exits), starts a new session, applies *umask*, and
    redirects stdin/stdout/stderr to /dev/null.
    """
    if os.fork():
        os._exit(0)
    os.setsid()
    os.umask(umask)
    # Bug fix: os.open returns a plain int descriptor, not a context
    # manager, so the original `with os.open(...)` raised at runtime.
    null = os.open(os.devnull, os.O_RDWR)
    try:
        os.dup2(null, 0)
        os.dup2(null, 1)
        os.dup2(null, 2)
    finally:
        os.close(null)
def parse_address(address):
    """Classify *address* as TCP or unix-socket and return (family, addr).

    'a.b.c.d:port' yields (AF_INET, (host, port)); any other string is
    treated as a unix socket path and yields (AF_UNIX, path).
    """
    ipv4 = re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+$', address)
    if not ipv4:
        return socket.AF_UNIX, str(address)
    host, port = address.split(':', 1)
    return socket.AF_INET, (str(host), int(port))
| {"/pyuwsgi/app.py": ["/pyuwsgi/server.py", "/pyuwsgi/worker.py", "/pyuwsgi/uwsgi.py", "/pyuwsgi/util.py"], "/pyuwsgi/util.py": ["/pyuwsgi/errors.py"]} |
48,897 | manimaul/mxmcc | refs/heads/master | /tilebuilder.py | #!/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2015, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
'''Builds zxy map tiles for a single map or all the maps within a map catalog (catalog.py)
1.) A (stack) gdal vrt files is created as follows:
--warped (base_w.vrt)
rescaled to tile system pixels
EPSG:900913 (tile system) re-projection if needed
cropped cut line if defined
rotated to be north up
--expanded rgba if needed (base_c.vrt)--
--base (the source map)--
2.) The peek of the (stack) is offset (into tile window) and tiles are then rendered
3.) The files in the vrt stack are then disposed of
depends on gdal (1.10+)
gdal python package
gdal command line utilities
'''
import subprocess
import os
import shlex
import multiprocessing
from functools import partial
import traceback
import json
import shutil
from PIL import Image
from osgeo import gdal
import osr
from . import logger
from . import tilesystem
from . import gdalds
from . import catalog
from . import config
# Raster formats accepted as tile sources (GDAL driver short names).
# http://www.gdal.org/formats_list.html
geotiff = 'GTIFF'
bsb = 'BSB'
png = 'PNG'
supported_formats = {geotiff, bsb, png}

# needed to set this to be able to process new 400dpi charts from NOAA
# http://www.charts.noaa.gov/RNCs_400/
os.environ['BSB_IGNORE_LINENUMBERS'] = 'TRUE'

gdal.AllRegister()
# in-memory driver for intermediate tile windows; PNG driver for output tiles
mem_driver = gdal.GetDriverByName('MEM')
png_driver = gdal.GetDriverByName('PNG')

# verbosity flag passed to logger.log throughout this module
log_on = logger.OFF

# resampling algorithm name handed to gdalwarp / RegenerateOverview
resampling = 'average'
# near:
#       nearest neighbour resampling (default, fastest algorithm, worst interpolation quality).
# bilinear:
#       bilinear resampling.
# cubic:
#       cubic resampling.
# cubicspline:
#       cubic spline resampling.
# lanczos:
#       Lanczos windowed sinc resampling.
# average:
#       average resampling, computes the average of all non-NODATA contributing pixels. (GDAL >= 1.10.0)
# mode:
#       mode resampling, selects the value which appears most often of all the sampled points. (GDAL >= 1.10.0)

# resampling constant used by gdal.ReprojectImage when scaling up
gdal_resampling = gdal.GRA_NearestNeighbour
# GRA_NearestNeighbour
# GRA_Bilinear
# GRA_Cubic
# GRA_CubicSpline
# GRA_Lanczos
# GRA_Average
# GRA_Mode
def _cleanup_tmp_vrt_stack(vrt_stack):
    """Delete the temporary vrt files of a stack built by
    build_tile_vrt_for_map().

    The entry at index 0 is the caller's original map file and is left
    untouched; everything above it is removed from disk.
    """
    for tmp_path in vrt_stack[1:]:
        os.remove(tmp_path)
        logger.log(log_on, 'deleting temp file:', tmp_path)
def stack_peek(vrt_stack):
    """Return the top (most recently appended) path of a vrt stack."""
    top_index = len(vrt_stack) - 1
    return vrt_stack[top_index]
def build_tile_vrt_for_map(map_path, cutline=None):
    """builds a stack of temporary vrt files for an input path to a map file
       the peek of the stack is the target file to use to create tiles
       after use the temporary files should be deleted using cleanup_tmp_vrt_stack(the_stack)

       returns stack of map paths

       note: stack always has input map_path at the base, then expanded rgba vrt if necessary
             and tile-ready vrt result at the peek

       Raises Exception when the map cannot be opened or its GDAL driver is
       not in supported_formats.  Shells out to gdal_translate / gdalwarp.
    """
    map_stack = [map_path]
    dataset = gdal.Open(map_path, gdal.GA_ReadOnly)
    if dataset is None:
        raise Exception('could not open map file: ' + map_path)

    map_type = dataset.GetDriver().ShortName
    # NOTE(review): is_north_up is computed but never used here
    _, is_north_up = gdalds.dataset_lat_lng_bounds(dataset)

    if map_type.upper() not in supported_formats:
        raise Exception(map_type + ' is not a supported format')

    # log = open(os.devnull, 'w')  # subprocess.PIPE
    log = subprocess.PIPE

    # -----paths and file names
    base_dir = os.path.dirname(map_path)
    map_fname = os.path.basename(map_path)
    map_name = map_fname[0:map_fname.find('.')]  # remove file extension

    # -----if map has a palette create vrt with expanded rgba
    if gdalds.dataset_has_color_palette(dataset):
        logger.log(log_on, 'dataset has color palette')
        c_vrt_path = os.path.join(base_dir, map_name + '_c.vrt')
        if os.path.isfile(c_vrt_path):
            os.remove(c_vrt_path)
        # try:
        # expand the indexed palette to 4-band rgba so tiles can be transparent
        command = "gdal_translate -of vrt -expand rgba \'%s\' \'%s\'" % (map_path, c_vrt_path)
        subprocess.Popen(shlex.split(command), stdout=log).wait()
        logger.log(log_on, 'creating c_vrt with command', command)
        del dataset
        dataset = gdal.Open(c_vrt_path, gdal.GA_ReadOnly)
        logger.log(log_on, 'openning dataset')
        map_stack.append(c_vrt_path)
        # except BaseException as e:
        #     logger.log(log_on, e)

    # -----repoject map to tilesystem projection, crop to cutline
    w_vrt_path = os.path.join(base_dir, map_name + '.vrt')
    if os.path.isfile(w_vrt_path):
        os.remove(w_vrt_path)

    epsg_900913 = gdalds.dataset_get_as_epsg_900913(dataset)  # offset for crossing dateline

    command = ['gdalwarp', '-of', 'vrt', '-r', resampling, '-t_srs', epsg_900913]

    # logger.log(log_on, 'using ply overrides', use_ply_overrides)
    # if use_ply_overrides:
    #     override = overrides.get_poly_override(map_path)
    #     if override is not None:
    #         cutline = override

    if cutline is not None:
        # crop the warp to the chart's border polygon
        cut_poly = gdalds.dataset_get_cutline_geometry(dataset, cutline)
        command += ['-wo', 'CUTLINE=%s' % cut_poly]

    command += [stack_peek(map_stack),  # gdal input source
                w_vrt_path]  # gdal output destination

    subprocess.Popen(command).wait()

    map_stack.append(w_vrt_path)

    return map_stack
def _render_tmp_vrt_stack_for_map(map_stack, zoom, out_dir):
    """renders a stack of vrts built with _build_tmp_vrt_stack_for_map()
       into tiles for specified zoom level
       rendered tiles placed in out_dir directory
       if out_dir is None or not a directory, tiles placed in map_stack, map directory

       *zoom* arrives as a string and is converted to int below.  Charts
       that straddle the anti-meridian are rendered as two tile ranges.
    """
    logger.log(log_on, '_render_tmp_vrt_stack_for_map: out_dir = ' + out_dir + ', zoom = ' + zoom)
    # elif verify.verify_tile_dir(out_dir):
    #     logger.log(log_on, 'skipping: ' + out_dir
    #     return

    logger.log(log_on, 'tile out dir:', out_dir)

    map_path = stack_peek(map_stack)
    ds = gdal.Open(map_path, gdal.GA_ReadOnly)
    if ds is None:
        logger.log(log_on, 'unable to open', map_path)
        return

    zoom_level = int(zoom)

    # fetch vrt data-set extends as tile bounds
    # NOTE(review): is_north_up / tile_count_x / tile_count_y are unpacked
    # but not used in this function
    lat_lng_bounds_wnes, is_north_up = gdalds.dataset_lat_lng_bounds(ds)
    min_lng, max_lat, max_lng, min_lat = lat_lng_bounds_wnes
    tile_bounds_wnes = tilesystem.lat_lng_bounds_to_tile_bounds_count(min_lng, max_lat, max_lng, min_lat, zoom_level)
    tile_west, tile_north, tile_east, tile_south, tile_count_x, tile_count_y = tile_bounds_wnes

    # ---- create coordinate transform from lat lng to data set coords
    ds_wkt = gdalds.dataset_get_projection_wkt(ds)
    ds_srs = osr.SpatialReference()
    ds_srs.ImportFromWkt(ds_wkt)
    wgs84_srs = osr.SpatialReference()
    wgs84_srs.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(wgs84_srs, ds_srs)

    # ---- grab inverted geomatrix from ground control points
    geotransform = gdalds.get_geo_transform(ds)
    inv_transform = gdal.InvGeoTransform(geotransform)

    logger.log(log_on, 'west east', tile_west, tile_east)

    if tile_west > tile_east:  # dateline wrap
        # split into [0, west] and [east, map edge] tile ranges
        logger.log(log_on, 'wrapping tile to dateline')
        _cut_tiles_in_range(0, tile_west, tile_south, tile_north, transform,
                            inv_transform, zoom_level, out_dir, ds)
        _cut_tiles_in_range(tile_east, tilesystem.map_size_tiles(zoom_level),
                            tile_south, tile_north, transform, inv_transform, zoom_level, out_dir, ds)
    else:
        _cut_tiles_in_range(tile_west, tile_east, tile_south, tile_north, transform,
                            inv_transform, zoom_level, out_dir, ds)

    del ds
def _scale_tile(tile_dir, z, x, y):
    """Try to synthesize tile (z, x, y) by shrinking its four children from
    zoom z+1 with anti-aliasing.

    Returns True and writes the PNG when at least one child tile exists on
    disk; returns False (writing nothing) otherwise.
    """
    have_scale_tile = False
    zoom_in_level = z + 1
    tile_size = tilesystem.tile_size
    diff = abs(z - zoom_in_level)
    # child tiles occupy half (tile_size >> 1) of the parent per axis
    m_tile_size = tile_size >> diff
    xx = x << diff
    yy = y << diff
    num_tiles = 1 << diff
    in_tile_paths = []
    # collect child paths column-major: (x0,y0), (x0,y1), (x1,y0), (x1,y1);
    # missing children are recorded as None placeholders
    for xi in range(num_tiles):
        for yi in range(num_tiles):
            lower_x = xx + xi
            lower_y = yy + yi
            p = os.path.join(tile_dir, '%s/%s/%s.png' % (zoom_in_level, lower_x, lower_y))
            if os.path.isfile(p):
                in_tile_paths.append(p)
                have_scale_tile = True
            else:
                in_tile_paths.append(None)
    if have_scale_tile:
        im = Image.new("RGBA", (tile_size, tile_size), (0, 0, 0, 0))
        i = 0
        xoff = 0
        yoff = 0
        for in_tile_path in in_tile_paths:
            # walk the paste offset through the four quadrants in the same
            # column-major order the paths were collected in
            if i == 1:
                yoff += m_tile_size
            if i == 2:
                yoff -= m_tile_size
                xoff += m_tile_size
            if i == 3:
                yoff += m_tile_size
            if in_tile_path is not None:
                logger.log(log_on, 'using anti-alias scaled tile')
                im.paste(Image.open(in_tile_path).resize((m_tile_size, m_tile_size), Image.ANTIALIAS), (xoff, yoff))
            i += 1
        t_dir = os.path.join(tile_dir, '%s/%s' % (z, x))
        if not os.path.isdir(t_dir):
            os.makedirs(t_dir)
        im.save(os.path.join(t_dir, '%s.png' % y))
    return have_scale_tile
def _cut_tiles_in_range(tile_min_x, tile_max_x, tile_min_y, tile_max_y, transform,
                        inv_transform, zoom_level, out_dir, ds):
    """Render every tile in the inclusive x/y tile range at *zoom_level*.

    For each tile the dataset window is located via the WGS84->dataset
    *transform* plus the inverted geotransform, clipped to the raster,
    skipped when fully transparent, and otherwise resampled into a
    tile_size x tile_size PNG under out_dir/<z>/<x>/<y>.png.  Existing
    tiles are skipped; tiles derivable from zoom z+1 are scaled instead of
    re-read from the dataset.
    """
    for tile_x in range(int(tile_min_x), int(tile_max_x) + 1, 1):
        tile_dir = os.path.join(out_dir, '%s/%s' % (zoom_level, tile_x))
        for tile_y in range(int(tile_min_y), int(tile_max_y) + 1, 1):
            tile_path = os.path.join(tile_dir, '%s.png' % tile_y)
            logger.log(log_on, tile_path)
            # logger.debug = True
            # skip tile if exists
            if os.path.isfile(tile_path):
                logger.log(log_on, 'skipping tile that exists', tile_path)
                continue
            # we can continue if the upper zoom exists even if _scale_tile returns false
            # because all upper zoom tiles may not exist if they were all fully transparent
            upper_zoom_exists = os.path.isdir(os.path.join(out_dir, str(zoom_level + 1)))
            # attempt to create tile from existing lower zoom tile
            if _scale_tile(out_dir, zoom_level, tile_x, tile_y) or upper_zoom_exists:
                logger.log(log_on, 'scaled tile', tile_path)
                continue
            logger.log(log_on, 'creating tile', tile_path)
            # logger.debug = False
            # upper-left corner of the tile: mercator pixel -> lat/lng ->
            # dataset geo coords -> dataset pixel via the inverse geotransform
            m_px, m_py = tilesystem.tile_xy_to_pixel_xy(tile_x, tile_y)
            lat, lng = tilesystem.pixel_xy_to_lat_lng(m_px, m_py, zoom_level)
            geo_x, geo_y = transform.TransformPoint(float(lng), float(lat))[:2]
            ds_px = int(inv_transform[0] + inv_transform[1] * geo_x + inv_transform[2] * geo_y)
            ds_py = int(inv_transform[3] + inv_transform[4] * geo_x + inv_transform[5] * geo_y)
            # lower-right corner, one tile_size further in both axes
            lat, lng = tilesystem.pixel_xy_to_lat_lng(m_px + tilesystem.tile_size, m_py + tilesystem.tile_size,
                                                      zoom_level)
            geo_x, geo_y = transform.TransformPoint(float(lng), float(lat))[:2]
            ds_pxx = int(inv_transform[0] + inv_transform[1] * geo_x + inv_transform[2] * geo_y)
            ds_pyy = int(inv_transform[3] + inv_transform[4] * geo_x + inv_transform[5] * geo_y)
            logger.log(log_on, 'ds_px, ds_py is the datset coordinate of tile (upper left)')
            logger.log(log_on, 'ds_px', ds_px, 'ds_py', ds_py)
            logger.log(log_on, 'ds_pxx, ds_pyy is the datset coordinate of tile (lower right)')
            logger.log(log_on, 'ds_pxx', ds_pxx, 'ds_pyy', ds_pyy)
            logger.log(log_on, 'lat lng', lat, lng)
            logger.log(log_on, 'geo', geo_x, geo_y)
            logger.log(log_on, 'raster actual size x y', ds.RasterXSize, ds.RasterYSize)
            # clamp the tile window to the raster extent
            ds_px_clip = tilesystem.clip(ds_px, 0, ds.RasterXSize)
            ds_pxx_clip = tilesystem.clip(ds_pxx, 0, ds.RasterXSize)
            x_size_clip = ds_pxx_clip - ds_px_clip
            ds_py_clip = tilesystem.clip(ds_py, 0, ds.RasterYSize)
            ds_pyy_clip = tilesystem.clip(ds_pyy, 0, ds.RasterYSize)
            y_size_clip = ds_pyy_clip - ds_py_clip
            # tile window lies entirely outside the raster
            if x_size_clip <= 0 or y_size_clip <= 0:
                continue
            logger.log(log_on, 'ds_px_clip', ds_px_clip)
            logger.log(log_on, 'ds_py_clip', ds_py_clip)
            logger.log(log_on, 'x_size_clip', x_size_clip)
            logger.log(log_on, 'y_size_clip', y_size_clip)
            logger.log(log_on, '-----------------------------')
            logger.log(log_on, 'reading dataset')
            data = ds.ReadRaster(int(ds_px_clip), int(ds_py_clip), int(x_size_clip), int(y_size_clip))
            transparent = True
            if data is not None:
                # NOTE(review): iterating raster bytes yields ints on Py3 but
                # 1-char strings on Py2 (where `ea != 0` is always true) --
                # confirm the target interpreter.
                for ea in data:
                    if ea != 0:
                        transparent = False
                        break
            # only create tiles that have data (not completely transparent)
            if not transparent:
                x_size = ds_pxx - ds_px
                y_size = ds_pyy - ds_py
                logger.log(log_on, 'x_size', x_size)
                logger.log(log_on, 'y_size', y_size)
                if not os.path.isdir(tile_dir):
                    os.makedirs(tile_dir)
                logger.log(log_on, 'ds_pxx', ds_pxx)
                logger.log(log_on, 'ds_pxx_clip', ds_pxx_clip)
                logger.log(log_on, 'ds_pyy', ds_pyy)
                logger.log(log_on, 'ds_pyy_clip', ds_pyy_clip)
                # offset of the clipped data inside the full (unclipped)
                # tile window, so edge tiles stay georegistered
                if ds_pxx == ds_pxx_clip:
                    xoff = x_size - x_size_clip
                elif ds_px_clip == 0 and ds_px < 0:
                    xoff = abs(ds_px)
                else:
                    xoff = 0
                if ds_pyy == ds_pyy_clip:
                    yoff = y_size - y_size_clip
                elif ds_py_clip == 0 and ds_py < 0:
                    yoff = abs(ds_py)
                else:
                    yoff = 0
                logger.log(log_on, 'xoff', xoff)
                logger.log(log_on, 'yoff', yoff)
                tile_bands = ds.RasterCount + 1
                logger.log(log_on, 'create mem window')
                tmp = mem_driver.Create('', int(x_size), int(y_size), bands=ds.RasterCount)
                logger.log(log_on, 'write mem window')
                tmp.WriteRaster(int(xoff), int(yoff), int(x_size_clip), int(y_size_clip), data,
                                band_list=range(1, tile_bands))
                logger.log(log_on, 'create mem tile')
                tile = mem_driver.Create('', tilesystem.tile_size, tilesystem.tile_size, bands=ds.RasterCount)
                scaling_up = int(x_size) < tilesystem.tile_size or int(y_size) < tilesystem.tile_size
                # check if we're scaling image up
                if scaling_up:
                    logger.log(log_on, 'scaling up')
                    tmp.SetGeoTransform((0.0, tilesystem.tile_size / float(x_size), 0.0,
                                         0.0, 0.0, tilesystem.tile_size / float(y_size)))
                    tile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
                    gdal.ReprojectImage(tmp, tile, None, None, gdal_resampling)
                # or scaling image down
                else:
                    logger.log(log_on, 'scaling down')
                    for i in range(1, ds.RasterCount + 1):
                        gdal.RegenerateOverview(tmp.GetRasterBand(i), tile.GetRasterBand(i), resampling)
                logger.log(log_on, 'write to file')
                png_driver.CreateCopy(tile_path, tile, strict=0)
                # release GDAL memory datasets promptly
                del data
                del tmp
                del tile
def build_tiles_for_map(kap, map_path, start_zoom, stop_zoom, cutline=None, out_dir=None):
    """builds tiles for a map_path - path to map to render tiles for
       zoom_level - int or string representing int of the single zoom level to render
       cutline - string defining the map border cutout... this can be None if the whole
                 map should be rendered.
       out_dir - path to where tiles will be rendered to, if set to None then
                 tiles will be rendered int map_path's base directory

       cutline string format example: 48.3,-123.2:48.5,-123.2:48.5,-122.7:48.3,-122.7:48.3,-123.2
                                      : dilineated latitude/longitude WGS-84 coordinates (in decimal degrees)

       *kap* is used as the map name inside the generated metadata.json.
       Also writes a tilejson metadata file and copies the HTML viewer next
       to the rendered tiles, then removes the temporary vrt stack.
    """
    map_stack = build_tile_vrt_for_map(map_path, cutline=cutline)

    # ---- render tiles in the same directory of the map if not specified
    if out_dir is None:
        out_dir = os.path.dirname(stack_peek(map_stack))
        out_dir = os.path.join(out_dir, 'tiles')
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    # ---- if we are only rendering 1 zoom level, over-shoot by one so we can scale down with anti-aliasing
    single_z_mode = config.use_single_zoom_over_zoom and stop_zoom == start_zoom
    logger.log(log_on, 'single zoom mode', single_z_mode)
    if single_z_mode:
        stop_zoom += 1

    # render deepest zoom first so shallower zooms can be scaled from it
    zoom_range = range(stop_zoom, start_zoom - 1, -1)

    if single_z_mode:
        stop_zoom -= 1

    logger.log(log_on, 'zoom range', zoom_range)
    logger.log(log_on, 'out_dir', out_dir)

    try:
        # Mxmcc tiler
        for z in zoom_range:
            logger.log(log_on, 'rendering map_stack peek')
            _render_tmp_vrt_stack_for_map(map_stack, str(z), out_dir)
        if single_z_mode:
            # drop the throw-away over-zoom level used only for anti-aliasing
            oz_dir = os.path.join(out_dir, str(stop_zoom + 1))
            logger.log(log_on, 'removing overzoom dir: ', oz_dir)
            shutil.rmtree(oz_dir)
        ds = gdal.Open(map_path, gdal.GA_ReadOnly)
        bounds, _ = gdalds.dataset_lat_lng_bounds(ds)
        west, north, east, south = bounds
        tilejson_tilemap = {
            'name': kap,
            'description': None,
            'attribution': 'MXMariner.com',
            'type': 'overlay',
            'version': '1',
            'format': 'png',
            'minzoom': start_zoom,
            'maxzoom': stop_zoom,
            'bounds': '%s,%s,%s,%s' % (west, south, east, north),
            'profile': 'mercator',
            'basename': kap,
            'tilejson': '2.0.0',
            'scheme': 'xyz'
        }
        logger.log(log_on, 'writing tile json', tilejson_tilemap)
        write_tilejson_tilemap(out_dir, tilejson_tilemap)
        copy_viewer(out_dir)
    except BaseException as e:
        # NOTE(review): broad best-effort catch -- errors are logged and
        # swallowed so the vrt cleanup below always runs
        traceback.print_exc()
        logger.log(log_on, str(e))

    _cleanup_tmp_vrt_stack(map_stack)
def write_tilejson_tilemap(dst_dir, tilemap):
    """Serialize the *tilemap* dict as metadata.json inside *dst_dir*,
    replacing any existing file."""
    metadata_path = os.path.join(dst_dir, 'metadata.json')
    logger.log(log_on, "writing ", metadata_path)
    if os.path.exists(metadata_path):
        os.remove(metadata_path)
    with open(metadata_path, 'w') as fp:
        json.dump(tilemap, fp, indent=2)
def copy_viewer(dest):
    """Copy the bundled HTML/JS tile viewer assets into *dest*."""
    here = os.path.dirname(os.path.realpath(__file__))
    for asset in ('viewer.js', 'google.html'):
        shutil.copy(os.path.join(here, asset), os.path.join(dest, asset))
def _build_tiles_for_map_helper(entry, name):
    """helper method for multiprocessing pool map_async

       *entry* is a catalog row (dict-like with 'path', 'min_zoom',
       'max_zoom', 'outline' keys); *name* is the upper-cased region name
       used to build the output directory.  All exceptions are logged and
       swallowed so one bad chart cannot kill the pool worker.
    """
    try:
        m_name = os.path.basename(entry['path'])
        # tiles land in <unmerged_tile_dir>/<REGION>/<chart-name-sans-ext>/
        out_dir = os.path.join(config.unmerged_tile_dir, name, m_name[0:m_name.rfind('.')])
        m_path = entry['path']
        min_zoom = int(entry['min_zoom'])
        max_zoom = int(entry['max_zoom'])
        m_outline = entry['outline']
        build_tiles_for_map(m_name, m_path, min_zoom, max_zoom, cutline=m_outline, out_dir=out_dir)
    except BaseException as e:
        traceback.print_exc()
        logger.log(log_on, e)
def build_tiles_for_catalog(catalog_name):
    """Render tiles for every chart listed in a region catalog.

    Charts are processed in parallel across one pool worker per CPU;
    output goes to the unmerged tile directory configured in config.py.
    """
    region = catalog_name.upper()
    reader = catalog.get_reader_for_region(region)
    worker_count = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(processes=worker_count)
    # NOTE: map_async + close/join; per-chart errors are handled inside the helper
    pool.map_async(partial(_build_tiles_for_map_helper, name=region), reader)
    pool.close()
    pool.join()  # wait for pool to empty
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,898 | manimaul/mxmcc | refs/heads/master | /lookups.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This contains methods for looking up information about a region chart file.
The lookup methods are used by catalog.py to build a catalog of charts for
a region.
'''
import datetime
import os
from . import bsb
from . import ukho_xlrd_lookup
from . import findzoom
from . import gdalds
from . import config
# coordinates need to be longitude,latitude,altitude
# KML polygon template; the single %s placeholder receives the
# space-separated run of 'lng,lat,0 ' triples built by _get_cutline_kml().
cutline_kml = '''<?xml version='1.0' encoding='UTF-8'?>
<kml xmlns='http://www.opengis.net/kml/2.2'>
<Placemark>
<name>cutline</name>
<Polygon>
<extrude>1</extrude>
<altitudeMode>relativeToGround</altitudeMode>
<outerBoundaryIs>
<LinearRing>
<coordinates>%s</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
</kml>
'''
def _get_cutline_kml(poly):
    """Fill the KML cutline template from 'lat,lng' vertex strings,
    swapping each vertex into KML's lng,lat,alt order."""
    coords = []
    for vertex in poly:
        coords.append('%s,%s,0 ' % (vertex.split(',')[1], vertex.split(',')[0]))
    return cutline_kml % (''.join(coords))
def get_cutline_kml(poly_string):
    """Convert a ':'-delimited 'lat,lng' polygon string into cutline KML."""
    vertices = poly_string.split(':')
    return _get_cutline_kml(vertices)
class Lookup(object):
    """Base chart-metadata lookup backed by per-path header objects.

    ``lookup_db`` maps a chart path to an object exposing get_name,
    get_zoom, get_scale, get_updated, get_depth_units, get_outline and
    get_is_valid accessors; subclasses populate it (or override _get).
    """

    def __init__(self):
        # map_path -> header/metadata object
        self.lookup_db = {}

    def _get(self, map_path):
        return self.lookup_db[map_path]

    def _delegate(self, map_path, accessor):
        # single funnel for all metadata accessors below
        return getattr(self._get(map_path), accessor)()

    def get_name(self, map_path):
        return self._delegate(map_path, 'get_name')

    def get_min_zoom(self, map_path):
        return self._delegate(map_path, 'get_zoom')

    def get_max_zoom(self, map_path):
        return self._delegate(map_path, 'get_zoom')

    def get_scale(self, map_path):
        return self._delegate(map_path, 'get_scale')

    def get_updated(self, map_path):
        return self._delegate(map_path, 'get_updated')

    def get_depth_units(self, map_path):
        return self._delegate(map_path, 'get_depth_units')

    def get_outline(self, map_path):
        return self._delegate(map_path, 'get_outline')

    def get_is_valid(self, map_path):
        return self._delegate(map_path, 'get_is_valid')
class BsbLookup(Lookup):
    """Lookup that lazily parses and caches one BsbHeader per chart path."""

    def _get(self, map_path):
        # parse the BSB header on first access only
        if map_path not in self.lookup_db:
            self.lookup_db[map_path] = bsb.BsbHeader(map_path)
        return self.lookup_db[map_path]
class UKHOLookup(Lookup):
    """Lookup that reads chart metadata from the UKHO excel sheets and
    derives zoom levels from the raster's true scale."""

    def __init__(self):
        super(UKHOLookup, self).__init__()
        self.meta_lookup = ukho_xlrd_lookup.MetaLookup()

    def _get(self, map_path):
        return self.meta_lookup.get_data(map_path)

    def get_min_zoom(self, map_path):
        """Zoom derived from the chart's true scale at the configured UKHO dpi."""
        dataset = gdalds.get_ro_dataset(map_path)
        scale = gdalds.get_true_scale(dataset, config.ukho_chart_dpi)
        return findzoom.get_zoom_from_true_scale(scale)

    def get_max_zoom(self, map_path):
        # single-zoom charts: max equals min
        return self.get_min_zoom(map_path)
class GdalGeoTiffLookup(Lookup):
    """Lookup for GDAL-readable GeoTIFF charts: metadata is derived from
    the file itself (name, true scale at 400 dpi, raster bounds)."""

    def __init__(self):
        super(GdalGeoTiffLookup, self).__init__()

    def get_name(self, map_path):
        # file name without its 3-character extension
        base = os.path.basename(map_path)
        return base[:-4]

    def get_min_zoom(self, map_path):
        return findzoom.get_zoom_from_true_scale(self.get_scale(map_path)) + 1

    def get_max_zoom(self, map_path):
        # min and max coincide for these charts
        return self.get_min_zoom(map_path)

    def get_scale(self, map_path):
        dataset = gdalds.get_ro_dataset(map_path)
        return int(gdalds.get_true_scale(dataset, 400))

    def get_updated(self, map_path):
        # no authoritative date in the file; stamp with today
        now = datetime.datetime.now()
        return now.strftime('%b-%d-%Y')

    def get_depth_units(self, map_path):
        return 'None'

    def get_outline(self, map_path):
        return self.get_outline_bounds(map_path)

    @staticmethod
    def get_outline_bounds(map_path):
        dataset = gdalds.get_ro_dataset(map_path)
        return gdalds.dataset_lat_lng_bounds_as_cutline(dataset)

    def get_is_valid(self, map_path):
        return True
class BsbGdalMixLookup(GdalGeoTiffLookup):
    """Per-file dispatch: BSB header data for *.kap charts, GDAL/file-name
    data (via GdalGeoTiffLookup) for everything else."""
    def __init__(self):
        super(BsbGdalMixLookup, self).__init__()
        self.bsb_lookup = BsbLookup()
    def _is_bsb(self, map_path):
        # case-insensitive '.kap' extension test; the original checked
        # endswith('KAP'), which would also match names like 'FOOKAP'
        # that have no extension separator
        return map_path.upper().endswith('.KAP')
    def get_name(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_name(map_path)
        return super(BsbGdalMixLookup, self).get_name(map_path)
    def get_min_zoom(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_min_zoom(map_path)
        return super(BsbGdalMixLookup, self).get_min_zoom(map_path)
    def get_max_zoom(self, map_path):
        # single zoom level for both chart kinds
        return self.get_min_zoom(map_path)
    def get_scale(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_scale(map_path)
        return super(BsbGdalMixLookup, self).get_scale(map_path)
    def get_updated(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_updated(map_path)
        return super(BsbGdalMixLookup, self).get_updated(map_path)
    def get_depth_units(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_depth_units(map_path)
        return 'Unknown'
    def get_outline(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_outline(map_path)
        return super(BsbGdalMixLookup, self).get_outline(map_path)
    def get_is_valid(self, map_path):
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_is_valid(map_path)
        return super(BsbGdalMixLookup, self).get_is_valid(map_path)
class FAALookup(GdalGeoTiffLookup):
    """FAA aeronautical charts: zoom span widened 8 levels below the
    natural zoom, true scale computed at 300 dpi."""
    def get_min_zoom(self, map_path):
        natural_zoom = findzoom.get_zoom_from_true_scale(self.get_scale(map_path))
        return max(0, natural_zoom - 8)
    def get_max_zoom(self, map_path):
        return findzoom.get_zoom_from_true_scale(self.get_scale(map_path))
    def get_scale(self, map_path):
        dataset = gdalds.get_ro_dataset(map_path)
        return int(gdalds.get_true_scale(dataset, 300))
class WaveylinesLookup(BsbGdalMixLookup):
    """Lookup for Waveyline charts with per-chart outline overrides.

    Overrides are read once from the bundled ``wl_overrides`` directory;
    each file holds one coordinate pair per line and is keyed by its file
    name minus the 4-character extension, joined as 'pair:pair:...'.
    """
    def __init__(self):
        super(WaveylinesLookup, self).__init__()
        ovr_dir = os.path.join(os.path.dirname(__file__), 'wl_overrides')
        self.overrides = {}
        for ea in os.listdir(ovr_dir):
            with open(os.path.join(ovr_dir, ea), 'r') as f:
                name = ea[:-4]
                # join the stripped lines with ':' (replaces the original
                # quadratic '+=' build followed by a trailing-strip)
                self.overrides[name] = ':'.join(line.strip() for line in f.readlines())
        # NOTE: BsbGdalMixLookup.__init__ already created self.bsb_lookup;
        # the original redundant re-assignment here was dropped.
    def get_depth_units(self, map_path):
        # BSB charts report their own units; GeoTIFFs default to Meters
        if self._is_bsb(map_path):
            return self.bsb_lookup.get_depth_units(map_path)
        return 'Meters'
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,899 | manimaul/mxmcc | refs/heads/master | /tilesmerge.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Modified for mxmcc: Will Kamp
# Original source: https://code.google.com/p/tilers-tools/
###############################################################################
# Copyright (c) 2010, Vadim Shlyakhov
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#******************************************************************************
from PIL import Image
import glob
from . import catalog
from . import config
import os
import pickle
import re
import shutil
import sys
import multiprocessing
from .tilesystem import tile_size
def set_nothreads():
    """Disable multiprocessing; parallel_map then falls back to plain map()."""
    global multiprocessing
    multiprocessing = None
def parallel_map(func, iterable):
    """Map *func* over *iterable*, using a multiprocessing pool when possible.

    Falls back to a sequential map when multiprocessing has been disabled
    via set_nothreads() or when there are fewer than two items.  Always
    returns a list so both code paths yield the same type (the original
    returned a lazy map iterator on the sequential path).
    """
    if multiprocessing is None or len(iterable) < 2:
        return list(map(func, iterable))
    mp_pool = multiprocessing.Pool()  # multiprocessing pool
    try:
        return mp_pool.map(func, iterable)
    finally:
        # always reap worker processes, even if map() raises
        mp_pool.close()
        mp_pool.join()
def re_sub_file(fname, subs_list):
    """Stream-edit *fname* in place, applying each (pattern, repl) regex
    substitution to every line.

    Writes to a temporary '<fname>.new' file and atomically moves it over
    the original when done.
    """
    new = fname + '.new'
    # 'r' replaces the original 'rU': the 'U' mode flag was deprecated and
    # removed in Python 3.11; universal newlines are the default in text mode.
    # The input handle is now also closed deterministically (it used to leak).
    with open(new, 'w') as out, open(fname, 'r') as src:
        for l in src:
            for (pattern, repl) in subs_list:
                l = re.sub(pattern, repl, string=l)
            out.write(l)
    shutil.move(new, fname)
class KeyboardInterruptError(Exception):
    """Picklable stand-in for KeyboardInterrupt raised inside pool workers."""
    pass
def transparency(img):
    """Classify an RGBA image's alpha: 1 fully opaque, 0 fully transparent,
    -1 semi-transparent (mixed)."""
    (r, g, b, a) = img.split()
    lo, hi = a.getextrema()  # min/max values of the alpha channel
    if lo == 255:
        return 1
    if hi == 0:
        return 0
    return -1
class MergeSet:
    """Merges one source ZXY tile tree into a destination tile tree.

    Instances are callable so they can be handed straight to parallel_map();
    calling with a tile's relative path merges that tile into the
    destination, compositing semi-transparent tiles over any existing
    destination tile.  Per-tile transparency results are cached in a
    'merge-cache' pickle in the source directory to speed up re-runs.
    """
    def __init__(self, src_dir, dst_dir):
        (self.src, self.dest) = (src_dir, dst_dir)
        self.tile_sz = (tile_size, tile_size)
        try:
            cwd = os.getcwd()
            os.chdir(self.src)
            self.src_lst = glob.glob('[0-9]*/*/*.png')
            try:
                self.max_zoom = max([int(i) for i in glob.glob('[0-9]*')])
            except ValueError:
                # no numeric zoom-level directories found in the source
                print("there is a problem")
                print(self.src)
                sys.exit()
        finally:
            os.chdir(cwd)
        # load cached tile transparency data if any
        self.src_transp = dict.fromkeys(self.src_lst, None)
        self.src_cache_path = os.path.join(self.src, 'merge-cache')
        try:
            # bug fix: pickle files must be opened in *binary* mode under
            # Python 3 -- the original text-mode open made every cache
            # load fail silently
            with open(self.src_cache_path, 'rb') as cache_f:
                self.src_transp.update(pickle.load(cache_f))
        except Exception:
            pass  # the cache is an optimization only; rebuild silently
        # crop map for the (currently disabled) underlay feature
        tsx, tsy = self.tile_sz
        self.underlay_map = [  # lf up rt lw
            (0, 0, tsx / 2, tsy / 2), (tsx / 2, 0, tsx, tsy / 2),
            (0, tsy / 2, tsx / 2, tsy), (tsx / 2, tsy / 2, tsx, tsy),
        ]
        # do the thing
        self.merge_dirs()

    def __call__(self, tile):
        """Merge one source tile into the destination.

        Returns (tile, transparency) so workers can report results back
        for caching.  KeyboardInterrupt is converted to a picklable
        KeyboardInterruptError so pool workers shut down cleanly.
        """
        try:
            src_path = os.path.join(self.src, tile)
            dst_tile = os.path.join(self.dest, tile)
            dpath = os.path.dirname(dst_tile)
            src_raster = None
            transp = self.src_transp[tile]
            if transp is None:  # transparency value not cached yet
                src_raster = Image.open(src_path).convert("RGBA")
                transp = transparency(src_raster)
            if transp != 0:  # fully transparent tiles are skipped entirely
                if not os.path.exists(dpath):
                    try:  # thread race safety
                        os.makedirs(dpath)
                    except os.error:
                        pass
                if transp == 1 or not os.path.exists(dst_tile):
                    # fully opaque or no destination tile exists yet
                    shutil.copy(src_path, dst_tile)
                else:  # semitransparent, combine with destination (exists! see above)
                    if not src_raster:
                        src_raster = Image.open(src_path).convert("RGBA")
                    dst_raster = Image.composite(src_raster, Image.open(dst_tile).convert("RGBA"), src_raster)
                    dst_raster.save(dst_tile)
        except KeyboardInterrupt:  # http://jessenoller.com/2009/01/08/multiprocessingpool-and-keyboardinterrupt/
            print('got KeyboardInterrupt')
            raise KeyboardInterruptError()
        return (tile, transp)  # send back transparency values for caching

    def upd_stat(self, transparency_data):
        """Fold worker (tile, transparency) results back in and persist the cache."""
        self.src_transp.update(dict(transparency_data))
        try:
            # bug fix: binary mode for pickle.dump (text mode always raised
            # and was silently swallowed, so the cache never persisted)
            with open(self.src_cache_path, 'wb') as cache_f:
                pickle.dump(self.src_transp, cache_f)
        except Exception:
            pass  # cache save failure is non-fatal

    def merge_dirs(self):
        """Merge every source tile, in parallel when enabled."""
        src_transparency = parallel_map(self, self.src_lst)
        self.upd_stat(src_transparency)
# MergeSet end
def merge_list(name, tile_dir_list, nothreads=False):
    """Merge a list of ZXY tiled map directories into a single directory.

    name - output directory is config.merged_tile_dir joined with name
    tile_dir_list - input ZXY tiled map directories; should be sorted by
        map scale descending so that larger-scale tiles of the same zoom
        get priority
    nothreads - set to True to disable multiprocessing

    Tiles with transparency are composited over data already merged from
    earlier directories.  Raises Exception when an entry of tile_dir_list
    is not a directory.
    """
    if nothreads:
        set_nothreads()
    merge_dir = os.path.join(config.merged_tile_dir, name)
    if not os.path.isdir(merge_dir):
        os.makedirs(merge_dir)
    else:
        # start from a clean slate so stale tiles never leak into the output
        shutil.rmtree(merge_dir, ignore_errors=True)
    for tile_dir in tile_dir_list:
        if os.path.isdir(tile_dir):
            MergeSet(tile_dir, merge_dir)
        else:
            # bug fix: the message used to be passed as a 2-tuple instead of
            # being %-formatted, yielding an unreadable exception
            raise Exception('map %s is missing from tiles list' % os.path.basename(tile_dir))
def merge_catalog(catalog_name, nothreads=False):
    """Merge a catalog of ZXY tiled map directories into a single directory.

    catalog_name - name of the catalog to merge; also the name of the output
                   directory created under config.merged_tile_dir
    nothreads - set to True to disable multiprocessing

    Raises Exception when the catalog has no matching directory under
    config.unmerged_tile_dir, or when a cataloged map's tile directory is
    missing.
    """
    if nothreads:
        set_nothreads()
    reader = catalog.get_reader_for_region(catalog_name)
    merge_dir = os.path.join(config.merged_tile_dir, catalog_name)
    if not os.path.isdir(merge_dir):
        os.makedirs(merge_dir)
    else:
        # clear any previous merge so stale tiles don't survive
        shutil.rmtree(merge_dir, ignore_errors=True)
    unmerged_tile_dir = None
    # find the unmerged tile directory matching the catalog, case-insensitively
    for ea in os.listdir(config.unmerged_tile_dir):
        if ea.upper() == catalog_name.upper():
            unmerged_tile_dir = os.path.join(config.unmerged_tile_dir, ea)
            break
    if unmerged_tile_dir is None:
        raise Exception('%s is not in unmerged tiles directory' % catalog_name)
    for entry in reader:
        # catalog entries store the full map path; tiles live in a directory
        # named after the map file minus its extension
        map_name = os.path.basename(entry['path'])
        map_name = map_name[0:map_name.find('.')]
        tile_dir = os.path.join(unmerged_tile_dir, map_name)
        if os.path.isdir(tile_dir):
            MergeSet(tile_dir, merge_dir)
        else:
            raise Exception('map %s missing from tiles' % map_name)
48,900 | manimaul/mxmcc | refs/heads/master | /gemf.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__email__ = 'will@mxmariner.com'
__license__ = 'BSD'
# most of the credit belongs to:
__credits__ = ['http://www.cgtk.co.uk/gemf',
'A. Budden']
__copyright__ = 'Copyright (c) 2015, Matrix Mariner Inc.\n' + \
'A. Budden'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This parses through a given ZXY tiled map directory and generates a gemf file.'''
import os
from Crypto import Random
from . import config
from .tilesystem import tile_size
# archives are split into '<name>-N' continuation files beyond ~2 GB
file_size_limit = 2000000000
def _valto_n_bytes(value, n):
result = []
for _ in range(n):
result.append(value & 0xFF)
value >>= 8
result.reverse()
return result
def _valto_4_bytes(value):
    """Encode *value* as 4 big-endian bytes (u32)."""
    return _valto_n_bytes(value, n=4)
def _valto_8_bytes(value):
    """Encode *value* as 8 big-endian bytes (u64)."""
    return _valto_n_bytes(value, n=8)
def generate_gemf(name, add_uid=False):
    """Generate a (s)gemf tile archive from a merged ZXY tile directory.

    name - directory under config.merged_tile_dir to pack; its base name
           (minus the '.enc'/'.opt' suffix, upper-cased) names the archive
           created in config.compiled_dir
    add_uid - set to True if the tiles are encrypted and carry a 16-byte
              initial vector; a random 16-byte uid is then written at the
              start of the archive and the extension becomes '.sgemf'

    GEMF v4 layout: [optional 16-byte uid] header (version, tile size,
    source list, tile ranges) | per-tile offset/size table | raw tile
    data.  Output is split into '<name>-N' files past file_size_limit.
    """
    if not os.path.isdir(os.path.join(config.merged_tile_dir, name)):
        raise Exception(name + ' not a directory')
    options = {}
    if add_uid:
        options['add-uid'] = True
    if add_uid:
        ext = '.sgemf'
    else:
        ext = '.gemf'
    base_name = name[:name.rfind('.')].upper()  # remove .enc or .opt
    output_file = os.path.join(config.compiled_dir, base_name + ext)
    tilesize = tile_size
    # tile file extensions probed, in priority order
    extensions = ('.png.tile', '.jpg.tile', '.png', '.jpg')
    all_sources = {}
    source_order = []
    source_index = 0
    source_indices = {}
    count = {}
    mapdir = config.merged_tile_dir
    source_list = [name]
    # --- pass 1: scan the tile tree and build contiguous tile ranges ---
    for source in source_list:
        results = {}
        source_mapdir = os.path.join(mapdir, source)
        if not os.path.isdir(source_mapdir):
            print('Skipping ' + source_mapdir)
            continue
        source_indices[source] = source_index
        # Generate results[zoom][x] = [y1,y2,...]
        for zoom_level_str in os.listdir(source_mapdir):
            zoom_level = int(zoom_level_str)
            results[zoom_level] = {}
            zoom_dir = os.path.join(source_mapdir, zoom_level_str)
            if not os.path.isdir(zoom_dir):
                print('Skipping ' + zoom_dir)
                continue
            for x_str in os.listdir(zoom_dir):
                x_set = []
                x_val = int(x_str)
                x_dir = os.path.join(zoom_dir, x_str)
                if not os.path.isdir(x_dir):
                    print('Skipping ' + x_dir)
                    continue
                for y_str in os.listdir(x_dir):
                    y_val = int(y_str.split('.')[0])
                    x_set.append(y_val)
                results[zoom_level][x_val] = x_set[:]
        if 'allow-empty' in options:
            # one bounding range per zoom; gaps are permitted (zero-length tiles)
            full_sets = {}
            for zoom_level in list(results.keys()):
                full_sets[zoom_level] = []
                xmax = max(results[zoom_level].keys())
                xmin = min(results[zoom_level].keys())
                y_vals = []
                for x_val in list(results[zoom_level].keys()):
                    y_vals += results[zoom_level][x_val]
                ymax = max(y_vals)
                ymin = min(y_vals)
                full_sets[zoom_level].append({'xmin': xmin, 'xmax': xmax,
                                              'ymin': ymin, 'ymax': ymax,
                                              'source_index': source_index})
        else:
            # A record representing a square of 1-5 tiles at zoom 10
            # unique_sets[zoom][Y values key] = [X values array]
            # unique_sets[10]['1-2-3-4-5'] = [1,2,3,4,5]
            unique_sets = {}
            for zoom_level in list(results.keys()):
                unique_sets[zoom_level] = {}
                for x_val in list(results[zoom_level].keys()):
                    # strkey: Sorted list of Y values for a zoom/X, eg: '1-2-3-4'
                    strkey = '-'.join(['%d' % i for i in sorted(results[zoom_level][x_val])])
                    if strkey in list(unique_sets[zoom_level].keys()):
                        unique_sets[zoom_level][strkey].append(x_val)
                    else:
                        unique_sets[zoom_level][strkey] = [x_val, ]
            # Find missing X rows in each unique_set record
            split_xsets = {}
            for zoom_level in list(results.keys()):
                split_xsets[zoom_level] = []
                for xset in list(unique_sets[zoom_level].values()):
                    setxmin = min(xset)
                    setxmax = max(xset)
                    last_valid = None
                    for xv in range(setxmin, setxmax+2):
                        if xv not in xset and last_valid is not None:
                            split_xsets[zoom_level].append({'xmin': last_valid, 'xmax': xv-1})
                            last_valid = None
                        elif xv in xset and last_valid is None:
                            last_valid = xv
            # Find missing Y rows in each unique_set chunk, create full_sets records for each complete chunk
            full_sets = {}
            for zoom_level in list(split_xsets.keys()):
                full_sets[zoom_level] = []
                for xr in split_xsets[zoom_level]:
                    yset = results[zoom_level][xr['xmax']]
                    setymin = min(yset)
                    setymax = max(yset)
                    last_valid = None
                    for yv in range(setymin, setymax+2):
                        if yv not in yset and last_valid is not None:
                            full_sets[zoom_level].append({'xmin': xr['xmin'], 'xmax': xr['xmax'],
                                                          'ymin': last_valid, 'ymax': yv-1,
                                                          'source_index': source_index})
                            last_valid = None
                        elif yv in yset and last_valid is None:
                            last_valid = yv
        # verify every tile inside each range actually exists on disk
        count[source] = {}
        for zoom_level in list(full_sets.keys()):
            count[source][zoom_level] = 0
            for rangeset in full_sets[zoom_level]:
                for xv in range(rangeset['xmin'], rangeset['xmax']+1):
                    for yv in range(rangeset['ymin'], rangeset['ymax']+1):
                        found = False
                        for extension in extensions:
                            fpath = os.path.join(source_mapdir, '%d/%d/%d%s' % (zoom_level, xv, yv, extension))
                            if os.path.exists(fpath):
                                found = True
                                break
                        if not found and 'allow-empty' not in options:
                            raise IOError('Could not find file (%s, %d, %d, %d)' % (source, zoom_level, xv, yv))
                        count[source][zoom_level] += 1
            print(source_mapdir, zoom_level, count[source][zoom_level])
        all_sources[source] = full_sets
        source_order.append(source)
        source_index += 1
    # --- compute header geometry (all sizes in bytes) ---
    u32_size = 4
    u64_size = 8
    range_size = (u32_size * 6) + (u64_size * 1)  # xmin, xmax, ymin, ymax, zoom, source, offset
    file_info_size = u64_size + u32_size
    number_of_ranges = 0
    number_of_files = 0
    for source in source_order:
        full_sets = all_sources[source]
        number_of_ranges += sum([len(full_sets[i]) for i in list(full_sets.keys())])
        number_of_files += sum(count[source].values())
    source_count = 0
    source_list = []
    for source in source_order:
        source_list += _valto_4_bytes(source_indices[source])
        source_list += _valto_4_bytes(len(source))
        source_list += [i for i in source.encode('ascii', 'ignore')]
        source_count += 1
    source_list_size = len(source_list)
    gemf_version = 4
    uid_size = 0
    if 'add-uid' in options:
        uid_size = 16
    pre_info_size = (uid_size +  # Random 16 byte uid
                     u32_size +  # GEMF Version
                     u32_size +  # Tile size
                     u32_size +  # Number of ranges
                     u32_size +  # Number of sources
                     source_list_size +  # Size of source list
                     number_of_ranges * range_size)  # Ranges
    header_size = (pre_info_size + (number_of_files * file_info_size))  # File header info
    image_offset = header_size
    print('Source Count:', source_count)
    print('Source List Size:', source_list_size)
    print('Source List:', repr(source_list))
    print('Pre Info Size:', pre_info_size)
    print('Number of Ranges:', number_of_ranges)
    print('Number of files:', number_of_files)
    print('Header Size (first image location): 0x%08X' % header_size)
    # --- pass 2: build header bytes, offset table, and ordered file list ---
    header = []
    header += _valto_4_bytes(gemf_version)
    header += _valto_4_bytes(tilesize)
    header += _valto_4_bytes(source_count)
    header += source_list
    header += _valto_4_bytes(number_of_ranges)
    data_locations = []
    data_location_address = 0
    file_list = []
    first_range = True
    first_tile = True
    tile_count = 0
    for tile_source in source_order:
        full_source_set = all_sources[tile_source]
        for zoom_level in list(full_source_set.keys()):
            for rangeset in full_source_set[zoom_level]:
                if first_range:
                    h = len(header)
                    print('First range at 0x%08X' % len(header))
                header += _valto_4_bytes(zoom_level)
                header += _valto_4_bytes(rangeset['xmin'])
                header += _valto_4_bytes(rangeset['xmax'])
                header += _valto_4_bytes(rangeset['ymin'])
                header += _valto_4_bytes(rangeset['ymax'])
                header += _valto_4_bytes(rangeset['source_index'])
                header += _valto_8_bytes(data_location_address + pre_info_size)
                if first_range:
                    hb = header[h:]
                    print('Range Data: [' + ','.join(['%02X' % i for i in hb]) + ']')
                    print('First Data Location: 0x%08X' % (data_location_address + pre_info_size))
                    first_range = False
                for xv in range(rangeset['xmin'], rangeset['xmax']+1):
                    for yv in range(rangeset['ymin'], rangeset['ymax']+1):
                        found = False
                        for extension in extensions:
                            fpath = os.path.join(mapdir, '%s/%d/%d/%d%s' % (tile_source, zoom_level, xv, yv, extension))
                            if os.path.exists(fpath):
                                found = True
                                break
                        if not found:
                            if 'allow-empty' in options:
                                file_size = 0
                            else:
                                raise IOError('Could not find file (%s, %d, %d, %d)'
                                              % (tile_source, zoom_level, xv, yv))
                        else:
                            file_size = os.path.getsize(fpath)
                            file_list.append(fpath)
                        # This file is at image_offset, length file_size
                        data_locations += _valto_8_bytes(image_offset)
                        data_locations += _valto_4_bytes(file_size)
                        tile_count += 1
                        if first_tile:
                            print('First Tile Info: [' + ','.join(['%02X' % i for i in data_locations]) + ']')
                            print('(0x%016X, 0x%08X)' % (image_offset, file_size))
                            first_tile = False
                        data_location_address += u64_size + u32_size
                        # Update the image_offset
                        image_offset += file_size
    print('Header Length is 0x%08X' % len(header))
    print('First tile expected at 0x%08X' % (len(header) + len(data_locations)))
    print('Tile Count is %d (c.f. %d)' % (tile_count, number_of_files))
    print('')
    # --- pass 3: write the archive, splitting at file_size_limit ---
    gemf_f = open(output_file, 'wb')
    if 'add-uid' in options:
        gemf_f.write(Random.get_random_bytes(16))
    gemf_f.write(bytes(header))
    gemf_f.write(bytes(data_locations))
    file_size = len(header) + len(data_locations)
    index = 0
    for fn in file_list:
        if os.path.exists(fn):
            this_file_size = os.path.getsize(fn)
        else:
            if 'allow-empty' in options:
                this_file_size = 0
            else:
                raise IOError('Could not find file %s' % fn)
        if (file_size + this_file_size) > file_size_limit:
            # roll over to the next continuation file
            gemf_f.close()
            index += 1
            gemf_f = open(output_file + '-%d' % index, 'wb')
            file_size = 0
        if os.path.exists(fn):
            tile_f = open(fn, 'rb')
            gemf_f.write(tile_f.read())
            tile_f.close()
            file_size += this_file_size
    gemf_f.close()
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,901 | manimaul/mxmcc | refs/heads/master | /checkpoint.py | #!/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2015, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
'''Store tiling process checkpoints, so successful steps don't have to be repeated
'''
import os
from enum import Enum
from . import config
class OrderedEnum(Enum):
    """Enum base whose members are ordered by their underlying values.

    Comparisons between members of *different* enum classes return
    NotImplemented, matching the recipe in the stdlib enum docs.
    """
    def __ge__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._value_ >= other._value_

    def __gt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._value_ > other._value_

    def __le__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._value_ <= other._value_

    def __lt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._value_ < other._value_
class CheckPoint(OrderedEnum):
    """Ordered stages of the chart tiling pipeline, NOT_STARTED..PUBLISHED."""
    CHECKPOINT_NOT_STARTED, \
    CHECKPOINT_CATALOG, \
    CHECKPOINT_TILE_VERIFY, \
    CHECKPOINT_MERGE, \
    CHECKPOINT_OPT, \
    CHECKPOINT_ENCRYPTED, \
    CHECKPOINT_ARCHIVE, \
    CHECKPOINT_METADATA, \
    CHECKPOINT_PUBLISHED = range(9)

    @classmethod
    def fromstring(cls, str):
        """Parse a member name (case-insensitive); unknown names map to
        CHECKPOINT_NOT_STARTED.  (Parameter name shadows the builtin but
        is kept for interface compatibility.)"""
        return getattr(cls, str.upper(), CheckPoint.CHECKPOINT_NOT_STARTED)

    @classmethod
    def tostring(cls, val):
        """Return the member name for *val*, or None for non-members.

        Iterates the members themselves rather than vars(cls) as before,
        so methods and other class attributes are never compared.
        """
        for member in cls:
            if member == val:
                return member.name

    def __str__(self):
        return CheckPoint.tostring(self)
class CheckPointStore:
    """Persists per-region / per-profile checkpoints in a
    '{region}:::{profile}:::{checkpoint}' line file under config.catalog_dir."""
    def __init__(self):
        self._p_path = os.path.join(config.catalog_dir, 'checkpoint.txt')
        self.checkpoints = {}
        self._read()

    def _read(self):
        """Load previously saved checkpoints; malformed lines are skipped."""
        if not os.path.exists(self._p_path):
            return
        with open(self._p_path, 'r') as store:
            for line in store.readlines():
                fields = line.strip().split(':::')
                if len(fields) == 3:
                    region, profile, name = fields
                    self._set_checkpoint_internal(region, profile, CheckPoint.fromstring(name))
        # rewrite immediately so the file is normalized after loading
        self._commit()

    def _set_checkpoint_internal(self, region, profile, checkpoint):
        self.checkpoints.setdefault(region, {})[profile] = checkpoint

    def clear_checkpoint(self, region, profile, checkpoint):
        """Record *checkpoint* for region/profile and persist at once."""
        self._set_checkpoint_internal(region, profile, checkpoint)
        self._commit()

    def get_checkpoint(self, region, profile):
        """Return the stored checkpoint, or CHECKPOINT_NOT_STARTED if unknown."""
        profiles = self.checkpoints.get(region)
        if profiles is not None and profile in profiles:
            return profiles[profile]
        return CheckPoint.CHECKPOINT_NOT_STARTED

    def _commit(self):
        """Rewrite the whole checkpoint file from the in-memory state."""
        with open(self._p_path, 'w+') as store:
            for region, profiles in self.checkpoints.items():
                for profile, checkpoint in profiles.items():
                    store.write('{}:::{}:::{}\n'.format(region, profile, CheckPoint.tostring(checkpoint)))
if __name__ == '__main__':
    # ad-hoc smoke test: exercises CheckPoint.__str__ via tostring()
    print(CheckPoint.CHECKPOINT_ARCHIVE)
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,902 | manimaul/mxmcc | refs/heads/master | /wl_filter_list_generator.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2015, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Creates dictionary containing a list of charts for each region based on if they are in defined boundaries
'''
import os
import json
from shapely.geometry import Polygon
from .noaaxml import NoaaXmlReader
from . import config
from . import gdalds
from .region_constants import *
from .search import MapPathSearch
# (lat, lng) boundary polygons for the two Waveyline regions; a chart is
# assigned to every region whose polygon its own outline intersects.
# NOTE(review): WL2's fifth vertex latitude is 27.030423 while its north
# edge uses 28.030423 -- looks like an intentional slanted edge, but
# worth confirming against the source region definitions.
BOUNDARIES = {REGION_WL1: Polygon(((20.653346, -75.816650),
                                   (27.973699, -71.410607),
                                   (27.973699, -81.793212),
                                   (20.653346, -81.793212),
                                   (20.653346, -75.816650))),
              REGION_WL2: Polygon(((20.653346, -75.816661),
                                   (16.445521, -75.816661),
                                   (16.445521, -66.040652),
                                   (28.030423, -66.040652),
                                   (27.030423, -71.381531),
                                   (20.653346, -75.816661)))}

# on-disk cache of the region -> chart-file-list manifest
JSON_PATH = os.path.join(config.wl_meta_dir, 'region_files.json')
def _should_include(region, poly):
    """True when *poly* intersects the boundary polygon of *region*."""
    boundary = BOUNDARIES[region]
    return boundary.intersects(poly)
def get_file_list_region_dictionary(n=True):
    """Return the region -> chart-file-list dictionary, building it on demand.

    n - internal single-retry guard: when True and the manifest file is
        missing, the manifest is generated once and the lookup retried.
    Raises Exception if the manifest cannot be created.
    """
    if os.path.isfile(JSON_PATH):
        with open(JSON_PATH, 'r') as json_file:
            # json.load replaces the original json.loads(read()) round-trip
            return json.load(json_file)
    if n:
        _make_file_list_region_dictionary()
        return get_file_list_region_dictionary(n=False)
    raise Exception('failed to get or create json manifest')
def _make_file_list_region_dictionary():
    """Build the region -> chart-file-list manifest and write it to JSON_PATH.

    WL2 starts with all NOAA region-10 BSB charts; every Waveyline GeoTIFF
    is then tested against each region's boundary polygon and appended to
    the regions it intersects.
    """
    reader = NoaaXmlReader('REGION_10')
    mps = MapPathSearch(config.noaa_bsb_dir, ['kap'], reader.get_map_files())
    matched = {REGION_WL1: [],
               REGION_WL2: mps.file_paths}
    num_matched = 0
    mps = MapPathSearch(config.wavey_line_geotiff_dir, ['tif'])
    # n/o: progress counters for the log output below
    n = 1
    o = len(mps.file_paths)
    for abs_map_path in mps.file_paths:
        map_name = os.path.basename(abs_map_path)
        map_name = map_name[:map_name.rfind('.')]  # remove extension
        print('inspecting', map_name, 'for inclusion', '%s of %s' % (n, o))
        n += 1
        ds = gdalds.get_ro_dataset(abs_map_path)
        wnes, is_north_up = gdalds.dataset_lat_lng_bounds(ds)
        del ds
        # build the chart's outline as a closed (lat, lng) polygon
        west, north, east, south = wnes
        poly = Polygon(((north, west), (north, east), (south, east), (south, west), (north, west)))
        for region in BOUNDARIES.keys():
            if _should_include(region, poly):
                matched[region].append(abs_map_path)
                num_matched += 1
    print('num_matched : ', num_matched)
    print('skipped : ', max(0, o - num_matched))
    with open(os.path.join(JSON_PATH), 'w') as f:
        json.dump(matched, f, indent=2)
if __name__ == '__main__':
    # ad-hoc smoke test: builds (if needed) and prints the region manifest
    print(get_file_list_region_dictionary())
48,903 | manimaul/mxmcc | refs/heads/master | /regions.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This is a database of sorts for nautical chart regions, their providing hydro-graphic offices
and additional information such as a listing of files or description.
'''
import os.path
from . import config
from .noaaxml import NoaaXmlReader
from . import lookups
from . import wl_filter_list_generator
from . import file_name_sanitizer
from .region_constants import *
from .search import MapPathSearch
class _RegionInfo:
    """Value object pairing a region's human-readable description with its
    chart format type ('kap' or 'tif')."""
    def __init__(self, desc, map_type):
        self.description = desc
        self.map_type = map_type
class _RegionDatabase:
def __init__(self):
self.db = {}
self.rdb = {}
self.provider_dirs = {}
def add_provider(self, provider, map_dir):
if provider not in self.db:
self.db[provider] = {}
self.provider_dirs[provider] = map_dir
def add_region(self, provider, region, desc, map_type):
if provider in self.db:
self.db[provider][region] = _RegionInfo(desc, map_type)
self.rdb[region] = provider
def provider_for_region(self, region):
region = region.upper()
if region in self.rdb:
return self.rdb[region]
return None
def get_description(self, provider, region):
if provider in self.db and region in self.db[provider]:
return self.db[provider][region].description
def get_map_type(self, provider, region):
if provider in self.db and region in self.db[provider]:
return self.db[provider][region].map_type
def get_directory_for_provider(self, provider):
if provider in self.provider_dirs:
return self.provider_dirs[provider]
def provider_has_region(self, provider, region):
return provider in self.db and region in self.db[provider]
def is_valid_region(self, region):
return region in self.rdb.keys()
# Chart format types (file extensions of the source raster charts)
map_type_bsb = 'kap'
map_type_geotiff = 'tif'

# Providers (hydrographic offices / agencies the charts come from)
provider_noaa = 'noaa'
provider_faa = 'faa'
provider_brazil = 'brazil'
provider_linz = 'linz'
provider_ukho = 'ukho'
provider_wavey_lines = 'wavey-lines'

# Build the database -- a module-level singleton queried by the helpers below
_db = _RegionDatabase()

# US - NOAA
_db.add_provider(provider_noaa, config.noaa_bsb_dir)
_db.add_region(provider_noaa, REGION_02, 'Block Island RI to the Canadian Border', map_type_bsb)
_db.add_region(provider_noaa, REGION_03, 'New York to Nantucket and Cape May NJ', map_type_bsb)
_db.add_region(provider_noaa, REGION_04, 'Chesapeake and Delaware Bays', map_type_bsb)
_db.add_region(provider_noaa, REGION_06, 'Norfolk VA to Florida including the ICW', map_type_bsb)
_db.add_region(provider_noaa, REGION_07, 'Florida East Coast and the Keys', map_type_bsb)
_db.add_region(provider_noaa, REGION_08, 'Florida West Coast and the Keys', map_type_bsb)
_db.add_region(provider_noaa, REGION_10, 'Puerto Rico and the U.S. Virgin Islands', map_type_bsb)
_db.add_region(provider_noaa, REGION_12, 'Southern California, Point Arena to the Mexican Border', map_type_bsb)
_db.add_region(provider_noaa, REGION_13, 'Lake Michigan', map_type_bsb)
_db.add_region(provider_noaa, REGION_14, 'San Francisco to Cape Flattery', map_type_bsb)
_db.add_region(provider_noaa, REGION_15, 'Pacific Northwest, Puget Sound to the Canadian Border', map_type_bsb)
_db.add_region(provider_noaa, REGION_17, 'Mobile AL to the Mexican Border', map_type_bsb)
_db.add_region(provider_noaa, REGION_22, 'Lake Superior and Lake Huron (U.S. Waters)', map_type_bsb)
_db.add_region(provider_noaa, REGION_24, 'Lake Erie (U.S. Waters)', map_type_bsb)
_db.add_region(provider_noaa, REGION_26, 'Lake Ontario (U.S. Waters)', map_type_bsb)
_db.add_region(provider_noaa, REGION_30, 'Southeast Alaska', map_type_bsb)
_db.add_region(provider_noaa, REGION_32, 'South Central Alaska, Yakutat to Kodiak', map_type_bsb)
_db.add_region(provider_noaa, REGION_34, 'Alaska, The Aleutians and Bristol Bay', map_type_bsb)
_db.add_region(provider_noaa, REGION_36, 'Alaska, Norton Sound to Beaufort Sea', map_type_bsb)
_db.add_region(provider_noaa, REGION_40, 'Hawaiian Islands and U.S. Territories', map_type_bsb)

# BRAZIL NAVY
_db.add_provider(provider_brazil, config.brazil_bsb_dir)
_db.add_region(provider_brazil, REGION_BR, 'Brazil: Guyana to Uruguay', map_type_bsb)

# New Zealand - LINZ
_db.add_provider(provider_linz, config.linz_bsb_dir)
_db.add_region(provider_linz, REGION_NZ, 'New Zealand and South Pacific: Samoa to Ross Sea', map_type_bsb)

# United Kingdom - UKHO
_db.add_provider(provider_ukho, config.ukho_geotiff_dir)
_db.add_region(provider_ukho, REGION_UK1, 'United Kingdom North East Coast to Shetland Islands', map_type_geotiff)
_db.add_region(provider_ukho, REGION_UK2, 'United Kingdom South East Coast and Channel Islands', map_type_geotiff)
_db.add_region(provider_ukho, REGION_UK3, 'United Kingdom North West Coast and Ireland West Coast', map_type_geotiff)
_db.add_region(provider_ukho, REGION_UK4, 'United Kingdom South West Coast and Ireland East Coast - Irish Sea',
               map_type_geotiff)

# Wavey Lines
_db.add_provider(provider_wavey_lines, config.wavey_line_geotiff_dir)
_db.add_region(provider_wavey_lines, REGION_WL1, 'Caribbean West Florida and Bahamas to Long Island', map_type_geotiff)
_db.add_region(provider_wavey_lines, REGION_WL2, 'Caribbean East Turks And Caicos Islands Crooked Island to Dominican Republic', map_type_geotiff)

# FAA
_db.add_provider(provider_faa, config.faa_geotiff_dir)
_db.add_region(provider_faa, REGION_FAA_PLANNING, 'FAA VFR Planning Charts', map_type_geotiff)
_db.add_region(provider_faa, REGION_FAA_SECTIONAL, 'FAA VFR Sectional Charts', map_type_geotiff)
_db.add_region(provider_faa, REGION_FAA_TERMINAL, 'FAA VFR Terminal Charts', map_type_geotiff)
_db.add_region(provider_faa, REGION_FAA_HELICOPTER, 'FAA VFR Helicopter charts', map_type_geotiff)
_db.add_region(provider_faa, REGION_FAA_CARIBBEAN, 'FAA VFR Caribbean Charts', map_type_geotiff)
def description_for_region(region):
    """returns the description for region defined in regions.py"""
    region = region.upper()
    return _db.get_description(_db.provider_for_region(region), region)
def map_type_for_region(region):
    """returns the regions map type file extension eg. tif or kap"""
    region = region.upper()
    return _db.get_map_type(_db.provider_for_region(region), region)
def map_list_for_region(region):
    """returns a list of absolute paths to chart files for queried region

    Each provider stores its charts differently, so the lookup strategy is
    per-provider:
      - noaa:  chart names come from the NOAA XML catalog, then resolved on disk
      - linz / brazil / faa:  plain directory scan via MapPathSearch
      - wavey-lines:  file names sanitized, then a generated region dictionary
      - ukho:  a per-region manifest text file lists chart names; each is
        resolved to a .png (preferred, crest-burned) or .tif

    Raises an Exception for an unknown region or a missing ukho chart file.
    NOTE(review): if the provider matches none of the branches the function
    implicitly returns None rather than raising -- confirm this is intended.
    """
    provider = _db.provider_for_region(region)
    region = region.upper()
    if _db.provider_has_region(provider, region):
        if provider == provider_noaa:
            # chart file names are authoritative in the NOAA XML catalog
            reader = NoaaXmlReader(region)
            mps = MapPathSearch(config.noaa_bsb_dir, [map_type_for_region(region)], reader.get_map_files())
            return mps.file_paths
        elif provider == provider_linz:
            mps = MapPathSearch(config.linz_bsb_dir, [map_type_for_region(region)])
            return mps.file_paths
        elif provider == provider_brazil:
            # brazil mixes geotiff and BSB charts in one directory
            mps = MapPathSearch(config.brazil_bsb_dir, [map_type_geotiff, map_type_bsb])
            return mps.file_paths
        elif provider == provider_wavey_lines:
            file_name_sanitizer.sanitize(config.wavey_line_geotiff_dir)
            return wl_filter_list_generator.get_file_list_region_dictionary()[region]
        elif provider == provider_ukho:
            # region manifest: one chart base name per line
            region_txt = os.path.join(config.ukho_meta_dir, region.upper() + '.txt')
            paths = []
            with open(region_txt, 'r') as manifest:
                for ea in manifest.readlines():
                    # prefer the crest-burned png; fall back to the raw geotiff
                    p = os.path.join(config.ukho_png_dir, ea.strip() + '.png')
                    if os.path.isfile(p):
                        paths.append(p)
                    else:
                        p = os.path.join(config.ukho_geotiff_dir, ea.strip() + '.tif')
                        if os.path.isfile(p):
                            paths.append(p)
                        else:
                            raise Exception('path not found for chart: ' + p)
            return paths
        elif provider == provider_faa:
            # FAA charts are grouped in per-region sub-directories
            map_dir = os.path.join(config.faa_geotiff_dir, region.upper())
            mps = MapPathSearch(map_dir, [map_type_for_region(region)])
            return mps.file_paths
    else:
        raise Exception('unknown region')
def lookup_for_region(region):
    """returns the lookup class for queried region
    see lookups.py which are used to build the region's catalog
    """
    # providers with a dedicated lookup implementation; everything else
    # falls back to the plain BSB header lookup
    special_lookups = {
        provider_ukho: lookups.UKHOLookup,
        provider_wavey_lines: lookups.WaveylinesLookup,
        provider_brazil: lookups.BsbGdalMixLookup,
        provider_faa: lookups.FAALookup,
    }
    factory = special_lookups.get(_db.provider_for_region(region), lookups.BsbLookup)
    return factory()
def provider_for_region(region):
    """returns the provider eg. noaa for queried region"""
    # thin module-level facade over the private _RegionDatabase singleton
    return _db.provider_for_region(region)
def directory_for_provider(provider):
    """returns the directory where chart files live for queried region"""
    # provider keys are stored lower-case
    return _db.get_directory_for_provider(provider.lower())
def is_valid_region(region):
    """returns True or False"""
    # region keys are stored upper-case, so normalize before the lookup
    return _db.is_valid_region(region.upper())
def find_custom_region_path(region):
    """look for a custom regions' directory if this is not a known (invalid) region"""
    for root, dir_names, _ in os.walk(config.map_dir):
        if region in dir_names:
            return os.path.join(root, region)
    return None
48,904 | manimaul/mxmcc | refs/heads/master | /ukho_crest_burner.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2014, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Burns (removes images) previously indexed with computer vision from charts.
This is useful for removing images / logos that the original chart producer requires us to remove
'''
import os
import shlex
import subprocess
from osgeo import gdal
from PIL import Image
from . import config
# file-name template for a matched crest: chart:crest:score:xoff:yoff
match_f_name = "%s:%s:%s:%s:%s.png"  # chart#crest#score#xoff#yoff
matched_crest_dir = os.path.join(config.ukho_meta_dir, 'CRESTS', 'MATCHED')

gdal.AllRegister()  # register all GDAL raster drivers before any translate
def burn(chart_f_name, burn_coord_list):
    """Create a png copy of a ukho geotiff with matched crest images painted over.

    chart_f_name    -- chart base name (no extension)
    burn_coord_list -- list of (crest, score, xoff, yoff) tuples, as produced
                       by build_dictionary()
    Skips the chart entirely when the png already exists (treated as done).
    """
    # chart png to create
    png_path = os.path.join(config.ukho_png_dir, chart_f_name + ".png")
    if os.path.isfile(png_path):
        print('done')
        return
    # chart png to open
    tif_path = os.path.join(config.ukho_geotiff_dir, chart_f_name + ".tif")
    if not os.path.isfile(tif_path):
        print('WARNING: not tif for: ' + chart_f_name)
        return
    # NOTE(review): png_path cannot exist here (the early return above fired
    # otherwise), so this condition is always true -- likely a leftover guard
    if not os.path.isfile(png_path):
        print("creating png with gdal")
        # -----create a png
        command = "gdal_translate -of PNG -expand rgba \"%s\" \"%s\"" % (
            os.path.normpath(tif_path), os.path.normpath(png_path))
        subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE).wait()
        # -----
    if not os.path.isfile(png_path):
        raise Exception('failed to create png: ' + chart_f_name)
    # open png with PIL
    img = Image.open(png_path)
    for dicTuple in burn_coord_list:
        crest, score, xoff, yoff = dicTuple
        print("burning away crest %s" % crest)
        cst = match_f_name % (chart_f_name, crest, score, xoff, yoff)
        cst_path = os.path.join(matched_crest_dir, cst)
        # get width height and bg color of crest
        im = Image.open(cst_path)
        im = im.convert("RGB")
        # top-left pixel is assumed to be the background color -- TODO confirm
        px = im.getpixel((0, 0))
        # burn away rectangle: paste a solid block the size of the crest
        rec = Image.new('RGB', im.size, px)
        img.paste(rec, (int(xoff), int(yoff)))
    # save chart png
    img.save(png_path)
def build_dictionary():
    """Scan the matched-crest directory and group matches per chart.

    Returns a dict keyed by chart base name; each value is a list of
    (crest, score, x_offset, y_offset) string tuples to burn away.
    """
    dictionary = {}
    for f in os.listdir(matched_crest_dir):
        # BUG FIX: strip the '.png' suffix exactly -- str.rstrip('.png')
        # strips a *character set* and would also eat trailing 'p'/'n'/'g'/'.'
        # characters belonging to the name itself
        name = f[:-len('.png')] if f.endswith('.png') else f
        tif, crest, score, xoff, yoff = name.split(":")
        dictionary.setdefault(tif, []).append((crest, score, xoff, yoff))
    return dictionary
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,905 | manimaul/mxmcc | refs/heads/master | /tilesystem.py | #!/usr/bin/env python
# converted to python by (from C# and Java sources):
__author__ = 'Will Kamp'
__email__ = 'will@mxmariner.com'
# most of the credit belongs to:
__credits__ = ['http://msdn.microsoft.com/en-us/library/bb259689.aspx',
'http://www.klokan.cz/projects/gdal2tiles/gdal2tiles.py']
__copyright__ = 'Copyright (c) 2013, Matrix Mariner Inc.\n' + \
'Copyright (c) 2006-2009 Microsoft Corporation. All rights reserved.\n' + \
'Copyright (c) 2008, Klokan Petr Pridal'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
__license__ = 'It\'s not too clear from the original source ?(Public domain)'
'''Microsoft, Google, OpenStreetMap (ZXY) tile system conversion methods to and from:
WGS84 latitude longitude, and EPSG:900913 meter
'''
import numpy
tile_size = 256  # pixels per tile edge
earth_radius = 6378137.  # WGS-84 equatorial radius in meters
earth_circumference = 2. * numpy.pi * earth_radius  # at equator
origin_shift = earth_circumference / 2.  # 20037508.342789244
min_latitude = -85.05112878  # Mercator clamp so the world map is square
max_latitude = 85.05112878
min_longitude = -180.
max_longitude = 180.
inches_per_meter = 39.3701
max_zoom_level = 23  # deepest level of detail supported by the tile system
# Following methods adapted from http://msdn.microsoft.com/en-us/library/bb259689.aspx
def clip(num, min_value, max_value):
    """Clamp *num* into the inclusive range [min_value, max_value].

    Works element-wise on numpy arrays as well as on scalars.
    """
    clamped_low = numpy.maximum(num, min_value)
    return numpy.minimum(clamped_low, max_value)
def map_size(level_of_detail):
    """Map width/height in pixels at the given level of detail (1..23)."""
    pixels = tile_size << level_of_detail
    return float(pixels)
def map_size_tiles(level_of_detail):
    """Map width/height in whole tiles at the given level of detail (1..23)."""
    return int(map_size(level_of_detail) // tile_size)
def ground_resolution(latitude, level_of_detail):
    """Ground resolution (meters per pixel) at a latitude and level of detail.

    latitude        -- decimal degrees, clamped to the Mercator range
    level_of_detail -- 1 (lowest detail) to 23 (highest detail)
    """
    latitude = clip(latitude, min_latitude, max_latitude)
    lat_radians = latitude * numpy.pi / 180.
    return numpy.cos(lat_radians) * 2 * numpy.pi * earth_radius / map_size(level_of_detail)
def map_scale(latitude, level_of_detail, dpi):
    """Map scale denominator at a latitude, level of detail and dpi."""
    meters_per_pixel = ground_resolution(latitude, level_of_detail)
    return meters_per_pixel * dpi / 0.0254  # 0.0254 meters per inch
def lat_lng_to_pixel_xy(latitude, longitude, level_of_detail):
    """Convert WGS-84 lat/lng (decimal degrees) into tile system pixel x,y
    at the given level of detail (1..23)."""
    latitude = clip(latitude, min_latitude, max_latitude)
    longitude = clip(longitude, min_longitude, max_longitude)
    # fractional position on the unit square (Mercator projection)
    x_frac = (longitude + 180.) / 360.
    sin_lat = numpy.sin(latitude * numpy.pi / 180.)
    y_frac = .5 - numpy.log((1. + sin_lat) / (1. - sin_lat)) / (4. * numpy.pi)
    m_size = map_size(level_of_detail)
    pixel_x = int(clip(x_frac * m_size + .5, 0, m_size - 1))
    pixel_y = int(clip(y_frac * m_size + .5, 0, m_size - 1))
    return pixel_x, pixel_y
def lat_lng_to_tile_xy(latitude, longitude, level_of_detail):
    """ZXY tile coordinate containing the given WGS-84 lat/lng."""
    pixel_x, pixel_y = lat_lng_to_pixel_xy(latitude, longitude, level_of_detail)
    return pixel_xy_to_tile_xy(pixel_x, pixel_y)
def pixel_xy_to_lat_lng(x, y, level_of_detail):
    """Convert tile system pixel x,y at a level of detail back into
    WGS-84 lat/lng in decimal degrees."""
    m_size = map_size(level_of_detail)
    # normalize to the unit square centered on (0, 0)
    x_frac = (clip(x, 0, m_size - 1) / m_size) - .5
    y_frac = .5 - (clip(y, 0, m_size - 1) / m_size)
    latitude = 90. - 360. * numpy.arctan(numpy.exp(-y_frac * 2 * numpy.pi)) / numpy.pi
    longitude = 360. * x_frac
    return latitude, longitude
def pixel_xy_to_tile_xy(x, y):
    """Tile x,y of the tile containing pixel x,y.

    BUG FIX: uses floor division. This code was ported from Python 2 where
    ``/`` on ints floored; under Python 3 the original ``/`` produced float
    tile indices (e.g. pixel 640 -> tile 2.5), but tile coordinates are
    integral by definition.
    """
    return x // tile_size, y // tile_size
def tile_xy_to_pixel_xy(tile_x, tile_y):
    """Pixel x,y of the upper-left pixel of the given tile."""
    return tile_size * tile_x, tile_size * tile_y
def level_of_detail_for_pixel_size(latitude, pixel_size):
    """maximal scale down zoom of the pyramid closest to the pixel_size

    Returns the first zoom whose ground resolution is finer than
    ``pixel_size`` (implicitly None when no zoom up to max_zoom_level is).
    """
    for zoom in range(max_zoom_level):
        if pixel_size > ground_resolution(latitude, zoom):
            # BUG FIX: the original compared ``zoom is not 0`` -- identity on
            # ints is implementation-defined and warns on modern CPython.
            # Both branches returned ``zoom`` anyway (0 when zoom == 0,
            # i.e. we don't want to scale up), so a single return suffices.
            return zoom
# Following methods adapted from http://www.klokan.cz/projects/gdal2tiles/gdal2tiles.py
# and changed from TMS pyramid coordinate to ZXY coordinate outputs
def pixels_to_meters(px, py, level_of_detail):
    """
    :param px: X tile system pixels
    :param py: Y tile system pixels
    :param level_of_detail: tile system zoom level
    :return: EPSG:900913 map coordinates
    """
    resolution = ground_resolution(0, level_of_detail)
    return px * resolution - origin_shift, py * resolution - origin_shift
def meters_to_pixels(meters_x, meters_y, level_of_detail):
    """Convert Spherical Mercator EPSG:900913 x,y meters into ZXY pixel coords."""
    resolution = ground_resolution(0, level_of_detail)  # resolution at the equator
    pixel_x = int((meters_x + origin_shift) / resolution)
    pixel_y = int((meters_y + origin_shift) / resolution)
    return tms_to_zxy_coord(pixel_x, pixel_y, level_of_detail)
def meters_to_tile(meters_x, meters_y, level_of_detail):
    """Convert Spherical Mercator EPSG:900913 x,y meters into ZXY tile coords."""
    pixel_x, pixel_y = meters_to_pixels(meters_x, meters_y, level_of_detail)
    return pixel_xy_to_tile_xy(pixel_x, pixel_y)
def meters_to_lat_lng(meters_x, meters_y):
    """Convert Spherical Mercator EPSG:900913 x,y meters to WGS84 lat/lng."""
    lng = (meters_x / origin_shift) * 180.0
    lat_linear = (meters_y / origin_shift) * 180.0
    # undo the Mercator stretch on the latitude axis
    lat = 180 / numpy.pi * (2 * numpy.arctan(numpy.exp(lat_linear * numpy.pi / 180.0)) - numpy.pi / 2.0)
    return lat, lng
# def lat_lng_to_meters(lat, lng):
# """converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:900913
# """
# meters_x = lng * origin_shift / 180.0
# meters_y = numpy.log(numpy.tan((90 + lat) * numpy.pi / 360.0)) / (numpy.pi / 180.0)
#
# meters_y = meters_y * origin_shift / 180.0
# return meters_x, meters_y
#conversions from TMS tile system coordinates
#TMS coordinates originate from bottom left
#ZXY coordinates originate from the top left
def tms_to_zxy_coord(px, py, zoom):
    """Flip a TMS pixel y (bottom-left origin) to ZXY (top-left origin),
    or vice versa; x is unchanged."""
    flipped_y = (2 ** zoom) * tile_size - py - 1
    return px, flipped_y
def tms_tile_to_zxy_tile(tx, ty, zoom):
    """Flip a TMS tile y (bottom-left origin) to ZXY (top-left origin),
    or vice versa; x is unchanged."""
    max_tile_index = 2 ** zoom - 1
    return tx, max_tile_index - ty
def lat_lng_bounds_to_pixel_bounds_res(min_lng, max_lat, max_lng, min_lat, level_of_detail):
    """
    Latitude longitude bounds to tile system pixel bounds
    :param level_of_detail: tile system zoom level
    :return: tile system pixel extents and resolution
    """
    # this seems counter intuitive... tile / pixel 0,0 is top left where as lat long 0,0 is bottom left
    # NOTE(review): (min_lat, min_lng) is passed as (latitude, longitude) to
    # get the west/north corner and (max_lat, max_lng) for east/south --
    # verify this pairing matches how callers order the bounds arguments
    pixel_west, pixel_north = lat_lng_to_pixel_xy(min_lat, min_lng, int(level_of_detail))
    pixel_east, pixel_south = lat_lng_to_pixel_xy(max_lat, max_lng, int(level_of_detail))
    res_x = pixel_east - pixel_west + 1
    res_y = pixel_north - pixel_south + 1
    # dateline wrap: a negative width means the bounds cross 180 degrees, so
    # measure west-to-edge plus edge-to-east instead
    if res_x < 0:
        res_x = (map_size(level_of_detail) - pixel_west) + pixel_east + 2
    return pixel_west, pixel_north, pixel_east, pixel_south, res_x, res_y
def lat_lng_bounds_to_tile_bounds_count(min_lng, max_lat, max_lng, min_lat, level_of_detail):
    """
    Latitude longitude bounds to tile system bounds
    :param level_of_detail: tile system zoom level
    :return: tile bounding extents and count
    """
    # this seems counter intuitive... tile / pixel 0,0 is top left where as lat long 0,0 is bottom left
    # NOTE(review): same (min_lat, min_lng) -> west/north pairing as
    # lat_lng_bounds_to_pixel_bounds_res above -- verify against callers
    tile_west, tile_north = lat_lng_to_tile_xy(min_lat, min_lng, level_of_detail)
    tile_east, tile_south = lat_lng_to_tile_xy(max_lat, max_lng, level_of_detail)
    num_tiles_west_east = tile_east - tile_west + 1
    num_tiles_north_south = tile_north - tile_south + 1
    # dateline wrap: negative count means the bounds cross 180 degrees
    if num_tiles_west_east < 0:
        num_tiles_west_east = (map_size_tiles(level_of_detail) - tile_west) + tile_east + 2
    return tile_west, tile_north, tile_east, tile_south, num_tiles_west_east, num_tiles_north_south
48,906 | manimaul/mxmcc | refs/heads/master | /ukho_remove_duplicates.py | import functools
import os
import shutil
from . import config
from .ukho_xlrd_lookup import stamp
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2014, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Removes duplicate geotiff charts from the ukho collection
'''
def cmp_val(o1, o2):
    """Less-than predicate ranking chart name variants.

    A plain name outranks a suffixed one, and a '_UD' variant outranks '_W';
    returns False when o1 should NOT sort before o2, True otherwise.
    """
    plain_beats_suffixed = '_' in o1 and '_' not in o2
    ud_beats_w = '_UD' in o1 and '_W' in o2
    return not (plain_beats_suffixed or ud_beats_w)
def make_comparator(less_than):
    """Adapt a boolean less-than predicate into a cmp-style 3-way function."""
    def compare(x, y):
        if less_than(x, y):
            return -1
        if less_than(y, x):
            return 1
        return 0
    return compare
def inspect_lst(the_list):
    """Keep the preferred chart of a duplicate group, move the rest aside.

    Sorts the group so the preferred variant is first, then pops and moves
    every other chart's geotiff into the duplicates directory. Mutates
    ``the_list`` in place.
    """
    if len(the_list) > 1:
        # BUG FIX: the original called bare ``sorted(the_list, ...)`` and
        # discarded the result, so the group was never actually ordered --
        # sort in place instead
        the_list.sort(key=functools.cmp_to_key(cmp_val))
        while len(the_list) > 1:
            t = the_list.pop()
            # was print(('moving:', t)) -- a Python 2 to 3 porting artifact
            # that printed a tuple repr
            print('moving:', t)
            if t is not None:
                t = os.path.join(config.ukho_geotiff_dir, t + '.tif')
                shutil.move(t, config.ukho_dup_dir)
def get_dictionary():
    """Group ukho geotiff files by their chart 'stamp' (chart_suffix_panel).

    Returns {stamp: [file names without extension, ...]}; a value with more
    than one entry marks a duplicate group.
    """
    charts = {}
    for ea in os.listdir(config.ukho_geotiff_dir):
        # BUG FIX: strip the '.tif' suffix exactly -- str.rstrip('.tif')
        # strips a *character set* and would also eat trailing
        # 't'/'i'/'f'/'.' characters belonging to the name itself
        f = ea[:-len('.tif')] if ea.endswith('.tif') else ea
        key = stamp(ea)
        charts.setdefault(key, []).append(f)
    return charts
def remove_duplicates():
    """Move every duplicate ukho chart into the duplicates directory."""
    if not os.path.isdir(config.ukho_dup_dir):
        raise Exception('config not setup!, did you run config?')
    for group in get_dictionary().values():
        inspect_lst(group)
def has_duplicates():
    """Return True when any chart stamp maps to more than one file."""
    return any(len(group) > 1 for group in get_dictionary().values())
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,907 | manimaul/mxmcc | refs/heads/master | /__init__.py | __author__ = "Will Kamp"
__copyright__ = "Copyright 2015, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
VERSION = 1
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,908 | manimaul/mxmcc | refs/heads/master | /bsb.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Scans the header portion of a BSB nautical chart, extracts data and offers
convenient methods for accessing chart meta data
'''
import os.path
from re import sub
from . import findzoom
class BsbHeader:
    """Parses the ASCII header of a BSB/KAP nautical chart file and exposes
    its metadata: name, scale, projection, datum, bounding polygon, etc.
    """

    def __init__(self, map_path):
        self.map_path = map_path
        self.updated = None      # chart edition date string (from CED/SE ... ED=)
        self.name = None         # chart title (from BSB/NA=)
        self.lines = []          # raw decoded header lines
        self.poly = []           # 'lat,lng' strings outlining the chart panel
        self.refs = []           # REF/ registration point strings
        self.scale = None        # natural scale denominator (from KNP/SC=)
        self.projection = None   # projection name (from PR=)
        self.units = None        # depth units (from UN=)
        self.datum = None        # geodetic datum (from GD=)
        self._read_header(map_path)

    def _read_header(self, map_path):
        """Read the text header (everything before 0x1A) and populate fields."""
        with open(map_path, 'rb') as map_file:
            for line in map_file:
                if b'\x1A' in line:
                    break  # 0x1A separates the text header from raster data
                line = line.decode('ascii', 'ignore')
                self.lines.append(line)
                if line.find('KNP/SC') > -1:
                    line = line.lstrip('KNP/')
                    values = line.split(',')
                    for val in values:
                        if val.startswith('SC='):
                            self.scale = int(val[3:len(val)])
                        elif val.startswith('PR='):
                            self.projection = val[3:len(val)]
                        elif val.startswith('GD='):
                            self.datum = val[3:len(val)]
                elif line.find('REF/') > -1:
                    ref = sub('REF/[0-9]*,', '', line).rstrip('\r\n')
                    self.refs.append(ref)
                elif line.find('UN=') > -1:
                    li = line.find('UN=') + 3
                    ri = line.find(',', li)
                    self.units = line[li:ri]
                elif line.find('CED/SE') > -1:
                    li = line.find('ED=') + 3
                    ri = li + 11  # date field is a fixed 11 characters
                    self.updated = line[li:ri]
                elif line.find('BSB/NA') > -1:
                    li = line.find('BSB/NA=') + 7
                    ri = line.find(',')
                    self.name = line[li:ri]
                elif line.find('PLY/') > -1:
                    self._read_ply(line)
        # look for per-chart polygon overrides shipped alongside this module
        or_path = os.path.join(os.path.dirname(__file__), 'ply_overrides', self.get_base_filename()[:-4])
        if os.path.isfile(or_path):
            self.poly = []
            # BUG FIX: mode 'rU' was removed in Python 3.11; universal
            # newlines are the default for text mode anyway
            with open(or_path, 'r') as override:
                for line in override:
                    if line.find('PLY/') > -1:
                        self._read_ply(line)
        if len(self.poly) > 0:
            self.poly.append(self.poly[0])  # add first coord to close polygon

    def _read_ply(self, line):
        """Append one PLY/ polygon vertex as a 'lat,lng' string."""
        lat = line.split(',')[1].lstrip(',')
        lon = float(line.split(',')[2])
        ply = lat + ',' + str(lon)
        self.poly.append(ply.rstrip())

    def get_is_valid(self):
        """False for cover sheets and headers that carried no scale."""
        if self.scale is None or 'Cover for Chart' in self.name:
            return False
        return True

    def get_lines(self):
        """Raw header lines as read from the file."""
        return self.lines

    def get_updated(self):
        # NOTE(review): raises AttributeError-style TypeError when the header
        # had no CED/SE line (self.updated is None) -- confirm callers only
        # use this on valid charts
        return self.updated.strip()

    def get_scale(self):
        return self.scale

    def get_zoom(self):
        """Tile zoom level matching this chart's natural scale."""
        if self.scale is None:
            return 0
        return findzoom.get_zoom(self.scale, self.get_center()[1])

    def get_projection(self):
        return self.projection.strip()

    def get_datum(self):
        return self.datum.strip()

    def get_base_filename(self):
        return os.path.basename(self.map_path)

    def get_name(self):
        return self.name.strip().replace('\'', '')

    def get_poly_list(self):
        return self.poly

    def get_outline(self):
        """Polygon as a colon-separated string of 'lat,lng' vertices."""
        outline = ''
        for ply in self.get_poly_list():
            outline += ply + ':'
        return outline.rstrip(':')

    def get_depth_units(self):
        if self.units is None:
            self.units = 'Unknown'
        return self.units

    def crosses_dateline(self):
        """True when the outline has mixed-sign longitudes (spans 180 deg)."""
        lngs = []
        for ll in self.poly:
            lng = ll.split(',')[1]
            lngs.append(float(lng))
        # BUG FIX: was ``len(lngs) is 0`` -- identity comparison on ints is
        # a CPython implementation accident and warns on modern versions
        if len(lngs) == 0:
            return False
        return min(lngs) < 0 < max(lngs)

    def has_duplicate_refs(self):
        """True when any REF/ registration point appears more than once."""
        for ea in self.refs:
            if self.refs.count(ea) > 1:
                return True
        return False

    def get_center(self):
        """Center of the bounding box as (longitude, latitude)."""
        lats = []
        lngs = []
        for ll in self.poly:
            lat, lon = ll.split(',')
            lats.append(float(lat))
            lngs.append(float(lon))
        # BUG FIX: ``is 0`` replaced with ``== 0`` (see crosses_dateline)
        if len(lats) == 0:
            centerlat = 0
        else:
            centerlat = min(lats) + (max(lats) - min(lats)) / 2
        if len(lngs) == 0:
            centerlng = 0
        else:
            centerlng = min(lngs) + (max(lngs) - min(lngs)) / 2
        return centerlng, centerlat

    def print_header(self):
        for line in self.lines:
            print(line.strip())
# if __name__ == '__main__':
# import os
#
# p = 'D:\\mxmcc\\charts\\linz\\BSB_ROOT\\NZ5312\NZ531201.KAP'
# bsbHeader = BsbHeader(p)
# bsbHeader.print_header()
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,909 | manimaul/mxmcc | refs/heads/master | /ukho_xlrd_lookup.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2015, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
import os
import re
import xlrd
from . import config
def file_name_decoder(file_name):
    """returns tuple(chart_number, suffix, panel_number)

    Decodes ukho chart file names of the shape
    ``<digits><suffix>-<panel>[-<part>][_<variant>].<ext>``; a missing
    suffix is reported as '-'.
    """
    file_name = file_name[:file_name.rfind('.')]
    fn = os.path.basename(file_name)
    ri = len(fn)
    # raw string: bare '\D' is a deprecated invalid-escape in modern Python
    non_digit = re.search(r'\D', fn)
    if non_digit is not None:
        ri = non_digit.start()
    chart_number = fn[0:ri].lstrip('0')
    hyphen = fn.find('-')
    suffix = fn[ri:hyphen]
    if suffix == '':
        suffix = '-'
    last_index = fn.find('_')
    # BUG FIX: '==' not 'is' -- identity comparison on ints is
    # implementation-defined and warns on modern CPython
    if last_index == -1:
        last_index = len(fn)
    panel_number = fn[hyphen + 1:last_index]
    # sometimes there is an additional dash if the chart is broken up into multiple files
    extra_dash = panel_number.find('-')
    if extra_dash != -1:
        panel_number = panel_number[0:extra_dash]
    return chart_number, suffix, panel_number
def stamp(file_name):
    """Unique lookup key for a chart file, built from its decoded name parts."""
    decoded = file_name_decoder(file_name)
    return stamp_from_detail(*decoded)
def stamp_from_detail(chart_number, suffix, panel_number):
    """Join chart number, suffix and panel number into a 'c_s_p' key string."""
    parts = (str(chart_number), str(suffix), str(panel_number))
    return '_'.join(parts)
def _lat_lng_dmm_to_ddd(lat_dmm, lng_dmm):
"""converts lat,lng in degree-decimal minutes
to decimal degrees.
ex. latDmm, lngDmm = ('-28 59.803', '048 06.998')
latDmm, lngDmm = ('28.204', -000 34.086') """
lat_deg, lat_min = lat_dmm.split(' ')
lng_deg, lng_min = lng_dmm.split(' ')
if lat_deg.startswith('-'):
lat = float(lat_deg) - (float(lat_min) / 60)
else:
lat = float(lat_deg) + (float(lat_min) / 60)
if lng_deg.startswith('-'):
lng = float(lng_deg) - (float(lng_min) / 60)
else:
lng = float(lng_deg) + (float(lng_min) / 60)
return lat, lng
# noinspection PyMethodMayBeStatic
class Data:
    """Metadata for one UKHO chart panel: identity, title, scale, depth
    units, edition date and outline vertices, with optional manually
    curated override polygons read from the 'ukho_overrides' directory."""

    def __init__(self, chart_number, suffix, panel_number, name, scale, depth_units):
        self.chart_number = chart_number
        self.suffix = suffix
        self.panel_number = panel_number
        self.name = name
        self.scale = int(scale)
        self.depth_units = depth_units
        self.updated = None  # edition date string, filled in later via set_updated()
        self.coords = []  # (lat, lng) outline vertices appended by the spreadsheet reader
        self.override_coords = []  # manual replacement outline, when an override file exists
        or_name = str(self.chart_number) + '-' + str(self.panel_number)
        override_path = os.path.join(os.path.dirname(__file__), 'ukho_overrides', or_name)
        if os.path.isfile(override_path):
            print('using ply override coordinates', or_name)
            self.override_coords = self._get_override_coords(override_path)

    def _get_override_coords(self, path_to_override):
        """Read 'lat,lng' degree-decimal-minute lines; return a closed polygon."""
        coords = []
        with open(path_to_override, 'r') as override:
            for line in override.readlines():
                lat, lng = line.strip().split(',')
                coords.append(_lat_lng_dmm_to_ddd(lat, lng))
        coords.append(coords[0])  # close the polygon
        return coords

    def _get_coords(self):
        """The override polygon when present, otherwise the spreadsheet outline."""
        if self.override_coords:
            return self.override_coords
        return self.coords

    def get_center(self):
        """(lng, lat) midpoint of the outline's bounding box; (0, 0) when empty."""
        lats = []
        lngs = []
        for lat, lng in self._get_coords():
            lats.append(lat)
            lngs.append(lng)
        # truthiness instead of the original `len(x) is 0`: identity tests on
        # int literals are unreliable (and emit SyntaxWarning on Python 3.8+)
        if not lats:
            centerlat = 0
        else:
            centerlat = min(lats) + (max(lats) - min(lats)) / 2
        if not lngs:
            centerlng = 0
        else:
            centerlng = min(lngs) + (max(lngs) - min(lngs)) / 2
        return centerlng, centerlat

    def set_updated(self, updated):
        self.updated = updated

    def get_name(self):
        return self.name

    def get_zoom(self):
        # we don't know the path to the data set at this point to be able to
        # calculate the zoom from true scale
        raise NotImplementedError('handle get_zoom in lookups.UKHOLookup')

    def get_scale(self):
        return self.scale

    def get_updated(self):
        return self.updated

    def get_depth_units(self):
        return self.depth_units

    def get_outline(self):
        """Outline serialised as 'lat,lng:lat,lng:...' (empty string when no coords)."""
        return ':'.join(str(lat) + ',' + str(lng) for lat, lng in self._get_coords())

    def get_is_valid(self):
        return True
# noinspection PyBroadException
class MetaLookup:
    """Index of UKHO chart metadata keyed by stamp ('chart_suffix_panel').

    Built from two spreadsheets (paths supplied by config): the quarterly
    extract (titles, panels, edition dates, depth-unit codes) and the
    chart-data workbook (outline vertices).  All bare `except:` clauses
    were narrowed to `except Exception:` so Ctrl-C / SystemExit are no
    longer swallowed; the best-effort skip-bad-rows behaviour is kept.
    """

    def __init__(self):
        xls_path = config.ukho_quarterly_extract
        self.xls = xlrd.open_workbook(xls_path)
        self.charts = {}  # stamp string -> Data
        self.depth_codes = self._read_depth_codes()
        self._read_charts()
        self._read_panels()
        self._read_editions()
        self._read_coords()

    def _read_depth_codes(self):
        """Map depth-unit code -> unit name from the 'Depth Units' sheet."""
        dcodes = []
        sheet = self.xls.sheet_by_name('Depth Units')
        for n in range(sheet.nrows):
            try:
                code = str(int(sheet.row(n)[0].value))
                dunit = str(sheet.row(n)[1].value)
                dcodes.append((code, dunit))
            except ValueError:  # sometimes UKHO adds a header :\
                pass
        return dict(dcodes)

    def _read_charts(self):
        """Create a Data entry (panel '0') for each '---' row of 'Charts & Titles'."""
        sheet = self.xls.sheet_by_name('Charts & Titles')
        for n in range(sheet.nrows):
            if n > 0:
                prefix = str(sheet.row(n)[0].value).strip()
                if prefix != '---':
                    continue
                chart_number = str(int(sheet.row(n)[1].value))
                suffix = sheet.row(n)[2].value
                panel_number = '0'
                name = re.sub(r'\s+', ' ', sheet.row(n)[4].value).replace('\'', '')
                scale = str(int(sheet.row(n)[6].value))
                depth_code = str(int(sheet.row(n)[8].value))
                try:
                    depth_unit = self.depth_codes[depth_code].upper().replace('/', ' AND ')
                except Exception:  # narrowed from bare 'except:'; unknown code falls through
                    depth_unit = 'UNKNOWN'
                d = Data(chart_number, suffix, panel_number, name, scale, depth_unit.strip())
                s = stamp_from_detail(chart_number, suffix, panel_number)
                self.charts[s] = d

    def _read_panels(self):
        """Create a Data entry for each '---' row of the 'Panels' sheet."""
        sheet = self.xls.sheet_by_name('Panels')
        for n in range(sheet.nrows):
            if n > 0:
                prefix = str(sheet.row(n)[0].value).strip()
                if prefix != '---':
                    continue
                chart_number = str(int(sheet.row(n)[1].value))
                suffix = sheet.row(n)[2].value
                panel_number = str(int(sheet.row(n)[3].value))
                name = re.sub(r'\s+', ' ', sheet.row(n)[4].value).replace('\'', '')
                scale = str(int(sheet.row(n)[5].value))
                depth_code = str(int(sheet.row(n)[7].value))
                try:
                    depth_unit = self.depth_codes[depth_code].upper().replace('/', ' AND ')
                except Exception:  # narrowed from bare 'except:'; unknown code falls through
                    depth_unit = 'UNKNOWN'
                d = Data(chart_number, suffix, panel_number, name, scale, depth_unit.strip())
                s = stamp_from_detail(chart_number, suffix, panel_number)
                self.charts[s] = d

    def _read_editions(self):
        """Attach the edition date ('M/D/YYYY') to every panel of each chart."""
        sheet = self.xls.sheet_by_name('Edition date & latest NM')
        for n in range(sheet.nrows):
            if n > 0:
                prefix = str(sheet.row(n)[0].value).strip()
                if prefix != '---':
                    continue
                chart_number = str(int(sheet.row(n)[1].value))
                try:
                    year, month, day, _, __, ___ = xlrd.xldate_as_tuple(sheet.row(n)[3].value, self.xls.datemode)
                    edition = '%s/%s/%s' % (month, day, year)
                    for data in self.charts.values():
                        if data.chart_number == chart_number:
                            data.set_updated(edition)
                except Exception:  # narrowed from bare 'except:'; rows without a date are skipped
                    pass

    def _read_coords(self):
        """Read outline vertices (degree-decimal-minutes) from the chart-data
        workbook into each Data entry, then close every polygon."""
        xls_path = config.ukho_chart_data
        xls = xlrd.open_workbook(xls_path)
        sheet = xls.sheet_by_name('Chart Vertices')
        data_to_close_coords = set()
        for n in range(sheet.nrows):
            if n > 0:
                prefix = str(sheet.row(n)[0].value).strip()
                if prefix != '---':
                    continue
                try:
                    chart_number = str(int(sheet.row(n)[1].value))
                    suffix = str(sheet.row(n)[2].value).strip()
                    panel_number = str(int(sheet.row(n)[3].value))
                    lat_cell = str(sheet.row(n)[5].value)
                    i = max(0, lat_cell.find('.') - 2)  # split 'DDMM.MMM' just before the minutes
                    lat_deg = lat_cell[:i]
                    if lat_deg == '-' or lat_deg == '':
                        lat_deg += '0'
                    lat_min = lat_cell[i:]
                    lng_cell = str(sheet.row(n)[6].value)
                    i = max(0, lng_cell.find('.') - 2)
                    lng_deg = lng_cell[:i]
                    if lng_deg == '-' or lng_deg == '':
                        lng_deg += '0'
                    lng_min = lng_cell[i:]
                    lat_dmm = lat_deg + ' ' + lat_min
                    lng_dmm = lng_deg + ' ' + lng_min
                    s = stamp_from_detail(chart_number, suffix, panel_number)
                    self.charts[s].coords.append(_lat_lng_dmm_to_ddd(lat_dmm, lng_dmm))
                    data_to_close_coords.add(self.charts[s])
                except Exception:  # narrowed from bare 'except:'; malformed rows are skipped
                    pass
        # close polygons
        for ea in data_to_close_coords:
            ea.coords.append(ea.coords[0])

    def get_data(self, tif_path):
        """Return the Data entry for the chart file at *tif_path* (KeyError if unknown)."""
        s = stamp(tif_path)
        return self.charts[s]
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,910 | manimaul/mxmcc | refs/heads/master | /region_constants.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2015, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
# Region identifier constants used as dictionary keys across the build
# pipeline (catalogs, filter lists, manifests).

# WL regions — meaning of the 'WL' prefix is not evident from this file
REGION_WL1 = 'REGION_WL1'
REGION_WL2 = 'REGION_WL2'
# UK regions (presumably UK Hydrographic Office chart sets — confirm)
REGION_UK1 = 'REGION_UK1'
REGION_UK2 = 'REGION_UK2'
REGION_UK3 = 'REGION_UK3'
REGION_UK4 = 'REGION_UK4'
# Numbered regions matching NOAA's regional RNC catalogs (see noaaxml.xml_urls)
REGION_02 = 'REGION_02'
REGION_03 = 'REGION_03'
REGION_04 = 'REGION_04'
REGION_06 = 'REGION_06'
REGION_07 = 'REGION_07'
REGION_08 = 'REGION_08'
REGION_10 = 'REGION_10'
REGION_12 = 'REGION_12'
REGION_13 = 'REGION_13'
REGION_14 = 'REGION_14'
REGION_15 = 'REGION_15'
REGION_17 = 'REGION_17'
REGION_22 = 'REGION_22'
REGION_24 = 'REGION_24'
REGION_26 = 'REGION_26'
REGION_30 = 'REGION_30'
REGION_32 = 'REGION_32'
REGION_34 = 'REGION_34'
REGION_36 = 'REGION_36'
REGION_40 = 'REGION_40'
# BR / NZ — presumably Brazil and New Zealand chart sets; confirm against usage
REGION_BR = 'REGION_BR'
REGION_NZ = 'REGION_NZ'
# FAA aeronautical chart products (see faa_fetch)
REGION_FAA_PLANNING = 'REGION_FAA_PLANNING'
REGION_FAA_SECTIONAL = 'REGION_FAA_SECTIONAL'
REGION_FAA_TERMINAL = 'REGION_FAA_TERMINAL'
REGION_FAA_HELICOPTER = 'REGION_FAA_HELICOPTER'
REGION_FAA_CARIBBEAN = 'REGION_FAA_CARIBBEAN'
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,911 | manimaul/mxmcc | refs/heads/master | /tiles_opt.py | #!/usr/bin/env python
###############################################################################
# Copyright (c) 2010, Vadim Shlyakhov
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#******************************************************************************
import re
import shutil
import logging
import itertools
import sys
import os
from .config import png_nq_binary
from subprocess import *
from .tilesystem import tile_size
from PIL import Image
# progress-dot settings: counter() prints one '.' every `tick_rate` calls
tick_rate = 50
tick_count = 0
try:
    import multiprocessing  # available in python 2.6 and above

    # raised by worker processes so a Ctrl-C propagates out of the pool
    class KeyboardInterruptError(Exception):
        pass
# NOTE(review): bare 'except:' also catches SystemExit/KeyboardInterrupt here;
# 'except ImportError:' would be the precise guard
except:
    multiprocessing = None
def data_dir():
    """Directory of the running script (the first entry on sys.path)."""
    script_dir = sys.path[0]
    return script_dir
def set_nothreads():
    """Force parallel_map to run serially by discarding the multiprocessing module."""
    global multiprocessing
    multiprocessing = None
def parallel_map(func, iterable):
    """Map *func* over *iterable*, using a multiprocessing pool when available.

    Falls back to a serial map when multiprocessing is disabled (see
    set_nothreads) or when there are fewer than two items.  Accepts any
    iterable — the original required len(), which generators don't have.
    Always returns a list.
    """
    items = list(iterable)  # materialise once so len() and reuse are safe
    if multiprocessing is None or len(items) < 2:
        return list(map(func, items))
    # map in parallel
    mp_pool = multiprocessing.Pool()  # multiprocessing pool
    try:
        return mp_pool.map(func, items)
    finally:
        # wait for threads to finish even if map() raised
        mp_pool.close()
        mp_pool.join()
def ld(*parms):
    """Debug-log the repr of each argument, space separated."""
    logging.debug(' '.join(repr(item) for item in parms))
def ld_nothing(*_unused):
    """No-op stand-in for ld(); swallows all arguments."""
    pass
def pf(*parms, **kparms):
    """Print the arguments space separated to stdout and flush immediately.

    Keyword arg 'end' (default: newline) is appended, mirroring print();
    any other keyword arguments are ignored.
    """
    end = kparms.get('end', '\n')  # dict.get instead of the 'in'-then-index dance
    sys.stdout.write(' '.join(map(str, parms)) + end)
    sys.stdout.flush()
def pf_nothing(*_unused, **_kw_unused):
    """No-op stand-in for pf(); swallows all arguments."""
    pass
def flatten(two_level_list):
    """Collapse an iterable of iterables into a single flat list."""
    return [element for sub in two_level_list for element in sub]
# win32 named-pipe support is permanently disabled; the flag is kept for
# any legacy call sites that still test it
#try:
#    import win32pipe
#except:
#    win32pipe=None
win32pipe = False
def if_set(x, default=None):
    """Return x unless it is None, in which case return default."""
    if x is None:
        return default
    return x
def path2list(path):
    """Split *path* into its components, the extension last.

    'a/b/c.txt' -> ['a', 'b', 'c', '.txt'].  Absolute paths keep the root
    as the first element; the original looped forever on them, because
    os.path.split('/') returns ('/', '') so head never became empty.
    """
    head, ext = os.path.splitext(path)
    split = [ext]
    while head:
        head, part = os.path.split(head)
        if part:
            split.append(part)
        else:
            # head is now the filesystem root (e.g. '/'): keep it and stop
            split.append(head)
            break
    split.reverse()
    return split
def command(params, child_in=None):
    """Run an external program, feeding *child_in* to its stdin, and return
    its stdout.  Raises Exception (with stderr attached) on a non-zero exit."""
    quoted = ['"%s"' % item if ' ' in item else item for item in params]
    cmd_str = ' '.join(quoted)
    ld('>', cmd_str, child_in)
    process = Popen(params, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
    child_out, child_err = process.communicate(child_in)
    if process.returncode != 0:
        raise Exception("*** External program failed: %s\n%s" % (cmd_str, child_err))
    ld('<', child_out, child_err)
    return child_out
def dest_path(src, dest_dir, ext='', template='%s'):
    """Build a destination path for *src*: apply *template* to the bare file
    name, append *ext*, and place the result in *dest_dir* (or next to the
    source when dest_dir is falsy)."""
    src_dir, src_file = os.path.split(src)
    base = os.path.splitext(src_file)[0]
    dest = (template % base) + ext
    target_dir = dest_dir if dest_dir else src_dir
    if target_dir:
        dest = '%s/%s' % (target_dir, dest)
    ld(base, dest)
    return dest
def re_sub_file(fname, subs_list):
    """Stream-edit *fname* in place, applying each (pattern, replacement)
    regex pair to every line; writes via a temp file that then replaces
    the original."""
    new = fname + '.new'
    # 'r' instead of the original 'rU': the 'U' mode flag was removed in
    # Python 3.11 (universal newlines are the default anyway)
    with open(new, 'w') as out, open(fname, 'r') as src:
        for line in src:
            for pattern, repl in subs_list:
                line = re.sub(pattern, repl, line)
            out.write(line)
    shutil.move(new, fname)
def counter():
    """Advance the global progress counter; print a '.' every tick_rate
    calls.  Returns True on the calls where a dot was printed."""
    global tick_count
    tick_count += 1
    ticked = tick_count % tick_rate == 0
    if ticked:
        pf('.', end='')
    return ticked
def optimize_png(src, dst, dpath):
    """Colour-quantise the png tile *src* into directory *dpath* using the
    external pngnq utility.  Hidden files (leading dot) are skipped."""
    png_tile = os.path.basename(src)
    if png_tile.startswith('.'):
        return
    command([png_nq_binary, '-s1', '-g2.2', '-n', str(tile_size), '-e', '.png', '-d', dpath, src])
def to_jpeg(src, dst, dpath):
    """Re-encode *src* as an optimised quality-75 jpeg next to *dst*
    (same base name, '.jpg' extension).  *dpath* is unused but kept for
    signature parity with optimize_png."""
    target = os.path.splitext(dst)[0] + '.jpg'
    Image.open(src).save(target, optimize=True, quality=75)
# NOTE(review): re-definition — shadows the KeyboardInterruptError declared in
# the multiprocessing try-block above; one of the two is redundant
class KeyboardInterruptError(Exception): pass
def proc_file(f):
    """Process one tile file *f* (relative path) from the module-global
    src_dir into dst_dir: quantise pngs, copy everything else, and tick
    the progress counter."""
    try:
        source = os.path.join(src_dir, f)
        target = os.path.join(dst_dir, f)
        target_dir = os.path.split(target)[0]
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        if f.lower().endswith('.png'):
            optimize_png(source, target, target_dir)
        else:
            shutil.copy(source, target_dir)
        counter()
    except KeyboardInterrupt:
        # http://jessenoller.com/2009/01/08/multiprocessingpool-and-keyboardinterrupt/
        pf('got KeyboardInterrupt')
        raise KeyboardInterruptError()
def optimize_dir(directory):
    """Optimise every tile under *directory* into '<directory>.opt',
    preserving the relative layout.  Raises Exception when the destination
    directory already exists."""
    global src_dir
    global dst_dir
    src_dir = directory
    dst_dir = src_dir + '.opt'
    pf('%s -> %s ' % (src_dir, dst_dir), end='')
    if os.path.exists(dst_dir):
        raise Exception('Destination already exists: %s' % dst_dir)
    # collect the relative path of every file under src_dir
    try:
        previous_cwd = os.getcwd()
        os.chdir(src_dir)
        src_lst = flatten([os.path.join(path, name) for name in files]
                          for path, dirs, files in os.walk('.'))
    finally:
        os.chdir(previous_cwd)
    parallel_map(proc_file, src_lst)
if __name__ == '__main__':
    # ad-hoc manual run against a hard-coded local path; not part of the pipeline
    optimize_dir('/Volumes/USB-DATA/mxmcc/tiles/unmerged/region_08/4148_1')
48,912 | manimaul/mxmcc | refs/heads/master | /manifestjson.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2014, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
import json
import hashlib
import os
from . import config
import time
from .zdata import get_zdat_epoch
# root URL prefix for published manifest links; empty string yields relative paths
BASE_URL = ''
def merge_manifest(json_path_old, json_path_new, json_path_result):
    """Merge the 'regions' entries of the new manifest into the old one and
    write the combined manifest to *json_path_result*.

    Entries present in both take the new manifest's value.  File handles
    are now closed deterministically (the original leaked all three).
    """
    with open(json_path_old, 'r') as f:
        json_old = json.load(f)
    with open(json_path_new, 'r') as f:
        json_new = json.load(f)
    json_old['regions'].update(json_new['regions'])
    with open(json_path_result, 'w') as f:
        json.dump(json_old, f, indent=2)
def get_time_stamp(epoch=None, local=False):
    """Format *epoch* seconds as 'TS_YYYY-MM-DD_T_HH_MM'.

    epoch: seconds since the epoch; defaults to the current time.  The
    original used `epoch=int(time.time())` as the default, which is
    evaluated once at import time, so every default-argument call got the
    module-load timestamp instead of "now".
    local: use local time when True, UTC otherwise.
    """
    if epoch is None:
        epoch = int(time.time())
    if local:
        struct_time = time.localtime(epoch)
    else:
        struct_time = time.gmtime(epoch)
    return time.strftime('TS_%Y-%m-%d_T_%H_%M', struct_time)
def checksum(abs_path):
    """Return the SHA-1 hex digest of the file at *abs_path*."""
    with open(abs_path, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()
def generate(data=None, base_url=BASE_URL):
    """Timestamp-rename each compiled REGION gemf/zdat pair in
    config.compiled_dir and record it in the manifest dict, which is also
    written to manifest.json there.

    data: an existing manifest dict to extend (must be manifest_version 1)
    or None to start a fresh one.  Returns the manifest dict.
    """
    if data is None:
        data = {'manifest_version': 1, 'regions': {}}
    elif data['manifest_version'] != 1:  # '!=' not 'is not': identity tests on ints are unreliable
        raise Exception('Invalid data')
    for ea in os.listdir(config.compiled_dir):
        if ea.endswith('gemf'):
            region_ts = ea[:ea.find('.')]
            region = region_ts[region_ts.find('REGION'):]
            abs_path_data_org = os.path.join(config.compiled_dir, region_ts + '.zdat')
            abs_path_gemf_org = os.path.join(config.compiled_dir, ea)
            # the zdat's embedded epoch drives the timestamp prefix
            epoch = int(get_zdat_epoch(abs_path_data_org))
            ts = get_time_stamp(epoch, local=True)
            gemf_name = ts + '_' + ea
            data_name = ts + '_' + region_ts + '.zdat'
            abs_path_gemf = os.path.join(config.compiled_dir, gemf_name)
            abs_path_data = os.path.join(config.compiled_dir, data_name)
            os.rename(abs_path_gemf_org, abs_path_gemf)
            os.rename(abs_path_data_org, abs_path_data)
            print(region)
            data['regions'][region] = {'gemf_url': base_url + '/' + gemf_name,
                                       'data_url': base_url + '/' + data_name,
                                       'gemf_checksum': checksum(abs_path_gemf),
                                       'data_checksum': checksum(abs_path_data),
                                       'size_bytes': os.path.getsize(abs_path_gemf),
                                       'epoch': epoch}
    abs_path_json = os.path.join(config.compiled_dir, 'manifest.json')
    if os.path.exists(abs_path_json):
        os.remove(abs_path_json)
    with open(abs_path_json, 'w') as f:
        json.dump(data, f, indent=2)
    return data
def generate_chart_manifest(base_url=BASE_URL):
    """Timestamp-rename every compiled .mbtiles chart in config.compiled_dir
    and write chart_manifest.json describing name, url, checksum, size and
    epoch.  Returns the manifest dict."""
    data = {'manifest_version': 1, 'charts': []}
    for entry in os.listdir(config.compiled_dir):
        if not entry.endswith('mbtiles'):
            continue
        chart_name = entry[:entry.find('.')]
        original_path = os.path.join(config.compiled_dir, entry)
        epoch = os.path.getmtime(original_path)
        renamed = get_time_stamp(epoch, local=True) + '_' + entry
        renamed_path = os.path.join(config.compiled_dir, renamed)
        os.rename(original_path, renamed_path)
        data['charts'].append({'name': chart_name,
                               'url': base_url + '/' + renamed,
                               'check_sum': checksum(renamed_path),
                               'size_bytes': os.path.getsize(renamed_path),
                               'epoch': epoch})
    manifest_path = os.path.join(config.compiled_dir, 'chart_manifest.json')
    if os.path.exists(manifest_path):
        os.remove(manifest_path)
    with open(manifest_path, 'w') as f:
        json.dump(data, f, indent=2)
    return data
def revert():
    """Undo generate(): strip the 'TS_...' timestamp prefix from renamed
    gemf/zdat files in config.compiled_dir, restoring '<REGION>.<ext>'."""
    for ea in os.listdir(config.compiled_dir):
        # parenthesised: the original `'TS_' in ea and a or b` parsed as
        # `('TS_' in ea and a) or b`, so un-prefixed .zdat files matched too
        if 'TS_' in ea and (ea.endswith('gemf') or ea.endswith('zdat')):
            region_ts = ea[:ea.find('.')]
            region = region_ts[region_ts.find('REGION'):]
            ext = ea[ea.find('.'):]
            print(ext, region, ea)
            os.rename(os.path.join(config.compiled_dir, ea), os.path.join(config.compiled_dir, region + ext))
if __name__ == '__main__':
    # revert()
    # generate()
    # ad-hoc manual repair: merge a broken manifest with the current one
    o = os.path.join(config.compiled_dir, 'manifest_broke.json')
    n = os.path.join(config.compiled_dir, 'manifest.json')
    r = os.path.join(config.compiled_dir, 'manifest_result.json')
    merge_manifest(o, n, r)
48,913 | manimaul/mxmcc | refs/heads/master | /search.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
import os
class MapPathSearch:
    """Recursive search for chart files below a directory.

    After construction, file_paths holds the full path of every file whose
    extension (case-insensitive, without the dot) is in map_extensions.
    When include_only is supplied, only files whose exact name appears in
    that collection are kept.  A message is printed (and file_paths left
    empty) when *directory* is not a directory.
    """

    def __init__(self, directory, map_extensions=('kap', 'tif'), include_only=None):
        """directory: root directory to walk
        map_extensions: iterable of extensions, e.g. ('kap', 'tif');
            the default is now a tuple — the original used a mutable list
            default, a classic Python pitfall
        include_only: optional collection of exact file names to keep,
            e.g. {'file1.kap', 'file2.tif'}
        """
        self.file_paths = []
        extensions = {ext.upper() for ext in map_extensions}
        if include_only is not None:
            include_only = set(include_only)
        if not os.path.isdir(directory):
            print(directory, 'is not a directory.')
            return
        for root, dirs, files in os.walk(directory):
            for f in files:
                dot = f.rfind(".")
                if dot <= 0:  # no extension, or a dot-file like '.kap'
                    continue
                if f[dot + 1:].upper() not in extensions:
                    continue
                if include_only is not None and f not in include_only:
                    continue
                self.file_paths.append(os.path.join(root, f))
if __name__ == '__main__':
    # smoke-test entry point only
    print("foo")
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,914 | manimaul/mxmcc | refs/heads/master | /noaaxml.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Downloads noaa product catalog xml by region name and retrieves listing of chart files in the catalog
'''
from urllib import request
import os
from . import config
from xml.dom import minidom
xml_urls = {'NOAA_ALL': 'http://www.charts.noaa.gov/RNCs/RNCProdCat_19115.xml',
#'DISTRICT_01': 'http://www.charts.noaa.gov/RNCs/01CGD_RNCProdCat_19115.xml',
#'DISTRICT_05': 'http://www.charts.noaa.gov/RNCs/05CGD_RNCProdCat_19115.xml',
#'DISTRICT_07': 'http://www.charts.noaa.gov/RNCs/07CGD_RNCProdCat_19115.xml',
#'DISTRICT_08': 'http://www.charts.noaa.gov/RNCs/08CGD_RNCProdCat_19115.xml',
#'DISTRICT_09': 'http://www.charts.noaa.gov/RNCs/09CGD_RNCProdCat_19115.xml',
#'DISTRICT_11': 'http://www.charts.noaa.gov/RNCs/11CGD_RNCProdCat_19115.xml',
#'DISTRICT_13': 'http://www.charts.noaa.gov/RNCs/13CGD_RNCProdCat_19115.xml',
#'DISTRICT_14': 'http://www.charts.noaa.gov/RNCs/14CGD_RNCProdCat_19115.xml',
#'DISTRICT_17': 'http://www.charts.noaa.gov/RNCs/17CGD_RNCProdCat_19115.xml',
'REGION_02': 'http://www.charts.noaa.gov/RNCs/02Region_RNCProdCat_19115.xml',
'REGION_03': 'http://www.charts.noaa.gov/RNCs/03Region_RNCProdCat_19115.xml',
'REGION_04': 'http://www.charts.noaa.gov/RNCs/04Region_RNCProdCat_19115.xml',
'REGION_06': 'http://www.charts.noaa.gov/RNCs/06Region_RNCProdCat_19115.xml',
'REGION_07': 'http://www.charts.noaa.gov/RNCs/07Region_RNCProdCat_19115.xml',
'REGION_08': 'http://www.charts.noaa.gov/RNCs/08Region_RNCProdCat_19115.xml',
'REGION_10': 'http://www.charts.noaa.gov/RNCs/10Region_RNCProdCat_19115.xml',
'REGION_12': 'http://www.charts.noaa.gov/RNCs/12Region_RNCProdCat_19115.xml',
'REGION_13': 'http://www.charts.noaa.gov/RNCs/13Region_RNCProdCat_19115.xml',
'REGION_14': 'http://www.charts.noaa.gov/RNCs/14Region_RNCProdCat_19115.xml',
'REGION_15': 'http://www.charts.noaa.gov/RNCs/15Region_RNCProdCat_19115.xml',
'REGION_17': 'http://www.charts.noaa.gov/RNCs/17Region_RNCProdCat_19115.xml',
'REGION_22': 'http://www.charts.noaa.gov/RNCs/22Region_RNCProdCat_19115.xml',
'REGION_24': 'http://www.charts.noaa.gov/RNCs/24Region_RNCProdCat_19115.xml',
'REGION_26': 'http://www.charts.noaa.gov/RNCs/26Region_RNCProdCat_19115.xml',
'REGION_30': 'http://www.charts.noaa.gov/RNCs/30Region_RNCProdCat_19115.xml',
'REGION_32': 'http://www.charts.noaa.gov/RNCs/32Region_RNCProdCat_19115.xml',
'REGION_34': 'http://www.charts.noaa.gov/RNCs/34Region_RNCProdCat_19115.xml',
'REGION_36': 'http://www.charts.noaa.gov/RNCs/36Region_RNCProdCat_19115.xml',
'REGION_40': 'http://www.charts.noaa.gov/RNCs/40Region_RNCProdCat_19115.xml',
#'AK_N': 'http://www.charts.noaa.gov/RNCs/36Region_RNCProdCat_19115.xml',
#'AK_S': 'http://www.charts.noaa.gov/RNCs/34Region_RNCProdCat_19115.xml',
#'CT': 'http://www.charts.noaa.gov/RNCs/CT_RNCProdCat_19115.xml',
#'GA': 'http://www.charts.noaa.gov/RNCs/GA_RNCProdCat_19115.xml',
#'IL': 'http://www.charts.noaa.gov/RNCs/IL_RNCProdCat_19115.xml',
#'MA': 'http://www.charts.noaa.gov/RNCs/MA_RNCProdCat_19115.xml',
#'MI': 'http://www.charts.noaa.gov/RNCs/MI_RNCProdCat_19115.xml',
#'NC': 'http://www.charts.noaa.gov/RNCs/NC_RNCProdCat_19115.xml',
#'NV': 'http://www.charts.noaa.gov/RNCs/NV_RNCProdCat_19115.xml',
#'OR': 'http://www.charts.noaa.gov/RNCs/OR_RNCProdCat_19115.xml',
#'PR': 'http://www.charts.noaa.gov/RNCs/PR_RNCProdCat_19115.xml',
#'TX': 'http://www.charts.noaa.gov/RNCs/TX_RNCProdCat_19115.xml',
#'WA': 'http://www.charts.noaa.gov/RNCs/WA_RNCProdCat_19115.xml',
#'AL': 'http://www.charts.noaa.gov/RNCs/AL_RNCProdCat_19115.xml',
#'DE': 'http://www.charts.noaa.gov/RNCs/DE_RNCProdCat_19115.xml',
#'HI': 'http://www.charts.noaa.gov/RNCs/HI_RNCProdCat_19115.xml',
#'IN': 'http://www.charts.noaa.gov/RNCs/IN_RNCProdCat_19115.xml',
#'MD': 'http://www.charts.noaa.gov/RNCs/MD_RNCProdCat_19115.xml',
#'MN': 'http://www.charts.noaa.gov/RNCs/MN_RNCProdCat_19115.xml',
#'NH': 'http://www.charts.noaa.gov/RNCs/NH_RNCProdCat_19115.xml',
#'NY': 'http://www.charts.noaa.gov/RNCs/NY_RNCProdCat_19115.xml',
#'PA': 'http://www.charts.noaa.gov/RNCs/PA_RNCProdCat_19115.xml',
#'RI': 'http://www.charts.noaa.gov/RNCs/RI_RNCProdCat_19115.xml',
#'VA': 'http://www.charts.noaa.gov/RNCs/VA_RNCProdCat_19115.xml',
#'WI': 'http://www.charts.noaa.gov/RNCs/WI_RNCProdCat_19115.xml',
#'CA': 'http://www.charts.noaa.gov/RNCs/CA_RNCProdCat_19115.xml',
#'FL': 'http://www.charts.noaa.gov/RNCs/FL_RNCProdCat_19115.xml',
#'ID': 'http://www.charts.noaa.gov/RNCs/ID_RNCProdCat_19115.xml',
#'LA': 'http://www.charts.noaa.gov/RNCs/LA_RNCProdCat_19115.xml',
#'ME': 'http://www.charts.noaa.gov/RNCs/ME_RNCProdCat_19115.xml',
#'MS': 'http://www.charts.noaa.gov/RNCs/MS_RNCProdCat_19115.xml',
#'NJ': 'http://www.charts.noaa.gov/RNCs/NJ_RNCProdCat_19115.xml',
#'OH': 'http://www.charts.noaa.gov/RNCs/OH_RNCProdCat_19115.xml',
#'PO': 'http://www.charts.noaa.gov/RNCs/PO_RNCProdCat_19115.xml',
#'SC': 'http://www.charts.noaa.gov/RNCs/SC_RNCProdCat_19115.xml',
#'VT': 'http://www.charts.noaa.gov/RNCs/VT_RNCProdCat_19115.xml'
}
#override the NOAA XML file and add these extra charts
chart_additions = {'REGION_06': ['12200_1.KAP', '13003_1.KAP']}
class NoaaXmlReader():
    """Downloads (and caches) the NOAA RNC product-catalog XML for one region
    and extracts the chart (.KAP) file names it describes."""

    def __init__(self, xml_url_key, xml_dir=None):
        """xml_url_key: key into the module-level xml_urls table, e.g. 'REGION_04'
        xml_dir: cache directory for the downloaded XML; defaults to
        config.noaa_meta_dir.  Downloads the catalog only when no cached
        copy exists.
        """
        if xml_dir is None:
            xml_dir = config.noaa_meta_dir
        #chart_covers are not charts and should be skipped
        self.chart_covers = {'12352_8.KAP', '12364_24.KAP', '12372_19.KAP', '13221_2.KAP', '13229_15.KAP',
                             '14786_79.KAP', '14786_80.KAP', '14786_81.KAP', '14786_82.KAP', '14786_83.KAP',
                             '14786_84.KAP', '14786_85.KAP', '14786_86.KAP', '14786_87.KAP', '14786_88.KAP',
                             '14842_45.KAP', '14842_46.KAP', '14842_47.KAP', '14842_48.KAP', '14842_49.KAP',
                             '14842_50.KAP', '14842_51.KAP', '14846_39.KAP', '14846_40.KAP', '14846_41.KAP',
                             '14846_42.KAP', '14846_43.KAP', '14846_44.KAP', '14853_48.KAP', '14853_49.KAP',
                             '14853_50.KAP', '14853_51.KAP', '14853_52.KAP', '14853_53.KAP', '14853_54.KAP',
                             '14886_15.KAP', '14886_16.KAP', '14886_17.KAP', '14886_18.KAP', '14886_19.KAP',
                             '14916_37.KAP', '14916_38.KAP', '14916_39.KAP', '14916_40.KAP', '14916_41.KAP',
                             '14916_42.KAP', '14916_43.KAP', '14926_33.KAP', '14926_34.KAP', '14926_35.KAP',
                             '14926_36.KAP', '14926_37.KAP', '11324_2.KAP', '18423_19.KAP', '18445_17.KAP',
                             '18652_20.KAP', '12285_19.KAP', '12285_18.KAP', '12205_13.KAP', '11451_16.KAP',
                             '11451_17.KAP', '11326_7.KAP'}
        self.problem_charts = {'12206_6.KAP', '5161_1.KAP', '18445_7.KAP', '1116A_1.KAP', '1117A_1.KAP', '18445_8.KAP'}
        #18445_8 is identical to another chart that has feet depth units that we modified the header
        #1116A_1.KAP and 1117A_1.KAP have identical non lease block charts
        self.region = xml_url_key
        xml_url = xml_urls[xml_url_key]
        # cache file name mirrors the remote file name, e.g. '04Region_RNCProdCat_19115.xml'
        self.region_name = xml_url.split('/')[-1]
        xml_file_path = os.path.join(xml_dir, self.region_name)
        if not os.path.isfile(xml_file_path):
            print('retrieving xml from NOAA: ' + self.region_name)
            with open(xml_file_path, "w") as xml:
                req = request.Request(url=xml_url)
                f = request.urlopen(req)
                xml.write(f.read().decode('utf-8'))
        # NOTE(review): handle is consumed later by get_map_files() and is
        # never explicitly closed
        self.xml_file = open(xml_file_path)

    def get_map_files(self):
        """Return the sorted list of .KAP file names in the catalog, minus
        chart covers and known problem charts, plus any manual
        chart_additions for this region."""
        map_files = []
        dom = minidom.parse(self.xml_file)
        for node in dom.getElementsByTagName('EX_Extent'):
            for child_node in node.getElementsByTagName('gco:CharacterString'):
                kap = child_node.toxml()
                # pull the bare file name out of '...file name: XXXX.KAP...'
                kap = kap[kap.find('file name: ')+11:kap.find('.KAP')+4]
                if not (kap in self.chart_covers or kap in self.problem_charts):
                    map_files.append(kap)
        if self.region in chart_additions:
            for chart in chart_additions[self.region]:
                map_files.append(chart)
        map_files.sort()
        return map_files
if __name__ == '__main__':
    # quick manual check: print the chart list for NOAA region 4
    reader = NoaaXmlReader('REGION_04')
    print(reader.get_map_files())
48,915 | manimaul/mxmcc | refs/heads/master | /ukho_filter_list_generator.py | import os
from shapely.geometry import Polygon
from .search import MapPathSearch
from .region_constants import *
from . import config
from . import gdalds
from . import ukho_remove_duplicates
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2014, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Creates (txt) manifests containing a list of charts for each region based on if they are in defined boundaries
'''
# charts excluded from each UK region even when their footprint intersects the boundary
EXCLUDES = {REGION_UK1: {'4102-0.tif', '0245-0.tif', '1121-0.tif', '2724-0.tif', '0245-0.tif'},
            REGION_UK2: {'4102-0.tif', '1123-0.tif', '1179-0.tif', '1178-0.tif'},
            REGION_UK3: {'1123-0.tif'},
            REGION_UK4: {'2182A-0.tif', '2182B-0.tif', '1407-0.tif', '1125-0_UD.tif', '1127-0.tif', '2656-0.tif'}}
# region boundary polygons; vertices are (latitude, longitude) WGS-84 decimal-degree pairs
BOUNDARIES = {REGION_UK1: Polygon(((60.432475243, -6.312084094), (61.7815364, 0.77050051),
                                   (55.232640709, 3.514615661), (53.991841279, -1.970770244),
                                   (55.318427775, -2.913842661), (56.50474705, -4.761153867),
                                   (58.677242823, -4.989699171), (60.432475243, -6.312084094))),
              REGION_UK2: Polygon(((53.991841279, -1.970770244), (55.232640709, 3.514615661),
                                   (51.209816332, 4.979579361), (47.000448924, -4.086128932),
                                   (46.952325482, -8.282052045), (49.475693247, -7.168619247),
                                   (50.806726787, -3.806672546), (51.091490479, -2.355807505),
                                   (52.024815432, -0.61793567), (53.991841279, -1.970770244))),
              REGION_UK3: Polygon(((60.432475243, -6.312084094), (59.562319205, -15.742039595),
                                   (46.723720352, -15.580151947), (46.952325482, -8.282052045),
                                   (49.475693247, -7.168619247), (51.873368713, -8.840058188),
                                   (53.136809109, -7.567761152), (55.213543445, -7.277588144),
                                   (55.982797256, -7.210625142), (56.50474705, -4.761153867),
                                   (58.677242823, -4.989699171), (60.432475243, -6.312084094))),
              REGION_UK4: Polygon(((56.50474705, -4.761153867), (55.982797256, -7.210625142),
                                   (55.213543445, -7.277588144), (53.136809109, -7.567761152),
                                   (51.873368713, -8.840058188), (49.475693247, -7.168619247),
                                   (50.806726787, -3.801092295), (51.094995182, -2.350227255),
                                   (52.024815432, -0.61793567), (53.991841279, -1.970770244),
                                   (55.318427775, -2.913842661), (56.50474705, -4.761153867)))}
def should_include(region, map_name, poly):
    """True when a chart belongs in *region*: not a '40'-prefixed chart, not explicitly
    excluded for the region, and its footprint intersects the region boundary."""
    if map_name.startswith('40'):
        return False
    if map_name in EXCLUDES[region]:
        return False
    return BOUNDARIES[region].intersects(poly)
def populate_previous(bak):
    """Return the lines (trailing newlines kept) of a previously generated manifest file."""
    with open(bak, 'r') as previous_manifest:
        return previous_manifest.readlines()
def compare_previous(this_list, previous_list):
    """Print how many manifest entries differ (symmetric difference) between the
    newly generated list and the previously generated list."""
    delta = sorted(set(this_list) ^ set(previous_list))
    print('difference:', len(delta))
def make_manifest():
    """Build a per-region txt manifest of UKHO charts whose footprints intersect the region
    boundaries, backing up and diffing against any previously generated manifests.

    Raises if duplicate charts are present (run ukho_remove_duplicates first).
    Side effects: renames existing manifests to .bak and writes new ones in config.ukho_meta_dir.
    """
    if ukho_remove_duplicates.has_duplicates():
        raise Exception('duplicate charts detected, run ukho_remove_duplicates first')
    # chart names (newline terminated) collected per region
    matched = {REGION_UK1: [],
               REGION_UK2: [],
               REGION_UK3: [],
               REGION_UK4: []}
    num_matched = 0
    previous = {}
    mps = MapPathSearch(config.ukho_geotiff_dir, ['tif'])
    n = 1
    o = len(mps.file_paths)
    for abs_map_path in mps.file_paths:
        map_name = os.path.basename(abs_map_path)
        map_name = map_name[:map_name.rfind('.')]  # remove extension
        print('inspecting', map_name, 'for inclusion', '%s of %s' % (n, o))
        n += 1
        # build a lat/lng footprint polygon for the chart from its dataset bounds
        ds = gdalds.get_ro_dataset(abs_map_path)
        wnes, is_north_up = gdalds.dataset_lat_lng_bounds(ds)
        west, north, east, south = wnes
        poly = Polygon(((north, west), (north, east), (south, east), (south, west), (north, west)))
        # a chart may be matched into more than one region
        for region in BOUNDARIES.keys():
            if should_include(region, map_name, poly):
                matched[region].append(map_name + '\n')
                num_matched += 1
    print('writing included - ', num_matched)
    for region in matched.keys():
        match_lst = matched[region]
        num = len(match_lst)
        if num > 0:
            print(region, num)
            manifest_path = os.path.join(config.ukho_meta_dir, region + '.txt')
            if os.path.exists(manifest_path):
                # keep the prior manifest as .bak so we can report the delta below
                bak = manifest_path + '.bak'  # '%s_BAK.txt' % time.time()
                try:
                    os.remove(bak)
                except:
                    pass
                os.rename(manifest_path, bak)
                previous[region] = populate_previous(bak)
            with open(manifest_path, 'w+') as manifest:
                # todo: check crest burner to see if we write png to tif
                manifest.writelines(matched[region])
    print('skipped - ', o - num_matched)
    for region in previous.keys():
        print('comparing region:', region, 'with previous generated list')
        compare_previous(matched[region], previous[region])
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,916 | manimaul/mxmcc | refs/heads/master | /logger.py | # !/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2015, Matrix Mariner Inc."
__credits__ = "http://code.google.com/p/tilers-tools/"
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
import inspect
import os
OFF = False
ON = True
stack = True  # False to silence stack info


def log(debug=OFF, *msg):
    """Print *msg* parts when *debug* is truthy, optionally prefixed with the
    calling script name, function, and line number (controlled by module-level `stack`)."""
    if not debug:
        return
    if stack:
        caller = inspect.stack()[1]
        prefix = '%s %s %s ' % (os.path.basename(str(caller[1])), str(caller[3]), str(caller[2]))
    else:
        prefix = ''
    print(prefix + ''.join(' ' + str(part) for part in msg))
48,917 | manimaul/mxmcc | refs/heads/master | /catalog.py | __copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This builds a csv catalog of map information (sorted by scale) as follows:
<path, name, zoom, scale, date, depths, outline>
(csv values are tab separated)
'''
import os
from operator import itemgetter
from . import search
from . import regions
from . import config
from . import lookups
import json
class CatalogReader:
    """Iterable, indexable view of a region catalog stored as a JSON list of chart entries."""

    def __init__(self, catalog_path):
        """Load every entry from the JSON catalog file at *catalog_path*."""
        self._entries = []
        with open(catalog_path, 'r') as fp:
            self._entries = json.load(fp)

    @staticmethod
    def key_set():
        """The set of keys present in every catalog entry."""
        return {"path", "name", "min_zoom", "max_zoom",
                "scale", "date", "depths", "outline"}

    def __iter__(self):
        """Iterate over catalog entries in stored (scale-descending) order."""
        return iter(self._entries)

    def __getitem__(self, index):
        """Random access to a catalog entry."""
        return self._entries[index]
def get_reader_for_region(catalog_name):
    """Return a CatalogReader for the named region.

    :raises Exception: when the region's catalog JSON has not been built yet
    """
    path = os.path.join(config.catalog_dir, '%s.json' % catalog_name.upper())
    if not os.path.isfile(path):
        raise Exception('catalog does not exist: %s' % path)
    return CatalogReader(path)
def build_catalog(region, list_of_map_paths, lookup):
    """Write the region's catalog as <region>.json under config.catalog_dir.

    Entries are built for every valid chart and sorted by scale descending.

    :param region: catalog/region name (used as the file stem)
    :param list_of_map_paths: chart file paths to consider
    :param lookup: per-provider metadata lookup object (see the lookups module)
    """
    catalog_path = os.path.join(config.catalog_dir, region + '.json')
    if os.path.isfile(catalog_path):
        os.remove(catalog_path)
    rows = []
    for map_path in list_of_map_paths:
        if lookup.get_is_valid(map_path):
            row = {"path": map_path,
                   "name": lookup.get_name(map_path),
                   "min_zoom": lookup.get_min_zoom(map_path),
                   "max_zoom": lookup.get_max_zoom(map_path),
                   "scale": lookup.get_scale(map_path),
                   "date": lookup.get_updated(map_path),
                   "depths": lookup.get_depth_units(map_path),
                   "outline": lookup.get_outline(map_path)}
            rows.append(row)
    # sort row items by scale descending and write to catalog
    rows = sorted(rows, key=itemgetter("scale"), reverse=True)
    # fix: the catalog file handle was opened early and never closed/flushed
    with open(catalog_path, 'w') as catalog_file:
        json.dump(rows, catalog_file, indent=2)
def build_catalog_for_region(region):
    """Build the catalog for a known region using its configured chart list and metadata lookup."""
    region_name = region.upper()
    chart_paths = regions.map_list_for_region(region)
    metadata_lookup = regions.lookup_for_region(region)
    build_catalog(region_name, chart_paths, metadata_lookup)
def build_catalog_for_bsb_directory(bsb_dir, name=None):
    """Build a catalog from all KAP charts found under *bsb_dir*.

    :param name: catalog name; defaults to the directory's base name
    """
    kap_search = search.MapPathSearch(bsb_dir, ['kap'])
    if name is None:
        name = os.path.basename(bsb_dir).lower()
    build_catalog(name.upper(), kap_search.file_paths, lookups.BsbLookup())
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,918 | manimaul/mxmcc | refs/heads/master | /config.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This is the global configuration for mxmcc which consists of a
directory structure where map and meta data files (should) live.
Charts and meta data files** need to be placed in their corresponding
directories. It is up to you to obtain the files from their providing
hydro-graphic office.
**With the exception of NOAA xml files which are fetched automatically
as needed.
'''
import os
import time
######################################################################
# EDIT THIS SECTION ONLY##############################################
######################################################################
# Links
linz_web = 'https://www.linz.govt.nz/sea/charts/information-about-charts'
linz_base = 'http://topo.linz.govt.nz/hydro/BSB_BASE.zip'
linz_update = 'http://topo.linz.govt.nz/hydro/BSB_UPDATE.zip'
noaa_web = 'http://www.charts.noaa.gov/RNCs/RNCs.shtml'
noaa_all = 'http://www.charts.noaa.gov/RNCs/All_RNCs.zip'
# png quantizer executable used for tile optimization (must be on PATH)
png_nq_binary = 'pngnq'
# png_nq_binary = 'C:\\pngnq\\pngnqi.exe'
# InputOutput directory
_root_dir = '/charts'
# set to true when rendering a single zoom level and you want the following behavior:
#  - render a (down zoom) layer first
#  - then use anti-aliased image scale down for the final pass to render the target single zoom
use_single_zoom_over_zoom = False
# UKHO specific meta data excel sheets that change every quarter
ukho_quarterly_extract = 'Quarterly Extract of Metadata for Raster Charts Oct 2021.xls'
ukho_source_breakdown = 'Raster supply lists Q3 2021.xlsx'
ukho_chart_data = 'Titles,Scales,Editions,Codes,Projection,Vertices,Shifts Oct 2021.xls'
ukho_chart_dpi = 127
######################################################################
# END EDITABLE SECTION################################################
######################################################################
# YOU DON'T NEED TO EDIT ANYTHING ELSE################################
######################################################################
# chart directories
map_dir = os.path.join(_root_dir, 'charts')
brazil_bsb_dir = os.path.join(map_dir, 'brazil')
linz_bsb_dir = os.path.join(map_dir, 'linz')
noaa_bsb_dir = os.path.join(map_dir, 'noaa')
faa_geotiff_dir = os.path.join(map_dir, 'faa')
ukho_geotiff_dir = os.path.join(map_dir, 'ukho/geotiff')
ukho_dup_dir = os.path.join(map_dir, 'ukho/duplicates')
ukho_png_dir = os.path.join(map_dir, 'ukho/png')
wavey_line_geotiff_dir = os.path.join(map_dir, 'wavey_lines/geotiff')
# finished directory
compiled_dir = os.path.join(_root_dir, 'compiled')
# tile directories
_tile_dir = os.path.join(_root_dir, 'tiles')
merged_tile_dir = os.path.join(_tile_dir, 'merged')
unmerged_tile_dir = os.path.join(_tile_dir, 'unmerged')
# meta data and catalogs
_meta_dir = os.path.join(_root_dir, 'metadata')
catalog_dir = os.path.join(_meta_dir, 'catalogs')
ukho_meta_dir = os.path.join(_meta_dir, 'ukho')
wl_meta_dir = os.path.join(_meta_dir, "wl")
noaa_meta_dir = os.path.join(_meta_dir, 'noaa')
brazil_meta_dir = os.path.join(_meta_dir, 'brazil')
# add corresponding absolute path to ukho meta data excel sheets
ukho_quarterly_extract = os.path.join(ukho_meta_dir, ukho_quarterly_extract)
ukho_source_breakdown = os.path.join(ukho_meta_dir, ukho_source_breakdown)
ukho_chart_data = os.path.join(ukho_meta_dir, ukho_chart_data)
# java encryption source in not publicly published (and not needed for most/unencrypted regions)
java_encryption_src = os.path.join(os.path.dirname(__file__), '../mx-mariner-encryption/src/main/java')
# snapshot of wall-clock time at import; seconds since the Unix epoch
epoch = int(time.time())
# every directory the mxmcc pipeline expects to exist (see check_dirs/setup_dir_structure)
_all_dirs = [_root_dir,
             map_dir,
             _meta_dir,
             ukho_meta_dir,
             wl_meta_dir,
             noaa_meta_dir,
             catalog_dir,
             _tile_dir,
             merged_tile_dir,
             unmerged_tile_dir,
             noaa_bsb_dir,
             linz_bsb_dir,
             brazil_bsb_dir,
             ukho_geotiff_dir,
             faa_geotiff_dir,
             ukho_dup_dir,
             ukho_png_dir,
             wavey_line_geotiff_dir,
             compiled_dir]
def check_dirs():
    """Return True when every directory in the mxmcc structure already exists."""
    return all(os.path.isdir(directory) for directory in _all_dirs)
def setup_dir_structure():
    """Create any missing directories of the mxmcc structure, reporting progress."""
    print('Setting up MXMCC directory structure')
    if not os.path.isdir(_root_dir):
        os.makedirs(_root_dir)
    if not os.path.isdir(_root_dir):
        raise Exception(_root_dir + ' does not exist!')
    for directory in _all_dirs:
        if not os.path.isdir(directory):
            print('creating directory: ' + directory)
            os.makedirs(directory)
    print('MXMCC directory structure is ready :)')
######################################################################
if __name__ == '__main__':
    # running this module directly creates the directory tree
    setup_dir_structure()
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,919 | manimaul/mxmcc | refs/heads/master | /faa_fetch.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2016, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Fetches FAA Raster VFR Charts
'''
import os
import urllib
from .region_constants import *
from urlparse import urlsplit
from . import config
import zipfile
from bs4 import BeautifulSoup
import requests
import re
# per-region FAA chart listings: 'http' is the directory to scrape for zip links,
# 'sources' is populated at import time with the newest link for each chart
faa_regions = {
    REGION_FAA_PLANNING: {
        "http": "http://aeronav.faa.gov/content/aeronav/Grand_Canyon_files/",
        "sources": []
    },
    REGION_FAA_SECTIONAL: {
        "http": "http://aeronav.faa.gov/content/aeronav/sectional_files/",
        "sources": []
    },
    REGION_FAA_TERMINAL: {
        "http": "http://aeronav.faa.gov/content/aeronav/tac_files/",
        "sources": []
    },
    REGION_FAA_HELICOPTER: {
        "http": "http://aeronav.faa.gov/content/aeronav/heli_files/",
        "sources": []
    },
    REGION_FAA_CARIBBEAN: {
        "http": "http://aeronav.faa.gov/content/aeronav/Caribbean/",
        "sources": []
    }
}
excludes = {'VFR_WallPlan_.zip'}  # VFR_WallPlan_##.zip was renamed to US_WallPlan_##.zip
def name_and_number(name):
    """Split a chart zip name into its non-digit part and its embedded edition number.

    :return: (name with digits removed, int of the digits, or 0 when there are none)
    """
    letters = re.sub('[\d]', '', name)
    digits = re.sub('[\D]', '', name)
    number = int(digits) if digits else 0
    return letters, number
def list_links(url, ext=''):
    """Scrape anchor hrefs ending in *ext* from the page at *url*, returned as absolute URLs."""
    markup = requests.get(url).text
    soup = BeautifulSoup(markup, 'html.parser')
    found = list()
    for anchor in soup.find_all('a'):
        href = anchor.get('href')
        if not href.endswith(ext):
            continue
        if href.startswith('/'):
            found.append('http://aeronav.faa.gov/' + href)
        elif href.startswith(url):
            found.append(href)
        else:
            found.append(url + '/' + href)
    return found
def directory_data(region):
    """Map each chart base name in the region's HTTP directory to its newest edition.

    :return: dict of base name -> {'number': edition, 'link': absolute url}
    """
    http_dir = faa_regions[region]['http']
    latest = dict()
    for link in list_links(url=http_dir, ext='.zip'):
        base, number = name_and_number(link.split('/')[-1])
        if base in excludes:
            continue
        current = latest.get(base)
        # keep only the highest-numbered (newest) edition for each chart
        if current is None or current['number'] < number:
            latest[base] = {'number': number, 'link': link}
    return latest
def unzip(source_filename, dest_dir):
    """Extract a zip archive into *dest_dir*, sanitizing each member's directory
    components (drive letters, '.', '..', empty parts are dropped).

    :return: True on success, False when the archive cannot be read or extracted
    """
    try:
        with zipfile.ZipFile(source_filename) as zf:
            for member in zf.infolist():
                words = member.filename.split('/')
                path = dest_dir
                for word in words[:-1]:
                    drive, word = os.path.splitdrive(word)
                    head, word = os.path.split(word)
                    if word in (os.curdir, os.pardir, ''):
                        continue
                    path = os.path.join(path, word)
                zf.extract(member, path)
        return True
    except Exception:  # fix: bare except also swallowed SystemExit/KeyboardInterrupt
        return False
def fetch_region(region):
    """Download and extract every recorded source zip for a known FAA region into
    config.faa_geotiff_dir/<region>; already-downloaded files are skipped.

    NOTE: Python 2 only — uses urllib.urlretrieve and (file-level) urlparse.urlsplit.
    """
    if region in faa_regions:
        for link in faa_regions[region]['sources']:
            p = urlsplit(link)
            file_name = os.path.split(p.path)[1]
            dest_path = os.path.join(config.faa_geotiff_dir, region)
            if not os.path.isdir(dest_path):
                os.makedirs(dest_path)
            dest = os.path.join(dest_path, file_name)
            # skip files we already downloaded on a prior run
            if not os.path.isfile(dest):
                print('retrieving {}'.format(link))
                urllib.urlretrieve(link, dest)
                print('unzipping {}'.format(dest))
                unzip(dest, dest_path)
def fetch_all():
    """Download and extract the charts of every FAA region."""
    for region_key in faa_regions:
        fetch_region(region_key)
# NOTE(review): module-import side effect — scraping the FAA directory listing for
# every region (network requests) to fill in each region's 'sources' links
for each_region in faa_regions:
    dir_data = directory_data(each_region)
    for each_data in dir_data:
        faa_regions[each_region]['sources'].append(dir_data[each_data]['link'])
if __name__ == '__main__':
    # running directly downloads and extracts all regions
    print('downloading regions')
    fetch_all()
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,920 | manimaul/mxmcc | refs/heads/master | /verify.py | #!/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2014, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
'''This verifies tiles were created for every chart in a catalog
'''
import os.path
from PIL import Image
from . import catalog
from . import config
error_message = ''  # module-global accumulator of human-readable verification failures
IGNORED = {'.DS_Store'}  # filesystem noise to skip when scanning tile directories
def _full_transparency(img):
    """Return True when the image has an alpha band that is zero everywhere
    (i.e. the tile is fully transparent); images without alpha return False."""
    bands = img.split()
    if len(bands) < 4:
        return False
    (r, g, b, a) = bands
    lo, hi = a.getextrema()  # min/max of the alpha channel
    return lo == 0 and hi == 0
def _x_dir_has_tiles(x_dir):
    """
    :param x_dir: zxy tile x directory
    :return: True when the directory holds at least one numbered png tile that is
             not fully transparent (fully transparent tile paths are printed)
    """
    for entry in os.listdir(x_dir):
        parts = entry.split('.')
        if len(parts) != 2:
            continue
        stem, extension = parts
        if not (stem.isdigit() and extension.lower() == 'png'):
            continue
        img_path = os.path.join(x_dir, entry)
        if _full_transparency(Image.open(img_path)):
            print(img_path)
        else:
            return True
    return False
def verify_opt(catalog_name, base_dir=config.merged_tile_dir):
    """Compare the un-optimized and optimized ('.opt') tile trees for a region and
    report the counts; True when both hold the same non-empty set of files."""
    un_opt_dir = os.path.join(base_dir, catalog_name)
    opt_dir = un_opt_dir + ".opt"

    def relative_files(root):
        # collect non-hidden file paths relative to root
        found = set()
        for path, dirs, files in os.walk(root):
            rel = path.replace(root, '')
            found.update(os.path.join(rel, f) for f in files if not f.startswith('.'))
        return found

    un_opt_set = relative_files(un_opt_dir)
    opt_set = relative_files(opt_dir)
    i = len(un_opt_set)
    n = len(opt_set)
    print('un-opt dir count:{}'.format(i))
    print('opt dir count:{}'.format(n))
    missing = opt_set ^ un_opt_set
    print('number of missing charts: {} \n {}'.format(len(missing), missing))
    return i == n and i != 0
def verify_catalog(catalog_name):
    """
    :param catalog_name: region name
    :return: True when tiles have been created for every chart in the region's catalog;
             failures are accumulated in the module-global error_message
    """
    result = True
    global error_message
    try:
        reader = catalog.get_reader_for_region(catalog_name)
    except Exception:  # fix: bare except also swallowed SystemExit/KeyboardInterrupt
        error_message = 'error reading catalog'
        return False
    region_tile_dir = os.path.join(config.unmerged_tile_dir, catalog_name)
    if not os.path.isdir(region_tile_dir):
        # fix: return early -- os.listdir below would raise on the missing directory
        error_message += region_tile_dir + ' is not a directory\n'
        return False
    tile_chart_dirs = set(os.listdir(region_tile_dir))
    for chart in reader:
        # a chart's tile directory is its file name minus the extension
        name = os.path.basename(chart['path'])
        name = name[:name.rfind('.')]
        if name not in tile_chart_dirs:
            # fix: append (was '=', which discarded earlier failure messages)
            error_message += name + ' not found in chart directories\n'
            result = False
        else:
            tile_dir = os.path.join(region_tile_dir, name)
            if not verify_tile_dir(tile_dir):
                result = False
    return result
def verify_tile_dir(tile_dir):
    """Return True when *tile_dir* contains at least one numeric zoom directory and every
    x directory within each zoom holds at least one non-transparent tile; failures are
    appended to the module-global error_message."""
    if not os.path.isdir(tile_dir):
        return False
    found_zoom_dirs = []
    # look for a zoom directory
    for z_dir in os.listdir(tile_dir):
        if z_dir.isdigit():
            found_zoom_dirs.append(z_dir)
    global error_message
    # we should have at least one zoom dir
    if len(found_zoom_dirs) > 0:
        # check for tiles
        for z_dir in found_zoom_dirs:
            z_dir = os.path.join(tile_dir, z_dir)
            x_dirs = os.listdir(z_dir)
            # we should have at least one x dir
            if len(x_dirs) == 0:
                error_message += 'zero x directories found in path: ' + z_dir + '\n'
                return False
            # num_tiles = _x_dir_tile_count(os.listdir(os.path.join(z_dir, x_dirs[0])))
            # if num_tiles == 0:
            #     error_message = 'zero tiles in directory path: ' + os.path.join(z_dir, x_dirs[0])
            #     return False
            for x_dir in x_dirs:
                if x_dir in IGNORED:
                    continue
                x_dir = os.path.join(z_dir, x_dir)
                if not _x_dir_has_tiles(x_dir):
                    error_message += 'zero tiles in directory path: ' + os.path.join(z_dir, x_dir) + '\n'
                    return False
    else:
        error_message += 'zero zoom directories found for ' + tile_dir + '\n'
        return False
    return True
def verify(region_lst):
    """Run catalog and opt-tile verification for each named region, printing results
    and any accumulated error messages."""
    for region_name in region_lst:
        region_name = region_name.upper()
        ok = verify_catalog(region_name)
        print(region_name, 'verify:', ok)
        if not ok:
            print(error_message)
        ok = verify_opt(region_name)
        print(region_name, 'verify opt:', ok)
        if not ok:
            print(error_message)
        print('------------------------------')
if __name__ == '__main__':
    # manual spot check of a single region
    # import regions
    # verify(regions._db.db['noaa'].keys())
    verify(['REGION_FAA'])
48,921 | manimaul/mxmcc | refs/heads/master | /ukho_overrides/togpx.py | __author__ = 'william'
template = '''<?xml version="1.0" encoding="utf-8" ?>
<gpx version="1.1" creator="MTCW" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/1" xmlns:gpxx="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd" xmlns:MTCW="http://www.mxmariner.com">
<rte>
<name>override</name>
%s
</rte>
</gpx>
'''
point = '<rtept lat="%s" lon="%s"></rtept>\n'
def _lat_lng_dmm_to_ddd(lat_dmm, lng_dmm):
    """converts lat,lng in degree-decimal minutes
       to decimal degrees.
       ex. latDmm, lngDmm = ('-28 59.803', '048 06.998')
           latDmm, lngDmm = ('28.204', -000 34.086') """
    def convert(dmm):
        degrees, minutes = dmm.split(" ")
        fraction = float(minutes) / 60
        # a '-' prefix means the minutes subtract from (rather than add to) the degrees
        if degrees.startswith('-'):
            return float(degrees) - fraction
        return float(degrees) + fraction
    return convert(lat_dmm), convert(lng_dmm)
# NOTE: Python 2 script (uses the `print` statement below).
# Reads 'lat_dmm,lng_dmm' lines from the override file named below and writes
# an equivalent GPX route to '<name>.gpx'.
name = '2552-0'
inner = ''
with open(name, 'r') as ovr:
    for line in ovr.readlines():
        print line
        lat_dmm, lng_dmm = line.strip().split(',')
        lat, lng = _lat_lng_dmm_to_ddd(lat_dmm, lng_dmm)
        inner += point % (lat, lng)
with open(name+'.gpx', 'w') as gpx:
    gpx.write(template % inner)
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,922 | manimaul/mxmcc | refs/heads/master | /gdalds.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
import math
from osgeo import gdal, osr
import os
'''some convenience methods for information about gdal data sets
'''
def get_ro_dataset(map_path):
    """Open *map_path* as a read-only GDAL dataset.

    :raises Exception: when the file does not exist or GDAL cannot open it
    """
    if not os.path.isfile(map_path):
        raise Exception(map_path + ' does not exist!')
    ds = gdal.Open(map_path, gdal.GA_ReadOnly)
    if ds is None:
        # fix: typo 'openned' and include the offending path in the message
        raise Exception('dataset not opened: ' + map_path)
    return ds
def dataset_get_cutline_geometry(gdal_ds, cutline):
    """return a cutline in WKT geometry with coordinates expressed in dataset source pixel/line coordinates.
       cutline string format example: 48.3,-123.2:48.5,-123.2:48.5,-122.7:48.3,-122.7:48.3,-123.2
                                    : delineated latitude,longitude WGS-84 coordinates (in decimal degrees)
    """
    # ---- create coordinate transform from lat lng to data set coords
    ds_wkt = dataset_get_projection_wkt(gdal_ds)
    ds_srs = osr.SpatialReference()
    ds_srs.ImportFromWkt(ds_wkt)
    wgs84_srs = osr.SpatialReference()
    wgs84_srs.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(wgs84_srs, ds_srs)
    # ---- grab inverted geomatrix
    geotransform = get_geo_transform(gdal_ds)
    inv_geotransform = gdal.InvGeoTransform(geotransform)
    # ---- transform lat long to dataset coordinates, then coordinates to pixel/lines
    polygon_wkt = 'POLYGON (('
    # x_coords = []
    # y_coords = []
    for latlng in cutline.split(':'):
        lat, lng = latlng.split(',')
        # NOTE(review): TransformPoint is called as (lng, lat) here — assumes the
        # GDAL build uses traditional x,y (lon,lat) axis order; verify per GDAL version
        geo_x, geo_y = transform.TransformPoint(float(lng), float(lat))[:2]
        # apply the inverse geotransform to map georeferenced coords -> pixel/line
        px = int(inv_geotransform[0] + inv_geotransform[1] * geo_x + inv_geotransform[2] * geo_y)
        py = int(inv_geotransform[3] + inv_geotransform[4] * geo_x + inv_geotransform[5] * geo_y)
        # x_coords.append(geo_x)
        # y_coords.append(geo_y)
        polygon_wkt += '%d %d,' % (px, py)
    # drop the trailing comma and close the ring
    polygon_wkt = polygon_wkt[:-1] + '))'
    # --- get extents
    # extents = [str(min(x_coords)), str(min(y_coords)), str(max(x_coords)), str(max(y_coords))]  # xmin ymin xmax ymax
    return polygon_wkt
def dataset_get_projection_wkt(gdal_ds):
    """Return the dataset's projection as well-known text, falling back to the
    ground-control-point projection when the main one is empty."""
    wkt = gdal_ds.GetProjectionRef()
    if wkt != '':
        return wkt
    return gdal_ds.GetGCPProjection()
def dataset_get_proj4_srs_declaration(gdal_ds):
    """Return the dataset's spatial reference as a proj4 declaration string."""
    spatial_ref = osr.SpatialReference(dataset_get_projection_wkt(gdal_ds))
    return spatial_ref.ExportToProj4()
def dataset_get_as_epsg_900913(gdal_ds):
    """Return a spherical-mercator (EPSG:900913) proj4 string that reuses the
    dataset's central meridian (+lon_0); defaults to 0 when none is declared."""
    template = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 %s +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null ' \
               '+no_defs'
    lon_0 = '0'
    for token in dataset_get_proj4_srs_declaration(gdal_ds).split(' '):
        token = token.strip()
        if token.startswith('+lon_0='):
            lon_0 = token[7:]  # keep the last occurrence, like a linear scan would
    return template % ('+lon_0=' + lon_0)
def dataset_has_color_palette(gdal_ds):
    """Return True when band 1 of the dataset carries an indexed color table."""
    palette = gdal_ds.GetRasterBand(1).GetRasterColorTable()
    return palette is not None
def dataset_lat_lng_bounds(gdal_ds):
    """Bounding box in WGS-84 decimal degrees.

    :return: ((west, north, east, south), is_north_up)
    """
    return dataset_get_bounds(gdal_ds, epsg=4326)
def dataset_lat_lng_bounds_as_cutline(gdal_ds):
    """Render the dataset's WGS-84 bounding box as a closed 5-point cutline
    string (lat,lng pairs delimited by ':')."""
    (west, north, east, south), _ = dataset_lat_lng_bounds(gdal_ds)
    ring = ((north, west), (north, east), (south, east), (south, west), (north, west))
    return ':'.join('%s,%s' % corner for corner in ring)
def dataset_get_bounds(gdal_ds, epsg=4326):
    """Return the dataset's bounding box in the coordinate system of *epsg*.

    :param gdal_ds: an open gdal dataset
    :param epsg: target EPSG code (default 4326 = WGS-84 decimal degrees)
    :return: ((west, north, east, south), is_north_up)
    """
    out_srs = osr.SpatialReference()
    out_srs.ImportFromEPSG(epsg)
    ds_wkt = dataset_get_projection_wkt(gdal_ds)
    ds_srs = osr.SpatialReference()
    ds_srs.ImportFromWkt(ds_wkt)
    # we need a north up dataset
    ds = gdal.AutoCreateWarpedVRT(gdal_ds, ds_wkt, ds_wkt)
    if ds is None:
        ds = gdal_ds
    geotransform = get_geo_transform(ds)
    transform = osr.CoordinateTransformation(ds_srs, out_srs)
    # geotransform layout: [0] top-left X, [1] w-e pixel resolution, [2] rotation,
    #                      [3] top-left Y, [4] rotation, [5] n-s pixel resolution
    west = geotransform[0]
    east = west + ds.RasterXSize * geotransform[1]
    north = geotransform[3]
    # NOTE(review): y-span uses geotransform[1] (x resolution) rather than [5];
    # this assumes square pixels on the warped VRT — TODO confirm.
    south = north - ds.RasterYSize * geotransform[1]
    east_south = transform.TransformPoint(east, south)[:2]
    east_north = transform.TransformPoint(east, north)[:2]
    west_south = transform.TransformPoint(west, south)[:2]
    west_north = transform.TransformPoint(west, north)[:2]
    north = max(east_north[1], west_north[1])
    # bug fix: was min(east_south[1], east_south[1]) — compared the same corner twice
    south = min(east_south[1], west_south[1])
    east = max(east_north[0], east_south[0])
    west = min(west_north[0], west_south[0])
    gt = get_geo_transform(gdal_ds)
    if gt is None:
        is_north_up = False
    else:
        rotation = get_rotation(gt)
        is_north_up = rotation < .5 or rotation > 359.5
    # min_lng, max_lat, max_lng, min_lat
    return (west, north, east, south), is_north_up
# def get_true_scale(gdal_ds, dpi):
# wnes, is_north_up = dataset_get_bounds(gdal_ds)
# west, north, east, south = wnes
# inches = gdal_ds.RasterXSize / dpi
# meters = inches / 39.3701
# center_x = west + ((east - west) / 2)
# true_scale = cartesian_distance((west, center_x), (east, center_x)) / meters
# return true_scale
def get_true_scale(gdal_ds, dpi):
    """Approximate the chart's true scale from its spherical-mercator width
    and the print resolution *dpi*."""
    (west, north, east, south), _ = dataset_meters_bounds(gdal_ds)
    inches = gdal_ds.RasterXSize / dpi
    paper_meters = inches / 39.3701  # inches -> meters
    if west <= east:
        span_meters = east - west
    else:
        # bounds straddle the antimeridian: add the two arcs to the mercator edge
        span_meters = (20037508.3428 - abs(east)) + (20037508.3428 - abs(west))
    return span_meters / paper_meters
def dataset_meters_bounds(gdal_ds):
    """Bounding box in EPSG:3857 spherical-mercator meters.

    :return: ((west, north, east, south), is_north_up)
    """
    return dataset_get_bounds(gdal_ds, epsg=3857)
def get_geo_transform(gdal_ds):
    """Return a geotransform for *gdal_ds*, preferring one derived from
    ground control points and falling back to the dataset's own transform.

    :param gdal_ds: gdal dataset
    :return: a six-element geotransform
    """
    gt = None
    gcps = gdal_ds.GetGCPs()
    if gcps is not None:
        gt = gdal.GCPsToGeoTransform(gcps)
    if gt is None:
        gt = gdal_ds.GetGeoTransform()
    return gt
def get_rotation(gt):
    """Get the rotation angle from a geotransform, in degrees [0, 360).

    @type gt: C{tuple/list}
    @param gt: geotransform
    @rtype: C{float}
    @return: rotation angle (0 when the transform cannot be evaluated)
    """
    try:
        # bug fix: was math.tanh (hyperbolic tangent); the arc tangent is the
        # correct inverse for recovering an angle from the gt[2]/gt[5] ratio
        return math.degrees(math.atan(gt[2] / gt[5])) % 360
    except (TypeError, ZeroDivisionError, IndexError):
        # was a bare except; degenerate geotransforms fall back to "no rotation"
        return 0
def apply_geo_transform(inx, iny, gt):
    """Apply a geotransform to a pixel/line coordinate.

    @param inx: input x coordinate (double)
    @param iny: input y coordinate (double)
    @param gt: input geotransform (six doubles)
    @return: (outx, outy) output coordinates (two doubles)
    """
    return (gt[0] + inx * gt[1] + iny * gt[2],
            gt[3] + inx * gt[4] + iny * gt[5])
def map_to_pixels(mx, my, gt):
    """Convert map coordinates to pixel coordinates.

    @param mx: input map x coordinate (double)
    @param my: input map y coordinate (double)
    @param gt: input geotransform (six doubles)
    @return: (px, py) output coordinates (two ints)
    @note: 0,0 is the UL corner of the UL pixel; 0.5,0.5 is its centre
    """
    if gt[2] + gt[4] != 0:
        # rotated transform: invert it and push the point through
        px, py = apply_geo_transform(mx, my, gdal.InvGeoTransform(gt))
    else:
        # axis-aligned transform: simple calc, no inversion required
        px = (mx - gt[0]) / gt[1]
        py = (my - gt[3]) / gt[5]
    return int(px), int(py)
if __name__ == '__main__':
    # Ad-hoc manual smoke test: opens a hard-coded local chart and prints its
    # projection and bounds. The paths are developer-machine specific.
    import os
    # p = '/media/william/f4f4cb37-0c77-42fd-b3db-87a626a0c897/macdata/mxmcc/charts/ukho/geotiff/2124.tif'
    p = '/media/william/f4f4cb37-0c77-42fd-b3db-87a626a0c897/macdata/mxmcc/charts/ukho/geotiff/0128-1.tif'
    d = get_ro_dataset(p)
    print("projection wkt = {}".format(dataset_get_projection_wkt(d)))
    print(dataset_get_bounds(d))
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,923 | manimaul/mxmcc | refs/heads/master | /encryption_shim.py | #!/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2015, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
'''Interface to encryption
Java encryption source in not publicly published
'''
import os
import subprocess
import shlex
from . import config
enc_src_pkg = 'com/mxmariner/crypto'
def _verify_java():
    """Return True when every compiled crypto class is present in the java source tree."""
    files = ('FileTreeEncryptor$ProcessFile.class', 'Encryptor.class', 'TokenFactory.class')
    return all(os.path.isfile(os.path.join(config.java_encryption_src, enc_src_pkg, ea))
               for ea in files)
def _make_java_if_needed():
    """Compile the crypto java sources when the .class files are missing.

    Returns True when the compiled classes are present afterwards.
    """
    if _verify_java():
        # nothing to do
        return True
    commands = ('javac %s/FileTreeEncryptor.java' % enc_src_pkg, 'javac %s/TokenFactory.java' % enc_src_pkg)
    for cmd in commands:
        print('running', cmd)
        subprocess.Popen(shlex.split(cmd), cwd=config.java_encryption_src).wait()
        print('complete')
    return _verify_java()
def encrypt_region(region):
    """Encrypt the region's optimized tile tree into <region>.enc.

    Returns True when the output directory exists and holds as many entries
    as the input directory.
    """
    if not _make_java_if_needed():
        return False
    in_dir = os.path.join(config.merged_tile_dir, region + '.opt')
    out_dir = os.path.join(config.merged_tile_dir, region + '.enc')
    cmd = 'java com.mxmariner.crypto.FileTreeEncryptor "%s" "%s"' % (in_dir, out_dir)
    print('running', cmd)
    subprocess.Popen(shlex.split(cmd), cwd=config.java_encryption_src).wait()
    print('complete')
    return os.path.isdir(out_dir) and len(os.listdir(in_dir)) == len(os.listdir(out_dir))
def generate_token(region):
    """Write an unlock token for *region* into the compiled directory.

    Returns True when the java tooling was available and the command was run.
    """
    if not _make_java_if_needed():
        return False
    cmd = 'java com.mxmariner.crypto.TokenFactory "%s" "%s"' % (config.compiled_dir, region)
    print('running', cmd)
    subprocess.Popen(shlex.split(cmd), cwd=config.java_encryption_src).wait()
    print('complete')
    return True
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,924 | manimaul/mxmcc | refs/heads/master | /zdata.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''MX Mariner zdata generator for regions / catalogs'''
import codecs
import os.path
import zipfile
from . import config
from . import catalog
from . import regions
# SQL templates for the MX Mariner metadata database:
# mark an existing region row as installed at a given epoch
upd_fmt = U'UPDATE regions SET installeddate=\'%s\' WHERE name=\'%s\';\n'
# replace a custom region row (delete, then re-insert)
custom_fmt0 = u'DELETE from regions WHERE name=\'%s\';\n'
custom_fmt1 = u'INSERT into [regions] ([name], [description], [image], [size], [installeddate] ) ' \
              u'VALUES (\'%s\', \'%s\', \'%s\', \'%s\', \'%s\');\n'
# wipe a region's chart rows, then one INSERT per chart (see format_entry)
fmt0 = u'DELETE from charts where region=\'%s\';\n'
fmt1 = u'INSERT INTO [charts] ([region], [file], [name], [updated], [scale], [outline], [depths], [zoom]) ' \
       u'VALUES (\'%s\', \'%s\', \'%s\', \'%s\', %s, \'%s\', \'%s\', \'%s\');\n'
def get_zdat_epoch(zdat_path):
    """Read the installeddate value back out of a <region>.zdat archive.

    :param zdat_path: path to the <region>.zdat file
    :return: the installeddate value to be set (first quoted value on the
        second line of the archived SQL)
    """
    # bug fix: the ZipFile and its member stream were never closed (handle leak);
    # context managers guarantee cleanup even on error
    with zipfile.ZipFile(zdat_path, 'r', zipfile.ZIP_DEFLATED) as zdat_file:
        with zdat_file.open(zdat_file.namelist()[0], 'r') as member:
            line = str(member.readlines()[1])
    l = line.find('\'') + 1
    r = line.find('\'', l)
    return line[l:r]
def generate_update():
    """Generate an UPDATE.zdat file covering every rendered (s)gemf region.

    Writes one UPDATE row (latestdate, size) per <region>.gemf found in the
    compiled directory. Does nothing — and creates no files — when there are
    no gemf archives.
    """
    gemf_lst = sorted(os.path.join(config.compiled_dir, ea)
                      for ea in os.listdir(config.compiled_dir) if ea.endswith('gemf'))
    # bug fix: bail out *before* opening any files (the old code used the
    # identity check 'is 0' and leaked an open SQL handle plus a stray empty
    # UPDATE.zdat when there was nothing to do)
    if len(gemf_lst) == 0:
        return
    sql_fname = 'UPDATE.sql'
    sql_path = os.path.join(config.compiled_dir, sql_fname)
    zdat_path = os.path.join(config.compiled_dir, 'UPDATE.zdat')
    print(zdat_path)
    sqlstr = u'update regions set latestdate=\'%s\', size=\'%s\' where name=\'%s\';'
    with open(sql_path, 'w') as sqlf:
        sqlf.write(u'--MXMARINER-DBVERSION:1\n')
        for p in gemf_lst:
            size = str(os.path.getsize(p))
            region = os.path.basename(p)
            region = region[:region.rfind('.')]
            # reuse the install date recorded in the region's own zdat archive
            z_path = os.path.join(config.compiled_dir, region + '.zdat')
            sqlf.write(sqlstr % (get_zdat_epoch(z_path), size, region) + '\n')
    with zipfile.ZipFile(zdat_path, 'w', zipfile.ZIP_DEFLATED) as zdat:
        zdat.write(sql_path, sql_fname)
    os.remove(sql_path)
    print('update written to: ' + zdat_path)
def format_entry(region: str, entry: dict):
    """Render one chart catalog *entry* as a single-line INSERT for the charts table."""
    def san(thing):
        # collapse stray leading/trailing whitespace and newlines riding along in catalog values
        return str(thing).strip()
    values = (region,
              os.path.basename(san(entry['path'])),
              san(entry['name']),
              san(entry['date']),
              san(entry['scale']),
              san(entry['outline']),
              san(entry['depths']),
              san(entry['max_zoom']))
    return fmt1 % values
def generate_zdat_for_catalog(catalog_name, description=None):
    """Generate a <REGION>.zdat metadata archive for a region.

    :param catalog_name: the name of the catalog / region to generate data for
    :param description: if this is a custom catalog / region, its description
    """
    region = catalog_name.upper().strip()
    reader = catalog.get_reader_for_region(catalog_name)
    sql_fname = region + '.sql'
    sql_path = os.path.join(config.compiled_dir, sql_fname)
    zdat_path = os.path.join(config.compiled_dir, region + '.zdat')
    # bug fix: the SQL and zip files were closed manually and leaked on any
    # exception — write the SQL first, then zip it, each in a context manager
    with codecs.open(sql_path, 'w', 'utf-8') as sql_file:
        sql_file.write('--MXMARINER-DBVERSION:3\n')
        if regions.is_valid_region(region):
            sql_file.write(upd_fmt % (config.epoch, region))
            sql_file.write(fmt0 % region)
        else:
            # custom region: (re)create its row in the regions table
            num_bytes = os.path.getsize(os.path.join(config.compiled_dir, region + '.gemf'))
            sql_file.write(custom_fmt0 % region)
            sql_file.write(custom_fmt1 % (region, description, region.lower().replace('_', ''), num_bytes,
                                          config.epoch))
        for entry in reader:
            sql_file.write(format_entry(region, entry))
    with zipfile.ZipFile(zdat_path, 'w', zipfile.ZIP_DEFLATED) as zdat_file:
        zdat_file.write(sql_path, sql_fname)
    os.remove(sql_path)
if __name__ == '__main__':
    # ad-hoc entry point: refresh UPDATE.zdat from the compiled directory
    generate_update()
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,925 | manimaul/mxmcc | refs/heads/master | /compiler.py | #!/usr/bin/env python3
__author__ = "Will Kamp"
__copyright__ = "Copyright 2013, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Development" # "Prototype", "Development", or "Production"
'''This is the wrapper program that ties it all together to complete this set of programs'
task of compiling charts into the MX Mariner format.
'''
import sys
import os
import inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
from mxmcc import regions
from mxmcc import catalog
from mxmcc import tilebuilder
from mxmcc import tilesmerge
from mxmcc import gemf
from mxmcc import zdata
from mxmcc import verify
from mxmcc import tiles_opt
from mxmcc.checkpoint import *
from mxmcc import encryption_shim
import mbutil as mb
import re
import shutil
# Rendering profiles selectable on the command line:
PROFILE_MX_R = 'MX_REGION'  # (default) renders standard MX Mariner gemf + zdat
PROFILE_MB_C = 'MB_CHARTS'  # renders each chart as mbtiles file
PROFILE_MB_R = 'MB_REGION'  # renders entire region as mbtiles file
def _build_catalog(checkpoint_store, profile, region):
    """Build the chart catalog for *region* unless already checkpointed."""
    point = CheckPoint.CHECKPOINT_CATALOG
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('building catalog for:', region)
    if regions.is_valid_region(region):
        catalog.build_catalog_for_region(region)
    else:
        region_dir = regions.find_custom_region_path(region)
        if region_dir is None:
            raise Exception('custom region: %s does not have a directory' % region)
        catalog.build_catalog_for_bsb_directory(region_dir, region)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _create_tiles(checkpoint_store, profile, region):
    """Render tiles for every chart in the region's catalog, then verify them."""
    point = CheckPoint.CHECKPOINT_TILE_VERIFY
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('building tiles for:', region)
    tilebuilder.build_tiles_for_catalog(region)
    # verify every expected tile actually rendered
    if not verify.verify_catalog(region):
        raise Exception(region + ' was not verified... ' + verify.error_message)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _merge_tiles(checkpoint_store, profile, region):
    """Merge per-chart tiles into one region-wide tile tree unless checkpointed."""
    point = CheckPoint.CHECKPOINT_MERGE
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('merging tiles for:', region)
    tilesmerge.merge_catalog(region)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _optimize_tiles(checkpoint_store, profile, region, base_dir=config.merged_tile_dir):
    """Optimize (compress) the region's tiles in place, then verify the result."""
    point = CheckPoint.CHECKPOINT_OPT
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    tiles_opt.optimize_dir(os.path.join(base_dir, region))
    # verify all optimized tiles are there
    if not verify.verify_opt(region, base_dir=base_dir):
        raise Exception(region + ' was not optimized fully')
    checkpoint_store.clear_checkpoint(region, profile, point)
def _should_encrypt(region):
    """Return True when the region's chart provider requires tile encryption."""
    return regions.provider_for_region(region) in (regions.provider_wavey_lines,
                                                   regions.provider_ukho)
def _encrypt_region(checkpoint_store, profile, region):
    """Encrypt the region's optimized tiles unless already checkpointed."""
    print('encrypting tiles for region:', region)
    point = CheckPoint.CHECKPOINT_ENCRYPTED
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    if not encryption_shim.encrypt_region(region):
        raise Exception('encryption failed!')
    checkpoint_store.clear_checkpoint(region, profile, point)
def _create_gemf(checkpoint_store, profile, region):
    """Archive the region's (optionally encrypted) tiles into a gemf file."""
    point = CheckPoint.CHECKPOINT_ARCHIVE
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('archiving gemf for region:', region)
    should_encrypt = _should_encrypt(region)
    # encrypted regions are packed from the .enc tree, plain ones from .opt
    suffix = '.enc' if should_encrypt else '.opt'
    gemf.generate_gemf(region + suffix, add_uid=should_encrypt)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _create_zdat(checkpoint_store, profile, region):
    """Build the region's zdat metadata archive unless already checkpointed."""
    point = CheckPoint.CHECKPOINT_METADATA
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('building zdat metadata archive for:', region)
    zdata.generate_zdat_for_catalog(region)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _fill_tiles(region):
# fill
# print('filling tile \"holes\"', region)
# filler.fill_all_in_region(region)
print(region, 'fill skipped')
def _create_region_mb_tiles(checkpoint_store, profile, region):
    """Archive the whole region's optimized tile tree as one mbtiles file."""
    point = CheckPoint.CHECKPOINT_ARCHIVE
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    print('archiving mbtiles for region:', region)
    region_dir = os.path.join(config.merged_tile_dir, region + '.opt')
    mbtiles_file = os.path.join(config.compiled_dir, region + '.mbtiles')
    # rebuild from scratch: a stale archive would be appended to otherwise
    if os.path.isfile(mbtiles_file):
        os.remove(mbtiles_file)
    mb.disk_to_mbtiles(region_dir, mbtiles_file, format='png', scheme='xyz')
    checkpoint_store.clear_checkpoint(region, profile, point)
def __create_chart_mb_tiles(region):
    """Write one mbtiles archive per chart from the region's unmerged optimized tiles."""
    region_charts_dir = os.path.join(config.unmerged_tile_dir, region + '.opt')
    for chart in os.listdir(region_charts_dir):
        print('archiving mbtiles for chart:', chart)
        chart_dir = os.path.join(region_charts_dir, chart)
        # derive a safe lowercase file name from the chart name
        base_name = re.sub(r'\W+', '_', chart).lower()
        mbtiles_file = os.path.join(config.compiled_dir, base_name + '.mbtiles')
        if os.path.isfile(mbtiles_file):
            os.remove(mbtiles_file)
        mb.disk_to_mbtiles(chart_dir, mbtiles_file, format='png', scheme='xyz')
def _create_chart_mb_tiles(checkpoint_store, profile, region):
    """Checkpoint-guarded wrapper around the per-chart mbtiles archiver."""
    point = CheckPoint.CHECKPOINT_ARCHIVE
    if checkpoint_store.get_checkpoint(region, profile) >= point:
        print('skipping checkpoint', point)
        return
    __create_chart_mb_tiles(region)
    checkpoint_store.clear_checkpoint(region, profile, point)
def _skip_zoom(region):
    """Thin out a chart's zoom pyramid by deleting every other zoom directory.

    Only applies when the chart's two deepest zoom levels are consecutive;
    the deepest level is always kept.
    """
    tile_path = os.path.join(config.unmerged_tile_dir, region)
    for chart in os.listdir(tile_path):
        zoom_levels = []
        for z_dir in os.listdir(os.path.join(tile_path, chart)):
            try:
                zoom_levels.append(int(z_dir))
            except ValueError:
                pass  # ignore non-numeric entries
        zoom_levels.sort(reverse=True)
        if len(zoom_levels) > 1 and zoom_levels[0] - zoom_levels[1] == 1:
            for i, z in enumerate(zoom_levels):
                if i % 2:
                    shutil.rmtree(os.path.join(tile_path, chart, str(z)))
def compile_region(region, profile=PROFILE_MX_R, perform_clean=True):
    """Run the full chart-compilation pipeline for *region*.

    :param region: region name (upper-cased internally)
    :param profile: one of the PROFILE_* constants selecting the output format
    :param perform_clean: when True, delete working tile trees after a
        successful run (past the encryption checkpoint)
    """
    region = region.upper()
    profile = profile.upper()
    checkpoint_store = CheckPointStore()
    # catalog + per-chart tiles are common to every profile
    _build_catalog(checkpoint_store, profile, region)
    _create_tiles(checkpoint_store, profile, region)
    if 'REGION' in profile:
        # region-wide outputs: merge all charts into one tile tree first
        _merge_tiles(checkpoint_store, profile, region)
        _fill_tiles(region)
        _optimize_tiles(checkpoint_store, profile, region)
        if 'MX_' in profile:
            should_encrypt = _should_encrypt(region)
            if should_encrypt:
                _encrypt_region(checkpoint_store, profile, region)
            _create_gemf(checkpoint_store, profile, region)
            _create_zdat(checkpoint_store, profile, region)
        if 'MB_' in profile:
            _create_region_mb_tiles(checkpoint_store, profile, region)
    elif 'CHARTS' in profile and 'MB_' in profile:
        # per-chart mbtiles: thin the zoom pyramid, optimize the unmerged tiles
        _skip_zoom(region)
        _optimize_tiles(checkpoint_store, profile, region, base_dir=config.unmerged_tile_dir)
        _create_chart_mb_tiles(checkpoint_store, profile, region)
    print('final checkpoint', checkpoint_store.get_checkpoint(region, profile))
    if perform_clean and checkpoint_store.get_checkpoint(region, profile) > CheckPoint.CHECKPOINT_ENCRYPTED:
        cleanup(region, config.unmerged_tile_dir)
        cleanup(region, config.merged_tile_dir)
def cleanup(region, base_dir):
    """Empty the working tile directories for *region* under *base_dir*.

    Every top-level entry whose name contains *region* is walked bottom-up:
    files are removed (best-effort), then the now-empty sub-directories.
    The matching top-level directory itself is kept.
    """
    for ea in os.listdir(base_dir):
        if region not in ea:
            continue
        abs_path = os.path.join(base_dir, ea)
        print('clean', abs_path)
        for root, dirs, files in os.walk(abs_path, topdown=False):
            for name in files:
                p = os.path.join(root, name)
                try:
                    os.remove(p)
                except OSError:  # bug fix: was a bare except — keep best-effort, but only for OS errors
                    print('failed to delete', p)
            for name in dirs:
                os.rmdir(os.path.join(root, name))
def print_usage():
    """Print the command-line usage banner."""
    usage = 'usage:\n$python mxmcc.py <region> <optional profile>'
    print(usage)
if __name__ == "__main__":
    # CLI entry point: mxmcc.py <region> [profile]
    if config.check_dirs():
        args = sys.argv
        if len(args) < 2:
            print_usage()
        else:
            rgn = args[1]
            # optional second argument selects the rendering profile
            if len(args) >= 3:
                prof = args[2]
            else:
                prof = PROFILE_MX_R
            compile_region(rgn, prof)
    else:
        print('Your mxmcc directory structure is not ready\n' +
              'Please edit the top portion of config.py, run config.py,\n' +
              'and place charts in their corresponding directories.')
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,926 | manimaul/mxmcc | refs/heads/master | /file_name_sanitizer.py | __author__ = 'Will Kamp'
__copyright__ = 'Copyright 2015, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''Sanitizes chart file names
(allows alpha/numeric, last '.' for file extension, replaces whitespace with '_' and replaces '&' with 'And')
'''
import os
import re
# strips anything outside [_a-zA-Z0-9.] plus every '.' except the last (extension) dot
_PATTERN = '[^_a-zA-Z\d\.]|\.(?=[^.]*\.)'
def _touch(fname, times=None):
    """Create *fname* if it does not exist and set its access/modified times.

    :param fname: path of the file to touch
    :param times: optional (atime, mtime) tuple passed to os.utime; None means "now"
    """
    with open(fname, 'a'):
        os.utime(fname, times)
def _sanitized_name(name):
    """Return *name* with spaces -> '_', '&' -> 'And', and everything except
    word characters plus the final extension dot stripped out."""
    cleaned = name.replace(' ', '_').replace('&', 'And')
    # same expression as the module-level _PATTERN constant
    return re.sub(r'[^_a-zA-Z\d\.]|\.(?=[^.]*\.)', '', cleaned)


def sanitize(root_path):
    """Rename every file and directory under *root_path* to a sanitized name.

    A 'sane' marker file is written at the root so repeated runs are no-ops.

    :param root_path: directory tree to sanitize in place
    :raise Exception: when *root_path* is not a directory
    """
    if not os.path.isdir(root_path):
        # bug fix: the error message was missing the leading space
        raise Exception(root_path + ' is not a directory')
    marker = os.path.join(root_path, "sane")
    if os.path.isfile(marker):
        print('sanitized already complete')
        return
    for root, dirs, files in os.walk(root_path):
        # dirs is mutated in place so os.walk descends into the renamed paths;
        # the duplicated dir/file rename loops were folded into one
        for names in (dirs, files):
            for i, name in enumerate(names):
                new_name = _sanitized_name(name)
                if name != new_name:
                    os.rename(os.path.join(root, name), os.path.join(root, new_name))
                    names[i] = new_name
    # drop the marker (inline equivalent of _touch)
    with open(marker, 'a'):
        os.utime(marker, None)
48,927 | manimaul/mxmcc | refs/heads/master | /findzoom.py | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
'''This will find the optimal zoom level on a zxy tiled map for a BSB chart.
The calculated zoom accounts and compensates for latitude distortion of scales.
'''
import math
from shapely.geometry import Point
from pyproj import Proj
def haversine_distance(origin, destination):
    """Great-circle distance in meters between two (lon, lat) points given in
    decimal degrees, on a sphere of radius 6371 km."""
    lon1, lat1 = origin
    lon2, lat2 = destination
    earth_radius_km = 6371
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    sin_half_dlat = math.sin(dlat / 2)
    sin_half_dlon = math.sin(dlon / 2)
    a = sin_half_dlat * sin_half_dlat + math.cos(math.radians(lat1)) \
        * math.cos(math.radians(lat2)) * sin_half_dlon * sin_half_dlon
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    d = earth_radius_km * c
    return d * 1000  # kilometers -> meters
def cartesian_distance(origin, destination):
    """Planar distance in meters between two (lon, lat) points, measured on
    the spherical-mercator plane."""
    lon1, lat1 = origin
    lon2, lat2 = destination
    # NOTE(review): Proj(init=...) is the legacy pyproj 1.x call style — TODO confirm
    mercator = Proj(init="epsg:3785")  # spherical mercator, should work anywhere
    start = Point(mercator(lon1, lat1))
    end = Point(mercator(lon2, lat2))
    return start.distance(end)  # meters
def latitude_distortion(latitude):
    """Ratio of mercator-plane distance to great-circle distance at *latitude*
    (how much mercator stretches scale at this parallel)."""
    west_point = (0, latitude)
    east_point = (1, latitude)
    return cartesian_distance(west_point, east_point) / haversine_distance(west_point, east_point)
def get_zoom(scale, latitude):
    """Optimal tile zoom level for a chart of *scale* at *latitude*,
    compensating for mercator latitude distortion."""
    return get_zoom_from_true_scale(scale * latitude_distortion(latitude))
def get_zoom_from_true_scale(true_scale):
    """Map a (distortion-corrected) chart scale onto a z/x/y zoom level by
    halving until the tweaked scale drops below 1."""
    zoom = 30
    # tweak_percent = .87
    remaining = true_scale * .70  # empirical tweak factor
    while remaining > 1:
        remaining /= 2
        zoom -= 1
    return zoom
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,928 | manimaul/mxmcc | refs/heads/master | /test_format_entry.py | from unittest import TestCase
from . import zdata
class Test_zdata(TestCase):
    """Unit tests for zdata.format_entry."""

    def test_format_entry(self):
        # values deliberately carry trailing newlines to prove they get stripped
        entry = {
            "path": "/foo/bar/18476_1.KAP",
            "name": "PUGET SOUND SHILSHOLE BAY TO COMMENCEMENT BAY\n",
            "date": "today\n",
            "scale": 10000,
            "outline": "today\n",
            "depths": "FEET\r\n",
            "max_zoom": "1\n"
        }
        sql = zdata.format_entry("REGION_15", entry)
        # bug fix: assertEquals is a deprecated alias removed in Python 3.12
        self.assertEqual(len(sql) - 1, sql.rfind("\n"), "new line should be at the end")
        self.assertEqual(sql.find("\n"), sql.rfind("\n"), "there should only be one new line")
| {"/tilebuilder.py": ["/__init__.py"], "/lookups.py": ["/__init__.py"], "/tilesmerge.py": ["/__init__.py", "/tilesystem.py"], "/gemf.py": ["/__init__.py", "/tilesystem.py"], "/checkpoint.py": ["/__init__.py"], "/wl_filter_list_generator.py": ["/noaaxml.py", "/__init__.py", "/region_constants.py", "/search.py"], "/regions.py": ["/__init__.py", "/noaaxml.py", "/region_constants.py", "/search.py"], "/ukho_crest_burner.py": ["/__init__.py"], "/ukho_remove_duplicates.py": ["/__init__.py", "/ukho_xlrd_lookup.py"], "/bsb.py": ["/__init__.py"], "/ukho_xlrd_lookup.py": ["/__init__.py"], "/tiles_opt.py": ["/config.py", "/tilesystem.py"], "/manifestjson.py": ["/__init__.py", "/zdata.py"], "/noaaxml.py": ["/__init__.py"], "/ukho_filter_list_generator.py": ["/search.py", "/region_constants.py", "/__init__.py"], "/catalog.py": ["/__init__.py"], "/faa_fetch.py": ["/region_constants.py", "/__init__.py"], "/verify.py": ["/__init__.py"], "/encryption_shim.py": ["/__init__.py"], "/zdata.py": ["/__init__.py"], "/test_format_entry.py": ["/__init__.py"], "/filler.py": ["/tilesystem.py", "/regions.py", "/search.py", "/__init__.py"]} |
48,929 | manimaul/mxmcc | refs/heads/master | /filler.py | #!/usr/bin/env python
__author__ = "Will Kamp"
__copyright__ = "Copyright 2014, Matrix Mariner Inc."
__license__ = "BSD"
__email__ = "will@mxmariner.com"
__status__ = "Prototype" # "Prototype", "Development", or "Production"
'''Fills the "holes" (transparent portions) in merged tiles using a 1 up, 6 down search pattern
'''
#TODO: THIS IS IN AN ALPA! STATE
import os
from PIL import Image
from .tilesystem import tile_size
from .regions import is_valid_region
from .search import MapPathSearch
from . import logger
from . import config
MAX_ZOOM_TIMES = 8
STD_ZOOM_TIMES = 6
def fill_all_in_region(region):
    """Walk every merged tile of *region* and fill transparent holes in place."""
    for tile_path in get_tile_list(region):
        tile = MapTile(*get_tile(tile_path))
        tile.fill_if_necessary()
def get_tile(abs_path):
    """Parse a merged-tile path ``.../<z>/<x>/<y>.png`` into ``(z, x, y, tile_dir)``.

    The last three path components give y (file name minus ``.png``), x and z;
    everything before them is returned as the tile directory.
    """
    parts = abs_path.split('/')
    z = x = y = -1
    while (z == -1 or x == -1 or y == -1) and parts:
        piece = parts.pop()
        if y == -1:
            y = int(piece[:-4])     # strip the '.png' suffix
        elif x == -1:
            x = int(piece)
        else:
            z = int(piece)
    return z, x, y, os.path.join('/', *parts)
def get_tile_list(region):
    """Return absolute paths of all merged PNG tiles for *region*.

    Returns an empty list when *region* is not a known region name.
    """
    name = region.upper()
    if not is_valid_region(name):
        return []
    search = MapPathSearch(os.path.join(config.merged_tile_dir, name), ['png'])
    return search.file_paths
def _has_transparency(img):
    """Return True when the RGBA image's alpha channel touches 0 at either extreme."""
    alpha = img.split()[3]
    lo, hi = alpha.getextrema()
    return lo == 0 or hi == 0
def _stack_images(top_img, bottom_img):
if top_img is None and bottom_img is not None:
return bottom_img
if bottom_img is None and top_img is not None:
return top_img
return Image.composite(top_img, bottom_img, top_img)
# noinspection PyProtectedMember
def _stack_overzoom_images(map_tile, top_img, zoom_times):
    """Overlay *top_img* on the over-zoom tile found for *map_tile*, if any."""
    base = map_tile._find_over_zoom_tile_img(STD_ZOOM_TIMES, zoom_times)
    if base is None:
        return top_img
    return _stack_images(top_img, base)
# noinspection PyProtectedMember
class MapTile:
    """One slippy-map tile (z/x/y under *tile_dir*) with hole-filling support.

    Transparent regions are filled from neighbouring zoom levels: first from
    the four under-zoom (z+1) children, then recursively from over-zoom
    (z-1, z-2, ...) ancestors, following the module's 1-up / 6-down pattern.
    """
    def __init__(self, z, x, y, tile_dir):
        self.tile_dir = tile_dir
        # Lazily-loaded RGBA image; populated by _get_image().
        self.image = None
        self.z = int(z)
        self.x = int(x)
        self.y = int(y)
    def fill_if_necessary(self):
        """Overwrite the tile on disk with a filled version when it has holes."""
        if self.has_transparency():
            abs_path = self.get_path()
            logger.log(logger.OFF, 'filling: ' + abs_path)
            self.get_image().save(abs_path)
    def get_image(self):
        """Return the tile image, composited over neighbour-zoom fill if needed."""
        if self.has_transparency():
            bottom = self._find_zoom_tile_img(STD_ZOOM_TIMES, 1)
            if bottom is not None:
                self.image = _stack_images(self._get_image(), bottom)
        return self._get_image()
    def get_path(self):
        """Absolute path of this tile: <tile_dir>/<z>/<x>/<y>.png"""
        return os.path.join(self.tile_dir, str(self.z), str(self.x), str(self.y) + '.png')
    def exists(self):
        return os.path.exists(self.get_path())
    def get_zxy(self):
        return self.z, self.x, self.y
    def has_transparency(self):
        """True if the tile exists on disk and its alpha channel has holes."""
        if not self.exists():
            return False
        return _has_transparency(self._get_image())
    def _get_image(self):
        # Cache the decoded image; always normalized to RGBA.
        if self.image is None:
            self.image = Image.open(self.get_path(), 'r').convert("RGBA")
        return self.image
    def _find_zoom_tile_img(self, max_upper_zoom, zoom_times):
        """Build a fill image: prefer under-zoom children, fall back to ancestors."""
        uz_img = self._find_under_zoom_tile_img()
        if uz_img is not None:
            logger.log(logger.OFF, 'uz_image found')
            if not _has_transparency(uz_img):
                return uz_img
            else:
                # Children only partially cover the tile: back-fill from ancestors.
                oz_img = self._find_over_zoom_tile_img(max_upper_zoom, zoom_times)
                return _stack_images(uz_img, oz_img)
        logger.log(logger.OFF, 'uz_image not found')
        return self._find_over_zoom_tile_img(max_upper_zoom, zoom_times)
    def _find_over_zoom_tile_img(self, max_upper_zoom, zoom_times):
        """Crop-and-upscale the matching region of an ancestor tile, recursing upward."""
        logger.log(logger.OFF, '_find_over_zoom_tile_img zoom_times %d' % zoom_times)
        if zoom_times >= MAX_ZOOM_TIMES:
            logger.log(logger.OFF, 'exceeded 8 times limit')
            return
        upper_zoom = self.z - zoom_times
        diff = abs(self.z - upper_zoom)
        # Size of the ancestor sub-region that maps onto this tile.
        m_tile_size = tile_size >> diff
        logger.log(logger.OFF, '_find_over_zoom_tile_img m_tile_size %d' % m_tile_size)
        upper_tile = MapTile(upper_zoom, self.x >> diff, self.y >> diff, self.tile_dir)
        if upper_tile.exists():
            img = upper_tile._get_image().copy()
            xx = (self.x % (1 << diff)) * m_tile_size
            yy = (self.y % (1 << diff)) * m_tile_size
            #left, upper, right, lower
            img = img.crop((xx, yy, xx + m_tile_size, yy + m_tile_size)) \
                .resize((tile_size, tile_size), Image.NEAREST)
            if _has_transparency(img):
                # NOTE(review): the stacked result below is computed but discarded,
                # so this branch falls through and returns None — looks like it
                # should be `return _stack_overzoom_images(...)`. Confirm intent.
                _stack_overzoom_images(self, img, zoom_times + 1)
            else:
                return img
        elif upper_tile.z >= max_upper_zoom:
            return self._find_over_zoom_tile_img(max_upper_zoom, zoom_times + 1)
        #return self._get_image()
    def _find_under_zoom_tile_img(self):
        """Assemble a fill image from the four z+1 children, scaled down 2x.

        Returns None when no child tile exists on disk.
        """
        have_scale_tile = False
        zoom_in_level = self.z + 1
        diff = abs(self.z - zoom_in_level)
        m_tile_size = tile_size >> diff
        xx = self.x << diff
        yy = self.y << diff
        num_tiles = 1 << diff
        in_tile_paths = []
        for xi in range(num_tiles):
            for yi in range(num_tiles):
                lower_x = xx + xi
                lower_y = yy + yi
                p = os.path.join(self.tile_dir, '%s/%s/%s.png' % (zoom_in_level, lower_x, lower_y))
                if os.path.isfile(p):
                    in_tile_paths.append(p)
                    have_scale_tile = True
                else:
                    in_tile_paths.append(None)
        if have_scale_tile:
            im = Image.new("RGBA", (tile_size, tile_size), (0, 0, 0, 0))
            i = 0
            xoff = 0
            yoff = 0
            # Paste order is column-major: (0,0), (0,1), (1,0), (1,1).
            for in_tile_path in in_tile_paths:
                if i == 1:
                    yoff += m_tile_size
                if i == 2:
                    yoff -= m_tile_size
                    xoff += m_tile_size
                if i == 3:
                    yoff += m_tile_size
                if in_tile_path is not None:
                    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10;
                    # newer Pillow needs Image.LANCZOS / Image.Resampling.LANCZOS.
                    im.paste(Image.open(in_tile_path).resize((m_tile_size, m_tile_size), Image.ANTIALIAS), (xoff, yoff))
                i += 1
            return im
if __name__ == '__main__':
    # Ad-hoc manual entry point: fill the holes of a single hard-coded region.
    # mt = MapTile(10, 152, 365, '/media/aux-drive 180G/mxmcc/tiles/merged/REGION_15/')
    # _has_transparency(mt._get_image())
    fill_all_in_region('region_15')
48,938 | Steguer/SimpleInject | refs/heads/master | /tests/test_service_provider.py | from typing import cast, ForwardRef
import pytest
from simpleinject.service_provider import ServicesManager, CircularReferenceError, ServiceWasNotRegistered
@pytest.fixture(scope='function')
def services_manager():
    """Provide a fresh ServicesManager per test.

    The scope must be passed as a keyword: modern pytest rejects positional
    arguments to the fixture decorator (the old ``pytest.fixture('function')``
    form treats the string as the fixture function).
    """
    return ServicesManager()
class ServiceB(object):
    """Leaf fixture service with no dependencies."""
class ServiceC(object):
    """Second leaf fixture service with no dependencies."""
class ServiceA(object):
    """Fixture service that depends on ServiceB and ServiceC via its
    constructor annotations (which is how the container discovers them)."""
    def __init__(self, b: ServiceB, c: ServiceC):
        # Keep references so tests can verify the injected singletons.
        self.b = b
        self.c = c
def test_injection_self(services_manager: ServicesManager):
    """A service with two dependencies receives the same singletons the container holds."""
    for svc in (ServiceB, ServiceC, ServiceA):
        services_manager.bind(svc, svc)
    services_manager.initialize()
    svc_a = cast(ServiceA, services_manager.services[ServiceA].instance)
    svc_b = cast(ServiceB, services_manager.services[ServiceB].instance)
    svc_c = cast(ServiceC, services_manager.services[ServiceC].instance)
    assert svc_a and svc_b and svc_c
    assert svc_a.b is svc_b
    assert svc_a.c is svc_c
class IService(object):
    """Marker 'interface' used purely as a binding key."""
    def __init__(self):
        pass
class ServiceD(IService):
    """Concrete implementation bound to IService in the tests."""
    def __init__(self):
        super().__init__()
def test_register_service_to_interface(services_manager: ServicesManager):
    """Binding a concrete type to an interface resolves to the concrete type."""
    services_manager.bind(IService, ServiceD)
    services_manager.initialize()
    resolved = services_manager.services[IService].instance
    assert type(resolved) == ServiceD
class ServiceE(object):
    """Half of a direct circular pair: E requires F (forward reference)."""
    def __init__(self, f: ForwardRef("ServiceF")):
        pass
class ServiceF(object):
    """Other half of the direct circular pair: F requires E."""
    def __init__(self, e: ServiceE):
        pass
def test_circular_dependency(services_manager: ServicesManager):
    """A direct E <-> F cycle must abort initialization."""
    for svc in (ServiceE, ServiceF):
        services_manager.bind_self(svc)
    with pytest.raises(CircularReferenceError):
        services_manager.initialize()
# Fixture chain with an indirect cycle: 1 -> 2 -> 3 -> 4 -> 2.
class Service1(object):
    """Entry point of the chain."""
    def __init__(self, f: ForwardRef("Service2")):
        pass


class Service2(object):
    def __init__(self, e: ForwardRef("Service3")):
        pass


class Service3(object):
    def __init__(self, e: ForwardRef("Service4")):
        pass


class Service4(object):
    """Closes the cycle back to Service2."""
    def __init__(self, e: Service2):
        pass
def test_circular_dependency_complex(services_manager: ServicesManager):
    """An indirect cycle (1 -> 2 -> 3 -> 4 -> 2) must also abort initialization."""
    for svc in (Service1, Service2, Service3, Service4):
        services_manager.bind_self(svc)
    with pytest.raises(CircularReferenceError):
        services_manager.initialize()
class ServiceG(object):
    """Dependency-free fixture service exposing a known attribute value."""
    def __init__(self):
        self.value = 1
def test_bind_self(services_manager: ServicesManager):
    """bind_self registers a concrete type under its own key and builds it."""
    services_manager.bind_self(ServiceG)
    services_manager.initialize()
    built = cast(ServiceG, services_manager.services[ServiceG].instance)
    assert built.value == 1
def test_retrieve_from_concrete_type(services_manager: ServicesManager):
    """resolve() returns the instance registered under a concrete type."""
    services_manager.bind_self(ServiceG)
    services_manager.initialize()
    assert services_manager.resolve(ServiceG).value == 1
class IServiceH(object):
    """Binding key for ServiceH in the resolve/instance tests."""
class ServiceH(IServiceH):
    """Implementation of IServiceH with a mutable marker attribute."""
    def __init__(self):
        self.value = 1
def test_retrieve_from_interface_type(services_manager: ServicesManager):
    """resolve() works with the interface key, returning the bound implementation."""
    services_manager.bind(IServiceH, ServiceH)
    services_manager.initialize()
    assert services_manager.resolve(IServiceH).value == 1
def test_service_not_register(services_manager: ServicesManager):
    """Resolving by the concrete type when only the interface was bound must raise."""
    services_manager.bind(IServiceH, ServiceH)
    services_manager.initialize()
    with pytest.raises(ServiceWasNotRegistered):
        services_manager.resolve(ServiceH)
def test_bind_self_from_instance(services_manager: ServicesManager):
    """An existing instance can be registered under its own type and resolved back."""
    pre_built = ServiceH()
    pre_built.value = 4
    services_manager.bind_self_from_instance(pre_built)
    services_manager.initialize()
    resolved = services_manager.resolve(ServiceH)
    assert type(resolved) is ServiceH
    assert resolved.value == 4
def test_bind_from_instance(services_manager: ServicesManager):
    """An existing instance can be registered under an interface and resolved back."""
    pre_built = ServiceH()
    pre_built.value = 4
    services_manager.bind_from_instance(IServiceH, pre_built)
    services_manager.initialize()
    assert services_manager.resolve(IServiceH).value == 4
| {"/tests/test_service_provider.py": ["/simpleinject/service_provider.py"]} |
48,939 | Steguer/SimpleInject | refs/heads/master | /simpleinject/service_provider.py | import inspect
from typing import *
class ServiceWrapper(object):
    """Bookkeeping record for one registered service: its concrete type, the
    singleton instance (once built) and its constructor dependencies."""

    def __init__(self, service_type: type, instance: object = None):
        self.service_type = service_type
        self.instance: object = instance
        # Filled in later by ServicesManager._init_dependencies().
        self.dependencies: List['ServiceWrapper'] = []
class Dummy(object):
    """Sentinel instance marking a service as 'currently being constructed'."""
class CircularReferenceError(Exception):
    """Raised when two services depend on each other, directly or transitively."""

    def __init__(self, msg):
        super().__init__(msg)
class ServiceWasNotRegistered(Exception):
    """Raised when resolve() is asked for a type that was never bound."""

    def __init__(self, service: Type):
        message = '{} was not registered in the service provider.'.format(service)
        super().__init__(message)
class ServicesManager(object):
    """Minimal constructor-injection container.

    Usage: register bindings with the ``bind*`` methods, call :meth:`initialize`
    once to wire dependencies (discovered from ``__init__`` type hints) and
    build singletons, then fetch them with :meth:`resolve`.
    """
    T = TypeVar('T')
    def __init__(self):
        # Binding key (interface or concrete type) -> wrapper holding the
        # singleton instance and its dependency edges.
        self.services: Dict[type, ServiceWrapper] = {}
    def bind_from_instance(self, interface: Type[T], instance: T):
        """Register an already-constructed *instance* under *interface*."""
        self.services[interface] = ServiceWrapper(interface, instance)
    def bind_self_from_instance(self, instance: object):
        """Register an already-constructed *instance* under its own type."""
        instance_type = type(instance)
        self.services[instance_type] = ServiceWrapper(instance_type, instance)
    def bind(self, interface: Type[T], service_type: Type[T]) -> None:
        """Register *service_type* to be constructed when *interface* is resolved."""
        self.services[interface] = ServiceWrapper(service_type)
    def bind_self(self, service_type: type) -> None:
        """Register *service_type* under itself."""
        self.services[service_type] = ServiceWrapper(service_type)
    def resolve(self, interface: Type[T]) -> T:
        """Return the singleton bound to *interface*.

        Raises ServiceWasNotRegistered when *interface* was never bound.
        """
        services = self.services
        if interface not in services:
            raise ServiceWasNotRegistered(interface)
        return services[interface].instance
    def initialize(self) -> None:
        """Wire dependency edges, then instantiate every registered service."""
        self._init_dependencies()
        self._resolve_graph()
    def _init_dependencies(self) -> None:
        # NOTE(review): hints are read from the binding *key*'s __init__, not the
        # concrete service_type's — fine when both share the signature, but worth
        # confirming for interface bindings. A dependency type that was never
        # registered raises KeyError here.
        for interface, service in self.services.items():
            for key, value in get_type_hints(interface.__init__).items():
                if key != 'self' and key != 'args' and key != 'kwargs':
                    service.dependencies.append(self.services[value])
    def _resolve_graph(self) -> None:
        # Build every service that does not yet have an instance.
        for srv_type, srv in self.services.items():
            if not srv.instance:
                self._instanciate_object(srv)
    def _instanciate_object(self, service: ServiceWrapper) -> None:
        # Mark as "under construction" so a cycle back to this service is detectable.
        service.instance = Dummy()
        dependencies: List[ServiceWrapper] = service.dependencies
        if not dependencies:
            # NOTE(review): this assigns the class object, but it is immediately
            # overwritten by the instantiation at the bottom of this method.
            service.instance = service.service_type
        elif type(service) is not service.service_type:
            for dep in dependencies:
                if type(dep.instance) is Dummy:
                    raise CircularReferenceError(
                        'Circular dependency between {} and {}'.format(service.service_type, dep))
                # NOTE(review): dependencies are re-instantiated even when already
                # built; the wrapper's instance is replaced, so identity checks
                # against the wrapper still hold, but earlier references are stale.
                self._instanciate_object(dep)
        # Final construction: always runs, injecting the (freshly built) deps.
        service.instance = service.service_type(*[dep.instance for dep in service.dependencies])
| {"/tests/test_service_provider.py": ["/simpleinject/service_provider.py"]} |
48,959 | Toktar/indy-plenum | refs/heads/master | /plenum/server/consensus/checkpoint_service.py | import math
import sys
from _sha256 import sha256
from typing import Tuple
from sortedcontainers import SortedListWithKey
from common.exceptions import LogicError
from common.serializers.serialization import serialize_msg_for_signing
from plenum.common.config_util import getConfig
from plenum.common.event_bus import InternalBus, ExternalBus
from plenum.common.messages.internal_messages import NeedMasterCatchup, NeedBackupCatchup, CheckpointStabilized, \
BackupSetupLastOrdered, NewViewAccepted, NewViewCheckpointsApplied
from plenum.common.messages.node_messages import Checkpoint, Ordered, CheckpointState
from plenum.common.metrics_collector import MetricsName, MetricsCollector, NullMetricsCollector
from plenum.common.router import Subscription
from plenum.common.stashing_router import StashingRouter, PROCESS
from plenum.common.util import updateNamedTuple, SortedDict, firstKey
from plenum.server.consensus.consensus_shared_data import ConsensusSharedData
from plenum.server.consensus.metrics_decorator import measure_consensus_time
from plenum.server.consensus.msg_validator import CheckpointMsgValidator
from plenum.server.database_manager import DatabaseManager
from plenum.server.replica_stasher import ReplicaStasher
from plenum.server.replica_validator_enums import STASH_WATERMARKS
from stp_core.common.log import getlogger
class CheckpointService:
    """Per-replica checkpoint logic for the consensus protocol.

    Accumulates own checkpoint state as batches are ordered, exchanges
    Checkpoint messages with peers, stabilizes checkpoints once a quorum of
    matching digests is collected (advancing the watermarks), and triggers
    catch-up when this replica is observed to lag behind the pool.
    """
    # Number of quorumed stashed checkpoints (beyond our own) tolerated before
    # concluding this replica has fallen behind and must catch up.
    STASHED_CHECKPOINTS_BEFORE_CATCHUP = 1
    # NOTE(review): the default ``NullMetricsCollector()`` below is created once at
    # import time and shared by every instance that omits ``metrics`` — confirm
    # the null collector is stateless.
    def __init__(self, data: ConsensusSharedData, bus: InternalBus, network: ExternalBus,
                 stasher: StashingRouter, db_manager: DatabaseManager,
                 metrics: MetricsCollector = NullMetricsCollector(),):
        self._data = data
        self._bus = bus
        self._network = network
        # Own checkpoint state keyed by (seqNoStart, seqNoEnd), sorted by end seq no.
        self._checkpoint_state = SortedDict(lambda k: k[1])
        self._stasher = stasher
        self._subscription = Subscription()
        self._validator = CheckpointMsgValidator(self._data)
        self._db_manager = db_manager
        self.metrics = metrics
        # Stashed checkpoints for each view. The key of the outermost
        # dictionary is the view_no, value being a dictionary with key as the
        # range of the checkpoint and its value again being a mapping between
        # senders and their sent checkpoint
        # Dict[view_no, Dict[(seqNoStart, seqNoEnd), Dict[sender, Checkpoint]]]
        self._stashed_recvd_checkpoints = {}
        self._config = getConfig()
        self._logger = getlogger()
        self._subscription.subscribe(stasher, Checkpoint, self.process_checkpoint)
        self._subscription.subscribe(bus, Ordered, self.process_ordered)
        self._subscription.subscribe(bus, BackupSetupLastOrdered, self.process_backup_setup_last_ordered)
        self._subscription.subscribe(bus, NewViewAccepted, self.process_new_view_accepted)
    def cleanup(self):
        # Detach all bus/stasher handlers registered in __init__.
        self._subscription.unsubscribe_all()
    @property
    def view_no(self):
        return self._data.view_no
    @property
    def is_master(self):
        return self._data.is_master
    @property
    def last_ordered_3pc(self):
        return self._data.last_ordered_3pc
    @measure_consensus_time(MetricsName.PROCESS_CHECKPOINT_TIME,
                            MetricsName.BACKUP_PROCESS_CHECKPOINT_TIME)
    def process_checkpoint(self, msg: Checkpoint, sender: str) -> Tuple[bool, str]:
        """
        Validate and process a Checkpoint message from *sender*.

        :return: (validation result, reason) tuple; (None, None) when the
            message is addressed to a different instance.
        """
        if msg.instId != self._data.inst_id:
            return None, None
        self._logger.info('{} processing checkpoint {} from {}'.format(self, msg, sender))
        result, reason = self._validator.validate(msg)
        if result == PROCESS:
            self._do_process_checkpoint(msg, sender)
        return result, reason
    def _do_process_checkpoint(self, msg: Checkpoint, sender: str) -> bool:
        """
        Record *sender*'s digest for the checkpoint range, stashing the message
        when our own checkpoint for that range is not complete yet.

        :return: whether processed (True) or stashed (False)
        """
        seqNoEnd = msg.seqNoEnd
        seqNoStart = msg.seqNoStart
        key = (seqNoStart, seqNoEnd)
        if key not in self._checkpoint_state or not self._checkpoint_state[key].digest:
            # We have not finished (or started) this checkpoint ourselves yet.
            self._stash_checkpoint(msg, sender)
            self._remove_stashed_checkpoints(self.last_ordered_3pc)
            self._start_catchup_if_needed()
            return False
        checkpoint_state = self._checkpoint_state[key]
        # Raise the error only if master since only master's last
        # ordered 3PC is communicated during view change
        if self.is_master and checkpoint_state.digest != msg.digest:
            self._logger.warning("{} received an incorrect digest {} for "
                                 "checkpoint {} from {}".format(self, msg.digest, key, sender))
            return True
        checkpoint_state.receivedDigests[sender] = msg.digest
        self._check_if_checkpoint_stable(key)
        return True
    def process_backup_setup_last_ordered(self, msg: BackupSetupLastOrdered):
        """Re-sync the watermarks after a backup's last ordered 3PC was set."""
        if msg.inst_id != self._data.inst_id:
            return
        self.update_watermark_from_3pc()
    def process_ordered(self, ordered: Ordered):
        """Fold an ordered batch into the checkpoint covering its pp_seq_no."""
        if ordered.instId != self._data.inst_id:
            return
        for batch_id in reversed(self._data.preprepared):
            if batch_id.pp_seq_no == ordered.ppSeqNo:
                self._add_to_checkpoint(batch_id.pp_seq_no,
                                        batch_id.pp_digest,
                                        ordered.ledgerId,
                                        batch_id.view_no,
                                        ordered.auditTxnRootHash)
                return
        raise LogicError("CheckpointService | Can't process Ordered msg because "
                         "ppSeqNo {} not in preprepared".format(ordered.ppSeqNo))
    def _start_catchup_if_needed(self):
        """Trigger catch-up (or watermark shift for backups) when we lag behind
        more than STASHED_CHECKPOINTS_BEFORE_CATCHUP quorumed checkpoints."""
        stashed_checkpoint_ends = self._stashed_checkpoints_with_quorum()
        lag_in_checkpoints = len(stashed_checkpoint_ends)
        if self._checkpoint_state:
            (s, e) = firstKey(self._checkpoint_state)
            # If the first stored own checkpoint has a not aligned lower bound
            # (this means that it was started after a catch-up), is complete
            # and there is a quorumed stashed checkpoint from other replicas
            # with the same end then don't include this stashed checkpoint
            # into the lag
            if s % self._config.CHK_FREQ != 0 \
                    and self._checkpoint_state[(s, e)].seqNo == e \
                    and e in stashed_checkpoint_ends:
                lag_in_checkpoints -= 1
        is_stashed_enough = \
            lag_in_checkpoints > self.STASHED_CHECKPOINTS_BEFORE_CATCHUP
        if not is_stashed_enough:
            return
        if self.is_master:
            self._logger.display(
                '{} has lagged for {} checkpoints so updating watermarks to {}'.format(
                    self, lag_in_checkpoints, stashed_checkpoint_ends[-1]))
            self.set_watermarks(low_watermark=stashed_checkpoint_ends[-1])
            if not self._data.is_primary:
                self._logger.display(
                    '{} has lagged for {} checkpoints so the catchup procedure starts'.format(
                        self, lag_in_checkpoints))
                self._bus.send(NeedMasterCatchup())
        else:
            self._logger.info(
                '{} has lagged for {} checkpoints so adjust last_ordered_3pc to {}, '
                'shift watermarks and clean collections'.format(
                    self, lag_in_checkpoints, stashed_checkpoint_ends[-1]))
            # Adjust last_ordered_3pc, shift watermarks, clean operational
            # collections and process stashed messages which now fit between
            # watermarks
            key_3pc = (self.view_no, stashed_checkpoint_ends[-1])
            self._bus.send(NeedBackupCatchup(inst_id=self._data.inst_id,
                                             caught_up_till_3pc=key_3pc))
            self.caught_up_till_3pc(key_3pc)
    def gc_before_new_view(self):
        # Drop all checkpoint state belonging to the view that is being left.
        self._reset_checkpoints()
        self._remove_stashed_checkpoints(till_3pc_key=(self.view_no, 0))
    def caught_up_till_3pc(self, caught_up_till_3pc):
        """Reset checkpoint state after catching up to *caught_up_till_3pc*."""
        self._reset_checkpoints()
        self._remove_stashed_checkpoints(till_3pc_key=caught_up_till_3pc)
        self.update_watermark_from_3pc()
    def catchup_clear_for_backup(self):
        """Fully clear checkpoint state on a backup and open the watermark window."""
        self._reset_checkpoints()
        self._remove_stashed_checkpoints()
        self.set_watermarks(low_watermark=0,
                            high_watermark=sys.maxsize)
    def _add_to_checkpoint(self, ppSeqNo, digest, ledger_id, view_no, audit_txn_root_hash):
        """Append an ordered batch digest to its checkpoint window, creating the
        window on first use and finalizing it when the window is complete."""
        for (s, e) in self._checkpoint_state.keys():
            if s <= ppSeqNo <= e:
                state = self._checkpoint_state[s, e]  # type: CheckpointState
                state.digests.append(digest)
                state = updateNamedTuple(state, seqNo=ppSeqNo)
                self._checkpoint_state[s, e] = state
                break
        else:
            # No existing window covers ppSeqNo: open one aligned to CHK_FREQ.
            s, e = ppSeqNo, math.ceil(ppSeqNo / self._config.CHK_FREQ) * self._config.CHK_FREQ
            self._logger.debug("{} adding new checkpoint state for {}".format(self, (s, e)))
            state = CheckpointState(ppSeqNo, [digest, ], None, {}, False)
            self._checkpoint_state[s, e] = state
        if state.seqNo == e:
            if len(state.digests) == self._config.CHK_FREQ:
                self._do_checkpoint(state, s, e, ledger_id, view_no, audit_txn_root_hash)
            self._process_stashed_checkpoints((s, e), view_no)
    @measure_consensus_time(MetricsName.SEND_CHECKPOINT_TIME,
                            MetricsName.BACKUP_SEND_CHECKPOINT_TIME)
    def _do_checkpoint(self, state, s, e, ledger_id, view_no, audit_txn_root_hash):
        """Finalize the own checkpoint for (s, e) and broadcast it to the pool."""
        # TODO CheckpointState/Checkpoint is not a namedtuple anymore
        # 1. check if updateNamedTuple works for the new message type
        # 2. choose another name
        # TODO: This is hack of hacks, should be removed when refactoring is complete
        if not self.is_master and audit_txn_root_hash is None:
            audit_txn_root_hash = "7RJ5bkAKRy2CCvarRij2jiHC16SVPjHcrpVdNsboiQGv"
        state = updateNamedTuple(state,
                                 digest=audit_txn_root_hash,
                                 digests=[])
        self._checkpoint_state[s, e] = state
        self._logger.info("{} sending Checkpoint {} view {} checkpointState digest {}. Ledger {} "
                          "txn root hash {}. Committed state root hash {} Uncommitted state root hash {}".
                          format(self, (s, e), view_no, state.digest, ledger_id,
                                 self._db_manager.get_txn_root_hash(ledger_id),
                                 self._db_manager.get_state_root_hash(ledger_id,
                                                                      committed=True),
                                 self._db_manager.get_state_root_hash(ledger_id,
                                                                      committed=False)))
        checkpoint = Checkpoint(self._data.inst_id, view_no, s, e, state.digest)
        self._network.send(checkpoint)
        self._data.checkpoints.append(checkpoint)
    def _mark_checkpoint_stable(self, seqNo):
        """Mark the checkpoint ending at *seqNo* stable, drop older state and
        shift the low watermark up to *seqNo*."""
        previousCheckpoints = []
        for (s, e), state in self._checkpoint_state.items():
            if e == seqNo:
                # TODO CheckpointState/Checkpoint is not a namedtuple anymore
                # 1. check if updateNamedTuple works for the new message type
                # 2. choose another name
                state = updateNamedTuple(state, isStable=True)
                self._checkpoint_state[s, e] = state
                self._set_stable_checkpoint(e)
                break
            else:
                previousCheckpoints.append((s, e))
        else:
            self._logger.debug("{} could not find {} in checkpoints".format(self, seqNo))
            return
        self.set_watermarks(low_watermark=seqNo)
        for k in previousCheckpoints:
            self._logger.trace("{} removing previous checkpoint {}".format(self, k))
            self._checkpoint_state.pop(k)
        self._remove_stashed_checkpoints(till_3pc_key=(self.view_no, seqNo))
        self._bus.send(CheckpointStabilized(self._data.inst_id, (self.view_no, seqNo)))
        self._logger.info("{} marked stable checkpoint {}".format(self, (s, e)))
    def _check_if_checkpoint_stable(self, key: Tuple[int, int]):
        """Stabilize the checkpoint at *key* once a quorum of peer digests arrived."""
        ckState = self._checkpoint_state[key]
        if self._data.quorums.checkpoint.is_reached(len(ckState.receivedDigests)):
            self._mark_checkpoint_stable(ckState.seqNo)
            return True
        else:
            self._logger.debug('{} has state.receivedDigests as {}'.format(
                self, ckState.receivedDigests.keys()))
            return False
    def _stash_checkpoint(self, ck: Checkpoint, sender: str):
        # Remember a peer checkpoint we cannot act on yet, keyed by view and range.
        self._logger.debug('{} stashing {} from {}'.format(self, ck, sender))
        seqNoStart, seqNoEnd = ck.seqNoStart, ck.seqNoEnd
        if ck.viewNo not in self._stashed_recvd_checkpoints:
            self._stashed_recvd_checkpoints[ck.viewNo] = {}
        stashed_for_view = self._stashed_recvd_checkpoints[ck.viewNo]
        if (seqNoStart, seqNoEnd) not in stashed_for_view:
            stashed_for_view[seqNoStart, seqNoEnd] = {}
        stashed_for_view[seqNoStart, seqNoEnd][sender] = ck
    def _stashed_checkpoints_with_quorum(self):
        """Return sorted end seq numbers of stashed checkpoints (current view)
        that already have a quorum of senders."""
        end_pp_seq_numbers = []
        quorum = self._data.quorums.checkpoint
        for (_, seq_no_end), senders in self._stashed_recvd_checkpoints.get(
                self.view_no, {}).items():
            if quorum.is_reached(len(senders)):
                end_pp_seq_numbers.append(seq_no_end)
        return sorted(end_pp_seq_numbers)
    def _process_stashed_checkpoints(self, key, view_no):
        """Re-process all stashed peer checkpoints for *key* in *view_no*."""
        # Remove all checkpoints from previous views if any
        self._remove_stashed_checkpoints(till_3pc_key=(self.view_no, 0))
        if key not in self._stashed_recvd_checkpoints.get(view_no, {}):
            # NOTE(review): format placeholders are never filled in here
            # (missing .format(self, key) arguments).
            self._logger.trace("{} have no stashed checkpoints for {}")
            return
        # Get a snapshot of all the senders of stashed checkpoints for `key`
        senders = list(self._stashed_recvd_checkpoints[view_no][key].keys())
        total_processed = 0
        consumed = 0
        for sender in senders:
            # Check if the checkpoint from `sender` is still in
            # `stashed_recvd_checkpoints` because it might be removed from there
            # in case own checkpoint was stabilized when we were processing
            # stashed checkpoints from previous senders in this loop
            if view_no in self._stashed_recvd_checkpoints \
                    and key in self._stashed_recvd_checkpoints[view_no] \
                    and sender in self._stashed_recvd_checkpoints[view_no][key]:
                # NOTE(review): process_checkpoint returns a (result, reason)
                # tuple which is always truthy, so `consumed` is incremented even
                # for re-stashed messages — verify the counter is informational only.
                if self.process_checkpoint(
                        self._stashed_recvd_checkpoints[view_no][key].pop(sender),
                        sender):
                    consumed += 1
                # Note that if `process_checkpoint` returned False then the
                # checkpoint from `sender` was re-stashed back to
                # `stashed_recvd_checkpoints`
                total_processed += 1
        # If we have consumed stashed checkpoints for `key` from all the
        # senders then remove entries which have become empty
        if view_no in self._stashed_recvd_checkpoints \
                and key in self._stashed_recvd_checkpoints[view_no] \
                and len(self._stashed_recvd_checkpoints[view_no][key]) == 0:
            del self._stashed_recvd_checkpoints[view_no][key]
            if len(self._stashed_recvd_checkpoints[view_no]) == 0:
                del self._stashed_recvd_checkpoints[view_no]
        restashed = total_processed - consumed
        self._logger.info('{} processed {} stashed checkpoints for {}, '
                          '{} of them were stashed again'.
                          format(self, total_processed, key, restashed))
        return total_processed
    def reset_watermarks_before_new_view(self):
        # Reset any previous view watermarks since for view change to
        # successfully complete, the node must have reached the same state
        # as other nodes
        self.set_watermarks(low_watermark=0)
    def should_reset_watermarks_before_new_view(self):
        """Watermarks are reset unless something was already ordered in this view."""
        if self.view_no <= 0:
            return False
        if self.last_ordered_3pc[0] == self.view_no and self.last_ordered_3pc[1] > 0:
            return False
        return True
    def set_watermarks(self, low_watermark: int, high_watermark: int = None):
        """Set the watermark window; high defaults to low + LOG_SIZE. Unstashes
        messages that now fit between the watermarks."""
        self._data.low_watermark = low_watermark
        self._data.high_watermark = self._data.low_watermark + self._config.LOG_SIZE \
            if high_watermark is None else \
            high_watermark
        self._logger.info('{} set watermarks as {} {}'.format(self,
                                                              self._data.low_watermark,
                                                              self._data.high_watermark))
        self._stasher.process_all_stashed(STASH_WATERMARKS)
    def update_watermark_from_3pc(self):
        # Align the watermark window with the last ordered 3PC of the current view.
        last_ordered_3pc = self.last_ordered_3pc
        if (last_ordered_3pc is not None) and (last_ordered_3pc[0] == self.view_no):
            self._logger.info("update_watermark_from_3pc to {}".format(last_ordered_3pc))
            self.set_watermarks(last_ordered_3pc[1])
        else:
            self._logger.info("try to update_watermark_from_3pc but last_ordered_3pc is None")
    def _remove_stashed_checkpoints(self, till_3pc_key=None):
        """
        Remove stashed received checkpoints up to `till_3pc_key` if provided,
        otherwise remove all stashed received checkpoints
        """
        if till_3pc_key is None:
            self._stashed_recvd_checkpoints.clear()
            self._logger.info('{} removing all stashed checkpoints'.format(self))
            return
        for view_no in list(self._stashed_recvd_checkpoints.keys()):
            if view_no < till_3pc_key[0]:
                self._logger.info('{} removing stashed checkpoints for view {}'.format(self, view_no))
                del self._stashed_recvd_checkpoints[view_no]
            elif view_no == till_3pc_key[0]:
                for (s, e) in list(self._stashed_recvd_checkpoints[view_no].keys()):
                    if e <= till_3pc_key[1]:
                        self._logger.info('{} removing stashed checkpoints: '
                                          'viewNo={}, seqNoStart={}, seqNoEnd={}'.
                                          format(self, view_no, s, e))
                        del self._stashed_recvd_checkpoints[view_no][(s, e)]
                if len(self._stashed_recvd_checkpoints[view_no]) == 0:
                    del self._stashed_recvd_checkpoints[view_no]
    def _reset_checkpoints(self):
        # That function most probably redundant in PBFT approach,
        # because according to paper, checkpoints cleared only when next stabilized.
        # Avoid using it while implement other services.
        self._checkpoint_state.clear()
        self._data.checkpoints.clear()
        # TODO: change to = 1 in ViewChangeService integration.
        self._data.stable_checkpoint = 0
    def _set_stable_checkpoint(self, end_seq_no):
        """Record *end_seq_no* as the stable checkpoint and drop older ones."""
        if not list(self._data.checkpoints.irange_key(end_seq_no, end_seq_no)):
            raise LogicError('Stable checkpoint must be in checkpoints')
        self._data.stable_checkpoint = end_seq_no
        self._data.checkpoints = \
            SortedListWithKey([c for c in self._data.checkpoints if c.seqNoEnd >= end_seq_no],
                              key=lambda checkpoint: checkpoint.seqNoEnd)
    def __str__(self) -> str:
        return "{} - checkpoint_service".format(self._data.name)
    # TODO: move to OrderingService as a handler for Cleanup messages
    # def _clear_batch_till_seq_no(self, seq_no):
    #     self._data.preprepared = [pp for pp in self._data.preprepared if pp.ppSeqNo >= seq_no]
    #     self._data.prepared = [p for p in self._data.prepared if p.ppSeqNo >= seq_no]
    def discard(self, msg, reason, sender):
        """Log-and-drop helper for invalid messages."""
        self._logger.trace("{} discard message {} from {} "
                           "with the reason: {}".format(self, msg, sender, reason))
    def process_new_view_accepted(self, msg: NewViewAccepted):
        """Adopt the checkpoint agreed during view change, then announce it."""
        # 1. update shared data
        cp = msg.checkpoint
        if cp not in self._data.checkpoints:
            self._data.checkpoints.append(cp)
        self._set_stable_checkpoint(cp.seqNoEnd)
        self.set_watermarks(low_watermark=cp.seqNoEnd)
        # 2. send NewViewCheckpointsApplied
        self._bus.send(NewViewCheckpointsApplied(view_no=msg.view_no,
                                                 view_changes=msg.view_changes,
                                                 checkpoint=msg.checkpoint,
                                                 batches=msg.batches))
        return PROCESS, None
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,960 | Toktar/indy-plenum | refs/heads/master | /plenum/test/checkpoints/test_discard_old_checkpoint_messages.py | from plenum.server.replica_validator_enums import ALREADY_STABLE
from stp_core.loop.eventually import eventually
from plenum.common.messages.node_messages import Checkpoint
from plenum.test.checkpoints.helper import chkChkpoints, chk_chkpoints_for_instance
from plenum.test.helper import checkDiscardMsg
from plenum.test.helper import sdk_send_random_and_check
def test_discard_checkpoint_msg_for_stable_checkpoint(chkFreqPatched, looper,
                                                      txnPoolNodeSet,
                                                      sdk_pool_handle,
                                                      sdk_wallet_client,
                                                      reqs_for_checkpoint):
    """A Checkpoint message for an already-stable checkpoint must be discarded
    by the receiving replicas with the ALREADY_STABLE reason."""
    # Order enough requests to complete exactly one checkpoint on every instance.
    sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
                              sdk_wallet_client, reqs_for_checkpoint)
    for inst_id in txnPoolNodeSet[0].replicas.keys():
        looper.run(eventually(chk_chkpoints_for_instance, txnPoolNodeSet,
                              inst_id, 1, 0, retryWait=1))
    sender_replica = txnPoolNodeSet[0].replicas[0]
    stable_chkpoint_msg = sender_replica._consensus_data.checkpoints[0]
    # Re-send the now-stable checkpoint and expect everyone else to discard it.
    sender_replica.send(stable_chkpoint_msg)
    receiving_stashers = [node.replicas[0].stasher for node in txnPoolNodeSet[1:]]
    looper.run(eventually(checkDiscardMsg, receiving_stashers, stable_chkpoint_msg,
                          ALREADY_STABLE, retryWait=1))
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,961 | Toktar/indy-plenum | refs/heads/master | /plenum/hypothesis/helper.py | from typing import Any
from hypothesis import strategies as st
from plenum.test.simulation.sim_random import SimRandom
class HypothesisSimRandom(SimRandom):
    """SimRandom implementation backed by a hypothesis `data` object, so that
    simulation randomness is generated (and shrunk) by hypothesis."""

    def __init__(self, data):
        self._data = data

    def integer(self, min_value: int, max_value: int) -> int:
        # Draw from hypothesis' integers strategy over the requested range.
        strategy = st.integers(min_value=min_value, max_value=max_value)
        return self._data.draw(strategy)

    def choice(self, *args) -> Any:
        # sampled_from picks one element from the given alternatives.
        return self._data.draw(st.sampled_from(args))
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,962 | Toktar/indy-plenum | refs/heads/master | /plenum/test/input_validation/fields_validation/test_batch_id_field.py | from plenum.common.messages.fields import BatchIDField
# Shared module-level BatchIDField instance used by every test below.
validator = BatchIDField()
def test_valid():
    """Well-formed (view_no, pp_seq_no, digest) triples pass validation."""
    for batch_id in ((1, 1, "digest"), (0, 1, "digest"), (100, 0, "d")):
        assert not validator.validate(batch_id)
def test_invalid_view_no():
    """A negative or non-integer view_no is rejected."""
    for batch_id in ((-1, 1, "digest"), ("aaa", 1, "digest")):
        assert validator.validate(batch_id)
def test_invalid_pp_seq_no():
    """A negative or non-integer pp_seq_no is rejected."""
    for batch_id in ((1, -1, "digest"), (1, "aaa", "digest")):
        assert validator.validate(batch_id)
def test_invalid_digest():
    """An empty or non-string digest is rejected."""
    for batch_id in ((1, 1, ""), (1, 1, 1)):
        assert validator.validate(batch_id)
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,963 | Toktar/indy-plenum | refs/heads/master | /plenum/common/messages/internal_messages.py | from typing import NamedTuple, List, Any
from plenum.common.exceptions import SuspiciousNode
# Internal messages passed between node/consensus services over the internal bus.

HookMessage = NamedTuple('HookMessage',
                         [('hook', int),
                          ('args', tuple)])

RequestPropagates = NamedTuple('RequestPropagates',
                               [('bad_requests', List)])

BackupSetupLastOrdered = NamedTuple('BackupSetupLastOrdered',
                                    [('inst_id', int)])

NeedMasterCatchup = NamedTuple('NeedMasterCatchup', [])

NeedBackupCatchup = NamedTuple('NeedBackupCatchup',
                               [('inst_id', int),
                                ('caught_up_till_3pc', tuple)])

CheckpointStabilized = NamedTuple('CheckpointStabilized',
                                  [('inst_id', int),
                                   ('last_stable_3pc', tuple)])

RaisedSuspicion = NamedTuple('RaisedSuspicion',
                             [('inst_id', int),
                              ('ex', SuspiciousNode)])

PreSigVerification = NamedTuple('PreSigVerification',
                                [('cmsg', Any)])

# by default view_no for NeedViewChange is None meaning that we move to the next view.
# BUGFIX: the typename used to be the stale 'StartViewChange' (leftover from a
# rename), which broke pickling of NeedViewChange instances — pickle looks the
# class up by its typename in this module and would not find it.
NeedViewChange = NamedTuple('NeedViewChange',
                            [('view_no', int)])
NeedViewChange.__new__.__defaults__ = (None,) * len(NeedViewChange._fields)

ViewChangeStarted = NamedTuple('ViewChangeStarted',
                               [('view_no', int)])

NewViewAccepted = NamedTuple('NewViewAccepted',
                             [('view_no', int),
                              ('view_changes', list),
                              ('checkpoint', object),
                              ('batches', list)])

NewViewCheckpointsApplied = NamedTuple('NewViewCheckpointsApplied',
                                       [('view_no', int),
                                        ('view_changes', list),
                                        ('checkpoint', object),
                                        ('batches', list)])
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,964 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py | from unittest.mock import Mock
import pytest
from common.exceptions import LogicError
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.messages.internal_messages import CheckpointStabilized, NeedBackupCatchup, NeedMasterCatchup
from plenum.common.messages.node_messages import Checkpoint, Ordered, PrePrepare, CheckpointState
from plenum.common.util import updateNamedTuple, getMaxFailures
from plenum.server.consensus.checkpoint_service import CheckpointService
from plenum.server.consensus.consensus_shared_data import preprepare_to_batch_id
from plenum.test.checkpoints.helper import cp_digest
from plenum.test.helper import create_pre_prepare_params
@pytest.fixture
def pre_prepare(checkpoint_service):
    """A PrePrepare for the domain ledger in the service's current view, pp_seq_no=1."""
    return PrePrepare(*create_pre_prepare_params(None,
                                                 ledger_id=DOMAIN_LEDGER_ID,
                                                 view_no=checkpoint_service.view_no,
                                                 pp_seq_no=1))
@pytest.fixture
def ordered(pre_prepare):
    """An Ordered message mirroring the pre_prepare fixture's fields."""
    return Ordered(pre_prepare.instId,
                   pre_prepare.viewNo,
                   pre_prepare.reqIdr,
                   [],
                   pre_prepare.ppSeqNo,
                   pre_prepare.ppTime,
                   pre_prepare.ledgerId,
                   pre_prepare.stateRootHash,
                   pre_prepare.txnRootHash,
                   pre_prepare.auditTxnRootHash,
                   ["Alpha", "Beta"])
@pytest.fixture
def checkpoint(ordered, tconf):
    """A Checkpoint for the CHK_FREQ-sized window derived from `ordered`."""
    seq_start = ordered.ppSeqNo % tconf.CHK_FREQ
    seq_end = seq_start + tconf.CHK_FREQ - 1
    return Checkpoint(instId=ordered.instId,
                      viewNo=ordered.viewNo,
                      seqNoStart=seq_start,
                      seqNoEnd=seq_end,
                      digest=cp_digest(seq_start, seq_end))
def test_process_checkpoint_with_incorrect_digest(checkpoint_service, checkpoint, tconf, is_master):
    """A checkpoint whose digest differs from the locally computed one is not
    recorded on the master instance (backups do record it)."""
    chk_range = (checkpoint.seqNoStart, checkpoint.seqNoEnd)
    frm = "sender"
    # Local state holds a conflicting digest for the same range.
    checkpoint_service._checkpoint_state[chk_range] = CheckpointState(1, [],
                                                                     "other_digest", {}, False)
    assert checkpoint_service.process_checkpoint(checkpoint, frm)
    received = checkpoint_service._checkpoint_state[chk_range].receivedDigests
    if is_master:
        assert frm not in received
    else:
        assert frm in received
def test_start_catchup_on_quorum_of_stashed_checkpoints(checkpoint_service, checkpoint, pre_prepare,
                                                        tconf, ordered, validators, is_master):
    """When a quorum of checkpoints is stashed for a range above the local
    state, the service triggers catchup: NeedMasterCatchup on master,
    NeedBackupCatchup (with the caught-up 3PC key) on backups.

    Fix: removed a stray debug `print(quorum)` left in the test body.
    """
    master_catchup_handler = Mock()
    backup_catchup_handler = Mock()
    checkpoint_service._bus.subscribe(NeedMasterCatchup, master_catchup_handler)
    checkpoint_service._bus.subscribe(NeedBackupCatchup, backup_catchup_handler)
    quorum = checkpoint_service._data.quorums.checkpoint.value
    n = len(validators)
    assert quorum == n - getMaxFailures(n) - 1
    senders = ["sender{}".format(i) for i in range(quorum + 1)]
    # Two consecutive checkpoint ranges; all received checkpoints get stashed.
    old_key = (1, tconf.CHK_FREQ)
    key = (old_key[1] + 1, old_key[1] + tconf.CHK_FREQ)
    new_checkpoint = Checkpoint(instId=ordered.instId,
                                viewNo=ordered.viewNo,
                                seqNoStart=key[0],
                                seqNoEnd=key[1],
                                digest=cp_digest(1, 1))
    for sender in senders[:quorum]:
        assert not checkpoint_service._do_process_checkpoint(checkpoint, sender)
        assert checkpoint_service._stashed_recvd_checkpoints[checkpoint.viewNo][old_key][sender] == checkpoint
    for sender in senders[:quorum - 1]:
        assert not checkpoint_service._do_process_checkpoint(new_checkpoint, sender)
        assert checkpoint_service._stashed_recvd_checkpoints[checkpoint.viewNo][key][sender] == new_checkpoint
    # The quorum-th checkpoint for the newer range triggers catchup.
    assert not checkpoint_service._do_process_checkpoint(new_checkpoint, senders[quorum - 1])
    if is_master:
        assert checkpoint_service._data.low_watermark == key[1]
        master_catchup_handler.assert_called_once_with(NeedMasterCatchup())
    else:
        backup_catchup_handler.assert_called_once_with(NeedBackupCatchup(inst_id=checkpoint_service._data.inst_id,
                                                                         caught_up_till_3pc=(checkpoint_service.view_no,
                                                                                             key[1])))
def test_process_backup_catchup_msg(checkpoint_service, tconf, checkpoint):
    """caught_up_till_3pc() resets checkpoint bookkeeping up to the caught-up
    3PC key: the low watermark moves to it, local checkpoint state and own
    checkpoints are dropped, stable_checkpoint resets to 0, and only stashed
    checkpoints for ranges strictly above the caught-up key are kept.
    """
    checkpoint_service._data.last_ordered_3pc = (checkpoint_service.view_no, 0)
    # Two adjacent checkpoint ranges: `key` is caught up past, `new_key` is not.
    key = (1, tconf.CHK_FREQ)
    new_key = (key[1] + 1, key[1] + tconf.CHK_FREQ)
    checkpoint_service._data.stable_checkpoint = 1
    checkpoint_service._stash_checkpoint(Checkpoint(1, checkpoint.viewNo, new_key[0], new_key[1], cp_digest(1, 1)),
                                         "frm")
    checkpoint_service._stash_checkpoint(Checkpoint(1, checkpoint.viewNo, key[0], key[1], cp_digest(1, 1)),
                                         "frm")
    # Local, not-yet-stable checkpoint state for the `key` range.
    checkpoint_service._checkpoint_state[key] = CheckpointState(key[1] - 1,
                                                                ["digest"] * (tconf.CHK_FREQ - 1),
                                                                None,
                                                                {},
                                                                False)
    checkpoint_service._data.checkpoints.append(checkpoint)
    checkpoint_service._data.last_ordered_3pc = (checkpoint_service.view_no, key[1])
    checkpoint_service.caught_up_till_3pc(checkpoint_service._data.last_ordered_3pc)
    assert checkpoint_service._data.low_watermark == key[1]
    assert not checkpoint_service._checkpoint_state
    assert not checkpoint_service._data.checkpoints
    assert checkpoint_service._data.stable_checkpoint == 0
    # Stashed checkpoint for the caught-up range is gone; the later one remains.
    assert key not in checkpoint_service._stashed_recvd_checkpoints[checkpoint_service.view_no]
    assert new_key in checkpoint_service._stashed_recvd_checkpoints[checkpoint_service.view_no]
def test_process_checkpoint(checkpoint_service, checkpoint, pre_prepare, tconf, ordered, validators, is_master):
    """End-to-end checkpoint stabilization: after ordering the batch that closes
    a checkpoint window and receiving a quorum of matching Checkpoint messages,
    the checkpoint becomes stable, stashed checkpoints for the stable view are
    removed, the low watermark advances, a CheckpointStabilized message is sent
    on the bus, and older checkpoint state is cleaned up.
    """
    checkpoint_stabilized_handler = Mock()
    checkpoint_service._bus.subscribe(CheckpointStabilized, checkpoint_stabilized_handler)
    quorum = checkpoint_service._data.quorums.checkpoint.value
    n = len(validators)
    assert quorum == n - getMaxFailures(n) - 1
    senders = ["sender{}".format(i) for i in range(quorum + 1)]
    key = (1, tconf.CHK_FREQ)
    # An artificial older checkpoint range that must be cleaned up on stabilization.
    old_key = (-1, 0)
    # Stash checkpoints for the current view and the next one.
    checkpoint_service._stash_checkpoint(Checkpoint(1, checkpoint.viewNo, 1, 1, cp_digest(1, 1)), "frm")
    checkpoint_service._stash_checkpoint(Checkpoint(1, checkpoint.viewNo + 1, 1, 1, cp_digest(1, 1)), "frm")
    checkpoint_service._checkpoint_state[old_key] = CheckpointState(1,
                                                                   ["digest"] * (tconf.CHK_FREQ - 1),
                                                                   None,
                                                                   {},
                                                                   False)
    checkpoint_service._checkpoint_state[key] = CheckpointState(key[1] - 1,
                                                                ["digest"] * (tconf.CHK_FREQ - 1),
                                                                None,
                                                                {},
                                                                False)
    # Order the batch that closes the checkpoint window (ppSeqNo == CHK_FREQ).
    pre_prepare.ppSeqNo = key[1]
    ordered.ppSeqNo = pre_prepare.ppSeqNo
    checkpoint_service._data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    checkpoint_service.process_ordered(ordered)
    _check_checkpoint(checkpoint_service, key[0], key[1], pre_prepare, check_shared_data=True)
    # Pin the local digest so incoming checkpoints match it.
    state = updateNamedTuple(checkpoint_service._checkpoint_state[key],
                             digest=checkpoint.digest)
    checkpoint_service._checkpoint_state[key] = state
    for sender in senders[:quorum - 1]:
        assert checkpoint_service.process_checkpoint(checkpoint, sender)
        assert checkpoint_service._checkpoint_state[key].receivedDigests[sender] == checkpoint.digest
    assert not checkpoint_service._checkpoint_state[key].isStable
    # send the last checkpoint to stable it
    assert checkpoint_service.process_checkpoint(checkpoint, senders[quorum - 1])
    assert checkpoint_service._checkpoint_state[key].isStable
    # check _remove_stashed_checkpoints()
    assert checkpoint.viewNo not in checkpoint_service._stashed_recvd_checkpoints
    assert checkpoint.viewNo + 1 in checkpoint_service._stashed_recvd_checkpoints
    # check watermarks
    assert checkpoint_service._data.low_watermark == checkpoint.seqNoEnd
    # check that a Cleanup msg has been sent
    checkpoint_stabilized_handler.assert_called_once_with(
        CheckpointStabilized(inst_id=checkpoint_service._data.inst_id,
                             last_stable_3pc=(checkpoint.viewNo, checkpoint.seqNoEnd)))
    # check that old checkpoint_states has been removed
    assert old_key not in checkpoint_service._checkpoint_state
def test_process_oredered(checkpoint_service, ordered, pre_prepare, tconf):
    """process_ordered raises LogicError for an Ordered whose ppSeqNo is not in
    preprepared, then tracks checkpoint state as batches get ordered.

    NOTE(review): "oredered" in the test name looks like a typo for "ordered";
    kept as-is since the name is part of the test-discovery surface.
    """
    with pytest.raises(LogicError, match="CheckpointService | Can't process Ordered msg because "
                                         "ppSeqNo {} not in preprepared".format(ordered.ppSeqNo)):
        checkpoint_service.process_ordered(ordered)
    checkpoint_service._data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    checkpoint_service.process_ordered(ordered)
    _check_checkpoint(checkpoint_service, 1, tconf.CHK_FREQ, pre_prepare)
    # Ordering the batch that closes the window (ppSeqNo == CHK_FREQ) with
    # CHK_FREQ - 1 digests already collected completes the checkpoint.
    pre_prepare.ppSeqNo = tconf.CHK_FREQ
    ordered.ppSeqNo = pre_prepare.ppSeqNo
    checkpoint_service._data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    state = updateNamedTuple(checkpoint_service._checkpoint_state[1, tconf.CHK_FREQ],
                             digests=["digest"] * (tconf.CHK_FREQ - 1))
    checkpoint_service._checkpoint_state[1, tconf.CHK_FREQ] = state
    checkpoint_service.process_ordered(ordered)
    _check_checkpoint(checkpoint_service, 1, tconf.CHK_FREQ, pre_prepare, check_shared_data=True)
    # The next ordered batch starts a new checkpoint window.
    pre_prepare.ppSeqNo += 1
    ordered.ppSeqNo = pre_prepare.ppSeqNo
    checkpoint_service._data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    checkpoint_service.process_ordered(ordered)
    _check_checkpoint(checkpoint_service, tconf.CHK_FREQ + 1, tconf.CHK_FREQ * 2, pre_prepare)
def _check_checkpoint(checkpoint_service: CheckpointService, start, end, pp,
                      check_shared_data=False):
    """Assert that checkpoint state exists for range (start, end) and matches
    `pp`; when `check_shared_data` is True, also require a matching checkpoint
    in the shared consensus data."""
    chk_key = (start, end)
    assert chk_key in checkpoint_service._checkpoint_state
    chk_state = checkpoint_service._checkpoint_state[chk_key]
    assert chk_state.seqNo == pp.ppSeqNo
    assert (pp.digest in chk_state.digests) or chk_state.digest
    # Look for a checkpoint covering exactly this range in the shared data.
    match = next((cp for cp in checkpoint_service._data.checkpoints
                  if cp.seqNoEnd == end and cp.seqNoStart == start), None)
    if match is not None:
        assert match.instId == pp.instId
        assert match.viewNo == pp.viewNo
        assert match.digest
        return
    assert not check_shared_data, "The checkpoint should contains in the consensus_data."
def test_remove_stashed_checkpoints_doesnt_crash_when_current_view_no_is_greater_than_last_stashed_checkpoint(
        checkpoint_service):
    """Removing stashed checkpoints must not crash (and must empty the stash)
    when the current view_no is above the view of the last stashed checkpoint."""
    service = checkpoint_service
    stash_key = (1, 1)
    service._stashed_recvd_checkpoints[1] = {stash_key: {}}
    service._data.view_no = 2
    service._remove_stashed_checkpoints(stash_key)
    assert not service._stashed_recvd_checkpoints
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,965 | Toktar/indy-plenum | refs/heads/master | /plenum/hypothesis/consensus/hyp_sim_view_change.py | from hypothesis import strategies as st, given
from plenum.hypothesis.helper import HypothesisSimRandom
from plenum.test.consensus.view_change.test_sim_view_change import check_view_change_completes_under_normal_conditions
@given(st.data())
def test_view_change_completes_under_normal_conditions(data):
    """Hypothesis-driven wrapper around the simulated view-change property check."""
    sim_random = HypothesisSimRandom(data)
    check_view_change_completes_under_normal_conditions(sim_random)
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,966 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/checkpoint_service/test_update_watermarks_api.py | import sys
def test_propagate_primary_is_Master_update_watermarks(checkpoint_service):
    """On master, update_watermark_from_3pc() moves the low watermark to the
    last ordered ppSeqNo."""
    data = checkpoint_service._data
    checkpoint_service._is_master = True
    data.low_watermark = 0
    data.last_ordered_3pc = (checkpoint_service.view_no, 500)
    assert data.low_watermark == 0
    checkpoint_service.update_watermark_from_3pc()
    assert data.low_watermark == 500
def test_propagate_primary_is_Master_watermarks_not_changed_if_last_ordered_not_changed(checkpoint_service):
    """If last_ordered_3pc has not moved, updating from 3PC keeps the watermark."""
    data = checkpoint_service._data
    checkpoint_service._is_master = True
    data.low_watermark = 0
    assert data.low_watermark == 0
    checkpoint_service.update_watermark_from_3pc()
    assert data.low_watermark == 0
def test_reset_watermarks_before_new_view(checkpoint_service, tconf, is_master):
    """reset_watermarks_before_new_view() returns watermarks to (0, LOG_SIZE)
    on both master and backup instances."""
    checkpoint_service._is_master = is_master
    data = checkpoint_service._data
    data.low_watermark = 100
    checkpoint_service.reset_watermarks_before_new_view()
    assert data.low_watermark == 0
    assert data.high_watermark == tconf.LOG_SIZE
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,967 | Toktar/indy-plenum | refs/heads/master | /plenum/test/replica/test_catchup_after_replica_removing.py | import pytest
from plenum.common.constants import STEWARD_STRING, VALIDATOR
from pytest import fixture
from plenum.common.throughput_measurements import RevivalSpikeResistantEMAThroughputMeasurement
from plenum.common.util import getMaxFailures
from plenum.test.helper import sdk_send_random_and_check, assertExp, sdk_get_and_check_replies
from plenum.test.node_catchup.helper import waitNodeDataEquality
from plenum.test.pool_transactions.conftest import sdk_node_theta_added
from plenum.test.pool_transactions.helper import sdk_add_new_nym, prepare_new_node_data, prepare_node_request, \
sdk_sign_and_send_prepared_request, create_and_start_new_node, demote_node
from plenum.test.test_node import checkNodesConnected, TestNode
from stp_core.loop.eventually import eventually
# Pool size used by the test fixtures in this module.
nodeCount = 7
def test_catchup_after_replica_removing(looper, sdk_pool_handle, txnPoolNodeSet,
                                        sdk_wallet_stewards, tdir, tconf, allPluginsPath):
    """The pool keeps ordering and stays in sync after demoting the node that
    is primary of backup instance 1.

    Steps: order a request; demote the backup primary; check the view has not
    changed on the remaining nodes; order another request and verify data
    equality across the pool.
    """
    view_no = txnPoolNodeSet[-1].viewNo
    sdk_send_random_and_check(looper, txnPoolNodeSet,
                              sdk_pool_handle, sdk_wallet_stewards[0], 1)
    waitNodeDataEquality(looper, *txnPoolNodeSet)
    # Pick the node acting as primary of backup instance 1. Use next() instead
    # of building a full throwaway list just to take its first element.
    index, node_for_demote = next((i, n) for i, n in enumerate(txnPoolNodeSet)
                                  if n.replicas[1].isPrimary)
    sdk_wallet_steward = sdk_wallet_stewards[index]
    demote_node(looper, sdk_wallet_steward, sdk_pool_handle, node_for_demote)
    txnPoolNodeSet.pop(index)
    # Demoting a backup primary must not change the view on remaining nodes.
    looper.run(eventually(lambda: assertExp(n.viewNo == view_no for n in txnPoolNodeSet)))
    waitNodeDataEquality(looper, *txnPoolNodeSet)
    sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
                              sdk_wallet_stewards[0], 1)
    waitNodeDataEquality(looper, *txnPoolNodeSet)
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,968 | Toktar/indy-plenum | refs/heads/master | /plenum/test/replica/test_consensus_data_helper.py | import pytest
from plenum.common.messages.node_messages import Checkpoint
from plenum.server.consensus.consensus_shared_data import ConsensusSharedData, preprepare_to_batch_id
from plenum.server.replica import ConsensusDataHelper
from plenum.test.consensus.conftest import pre_prepare, validators
@pytest.fixture
def consensus_data_helper(validators):
    """ConsensusDataHelper over fresh shared data for node 'sample', inst_id 0."""
    shared_data = ConsensusSharedData('sample', validators, 0)
    return ConsensusDataHelper(shared_data)
@pytest.fixture
def checkpoint():
    """A fixed checkpoint covering seq nos 1..100 of view 0."""
    chk_fields = dict(instId=0, viewNo=0, seqNoStart=1,
                      seqNoEnd=100, digest='digest')
    return Checkpoint(**chk_fields)
def test_pp_storages_ordering(pre_prepare, consensus_data_helper: ConsensusDataHelper):
    """preprepare_batch fills preprepared, prepare_batch fills prepared,
    clear_batch empties both."""
    data = consensus_data_helper.consensus_data
    consensus_data_helper.preprepare_batch(pre_prepare)
    assert data.preprepared
    assert not data.prepared
    consensus_data_helper.prepare_batch(pre_prepare)
    assert data.preprepared
    assert data.prepared
    consensus_data_helper.clear_batch(pre_prepare)
    assert not data.preprepared
    assert not data.prepared
def test_pp_storages_freeing_till(pre_prepare, consensus_data_helper: ConsensusDataHelper):
    """clear_batch_till_seq_no(n) drops stored batches with ppSeqNo below n
    and keeps batches at or above n."""
    # Batch at ppSeqNo=3 is below the threshold 4 and must be removed.
    pre_prepare.ppSeqNo = 3
    consensus_data_helper.consensus_data.prepared.append(preprepare_to_batch_id(pre_prepare))
    consensus_data_helper.consensus_data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    assert consensus_data_helper.consensus_data.preprepared
    assert consensus_data_helper.consensus_data.prepared
    consensus_data_helper.clear_batch_till_seq_no(4)
    assert not consensus_data_helper.consensus_data.preprepared
    assert not consensus_data_helper.consensus_data.prepared
    # Batch at ppSeqNo=4 is not below the threshold and must survive.
    pre_prepare.ppSeqNo = 4
    consensus_data_helper.consensus_data.prepared.append(preprepare_to_batch_id(pre_prepare))
    consensus_data_helper.consensus_data.preprepared.append(preprepare_to_batch_id(pre_prepare))
    consensus_data_helper.clear_batch_till_seq_no(4)
    assert consensus_data_helper.consensus_data.preprepared
    assert consensus_data_helper.consensus_data.prepared
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,969 | Toktar/indy-plenum | refs/heads/master | /plenum/test/audit_ledger/test_demote_backup_primary.py | from plenum.test.helper import sdk_send_random_and_check
from plenum.test.node_catchup.helper import ensure_all_nodes_have_same_data
from plenum.test.pool_transactions.helper import demote_node, disconnect_node_and_ensure_disconnected
from plenum.test.view_change.helper import start_stopped_node
# Pool size used by the test fixtures in this module.
nodeCount = 6
def test_demote_backup_primary(looper, txnPoolNodeSet, sdk_pool_handle,
                               sdk_wallet_stewards, tdir, tconf, allPluginsPath):
    """Demote the primary of backup instance 1, restart another node, and check
    the pool still reaches data equality and can order new requests.

    Fix: removed a redundant second `steward_for_demote_node = None`
    assignment immediately after the chained initialization.
    """
    assert len(txnPoolNodeSet) == 6
    node_to_restart = txnPoolNodeSet[-1]
    # Find the node that is primary of backup instance 1 and its steward.
    node_to_demote = steward_for_demote_node = demote_node_index = None
    for i, n in enumerate(txnPoolNodeSet):
        if n.name == txnPoolNodeSet[0].primaries[1]:
            node_to_demote = n
            steward_for_demote_node = sdk_wallet_stewards[i]
            demote_node_index = i
            break
    assert node_to_demote
    demote_node(looper, steward_for_demote_node, sdk_pool_handle,
                node_to_demote)
    del txnPoolNodeSet[demote_node_index]
    # Restart the last node so it has to catch up the pool changes.
    disconnect_node_and_ensure_disconnected(looper, txnPoolNodeSet, node_to_restart)
    looper.removeProdable(name=node_to_restart.name)
    node_to_restart = start_stopped_node(node_to_restart, looper, tconf,
                                         tdir, allPluginsPath)
    txnPoolNodeSet[-1] = node_to_restart
    ensure_all_nodes_have_same_data(looper, txnPoolNodeSet)
    sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
                              sdk_wallet_stewards[0], 1)
    ensure_all_nodes_have_same_data(looper, txnPoolNodeSet)
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,970 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/order_service/test_ordering_service_on_view_change.py | from plenum.common.messages.internal_messages import ViewChangeStarted, NewViewAccepted, NewViewCheckpointsApplied
from plenum.server.consensus.consensus_shared_data import BatchID
from plenum.server.consensus.ordering_service import OrderingService
from plenum.test.consensus.helper import copy_shared_data, create_batches, \
check_service_changed_only_owned_fields_in_shared_data, create_new_view
from plenum.test.helper import create_pre_prepare_no_bls, generate_state_root, create_prepare, create_commit_no_bls_sig
def test_update_shared_data_on_view_change_started(internal_bus, orderer):
    """On ViewChangeStarted the ordering service clears only the shared-data
    fields it owns (preprepared/prepared); all other fields stay unchanged."""
    data = orderer._data
    data.preprepared = create_batches(view_no=3)
    data.prepared = create_batches(view_no=3)
    before = copy_shared_data(data)
    internal_bus.send(ViewChangeStarted(view_no=4))
    after = copy_shared_data(data)
    check_service_changed_only_owned_fields_in_shared_data(OrderingService, before, after)
    assert data.preprepared == []
    assert data.prepared == []
def test_clear_data_on_view_change_started(internal_bus, orderer):
    """Every 3PC message store of the ordering service is emptied when a view
    change starts."""
    pp = create_pre_prepare_no_bls(generate_state_root(),
                                   view_no=0, pp_seq_no=10, inst_id=0)
    prepare = create_prepare(req_key=(0, 10),
                             state_root=generate_state_root(), inst_id=0)
    commit = create_commit_no_bls_sig(req_key=(0, 10), inst_id=0)
    key = (pp.viewNo, pp.ppSeqNo)
    # Populate every per-key store the service keeps.
    orderer.prePrepares[key] = pp
    orderer.prepares[key] = prepare
    orderer.commits[key] = commit
    orderer.requested_pre_prepares[key] = pp
    orderer.requested_prepares[key] = prepare
    orderer.requested_commits[key] = commit
    orderer.pre_prepare_tss[key][pp, "Node1"] = 1234
    orderer.prePreparesPendingFinReqs.append(pp)
    orderer.prePreparesPendingPrevPP[key] = pp
    orderer.sentPrePrepares[key] = pp
    orderer.batches[key] = [pp.ledgerId, pp.discarded,
                            pp.ppTime, generate_state_root(), len(pp.reqIdr)]
    orderer.ordered.add(*key)
    internal_bus.send(ViewChangeStarted(view_no=4))
    # All stores must now be empty.
    for store in (orderer.prePrepares, orderer.prepares, orderer.commits,
                  orderer.requested_pre_prepares, orderer.requested_prepares,
                  orderer.requested_commits, orderer.pre_prepare_tss,
                  orderer.prePreparesPendingFinReqs,
                  orderer.prePreparesPendingPrevPP, orderer.sentPrePrepares,
                  orderer.batches, orderer.ordered):
        assert not store
def test_stores_old_pre_prepares_on_view_change_started(internal_bus, orderer):
    """On ViewChangeStarted all PrePrepares from prePrepares and sentPrePrepares
    are moved into old_view_preprepares keyed by (ppSeqNo, digest); repeated
    view changes append to (not replace) that store."""
    def make_pp(view_no, pp_seq_no):
        # Fresh state root per PrePrepare, matching the original construction.
        return create_pre_prepare_no_bls(generate_state_root(),
                                         view_no=view_no, pp_seq_no=pp_seq_no,
                                         inst_id=0)
    pp1, pp2 = make_pp(0, 1), make_pp(0, 2)
    pp3, pp4 = make_pp(1, 3), make_pp(2, 4)
    pp5, pp6 = make_pp(3, 5), make_pp(3, 6)
    for pp in (pp1, pp3):
        orderer.prePrepares[(pp.viewNo, pp.ppSeqNo)] = pp
    for pp in (pp2, pp4):
        orderer.sentPrePrepares[(pp.viewNo, pp.ppSeqNo)] = pp
    assert not orderer.old_view_preprepares
    internal_bus.send(ViewChangeStarted(view_no=4))
    for pp in (pp1, pp2, pp3, pp4):
        assert orderer.old_view_preprepares[(pp.ppSeqNo, pp.digest)] == pp
    # next calls append to existing data
    orderer.prePrepares[(pp5.viewNo, pp5.ppSeqNo)] = pp5
    orderer.sentPrePrepares[(pp6.viewNo, pp6.ppSeqNo)] = pp6
    internal_bus.send(ViewChangeStarted(view_no=4))
    for pp in (pp1, pp2, pp3, pp4, pp5, pp6):
        assert orderer.old_view_preprepares[(pp.ppSeqNo, pp.digest)] == pp
def test_do_nothing_on_new_view_accepted(internal_bus, orderer):
    """NewViewAccepted alone must leave the orderer's shared data untouched."""
    orderer._data.preprepared = create_batches(view_no=0)
    orderer._data.prepared = create_batches(view_no=0)
    snapshot_before = copy_shared_data(orderer._data)

    prev_view_no = 3
    nv = create_new_view(initial_view_no=prev_view_no, stable_cp=200)
    internal_bus.send(NewViewAccepted(view_no=prev_view_no + 1,
                                      view_changes=nv.viewChanges,
                                      checkpoint=nv.checkpoint,
                                      batches=nv.batches))

    snapshot_after = copy_shared_data(orderer._data)
    assert snapshot_before == snapshot_after
def test_update_shared_data_on_mew_view_checkpoint_applied(internal_bus, orderer):
    # NOTE: "mew" in the test name is a long-standing typo for "new"; kept so
    # selecting the test by name stays stable.
    """NewViewCheckpointsApplied must rebuild `preprepared` for the new view
    on the master instance and leave backups (and `prepared`) empty."""
    orderer._data.preprepared = []
    orderer._data.prepared = []
    snapshot_before = copy_shared_data(orderer._data)

    prev_view_no = 3
    nv = create_new_view(initial_view_no=prev_view_no, stable_cp=200)
    internal_bus.send(NewViewCheckpointsApplied(view_no=prev_view_no + 1,
                                                view_changes=nv.viewChanges,
                                                checkpoint=nv.checkpoint,
                                                batches=nv.batches))

    snapshot_after = copy_shared_data(orderer._data)
    check_service_changed_only_owned_fields_in_shared_data(OrderingService,
                                                           snapshot_before,
                                                           snapshot_after)

    assert orderer._data.prepared == []
    if not orderer.is_master:
        assert orderer._data.preprepared == []
    else:
        # preprepared entries are re-created with the new view number
        assert orderer._data.preprepared
        expected = [BatchID(view_no=prev_view_no + 1,
                            pp_seq_no=batch_id.pp_seq_no,
                            pp_digest=batch_id.pp_digest)
                    for batch_id in nv.batches]
        assert orderer._data.preprepared == expected
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,971 | Toktar/indy-plenum | refs/heads/master | /plenum/server/consensus/view_change_service.py | from _sha256 import sha256
from collections import defaultdict
from functools import partial
from operator import itemgetter
from typing import List, Optional, Union, Dict, Any, Tuple
from common.serializers.json_serializer import JsonSerializer
from plenum.common.config_util import getConfig
from plenum.common.event_bus import InternalBus, ExternalBus
from plenum.common.messages.internal_messages import NeedViewChange, NewViewAccepted, ViewChangeStarted
from plenum.common.messages.node_messages import ViewChange, ViewChangeAck, NewView, Checkpoint
from plenum.common.router import Subscription
from plenum.common.stashing_router import StashingRouter, DISCARD, PROCESS
from plenum.common.timer import TimerService
from plenum.server.consensus.consensus_shared_data import ConsensusSharedData, BatchID
from plenum.server.consensus.primary_selector import RoundRobinPrimariesSelector
from plenum.server.quorums import Quorums
from plenum.server.replica_validator_enums import STASH_VIEW
from stp_core.common.log import getlogger
def view_change_digest(msg: ViewChange) -> str:
    """Return the hex SHA-256 digest of a ViewChange message.

    NOTE: the previous implementation serialized ``msg.__dict__`` directly
    and overwrote its ``checkpoints`` entry with plain dicts, mutating the
    message in place — a second digest of the same object then operated on
    already-converted checkpoints. Work on a shallow copy instead, leaving
    ``msg`` untouched.
    """
    msg_as_dict = dict(msg.__dict__)  # shallow copy: do not mutate the message
    msg_as_dict['checkpoints'] = [cp.__dict__ for cp in msg_as_dict['checkpoints']]
    serialized = JsonSerializer().dumps(msg_as_dict)
    return sha256(serialized).hexdigest()
class ViewChangeVotesForNode:
    """
    Tracks one node's view change vote for some view, together with the
    ViewChangeAck messages other nodes sent for it.
    """

    def __init__(self, quorums: Quorums):
        self._quorums = quorums
        self._view_change = None
        self._digest = None
        # digest -> set of node names that acked that digest
        self._acks = defaultdict(set)  # Dict[str, Set[str]]

    @property
    def digest(self) -> Optional[str]:
        """Digest of the received view change message, if any."""
        return self._digest

    @property
    def view_change(self) -> Optional[ViewChange]:
        """The received view change message, if any."""
        return self._view_change

    @property
    def is_confirmed(self) -> bool:
        """True when the vote was received and gathered enough acks."""
        if self._digest is None:
            return False
        ack_count = len(self._acks[self._digest])
        return self._quorums.view_change_ack.is_reached(ack_count)

    def add_view_change(self, msg: ViewChange) -> bool:
        """Record the vote; False signals the sender looks suspicious."""
        if self._view_change is not None:
            # Repeated vote must carry the same digest as the first one
            return self._digest == view_change_digest(msg)
        self._view_change = msg
        self._digest = view_change_digest(msg)
        return self._validate_acks()

    def add_view_change_ack(self, msg: ViewChangeAck, frm: str) -> bool:
        """Record an ack; False signals the sender looks suspicious."""
        self._acks[msg.digest].add(frm)
        return self._validate_acks()

    def _validate_acks(self) -> bool:
        # Digests whose acks reached the weak quorum
        confirmed = [digest for digest, ack_set in self._acks.items()
                     if self._quorums.weak.is_reached(len(ack_set))]
        if len(confirmed) > 1:
            # Two different digests cannot both be legitimately confirmed
            return False
        if not confirmed or self._digest is None:
            return True
        return self._digest == confirmed[0]
class ViewChangeVotesForView:
    """
    Per-view storage of view change votes: one ViewChangeVotesForNode entry
    per voting node, plus the corresponding acks.
    """

    def __init__(self, quorums: Quorums):
        self._quorums = quorums
        self._votes = defaultdict(partial(ViewChangeVotesForNode, quorums))

    @property
    def confirmed_votes(self) -> List[Tuple[str, str]]:
        """(node name, digest) pairs for votes confirmed by enough acks."""
        result = []
        for frm, node_votes in self._votes.items():
            if node_votes.is_confirmed:
                result.append((frm, node_votes.digest))
        return result

    def get_view_change(self, frm: str, digest: str) -> Optional[ViewChange]:
        """Return frm's view change if its digest matches, else None."""
        candidate = self._votes[frm].view_change
        if candidate is None:
            return None
        if view_change_digest(candidate) != digest:
            return None
        return candidate

    def add_view_change(self, msg: ViewChange, frm: str) -> bool:
        """Record a vote; False signals the sender looks suspicious."""
        return self._votes[frm].add_view_change(msg)

    def add_view_change_ack(self, msg: ViewChangeAck, frm: str) -> bool:
        """Record an ack; False signals the sender looks suspicious."""
        return self._votes[msg.name].add_view_change_ack(msg, frm)

    def clear(self):
        self._votes.clear()
class ViewChangeService:
    """Drives the view change protocol for one replica.

    Gathers ViewChange votes and acks; when this replica is the new primary
    it builds and broadcasts the NewView, otherwise it validates the NewView
    received from the new primary and re-triggers a view change on mismatch.
    """

    def __init__(self, data: ConsensusSharedData, timer: TimerService, bus: InternalBus, network: ExternalBus,
                 stasher: StashingRouter):
        self._config = getConfig()
        self._logger = getlogger()

        self._data = data
        self._new_view_builder = NewViewBuilder(self._data)
        self._timer = timer
        self._bus = bus
        self._network = network
        self._router = stasher

        # Votes and acks gathered for the view change currently in progress
        self._votes = ViewChangeVotesForView(self._data.quorums)
        # NewView built by us (as primary) or received from the new primary
        self._new_view = None  # type: Optional[NewView]

        self._router.subscribe(ViewChange, self.process_view_change_message)
        self._router.subscribe(ViewChangeAck, self.process_view_change_ack_message)
        self._router.subscribe(NewView, self.process_new_view_message)

        # Batches carried over from previous views; entries at or below the
        # stable checkpoint are dropped when a new view change starts
        self._old_prepared = {}  # type: Dict[int, BatchID]
        self._old_preprepared = {}  # type: Dict[int, List[BatchID]]
        self._primaries_selector = RoundRobinPrimariesSelector()

        self._subscription = Subscription()
        self._subscription.subscribe(self._bus, NeedViewChange, self.process_need_view_change)

    def __repr__(self):
        return self._data.name

    def process_need_view_change(self, msg: NeedViewChange):
        """Start a view change: bump the view number, update shared data,
        announce the start internally and broadcast this node's vote."""
        # 1. calculate new viewno
        view_no = msg.view_no
        if view_no is None:
            view_no = self._data.view_no + 1

        # 2. Do cleanup before new view change starts
        self._clean_on_view_change_start()

        # 3. Update shared data
        self._data.view_no = view_no
        self._data.waiting_for_new_view = True
        self._data.primaries = self._primaries_selector.select_primaries(view_no=self._data.view_no,
                                                                         instance_count=self._data.quorums.f + 1,
                                                                         validators=self._data.validators)
        self._data.primary_name = self._data.primaries[self._data.inst_id]

        # 4. Build ViewChange message
        vc = self._build_view_change_msg()

        # 5. Send ViewChangeStarted via internal bus to update other services
        self._bus.send(ViewChangeStarted(view_no=self._data.view_no))

        # 6. Send ViewChange msg to other nodes (via external bus) and count
        # our own vote as well
        self._network.send(vc)
        self._votes.add_view_change(vc, self._data.name)

        # 7. Unstash messages for new view
        self._router.process_all_stashed()

    def _clean_on_view_change_start(self):
        # Votes and a pending NewView belong to the previous round; old
        # batches below the stable checkpoint can never be re-proposed.
        self._clear_old_batches(self._old_prepared)
        self._clear_old_batches(self._old_preprepared)
        self._votes.clear()
        self._new_view = None

    def _clear_old_batches(self, batches: Dict[int, Any]):
        # Drop every entry whose pp_seq_no is covered by the stable checkpoint
        for pp_seq_no in list(batches.keys()):
            if pp_seq_no <= self._data.stable_checkpoint:
                del batches[pp_seq_no]

    def _build_view_change_msg(self):
        """Assemble this node's ViewChange vote from current and carried-over
        prepared/pre-prepared certificates."""
        # Newest prepared certificate per pp_seq_no wins
        for batch_id in self._data.prepared:
            self._old_prepared[batch_id.pp_seq_no] = batch_id
        prepared = sorted([tuple(bid) for bid in self._old_prepared.values()])

        # For pre-prepared batches keep every distinct digest per pp_seq_no,
        # replacing an older entry that has the same digest
        for new_bid in self._data.preprepared:
            pretenders = self._old_preprepared.get(new_bid.pp_seq_no, [])
            pretenders = [bid for bid in pretenders
                          if bid.pp_digest != new_bid.pp_digest]
            pretenders.append(new_bid)
            self._old_preprepared[new_bid.pp_seq_no] = pretenders
        preprepared = sorted([tuple(bid) for bids in self._old_preprepared.values() for bid in bids])

        return ViewChange(
            viewNo=self._data.view_no,
            stableCheckpoint=self._data.stable_checkpoint,
            prepared=prepared,
            preprepared=preprepared,
            checkpoints=list(self._data.checkpoints)
        )

    def process_view_change_message(self, msg: ViewChange, frm: str):
        """Handle a ViewChange vote from another node.

        The primary counts votes directly and may emit a NewView; every
        other node forwards an ack to the primary instead."""
        result = self._validate(msg, frm)
        if result != PROCESS:
            return result, None

        self._votes.add_view_change(msg, frm)

        if self._data.is_primary:
            self._send_new_view_if_needed()
            return PROCESS, None

        vca = ViewChangeAck(
            viewNo=msg.viewNo,
            name=frm,
            digest=view_change_digest(msg)
        )
        self._network.send(vca, self._data.primary_name)

        self._finish_view_change_if_needed()
        return PROCESS, None

    def process_view_change_ack_message(self, msg: ViewChangeAck, frm: str):
        """Handle an ack for another node's vote; only the primary cares."""
        result = self._validate(msg, frm)
        if result != PROCESS:
            return result, None

        if not self._data.is_primary:
            return PROCESS, None

        self._votes.add_view_change_ack(msg, frm)
        self._send_new_view_if_needed()
        return PROCESS, None

    def process_new_view_message(self, msg: NewView, frm: str):
        """Handle a NewView; accept it only from the expected new primary."""
        result = self._validate(msg, frm)
        if result != PROCESS:
            return result, None

        if frm != self._data.primary_name:
            self._logger.info(
                "{} Received NewView {} for view {} from non-primary {}; expected primary {}".format(self._data.name,
                                                                                                     msg,
                                                                                                     self._data.view_no,
                                                                                                     frm,
                                                                                                     self._data.primary_name)
            )
            return DISCARD, "New View from non-Primary"

        self._new_view = msg
        self._finish_view_change_if_needed()
        return PROCESS, None

    def _validate(self, msg: Union[ViewChange, ViewChangeAck, NewView], frm: str) -> int:
        """Route a protocol message: discard stale ones, stash future-view
        ones, process only messages for the view change in progress."""
        # TODO: Proper validation

        if msg.viewNo < self._data.view_no:
            return DISCARD

        # Same view but the view change already completed -> stale
        if msg.viewNo == self._data.view_no and not self._data.waiting_for_new_view:
            return DISCARD

        if msg.viewNo > self._data.view_no:
            return STASH_VIEW

        return PROCESS

    def _send_new_view_if_needed(self):
        """(Primary only) Emit a NewView once a view-change quorum of
        confirmed votes yields a valid checkpoint and batch set."""
        confirmed_votes = self._votes.confirmed_votes
        if not self._data.quorums.view_change.is_reached(len(confirmed_votes)):
            return

        view_changes = [self._votes.get_view_change(*v) for v in confirmed_votes]
        cp = self._new_view_builder.calc_checkpoint(view_changes)
        if cp is None:
            return

        batches = self._new_view_builder.calc_batches(cp, view_changes)
        if batches is None:
            return

        nv = NewView(
            viewNo=self._data.view_no,
            viewChanges=sorted(confirmed_votes, key=itemgetter(0)),
            checkpoint=cp,
            batches=batches
        )
        self._network.send(nv)
        self._new_view = nv
        self._finish_view_change()

    def _finish_view_change_if_needed(self):
        """(Non-primary) Validate the received NewView by recomputing the
        checkpoint and batches from our own copies of the listed votes."""
        if self._new_view is None:
            return

        view_changes = []
        for name, vc_digest in self._new_view.viewChanges:
            vc = self._votes.get_view_change(name, vc_digest)
            # We don't have needed ViewChange, so we cannot validate NewView
            if vc is None:
                return
            view_changes.append(vc)

        cp = self._new_view_builder.calc_checkpoint(view_changes)
        if cp is None or cp != self._new_view.checkpoint:
            # New primary is malicious
            self._logger.info(
                "{} Received invalid NewView {} for view {}: expected checkpoint {}".format(self._data.name,
                                                                                            self._new_view,
                                                                                            self._data.view_no,
                                                                                            cp)
            )
            self._bus.send(NeedViewChange())
            return

        batches = self._new_view_builder.calc_batches(cp, view_changes)
        if batches != self._new_view.batches:
            # New primary is malicious
            self._logger.info(
                "{} Received invalid NewView {} for view {}: expected batches {}".format(self._data.name,
                                                                                         self._new_view,
                                                                                         self._data.view_no,
                                                                                         batches)
            )
            self._bus.send(NeedViewChange())
            return

        self._finish_view_change()

    def _finish_view_change(self):
        # Update shared data
        self._data.waiting_for_new_view = False

        # send message to other services
        self._bus.send(NewViewAccepted(view_no=self._new_view.viewNo,
                                       view_changes=self._new_view.viewChanges,
                                       checkpoint=self._new_view.checkpoint,
                                       batches=self._new_view.batches))
class NewViewBuilder:
    """Derives NewView contents (starting checkpoint and batch list) from a
    set of ViewChange messages, PBFT-style."""

    def __init__(self, data: ConsensusSharedData) -> None:
        self._data = data

    def calc_checkpoint(self, vcs: List[ViewChange]) -> Optional[Checkpoint]:
        """Return the highest checkpoint eligible to start the new view, or
        None if no candidate satisfies both quorums."""
        checkpoints = []
        for cur_vc in vcs:
            for cur_cp in cur_vc.checkpoints:
                # Don't add checkpoint to pretending ones if it is already there
                if cur_cp in checkpoints:
                    continue

                # Don't add checkpoint to pretending ones if too many nodes already stabilized it
                # TODO: Should we take into account view_no as well?
                stable_checkpoint_not_higher = [vc for vc in vcs if cur_cp.seqNoEnd >= vc.stableCheckpoint]
                if not self._data.quorums.strong.is_reached(len(stable_checkpoint_not_higher)):
                    continue

                # Don't add checkpoint to pretending ones if not enough nodes have it
                have_checkpoint = [vc for vc in vcs if cur_cp in vc.checkpoints]
                if not self._data.quorums.weak.is_reached(len(have_checkpoint)):
                    continue

                # All checks passed, this is a valid candidate checkpoint
                checkpoints.append(cur_cp)

        # Among the candidates pick the one ending at the highest seq no
        highest_cp = None
        for cp in checkpoints:
            # TODO: Should we take into account view_no as well?
            if highest_cp is None or cp.seqNoEnd > highest_cp.seqNoEnd:
                highest_cp = cp

        return highest_cp

    def calc_batches(self, cp: Checkpoint, vcs: List[ViewChange]) -> Optional[List[BatchID]]:
        """Return the sorted batches to re-propose after checkpoint `cp`, or
        None if the votes don't yet give a quorum for every slot."""
        # TODO: Optimize this
        batches = set()
        pp_seq_no = cp.seqNoEnd + 1
        while pp_seq_no <= cp.seqNoEnd + self._data.log_size:
            bid = self._try_find_batch_for_pp_seq_no(vcs, pp_seq_no)
            if bid:
                batches.add(bid)
                pp_seq_no += 1
                continue

            if self._check_null_batch(vcs, pp_seq_no):
                # TODO: the protocol says to do the loop for all pp_seq_no till h+L (apply NULL batches)
                # Since we require sequential applying of PrePrepares, we can stop on the first non-found (NULL) batch
                # Double-check this!
                break

            # not enough quorums yet
            return None

        return sorted(batches)

    def _try_find_batch_for_pp_seq_no(self, vcs, pp_seq_no):
        # A batch is selected for the slot only if it is both prepared
        # (strong quorum) and pre-prepared (weak quorum) across the votes
        for vc in vcs:
            for _bid in vc.prepared:
                bid = BatchID(*_bid)
                if bid.pp_seq_no != pp_seq_no:
                    continue
                if not self._is_batch_prepared(bid, vcs):
                    continue
                if not self._is_batch_preprepared(bid, vcs):
                    continue
                return bid

        return None

    def _is_batch_prepared(self, bid: BatchID, vcs: List[ViewChange]) -> bool:
        """True if a strong quorum of votes is compatible with `bid` being
        prepared (a vote with no conflicting entry counts as a witness)."""
        def check(vc: ViewChange):
            if bid.pp_seq_no <= vc.stableCheckpoint:
                return False

            for _some_bid in vc.prepared:
                some_bid = BatchID(*_some_bid)
                if some_bid.pp_seq_no != bid.pp_seq_no:
                    continue
                # not ( (v' < v) OR (v'==v and d'==d) )
                if some_bid.view_no > bid.view_no:
                    return False
                if some_bid.view_no >= bid.view_no and some_bid.pp_digest != bid.pp_digest:
                    return False

            return True

        prepared_witnesses = sum(1 for vc in vcs if check(vc))
        return self._data.quorums.strong.is_reached(prepared_witnesses)

    def _is_batch_preprepared(self, bid: BatchID, vcs: List[ViewChange]) -> bool:
        """True if a weak quorum of votes pre-prepared the same digest at the
        same slot in this or a later view."""
        def check(vc: ViewChange):
            for _some_bid in vc.preprepared:
                some_bid = BatchID(*_some_bid)
                if some_bid.pp_seq_no != bid.pp_seq_no:
                    continue
                if some_bid.pp_digest != bid.pp_digest:
                    continue
                if some_bid.view_no >= bid.view_no:
                    return True
            return False

        preprepared_witnesses = sum(1 for vc in vcs if check(vc))
        return self._data.quorums.weak.is_reached(preprepared_witnesses)

    def _check_null_batch(self, vcs, pp_seq_no):
        """True if a strong quorum of votes shows nothing prepared at this
        slot above their stable checkpoints (i.e. the slot is NULL)."""
        def check(vc: ViewChange):
            if pp_seq_no <= vc.stableCheckpoint:
                return False

            for _some_bid in vc.prepared:
                some_bid = BatchID(*_some_bid)
                if some_bid.pp_seq_no == pp_seq_no:
                    return False

            return True

        null_batch_witnesses = sum(1 for vc in vcs if check(vc))
        return self._data.quorums.strong.is_reached(null_batch_witnesses)
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,972 | Toktar/indy-plenum | refs/heads/master | /plenum/test/simulation/sim_network.py | from collections import OrderedDict
from functools import partial
from typing import Any, Iterable
from plenum.common.event_bus import ExternalBus
from plenum.common.timer import TimerService
from plenum.test.simulation.sim_random import SimRandom
class SimNetwork:
    """Simulated network that delivers messages between registered peers
    after a random latency drawn from [_min_latency, _max_latency]."""

    def __init__(self, timer: TimerService, random: SimRandom):
        self._timer = timer
        self._random = random
        self._min_latency = 1
        self._max_latency = 500
        self._peers = OrderedDict()  # type: OrderedDict[str, ExternalBus]

    def create_peer(self, name: str) -> ExternalBus:
        """Register a new peer under a unique name and return its bus."""
        if name in self._peers:
            raise ValueError("Peer with name '{}' already exists".format(name))

        peer_bus = ExternalBus(partial(self._send_message, name))
        self._peers[name] = peer_bus
        return peer_bus

    def set_latency(self, min_value: int, max_value: int):
        """Set the inclusive bounds for per-message delivery delay."""
        self._min_latency = min_value
        self._max_latency = max_value

    def _send_message(self, frm: str, msg: Any, dst: ExternalBus.Destination):
        # Normalize destination: None means broadcast to everyone but sender
        if dst is None:
            dst = [name for name in self._peers if name != frm]
        elif isinstance(dst, str):
            dst = [dst]
        elif isinstance(dst, Iterable):
            assert len(dst) > 0, "{} tried to send message {} to no one".format(frm, msg)
        else:
            assert False, "{} tried to send message {} to unsupported destination {}".format(frm, msg, dst)

        for name in dst:
            assert name != frm, "{} tried to send message {} to itself".format(frm, msg)
            peer = self._peers.get(name)
            assert peer, "{} tried to send message {} to unknown peer {}".format(frm, msg, name)
            delay = self._random.integer(self._min_latency, self._max_latency)
            self._timer.schedule(delay, partial(peer.process_incoming, msg, frm))
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,973 | Toktar/indy-plenum | refs/heads/master | /plenum/test/audit_ledger/test_future_primaries_unit.py | import pytest
from plenum.common.constants import TARGET_NYM, DATA, ALIAS, SERVICES, TXN_TYPE, NODE
from plenum.common.request import Request
from plenum.server.batch_handlers.three_pc_batch import ThreePcBatch
from plenum.server.future_primaries_batch_handler import FuturePrimariesBatchHandler
from plenum.server.propagator import ReqState
from plenum.test.testing_utils import FakeSomething
@pytest.fixture(scope='function')
def node():
    """Fake node with six registered nodes and one pending NODE txn request
    that adds validator 'n7'."""
    add_n7_op = {TARGET_NYM: 'nym7',
                 TXN_TYPE: NODE,
                 DATA: {SERVICES: ['VALIDATOR'],
                        ALIAS: 'n7'}}
    n = FakeSomething()
    n.new_future_primaries_needed = False
    n.requests = {'a': ReqState(Request(operation=add_n7_op))}
    n.nodeReg = {name: 1 for name in ('n1', 'n2', 'n3', 'n4', 'n5', 'n6')}
    n.nodeIds = {'nym{}'.format(i): 'n{}'.format(i) for i in range(1, 7)}
    n.primaries = {'n1', 'n2'}
    n.primaries_selector = FakeSomething()
    n.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2']
    n.viewNo = 0
    return n
@pytest.fixture(scope='function')
def future_primaries(node):
    """FuturePrimariesBatchHandler wired to the fake node."""
    return FuturePrimariesBatchHandler(FakeSomething(), node)
@pytest.fixture(scope='function')
def three_pc_batch():
    """Minimal ThreePcBatch covering requests 'a', 'b', 'c' with primaries ['a']."""
    return ThreePcBatch(0, 0, 0, 3, 1, 'state', 'txn', ['a', 'b', 'c'], ['a'])
def test_add_node_empty_states(future_primaries, node, three_pc_batch):
    """Applying one batch records a single NodeState whose node_reg already
    contains the node ('n7') added by the pending request."""
    future_primaries.post_batch_applied(three_pc_batch)
    recorded_states = future_primaries.node_states
    assert len(recorded_states) == 1

    expected_reg = list(node.nodeReg.keys()) + ['n7']
    node.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2', 'n3']
    assert expected_reg == recorded_states[0].node_reg
def test_add_and_demote_node(future_primaries, node, three_pc_batch):
    """Add n7 via the pending request, then demote it; the second recorded
    NodeState's node_reg must equal the unchanged base registry.

    (Removed a dead local: the original built and appended to a `node_reg`
    list that was never read.)
    """
    future_primaries.post_batch_applied(three_pc_batch)
    node.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2', 'n3']

    # Demote n7: empty SERVICES means it is no longer a validator
    node.requests['a'].request.operation[DATA][SERVICES] = []
    future_primaries.post_batch_applied(three_pc_batch)

    states = future_primaries.node_states
    assert len(states) == 2
    assert list(node.nodeReg.keys()) == states[-1].node_reg
def test_apply_and_commit_1(future_primaries, node, three_pc_batch):
    """After one applied batch, set_node_state keeps exactly one state.

    (Removed a dead local: the original built a `node_reg` list that was
    never read.)
    """
    future_primaries.post_batch_applied(three_pc_batch)
    node.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2', 'n3']
    future_primaries.set_node_state()
    assert len(future_primaries.node_states) == 1
def test_apply_and_commit_2(future_primaries, node, three_pc_batch):
    """After add + demote batches, set_node_state collapses to one state
    whose node_reg equals the unchanged base registry.

    (Removed a dead local: the original built a `node_reg` list that was
    never read.)
    """
    future_primaries.post_batch_applied(three_pc_batch)
    node.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2', 'n3']

    # Demote n7: empty SERVICES means it is no longer a validator
    node.requests['a'].request.operation[DATA][SERVICES] = []
    future_primaries.post_batch_applied(three_pc_batch)

    future_primaries.set_node_state()
    assert len(future_primaries.node_states) == 1
    assert future_primaries.node_states[0].node_reg == list(node.nodeReg.keys())
def test_apply_and_revert(future_primaries, node, three_pc_batch):
    """Rejecting the second (demotion) batch reverts to the state that still
    contains the added node n7.

    (Removed a dead local: the original built a `node_reg` list early on and
    then shadowed it without ever reading the first value.)
    """
    future_primaries.post_batch_applied(three_pc_batch)
    node.primaries_selector.select_primaries = lambda view_no, instance_count, validators: ['n1', 'n2', 'n3']

    # Demote n7, apply, then reject that batch
    node.requests['a'].request.operation[DATA][SERVICES] = []
    future_primaries.post_batch_applied(three_pc_batch)
    future_primaries.post_batch_rejected(0)

    assert len(future_primaries.node_states) == 1
    expected_reg = list(node.nodeReg.keys())
    expected_reg.append('n7')
    assert future_primaries.node_states[0].node_reg == expected_reg
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,974 | Toktar/indy-plenum | refs/heads/master | /plenum/test/node/test_api.py | import pytest
from common.exceptions import LogicError
from plenum.common.constants import TXN_TYPE, TXN_PAYLOAD, TXN_PAYLOAD_METADATA, TXN_PAYLOAD_METADATA_DIGEST, \
TXN_PAYLOAD_TYPE, TXN_PAYLOAD_DATA, TXN_PAYLOAD_METADATA_REQ_ID, TXN_METADATA, TXN_METADATA_SEQ_NO, \
TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST
from plenum.common.request import Request
def test_on_view_change_complete_fails(test_node):
    """on_view_change_complete must raise while replicas lack primaries."""
    with pytest.raises(LogicError) as exc_info:
        test_node.on_view_change_complete()
    assert "Not all replicas have primaries" in str(exc_info.value)
def test_ledger_id_for_request_fails(test_node):
    """Requests with a missing or empty TXN_TYPE cannot be mapped to a ledger."""
    bad_requests = (Request(operation={}), Request(operation={TXN_TYPE: None}))
    for req in bad_requests:
        with pytest.raises(ValueError) as exc_info:
            test_node.ledger_id_for_request(req)
        assert "TXN_TYPE is not defined for request" in str(exc_info.value)
def test_seq_no_db_updates(test_node):
    """Passing False as the third argument of postTxnFromCatchupAddedToLedger
    must leave seqNoDB's size unchanged."""
    size_before = test_node.seqNoDB.size
    txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222",
                TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST: "112222",
            },
            TXN_PAYLOAD_DATA: {}
        },
        TXN_METADATA: {
            TXN_METADATA_SEQ_NO: "1"
        }
    }
    test_node.postTxnFromCatchupAddedToLedger(2, txn, False)
    assert size_before == test_node.seqNoDB.size
def test_seq_no_db_updates_by_default(test_node):
    """Without the explicit third argument, postTxnFromCatchupAddedToLedger
    must grow seqNoDB by two entries for this txn."""
    size_before = test_node.seqNoDB.size
    txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222",
                TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST: "112222",
                TXN_PAYLOAD_METADATA_REQ_ID: "12"
            },
            TXN_PAYLOAD_DATA: {}
        },
        TXN_METADATA: {
            TXN_METADATA_SEQ_NO: "1"
        }
    }
    test_node.postTxnFromCatchupAddedToLedger(2, txn)
    assert size_before + 2 == test_node.seqNoDB.size
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,975 | Toktar/indy-plenum | refs/heads/master | /plenum/test/audit_ledger/test_future_primaries_addition.py | import copy
from plenum.test.test_node import ensureElectionsDone
from plenum.test.view_change.helper import add_new_node
from plenum.test.helper import checkViewNoForNodes
from plenum.test.pool_transactions.helper import demote_node
# NOTE(review): presumably consumed by the pool fixtures to size the pool — confirm
nodeCount = 6
# Holds the original FuturePrimariesBatchHandler.commit_batch: the
# *_increase test below stubs it out (so NodeStates survive for inspection)
# and the *_decrease test restores it.
old_commit = None
def test_future_primaries_replicas_increase(looper, txnPoolNodeSet, sdk_pool_handle,
                                            sdk_wallet_stewards, tdir, tconf, allPluginsPath):
    """Adding a node triggers a view change and grows the recorded primaries
    in the last NodeState by one.

    (Removed an unused local `last_ordered` and a stale comment.)
    """
    # Stub out commit_batch so NodeStates are not cleaned up and can be
    # inspected; the original is restored at the end of the *_decrease test.
    global old_commit
    old_commit = txnPoolNodeSet[0].write_manager.future_primary_handler.commit_batch
    for node in txnPoolNodeSet:
        node.write_manager.future_primary_handler.commit_batch = lambda three_pc_batch, prev_handler_result=None: 0

    initial_primaries = copy.copy(txnPoolNodeSet[0].primaries)
    starting_view_number = checkViewNoForNodes(txnPoolNodeSet)

    # Increase replicas count
    add_new_node(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_stewards[0], tdir, tconf, allPluginsPath)

    new_view_no = checkViewNoForNodes(txnPoolNodeSet)
    assert new_view_no == starting_view_number + 1

    state = txnPoolNodeSet[0].write_manager.future_primary_handler.node_states[-1]
    assert len(state.primaries) == len(initial_primaries) + 1
    assert len(state.primaries) == len(txnPoolNodeSet[0].primaries)
def test_future_primaries_replicas_decrease(looper, txnPoolNodeSet, sdk_pool_handle,
                                            sdk_wallet_stewards, tdir, tconf, allPluginsPath):
    """Demoting a node triggers a view change and shrinks the recorded
    primaries in the last NodeState by one.

    (Removed an unused local `last_ordered`.)
    """
    assert len(txnPoolNodeSet) == 7
    initial_primaries = copy.copy(txnPoolNodeSet[0].primaries)
    starting_view_number = checkViewNoForNodes(txnPoolNodeSet)

    # Decrease replicas count
    demote_node(looper, sdk_wallet_stewards[-1], sdk_pool_handle, txnPoolNodeSet[-2])
    txnPoolNodeSet.remove(txnPoolNodeSet[-2])
    ensureElectionsDone(looper=looper, nodes=txnPoolNodeSet)

    new_view_no = checkViewNoForNodes(txnPoolNodeSet)
    assert new_view_no == starting_view_number + 1

    state = txnPoolNodeSet[0].write_manager.future_primary_handler.node_states[-1]
    assert len(state.primaries) + 1 == len(initial_primaries)
    assert len(state.primaries) == len(txnPoolNodeSet[0].primaries)

    # Restore the commit_batch stubbed out by the *_increase test above
    for node in txnPoolNodeSet:
        node.write_manager.future_primary_handler.commit_batch = old_commit
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,976 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/view_change/test_sim_view_change.py | from functools import partial
import pytest
from plenum.common.messages.internal_messages import NeedViewChange
from plenum.server.consensus.view_change_service import BatchID
from plenum.test.consensus.view_change.helper import some_pool
from plenum.test.helper import MockNetwork
from plenum.test.simulation.sim_random import SimRandom, DefaultSimRandom
def check_view_change_completes_under_normal_conditions(random: SimRandom):
    """Drive a random pool through a staggered view change and verify all
    nodes converge on the same view, primary, checkpoint and batches."""
    # Create random pool with random initial state
    pool, committed = some_pool(random)

    # Trigger the view change at a different (random) time on every node
    for node in pool.nodes:
        pool.timer.schedule(random.integer(0, 10000),
                            partial(node._view_changer.process_need_view_change, NeedViewChange()))

    # Run the simulation until every node has completed the view change
    pool.timer.wait_for(lambda: all(not node._data.waiting_for_new_view
                                    and node._data.view_no > 0
                                    for node in pool.nodes))

    # All nodes must agree on the resulting shared state
    reference = pool.nodes[0]._data
    for other in pool.nodes[1:]:
        assert other._data.view_no == reference.view_no
        assert other._data.primary_name == reference.primary_name
        assert other._data.stable_checkpoint == reference.stable_checkpoint
        assert other._data.preprepared == reference.preprepared

    # Every committed request above the stable checkpoint must be re-ordered
    # with the same ppSeqNo in the new view
    stable_checkpoint = reference.stable_checkpoint
    committed = [c for c in committed if c.pp_seq_no > stable_checkpoint]
    for node in pool.nodes:
        assert committed == node._data.preprepared[:len(committed)]
def calc_committed(view_changes):
    """Compute the batches that all given view-change votes agree on.

    A batch at a given pp_seq_no is committed once every vote carries a
    consistent preprepared entry for it and also lists it as prepared.
    Scanning stops at the first pp_seq_no not prepared in every vote.
    """
    result = []
    for seq_no in range(1, 50):
        agreed = None
        for vc in view_changes:
            # seq_no must map to the same preprepared batch in every vote
            match = next((pp for pp in vc.preprepared if pp[1] == seq_no), None)
            if match is not None:
                if agreed is None:
                    agreed = match
                assert agreed == match
            # ...and that batch must be present in every vote's prepared list
            if agreed not in vc.prepared:
                return result
        result.append(BatchID(*agreed))
    return result
@pytest.mark.parametrize("seed", range(200))
def test_view_change_completes_under_normal_conditions(seed):
    """Run the view-change convergence scenario over 200 deterministic seeds."""
    random = DefaultSimRandom(seed)
    check_view_change_completes_under_normal_conditions(random)
def test_new_view_combinations(random):
    """The new-view checkpoint and batch calculation, applied to any strong
    quorum of view-change votes, must include every committed request."""
    # Create pool in some random initial state
    pool, _ = some_pool(random)
    quorums = pool.nodes[0]._data.quorums

    # Get view change votes from all nodes: attach a mock network to each
    # node, trigger a view change, and capture the message it sends
    view_change_messages = []
    for node in pool.nodes:
        network = MockNetwork()
        node._view_changer._network = network
        node._view_changer._bus.send(NeedViewChange())
        view_change_messages.append(network.sent_messages[0][0])

    # Check that all committed requests are present in final batches,
    # for 10 random strong-quorum-sized subsets of the votes
    for _ in range(10):
        num_votes = quorums.strong.value
        votes = random.sample(view_change_messages, num_votes)

        cp = pool.nodes[0]._view_changer._new_view_builder.calc_checkpoint(votes)
        assert cp is not None

        batches = pool.nodes[0]._view_changer._new_view_builder.calc_batches(cp, votes)
        committed = calc_committed(votes)
        # Only requests above the chosen stable checkpoint remain relevant
        committed = [c for c in committed if c.pp_seq_no > cp.seqNoEnd]

        assert batches is not None
        assert committed == batches[:len(committed)]
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,977 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/view_change/test_primary_selector.py | from plenum.server.consensus.primary_selector import RoundRobinPrimariesSelector
import pytest
from plenum.test.greek import genNodeNames
@pytest.fixture()
def primary_selector():
    """A fresh RoundRobinPrimariesSelector instance for each test."""
    return RoundRobinPrimariesSelector()
@pytest.fixture()
def instance_count(validators):
    # Number of protocol instances for a pool of this size:
    # one more than f = (len(validators) - 1) // 3 (the BFT fault bound).
    return (len(validators) - 1) // 3 + 1
@pytest.fixture(params=[4, 6, 7, 8])
def validators(request):
    """Generated validator name lists for pools of 4, 6, 7 and 8 nodes."""
    return genNodeNames(request.param)
def test_view_change_primary_selection(primary_selector, validators, instance_count, initial_view_no):
    """Primary selection for adjacent views rotates exactly one primary per
    view step, always choosing from the validator set.

    ``initial_view_no`` is presumably provided by a conftest fixture — not
    visible in this file.
    """
    primaries = set(primary_selector.select_primaries(initial_view_no, instance_count, validators))
    prev_primaries = set(primary_selector.select_primaries(initial_view_no - 1, instance_count, validators))
    next_primaries = set(primary_selector.select_primaries(initial_view_no + 1, instance_count, validators))

    # Each view yields exactly instance_count distinct primaries
    assert len(set(primaries)) == instance_count
    assert len(set(prev_primaries)) == instance_count
    assert len(set(next_primaries)) == instance_count

    # Primaries always come from the validator set
    assert primaries.issubset(validators)
    assert prev_primaries.issubset(validators)
    assert next_primaries.issubset(validators)

    # Adjacent views never select identical primary sets
    assert primaries != prev_primaries
    assert primaries != next_primaries

    # Exactly one primary changes per view step: adjacent views share
    # instance_count - 1 primaries, views two apart share instance_count - 2
    assert len(primaries & prev_primaries) == instance_count - 1
    assert len(primaries & next_primaries) == instance_count - 1
    assert len(prev_primaries & next_primaries) == instance_count - 2
    assert len(primaries | prev_primaries) == instance_count + 1
    assert len(primaries | next_primaries) == instance_count + 1
    assert len(prev_primaries | next_primaries) == instance_count + 2
def test_primaries_selection_viewno_0(primary_selector):
    """Round-robin selection for view 0 starts at the first validator."""
    pool = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon", "Zeta", "Eta"]
    selected = primary_selector.select_primaries(view_no=0,
                                                 instance_count=3,
                                                 validators=pool)
    assert selected == ["Alpha", "Beta", "Gamma"]
def test_primaries_selection_viewno_5(primary_selector):
    """Round-robin selection for view 5 wraps around the validator list."""
    pool = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon", "Zeta", "Eta"]
    selected = primary_selector.select_primaries(view_no=5,
                                                 instance_count=3,
                                                 validators=pool)
    assert selected == ["Zeta", "Eta", "Alpha"]
def test_primaries_selection_viewno_9(primary_selector):
    """Round-robin selection for view 9 (9 mod 7 == 2) starts at Gamma."""
    pool = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon", "Zeta", "Eta"]
    selected = primary_selector.select_primaries(view_no=9,
                                                 instance_count=3,
                                                 validators=pool)
    assert selected == ["Gamma", "Delta", "Epsilon"]
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,978 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/order_service/helper.py | from plenum.common.util import get_utc_epoch
def expect_suspicious(orderer, suspicious_code):
    """Patch orderer.report_suspicious_node with a checker that asserts the
    reported exception carries the expected suspicion code, then re-raises."""
    def _checking_report(ex):
        assert ex.code == suspicious_code
        raise ex
    orderer.report_suspicious_node = _checking_report
def _register_pp_ts(orderer, pp, sender):
    """Record the receipt timestamp of a PrePrepare from a given sender,
    keyed by its (viewNo, ppSeqNo) three-phase-commit key."""
    orderer.pre_prepare_tss[(pp.viewNo, pp.ppSeqNo)][(pp, sender)] = get_utc_epoch()
def check_suspicious(handler, ex_message):
    """Assert that the suspicion handler was invoked exactly once and that
    the message it received matches ex_message (instance id plus the wrapped
    exception's code, offending message and node)."""
    calls = handler.call_args_list
    if len(calls) > 1:
        print(calls)
    assert len(calls) == 1

    actual = calls[0][0][0]
    assert actual.inst_id == ex_message.inst_id

    actual_ex, expected_ex = actual.ex, ex_message.ex
    assert actual_ex.code == expected_ex.code
    assert actual_ex.offendingMsg == expected_ex.offendingMsg
    assert actual_ex.node == expected_ex.node
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,979 | Toktar/indy-plenum | refs/heads/master | /plenum/test/consensus/view_change/helper.py | from typing import Optional, List
import base58
from plenum.common.messages.node_messages import PrePrepare, Checkpoint
from plenum.server.consensus.view_change_service import ViewChangeService, BatchID
from plenum.test.consensus.helper import SimPool
from plenum.test.simulation.sim_random import SimRandom
def some_checkpoint(random: SimRandom, view_no: int, pp_seq_no: int) -> Checkpoint:
    """Build a Checkpoint for instance 0 with a random base58 digest and a
    degenerate [pp_seq_no, pp_seq_no] sequence range."""
    return Checkpoint(
        instId=0, viewNo=view_no, seqNoStart=pp_seq_no, seqNoEnd=pp_seq_no,
        digest=base58.b58encode(random.string(32)).decode())
def some_pool(random: SimRandom) -> (SimPool, List):
    """Create a SimPool of 4..8 nodes with a randomized consensus history
    (preprepares, prepares, checkpoints), and return it together with the
    list of batches that reached a prepared certificate (>= n - f prepares),
    re-labelled for view 1.

    :param random: deterministic PRNG driving every random choice
    :return: (pool, committed) where committed is a list of BatchID
    """
    pool_size = random.integer(4, 8)
    pool = SimPool(pool_size, random)

    # Create simulated history
    # TODO: Move into helper?
    faulty = (pool_size - 1) // 3   # max tolerated faulty nodes f
    seq_no_per_cp = 10              # a checkpoint every 10 batches
    max_batches = 50
    batches = [BatchID(0, n, random.string(40)) for n in range(1, max_batches)]
    checkpoints = [some_checkpoint(random, 0, n) for n in range(0, max_batches, seq_no_per_cp)]

    # Preprepares: each node has seen a random prefix of the batches.
    # max_pp is the (f+1)-th smallest count across nodes.
    pp_count = [random.integer(0, len(batches)) for _ in range(pool_size)]
    max_pp = sorted(pp_count)[faulty]

    # Prepares: a node can only prepare what it preprepared, bounded by max_pp
    p_count = [random.integer(0, min(max_pp, pp)) for pp in pp_count]
    max_p = sorted(p_count)[faulty]

    # Checkpoints: at least the genesis checkpoint, bounded by prepared progress
    cp_count = [1 + random.integer(0, min(max_p, p)) // seq_no_per_cp for p in pp_count]
    max_stable_cp_indx = sorted(cp_count)[faulty] - 1
    stable_cp = [checkpoints[random.integer(0, min(max_stable_cp_indx, cp))].seqNoEnd for cp in cp_count]

    # Initialize consensus data of every node with its slice of the history
    for i, node in enumerate(pool.nodes):
        node._data.preprepared = batches[:pp_count[i]]
        node._data.prepared = batches[:p_count[i]]
        node._data.checkpoints.update(checkpoints[:cp_count[i]])
        node._data.stable_checkpoint = stable_cp[i]

    # A batch is committed once n - f nodes have it in their prepared prefix;
    # committed batches are re-labelled with view_no 1 (the post-view-change view)
    committed = []
    for i in range(1, max_batches):
        prepare_count = sum(1 for node in pool.nodes if i <= len(node._data.prepared))
        has_prepared_cert = prepare_count >= pool_size - faulty
        if has_prepared_cert:
            batch_id = batches[i - 1]
            committed.append(BatchID(1, batch_id.pp_seq_no, batch_id.pp_digest))

    return pool, committed
def calc_committed(view_changes, max_pp_seq_no, n, f) -> List[BatchID]:
    """Compute the committed batches implied by a set of view-change votes.

    A batch is committed if, for its pp_seq_no, at least n - f votes contain
    a matching entry in both their prepared and preprepared lists.

    :param view_changes: view-change votes carrying prepared/preprepared lists
    :param max_pp_seq_no: scan pp_seq_no values in range(1, max_pp_seq_no)
    :param n: total number of nodes
    :param f: number of tolerated faulty nodes
    """
    def check_in_batch(batch_id, some_batch_id, check_view_no=False):
        # Batch ids match on (pp_seq_no, digest), optionally also on view_no
        if check_view_no and (batch_id[0] != some_batch_id[0]):
            return False
        return batch_id[1] == some_batch_id[1] and batch_id[2] == some_batch_id[2]

    def check_prepared_in_vc(vc, batch_id):
        # check that (pp_seq_no, digest) is present in VC's prepared and preprepared
        for p_batch_id in vc.prepared:
            if not check_in_batch(batch_id, p_batch_id, check_view_no=True):
                continue
            for pp_batch_id in vc.preprepared:
                if check_in_batch(batch_id, pp_batch_id, check_view_no=True):
                    return True
        return False

    def find_batch_id(pp_seq_no):
        # Return the first prepared batch id for pp_seq_no that is backed
        # by at least n - f votes, or None if no such batch exists
        for vc in view_changes:
            for batch_id in vc.prepared:
                if batch_id[1] != pp_seq_no:
                    continue
                prepared_count = sum(1 for vc in view_changes if check_prepared_in_vc(vc, batch_id))
                if prepared_count < n - f:
                    continue
                return batch_id
        return None

    committed = []
    for pp_seq_no in range(1, max_pp_seq_no):
        batch_id = find_batch_id(pp_seq_no)
        if batch_id is not None:
            committed.append(BatchID(*batch_id))
    return committed
| {"/plenum/server/consensus/checkpoint_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/checkpoint_service/test_checkpoint_service_unit.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/checkpoint_service.py"], "/plenum/hypothesis/consensus/hyp_sim_view_change.py": ["/plenum/hypothesis/helper.py", "/plenum/test/consensus/view_change/test_sim_view_change.py"], "/plenum/test/consensus/order_service/test_ordering_service_on_view_change.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/server/consensus/view_change_service.py": ["/plenum/common/messages/internal_messages.py"], "/plenum/test/consensus/view_change/test_sim_view_change.py": ["/plenum/common/messages/internal_messages.py", "/plenum/server/consensus/view_change_service.py", "/plenum/test/consensus/view_change/helper.py"], "/plenum/test/consensus/view_change/helper.py": ["/plenum/server/consensus/view_change_service.py"]} |
48,991 | KNOT-FIT-BUT/webapi-server | refs/heads/master | /core/api/_JSONRPC/protocol.py | '''
Created on 1. 5. 2014
@author: casey
'''
import inspect
from core.chain import ChainItem, ChainTerminator
class AProtocol(ChainItem):
    """Base class for one version of the JSON-RPC protocol, implemented as
    an item in a chain of responsibility.

    Subclasses expose callable actions as methods named ``RPC_<action>``;
    the set of available action names is collected at construction time.
    """

    def __init__(self, core):
        """
        :param core: application core object, kept for use by subclasses
        """
        super(AProtocol, self).__init__()
        self.core = core
        self.version = 0        # protocol version handled by this instance
        self.RPC_methods = []   # available action names (without "RPC_" prefix)
        self.__loadMethods()

    def __loadMethods(self):
        # Collect all bound methods named "RPC_*" and strip the 4-char
        # prefix to obtain the externally visible action names.
        methods = inspect.getmembers(self, predicate=inspect.ismethod)
        self.RPC_methods = [method[0][4:] for method in methods if method[0].startswith("RPC_")]

    def process(self, action, version, rpc_args):
        # Handle the request if the version matches (-1 means "any version")
        # and the action is known; otherwise delegate to the next chain item.
        if (self.version == version or version == -1) and action in self.RPC_methods:
            return self._hook(action, rpc_args)
        return self.nextChainItem.process(action, version, rpc_args)

    def _hook(self, action, rpc_args):
        # Dispatch to the RPC_<action> method, unpacking args when provided.
        calle = getattr(self, "RPC_"+action)
        return calle(**rpc_args) if rpc_args else calle()
class ProtocolEndPoint(ChainTerminator):
    """Terminator of the protocol chain: reached when no protocol version
    handled the requested action; always answers with None."""

    def __init__(self):
        super(ProtocolEndPoint, self).__init__()
        # Sentinel version value that never matches a real protocol version
        self.version = "terminator"

    def process(self, action, version, rpc_args):
        return None
| {"/core/api/_JSONRPC/protocol.py": ["/core/chain.py"], "/core/api/_REST/_v1/hner.py": ["/core/annotation/annotator.py"], "/core/annotation/tools/tner.py": ["/core/annotation/tools/_abstract.py"], "/core/annotation/tools/_abstract.py": ["/core/pipeline.py"], "/core/annotation/filters/inputFilters.py": ["/core/annotation/filters/_abstract.py"], "/core/annotation/generators/gFigaNer.py": ["/core/annotation/generators/_abstract.py"], "/core/api/JSONRPC.py": ["/core/loader.py", "/core/api/_JSONRPC/protocol.py", "/core/chain.py"], "/core/assets/adapters/_adapters/kblocal.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/restV1.py": ["/core/api/_REST/_v1/__init__.py"], "/core/annotation/output.py": ["/core/pipeline.py", "/core/loader.py", "/core/annotation/generators/_abstract.py"], "/core/annotation/generators/_abstract.py": ["/core/pipeline.py"], "/core/api/HTTP.py": ["/core/annotation/annotator.py"], "/core/assets/adapters/_adapters/kbshared.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/_v1/figa.py": ["/core/annotation/annotator.py"], "/core/api/_JSONRPC/rpcV1.py": ["/core/api/_JSONRPC/protocol.py", "/core/assets/asset.py"], "/core/annotation/tools/tfiga.py": ["/core/annotation/tools/_abstract.py", "/core/annotation/tools/tfiga08.py"], "/core/assets/adapters/factory.py": ["/core/loader.py", "/core/assets/adapters/_abstract.py"], "/core/annotation/tools/tfiga08.py": ["/core/annotation/tools/_abstract.py"], "/core/assets/adapters/_adapters/generic.py": ["/core/assets/adapters/_abstract.py"], "/core/annotation/filters/_abstract.py": ["/core/pipeline.py"]} |
48,992 | KNOT-FIT-BUT/webapi-server | refs/heads/master | /core/api/_REST/_v1/hner.py | # -*- coding: utf-8 -*-
'''
Created on 28. 10. 2013
@author: xjerab13
'''
import cherrypy
from core.annotation.annotator import AnnotationRequest
class NERHandler():
    """CherryPy REST resource for the NER tool: lists the available
    knowledge bases on GET and annotates submitted text on POST."""

    # CherryPy flag: expose the HTTP-verb methods below to the dispatcher
    exposed = True

    def __init__(self):
        # Bridge to the JSON-RPC component attached to the cherrypy engine
        self.json_rpc = cherrypy.engine.json_rpc

    @cherrypy.tools.json_out()
    def GET(self, *flags, **kw):
        """Return info about all KBs available for the NER tool (as JSON)."""
        return self.json_rpc.callRPC(action = "getAssetList", rpc_args = {"toolType": "ner"})

    @cherrypy.tools.json_out()
    def POST(self, *flags, **kw):
        """Parse text via the NER tool.

        The first URL path segment, if present, selects the asset (KB) name;
        the ``text`` form field carries the input text.
        :return: annotation data as JSON.
        """
        txt = kw.get("text")
        asset_name = flags[0] if len(flags) > 0 else None
        return self.json_rpc.callRPC(action = "annotate", rpc_args = {"request" : AnnotationRequest(txt, "ner", asset_name, 1)}, version = 1)

    def PUT(self):
        # Not supported for this resource
        pass

    def DELETE(self):
        # Not supported for this resource
        pass
| {"/core/api/_JSONRPC/protocol.py": ["/core/chain.py"], "/core/api/_REST/_v1/hner.py": ["/core/annotation/annotator.py"], "/core/annotation/tools/tner.py": ["/core/annotation/tools/_abstract.py"], "/core/annotation/tools/_abstract.py": ["/core/pipeline.py"], "/core/annotation/filters/inputFilters.py": ["/core/annotation/filters/_abstract.py"], "/core/annotation/generators/gFigaNer.py": ["/core/annotation/generators/_abstract.py"], "/core/api/JSONRPC.py": ["/core/loader.py", "/core/api/_JSONRPC/protocol.py", "/core/chain.py"], "/core/assets/adapters/_adapters/kblocal.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/restV1.py": ["/core/api/_REST/_v1/__init__.py"], "/core/annotation/output.py": ["/core/pipeline.py", "/core/loader.py", "/core/annotation/generators/_abstract.py"], "/core/annotation/generators/_abstract.py": ["/core/pipeline.py"], "/core/api/HTTP.py": ["/core/annotation/annotator.py"], "/core/assets/adapters/_adapters/kbshared.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/_v1/figa.py": ["/core/annotation/annotator.py"], "/core/api/_JSONRPC/rpcV1.py": ["/core/api/_JSONRPC/protocol.py", "/core/assets/asset.py"], "/core/annotation/tools/tfiga.py": ["/core/annotation/tools/_abstract.py", "/core/annotation/tools/tfiga08.py"], "/core/assets/adapters/factory.py": ["/core/loader.py", "/core/assets/adapters/_abstract.py"], "/core/annotation/tools/tfiga08.py": ["/core/annotation/tools/_abstract.py"], "/core/assets/adapters/_adapters/generic.py": ["/core/assets/adapters/_abstract.py"], "/core/annotation/filters/_abstract.py": ["/core/pipeline.py"]} |
48,993 | KNOT-FIT-BUT/webapi-server | refs/heads/master | /core/annotation/tools/tner.py | # -*- coding: utf-8 -*-
'''
Created on 24. 4. 2014
@author: casey
'''
import ner
from core.annotation.tools._abstract import AbstractTool
import name_recognizer.name_recognizer as name_recognizer
class NER(AbstractTool):
    """Annotation tool wrapper around the ``ner`` module: runs named-entity
    recognition over the request's input text using the asset's KB part."""

    toolName = "ner"
    # Parameter names accepted from clients; must match the keys looked up
    # in call() below.
    params = ["lower", "remove_accent"]

    def __init__(self):
        super(NER, self).__init__()
        # Pipeline prerequisites this tool needs on the request
        self.require = ['asset', 'input_data']
        # Asset part holding the knowledge base used by ner.recognize
        self.assetPart = "kb"

    def _hook(self, request):
        # Run recognition and store the result on the request for the
        # next pipeline stage.
        request.ongoing_data = self.call(request.input_data, request.asset, request.tool_params)

    def call(self, input_data, asset, params):
        kb = asset.getPart(self.assetPart)
        # Reset the module-level dictionary so a previous asset's data
        # is not reused across calls.
        ner.dictionary = None
        # BUG FIX: the "remove accent" lookup key contained a space and so
        # could never match the declared "remove_accent" parameter name.
        data = ner.recognize(kb, input_data, False, False, False,
                             params.get("lower", False),
                             params.get("remove_accent", False),
                             params.get("name_recognize", False))
        return data
| {"/core/api/_JSONRPC/protocol.py": ["/core/chain.py"], "/core/api/_REST/_v1/hner.py": ["/core/annotation/annotator.py"], "/core/annotation/tools/tner.py": ["/core/annotation/tools/_abstract.py"], "/core/annotation/tools/_abstract.py": ["/core/pipeline.py"], "/core/annotation/filters/inputFilters.py": ["/core/annotation/filters/_abstract.py"], "/core/annotation/generators/gFigaNer.py": ["/core/annotation/generators/_abstract.py"], "/core/api/JSONRPC.py": ["/core/loader.py", "/core/api/_JSONRPC/protocol.py", "/core/chain.py"], "/core/assets/adapters/_adapters/kblocal.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/restV1.py": ["/core/api/_REST/_v1/__init__.py"], "/core/annotation/output.py": ["/core/pipeline.py", "/core/loader.py", "/core/annotation/generators/_abstract.py"], "/core/annotation/generators/_abstract.py": ["/core/pipeline.py"], "/core/api/HTTP.py": ["/core/annotation/annotator.py"], "/core/assets/adapters/_adapters/kbshared.py": ["/core/assets/adapters/_abstract.py", "/core/assets/adapters/_adapters/functions.py"], "/core/api/_REST/_v1/figa.py": ["/core/annotation/annotator.py"], "/core/api/_JSONRPC/rpcV1.py": ["/core/api/_JSONRPC/protocol.py", "/core/assets/asset.py"], "/core/annotation/tools/tfiga.py": ["/core/annotation/tools/_abstract.py", "/core/annotation/tools/tfiga08.py"], "/core/assets/adapters/factory.py": ["/core/loader.py", "/core/assets/adapters/_abstract.py"], "/core/annotation/tools/tfiga08.py": ["/core/annotation/tools/_abstract.py"], "/core/assets/adapters/_adapters/generic.py": ["/core/assets/adapters/_abstract.py"], "/core/annotation/filters/_abstract.py": ["/core/pipeline.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.