import os
import sys
import pulsar as psr
def run_test():
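    """Load one C++ and one Python test module and exercise their run_test entry points."""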
tester = psr.PyTester("Basic Module Loading/Calling")
ma = psr.ModuleAdministrator()
ma.load_module("testmodules", "TestCppModule1", "TEST_CPP_MOD1")
ma.load_module("testmodules", "TestPyModule1", "TEST_PY_MOD1")
mcpp1 = ma.get_module("TEST_CPP_MOD1", 0)
mpy1 = ma.get_module("TEST_PY_MOD1", 0)
tester.test_call("Test simple C++ module function", True, mcpp1.run_test)
tester.test_call("Test simple Python module function", True, mpy1.run_test)
tester.test_call("Test chained get_module with C++ module", True, ma.get_module("TEST_CPP_MOD1", 0).run_test)
tester.test_call("Test chained get_module with Python module", True, ma.get_module("TEST_PY_MOD1", 0).run_test)
tester.print_results()
return tester.nfailed()
|
{
"content_hash": "9d01e63bfcd60aca60386053488c8125",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 115,
"avg_line_length": 37.54545454545455,
"alnum_prop": 0.6912832929782082,
"repo_name": "pulsar-chem/Pulsar-Core",
"id": "a18694c5d998817bf1b5e1849bcb37f03a310cd7",
"size": "826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/issues/Issue_33.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7784"
},
{
"name": "C++",
"bytes": "1100500"
},
{
"name": "CMake",
"bytes": "24227"
},
{
"name": "Python",
"bytes": "739363"
},
{
"name": "Shell",
"bytes": "2851"
}
],
"symlink_target": ""
}
|
"""Infrastructure and implementation of in-memory graph data.
High-level Abstract Classes:
* `InMemoryGraphData`: provides nodes, edges, and features, for a
homogeneous or a heterogeneous graph.
* `NodeClassificationGraphData`: an `InMemoryGraphData` that also provides
list of {train, test, validation} nodes, as well as their labels.
`InMemoryGraphData` implementations can provide
* a single GraphTensor for training on one big graph (e.g., for node
classification with `tf_trainer.py` or `keras_trainer.py`),
* a big graph from which in-memory sampling (e.g., `int_arithmetic_sampler`)
can create a dataset of sampled subgraphs (encoded as `tfgnn.GraphTensor`).
All `InMemoryGraphData` implementations automatically inherit:
* `as_graph_tensor()`, which can be plugged into TF-GNN models and training
  loops, e.g., for node classification (see `tf_trainer.py` and
  `keras_trainer.py`).
* Compatibility with in-memory sampling (see `int_arithmetic_sampler.py`, and
  example trainer script, `keras_minibatch_trainer.py`).
Concrete implementations:
* Node classification (inheriting `NodeClassificationGraphData`)
* `OgbnData`: Wraps node classification graph data from OGB, i.e., with
name prefix of "ogbn-", such as "ogbn-arxiv".
* `PlanetoidGraphData`: wraps the graph data popularized by the GCN paper
  (cora, citeseer, pubmed).
# Usage Example.
```
graph_data = datasets.OgbnData('ogbn-arxiv')
# Optionally, make graph undirected:
graph_data = graph_data.with_undirected_edges(True)
# Add self-loops:
graph_data = graph_data.with_self_loops(True)
# To get the GraphTensor and GraphSchema from any graph data:
graph_tensor = graph_data.as_graph_tensor()
graph_schema = graph_data.graph_schema()
spec = tfgnn.create_graph_spec_from_schema_pb(graph_schema)
# or optionally, by "relaxing" the batch dimension of `graph_tensor` (to None):
# spec = graph_tensor.spec.relax(num_nodes=True, num_edges=True)
```
The first line is equivalent to
`graph_data = datasets.get_in_memory_graph_data('ogbn-arxiv')`, which is more
general because it can load other dataset types:
* 'ogbn-*' are node-classification datasets from OGB.
* 'pubmed', 'cora', 'citeseer', correspond to transductive graphs used in
Planetoid (Yang et al, ICML'16).
`graph_tensor` (type `GraphTensor`) contains all nodes, edges, and features.
If it is a node-classification dataset, the training labels are also populated.
**For nodes not in the training set**, the label feature will be `-1`.
If you want to explicitly get all labels from all partitions, you may:
```
graph_data = graph_data.with_split(['train', 'test', 'validation'])
graph_tensor = graph_data.as_graph_tensor()
```
Chaining `with_*` calls can reduce verbosity. For example,
```
graph_data = (
datasets.OgbnData('ogbn-arxiv').with_undirected_edges(True)
.with_self_loops(True))
graph_tensor = graph_data.as_graph_tensor()
```
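To feed the graph into a `tf.data` pipeline, a minimal sketch (relying on
`GraphTensor` being a composite tensor, which `tf.data.Dataset.from_tensors`
accepts directly) is:
```
dataset = tf.data.Dataset.from_tensors(graph_data.as_graph_tensor())
```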
"""
import copy
import os
import pickle
import sys
from typing import Any, List, Mapping, MutableMapping, NamedTuple, Tuple, Union, Optional
import urllib.request
import numpy as np
import ogb.linkproppred
import ogb.nodeproppred
import scipy.sparse
import tensorflow as tf
import tensorflow_gnn as tfgnn
class InMemoryGraphData:
"""Abstract class for hold a graph data in-memory (nodes, edges, features).
Subclasses must implement methods `node_features_dicts()`, `node_counts()`,
`edge_lists()`, `node_sets()`, and optionally, `context()`. They inherit
methods `graph_schema()`, `edge_sets()`, and `as_graph_tensor()` based on
those.
"""
def __init__(self, make_undirected: bool = False,
add_self_loops: bool = False):
self._make_undirected = make_undirected
self._add_self_loops = add_self_loops
def with_undirected_edges(self, make_undirected: bool) -> 'InMemoryGraphData':
"""Marks graph as "undirected" and returns the reference to this instance.
Subsequent calls to `.graph_schema()` and to `.as_graph_tensor()` will be
affected. Specifically, the generated output `tfgnn.GraphTensor` (by
`.as_graph_tensor()`) will reverse all homogeneous edge sets (where its
source node set equals its target node set). Suppose edge `(i, j)` is
included in *homogeneous* edge set "MyEdgeSet", then output `GraphTensor`
will also contain edge `(j, i)` on edge set "MyEdgeSet". If edge `(j, i)`
already exists, then it will be duplicated.
If make_undirected == True:
* output of `.as_graph_tensor()` will contain only edge-set names that are
returned by `.edge_sets()`, where each homogeneous edge-set with M edges
will be expanded to M*2 edges with edge `M+k` reversing edge `k`.
* output of `.graph_schema()` will contain only edge-sets returned by
`edge_sets`.
If make_undirected == False:
* output of `.as_graph_tensor()` will contain, for each edge set "EdgeSet"
(returned by `.edge_sets()`) a new edge-set "rev_EdgeSet" that reverses
the "EdgeSet".
* output of `.graph_schema()` will have both "EdgeSet" and "rev_EdgeSet".
Args:
make_undirected: If True, subsequent calls to `.graph_schema()` and
`.as_graph_tensor()` will export an undirected graph. If False, a
directed graph (with additional "rev_*" edges).
Returns:
A copy of `self` with the setting applied.
"""
modified = copy.copy(self)
modified._make_undirected = make_undirected # pylint: disable=protected-access -- same class.
return modified
def with_self_loops(self, add_self_loops: bool) -> 'InMemoryGraphData':
"""Marks graph with "self-loops" and returns the reference to this instance.
If add_self_loops == True, then subsequent calls to `.as_graph_tensor()`
will contain edges `[(i, i) for i in range(N_j)]`, for each homogeneous edge
set j, where `N_j` is the number of nodes in node set connected by edge set
`j`.
NOTE: self-loops will be added *regardless* of whether they already exist.
If the dataset already has self-loops, calling this will duplicate the
self-loop edges.
Args:
add_self_loops: If set, self-loops will be added on subsequent calls to
`.as_graph_tensor()`. If not, no self-loops will be automatically added.
Returns:
A copy of `self` with the setting applied.
"""
modified = copy.copy(self)
modified._add_self_loops = add_self_loops # pylint: disable=protected-access -- same class.
return modified
def node_counts(self) -> Mapping[tfgnn.NodeSetName, int]:
"""Returns total number of graph nodes per node set."""
raise NotImplementedError()
def node_features_dicts(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
"""Returns 2-level dict: NodeSetName->FeatureName->Feature tensor.
For every node set `"x"`, each feature tensor must have a leading dimension
equal to the number of nodes in the node set (`.node_counts()["x"]`). Other
dimensions are dataset specific.
"""
raise NotImplementedError()
def edge_lists(self) -> Mapping[
Tuple[tfgnn.NodeSetName, tfgnn.EdgeSetName, tfgnn.NodeSetName],
tf.Tensor]:
"""Returns dict from "edge type tuple" to int Tensor of shape (2, num_edges).
"edge type tuple" string three-tuple:
`(source node set name, edge set name, target node set name)`.
where `edge set name` must be unique.
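For example, a hypothetical single-edge-set citation graph could return:
`{("paper", "cites", "paper"): <int tf.Tensor of shape (2, num_edges)>}`.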
"""
raise NotImplementedError()
def node_sets(self) -> MutableMapping[tfgnn.NodeSetName, tfgnn.NodeSet]:
"""Returns node sets of entire graph (dict: node set name -> NodeSet)."""
node_counts = self.node_counts()
features_dicts = self.node_features_dicts()
node_set_names = set(node_counts.keys()).union(features_dicts.keys())
return (
{name: tfgnn.NodeSet.from_fields(sizes=as_tensor([node_counts[name]]),
features=features_dicts.get(name, {}))
for name in node_set_names})
def context(self) -> Optional[tfgnn.Context]:
return None
def as_graph_tensor(self) -> tfgnn.GraphTensor:
"""Returns `GraphTensor` holding the entire graph."""
return tfgnn.GraphTensor.from_pieces(
node_sets=self.node_sets(), edge_sets=self.edge_sets(),
context=self.context())
def graph_schema(self) -> tfgnn.GraphSchema:
"""`tfgnn.GraphSchema` instance corresponding to `as_graph_tensor()`."""
# Populate node features specs.
schema = tfgnn.GraphSchema()
for node_set_name, node_set in self.node_sets().items():
node_features = schema.node_sets[node_set_name]
for feat_name, feature in node_set.features.items():
node_features.features[feat_name].dtype = feature.dtype.as_datatype_enum
for dim in feature.shape[1:]:
node_features.features[feat_name].shape.dim.add().size = dim
# Populate edge specs.
for edge_type in self.edge_lists().keys():
src_node_set_name, edge_set_name, dst_node_set_name = edge_type
# Populate edges with adjacency and its transpose.
schema.edge_sets[edge_set_name].source = src_node_set_name
schema.edge_sets[edge_set_name].target = dst_node_set_name
if not self._make_undirected:
schema.edge_sets['rev_' + edge_set_name].source = dst_node_set_name
schema.edge_sets['rev_' + edge_set_name].target = src_node_set_name
return schema
def edge_sets(self) -> MutableMapping[tfgnn.EdgeSetName, tfgnn.EdgeSet]:
"""Returns edge sets of entire graph (dict: edge set name -> EdgeSet)."""
edge_sets = {}
node_counts = self.node_counts() if self._add_self_loops else None
for edge_type, edge_list in self.edge_lists().items():
(source_node_set_name, edge_set_name, target_node_set_name) = edge_type
if self._make_undirected and source_node_set_name == target_node_set_name:
edge_list = tf.concat([edge_list, edge_list[::-1]], axis=-1)
if self._add_self_loops and source_node_set_name == target_node_set_name:
all_nodes = tf.range(node_counts[source_node_set_name],
dtype=edge_list.dtype)
self_connections = tf.stack([all_nodes, all_nodes], axis=0)
edge_list = tf.concat([edge_list, self_connections], axis=-1)
edge_sets[edge_set_name] = tfgnn.EdgeSet.from_fields(
sizes=tf.shape(edge_list)[1:2],
adjacency=tfgnn.Adjacency.from_indices(
source=(source_node_set_name, edge_list[0]),
target=(target_node_set_name, edge_list[1])))
if not self._make_undirected:
edge_sets['rev_' + edge_set_name] = tfgnn.EdgeSet.from_fields(
sizes=tf.shape(edge_list)[1:2],
adjacency=tfgnn.Adjacency.from_indices(
source=(target_node_set_name, edge_list[1]),
target=(source_node_set_name, edge_list[0])))
return edge_sets
class NodeSplit(NamedTuple):
"""Contains 1D int tensors holding positions of {train, valid, test} nodes.
This is returned by `NodeClassificationGraphData.node_split()`
"""
train: tf.Tensor
validation: tf.Tensor
test: tf.Tensor
class NodeClassificationGraphData(InMemoryGraphData):
"""Adapts `InMemoryGraphData` for node classification settings.
Subclasses should provide the information needed for node classification:
node labels, the name of the labeled node set, and the train:validation:test
partition of nodes.
"""
def __init__(self, split: str = 'train', use_labels_as_features=False):
super().__init__()
self._splits = [split]
self._use_labels_as_features = use_labels_as_features
def with_split(self, split: Union[str, List[str]] = 'train'
) -> 'NodeClassificationGraphData':
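"""Returns a copy of this graph data configured to use the given split(s)."""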
splits = split if isinstance(split, (tuple, list)) else [split]
for split in splits:
if split not in ('train', 'validation', 'test'):
raise ValueError('split must be one of {"train", "validation", "test"}.')
modified = copy.copy(self)
modified._splits = splits # pylint: disable=protected-access -- same class.
return modified
def with_labels_as_features(
self, use_labels_as_features: bool) -> 'NodeClassificationGraphData':
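"""Returns a copy that includes (or omits) labels as a node feature."""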
modified = copy.copy(self)
modified._use_labels_as_features = use_labels_as_features # pylint: disable=protected-access -- same class.
return modified
@property
def splits(self) -> List[str]:
return copy.copy(self._splits)
def num_classes(self) -> int:
"""Number of node classes. Max of `labels` should be `< num_classes`."""
raise NotImplementedError('num_classes')
def node_split(self) -> NodeSplit:
"""`NodeSplit` with attributes `train`, `valid`, `test` set to node indices.
These indices correspond to leading dimension of features in node set
`labeled_nodeset`.
"""
raise NotImplementedError()
def labels(self) -> tf.Tensor:
"""int vector containing labels for train & validation nodes.
Size of vector is number of nodes in the labeled node set. In particular:
`self.labels().shape[0] == self.node_counts()[self.labeled_nodeset]`.
Specifically, the vector has as many entries as there are nodes belonging to
the node set that this task aims to predict labels for.
Entry `labels()[i]` will be -1 iff `i in self.node_split().test`. Otherwise,
`labels()[i]` will be int in range [`0`, `self.num_classes() - 1`].
"""
raise NotImplementedError()
def test_labels(self) -> tf.Tensor:
"""Like the above but contains no -1's.
Every {train, valid, test} node will have its class label.
"""
raise NotImplementedError()
@property
def labeled_nodeset(self) -> str:
"""Name of node set which `labels` and `node_splits` reference."""
raise NotImplementedError()
def node_features_dicts_without_labels(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
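"""Like `node_features_dicts()`, but never includes the 'label' feature."""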
raise NotImplementedError()
def node_features_dicts(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
"""Implements a method required by the base class.
This method combines the data from `labels()` or `test_labels()` with the
data from `node_features_dicts_without_labels()` into a single features
dict.
Subclasses need to implement the aforementioned methods and may inherit this.
Returns:
NodeSetName -> FeatureName -> Feature Tensor.
"""
node_features_dicts = self.node_features_dicts_without_labels()
if self._use_labels_as_features:
if 'test' in self._splits:
node_features_dicts[self.labeled_nodeset]['label'] = self.test_labels()
else:
node_features_dicts[self.labeled_nodeset]['label'] = self.labels()
return node_features_dicts
def context(self) -> Optional[tfgnn.Context]:
node_split = self.node_split()
seed_nodes = tf.concat(
[getattr(node_split, split) for split in self._splits], axis=0)
seed_nodes = tf.expand_dims(seed_nodes, axis=0)
seed_feature_name = 'seed_nodes.' + self.labeled_nodeset
return tfgnn.Context.from_fields(features={seed_feature_name: seed_nodes})
def graph_schema(self) -> tfgnn.GraphSchema:
graph_schema = super().graph_schema()
context_features = graph_schema.context.features
context_features['seed_nodes.' + self.labeled_nodeset].dtype = (
tf.int64.as_datatype_enum)
return graph_schema
class _OgbGraph:
"""Wraps data exposed by OGB graph objects, while enforcing heterogeneity.
Attributes offered by this class are consistent with the APIs of GraphData.
"""
def __init__(self, graph: Mapping[str, Any]):
"""Reads dict OGB `graph` and into the attributes defined below.
Args:
graph: Dict, described in
https://github.com/snap-stanford/ogb/blob/master/ogb/io/README.md#2-saving-graph-list
"""
if 'edge_index_dict' in graph: # Heterogeneous graph
assert 'num_nodes_dict' in graph
assert 'node_feat_dict' in graph
# node set name -> feature name -> feature matrix (numNodes x featDim).
node_set = {node_set_name: {'feat': as_tensor(feat)}
for node_set_name, feat in graph['node_feat_dict'].items()
if feat is not None}
# Populate remaining features
for key, node_set_name_to_feat in graph.items():
if key.startswith('node_') and key != 'node_feat_dict':
feat_name = key.split('node_', 1)[-1]
for node_set_name, feat in node_set_name_to_feat.items():
node_set[node_set_name][feat_name] = as_tensor(feat)
self._num_nodes_dict = graph['num_nodes_dict']
self._node_feat_dict = node_set
self._edge_index_dict = tf.nest.map_structure(
as_tensor, graph['edge_index_dict'])
else: # Homogeneous graph. Make heterogeneous.
if graph.get('node_feat', None) is not None:
node_features = {
tfgnn.NODES: {'feat': as_tensor(graph['node_feat'])}
}
else:
node_features = {
tfgnn.NODES: {
'feat': tf.zeros([graph['num_nodes'], 0], dtype=tf.float32)
}
}
self._edge_index_dict = {
(tfgnn.NODES, tfgnn.EDGES, tfgnn.NODES): as_tensor(
graph['edge_index']),
}
self._num_nodes_dict = {tfgnn.NODES: graph['num_nodes']}
self._node_feat_dict = node_features
@property
def num_nodes_dict(self) -> Mapping[tfgnn.NodeSetName, int]:
"""Maps "node set name" -> number of nodes."""
return self._num_nodes_dict
@property
def node_feat_dict(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
"""Maps "node set name" to dict of "feature name"->tf.Tensor."""
return self._node_feat_dict
@property
def edge_index_dict(self) -> Mapping[
Tuple[tfgnn.NodeSetName, tfgnn.EdgeSetName, tfgnn.NodeSetName],
tf.Tensor]:
"""Adjacency lists for all edge sets.
Returns:
Dict (source node set name, edge set name, target node set name) -> edges.
Where `edges` is tf.Tensor of shape (2, num edges), with `edges[0]` and
`edges[1]`, respectively, containing source and target node IDs (as 1D int
tf.Tensor).
"""
return self._edge_index_dict
class OgbnData(NodeClassificationGraphData):
"""Wraps node classification graph data of ogbn-* for in-memory learning."""
def __init__(self, dataset_name, cache_dir=None):
super().__init__()
if cache_dir is None:
cache_dir = os.environ.get(
'OGB_CACHE_DIR', os.path.expanduser(os.path.join('~', 'data', 'ogb')))
self.ogb_dataset = ogb.nodeproppred.NodePropPredDataset(
dataset_name, root=cache_dir)
self._graph, self._node_labels, self._node_split, self._labeled_nodeset = (
OgbnData._to_heterogeneous(self.ogb_dataset))
# Reshape from [N, 1] to [N].
self._node_labels = self._node_labels[:, 0]
# train labels (test set to -1).
self._train_labels = np.copy(self._node_labels)
self._train_labels[self._node_split.test] = -1
self._train_labels = as_tensor(self._train_labels)
self._node_labels = as_tensor(self._node_labels)
@staticmethod
def _to_heterogeneous(
ogb_dataset: ogb.nodeproppred.NodePropPredDataset) -> Tuple[
_OgbGraph, # ogb_graph.
np.ndarray, # node_labels.
NodeSplit, # idx_split.
str]:
"""Returns heterogeneous dicts from homogeneous or heterogeneous OGB dataset.
Args:
ogb_dataset: OGBN dataset. It can be homogeneous (single node set type,
single edge set type) or heterogeneous (various node/edge set types).
The returned data structure treats the dataset as heterogeneous (i.e.,
names each node/edge set). If the input is a homogeneous graph, the
node set will be named "nodes" and the edge set will be named "edges".
Returns:
tuple: `(ogb_graph, node_labels, idx_split, labeled_nodeset)`, where:
`ogb_graph` is instance of _OgbGraph.
`node_labels`: np.array of labels, with .shape[0] equal to the number of nodes
in node set with name `labeled_nodeset`.
`idx_split`: instance of NodeSplit. Members `train`, `test` and `validation`,
respectively, contain indices of nodes in node set with name
`labeled_nodeset`.
`labeled_nodeset`: name of node set that the node-classification task is
designed over.
"""
graph, node_labels = ogb_dataset[0]
ogb_graph = _OgbGraph(graph)
if 'edge_index_dict' in graph: # Graph is heterogeneous
assert 'num_nodes_dict' in graph
assert 'node_feat_dict' in graph
labeled_nodeset = list(node_labels.keys())
if len(labeled_nodeset) != 1:
raise ValueError('Expecting OGB dataset with *one* node set with '
'labels. Found: ' + ', '.join(labeled_nodeset))
labeled_nodeset = labeled_nodeset[0]
node_labels = node_labels[labeled_nodeset]
# idx_split is dict: {'train': {labeled_nodeset: np.array}, 'test': ...}.
idx_split = ogb_dataset.get_idx_split()
# Change to {'train': Tensor, 'test': Tensor, 'valid': Tensor}
idx_split = {split_name: as_tensor(split_dict[labeled_nodeset])
for split_name, split_dict in idx_split.items()}
# third-party OGB class returns dict with key 'valid'. Make consistent
# with TF nomenclature by renaming.
idx_split['validation'] = idx_split.pop('valid') # Rename
idx_split = NodeSplit(**idx_split)
return ogb_graph, node_labels, idx_split, labeled_nodeset
# Copy other node information.
for key, value in graph.items():
if key != 'node_feat' and key.startswith('node_'):
key = key.split('node_', 1)[-1]
ogb_graph.node_feat_dict[tfgnn.NODES][key] = as_tensor(value)
idx_split = ogb_dataset.get_idx_split()
idx_split['validation'] = idx_split.pop('valid') # Rename
idx_split = NodeSplit(**tf.nest.map_structure(
tf.convert_to_tensor, idx_split))
return ogb_graph, node_labels, idx_split, tfgnn.NODES
def num_classes(self) -> int:
return self.ogb_dataset.num_classes
def node_features_dicts_without_labels(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
# Deep-copy dict (*but* without copying tf.Tensor objects).
node_sets = self._graph.node_feat_dict
node_sets = {node_set_name: dict(node_set.items())
for node_set_name, node_set in node_sets.items()}
node_counts = self.node_counts()
for node_set_name, count in node_counts.items():
if node_set_name not in node_sets:
node_sets[node_set_name] = {}
feat_dict = node_sets[node_set_name]
feat_dict['#id'] = tf.range(count, dtype=tf.int32)
return node_sets
@property
def labeled_nodeset(self):
return self._labeled_nodeset
def node_counts(self) -> Mapping[tfgnn.NodeSetName, int]:
return self._graph.num_nodes_dict
def edge_lists(self) -> Mapping[
Tuple[tfgnn.NodeSetName, tfgnn.EdgeSetName, tfgnn.NodeSetName],
tf.Tensor]:
return self._graph.edge_index_dict
def node_split(self) -> NodeSplit:
return self._node_split
def labels(self) -> tf.Tensor:
return self._train_labels
def test_labels(self) -> tf.Tensor:
"""int numpy array of length num_nodes containing train and test labels."""
return self._node_labels
def _maybe_download_file(source_url, destination_path, make_dirs=True):
"""Downloads URL `source_url` onto file `destination_path` if not present."""
if not os.path.exists(destination_path):
dir_name = os.path.dirname(destination_path)
if make_dirs:
try:
os.makedirs(dir_name)
except FileExistsError:
pass
with urllib.request.urlopen(source_url) as fin:
with open(destination_path, 'wb') as fout:
fout.write(fin.read())
class PlanetoidGraphData(NodeClassificationGraphData):
"""Wraps Planetoid node-classificaiton datasets.
These datasets first appeared in the Planetoid [1] paper and were
popularized by the GCN paper [2].
[1] Yang et al, ICML'16
[2] Kipf & Welling, ICLR'17.
"""
def __init__(self, dataset_name, cache_dir=None):
super().__init__()
allowed_names = ('pubmed', 'citeseer', 'cora')
url_template = (
'https://github.com/kimiyoung/planetoid/blob/master/data/'
'ind.%s.%s?raw=true')
file_parts = ['ally', 'allx', 'graph', 'ty', 'tx', 'test.index']
if dataset_name not in allowed_names:
raise ValueError('Dataset must be one of: ' + ', '.join(allowed_names))
if cache_dir is None:
cache_dir = os.environ.get(
'PLANETOID_CACHE_DIR', os.path.expanduser(
os.path.join('~', 'data', 'planetoid')))
base_path = os.path.join(cache_dir, 'ind.%s' % dataset_name)
# Download all files.
for file_part in file_parts:
source_url = url_template % (dataset_name, file_part)
destination_path = os.path.join(
cache_dir, 'ind.%s.%s' % (dataset_name, file_part))
_maybe_download_file(source_url, destination_path)
# Load data files.
edge_lists = pickle.load(open(base_path + '.graph', 'rb'))
allx = PlanetoidGraphData.load_x(base_path + '.allx')
ally = np.load(base_path + '.ally', allow_pickle=True)
testx = PlanetoidGraphData.load_x(base_path + '.tx')
# Add test examples.
test_idx = list(
map(int, open(base_path + '.test.index').read().split('\n')[:-1]))
num_test_examples = max(test_idx) - min(test_idx) + 1
sparse_zeros = scipy.sparse.csr_matrix((num_test_examples, allx.shape[1]),
dtype='float32')
allx = scipy.sparse.vstack((allx, sparse_zeros))
llallx = allx.tolil()
llallx[test_idx] = testx
self._allx = as_tensor(np.array(llallx.todense()))
testy = np.load(base_path + '.ty', allow_pickle=True)
ally = np.pad(ally, [(0, num_test_examples), (0, 0)], mode='constant')
ally[test_idx] = testy
self._num_nodes = len(edge_lists)
self._num_classes = ally.shape[1]
self._node_labels = np.argmax(ally, axis=1)
self._train_labels = self._node_labels + 0 # Copy.
self._train_labels[test_idx] = -1
self._node_labels = as_tensor(self._node_labels)
self._train_labels = as_tensor(self._train_labels)
self._test_idx = tf.convert_to_tensor(np.array(test_idx, dtype='int32'))
self._node_split = None # Populated by `node_split()`.
# Will be used to construct (sparse) adjacency matrix.
adj_src = []
adj_target = []
for node, neighbors in edge_lists.items():
adj_src.extend([node] * len(neighbors))
adj_target.extend(neighbors)
self._edge_list = as_tensor(np.stack([adj_src, adj_target], axis=0))
@staticmethod
def load_x(filename):
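"""Loads a Planetoid feature matrix: pickled (latin1) on Python 3, np.load on Python 2."""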
if sys.version_info > (3, 0):
return pickle.load(open(filename, 'rb'), encoding='latin1')
else:
return np.load(filename)
def num_classes(self) -> int:
return self._num_classes
def node_features_dicts_without_labels(self) -> Mapping[
tfgnn.NodeSetName, MutableMapping[tfgnn.FieldName, tf.Tensor]]:
features = {'feat': self._allx}
features['#id'] = tf.range(self._num_nodes, dtype=tf.int32)
return {tfgnn.NODES: features}
def node_counts(self) -> Mapping[tfgnn.NodeSetName, int]:
return {tfgnn.NODES: self._num_nodes}
def edge_lists(self) -> Mapping[
Tuple[tfgnn.NodeSetName, tfgnn.EdgeSetName, tfgnn.NodeSetName],
tf.Tensor]:
return {(tfgnn.NODES, tfgnn.EDGES, tfgnn.NODES): self._edge_list}
def node_split(self) -> NodeSplit:
if self._node_split is None:
# By default, we mimic Planetoid & GCN setup -- i.e., 20 labels per class.
labels_per_class = int(os.environ.get('PLANETOID_LABELS_PER_CLASS', '20'))
num_train_nodes = labels_per_class * self.num_classes()
num_validation_nodes = 500
train_ids = tf.range(num_train_nodes, dtype=tf.int32)
validation_ids = tf.range(
num_train_nodes,
num_train_nodes + num_validation_nodes, dtype=tf.int32)
self._node_split = NodeSplit(train=train_ids, validation=validation_ids,
test=self._test_idx)
return self._node_split
@property
def labeled_nodeset(self):
return tfgnn.NODES
def labels(self) -> tf.Tensor:
return self._train_labels
def test_labels(self) -> tf.Tensor:
"""int numpy array of length num_nodes containing train and test labels."""
return self._node_labels
def get_in_memory_graph_data(dataset_name) -> InMemoryGraphData:
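"""Returns graph data for `dataset_name` ('ogbn-*', 'cora', 'citeseer', or 'pubmed')."""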
if dataset_name.startswith('ogbn-'):
return OgbnData(dataset_name)
elif dataset_name in ('cora', 'citeseer', 'pubmed'):
return PlanetoidGraphData(dataset_name)
else:
raise ValueError('Unknown Dataset name: ' + dataset_name)
# Shorthand. Can be replaced with: `as_tensor = tf.convert_to_tensor`.
def as_tensor(obj: Any) -> tf.Tensor:
"""short-hand for tf.convert_to_tensor."""
return tf.convert_to_tensor(obj)
|
{
"content_hash": "ba0fbee516917f0d53add255fbebe740",
"timestamp": "",
"source": "github",
"line_count": 747,
"max_line_length": 112,
"avg_line_length": 38.888888888888886,
"alnum_prop": 0.6601032702237521,
"repo_name": "tensorflow/gnn",
"id": "5c2e6f48a6b173e0c2ba22cd0d87f4575ab130fe",
"size": "29743",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_gnn/experimental/in_memory/datasets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2491"
},
{
"name": "Python",
"bytes": "1770047"
},
{
"name": "Shell",
"bytes": "3120"
},
{
"name": "Starlark",
"bytes": "47061"
}
],
"symlink_target": ""
}
|
import subprocess
import socket
import time
import inspect, os, sys
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
mid = 3
keepalive = 60
connect_packet = mosq_test.gen_connect("unsubscribe-qos2-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
unsubscribe_packet = mosq_test.gen_unsubscribe(mid, "qos2/test")
unsuback_packet = mosq_test.gen_unsuback(mid)
cmd = ['../../src/mosquitto', '-p', '1888']
broker = mosq_test.start_broker(filename=os.path.basename(__file__), cmd=cmd)
try:
sock = mosq_test.do_client_connect(connect_packet, connack_packet)
sock.send(unsubscribe_packet)
if mosq_test.expect_packet(sock, "unsuback", unsuback_packet):
rc = 0
sock.close()
finally:
broker.terminate()
broker.wait()
if rc:
(stdo, stde) = broker.communicate()
print(stde)
exit(rc)
|
{
"content_hash": "081ad8d38bf6831085ea3ba630c85f52",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 129,
"avg_line_length": 27.29268292682927,
"alnum_prop": 0.7015192135835567,
"repo_name": "else/mosquitto",
"id": "c3b2fd853d65177c092a46c496a75aa35cec2d6e",
"size": "1230",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "test/broker/02-unsubscribe-qos2.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "746142"
},
{
"name": "C++",
"bytes": "36524"
},
{
"name": "JavaScript",
"bytes": "8597"
},
{
"name": "Perl",
"bytes": "3271"
},
{
"name": "Python",
"bytes": "265444"
},
{
"name": "Shell",
"bytes": "4360"
},
{
"name": "XSLT",
"bytes": "1189"
}
],
"symlink_target": ""
}
|
import base64
from typing import Optional
from xdrlib import Packer, Unpacker
from .base import Integer
from .ledger_close_meta_v0 import LedgerCloseMetaV0
__all__ = ["LedgerCloseMeta"]
class LedgerCloseMeta:
"""
XDR Source Code::
union LedgerCloseMeta switch (int v)
{
case 0:
LedgerCloseMetaV0 v0;
};
"""
def __init__(
self,
v: int,
v0: Optional[LedgerCloseMetaV0] = None,
) -> None:
self.v = v
self.v0 = v0
def pack(self, packer: Packer) -> None:
Integer(self.v).pack(packer)
if self.v == 0:
if self.v0 is None:
raise ValueError("v0 should not be None.")
self.v0.pack(packer)
return
@classmethod
def unpack(cls, unpacker: Unpacker) -> "LedgerCloseMeta":
v = Integer.unpack(unpacker)
if v == 0:
v0 = LedgerCloseMetaV0.unpack(unpacker)
return cls(v=v, v0=v0)
return cls(v=v)
def to_xdr_bytes(self) -> bytes:
packer = Packer()
self.pack(packer)
return packer.get_buffer()
@classmethod
def from_xdr_bytes(cls, xdr: bytes) -> "LedgerCloseMeta":
unpacker = Unpacker(xdr)
return cls.unpack(unpacker)
def to_xdr(self) -> str:
xdr_bytes = self.to_xdr_bytes()
return base64.b64encode(xdr_bytes).decode()
@classmethod
def from_xdr(cls, xdr: str) -> "LedgerCloseMeta":
xdr_bytes = base64.b64decode(xdr.encode())
return cls.from_xdr_bytes(xdr_bytes)
def __eq__(self, other: object):
if not isinstance(other, self.__class__):
return NotImplemented
return self.v == other.v and self.v0 == other.v0
def __str__(self):
out = []
out.append(f"v={self.v}")
out.append(f"v0={self.v0}") if self.v0 is not None else None
return f"<LedgerCloseMeta [{', '.join(out)}]>"
|
{
"content_hash": "eccac8550a6435dea5e8203ce2b6839b",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 68,
"avg_line_length": 26.301369863013697,
"alnum_prop": 0.5604166666666667,
"repo_name": "StellarCN/py-stellar-base",
"id": "19d4dad43231f4a0ac2231bb469d7c886011b5c2",
"size": "2012",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "stellar_sdk/xdr/ledger_close_meta.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1737"
},
{
"name": "Makefile",
"bytes": "1085"
},
{
"name": "Python",
"bytes": "2044193"
},
{
"name": "RPC",
"bytes": "76503"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
from celery import Celery, shared_task
from vtn.models import DREvent, Site
from datetime import timedelta
from django.db.models import Q
from django.utils import timezone
from django.conf import settings
celery = Celery('tasks', broker='amqp://localhost')
@shared_task
def update_event_statuses():
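    """Mark non-completed, non-cancelled DR events as completed or active based on the current time."""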
now = timezone.now()
dr_events = DREvent.objects.filter(~Q(status='completed')).filter(~Q(status='cancelled'))
for dr_event in dr_events:
if dr_event.end < now:
dr_event.status = 'completed'
dr_event.save()
elif dr_event.start < now:
dr_event.status = 'active'
dr_event.save()
return
@shared_task
def update_online_offline():
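    """Mark each site online or offline based on its last status time."""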
now = timezone.now()
sites = Site.objects.all()
for site in sites:
try:
if site.last_status_time + timedelta(minutes=settings.ONLINE_INTERVAL_MINUTES) < now:
site.online = False
site.save()
else:
site.online = True
site.save()
except TypeError:
continue
|
{
"content_hash": "0bc259e5ab5f8fb6351cfc2e7ba42f7d",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 97,
"avg_line_length": 28.225,
"alnum_prop": 0.6138175376439327,
"repo_name": "rlutes/volttron-applications",
"id": "3ae0b72840b3f7a9e75c8fa0ce54808885fee2c8",
"size": "4031",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kisensum/openadr/openadr/vtn/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "221216"
},
{
"name": "CSS",
"bytes": "3865"
},
{
"name": "Gnuplot",
"bytes": "2486"
},
{
"name": "HTML",
"bytes": "4740"
},
{
"name": "JavaScript",
"bytes": "55562"
},
{
"name": "Makefile",
"bytes": "2413"
},
{
"name": "Objective-C",
"bytes": "1042"
},
{
"name": "Python",
"bytes": "1256496"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
}
|
import logging
logger = logging.getLogger(__name__)
import sys
import json
import requests
from requests.exceptions import Timeout, ReadTimeout, ConnectionError
import time
import threading
from functools import lru_cache
from counterpartylib.lib import script
from counterpartylib.lib import config
bitcoin_rpc_session = None
class BackendRPCError(Exception):
pass
def rpc_call(payload):
url = config.BACKEND_URL
headers = {'content-type': 'application/json'}
global bitcoin_rpc_session
if not bitcoin_rpc_session:
bitcoin_rpc_session = requests.Session()
response = None
TRIES = 12
for i in range(TRIES):
try:
response = bitcoin_rpc_session.post(url, data=json.dumps(payload), headers=headers, verify=(not config.BACKEND_SSL_NO_VERIFY), timeout=config.REQUESTS_TIMEOUT)
if i > 0:
logger.debug('Successfully connected.')
break
except (Timeout, ReadTimeout, ConnectionError):
logger.debug('Could not connect to backend at `{}`. (Try {}/{})'.format(url, i+1, TRIES))
time.sleep(5)
if response is None:
if config.TESTNET:
network = 'testnet'
else:
network = 'mainnet'
raise BackendRPCError('Cannot communicate with backend at `{}`. (Server is set to run on {}; is the backend running?)'.format(url, network))
elif response.status_code not in (200, 500):
raise BackendRPCError(str(response.status_code) + ' ' + response.reason)
# Return result, with error handling.
response_json = response.json()
# Batch query returns a list
if isinstance(response_json, list):
return response_json
if 'error' not in response_json or response_json['error'] is None:
return response_json['result']
elif response_json['error']['code'] == -5: # RPC_INVALID_ADDRESS_OR_KEY
raise BackendRPCError('{} Is `txindex` enabled in {} Core?'.format(response_json['error'], config.BTC_NAME))
elif response_json['error']['code'] in [-28, -8, -2]:
# “Verifying blocks...” or “Block height out of range” or “The network does not appear to fully agree!“
logger.debug('Backend not ready. Sleeping for ten seconds.')
# If Bitcoin Core takes more than `sys.getrecursionlimit() * 10 = 9970`
# seconds to start, this’ll hit the maximum recursion depth limit.
time.sleep(10)
return rpc_call(payload)
else:
raise BackendRPCError('{}'.format(response_json['error']))
def rpc(method, params):
payload = {
"method": method,
"params": params,
"jsonrpc": "2.0",
"id": 0,
}
return rpc_call(payload)
def rpc_batch(payload):
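"""Executes a batch RPC payload in chunks of `config.RPC_BATCH_SIZE` and concatenates the responses."""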
def get_chunks(l, n):
n = max(1, n)
return [l[i:i + n] for i in range(0, len(l), n)]
chunks = get_chunks(payload, config.RPC_BATCH_SIZE)
responses = []
for chunk in chunks:
responses += rpc_call(chunk)
return responses
# TODO: use scriptpubkey_to_address()
@lru_cache(maxsize=4096)
def extract_addresses(tx_hash):
logger.debug('Extract addresses: {}'.format(tx_hash))
# TODO: Use `rpc._batch` here.
tx = getrawtransaction(tx_hash, verbose=True)
addresses = []
for vout in tx['vout']:
if 'addresses' in vout['scriptPubKey']:
addresses += vout['scriptPubKey']['addresses']
txhash_list = [vin['txid'] for vin in tx['vin']]
raw_transactions = getrawtransaction_batch(txhash_list, verbose=True)
for vin in tx['vin']:
vin_tx = raw_transactions[vin['txid']]
vout = vin_tx['vout'][vin['vout']]
if 'addresses' in vout['scriptPubKey']:
addresses += vout['scriptPubKey']['addresses']
return addresses, tx
def unconfirmed_transactions(address):
# NOTE: This operation can be very slow.
logger.debug('Checking mempool for UTXOs.')
unconfirmed_tx = []
mempool = getrawmempool()
for index, tx_hash in enumerate(mempool):
logger.debug('Possible mempool UTXO: {} ({}/{})'.format(tx_hash, index, len(mempool)))
addresses, tx = extract_addresses(tx_hash)
if address in addresses:
unconfirmed_tx.append(tx)
return unconfirmed_tx
def searchrawtransactions(address, unconfirmed=False):
# Get unconfirmed transactions.
if unconfirmed:
logger.debug('Getting unconfirmed transactions.')
unconfirmed = unconfirmed_transactions(address)
else:
unconfirmed = []
# Get confirmed transactions.
try:
logger.debug('Searching raw transactions.')
rawtransactions = rpc('searchrawtransactions', [address, 1, 0, 9999999])
except BackendRPCError as e:
if str(e) == '404 Not Found':
raise BackendRPCError('Unknown RPC command: `searchrawtransactions`. Please use a version of {} Core which supports an address index.'.format(config.BTC_NAME))
else:
raise BackendRPCError(str(e))
confirmed = [tx for tx in rawtransactions if tx['confirmations'] > 0]
return unconfirmed + confirmed
def getblockcount():
return rpc('getblockcount', [])
def getblockhash(blockcount):
return rpc('getblockhash', [blockcount])
def getblock(block_hash):
return rpc('getblock', [block_hash, False])
def getrawtransaction(tx_hash, verbose=False):
return getrawtransaction_batch([tx_hash], verbose=verbose)[tx_hash]
def getrawmempool():
return rpc('getrawmempool', [])
def sendrawtransaction(tx_hex):
return rpc('sendrawtransaction', [tx_hex])
# TODO: move to __init__.py
RAW_TRANSACTIONS_CACHE = {}
RAW_TRANSACTIONS_CACHE_KEYS = []
RAW_TRANSACTIONS_CACHE_SIZE = 10000
raw_transaction_cache_lock = threading.Lock()
def getrawtransaction_batch(txhash_list, verbose=False):
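"""Fetches raw transactions for `txhash_list`, serving from (and filling) the
module-level cache. Returns full JSON dicts when `verbose`, else hex strings.
"""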
with raw_transaction_cache_lock:
tx_hash_call_id = {}
call_id = 0
payload = []
# payload for transactions not in cache
for tx_hash in txhash_list:
if tx_hash not in RAW_TRANSACTIONS_CACHE:
payload.append({
"method": 'getrawtransaction',
"params": [tx_hash, 1],
"jsonrpc": "2.0",
"id": call_id
})
tx_hash_call_id[call_id] = tx_hash
call_id += 1
# populate cache
if len(payload) > 0:
batch_responses = rpc_batch(payload)
for response in batch_responses:
if 'error' not in response or response['error'] is None:
tx_hex = response['result']
tx_hash = tx_hash_call_id[response['id']]
if tx_hash not in RAW_TRANSACTIONS_CACHE:
RAW_TRANSACTIONS_CACHE[tx_hash] = tx_hex
RAW_TRANSACTIONS_CACHE_KEYS.append(tx_hash)
else:
raise BackendRPCError('{}'.format(response['error']))
# get transactions from cache
result = {}
for tx_hash in txhash_list:
if verbose:
result[tx_hash] = RAW_TRANSACTIONS_CACHE[tx_hash]
else:
result[tx_hash] = RAW_TRANSACTIONS_CACHE[tx_hash]['hex']
# remove oldest hashes from cache
while len(RAW_TRANSACTIONS_CACHE_KEYS) > RAW_TRANSACTIONS_CACHE_SIZE:
first_hash = RAW_TRANSACTIONS_CACHE_KEYS[0]
del RAW_TRANSACTIONS_CACHE[first_hash]
RAW_TRANSACTIONS_CACHE_KEYS.pop(0)
return result
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
{
"content_hash": "ff837f0703d15ec2a34f4b6b457f4889",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 171,
"avg_line_length": 35.49065420560748,
"alnum_prop": 0.619486504279131,
"repo_name": "stefcrypto/counterparty-lib",
"id": "f4fec5138388def4206d2f74c64479e3568cd216",
"size": "7609",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "counterpartylib/lib/backend/addrindex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLpgSQL",
"bytes": "988909"
},
{
"name": "Python",
"bytes": "657326"
}
],
"symlink_target": ""
}
|
"""Modal base classes."""
# pylint: disable=too-few-public-methods
from lib import base
from lib.constants import locator
class BaseModal(base.Modal):
"""Base class for creation and edition modals."""
_locator_ui_title = locator.ModalCreateNewObject.UI_TITLE
_locator_ui_code = locator.ModalCreateNewObject.UI_CODE
_locator_button_save = locator.ModalCreateNewObject.BUTTON_SAVE_AND_CLOSE
def __init__(self, driver):
super(BaseModal, self).__init__(driver)
self.button_save_and_close = base.Button(driver, self._locator_button_save)
self.ui_title = base.TextInputField(self._driver, self._locator_ui_title)
self.ui_code = base.TextInputField(self._driver, self._locator_ui_code)
def enter_title(self, text):
"""Enter title to modal."""
self.ui_title.enter_text(text)
def enter_code(self, text):
"""Enter code to modal."""
self.ui_code.enter_text(text)
def fill_minimal_data(self, title, code):
"""Enter common minimal data to modal."""
self.enter_title(title)
self.enter_code(code)
return self.__class__(self._driver)
class ProgramsModal(BaseModal):
"""Modal base for Program objects."""
# pylint: disable=too-many-instance-attributes
_locators = locator.ModalCreateNewProgram
def __init__(self, driver):
super(ProgramsModal, self).__init__(driver)
# user input elements
self.ui_description = base.Iframe(
self._driver, self._locators.UI_DESCRIPTION)
self.ui_notes = base.Iframe(self._driver, self._locators.UI_NOTES)
self.ui_code = base.TextInputField(self._driver, self._locators.UI_CODE)
self.ui_state = base.Dropdown(self._driver, self._locators.UI_STATE)
self.ui_show_optional_fields = base.Toggle(
self._driver, self._locators.BUTTON_SHOW_ALL_OPTIONAL_FIELDS)
self.ui_program_url = base.TextInputField(
self._driver, self._locators.UI_PROGRAM_URL)
self.ui_reference_url = base.TextInputField(
self._driver, self._locators.UI_REFERENCE_URL)
self.ui_effective_date = base.DatePicker(
self._driver, self._locators.EFFECTIVE_DATE_DATEPICKER,
self._locators.UI_EFFECTIVE_DATE)
self.ui_stop_date = base.DatePicker(
self._driver, self._locators.STOP_DATE_DATEPICKER,
self._locators.UI_STOP_DATE)
# static elements
self.title = base.Label(self._driver, self._locators.TITLE)
self.description = base.Label(self._driver, self._locators.DESCRIPTION)
self.program_url = base.Label(self._driver, self._locators.PROGRAM_URL)
def enter_description(self, description):
"""Enter text into description element.
Args: description (basestring)
"""
self.ui_description.find_iframe_and_enter_data(description)
def enter_notes(self, notes):
"""Enter text into notes element.
Args: notes (basestring)
"""
self.ui_notes.find_iframe_and_enter_data(notes)
def enter_code(self, code):
"""Enter text into code element.
Args: code (basestring)
"""
self.ui_code.enter_text(code)
def select_state(self, state):
"""Selects state from dropdown."""
raise NotImplementedError
def toggle_optional_fields(self):
"""Show or hide optional fields."""
raise NotImplementedError
def enter_program_url(self, url):
"""Enter program url for this program object.
Args: url (str)
"""
self.ui_program_url.enter_text(url)
def enter_reference_url(self, url):
"""Enter reference url for this program object.
Args: url (str)
"""
self.ui_reference_url.enter_text(url)
def enter_effective_date_start_month(self, day):
"""Select from datepicker start date.
Args: day (int): # base.DatePicker.select_day_in_current_month
"""
# pylint: disable=invalid-name
self.ui_effective_date.select_day_in_current_month(day)
def enter_stop_date_end_month(self, day):
"""Select from datepicker end date.
Args: day (int): #base.DatePicker.select_day_in_current_month
"""
self.ui_stop_date.select_day_in_current_month(day)
class ControlsModal(BaseModal):
"""Modal base for Control objects."""
# pylint: disable=too-many-instance-attributes
_locators = locator.ModalCreateNewControl
def __init__(self, driver):
super(ControlsModal, self).__init__(driver)
# labels
self.modal_title = base.Label(driver, self._locators.MODAL_TITLE)
self.title = base.Label(driver, self._locators.TITLE)
self.description = base.Label(driver, self._locators.DESCRIPTION)
self.test_plan = base.Label(driver, self._locators.TEST_PLAN)
self.notes = base.Label(driver, self._locators.NOTES)
self.code = base.Label(driver, self._locators.CODE)
self.kind_or_nature = base.Label(driver, self._locators.KIND_OR_NATURE)
self.fraud_related = base.Label(driver, self._locators.FRAUD_RELATED)
self.frequency = base.Label(driver, self._locators.FREQUENCY)
self.assertions = base.Label(driver, self._locators.ASSERTIONS)
self.admin = base.Label(driver, self._locators.ADMIN)
self.control_url = base.Label(driver, self._locators.CONTROL_URL)
self.reference_url = base.Label(driver, self._locators.REFERENCE_URL)
self.significance = base.Label(driver, self._locators.SIGNIFICANCE)
self.type_or_means = base.Label(driver, self._locators.TYPE_OR_MEANS)
self.categories = base.Label(driver, self._locators.CATEGORIES)
self.state = base.Label(driver, self._locators.STATE)
self.ui_description = base.Iframe(driver, self._locators.UI_DESCRIPTION)
self.ui_test_plan = base.Iframe(driver, self._locators.UI_TEST_PLAN)
self.ui_notes = base.Iframe(driver, self._locators.UI_NOTES)
self.ui_code = base.TextInputField(driver, self._locators.UI_CODE)
self.ui_control_url = base.TextInputField(
driver, self._locators.UI_CONTROL_URL)
self.ui_reference_url = base.TextInputField(
driver, self._locators.UI_REFERENCE_URL)
# datepickers
self.ui_effective_date = base.DatePicker(
driver, self._locators.EFFECTIVE_DATE,
self._locators.DATEPICKER_EFFECTIVE_DATE)
self.ui_stop_date = base.DatePicker(
driver, self._locators.STOP_DATE, self._locators.DATEPICKER_STOP_DATE)
# dropdowns
self.ui_kind_or_nature = base.Dropdown(
driver, self._locators.DROPDOWN_KIND_OR_NATURE)
self.ui_fraud_related = base.Dropdown(
driver, self._locators.DROPDOWN_FRAUD_RELATED)
self.ui_type_or_means = base.Dropdown(
driver, self._locators.DROPDOWN_TYPE_OR_MEANS)
self.ui_frequency = base.Dropdown(
driver, self._locators.DROPDOWN_FREQUENCY)
# selectable lists
self.selectable_assertions = base.Selectable(
driver, self._locators.SELECTABLE_ASSERTIONS)
self.selectable_categories = base.Selectable(
driver, self._locators.SELECTABLE_CATEGORIES)
# buttons
self.button_add_owner = base.Button(
driver, self._locators.BUTTON_ADD_OWNER)
self.button_hide_all_optional_fields = base.Button(
driver, self._locators.BUTTON_HIDE_ALL_OPTIONAL_FIELDS)
def enter_description(self, text):
"""
Args: text (basestring)
"""
self.ui_description.find_iframe_and_enter_data(text)
def enter_test_plan(self, text):
"""
Args: text (basestring)
"""
self.ui_test_plan.find_iframe_and_enter_data(text)
def enter_notes(self, text):
"""
Args: text (basestring)
"""
self.ui_notes.find_iframe_and_enter_data(text)
def enter_code(self, text):
"""
Args: text (basestring)
"""
self.ui_code.enter_text(text)
class RisksModal(BaseModal):
"""Modal base for Risk objects."""
_locators = locator.ModalCreateNewRisk
_locator_ui_title = locator.ModalCreateNewRisk.UI_TITLE
def __init__(self, driver):
super(RisksModal, self).__init__(driver)
self.ui_description = base.Iframe(driver, self._locators.UI_DESCRIPTION)
def enter_description(self, text):
self.ui_description.find_iframe_and_enter_data(text)
class OrgGroupsModal(BaseModal):
"""Modal base for Org Group objects."""
_locator_ui_title = locator.ModalCreateNewOrgGroup.UI_TITLE
class IssuesModal(BaseModal):
"""Modal base for Issue objects."""
_locator_ui_title = locator.ModalCreateNewIssue.UI_TITLE
class ProcessesModal(BaseModal):
"""Modal base for Process objects."""
_locator_ui_title = locator.ModalCreateNewProcess.UI_TITLE
class DataAssetsModal(BaseModal):
"""Modal base for DataAsset objects."""
_locator_ui_title = locator.ModalCreateNewDataAsset.UI_TITLE
class SystemsModal(BaseModal):
"""Modal base for System objects."""
_locator_ui_title = locator.ModalCreateNewSystem.UI_TITLE
class ProductsModal(BaseModal):
"""Modal base for Product objects."""
_locator_ui_title = locator.ModalCreateNewProduct.UI_TITLE
class ProjectsModal(BaseModal):
"""Modal base for Project objects."""
_locator_ui_title = locator.ModalCreateNewProject.UI_TITLE
class AsmtTmplModal(BaseModal):
"""Modal base for Assessment Template objects."""
_locators = locator.ModalCreateNewAsmtTmpl
def __init__(self, driver):
super(AsmtTmplModal, self).__init__(driver)
class AsmtsModal(BaseModal):
"""Modal base for Assessment objects."""
_locators = locator.ModalCreateNewAsmt
def __init__(self, driver):
super(AsmtsModal, self).__init__(driver)
|
{
"content_hash": "f30d4bea79e4691fa7f4fa7f2a1bb140",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 79,
"avg_line_length": 35.51526717557252,
"alnum_prop": 0.7003761418592155,
"repo_name": "AleksNeStu/ggrc-core",
"id": "f1db49c8c5491734bd68c4329e441341298ac60a",
"size": "9417",
"binary": false,
"copies": "1",
"ref": "refs/heads/release/0.10-Raspberry",
"path": "test/selenium/src/lib/page/modal/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "221201"
},
{
"name": "HTML",
"bytes": "1055542"
},
{
"name": "JavaScript",
"bytes": "1872353"
},
{
"name": "Makefile",
"bytes": "7044"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2700938"
},
{
"name": "Shell",
"bytes": "31273"
}
],
"symlink_target": ""
}
|
"""Classes to generate point and figure charts."""
from collections import OrderedDict
from datetime import datetime
from decimal import Decimal
import logging
import pypf.terminal_format
class PFChart(object):
"""Base class for point and figure charts."""
TWOPLACES = Decimal('0.01')
def __init__(self, instrument, box_size=.01, duration=1.0,
interval='d', method='hl', reversal=3, style=False,
trend_lines=False, debug=False, indent=0, truncate=0):
"""Initialize common functionality."""
self._log = logging.getLogger(self.__class__.__name__)
if debug is True:
self._log.setLevel(logging.DEBUG)
self._log.debug(self)
self.instrument = instrument
self.interval = interval
self.box_size = Decimal(box_size).quantize(PFChart.TWOPLACES)
self.duration = Decimal(duration).quantize(PFChart.TWOPLACES)
self.method = method
self.reversal = int(reversal)
self.style_output = style
self.trend_lines = trend_lines
self.indent = indent
self.truncate = truncate
@property
def indent(self):
"""Get the box_size."""
return "".rjust(self._indent)
@indent.setter
def indent(self, value):
self._indent = value
self._log.debug('set self._indent to ' + str(value))
@property
def truncate(self):
"""Get the box_size."""
return self._truncate
@truncate.setter
def truncate(self, value):
self._truncate = value
self._log.debug('set self._truncate to ' + str(self._truncate))
@property
def box_size(self):
"""Get the box_size."""
return self._box_size
@box_size.setter
def box_size(self, value):
self._box_size = value
self._log.debug('set self._box_size to ' + str(self._box_size))
@property
def chart(self):
"""Get the chart."""
return self._chart
@property
def chart_meta_data(self):
"""Get the chart meta data."""
return self._chart_meta_data
@property
def duration(self):
"""Get the duration."""
return self._duration
@duration.setter
def duration(self, value):
self._duration = value
self._log.debug('set self._duration to ' + str(self._duration))
@property
def instrument(self):
"""Get the instrument."""
return self._instrument
@instrument.setter
def instrument(self, value):
self._instrument = value
self._log.debug('set self._instrument to ' + str(self._instrument))
@property
def interval(self):
"""Specify day (d), week (w), or month (m) interval."""
return self._interval
@interval.setter
def interval(self, value):
if value not in ["d", "w", "m"]:
raise ValueError("incorrect interval: "
"valid intervals are d, w, m")
self._interval = value
self._log.debug('set self._interval to '
+ str(self._interval))
@property
def method(self):
"""Get the method."""
return self._method
@method.setter
def method(self, value):
if value not in ["hl", "c"]:
raise ValueError("incorrect method: "
"valid methods are hl, c")
self._method = value
self._log.debug('set self._method to ' + self._method)
@property
def reversal(self):
"""Get the reversal."""
return self._reversal
@reversal.setter
def reversal(self, value):
self._reversal = value
self._log.debug('set self._reversal to ' + str(self._reversal))
@property
def style_output(self):
"""Get the style_output."""
return self._style_output
@style_output.setter
def style_output(self, value):
self._style_output = value
self._log.debug('set self._style_output to ' + str(self._style_output))
@property
def trend_lines(self):
"""Get the trend_lines."""
return self._trend_lines
@trend_lines.setter
def trend_lines(self, value):
self._trend_lines = value
self._log.debug('set self._trend_lines to ' + str(self._trend_lines))
def create_chart(self):
"""Populate the data and create the chart."""
self._initialize()
self._set_historical_data()
self._set_price_fields()
self._set_scale()
self._set_chart_data()
self._chart = self._get_chart()
def _get_chart(self):
self._set_current_state()
chart = ""
chart += "\n"
chart += self._get_chart_title()
index = len(self._chart_data[0]) - 1
if self.truncate > 0:
first_column = self._chart_data.pop(0)
self._chart_data = self._chart_data[-self.truncate:]
self._chart_data.insert(0, first_column)
scale_right = None
self._log.info(len(self._chart_data))
while index >= 0:
found = False
first = True
for column in self._chart_data:
if first:
first = False
continue
if index in column:
found = True
break
if found:
first = True
for column in self._chart_data:
if index in column:
if first:
scale_value = column[index]
if index == self._current_scale_index:
scale_left = (self._style('red',
self._style('bold', '{:7.2f}'))
.format(scale_value))
scale_right = (self._style('red',
self._style('bold', '<< '))
+ self._style('red',
self._style('bold',
'{:.2f}'))
.format(self._current_close))
else:
scale_left = '{:7.2f}'.format(scale_value)
scale_right = '{:.2f}'.format(scale_value)
chart = chart + self.indent + scale_left + '| '
first = False
else:
chart = chart + ' ' + column[index][0]
else:
chart += ' '
chart += ' |' + scale_right
chart += "\n"
index -= 1
return chart
def _get_chart_title(self):
self._set_current_prices()
title = self.indent
title = title + self._style('bold',
self._style('underline',
self.instrument.symbol))
title = (title + ' '
+ "(" + self._style('bold', str(self.instrument.download_timestamp.strftime("%a %b %d, %Y %H:%M:%S"))) + ")\n")
title = (title + self.indent
+ "o: {:.2f} h: {:.2f} l: {:.2f} c: {:.2f}"
.format(self._current_open,
self._current_high, self._current_low,
self._current_close)
+ "\n")
title = (title + self.indent
+ "box: "
+ str((self.box_size * 100).quantize(PFChart.TWOPLACES)))
title = title + ", reversal: " + str(self.reversal)
title = title + ", method: " + str(self.method) + "\n"
title = (title + self.indent
+ "signal: "
+ self._style('bold', self._current_signal)
+ ", status: " + self._style('bold', self._current_status)
+ "\n\n")
return title
def _get_month(self, date_value):
datetime_object = datetime.strptime(date_value, '%Y-%m-%d')
month = str(datetime_object.month)
if month == '10':
month = 'A'
elif month == '11':
month = 'B'
elif month == '12':
month = 'C'
return self._style('bold', self._style('red', month))
def _get_scale_index(self, value, direction):
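"""Returns the scale index for `value`, rounding down for 'x' columns and up otherwise."""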
index = 0
while index < len(self._scale):
if self._scale[index] == value:
return index
elif self._scale[index] > value:
if direction == 'x':
return index - 1
else:
return index
index += 1
def _get_status(self, signal, direction):
if signal == 'buy' and direction == 'x':
status = 'bull confirmed'
elif signal == 'buy' and direction == 'o':
status = 'bull correction'
elif signal == 'sell' and direction == 'o':
status = 'bear confirmed'
elif signal == 'sell' and direction == 'x':
status = 'bear correction'
else:
status = 'none'
return status
def _set_chart_data(self):
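"""Builds the chart columns from historical data, tracking direction,
reversals, buy/sell signals, and support/resistance lines."""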
self._log.info('generating chart')
self._chart_data = []
self._chart_meta_data = OrderedDict()
self._support_lines = []
self._resistance_lines = []
self._chart_data.append(self._scale)
column = OrderedDict()
column_index = 1
direction = 'x'
index = None
month = None
signal = 'none'
prior_high_index = len(self._scale) - 1
prior_low_index = 0
for row in self._historical_data:
day = self._historical_data[row]
action = 'none'
move = 0
current_month = self._get_month(day[self._date_field])
if index is None:
# First day - set the starting index based
# on the high and 'x' direction
index = self._get_scale_index(day[self._high_field], 'x')
column[index] = ['x', day[self._date_field]]
month = current_month
continue
if direction == 'x':
scale_index = self._get_scale_index(day[self._high_field], 'x')
if scale_index > index:
# new high
action = 'x'
move = scale_index - index
if signal != 'buy' and scale_index > prior_high_index:
signal = 'buy'
first = True
while index < scale_index:
index += 1
if first:
if current_month != month:
column[index] = [current_month,
day[self._date_field]]
else:
column[index] = ['x', day[self._date_field]]
first = False
else:
column[index] = ['x', day[self._date_field]]
month = current_month
else:
# check for reversal
x_scale_index = scale_index
scale_index = self._get_scale_index(day[self._low_field],
'o')
if index - scale_index >= self.reversal:
# reversal
action = 'reverse x->o'
move = index - scale_index
if signal != 'sell' and scale_index < prior_low_index:
signal = 'sell'
prior_high_index = index
self._resistance_lines.append([column_index,
prior_high_index + 1])
self._chart_data.append(column)
column_index += 1
column = OrderedDict()
direction = 'o'
first = True
while index > scale_index:
index -= 1
if first:
if current_month != month:
column[index] = [current_month,
day[self._date_field]]
else:
column[index] = ['d',
day[self._date_field]]
first = False
else:
column[index] = ['d', day[self._date_field]]
month = current_month
else:
# no reversal - reset the scale_index
scale_index = x_scale_index
else:
# in an 'o' column
scale_index = self._get_scale_index(day[self._low_field], 'o')
if scale_index < index:
# new low
action = 'o'
move = index - scale_index
if signal != 'sell' and scale_index < prior_low_index:
signal = 'sell'
first = True
while index > scale_index:
index -= 1
if first:
if current_month != month:
column[index] = [current_month,
day[self._date_field]]
else:
column[index] = ['o', day[self._date_field]]
first = False
else:
column[index] = ['o', day[self._date_field]]
month = current_month
else:
# check for reversal
o_scale_index = scale_index
scale_index = self._get_scale_index(day[self._high_field],
'x')
if scale_index - index >= self.reversal:
# reversal
action = 'reverse o->x'
move = scale_index - index
if signal != 'buy' and scale_index > prior_high_index:
signal = 'buy'
prior_low_index = index
self._support_lines.append([column_index,
prior_low_index - 1])
self._chart_data.append(column)
column_index += 1
column = OrderedDict()
direction = 'x'
first = True
while index < scale_index:
index += 1
if first:
if current_month != month:
column[index] = [current_month,
day[self._date_field]]
else:
column[index] = ['u',
day[self._date_field]]
first = False
else:
column[index] = ['u', day[self._date_field]]
month = current_month
else:
# no reversal - reset the scale_index
scale_index = o_scale_index
# Store the meta data for the day
status = self._get_status(signal, direction)
scale_value = (self._scale[scale_index]
.quantize(PFChart.TWOPLACES))
prior_high = self._scale[prior_high_index]
prior_low = self._scale[prior_low_index]
self._store_base_metadata(day, signal, status, action, move,
column_index, scale_index, scale_value,
direction, prior_high, prior_low)
self._chart_data.append(column)
if len(self._chart_data[1]) < self.reversal:
self._chart_data.pop(1)
for line in self._support_lines:
line[0] = line[0] - 1
for line in self._resistance_lines:
line[0] = line[0] - 1
if self.trend_lines:
self._set_trend_lines()
return self._chart_data
def _initialize(self):
self._chart = None
self._chart_data = []
self._chart_meta_data = OrderedDict()
self._historical_data = []
self._scale = OrderedDict()
self._current_date = None
self._current_open = None
self._current_high = None
self._current_low = None
self._current_close = None
self._date_field = None
self._open_field = None
self._high_field = None
self._low_field = None
self._close_field = None
self._volume_field = None
self._current_signal = None
self._current_status = None
self._current_action = None
self._current_move = None
self._current_column_index = None
self._current_scale_index = None
self._current_scale_value = None
self._current_direction = None
self._support_lines = []
self._resistance_lines = []
def _is_complete_line(self, start_point, line_type='support'):
c_index = start_point[0]
s_index = start_point[1]
while c_index < len(self._chart_data):
if s_index in self._chart_data[c_index]:
return False
c_index += 1
if line_type == 'support':
s_index += 1
else:
s_index -= 1
if c_index - start_point[0] > 2:
return True
else:
return False
def _set_trend_lines(self):
for start_point in self._support_lines:
c_index = start_point[0]
s_index = start_point[1]
if self._is_complete_line(start_point, 'support'):
while c_index < len(self._chart_data):
self._chart_data[c_index][s_index] = [self._style('bold',
self._style('blue',
'.')),
'']
c_index += 1
s_index += 1
for start_point in self._resistance_lines:
c_index = start_point[0]
s_index = start_point[1]
if self._is_complete_line(start_point, 'resistance'):
while c_index < len(self._chart_data):
self._chart_data[c_index][s_index] = [self._style('bold',
self._style('blue',
'.')),
'']
c_index += 1
s_index -= 1
def _set_current_prices(self):
day = next(reversed(self._historical_data))
current_day = self._historical_data[day]
self._current_date = current_day[self._date_field]
self._current_open = (current_day[self._open_field]
.quantize(PFChart.TWOPLACES))
self._current_high = (current_day[self._high_field]
.quantize(PFChart.TWOPLACES))
self._current_low = (current_day[self._low_field]
.quantize(PFChart.TWOPLACES))
self._current_close = (current_day[self._close_field]
.quantize(PFChart.TWOPLACES))
def _set_current_state(self):
current_meta_index = next(reversed(self._chart_meta_data))
current_meta = self._chart_meta_data[current_meta_index]
self._current_signal = current_meta['signal']
self._current_status = current_meta['status']
self._current_action = current_meta['action']
self._current_move = current_meta['move']
self._current_column_index = current_meta['column_index']
self._current_scale_index = current_meta['scale_index']
self._current_scale_value = current_meta['scale_value']
self._current_direction = current_meta['direction']
def _set_historical_data(self):
self._log.info('setting historical data')
if len(self.instrument.daily_historical_data) == 0:
self.instrument.populate_data()
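        # self.duration is expressed in years; convert it to the approximate
        # number of periods per year (~252 trading days, 52 weeks, 12 months).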
if self.interval == 'd':
days = int(self.duration * 252)
self._historical_data = self.instrument.daily_historical_data
elif self.interval == 'w':
days = int(self.duration * 52)
self._historical_data = self.instrument.weekly_historical_data
elif self.interval == 'm':
days = int(self.duration * 12)
self._historical_data = self.instrument.monthly_historical_data
if len(self._historical_data) > days:
offset = len(self._historical_data) - days
i = 0
while i < offset:
self._historical_data.popitem(False)
i += 1
def _set_price_fields(self):
if self.method == 'hl':
self._high_field = 'High'
self._low_field = 'Low'
else:
self._high_field = 'Close'
self._low_field = 'Close'
self._open_field = 'Open'
self._close_field = 'Close'
self._volume_field = 'Volume'
self._date_field = 'Date'
def _set_scale(self):
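        # Build a geometric price scale spanning the historical low..high
        # range: each box is (1 + box_size) times the one below it.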
row = next(iter(self._historical_data))
day = self._historical_data[row]
highest = day[self._high_field]
lowest = day[self._low_field]
for row in self._historical_data:
day = self._historical_data[row]
if day[self._high_field] > highest:
highest = day[self._high_field]
if day[self._low_field] < lowest:
lowest = day[self._low_field]
temp_scale = []
        current = Decimal('0.01')  # construct from a string to avoid float artifacts
temp_scale.append(current)
while current <= highest:
value = current + (current * self.box_size)
temp_scale.append(value)
current = value
slice_point = 0
for index, scale_value in enumerate(temp_scale):
if scale_value > lowest:
slice_point = index - 1
break
temp_scale = temp_scale[slice_point:]
self._scale = OrderedDict()
for index, scale_value in enumerate(temp_scale):
self._scale[index] = scale_value.quantize(PFChart.TWOPLACES)
def _store_base_metadata(self, day, signal, status, action, move,
column_index, scale_index, scale_value,
direction, prior_high, prior_low):
date_value = day['Date']
self._chart_meta_data[date_value] = {}
self._chart_meta_data[date_value]['signal'] = signal
self._chart_meta_data[date_value]['status'] = status
self._chart_meta_data[date_value]['action'] = action
self._chart_meta_data[date_value]['move'] = move
self._chart_meta_data[date_value]['column_index'] = column_index
self._chart_meta_data[date_value]['scale_index'] = scale_index
self._chart_meta_data[date_value]['scale_value'] = scale_value
self._chart_meta_data[date_value]['direction'] = direction
        self._chart_meta_data[date_value]['prior_high'] = (
            prior_high.quantize(PFChart.TWOPLACES))
        self._chart_meta_data[date_value]['prior_low'] = (
            prior_low.quantize(PFChart.TWOPLACES))
        self._chart_meta_data[date_value]['date'] = day['Date']
        self._chart_meta_data[date_value]['open'] = (
            day['Open'].quantize(PFChart.TWOPLACES))
        self._chart_meta_data[date_value]['high'] = (
            day['High'].quantize(PFChart.TWOPLACES))
        self._chart_meta_data[date_value]['low'] = (
            day['Low'].quantize(PFChart.TWOPLACES))
        self._chart_meta_data[date_value]['close'] = (
            day['Close'].quantize(PFChart.TWOPLACES))
self._chart_meta_data[date_value]['volume'] = day['Volume']
self._store_custom_metadata(day)
def _store_custom_metadata(self, day):
pass
def _style(self, style, message):
if self.style_output:
method = getattr(pypf.terminal_format, style)
return method(message)
else:
return message
|
{
"content_hash": "35c2c8fdeb6d1acf943cd2196e794fcc",
"timestamp": "",
"source": "github",
"line_count": 671,
"max_line_length": 128,
"avg_line_length": 39.31147540983606,
"alnum_prop": 0.44404427932367885,
"repo_name": "pviglucci/pypf",
"id": "bd5d2f88eb4b9317786c5528ae6b6fa8b58e47dc",
"size": "26378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pypf/chart.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45930"
}
],
"symlink_target": ""
}
|
"""
Q1. WAP that will create a new list of ages given a list of years of birth using list comprehension.
Q2. Create a list of all the characters in a string which are not vowels using list
comprehension.
Q3. Create a list of squares of numbers given a list of ten numbers using list comprehension.
Q4. WAP which will input a string from the user and give a list of the first letter of every word
using list comprehension.
Q5. WAP to create a tuple containing types of fishes and using list comprehension print all the items
of the tuple if the item is not octopus.
Q6. Differentiate between deep copy and shallow copy.
Q7. Write some of the features of Python.
Q8. How will you get all the keys from the dictionary? Write a small program to demonstrate it.
Q9. Do the following conversions:
    String to integer
    String to long
    String to float
    String to a tuple
    String to a list
    String to a set
    String to a dictionary
Q10. Create a dictionary using a tuple.
Q11. What do you understand by a frozen set?
Q12. What is the purpose of the following operators:
    1) **
    2) //
    3) is
    4) not in
"""
#break and continue
#P1
List_Year=[]
n=int(input("Enter number of elements in your list: "))
for i in range(n):
x=int(input("Enter Year Of Birth: "))
List_Year.append(x)
List_age=[2019-x for x in List_Year]
print("LIst of Ages are: ",List_age)
#P2
s=input("Enter a string: ")
l=[x for x in s if x.lower() not in 'aeiou']  #handle uppercase vowels too
print("Your list of consonants is:",l)
#P3
a=int(input("Enter starting number: "))
l=[x**2 for x in range(a,a+10)]
print("Your list of square of numbers is:",l)
#P4
s=input("Enter any string: ")
ls=s.split()
l=[x[0] for x in ls ]
print("Your first letter of all words are:",l)
#P5
List=[]
n=int(input("Enter number of fishes in your list: "))
for i in range(n):
x=input("Enter Name of Fish: ")
List.append(x)
t=tuple(List)
l=[x for x in t if x !='octopus']
print("Your list is:",l)
#FEB 5/2019
#Some more functions on lists, tuples and dictionaries.
t=(23,45,67,78)
len(t)
max(t) #Only works when elements have the same datatype; for strings it compares ASCII values.
min(t)
d1={'aman':1,'srishti':2,'babita':3}
s=str(d1)
print(s)
#dict also has clear() and copy() like lists; note copy() is a *shallow* copy,
#copy.deepcopy() gives a deep copy, and simple assignment is no copy at all -
#it just binds another name to the same object.
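#Quick demo of the difference (uses the standard-library copy module):
import copy
d_demo={'nums':[1,2]}
shallow=d_demo.copy()
deep=copy.deepcopy(d_demo)
d_demo['nums'].append(3)
print(shallow['nums'])  # [1, 2, 3] - the inner list is shared
print(deep['nums'])     # [1, 2] - fully independent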
seq={'name','class','roll'}
dict1={}
dict1=dict1.fromkeys(seq,10)#will form keys from the elements of seq, each with the value 10
print(dict1)
l1=[1,2,3]
dict1=dict1.fromkeys(l1)#forms keys from l1; values default to None
dict1={'1':1,'2':2,'3':3}
#we can delete things using del (list items, dict entries, or whole names - including tuples).
#Q1. Count the occurrences of each word in a string.
#Q2. Calculate the factorial of a number using functions.
#P1
para=input("Enter a paragraph: ")
para=para.lower()  #str.lower() returns a new string; it must be assigned back
paral=para.split()
dict1={}
dict1=dict1.fromkeys(paral,0)
for p in paral:
dict1[p]=dict1[p]+1
print(dict1)
para=input("Enter a paragraph: ")
para=para.lower()
l=para.split()
d={}
for i in l:
    d[i]=d.get(i,0)+1
print(d)
#P2
def facto(n):
    if n==0 or n==1:
        return 1
    elif n>1:
        return n*facto(n-1)
    else:
        print("Error!!! Negative numbers don't have a factorial")
num=int(input("Enter the number whose factorial you want to find: "))
print("Your {number}! is: {factorial}".format(number=num,factorial=facto(num)))
#Create a function to check whether the input is an integer or not.
#Create a function to find the LCM of two given numbers.
#Create a function which will give the sum of ASCII values of all the characters in a string.
#Create a function to check whether a number is prime or not.
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def check(i):
    if i-round(i)==0:
        print("It is an integer")
    else:
        print("It is not an integer")
a=float(input("Input: "))
check(a)
def check(i):
if type(i)==type(1):
print("It is an integer")
else:
print("It is not an integer")
#---------------------------------------------------------------------------------------------------------------------
def lcm(a,b):
    """Compute the LCM via the identity lcm(a,b)*gcd(a,b)==a*b."""
    x,y=a,b
    while y:
        x,y=y,x%y  #Euclid's algorithm: x ends up as gcd(a,b)
    return (a*b)//x
print("LCM of 12 and 14 is:",lcm(12,14))
|
{
"content_hash": "d160ad41fe57f6717059f87c543327ba",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 120,
"avg_line_length": 21.877828054298643,
"alnum_prop": 0.5772492244053774,
"repo_name": "Akagi201/learning-python",
"id": "ff53c480008c31ef08be9829348c0940ae3596e4",
"size": "4835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "list/practice1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "125"
},
{
"name": "CSS",
"bytes": "82315"
},
{
"name": "HTML",
"bytes": "16738"
},
{
"name": "JavaScript",
"bytes": "253132"
},
{
"name": "Jupyter Notebook",
"bytes": "3666"
},
{
"name": "Less",
"bytes": "2022"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Procfile",
"bytes": "21"
},
{
"name": "Python",
"bytes": "336950"
},
{
"name": "Rich Text Format",
"bytes": "49342"
},
{
"name": "Shell",
"bytes": "4498"
}
],
"symlink_target": ""
}
|
"""State and behavior for ingestion during an operation."""
import abc
import collections
import enum
import six
from grpc.framework.core import _constants
from grpc.framework.core import _interfaces
from grpc.framework.core import _utilities
from grpc.framework.foundation import abandonment
from grpc.framework.foundation import callable_util
from grpc.framework.interfaces.base import base
_CREATE_SUBSCRIPTION_EXCEPTION_LOG_MESSAGE = 'Exception initializing ingestion!'
_INGESTION_EXCEPTION_LOG_MESSAGE = 'Exception during ingestion!'
class _SubscriptionCreation(
collections.namedtuple(
'_SubscriptionCreation',
('kind', 'subscription', 'code', 'details',))):
"""A sum type for the outcome of ingestion initialization.
Attributes:
kind: A Kind value coarsely indicating how subscription creation completed.
subscription: The created subscription. Only present if kind is
Kind.SUBSCRIPTION.
code: A code value to be sent to the other side of the operation along with
an indication that the operation is being aborted due to an error on the
remote side of the operation. Only present if kind is Kind.REMOTE_ERROR.
details: A details value to be sent to the other side of the operation
along with an indication that the operation is being aborted due to an
error on the remote side of the operation. Only present if kind is
Kind.REMOTE_ERROR.
"""
@enum.unique
class Kind(enum.Enum):
SUBSCRIPTION = 'subscription'
REMOTE_ERROR = 'remote error'
ABANDONED = 'abandoned'
class _SubscriptionCreator(six.with_metaclass(abc.ABCMeta)):
"""Common specification of subscription-creating behavior."""
@abc.abstractmethod
def create(self, group, method):
"""Creates the base.Subscription of the local customer.
Any exceptions raised by this method should be attributed to and treated as
defects in the customer code called by this method.
Args:
group: The group identifier of the operation.
method: The method identifier of the operation.
Returns:
A _SubscriptionCreation describing the result of subscription creation.
"""
raise NotImplementedError()
class _ServiceSubscriptionCreator(_SubscriptionCreator):
"""A _SubscriptionCreator appropriate for service-side use."""
def __init__(self, servicer, operation_context, output_operator):
"""Constructor.
Args:
servicer: The base.Servicer that will service the operation.
operation_context: A base.OperationContext for the operation to be passed
to the customer.
output_operator: A base.Operator for the operation to be passed to the
customer and to be called by the customer to accept operation data
emitted by the customer.
"""
self._servicer = servicer
self._operation_context = operation_context
self._output_operator = output_operator
def create(self, group, method):
try:
subscription = self._servicer.service(
group, method, self._operation_context, self._output_operator)
except base.NoSuchMethodError as e:
return _SubscriptionCreation(
_SubscriptionCreation.Kind.REMOTE_ERROR, None, e.code, e.details)
except abandonment.Abandoned:
return _SubscriptionCreation(
_SubscriptionCreation.Kind.ABANDONED, None, None, None)
else:
return _SubscriptionCreation(
_SubscriptionCreation.Kind.SUBSCRIPTION, subscription, None, None)
def _wrap(behavior):
def wrapped(*args, **kwargs):
try:
behavior(*args, **kwargs)
except abandonment.Abandoned:
return False
else:
return True
return wrapped
class _IngestionManager(_interfaces.IngestionManager):
"""An implementation of _interfaces.IngestionManager."""
def __init__(
self, lock, pool, subscription, subscription_creator, termination_manager,
transmission_manager, expiration_manager, protocol_manager):
"""Constructor.
Args:
lock: The operation-wide lock.
pool: A thread pool in which to execute customer code.
subscription: A base.Subscription describing the customer's interest in
operation values from the other side. May be None if
subscription_creator is not None.
subscription_creator: A _SubscriptionCreator wrapping the portion of
customer code that when called returns the base.Subscription describing
the customer's interest in operation values from the other side. May be
None if subscription is not None.
termination_manager: The _interfaces.TerminationManager for the operation.
transmission_manager: The _interfaces.TransmissionManager for the
operation.
expiration_manager: The _interfaces.ExpirationManager for the operation.
protocol_manager: The _interfaces.ProtocolManager for the operation.
"""
self._lock = lock
self._pool = pool
self._termination_manager = termination_manager
self._transmission_manager = transmission_manager
self._expiration_manager = expiration_manager
self._protocol_manager = protocol_manager
if subscription is None:
self._subscription_creator = subscription_creator
self._wrapped_operator = None
elif subscription.kind is base.Subscription.Kind.FULL:
self._subscription_creator = None
self._wrapped_operator = _wrap(subscription.operator.advance)
else:
# TODO(nathaniel): Support other subscriptions.
raise ValueError('Unsupported subscription "%s"!' % subscription.kind)
self._pending_initial_metadata = None
self._pending_payloads = []
self._pending_completion = None
self._local_allowance = 1
# A nonnegative integer or None, with None indicating that the local
# customer is done emitting anyway so there's no need to bother it by
# informing it that the remote customer has granted it further permission to
# emit.
self._remote_allowance = 0
self._processing = False
def _abort_internal_only(self):
self._subscription_creator = None
self._wrapped_operator = None
self._pending_initial_metadata = None
self._pending_payloads = None
self._pending_completion = None
def _abort_and_notify(self, outcome_kind, code, details):
self._abort_internal_only()
if self._termination_manager.outcome is None:
outcome = _utilities.Outcome(outcome_kind, code, details)
self._termination_manager.abort(outcome)
self._transmission_manager.abort(outcome)
self._expiration_manager.terminate()
def _operator_next(self):
"""Computes the next step for full-subscription ingestion.
Returns:
An initial_metadata, payload, completion, allowance, continue quintet
indicating what operation values (if any) are available to pass into
customer code and whether or not there is anything immediately
actionable to call customer code to do.
"""
if self._wrapped_operator is None:
return None, None, None, None, False
else:
initial_metadata, payload, completion, allowance, action = [None] * 5
if self._pending_initial_metadata is not None:
initial_metadata = self._pending_initial_metadata
self._pending_initial_metadata = None
action = True
if self._pending_payloads and 0 < self._local_allowance:
payload = self._pending_payloads.pop(0)
self._local_allowance -= 1
action = True
if not self._pending_payloads and self._pending_completion is not None:
completion = self._pending_completion
self._pending_completion = None
action = True
if self._remote_allowance is not None and 0 < self._remote_allowance:
allowance = self._remote_allowance
self._remote_allowance = 0
action = True
return initial_metadata, payload, completion, allowance, bool(action)
def _operator_process(
self, wrapped_operator, initial_metadata, payload,
completion, allowance):
while True:
advance_outcome = callable_util.call_logging_exceptions(
wrapped_operator, _INGESTION_EXCEPTION_LOG_MESSAGE,
initial_metadata=initial_metadata, payload=payload,
completion=completion, allowance=allowance)
if advance_outcome.exception is None:
if advance_outcome.return_value:
with self._lock:
if self._termination_manager.outcome is not None:
return
if completion is not None:
self._termination_manager.ingestion_complete()
initial_metadata, payload, completion, allowance, moar = (
self._operator_next())
if not moar:
self._processing = False
return
else:
with self._lock:
if self._termination_manager.outcome is None:
self._abort_and_notify(
base.Outcome.Kind.LOCAL_FAILURE, None, None)
return
else:
with self._lock:
if self._termination_manager.outcome is None:
self._abort_and_notify(base.Outcome.Kind.LOCAL_FAILURE, None, None)
return
def _operator_post_create(self, subscription):
wrapped_operator = _wrap(subscription.operator.advance)
with self._lock:
if self._termination_manager.outcome is not None:
return
self._wrapped_operator = wrapped_operator
self._subscription_creator = None
metadata, payload, completion, allowance, moar = self._operator_next()
if not moar:
self._processing = False
return
self._operator_process(
wrapped_operator, metadata, payload, completion, allowance)
def _create(self, subscription_creator, group, name):
outcome = callable_util.call_logging_exceptions(
subscription_creator.create,
_CREATE_SUBSCRIPTION_EXCEPTION_LOG_MESSAGE, group, name)
if outcome.return_value is None:
with self._lock:
if self._termination_manager.outcome is None:
self._abort_and_notify(base.Outcome.Kind.LOCAL_FAILURE, None, None)
elif outcome.return_value.kind is _SubscriptionCreation.Kind.ABANDONED:
with self._lock:
if self._termination_manager.outcome is None:
self._abort_and_notify(base.Outcome.Kind.LOCAL_FAILURE, None, None)
elif outcome.return_value.kind is _SubscriptionCreation.Kind.REMOTE_ERROR:
code = outcome.return_value.code
details = outcome.return_value.details
with self._lock:
if self._termination_manager.outcome is None:
self._abort_and_notify(
base.Outcome.Kind.REMOTE_FAILURE, code, details)
elif outcome.return_value.subscription.kind is base.Subscription.Kind.FULL:
self._protocol_manager.set_protocol_receiver(
outcome.return_value.subscription.protocol_receiver)
self._operator_post_create(outcome.return_value.subscription)
else:
# TODO(nathaniel): Support other subscriptions.
raise ValueError(
'Unsupported "%s"!' % outcome.return_value.subscription.kind)
def _store_advance(self, initial_metadata, payload, completion, allowance):
if initial_metadata is not None:
self._pending_initial_metadata = initial_metadata
if payload is not None:
self._pending_payloads.append(payload)
if completion is not None:
self._pending_completion = completion
if allowance is not None and self._remote_allowance is not None:
self._remote_allowance += allowance
def _operator_advance(self, initial_metadata, payload, completion, allowance):
if self._processing:
self._store_advance(initial_metadata, payload, completion, allowance)
else:
action = False
if initial_metadata is not None:
action = True
if payload is not None:
if 0 < self._local_allowance:
self._local_allowance -= 1
action = True
else:
self._pending_payloads.append(payload)
          payload = None  # already queued above; nothing to deliver right now
if completion is not None:
        if self._pending_payloads:
          self._pending_completion = completion
          completion = None  # hold delivery until the queued payloads drain
else:
action = True
if allowance is not None and self._remote_allowance is not None:
allowance += self._remote_allowance
self._remote_allowance = 0
action = True
if action:
self._pool.submit(
callable_util.with_exceptions_logged(
self._operator_process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
self._wrapped_operator, initial_metadata, payload, completion,
allowance)
def set_group_and_method(self, group, method):
"""See _interfaces.IngestionManager.set_group_and_method for spec."""
if self._subscription_creator is not None and not self._processing:
self._pool.submit(
callable_util.with_exceptions_logged(
self._create, _constants.INTERNAL_ERROR_LOG_MESSAGE),
self._subscription_creator, group, method)
self._processing = True
def add_local_allowance(self, allowance):
"""See _interfaces.IngestionManager.add_local_allowance for spec."""
if any((self._subscription_creator, self._wrapped_operator,)):
self._local_allowance += allowance
if not self._processing:
initial_metadata, payload, completion, allowance, moar = (
self._operator_next())
if moar:
          self._pool.submit(
              callable_util.with_exceptions_logged(
                  self._operator_process,
                  _constants.INTERNAL_ERROR_LOG_MESSAGE),
              self._wrapped_operator, initial_metadata, payload, completion,
              allowance)
def local_emissions_done(self):
self._remote_allowance = None
def advance(self, initial_metadata, payload, completion, allowance):
"""See _interfaces.IngestionManager.advance for specification."""
if self._subscription_creator is not None:
self._store_advance(initial_metadata, payload, completion, allowance)
elif self._wrapped_operator is not None:
self._operator_advance(initial_metadata, payload, completion, allowance)
def invocation_ingestion_manager(
subscription, lock, pool, termination_manager, transmission_manager,
expiration_manager, protocol_manager):
"""Creates an IngestionManager appropriate for invocation-side use.
Args:
subscription: A base.Subscription indicating the customer's interest in the
data and results from the service-side of the operation.
lock: The operation-wide lock.
pool: A thread pool in which to execute customer code.
termination_manager: The _interfaces.TerminationManager for the operation.
transmission_manager: The _interfaces.TransmissionManager for the
operation.
expiration_manager: The _interfaces.ExpirationManager for the operation.
protocol_manager: The _interfaces.ProtocolManager for the operation.
Returns:
An IngestionManager appropriate for invocation-side use.
"""
return _IngestionManager(
lock, pool, subscription, None, termination_manager, transmission_manager,
expiration_manager, protocol_manager)
def service_ingestion_manager(
servicer, operation_context, output_operator, lock, pool,
termination_manager, transmission_manager, expiration_manager,
protocol_manager):
"""Creates an IngestionManager appropriate for service-side use.
  The returned IngestionManager will require its set_group_and_method method
  to be called before its advance method may be called.
Args:
servicer: A base.Servicer for servicing the operation.
operation_context: A base.OperationContext for the operation to be passed to
the customer.
output_operator: A base.Operator for the operation to be passed to the
customer and to be called by the customer to accept operation data output
by the customer.
lock: The operation-wide lock.
pool: A thread pool in which to execute customer code.
termination_manager: The _interfaces.TerminationManager for the operation.
transmission_manager: The _interfaces.TransmissionManager for the
operation.
expiration_manager: The _interfaces.ExpirationManager for the operation.
protocol_manager: The _interfaces.ProtocolManager for the operation.
Returns:
An IngestionManager appropriate for service-side use.
"""
subscription_creator = _ServiceSubscriptionCreator(
servicer, operation_context, output_operator)
return _IngestionManager(
lock, pool, None, subscription_creator, termination_manager,
transmission_manager, expiration_manager, protocol_manager)
|
{
"content_hash": "e5f8e14f0648b4078eb771889b2eddbc",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 80,
"avg_line_length": 40.52926829268293,
"alnum_prop": 0.6975988445567792,
"repo_name": "goldenbull/grpc",
"id": "f2767c981b2e3d4c62903794d581e800dd5171f6",
"size": "18146",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/python/grpcio/grpc/framework/core/_ingestion.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9986"
},
{
"name": "C",
"bytes": "4768389"
},
{
"name": "C#",
"bytes": "1029724"
},
{
"name": "C++",
"bytes": "1274782"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "262872"
},
{
"name": "Makefile",
"bytes": "549711"
},
{
"name": "Objective-C",
"bytes": "255265"
},
{
"name": "PHP",
"bytes": "127110"
},
{
"name": "Protocol Buffer",
"bytes": "95971"
},
{
"name": "Python",
"bytes": "1578367"
},
{
"name": "Ruby",
"bytes": "460190"
},
{
"name": "Shell",
"bytes": "47892"
},
{
"name": "Swift",
"bytes": "5279"
}
],
"symlink_target": ""
}
|
def vendauth(request):
return {
        'venduser': getattr(request, 'venduser', None),
}
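# To enable this context processor, add
# 'django_vend.auth.context_processors.vendauth' to the 'context_processors'
# list in the TEMPLATES setting (path per this app's layout; the venduser
# attribute is assumed to be set on the request by the app's middleware).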
|
{
"content_hash": "3e1fc5b9014a1b58f0af06f1189d3a6b",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 49,
"avg_line_length": 23,
"alnum_prop": 0.5869565217391305,
"repo_name": "remarkablerocket/django-vend",
"id": "bed2a2a067deaa7b73e2b0d3793aea5d66b6bacc",
"size": "92",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_vend/auth/context_processors.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3154"
},
{
"name": "Python",
"bytes": "52526"
}
],
"symlink_target": ""
}
|
import os
from mi.core.versioning import version
from mi.dataset.driver.wfp_common.wfp_c_file_driver import WfpCFileDriver
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.dofst_k_wfp import DofstKWfpParser
from mi.dataset.parser.dofst_k_wfp_particles import \
DofstKWfpRecoveredDataParticle, \
DofstKWfpRecoveredMetadataParticle, \
DofstKWfpDataParticleKey, \
DataParticleType
from mi.dataset.driver.flort_kn.stc_imodem.flort_kn__stc_imodem_driver import FlortKnStcImodemDriver
from mi.core.log import get_logger
log = get_logger()
__author__ = 'jroy'
@version("0.0.4")
def parse(unused, source_file_path, particle_data_handler):
"""
This is the method called by Uframe
:param unused
:param source_file_path This is the full path and filename of the file to be parsed
:param particle_data_handler Java Object to consume the output of the parser
:return particle_data_handler
"""
# Get the flort file name from the ctd file name
head, tail = os.path.split(source_file_path)
e_tail = tail.replace('C', 'E')
if e_tail == tail:
log.error('Could not generate e file name')
return particle_data_handler
flort_source_file_path = os.path.join(head, e_tail)
# Parse the flort file to get a list of (time, pressure) tuples.
try:
with open(flort_source_file_path, 'rb') as flort_stream_handle:
driver = FlortKnStcImodemDriver(unused, flort_stream_handle, ParticleDataHandler())
e_file_time_pressure_tuples = driver.get_time_pressure_tuples()
except Exception as e:
log.error(e)
return particle_data_handler
if not e_file_time_pressure_tuples:
log.error('Time-Pressure tuples not extracted from %s', flort_source_file_path)
return particle_data_handler
# Parse the ctd file and use the e_file_time_pressure_tuples to generate
# the internal timestamps of the particles
with open(source_file_path, 'rb') as stream_handle:
driver = DofstKWfpRecoveredDriver(
unused, stream_handle, particle_data_handler, e_file_time_pressure_tuples)
driver.processFileStream()
return particle_data_handler
class DofstKWfpRecoveredDriver(WfpCFileDriver):
"""
Derived dofst_k_wfp driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
filesize = os.path.getsize(stream_handle.name)
config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.dofst_k_wfp_particles',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
'instrument_data_particle_class': DofstKWfpRecoveredDataParticle,
'metadata_particle_class': DofstKWfpRecoveredMetadataParticle
}
}
parser = DofstKWfpParser(config,
None,
stream_handle,
lambda state, ingested: None,
lambda data: None,
self._exception_callback,
filesize)
return parser
def pressure_containing_data_particle_stream(self):
return DataParticleType.RECOVERED_DATA
def pressure_containing_data_particle_field(self):
return DofstKWfpDataParticleKey.PRESSURE
|
{
"content_hash": "b961a80ff1734dd3ec94be5d9b3fe3df",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 100,
"avg_line_length": 36.628865979381445,
"alnum_prop": 0.6681677455671263,
"repo_name": "oceanobservatories/mi-instrument",
"id": "f339394dd385ece5fd8d91a0ff4a53589d32cdf1",
"size": "3635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mi/dataset/driver/dofst_k/wfp/dofst_k_wfp_recovered_driver.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "4746"
},
{
"name": "Python",
"bytes": "10221924"
}
],
"symlink_target": ""
}
|
__author__ = 'ar'
import os
import nibabel as nib
import matplotlib.pyplot as plt
import numpy as np
import keras.backend as K
from run00_common import BatcherOnImageCT3D
#########################################
def createLesionMask(pathInpNii, dirWithModel, pathOutNii=None, isDebug=False):
if not os.path.isfile(pathInpNii):
raise Exception('Cant find input file [%s]' % pathInpNii)
if not os.path.isdir(dirWithModel):
raise Exception('Cant find directory with model [%s]' % dirWithModel)
if pathOutNii is not None:
outDir = os.path.dirname(os.path.abspath(pathOutNii))
if not os.path.isdir(outDir):
raise Exception(
'Cant find output directory [%s], create directory for output file before this call' % outDir)
batcherInfer = BatcherOnImageCT3D()
batcherInfer.loadModelForInference(pathModelJson=dirWithModel, pathMeanData=dirWithModel)
if isDebug:
batcherInfer.model.summary()
ret = batcherInfer.inference([pathInpNii], batchSize=1)
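    # Keras 1.x image_dim_ordering: 'th' (Theano) is channels-first, so the
    # class/channel axis is the first dimension; the TensorFlow ordering
    # puts it last.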
if K.image_dim_ordering() == 'th':
outMsk = ret[0][1, :, :, :]
else:
outMsk = ret[0][:, :, :, 1]
if pathOutNii is None:
pathOutNii = '%s-segm.nii.gz' % pathInpNii
tmpNii = nib.load(pathInpNii)
outMskNii = nib.Nifti1Image(outMsk.copy().astype(np.float16), tmpNii.affine, header=tmpNii.header)
nib.save(outMskNii, pathOutNii)
#########################################
if __name__ == '__main__':
pathDirWithModels = '../../experimental_data/models/fcnn_ct_lesion_segm_3d_tf'
# pathDirWithModels = '../../experimental_data/models/fcnn_ct_lesion_segm_3d_th'
lstPathNifti = [
'../../experimental_data/TB_sub_1_5-resize-128x128x64_3case/data/tb1_001_1001_1_33554433-128x128x64.nii.gz',
'../../experimental_data/TB_sub_1_5-resize-128x128x64_3case/data/tb1_002_10064_1_33554433-128x128x64.nii.gz',
'../../experimental_data/TB_sub_1_5-resize-128x128x64_3case/data/tb1_003_102_2_16777217-128x128x64.nii.gz',
]
for ii, pp in enumerate(lstPathNifti):
        print ('[%d/%d] : %s' % (ii + 1, len(lstPathNifti), pp))
foutNii = '%s-msk.nii.gz' % (os.path.basename(pp))
createLesionMask(pathInpNii=pp, dirWithModel=pathDirWithModels, pathOutNii=foutNii, isDebug=True)
|
{
"content_hash": "ab33ef7f0b73b4c8ab69128e5db7e83b",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 117,
"avg_line_length": 46.795918367346935,
"alnum_prop": 0.651984300043611,
"repo_name": "gakarak/BTBDB_ImageAnalysisSubPortal",
"id": "6cbe8f8a26e3a3f13083faeb502f952273662c3e",
"size": "2335",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "experimental_code/step03_FCNN_CT_Lesion_Detection/run04_inference_on_sample_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "161"
},
{
"name": "HTML",
"bytes": "15991"
},
{
"name": "JavaScript",
"bytes": "6689"
},
{
"name": "Python",
"bytes": "458280"
},
{
"name": "Shell",
"bytes": "3378"
}
],
"symlink_target": ""
}
|
import sys
import os
import getopt
import xml.etree.ElementTree as ET
#######################################################
#
# Helper Functions
#
#######################################################
def printline(file, depth, text):
"""print line with indent"""
for i in range(0, depth):
print >> file, '\t',
print >> file, text
def NodeAsContainer(node):
container = ''
key = ''
for attr in node.attrib:
if attr in ('container_', 'cont_'):
container = node.attrib[attr]
        elif attr in ('key_',):  # trailing comma makes this a tuple, not a substring test
key = node.attrib[attr]
return container, key
def Node2Struct(node):
return node.tag[0:1].upper() + node.tag[1:]
def Node2Member(node):
return node.tag[0:1].lower() + node.tag[1:]
def NodeHasContent(node):
return node.text and node.text.strip()
def CppType(type):
if (type == 'string'):
return 'std::' + type
return type
def IsKeyword(word):
return word in ('container_', 'cont_', 'key_')
def AttrVarName(name):
    return name + '_'
def IsSeqCont(cont):
    return cont in ('vector', 'list', 'deque')
def IsMapCont(cont):
return cont in ('map', 'multimap')
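# Illustration (hypothetical schema, not shipped with this tool): an element
# like <server cont_="map" key_="name" name="string" port="int"/> yields a
# nested `struct Server` plus
#     typedef std::map<std::string, Server> ServerMap;
# while cont_="vector" would instead yield
#     typedef std::vector<Server> ServerCont;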
#######################################################
#
# Generate the c++ header file with config struct.
#
#######################################################
class HeaderGenerator(object):
"""docstring for HeaderGenerator """
def __init__(self, arg):
super(HeaderGenerator, self).__init__()
self.xmlpg = arg
def gen(self, tree, file=sys.stdout):
self.genHeader(tree.getroot(), 0, file)
def genHeader(self, node, depth, file=sys.stdout):
# print node.tag, node.attrib, node.text
container, key = NodeAsContainer(node)
printline(file, depth, 'struct %s {' % Node2Struct(node))
# memfuctions ...
printline(file, depth + 1, 'bool parse(XMLParser& xml, XMLParser::NodePtr node);')
printline(file, depth + 1, 'bool write(XMLWriter& xml, XMLWriter::NodePtr node);')
printline(file, depth + 1, 'void reset();')
printline(file, depth + 1, '')
# content
if NodeHasContent(node):
printline(file, depth + 1, '%s& operator () () { return content__; }' % CppType(node.text))
printline(file, depth + 1, 'const %s& operator () () const { return content__; }' % CppType(node.text))
printline(file, depth + 1, '%s content__;' % CppType(node.text))
printline(file, depth + 1, '')
# accessor
for attr in node.attrib:
if not IsKeyword(attr):
printline(file, depth + 1, 'const %s& %s() { return %s; } ' %
(CppType(node.attrib[attr]), attr, AttrVarName(attr)))
if len(node.attrib) > 0:
printline(file, depth + 1, '')
# members
for attr in node.attrib:
if not IsKeyword(attr):
printline(file, depth + 1, '%s %s;' % (CppType(node.attrib[attr]), AttrVarName(attr)))
for child in node:
self.genHeader(child, depth + 1, file)
printline(file, depth, '};')
if not depth == 0:
if len(container) > 0:
if IsMapCont(container):
if not node.attrib.has_key(key):
print >> sys.stderr, 'node:%s has no attrib:%s' % (node.tag, key)
return 0
printline(file, depth, 'typedef std::%s<%s, %s> %sMap;' % (
container, CppType(node.attrib[key]), Node2Struct(node), Node2Struct(node)))
printline(file, depth,
'typedef %sMap::iterator %sMapIter;' % (Node2Struct(node), Node2Struct(node)))
printline(file, depth,
'typedef %sMap::const_iterator %sMapConstIter;' % (Node2Struct(node), Node2Struct(node)))
printline(file, depth, '%sMap %s;' % (Node2Struct(node), Node2Member(node)))
elif IsSeqCont(container):
printline(file, depth,
'typedef std::%s<%s> %sCont;' % (container, Node2Struct(node), Node2Struct(node)))
printline(file, depth,
'typedef %sCont::iterator %sContIter;' % (Node2Struct(node), Node2Struct(node)))
printline(file, depth, 'typedef %sCont::const_iterator %sContConstIter;' % (
Node2Struct(node), Node2Struct(node)))
printline(file, depth, '%sCont %s;' % (Node2Struct(node), Node2Member(node)))
else:
print >> sys.stderr, 'node:%s container is invalid:%s' % (node.tag, container)
return 0
else:
printline(file, depth, '%s %s;' % (Node2Struct(node), Node2Member(node)))
printline(file, depth, '')
#######################################################
#
# Generate the c++ cpp file with implementation.
#
#######################################################
class CppGenerator(object):
"""docstring for CppGenerator """
def __init__(self, arg):
super(CppGenerator, self).__init__()
self.xmlpg = arg
def inline(self):
if self.xmlpg.inline:
return "inline "
else:
return ""
def gen(self, tree, file=sys.stdout):
self.gen_parse(Node2Struct(tree.getroot()), tree.getroot(), file)
self.gen_writer(Node2Struct(tree.getroot()), tree.getroot(), file)
self.gen_reset(Node2Struct(tree.getroot()), tree.getroot(), file)
def gen_parse(self, prefix, node, file=sys.stdout):
printline(file, 0, '%sbool %s::parse(XMLParser& xml, XMLParser::NodePtr node) {' % (self.inline(), prefix))
printline(file, 1, 'if (!node) return false;')
if NodeHasContent(node):
printline(file, 1, 'content__ = xml.getNodeContent<%s>(node);' % (CppType(node.text)))
for attr in node.attrib:
if not IsKeyword(attr):
printline(file, 1, '%s = xml.getNodeProp<%s>(node, "%s");' % (
AttrVarName(attr), CppType(node.attrib[attr]), attr))
for child in node:
container, key = NodeAsContainer(child)
if len(container) > 0:
printline(file, 1, '')
if IsMapCont(container):
printline(file, 1,
'XMLParser::NodePtr %s_node = xml.getChildNode(node, "%s");' % (child.tag, child.tag))
printline(file, 1, 'while (%s_node) {' % child.tag)
printline(file, 2, '%s item;' % Node2Struct(child))
printline(file, 2, 'if (item.parse(xml, %s_node))' % child.tag)
printline(file, 3,
'%s.insert(std::make_pair(item.%s, item));' % (Node2Member(child), AttrVarName(key)))
printline(file, 2, '%s_node = xml.getNextNode(%s_node, "%s");' % (child.tag, child.tag, child.tag))
printline(file, 1, '}')
elif IsSeqCont(container):
printline(file, 1,
'XMLParser::NodePtr %s_node = xml.getChildNode(node, "%s");' % (child.tag, child.tag))
printline(file, 1, 'while (%s_node) {' % child.tag)
printline(file, 2, '%s item;' % Node2Struct(child))
printline(file, 2, 'if (item.parse(xml, %s_node))' % child.tag)
printline(file, 3, '%s.push_back(item);' % Node2Member(child))
printline(file, 2, '%s_node = xml.getNextNode(%s_node, "%s");' % (child.tag, child.tag, child.tag))
printline(file, 1, '}')
else:
return 0
else:
printline(file, 1, '%s.parse(xml, xml.getChildNode(node, "%s"));' % (Node2Member(child), child.tag))
printline(file, 1, 'return true;')
printline(file, 0, '}')
printline(file, 0, '')
for child in node:
self.gen_parse('%s::%s' % (prefix, Node2Struct(child)), child, file)
def gen_writer(self, prefix, node, file=sys.stdout):
printline(file, 0, '%sbool %s::write(XMLWriter& xml, XMLWriter::NodePtr node) {' % (self.inline(), prefix))
printline(file, 1, 'if (!node) return false;')
for attr in node.attrib:
if not IsKeyword(attr):
printline(file, 1, 'xml.createNodeProp(node, "%s", %s);' % (attr, AttrVarName(attr)))
for child in node:
container, key = NodeAsContainer(child)
printline(file, 1, '')
if len(container) > 0:
if IsMapCont(container):
printline(file, 1, 'for (%sMapIter it = %s.begin(); it != %s.end(); ++ it) {' %
(Node2Struct(child), Node2Member(child), Node2Member(child)))
if NodeHasContent(child):
printline(file, 2,
'XMLWriter::NodePtr subnode = xml.createChildNode(node, "%s", tw_cast<std::string>(it->second.content__).c_str());' % child.tag)
else:
printline(file, 2,
'XMLWriter::NodePtr subnode = xml.createChildNode(node, "%s","");' % child.tag)
printline(file, 2, 'if (subnode)')
printline(file, 3, 'it->second.write(xml, subnode);')
printline(file, 1, '}')
elif IsSeqCont(container):
printline(file, 1, 'for (%sContIter it = %s.begin(); it != %s.end(); ++ it) {' %
(Node2Struct(child), Node2Member(child), Node2Member(child)))
if NodeHasContent(child):
printline(file, 2,
'XMLWriter::NodePtr subnode = xml.createChildNode(node, "%s", tw_cast<std::string>(it->content__).c_str());' % child.tag)
else:
printline(file, 2,
'XMLWriter::NodePtr subnode = xml.createChildNode(node, "%s","");' % child.tag)
printline(file, 2, 'if (subnode)')
printline(file, 3, 'it->write(xml, subnode);')
printline(file, 1, '}')
else:
return 0
else:
if NodeHasContent(child):
printline(file, 1,
'XMLWriter::NodePtr %s_node = xml.createChildNode(node, "%s", tw_cast<std::string>(%s.content__).c_str());' % (
child.tag, child.tag, Node2Member(child)))
else:
printline(file, 1, 'XMLWriter::NodePtr %s_node = xml.createChildNode(node, "%s", "");' % (
child.tag, child.tag))
printline(file, 1, 'if (%s_node)' % child.tag)
printline(file, 2, '%s.write(xml, %s_node);' % (Node2Member(child), child.tag))
printline(file, 1, 'return true;')
printline(file, 0, '}')
printline(file, 0, '')
for child in node:
self.gen_writer('%s::%s' % (prefix, Node2Struct(child)), child, file)
def gen_reset(self, prefix, node, file=sys.stdout):
printline(file, 0, '%svoid %s::reset() {' % (self.inline(), prefix))
if NodeHasContent(node):
printline(file, 1, 'content__ = %s();' % (CppType(node.text)))
for attr in node.attrib:
if not IsKeyword(attr):
printline(file, 1, '%s = %s();' % (AttrVarName(attr), CppType(node.attrib[attr])))
for child in node:
container, key = NodeAsContainer(child)
if len(container) > 0:
if IsMapCont(container):
printline(file, 1, '%s.clear();' % (Node2Member(child)))
elif IsSeqCont(container):
printline(file, 1, '%s.clear();' % (Node2Member(child)))
else:
return 0
else:
printline(file, 1, '%s.reset();' % (Node2Member(child)))
printline(file, 0, '}')
printline(file, 0, '')
for child in node:
self.gen_reset('%s::%s' % (prefix, Node2Struct(child)), child, file)
#######################################################
#
# Generator
#
#######################################################
class XMLParserGenerator(object):
"""docstring for XMLParserGenerator """
def __init__(self):
super(XMLParserGenerator, self).__init__()
self.header = 0
self.cpp = 0
self.inline = 0
self.outputfile = ""
self.schemas = []
def schemaFileName(self, schema):
return os.path.splitext(os.path.basename(schema))[0]
def outputFileName(self, schema, ext):
return os.path.join("path...", self.schemaFileName(schema) + '.' + ext)
def run(self):
# print self.headerfile, self.cppfile, self.inline, self.schemas
for i in range(0, len(self.schemas)):
self.generate(self.schemas[i])
def generate(self, filename):
try:
tree = ET.ElementTree(file=filename)
file = sys.stdout
if (self.outputfile):
file = open(self.outputfile, 'w+')
hg = HeaderGenerator(self)
cg = CppGenerator(self)
if self.header:
hg.gen(tree, file)
if self.cpp:
cg.gen(tree, file)
except IOError, arg:
print arg
#######################################################
#
# Main
#
#######################################################
xmlpg = XMLParserGenerator()
def Usage():
print "usage:", sys.argv[0], "[-i --header --cpp -o file] schema1.xml schema2.xml ..."
print "options: "
print " -h --help help"
print " -o file --output=file output to the file"
print " -a --header generate c++ struct"
print " -b --cpp generate c++ functions"
print " -i --inline member function is inlined."
if len(sys.argv) == 1:
Usage()
sys.exit()
try:
opts, args = getopt.getopt(sys.argv[1:], 'hiabo:',
['help', 'inline', 'header', 'cpp', 'output='])
except getopt.GetoptError:
Usage()
sys.exit()
for o, a in opts:
if o in ("-h", "--help"):
Usage()
sys.exit()
elif o in ("-a", "--header"):
xmlpg.header = 1
elif o in ("-b", "--cpp"):
xmlpg.cpp = 1
elif o in ("-i", "--inline"):
xmlpg.inline = 1
elif o in ("-o", "--output"):
xmlpg.outputfile = a
xmlpg.schemas = args
if len(xmlpg.schemas) > 0:
xmlpg.run()
else:
Usage()
|
{
"content_hash": "730cb6d5a12ace655fc120e0f31e6bce",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 162,
"avg_line_length": 37.1318407960199,
"alnum_prop": 0.49581295638775374,
"repo_name": "david-pp/tinyworld",
"id": "49cdcc3ee741fed4494e2f740e5edc07f95e7331",
"size": "15160",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/xmlpg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "37375"
},
{
"name": "C++",
"bytes": "1124023"
},
{
"name": "CMake",
"bytes": "6950"
},
{
"name": "Makefile",
"bytes": "14636"
},
{
"name": "Python",
"bytes": "18379"
},
{
"name": "Shell",
"bytes": "669"
}
],
"symlink_target": ""
}
|
from p4_core import *
import p4_imperatives
import p4_headers
import exclusive_conditions
import os
import ast
import inspect
import logging
from p4_hlir.util.OrderedSet import OrderedSet
from collections import OrderedDict, defaultdict
p4_match_type = p4_create_enum("p4_match_type", [
"P4_MATCH_EXACT",
"P4_MATCH_TERNARY",
"P4_MATCH_LPM",
"P4_MATCH_RANGE",
"P4_MATCH_VALID",
])
class p4_node(p4_object):
"""
TODO: docstring
"""
def __init__(self, hlir, name, **kwargs):
p4_object.__init__(self, hlir, name, **kwargs)
if not self.valid_obj:
return
self.control_flow_parent = None
self.next_ = OrderedDict()
# self.prev = OrderedSet()
self.conditional_barrier = None
        self.dependencies_to = {} # tables on which this table has a dependency
        self.dependencies_for = {} # tables for which this table is a dependency
hlir.p4_nodes[name] = self
def depends_on_step(self, node, visited):
assert isinstance(node, p4_node)
visited.add(self)
for n in self.dependencies_to:
if n == node: return True
if not n in visited:
if n.depends_on_step(node, visited): return True
return False
def depends_on(self, node):
return self.depends_on_step(node, set())
class p4_conditional_node (p4_node):
"""
TODO: docstring
"""
def __init__ (self, hlir, condition):
name = "_condition_"+str(len(hlir.p4_conditional_nodes))
p4_node.__init__(self, hlir, name)
if not self.valid_obj:
return
self.condition = condition
hlir.p4_conditional_nodes[self.name] = self
def build(self, hlir):
pass
class p4_table (p4_node):
"""
TODO
"""
required_attributes = ["name", "match_fields", "actions", "action_profile"]
allowed_attributes = required_attributes + ["doc", "min_size", "max_size", "size", "support_timeout"]
def __init__ (self, hlir, name, **kwargs):
p4_node.__init__(self, hlir, name, **kwargs)
if not self.valid_obj:
return
if not hasattr(self, "support_timeout"):
self.support_timeout = False
# references to attached stateful memories
self.attached_counters = []
self.attached_meters = []
self.attached_registers = []
hlir.p4_tables[self.name] = self
def build_fields (self, hlir):
for idx, match in enumerate(self.match_fields):
match_field, match_type, match_mask = match
if "." in match_field:
match_field = hlir.p4_fields[match_field]
else:
match_field = hlir.p4_header_instances[match_field]
self.match_fields[idx] = (match_field, match_type, match_mask)
def build_actions (self, hlir):
if self.action_profile:
self.action_profile = hlir.p4_action_profiles[self.action_profile]
self.actions = self.action_profile.actions
else:
for idx, action in enumerate(self.actions):
self.actions[idx] = hlir.p4_actions[action]
for idx, action in enumerate(self.actions):
self.next_[self.actions[idx]] = None
def build (self, hlir):
self.build_fields(hlir)
self.build_actions(hlir)
class p4_action_profile (p4_object):
"""
TODO
"""
required_attributes = ["name", "actions"]
allowed_attributes = required_attributes + ["doc", "size", "selector"]
def __init__ (self, hlir, name, **kwargs):
p4_object.__init__(self, hlir, name, **kwargs)
if not self.valid_obj:
return
if not hasattr(self, "size"):
self.size = None
if not hasattr(self, "selector"):
self.selector = None
hlir.p4_action_profiles[self.name] = self
def build_actions (self, hlir):
for idx, action in enumerate(self.actions):
self.actions[idx] = hlir.p4_actions[action]
def build (self, hlir):
if self.selector:
self.selector = hlir.p4_action_selectors[self.selector]
self.build_actions(hlir)
class p4_action_selector (p4_object):
"""
TODO
"""
required_attributes = ["name", "selection_key"]
allowed_attributes = required_attributes + ["selection_mode", "selection_type"]
def __init__ (self, hlir, name, **kwargs):
p4_object.__init__(self, hlir, name, **kwargs)
if not self.valid_obj:
return
if not hasattr(self, "selection_mode"):
self.size = None
if not hasattr(self, "selection_type"):
self.selector = None
hlir.p4_action_selectors[self.name] = self
def build (self, hlir):
self.selection_key = hlir.p4_field_list_calculations[self.selection_key]
def p4_control_flow_to_table_graph(hlir, call_sequence):
visited = set()
return _p4_control_flow_to_table_graph(hlir, call_sequence,
None, None, visited)
def _p4_control_flow_to_table_graph(hlir,
call_sequence, parent_fn,
conditional_barrier,
visited):
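    # Walks the imperative call sequence recursively, wiring up p4_node.next_
    # edges as it goes; returns (entry node, list of exit nodes whose open
    # next_ edges still need to be connected to whatever follows).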
if type(call_sequence) is p4_imperatives.p4_control_flow:
parent_fn = call_sequence
call_sequence = parent_fn.call_sequence
entry = None
parents = []
for call in call_sequence:
if type(call) is p4_table:
if call in visited:
# TODO: improve
raise p4_compiler_msg (
"Table '" + call.name + "' is invoked multiple times."
)
visited.add(call)
call_entry = call
next_parents = [call_entry]
call_entry.control_flow_parent = parent_fn.name
call_entry.conditional_barrier = conditional_barrier
elif type(call) is tuple and len(call) == 3:
paths = {True: None, False: None}
next_parents = []
call_entry = p4_conditional_node (hlir, call[0])
call_entry.control_flow_parent = parent_fn.name
call_entry.conditional_barrier = conditional_barrier
if len(call[1]) > 0:
true_entry, true_exit = _p4_control_flow_to_table_graph(
hlir,
call[1],
parent_fn,
(call_entry,True),
visited
)
paths[True] = true_entry
# true_entry.prev.add(call_entry)
next_parents += true_exit
if len(call[2]) > 0:
false_entry, false_exit = _p4_control_flow_to_table_graph(
hlir,
call[2],
parent_fn,
(call_entry,False),
visited
)
paths[False] = false_entry
# false_entry.prev.add(call_entry)
next_parents += false_exit
call_entry.next_ = paths
next_parents.append(call_entry)
elif type(call) is tuple and len(call) == 2:
next_parents = []
call_entry = call[0]
call_entry.control_flow_parent = parent_fn.name
call_entry.conditional_barrier = conditional_barrier
case_list = call[1]
hit_miss_switch = False
for case in case_list:
if case[0] in {"hit", "miss"}:
hit_miss_switch = True
if hit_miss_switch:
paths ={"hit": None, "miss": None}
else:
paths = OrderedDict(call_entry.next_)
if hit_miss_switch:
for case in case_list:
assert(case[0] in {"hit", "miss"})
if not case[1]: continue
cb = (call_entry, case[0])
hit_miss_entry, hit_miss_exit = _p4_control_flow_to_table_graph(
hlir,
case[1],
parent_fn,
cb,
visited
)
paths[case[0]] = hit_miss_entry
# hit_miss_entry.prev.add(call_entry)
next_parents += hit_miss_exit
else:
actions = set()
for case in case_list:
if case[0] == "default": continue
                    assert(type(case[0]) is set)
actions.update(case[0])
if not case[1]: continue
cb = (call_entry, case[0])
action_entry, action_exit = _p4_control_flow_to_table_graph(
hlir,
case[1],
parent_fn,
cb,
visited
)
# action_entry.prev.add(call_entry)
next_parents += action_exit
for action in case[0]:
assert(isinstance(action, p4_imperatives.p4_action))
paths[action] = action_entry
for case in case_list:
if case[0] != "default": continue
if not case[1]: break
remaining_actions = set(paths.keys()) - actions
cb = (call_entry, remaining_actions)
action_entry, action_exit = _p4_control_flow_to_table_graph(
hlir,
case[1],
parent_fn,
cb,
visited
)
# action_entry.prev.add(call_entry)
next_parents += action_exit
for action in remaining_actions:
paths[action] = action_entry
break
call_entry.next_ = paths
next_parents.append(call_entry)
elif type(call) is p4_imperatives.p4_control_flow:
call_entry, next_parents = _p4_control_flow_to_table_graph(
hlir,
call,
None,
conditional_barrier,
visited
)
for parent in parents:
for label, edge in parent.next_.items():
if edge == None:
parent.next_[label] = call_entry
# call_entry.prev.add(parent)
if not entry:
entry = call_entry
if next_parents:
parents = next_parents
return entry, parents
# TODO: write something more generic
def _find_modified_hdrs(action_set):
modified_hdrs = set()
for action in action_set:
for call in action.flat_call_sequence:
primitive_name = call[0].name
args = call[1]
if primitive_name == "copy_header":
modified_hdrs.add(args[0])
elif primitive_name == "add_header":
modified_hdrs.add(args[0])
elif primitive_name == "remove_header":
modified_hdrs.add(args[0])
return modified_hdrs
def _get_all_conditions(node, conditions):
if not node: return conditions
if not node.conditional_barrier: return conditions
cb = node.conditional_barrier
try:
condition = cb[0].condition
        conditions.append((condition, cb[1]))
except AttributeError:
pass
return _get_all_conditions(cb[0], conditions)
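# Walk the table graph and record, per node, the set of headers that may have
# been added, removed or copied on some path to it (cached in _modified_hdrs,
# later consumed by optimize_table_graph to validate path conditions).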
def _set_modified_hdrs(hlir, entry_point, modified_hdrs):
if not entry_point: return
try:
if entry_point._modified_hdrs.issuperset(modified_hdrs):
return
except AttributeError:
pass
for a, nt in entry_point.next_.items():
if a in {True, False}:
full_modified_hdrs = modified_hdrs
elif a in {"hit", "miss"}:
            full_modified_hdrs = modified_hdrs | _find_modified_hdrs(set(entry_point.actions))
        else:
            full_modified_hdrs = modified_hdrs | _find_modified_hdrs(set([a]))
        try:
            entry_point._modified_hdrs |= full_modified_hdrs
except AttributeError:
entry_point._modified_hdrs = full_modified_hdrs
_set_modified_hdrs(hlir, nt, full_modified_hdrs)
def _find_unused_nodes_step(entry_point):
if not entry_point: return
if entry_point._mark_used: return
entry_point._mark_used = True
for a, nt in entry_point.next_.items():
_find_unused_nodes_step(nt)
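# Compute the "conditional barrier" of 'node': the (gating node, edge value)
# pair that every path from entry_point to 'node' must pass through, True if
# the node is always reached, False if never. 'visited' memoizes results.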
def _find_conditional_barrier(entry_point, node, visited):
def sorted_tuple_from_set(s):
return tuple(sorted(list(s)))
if entry_point in visited: return visited[entry_point]
if entry_point == node:
visited[entry_point] = True
return True
if entry_point is None:
return False
possible_next = set(entry_point.next_.values())
if len(possible_next) == 1:
r = _find_conditional_barrier(possible_next.pop(), node, visited)
visited[entry_point] = r
return r
results = {}
for nt in possible_next:
results[nt] = _find_conditional_barrier(nt, node, visited)
diff_results = set(results.values())
if len(diff_results) == 1:
r = diff_results.pop()
visited[entry_point] = r
return r
if {True, False} <= diff_results:
assert({True, False} == diff_results)
cond = set()
for nt, v in results.items():
if not v: continue
for a, n in entry_point.next_.items():
if n == nt: cond.add(a)
if len(cond) == 1: cond = cond.pop()
else: cond = sorted_tuple_from_set(cond)
r = (entry_point, cond)
visited[entry_point] = r
return r
for r in diff_results:
if type(r) is not bool:
visited[entry_point] = r
return r
def _update_conditional_barriers(hlir):
for _, node in hlir.p4_nodes.items():
if not node._mark_used: continue
node.conditional_barrier = None
for ingress_ptr in hlir.p4_ingress_ptr.keys():
if not node.conditional_barrier:
node.conditional_barrier = _find_conditional_barrier(
ingress_ptr, node, {}
)
if hlir.p4_egress_ptr and not node.conditional_barrier:
node.conditional_barrier = _find_conditional_barrier(
hlir.p4_egress_ptr, node, {}
)
for _, node in hlir.p4_nodes.items():
if not node._mark_used: continue
if node.conditional_barrier == True:
node.conditional_barrier = None
# print node, node.conditional_barrier
# for _, node in hlir.p4_nodes.items():
# if not node._mark_used: continue
# cb = node.conditional_barrier
# while cb is not None and not cb[0]._mark_used:
# cb = cb[0].conditional_barrier
# if cb is not None:
# node.conditional_barrier = cb
def _remove_unused_conditions(hlir):
change = True
while change:
change = False
conditions_used = set()
for _, node in hlir.p4_nodes.items():
if not node._mark_used: continue
cb = node.conditional_barrier
if cb and isinstance(cb[0], p4_conditional_node):
conditions_used.add(cb[0])
removed_conditions = set()
for _, p4_node in hlir.p4_nodes.items():
if not p4_node._mark_used: continue
for a, nt in p4_node.next_.items():
if not nt: continue
assert(nt._mark_used)
if not isinstance(nt, p4_conditional_node): continue
if nt.next_[True] == nt.next_[False]:
assert(nt not in conditions_used)
p4_node.next_[a] = nt.next_[True]
removed_conditions.add(nt)
assert(not (conditions_used & removed_conditions))
for c in removed_conditions:
print "removing useless condition:", c
# print c.next_
c._mark_used = False
change = True
def _purge_unused_nodes(hlir):
for _, node in hlir.p4_nodes.items():
node._mark_used = False
for ingress_ptr in hlir.p4_ingress_ptr:
_find_unused_nodes_step(ingress_ptr)
if hlir.p4_egress_ptr:
_find_unused_nodes_step(hlir.p4_egress_ptr)
_update_conditional_barriers(hlir)
_remove_unused_conditions(hlir)
for _, node in hlir.p4_nodes.items():
if not node._mark_used:
print node, "is unused, removing it"
name = node.name
del hlir.p4_nodes[name]
try:
del hlir.p4_tables[name]
except KeyError:
pass
try:
del hlir.p4_conditional_nodes[name]
except KeyError:
pass
else:
del node._mark_used
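# Statically simplify the table graph: conditions whose outcome is implied by
# conditions already evaluated on the path (and not invalidated by header
# modifications along the way) are folded, then unreachable nodes are purged.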
def optimize_table_graph(hlir):
for ingress_ptr in hlir.p4_ingress_ptr:
_set_modified_hdrs(hlir, ingress_ptr, set())
if hlir.p4_egress_ptr:
_set_modified_hdrs(hlir, hlir.p4_egress_ptr, set())
xconds = exclusive_conditions.Solver(hlir)
change = True
# I am being lazy, and this is all tentative anyway
while change:
change = False
for _, p4_node in hlir.p4_nodes.items():
for a, nt in p4_node.next_.items():
conditions = _get_all_conditions(p4_node, [])
if a in {True, False}:
conditions += [(p4_node.condition, a)]
if isinstance(nt, p4_conditional_node):
cond_value = xconds.evaluate_condition(
nt._modified_hdrs,
nt.condition,
conditions
)
if cond_value is not None:
p4_node.next_[a] = nt.next_[cond_value]
change = True
for _, p4_node in hlir.p4_nodes.items():
del p4_node._modified_hdrs
_purge_unused_nodes(hlir)
# for _, p4_node in hlir.p4_nodes.items():
# print p4_node, p4_node.conditional_barrier
def print_graph(entry, tab = ""):
for k, next_table in entry.next_.items():
print tab, entry, "---", k, "--->", next_table
if next_table: print_graph(next_table, tab + " ")
|
{
"content_hash": "9cec306396420ae0b5f0fb7ba8cde99c",
"timestamp": "",
"source": "github",
"line_count": 562,
"max_line_length": 105,
"avg_line_length": 33.26868327402135,
"alnum_prop": 0.5264480932769963,
"repo_name": "hanw/p4-hlir",
"id": "ba2140f640f6484d6ae50319a6021f9da67183f6",
"size": "19295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "p4_hlir/hlir/p4_tables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "361669"
}
],
"symlink_target": ""
}
|
"""
This is the client process that distributes n simulators
"""
import sys
import os
import re
import subprocess
import time
import signal
boost_path = "/afs/csail/group/carbon/tools/boost_1_38_0/stage/lib"
# spawn_job:
# start up a command across multiple machines
# returns an object that can be passed to poll_job()
def spawn_job(machine_list, command, working_dir = os.getcwd()):
procs = {}
# spawn
for i in range(0,len(machine_list)):
exec_command = "export CARBON_PROCESS_INDEX=" + str(i) + "; " + \
"export LD_LIBRARY_PATH=\"" + boost_path + "\"; " + \
command
if (machine_list[i] != "localhost") and (machine_list[i] != r'127.0.0.1'):
exec_command = exec_command.replace("\"","\\\"")
exec_command = "ssh -x " + machine_list[i] + \
" \"cd " + working_dir + "; " + \
exec_command + "\""
print "[spawn.py] Starting process: " + str(i) + " : " + exec_command
procs[i] = subprocess.Popen(exec_command, shell=True)
return procs
# poll_job:
# check if a job has finished
# returns the returnCode, or None
def poll_job(procs):
# check status
returnCode = None
for i in range(0,len(procs)):
returnCode = procs[i].poll()
if returnCode != None:
break
# process still running
if returnCode == None:
return None
# process terminated, so wait or kill remaining
for i in range(0,len(procs)):
returnCode2 = procs[i].poll()
if returnCode2 == None:
if returnCode == 0:
returnCode = procs[i].wait()
else:
os.kill(procs[i].pid, signal.SIGKILL)
print "[spawn.py] Exited with return code: %d" % returnCode
return returnCode
# wait_job:
# wait on a job to finish
def wait_job(procs):
while True:
ret = poll_job(procs)
if ret != None:
return ret
time.sleep(0.5)
# helper functions
# read process list from a config file
def load_process_list_from_file(filename):
process_list = []
    config = open(filename, "r").readlines()
found_process_map = False
for line in config:
if found_process_map:
if line == "\n":
                break
# extract the process from the line
hostname = re.search("\"(.*)\"", line).group(1)
process_list.append(hostname)
if line == "[process_map]\n":
found_process_map = True
return process_list
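# Expected [process_map] section in the config file (illustrative snippet;
# the hostnames are placeholders):
#
#   [process_map]
#   process0 = "machine-a.example.com"
#   process1 = "localhost"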
# get sim root from environment variable, or use pwd
def get_sim_root():
sim_root = os.environ.get('GRAPHITE_HOME')
if sim_root == None:
pwd = os.environ.get('PWD')
assert(pwd != None)
print "[spawn.py] 'GRAPHITE_HOME' undefined. Setting 'GRAPHITE_HOME' to '" + pwd
return pwd
return sim_root
# main -- if this is used as standalone script
if __name__=="__main__":
num_procs = int(sys.argv[1])
config_filename = sys.argv[2]
command = " ".join(sys.argv[3:])
process_list = load_process_list_from_file(config_filename)
j = spawn_job(process_list[0:num_procs],
command,
get_sim_root())
sys.exit(wait_job(j))
|
{
"content_hash": "1083a4835e12a652d0ac2029ff7417f5",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 88,
"avg_line_length": 26.733870967741936,
"alnum_prop": 0.5656108597285068,
"repo_name": "mit-carbon/Graphite-Cycle-Level",
"id": "d87402e5e99bfaa167c0d18259213e2987a0e099",
"size": "3338",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/spawn.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "524506"
},
{
"name": "C++",
"bytes": "1622279"
},
{
"name": "Python",
"bytes": "2668"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
}
|
import json
import math
from django.shortcuts import render
from django.contrib import messages
from django.template.loader import render_to_string
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, JsonResponse, HttpResponseRedirect, HttpResponseNotFound
from django.conf import settings
from starthinker.tool.colab import recipe_to_colab
from starthinker.tool.example import recipe_to_python
from starthinker_ui.account.decorators import permission_admin
from starthinker_ui.recipe.forms_script import ScriptForm
from starthinker_ui.recipe.models import Recipe, utc_milliseconds
from starthinker_ui.recipe.dag import script_to_dag
from starthinker_ui.recipe.log import log_manager_scale
from starthinker_ui.recipe.compute import group_instances_list, group_instances_resize
def recipe_list(request):
recipes = {
'running': [],
'paused': [],
'finished': [],
'errors': [],
'manual': []
}
if request.user.is_authenticated:
for recipe in request.user.recipe_set.all():
if recipe.manual:
recipes['manual'].append(recipe)
elif not recipe.active or recipe.get_log()['status'] == 'NEW':
recipes['paused'].append(recipe)
elif recipe.get_log()['status'] == 'FINISHED':
recipes['finished'].append(recipe)
elif recipe.get_log()['status'] == 'ERROR':
recipes['errors'].append(recipe)
else:
recipes['running'].append(recipe)
return render(request, 'recipe/recipe_list.html', {'recipes': recipes})
@permission_admin()
def recipe_edit(request, pk=None, manual=False):
if pk:
recipe = request.user.recipe_set.get(pk=pk)
manual = recipe.manual
else:
recipe = None
if request.method == 'POST':
form_script = ScriptForm(manual, recipe, request.user, request.POST)
if form_script.is_valid():
form_script.save()
messages.success(request, 'Recipe updated.')
if request.POST.get('save_and_run') == '1':
return recipe_run(request, form_script.instance.pk)
else:
return HttpResponseRedirect(form_script.instance.link_edit())
else:
messages.error(
request,
'Recipe Script Errors: %s' % ' '.join(form_script.get_errors())
)
else:
form_script = ScriptForm(
manual,
recipe,
request.user,
scripts=request.GET.get('scripts', '')
)
return render(request, 'recipe/recipe_edit.html', {
'form_script': form_script,
'manual': manual
})
@permission_admin()
def recipe_manual(request, pk=None):
return recipe_edit(request, pk=None, manual=True)
@permission_admin()
def recipe_delete(request, pk=None):
request.user.recipe_set.filter(pk=pk).delete()
messages.success(request, 'Recipe deleted.')
return HttpResponseRedirect('/')
@permission_admin()
def recipe_run(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
if recipe.is_running():
recipe.force()
messages.success(request, 'Recipe dispatched, will run once in progress task completes.')
else:
recipe.force()
autoscale(request)
messages.success(request, 'Recipe dispatched, give it a few minutes to start.')
except Recipe.DoesNotExist as e:
messages.error(request, str(e))
return HttpResponseRedirect('/recipe/edit/%s/' % pk)
@permission_admin()
def recipe_cancel(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
if recipe.is_running():
messages.success(request, 'Recipe cancelled, active task will stop shortly.')
else:
messages.success(request, 'Recipe cancelled, no tasks are running.')
recipe.cancel()
except Recipe.DoesNotExist as e:
messages.error(request, str(e))
return HttpResponseRedirect('/recipe/edit/%s/' % pk)
@permission_admin()
def recipe_status(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
log = recipe.get_log()
log['report'] = render_to_string('recipe/log.html', {'log': log})
except Recipe.DoesNotExist:
log = {}
return JsonResponse(log)
@csrf_exempt
def recipe_start(request):
try:
recipe = Recipe.objects.get(reference=request.POST.get('reference', 'invalid'))
if recipe.is_running():
response = HttpResponse('RECIPE INTERRUPTED', content_type='text/plain')
else:
response = HttpResponse('RECIPE STARTED', content_type='text/plain')
recipe.force()
except Recipe.DoesNotExist as e:
response = HttpResponseNotFound('RECIPE NOT FOUND', content_type='text/plain')
return response
@csrf_exempt
def recipe_stop(request):
try:
recipe = Recipe.objects.get(reference=request.POST.get('reference', 'invalid'))
if recipe.is_running():
response = HttpResponse('RECIPE INTERRUPTED', content_type='text/plain')
else:
response = HttpResponse('RECIPE STOPPED', content_type='text/plain')
recipe.cancel()
except Recipe.DoesNotExist as e:
response = HttpResponseNotFound('RECIPE NOT FOUND', content_type='text/plain')
return response
@permission_admin()
def recipe_download(request, pk):
return render(request, 'recipe/download.html', {'recipe': pk})
@permission_admin()
def recipe_json(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
data = recipe.get_json(credentials=False)
response = HttpResponse(json.dumps(data, indent=2), content_type='application/json')
response['Content-Disposition'] = 'attachment; filename=recipe_%s.json' % recipe.slug()
return response
except Exception as e:
recipe = None
messages.error(request, str(e))
return HttpResponseRedirect('/recipe/download/%s/' % pk)
@permission_admin()
def recipe_colab(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
data = recipe_to_colab(recipe.slug(), '', [], recipe.get_json(credentials=False)['tasks'])
response = HttpResponse(data, content_type='application/vnd.jupyter')
response['Content-Disposition'] = 'attachment; filename=colab_%s.ipynb' % recipe.slug()
return response
except Exception as e:
messages.error(request, str(e))
raise (e)
return HttpResponseRedirect('/recipe/download/%s/' % pk)
@permission_admin()
def recipe_airflow(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
data = script_to_dag(recipe.slug(), recipe.name, '', [], recipe.get_json(credentials=False)['tasks'])
response = HttpResponse(data, content_type='text/x-python')
response['Content-Disposition'] = 'attachment; filename=airflow_%s.py' % recipe.slug()
return response
except Exception as e:
messages.error(request, str(e))
raise (e)
return HttpResponseRedirect('/recipe/download/%s/' % pk)
@permission_admin()
def recipe_python(request, pk):
try:
recipe = request.user.recipe_set.get(pk=pk)
data = recipe_to_python(recipe.slug(), '', [], recipe.get_json(credentials=False)['tasks'])
response = HttpResponse(data, content_type='text/x-python')
response['Content-Disposition'] = 'attachment; filename=python_%s.py' % recipe.slug()
return response
except Exception as e:
messages.error(request, str(e))
raise (e)
return HttpResponseRedirect('/recipe/download/%s/' % pk)
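# Worker sizing: required = ceil(due jobs / WORKER_JOBS), capped at WORKER_MAX.
# Hypothetical example: 10 due jobs with WORKER_JOBS = 4 -> ceil(10 / 4) = 3
# workers, so a resize is requested if fewer than 3 are already provisioning
# or running.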
def autoscale(request):
scale = {
'jobs': 0,
'workers': {
'jobs': settings.WORKER_JOBS,
'max': settings.WORKER_MAX,
'existing': 0,
'required': 0
}
}
# get task and worker list
scale['jobs'] = Recipe.objects.filter(
active=True,
job_utm__lt=utc_milliseconds()
).exclude(job_utm=0).count()
scale['workers']['existing'] = 3 if request == 'TEST' else sum(
1 for instance in group_instances_list(('PROVISIONING', 'STAGING', 'RUNNING'))
)
scale['workers']['required'] = min(
settings.WORKER_MAX, math.ceil(scale['jobs'] / scale['workers']['jobs'])
)
if request != 'TEST' and scale['workers']['required'] > scale['workers']['existing']:
group_instances_resize(scale['workers']['required'])
# log the scaling operation
log_manager_scale(scale)
return JsonResponse(scale)
|
{
"content_hash": "a98d11b82f23aa81696af20c5a618c60",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 105,
"avg_line_length": 31.260700389105057,
"alnum_prop": 0.6844660194174758,
"repo_name": "google/starthinker",
"id": "afa363cb7506b5be9045f7938a740cf70c835261",
"size": "8776",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "starthinker_ui/recipe/views.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "89775"
},
{
"name": "Jupyter Notebook",
"bytes": "1088964"
},
{
"name": "Python",
"bytes": "2356647"
},
{
"name": "Shell",
"bytes": "89492"
}
],
"symlink_target": ""
}
|
import unittest
import pyresample
import satistjenesten.utils
import rasterio
class TestUtils(unittest.TestCase):
def setUp(self):
self.gtiff_fh = rasterio.open('test_data/modis.tif')
def test_geotiff_to_areadef(self):
gtiff_meta_dict = self.gtiff_fh.meta
area_def = satistjenesten.utils.geotiff_meta_to_areadef(gtiff_meta_dict)
self.assertIsInstance(area_def, pyresample.geometry.AreaDefinition)
|
{
"content_hash": "4bc792b66a2f4267bf9426406778bfce",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 28.785714285714285,
"alnum_prop": 0.7890818858560794,
"repo_name": "mitkin/satistjenesten",
"id": "cc7b7b668258edd649600826a68355e0f1101095",
"size": "403",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "1130"
},
{
"name": "Makefile",
"bytes": "221"
},
{
"name": "Python",
"bytes": "45812"
},
{
"name": "Shell",
"bytes": "654"
}
],
"symlink_target": ""
}
|
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
|
{
"content_hash": "39440cd6313974237ecaff649ffbba1c",
"timestamp": "",
"source": "github",
"line_count": 869,
"max_line_length": 98,
"avg_line_length": 34.63636363636363,
"alnum_prop": 0.6314495498189309,
"repo_name": "sauliusl/seaborn",
"id": "c374474da105607eb66b1514701655326b3a8e83",
"size": "30099",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "seaborn/external/six.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "342"
},
{
"name": "Python",
"bytes": "652614"
}
],
"symlink_target": ""
}
|
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urlencode
from urllib.request import urlopen
from django.core.urlresolvers import reverse
from django.conf import settings as project_settings
from .base import BackendBase
class PaypalBackend(BackendBase):
backend_id = 'paypal'
backend_verbose_name = _("PayPal")
backend_display_name = _("PayPal")
backend_has_recurring = True
def __init__(self, settings):
self.test = settings.get('TEST', False)
self.header_image = settings.get('HEADER_IMAGE', None)
self.title = settings.get('TITLE', 'VPN Payment')
self.currency = settings.get('CURRENCY', 'EUR')
self.account_address = settings.get('ADDRESS')
self.receiver_address = settings.get('RECEIVER', self.account_address)
if self.test:
default_api = 'https://www.sandbox.paypal.com/'
else:
default_api = 'https://www.paypal.com/'
self.api_base = settings.get('API_BASE', default_api)
if self.account_address:
self.backend_enabled = True
def new_payment(self, payment):
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick',
'notify_url': ROOT_URL + reverse('payments:cb_paypal', args=(payment.id,)),
'item_name': self.title,
'amount': '%.2f' % (payment.amount / 100),
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:view', args=(payment.id,)),
'cancel_return': ROOT_URL + reverse('payments:cancel', args=(payment.id,)),
}
if self.header_image:
params['cpp_header_image'] = self.header_image
payment.status_message = _("Waiting for PayPal to confirm the transaction... " +
"It can take up to a few minutes...")
payment.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def new_subscription(self, rps):
months = {
'3m': 3,
'6m': 6,
'12m': 12,
}[rps.period]
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick-subscriptions',
'notify_url': ROOT_URL + reverse('payments:cb_paypal_subscr', args=(rps.id,)),
'item_name': self.title,
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:return_subscr', args=(rps.id,)),
'cancel_return': ROOT_URL + reverse('account:index'),
'a3': '%.2f' % (rps.period_amount / 100),
'p3': str(months),
't3': 'M',
'src': '1',
}
if self.header_image:
params['cpp_header_image'] = self.header_image
rps.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def handle_verified_callback(self, payment, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
if txn_type not in (None, 'web_accept', 'express_checkout'):
# Not handled here and can be ignored
return
if params['payment_status'] == 'Refunded':
payment.status = 'refunded'
payment.status_message = None
elif params['payment_status'] == 'Completed':
self.handle_completed_payment(payment, params)
def handle_verified_callback_subscr(self, subscr, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
        if not txn_type or not txn_type.startswith('subscr_'):
# Not handled here and can be ignored
return
if txn_type == 'subscr_payment':
if params['payment_status'] == 'Refunded':
# FIXME: Find the payment and do something
pass
elif params['payment_status'] == 'Completed':
payment = subscr.create_payment()
if not self.handle_completed_payment(payment, params):
return
subscr.last_confirmed_payment = payment.created
subscr.backend_extid = params.get('subscr_id', '')
if subscr.status == 'new' or subscr.status == 'unconfirmed':
subscr.status = 'active'
subscr.save()
elif txn_type == 'subscr_cancel' or txn_type == 'subscr_eot':
subscr.status = 'cancelled'
subscr.save()
def handle_completed_payment(self, payment, params):
from payments.models import Payment
# Prevent making duplicate Payments if IPN is received twice
pc = Payment.objects.filter(backend_extid=params['txn_id']).count()
if pc > 0:
return False
if self.receiver_address != params['receiver_email']:
raise ValueError('Wrong receiver: ' + params['receiver_email'])
if self.currency.lower() != params['mc_currency'].lower():
raise ValueError('Wrong currency: ' + params['mc_currency'])
payment.paid_amount = int(float(params['mc_gross']) * 100)
if payment.paid_amount < payment.amount:
raise ValueError('Not fully paid.')
payment.user.vpnuser.add_paid_time(payment.time)
payment.user.vpnuser.on_payment_confirmed(payment)
payment.user.vpnuser.save()
payment.backend_extid = params['txn_id']
payment.status = 'confirmed'
payment.status_message = None
payment.save()
return True
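    # IPN verification works by echoing the raw POST body back to PayPal's
    # '_notify-validate' endpoint; the notification is trusted only if the
    # response body is exactly b'VERIFIED'.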
def verify_ipn(self, request):
v_url = self.api_base + '/cgi-bin/webscr?cmd=_notify-validate'
v_req = urlopen(v_url, data=request.body, timeout=5)
v_res = v_req.read()
return v_res == b'VERIFIED'
def callback(self, payment, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback(payment, params)
return True
except (KeyError, ValueError) as e:
payment.status = 'error'
payment.status_message = None
payment.backend_data['ipn_exception'] = repr(e)
payment.backend_data['ipn_last_data'] = repr(request.POST)
payment.save()
raise
def callback_subscr(self, subscr, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback_subscr(subscr, params)
return True
except (KeyError, ValueError) as e:
subscr.status = 'error'
subscr.status_message = None
subscr.backend_data['ipn_exception'] = repr(e)
subscr.backend_data['ipn_last_data'] = repr(request.POST)
subscr.save()
raise
def get_ext_url(self, payment):
if not payment.backend_extid:
return None
url = 'https://history.paypal.com/webscr?cmd=_history-details-from-hub&id=%s'
return url % payment.backend_extid
def get_subscr_ext_url(self, subscr):
if not subscr.backend_extid:
return None
return ('https://www.paypal.com/fr/cgi-bin/webscr?cmd=_profile-recurring-payments'
'&encrypted_profile_id=%s' % subscr.backend_extid)
|
{
"content_hash": "0595356c039dc3d079dad947e17d43d4",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 90,
"avg_line_length": 36.47142857142857,
"alnum_prop": 0.5742264003133568,
"repo_name": "CCrypto/ccvpn3",
"id": "6a395cb76601c3b4e3e2f8cd461d488ed5764d94",
"size": "7659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "payments/backends/paypal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16467"
},
{
"name": "HTML",
"bytes": "54117"
},
{
"name": "JavaScript",
"bytes": "3594"
},
{
"name": "Python",
"bytes": "171906"
},
{
"name": "Shell",
"bytes": "320"
}
],
"symlink_target": ""
}
|
import os
import sys
from glob import glob
import subprocess as subp
if not os.path.isdir('examples'):
raise IOError('Must run from root dir of none repo')
# check for venv activations
cmd = 'if [ -z "$VIRTUAL_ENV" ];then exit 1;else exit 0;fi'
if subp.call(cmd, shell=True) > 0:
raise IOError('Need to activate the virtualenv')
benchmarks = glob('examples/convnet-benchmarks/*.py')
results = []
for ex in benchmarks:
for dt_arg in ['f16', 'f32']:
print(ex, dt_arg)
ex_bn = os.path.basename(ex)
cmd = "python {} -d {}".format(ex, dt_arg)
rc = subp.call(cmd, shell=True)
results.append([ex, rc])
print('\n\n')
errors = 0
for dat in results:
if dat[1] != 0:
print('FAILURE on {}'.format(dat[0]))
errors += 1
sys.exit(errors)
|
{
"content_hash": "ed1dd1bc2a0e41f731d7d56e1557e87c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 59,
"avg_line_length": 24.545454545454547,
"alnum_prop": 0.6135802469135803,
"repo_name": "dongjoon-hyun/neon",
"id": "f4380fae3fc7debc228242d64cf8804bd7bd9de7",
"size": "926",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/run_benchmarks.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6534"
},
{
"name": "C++",
"bytes": "67757"
},
{
"name": "CSS",
"bytes": "927996"
},
{
"name": "Cuda",
"bytes": "14937"
},
{
"name": "Makefile",
"bytes": "11069"
},
{
"name": "Python",
"bytes": "1560785"
}
],
"symlink_target": ""
}
|
import logging
from fabric import api
from fabric.state import env
LOG = logging.getLogger(__name__)
class LocalExecutionFailed(RuntimeError):
def __init__(self, msg, code):
super(LocalExecutionFailed, self).__init__()
self.code = code
self.message = msg
def run(cmd, capture_output=True):
"""
Run command locally with current user privileges
:returns: command output on success
:raises: LocalExecutionFailed if command failed"""
try:
LOG.debug("Running '%s' locally", cmd)
return api.local(cmd, capture=capture_output)
except (SystemExit, env.abort_exception) as e:
LOG.debug("Command '%s' failed with '%s'", cmd, e.message)
raise LocalExecutionFailed(e.message, e.code)
def sudo(cmd, sudo_password=None, capture_output=True):
if sudo_password:
# make sure back slashes and quotation marks are handled correctly
pwd = sudo_password.replace("\\", "\\\\").replace("'", "'\\''")
# TODO logs password in plaintext (!!!)
sudo_cmd = "echo '{passwd}' | sudo -S {cmd}".format(passwd=pwd,
cmd=cmd)
else:
sudo_cmd = "sudo {cmd}".format(cmd=cmd)
return run(sudo_cmd, capture_output=capture_output)
def run_ignoring_errors(cmd):
"""Suppresses all command execution errors
:returns: (retcode, output) pair
"""
try:
output = run(cmd, capture_output=True)
return 0, output
except LocalExecutionFailed as e:
return e.code, e.message
def sudo_ignoring_errors(cmd, sudo_password=None):
"""Suppresses all command execution errors
:returns: (retcode, output) pair
"""
try:
output = sudo(cmd, capture_output=True, sudo_password=sudo_password)
return 0, output
except LocalExecutionFailed as e:
return e.code, e.message
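# Example usage (illustrative; the command and password are placeholders):
#   code, output = sudo_ignoring_errors("service libvirtd restart",
#                                       sudo_password="secret")
#   if code != 0:
#       LOG.warning("restart failed: %s", output)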
|
{
"content_hash": "054efdc865d2fa15fa66110b9b1b7c7c",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 76,
"avg_line_length": 31.583333333333332,
"alnum_prop": 0.6263852242744063,
"repo_name": "SVilgelm/CloudFerry",
"id": "66e068b54afbecb7e39b1c77a426428121ab06a7",
"size": "2471",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudferry/lib/utils/local.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2615"
},
{
"name": "Python",
"bytes": "1718937"
},
{
"name": "Ruby",
"bytes": "2507"
},
{
"name": "Shell",
"bytes": "11910"
}
],
"symlink_target": ""
}
|
import pi3d
import sys
from Geometry import Geometry
from PaletteTypes import PaletteTypes
from ShaderTypes import ShaderTypes
from random import uniform
class Background(Geometry):
def __init__(self, camera):
super(Background, self).__init__(camera,
pi3d.EnvironmentCube(size=40.0, maptype='FACES'), fground=False)
# pi3d.Sprite(w=100, h=100, x=0, y=0, z=100.0), fground=False)
|
{
"content_hash": "ddb96ca573d8cb8fa653221b8c4a2315",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 29.071428571428573,
"alnum_prop": 0.7174447174447175,
"repo_name": "fredericofs/devart-template",
"id": "cf64fffb65e8b4d14e6b70ac0587d4a72dd3fcba",
"size": "454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project_code/Background.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "330"
},
{
"name": "GLSL",
"bytes": "6314"
},
{
"name": "HTML",
"bytes": "703"
},
{
"name": "Java",
"bytes": "6581"
},
{
"name": "JavaScript",
"bytes": "3021"
},
{
"name": "Processing",
"bytes": "88915"
},
{
"name": "Python",
"bytes": "38770"
}
],
"symlink_target": ""
}
|
"""MIT License
Copyright (c) 2017 Curtis La Graff
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import json
import configparser
import yaml
import xmldict
extension_handlers = {}
def get_ext(path):
    """Return the file extension (without the dot) of the given path."""
    partitions = path.split(".")
    if len(partitions) <= 1:
        raise Exception("No extension on file path: %s" % path)
    return partitions[-1]
def parse(path):
    """Parse the file at the given path by dispatching to the handler
    registered for its extension; returns the parsed data as a dict."""
    ext = get_ext(path)
    if ext not in extension_handlers:
        raise Exception("Extension not supported: %s" % ext)
    parser = extension_handlers[ext]
    return parser(path)
def register(ext):
    """Decorator that registers the wrapped function as the parser for the
    given file extension; stacking it registers one parser for several
    extensions (see parse_yaml below)."""
    def outer(fn):
        extension_handlers[ext] = fn
        def inner(*args, **kwargs):
            return fn(*args, **kwargs)
        return inner
    return outer
@register("json")
def parse_json(path):
content = ""
data = {}
with open(path, "r") as f:
content = f.read()
if content:
data = json.loads(content)
return data
@register("yaml")
@register("yml")
def parse_yaml(path):
data = {}
with open(path, "r") as f:
data = yaml.safe_load(f)
return data
@register("ini")
def read_ini(path):
data = {}
parser = configparser.ConfigParser()
parser.read(path)
for section in parser:
data[section] = {}
for attr in parser[section]:
data[section][attr] = parser[section][attr]
return data
@register("xml")
def read_xml(path):
content = ""
data = {}
with open(path, "r") as f:
content = f.read()
if content:
data = xmldict.xml_to_dict(content)
return data
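# Usage sketch (not part of the original module; "settings.yml" and the
# "toml" handler are hypothetical):
#
#     from beaver import drivers
#
#     data = drivers.parse("settings.yml")  # dispatches to parse_yaml
#
#     @drivers.register("toml")
#     def parse_toml(path):
#         import toml
#         with open(path, "r") as f:
#             return toml.load(f)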
|
{
"content_hash": "0e9f4e9caab63cbc5d6e9884756061b9",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 78,
"avg_line_length": 23.85185185185185,
"alnum_prop": 0.6758540372670807,
"repo_name": "clagraff/beaver",
"id": "b43a729ba526ecc913af61c55c997f408fab0f8a",
"size": "2576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "beaver/drivers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23579"
}
],
"symlink_target": ""
}
|
import re
import datetime
from utils import log as logging
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from apps.recommendations.models import RecommendedFeed
from apps.reader.models import UserSubscription
from apps.rss_feeds.models import Feed, MFeedIcon
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required, admin_only
def load_recommended_feed(request):
user = get_user(request)
page = max(int(request.REQUEST.get('page', 0)), 0)
usersub = None
refresh = request.REQUEST.get('refresh')
    now = datetime.datetime.now()
unmoderated = request.REQUEST.get('unmoderated', False) == 'true'
if unmoderated:
recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[page:page+2]
else:
recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[page:page+2]
if recommended_feeds and request.user.is_authenticated():
usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed)
if refresh != 'true' and page > 0:
logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1))
recommended_feed = recommended_feeds and recommended_feeds[0]
if not recommended_feeds:
return HttpResponse("")
feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id)
if recommended_feed:
return render_to_response('recommendations/render_recommended_feed.xhtml', {
'recommended_feed' : recommended_feed,
'description' : recommended_feed.description or recommended_feed.feed.data.feed_tagline,
'usersub' : usersub,
'feed_icon' : feed_icon and feed_icon[0],
'has_next_page' : len(recommended_feeds) > 1,
'has_previous_page' : page != 0,
'unmoderated' : unmoderated,
'today' : datetime.datetime.now(),
'page' : page,
}, context_instance=RequestContext(request))
else:
return HttpResponse("")
@json.json_view
def load_feed_info(request, feed_id):
feed = get_object_or_404(Feed, pk=feed_id)
previous_recommendation = None
if request.user.is_authenticated():
recommended_feed = RecommendedFeed.objects.filter(user=request.user, feed=feed)
if recommended_feed:
previous_recommendation = recommended_feed[0].created_date
return {
'subscriber_count': feed.num_subscribers,
'tagline': feed.data.feed_tagline,
'previous_recommendation': previous_recommendation
}
@ajax_login_required
@json.json_view
def save_recommended_feed(request):
feed_id = request.POST['feed_id']
feed = get_object_or_404(Feed, pk=int(feed_id))
tagline = request.POST['tagline']
twitter = request.POST.get('twitter')
code = 1
recommended_feed, created = RecommendedFeed.objects.get_or_create(
feed=feed,
user=request.user,
defaults=dict(
description=tagline,
twitter=twitter
)
)
return dict(code=code if created else -1)
@admin_only
@ajax_login_required
def approve_feed(request):
feed_id = request.POST['feed_id']
feed = get_object_or_404(Feed, pk=int(feed_id))
date = request.POST['date']
recommended_feed = RecommendedFeed.objects.filter(feed=feed)[0]
year, month, day = re.search(r'(\d{4})-(\d{1,2})-(\d{1,2})', date).groups()
recommended_feed.is_public = True
recommended_feed.approved_date = datetime.date(int(year), int(month), int(day))
recommended_feed.save()
return load_recommended_feed(request)
@admin_only
@ajax_login_required
def decline_feed(request):
feed_id = request.POST['feed_id']
feed = get_object_or_404(Feed, pk=int(feed_id))
recommended_feeds = RecommendedFeed.objects.filter(feed=feed)
for recommended_feed in recommended_feeds:
recommended_feed.is_public = False
recommended_feed.declined_date = datetime.datetime.now()
recommended_feed.save()
return load_recommended_feed(request)
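# Hypothetical URL wiring for the views above (a sketch, not part of the
# original file; every pattern and name here is an assumption):
#
#     from django.conf.urls import patterns, url
#
#     urlpatterns = patterns('apps.recommendations.views',
#         url(r'^load_recommended_feed/?$', 'load_recommended_feed'),
#         url(r'^load_feed_info/(?P<feed_id>\d+)/?$', 'load_feed_info'),
#         url(r'^save_recommended_feed/?$', 'save_recommended_feed'),
#         url(r'^approve_feed/?$', 'approve_feed'),
#         url(r'^decline_feed/?$', 'decline_feed'),
#     )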
|
{
"content_hash": "d1900d313019a6952f156359785094c1",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 116,
"avg_line_length": 38.280701754385966,
"alnum_prop": 0.6551329055912007,
"repo_name": "huihoo/reader",
"id": "0bf55dbece6b53d9f030cb0b824649ec46a9809e",
"size": "4364",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/recommendations/views.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
"""
Client side of the compute RPC API.
"""
from oslo.config import cfg
from oslo import messaging
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import base as objects_base
from nova.openstack.common import jsonutils
from nova import rpc
rpcapi_opts = [
cfg.StrOpt('compute_topic',
default='compute',
help='The topic compute nodes listen on'),
]
CONF = cfg.CONF
CONF.register_opts(rpcapi_opts)
rpcapi_cap_opt = cfg.StrOpt('compute',
help='Set a version cap for messages sent to compute services. If you '
'plan to do a live upgrade from havana to icehouse, you should '
'set this option to "icehouse-compat" before beginning the live '
'upgrade procedure.')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
def _compute_host(host, instance):
'''Get the destination host for a message.
:param host: explicit host to send the message to.
:param instance: If an explicit host was not specified, use
instance['host']
:returns: A host
'''
if host:
return host
if not instance:
raise exception.NovaException(_('No compute host specified'))
if not instance['host']:
raise exception.NovaException(_('Unable to find host for '
'Instance %s') % instance['uuid'])
return instance['host']
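# Behaviour sketch for _compute_host (comments only, not in the original):
#
#     _compute_host('node1', None)                    -> 'node1'
#     _compute_host(None, {'host': 'node2'})          -> 'node2'
#     _compute_host(None, None)                       -> NovaException
#     _compute_host(None, {'host': None, 'uuid': u})  -> NovaException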
class ComputeAPI(object):
'''Client side of the compute rpc API.
API version history:
* 1.0 - Initial version.
* 1.1 - Adds get_host_uptime()
* 1.2 - Adds check_can_live_migrate_[destination|source]
* 1.3 - Adds change_instance_metadata()
* 1.4 - Remove instance_uuid, add instance argument to
reboot_instance()
* 1.5 - Remove instance_uuid, add instance argument to
pause_instance(), unpause_instance()
* 1.6 - Remove instance_uuid, add instance argument to
suspend_instance()
* 1.7 - Remove instance_uuid, add instance argument to
get_console_output()
* 1.8 - Remove instance_uuid, add instance argument to
add_fixed_ip_to_instance()
* 1.9 - Remove instance_uuid, add instance argument to attach_volume()
* 1.10 - Remove instance_id, add instance argument to
check_can_live_migrate_destination()
* 1.11 - Remove instance_id, add instance argument to
check_can_live_migrate_source()
* 1.12 - Remove instance_uuid, add instance argument to
confirm_resize()
* 1.13 - Remove instance_uuid, add instance argument to detach_volume()
* 1.14 - Remove instance_uuid, add instance argument to finish_resize()
* 1.15 - Remove instance_uuid, add instance argument to
finish_revert_resize()
* 1.16 - Remove instance_uuid, add instance argument to
get_diagnostics()
* 1.17 - Remove instance_uuid, add instance argument to
get_vnc_console()
* 1.18 - Remove instance_uuid, add instance argument to inject_file()
* 1.19 - Remove instance_uuid, add instance argument to
inject_network_info()
* 1.20 - Remove instance_id, add instance argument to
post_live_migration_at_destination()
* 1.21 - Remove instance_uuid, add instance argument to
power_off_instance() and stop_instance()
* 1.22 - Remove instance_uuid, add instance argument to
power_on_instance() and start_instance()
* 1.23 - Remove instance_id, add instance argument to
pre_live_migration()
* 1.24 - Remove instance_uuid, add instance argument to
rebuild_instance()
* 1.25 - Remove instance_uuid, add instance argument to
remove_fixed_ip_from_instance()
* 1.26 - Remove instance_id, add instance argument to
remove_volume_connection()
* 1.27 - Remove instance_uuid, add instance argument to
rescue_instance()
* 1.28 - Remove instance_uuid, add instance argument to reset_network()
* 1.29 - Remove instance_uuid, add instance argument to
resize_instance()
* 1.30 - Remove instance_uuid, add instance argument to
resume_instance()
* 1.31 - Remove instance_uuid, add instance argument to revert_resize()
* 1.32 - Remove instance_id, add instance argument to
rollback_live_migration_at_destination()
* 1.33 - Remove instance_uuid, add instance argument to
set_admin_password()
* 1.34 - Remove instance_uuid, add instance argument to
snapshot_instance()
* 1.35 - Remove instance_uuid, add instance argument to
unrescue_instance()
* 1.36 - Remove instance_uuid, add instance argument to
change_instance_metadata()
* 1.37 - Remove instance_uuid, add instance argument to
terminate_instance()
* 1.38 - Changes to prep_resize():
* remove instance_uuid, add instance
* remove instance_type_id, add instance_type
* remove topic, it was unused
* 1.39 - Remove instance_uuid, add instance argument to run_instance()
* 1.40 - Remove instance_id, add instance argument to live_migration()
* 1.41 - Adds refresh_instance_security_rules()
* 1.42 - Add reservations arg to prep_resize(), resize_instance(),
finish_resize(), confirm_resize(), revert_resize() and
finish_revert_resize()
* 1.43 - Add migrate_data to live_migration()
* 1.44 - Adds reserve_block_device_name()
* 2.0 - Remove 1.x backwards compat
* 2.1 - Adds orig_sys_metadata to rebuild_instance()
* 2.2 - Adds slave_info parameter to add_aggregate_host() and
remove_aggregate_host()
* 2.3 - Adds volume_id to reserve_block_device_name()
* 2.4 - Add bdms to terminate_instance
* 2.5 - Add block device and network info to reboot_instance
* 2.6 - Remove migration_id, add migration to resize_instance
* 2.7 - Remove migration_id, add migration to confirm_resize
* 2.8 - Remove migration_id, add migration to finish_resize
* 2.9 - Add publish_service_capabilities()
* 2.10 - Adds filter_properties and request_spec to prep_resize()
* 2.11 - Adds soft_delete_instance() and restore_instance()
* 2.12 - Remove migration_id, add migration to revert_resize
* 2.13 - Remove migration_id, add migration to finish_revert_resize
* 2.14 - Remove aggregate_id, add aggregate to add_aggregate_host
* 2.15 - Remove aggregate_id, add aggregate to remove_aggregate_host
* 2.16 - Add instance_type to resize_instance
* 2.17 - Add get_backdoor_port()
* 2.18 - Add bdms to rebuild_instance
* 2.19 - Add node to run_instance
* 2.20 - Add node to prep_resize
* 2.21 - Add migrate_data dict param to pre_live_migration()
* 2.22 - Add recreate, on_shared_storage and host arguments to
rebuild_instance()
* 2.23 - Remove network_info from reboot_instance
* 2.24 - Added get_spice_console method
* 2.25 - Add attach_interface() and detach_interface()
* 2.26 - Add validate_console_port to ensure the service connects to
vnc on the correct port
* 2.27 - Adds 'reservations' to terminate_instance() and
soft_delete_instance()
... Grizzly supports message version 2.27. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.27.
* 2.28 - Adds check_instance_shared_storage()
* 2.29 - Made start_instance() and stop_instance() take new-world
instance objects
* 2.30 - Adds live_snapshot_instance()
* 2.31 - Adds shelve_instance(), shelve_offload_instance, and
unshelve_instance()
* 2.32 - Make reboot_instance take a new world instance object
* 2.33 - Made suspend_instance() and resume_instance() take new-world
instance objects
* 2.34 - Added swap_volume()
* 2.35 - Made terminate_instance() and soft_delete_instance() take
new-world instance objects
* 2.36 - Made pause_instance() and unpause_instance() take new-world
instance objects
* 2.37 - Added the legacy_bdm_in_spec parameter to run_instance
* 2.38 - Made check_can_live_migrate_[destination|source] take
new-world instance objects
* 2.39 - Made revert_resize() and confirm_resize() take new-world
instance objects
* 2.40 - Made reset_network() take new-world instance object
* 2.41 - Make inject_network_info take new-world instance object
* 2.42 - Splits snapshot_instance() into snapshot_instance() and
backup_instance() and makes them take new-world instance
objects.
* 2.43 - Made prep_resize() take new-world instance object
* 2.44 - Add volume_snapshot_create(), volume_snapshot_delete()
* 2.45 - Made resize_instance() take new-world objects
* 2.46 - Made finish_resize() take new-world objects
* 2.47 - Made finish_revert_resize() take new-world objects
... Havana supports message version 2.47. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.47.
* 2.48 - Make add_aggregate_host() and remove_aggregate_host() take
new-world objects
* ... - Remove live_snapshot() that was never actually used
* 3.0 - Remove 2.x compatibility
* 3.1 - Update get_spice_console() to take an instance object
* 3.2 - Update get_vnc_console() to take an instance object
* 3.3 - Update validate_console_port() to take an instance object
* 3.4 - Update rebuild_instance() to take an instance object
* 3.5 - Pass preserve_ephemeral flag to rebuild_instance()
* 3.6 - Make volume_snapshot_{create,delete} use new-world objects
* 3.7 - Update change_instance_metadata() to take an instance object
* 3.8 - Update set_admin_password() to take an instance object
* 3.9 - Update rescue_instance() to take an instance object
* 3.10 - Added get_rdp_console method
* 3.11 - Update unrescue_instance() to take an object
* 3.12 - Update add_fixed_ip_to_instance() to take an object
* 3.13 - Update remove_fixed_ip_from_instance() to take an object
* 3.14 - Update post_live_migration_at_destination() to take an object
* 3.15 - Adds filter_properties and node to unshelve_instance()
* 3.16 - Make reserve_block_device_name and attach_volume use new-world
objects, and add disk_bus and device_type params to
reserve_block_device_name, and bdm param to attach_volume
* 3.17 - Update attach_interface and detach_interface to take an object
* 3.18 - Update get_diagnostics() to take an instance object
* Removed inject_file(), as it was unused.
* 3.19 - Update pre_live_migration to take instance object
* 3.20 - Make restore_instance take an instance object
* 3.21 - Made rebuild take new-world BDM objects
* 3.22 - Made terminate_instance take new-world BDM objects
* 3.23 - Added external_instance_event()
* build_and_run_instance was added in Havana and not used or
documented.
... Icehouse supports message version 3.23. So, any changes to
existing methods in 3.x after that point should be done such that they
can handle the version_cap being set to 3.23.
* 3.24 - Update rescue_instance() to take optional rescue_image_ref
* 3.25 - Make detach_volume take an object
* 3.26 - Make live_migration() and
rollback_live_migration_at_destination() take an object
* ... Removed run_instance()
* 3.27 - Make run_instance() accept a new-world object
* 3.28 - Update get_console_output() to accept a new-world object
* 3.29 - Make check_instance_shared_storage accept a new-world object
* 3.30 - Make remove_volume_connection() accept a new-world object
* 3.31 - Add get_instance_diagnostics
* 3.32 - Add destroy_disks and migrate_data optional parameters to
rollback_live_migration_at_destination()
* 3.33 - Make build_and_run_instance() take a NetworkRequestList object
* 3.34 - Add get_serial_console method
* 3.35 - Make reserve_block_device_name return a BDM object
... Juno supports message version 3.35. So, any changes to
existing methods in 3.x after that point should be done such that they
can handle the version_cap being set to 3.35.
'''
VERSION_ALIASES = {
'icehouse': '3.23',
'juno': '3.35',
}
def __init__(self):
super(ComputeAPI, self).__init__()
target = messaging.Target(topic=CONF.compute_topic, version='3.0')
version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.compute,
CONF.upgrade_levels.compute)
serializer = objects_base.NovaObjectSerializer()
self.client = self.get_client(target, version_cap, serializer)
# Cells overrides this
def get_client(self, target, version_cap, serializer):
return rpc.get_client(target,
version_cap=version_cap,
serializer=serializer)
def _check_live_migration_api_version(self, server):
# NOTE(angdraug): live migration involving a compute host running Nova
# API older than v3.32 as either source or destination can cause
# instance disks to be deleted from shared storage
if not self.client.can_send_version('3.32'):
raise exception.LiveMigrationWithOldNovaNotSafe(server=server)
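    # Version-negotiation pattern used by many methods below (an
    # illustrative sketch, not part of the original file): send the newest
    # version the cap allows, otherwise fall back to the 3.0 baseline and
    # downgrade arguments that newer versions expect as objects:
    #
    #     if self.client.can_send_version('3.25'):
    #         version = '3.25'
    #     else:
    #         version = '3.0'
    #         instance = jsonutils.to_primitive(instance)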
def add_aggregate_host(self, ctxt, aggregate, host_param, host,
slave_info=None):
'''Add aggregate host.
:param ctxt: request context
        :param aggregate: the aggregate to add the host to
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
:param host: This is the host to send the message to.
'''
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'add_aggregate_host',
aggregate=aggregate, host=host_param,
slave_info=slave_info)
def add_fixed_ip_to_instance(self, ctxt, instance, network_id):
version = '3.12'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'add_fixed_ip_to_instance',
instance=instance, network_id=network_id)
def attach_interface(self, ctxt, instance, network_id, port_id,
requested_ip):
version = '3.17'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'attach_interface',
instance=instance, network_id=network_id,
port_id=port_id, requested_ip=requested_ip)
def attach_volume(self, ctxt, instance, volume_id, mountpoint, bdm=None):
# NOTE(ndipanov): Remove volume_id and mountpoint on the next major
# version bump - they are not needed when using bdm objects.
version = '3.16'
kw = {'instance': instance, 'volume_id': volume_id,
'mountpoint': mountpoint, 'bdm': bdm}
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'attach_volume', **kw)
def change_instance_metadata(self, ctxt, instance, diff):
version = '3.7'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'change_instance_metadata',
instance=instance, diff=diff)
def check_can_live_migrate_destination(self, ctxt, instance, destination,
block_migration, disk_over_commit):
self._check_live_migration_api_version(destination)
cctxt = self.client.prepare(server=destination, version='3.32')
return cctxt.call(ctxt, 'check_can_live_migrate_destination',
instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
def check_can_live_migrate_source(self, ctxt, instance, dest_check_data):
source = _compute_host(None, instance)
self._check_live_migration_api_version(source)
cctxt = self.client.prepare(server=source, version='3.32')
return cctxt.call(ctxt, 'check_can_live_migrate_source',
instance=instance,
dest_check_data=dest_check_data)
def check_instance_shared_storage(self, ctxt, instance, data):
if self.client.can_send_version('3.29'):
version = '3.29'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'check_instance_shared_storage',
instance=instance,
data=data)
def confirm_resize(self, ctxt, instance, migration, host,
reservations=None, cast=True):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
rpc_method = cctxt.cast if cast else cctxt.call
return rpc_method(ctxt, 'confirm_resize',
instance=instance, migration=migration,
reservations=reservations)
def detach_interface(self, ctxt, instance, port_id):
version = '3.17'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'detach_interface',
instance=instance, port_id=port_id)
def detach_volume(self, ctxt, instance, volume_id):
if self.client.can_send_version('3.25'):
version = '3.25'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'detach_volume',
instance=instance, volume_id=volume_id)
def finish_resize(self, ctxt, instance, migration, image, disk_info,
host, reservations=None):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'finish_resize',
instance=instance, migration=migration,
image=image, disk_info=disk_info, reservations=reservations)
def finish_revert_resize(self, ctxt, instance, migration, host,
reservations=None):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'finish_revert_resize',
instance=instance, migration=migration,
reservations=reservations)
def get_console_output(self, ctxt, instance, tail_length):
if self.client.can_send_version('3.28'):
version = '3.28'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_console_output',
instance=instance, tail_length=tail_length)
def get_console_pool_info(self, ctxt, console_type, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_console_pool_info',
console_type=console_type)
def get_console_topic(self, ctxt, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_console_topic')
def get_diagnostics(self, ctxt, instance):
version = '3.18'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_diagnostics', instance=instance)
def get_instance_diagnostics(self, ctxt, instance):
instance_p = jsonutils.to_primitive(instance)
kwargs = {'instance': instance_p}
version = '3.31'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_instance_diagnostics', **kwargs)
def get_vnc_console(self, ctxt, instance, console_type):
version = '3.2'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_vnc_console',
instance=instance, console_type=console_type)
def get_spice_console(self, ctxt, instance, console_type):
version = '3.1'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_spice_console',
instance=instance, console_type=console_type)
def get_rdp_console(self, ctxt, instance, console_type):
version = '3.10'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_rdp_console',
instance=instance, console_type=console_type)
def get_serial_console(self, ctxt, instance, console_type):
version = '3.34'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_serial_console',
instance=instance, console_type=console_type)
def validate_console_port(self, ctxt, instance, port, console_type):
version = '3.3'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'validate_console_port',
instance=instance, port=port,
console_type=console_type)
def host_maintenance_mode(self, ctxt, host_param, mode, host):
'''Set host maintenance mode
:param ctxt: request context
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
        :param mode: maintenance mode to set (True to enter, False to exit)
:param host: This is the host to send the message to.
'''
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'host_maintenance_mode',
host=host_param, mode=mode)
def host_power_action(self, ctxt, action, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'host_power_action', action=action)
def inject_network_info(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'inject_network_info', instance=instance)
def live_migration(self, ctxt, instance, dest, block_migration, host,
migrate_data=None):
if self.client.can_send_version('3.26'):
version = '3.26'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'live_migration', instance=instance,
dest=dest, block_migration=block_migration,
migrate_data=migrate_data)
def pause_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'pause_instance', instance=instance)
def post_live_migration_at_destination(self, ctxt, instance,
block_migration, host):
version = '3.14'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'post_live_migration_at_destination',
instance=instance, block_migration=block_migration)
def pre_live_migration(self, ctxt, instance, block_migration, disk,
host, migrate_data=None):
version = '3.19'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'pre_live_migration',
instance=instance,
block_migration=block_migration,
disk=disk, migrate_data=migrate_data)
def prep_resize(self, ctxt, image, instance, instance_type, host,
reservations=None, request_spec=None,
filter_properties=None, node=None):
version = '3.0'
instance_type_p = jsonutils.to_primitive(instance_type)
image_p = jsonutils.to_primitive(image)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'prep_resize',
instance=instance,
instance_type=instance_type_p,
image=image_p, reservations=reservations,
request_spec=request_spec,
filter_properties=filter_properties,
node=node)
def reboot_instance(self, ctxt, instance, block_device_info,
reboot_type):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'reboot_instance',
instance=instance,
block_device_info=block_device_info,
reboot_type=reboot_type)
def rebuild_instance(self, ctxt, instance, new_pass, injected_files,
image_ref, orig_image_ref, orig_sys_metadata, bdms,
recreate=False, on_shared_storage=False, host=None,
preserve_ephemeral=False, kwargs=None):
# NOTE(danms): kwargs is only here for cells compatibility, don't
# actually send it to compute
extra = {'preserve_ephemeral': preserve_ephemeral}
version = '3.21'
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
cctxt.cast(ctxt, 'rebuild_instance',
instance=instance, new_pass=new_pass,
injected_files=injected_files, image_ref=image_ref,
orig_image_ref=orig_image_ref,
orig_sys_metadata=orig_sys_metadata, bdms=bdms,
recreate=recreate, on_shared_storage=on_shared_storage,
**extra)
def refresh_provider_fw_rules(self, ctxt, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_provider_fw_rules')
def remove_aggregate_host(self, ctxt, aggregate, host_param, host,
slave_info=None):
'''Remove aggregate host.
:param ctxt: request context
        :param aggregate: the aggregate to remove the host from
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
:param host: This is the host to send the message to.
'''
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'remove_aggregate_host',
aggregate=aggregate, host=host_param,
slave_info=slave_info)
def remove_fixed_ip_from_instance(self, ctxt, instance, address):
version = '3.13'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'remove_fixed_ip_from_instance',
instance=instance, address=address)
def remove_volume_connection(self, ctxt, instance, volume_id, host):
if self.client.can_send_version('3.30'):
version = '3.30'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'remove_volume_connection',
instance=instance, volume_id=volume_id)
def rescue_instance(self, ctxt, instance, rescue_password,
rescue_image_ref=None):
msg_args = {'rescue_password': rescue_password}
if self.client.can_send_version('3.24'):
version = '3.24'
msg_args['rescue_image_ref'] = rescue_image_ref
else:
version = '3.9'
msg_args['instance'] = instance
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'rescue_instance', **msg_args)
def reset_network(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'reset_network', instance=instance)
def resize_instance(self, ctxt, instance, migration, image, instance_type,
reservations=None):
version = '3.0'
instance_type_p = jsonutils.to_primitive(instance_type)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'resize_instance',
instance=instance, migration=migration,
image=image, reservations=reservations,
instance_type=instance_type_p)
def resume_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'resume_instance', instance=instance)
def revert_resize(self, ctxt, instance, migration, host,
reservations=None):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
cctxt.cast(ctxt, 'revert_resize',
instance=instance, migration=migration,
reservations=reservations)
def rollback_live_migration_at_destination(self, ctxt, instance, host,
destroy_disks=True,
migrate_data=None):
self._check_live_migration_api_version(host)
cctxt = self.client.prepare(server=host, version='3.32')
cctxt.cast(ctxt, 'rollback_live_migration_at_destination',
instance=instance,
destroy_disks=destroy_disks, migrate_data=migrate_data)
# NOTE(alaski): Remove this method when the scheduler rpc interface is
# bumped to 4.x as the only callers of this method will be removed.
def run_instance(self, ctxt, instance, host, request_spec,
filter_properties, requested_networks,
injected_files, admin_password,
is_first_time, node=None, legacy_bdm_in_spec=True):
if self.client.can_send_version('3.27'):
version = '3.27'
else:
version = '3.0'
instance = jsonutils.to_primitive(instance)
msg_kwargs = {'instance': instance, 'request_spec': request_spec,
'filter_properties': filter_properties,
'requested_networks': requested_networks,
'injected_files': injected_files,
'admin_password': admin_password,
'is_first_time': is_first_time, 'node': node,
'legacy_bdm_in_spec': legacy_bdm_in_spec}
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'run_instance', **msg_kwargs)
def set_admin_password(self, ctxt, instance, new_pass):
version = '3.8'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'set_admin_password',
instance=instance, new_pass=new_pass)
def set_host_enabled(self, ctxt, enabled, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'set_host_enabled', enabled=enabled)
def swap_volume(self, ctxt, instance, old_volume_id, new_volume_id):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'swap_volume',
instance=instance, old_volume_id=old_volume_id,
new_volume_id=new_volume_id)
def get_host_uptime(self, ctxt, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_host_uptime')
def reserve_block_device_name(self, ctxt, instance, device, volume_id,
disk_bus=None, device_type=None):
kw = {'instance': instance, 'device': device,
'volume_id': volume_id, 'disk_bus': disk_bus,
'device_type': device_type, 'return_bdm_object': True}
if self.client.can_send_version('3.35'):
version = '3.35'
else:
del kw['return_bdm_object']
version = '3.16'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
volume_bdm = cctxt.call(ctxt, 'reserve_block_device_name', **kw)
if not isinstance(volume_bdm, objects.BlockDeviceMapping):
volume_bdm = objects.BlockDeviceMapping.get_by_volume_id(
ctxt, volume_id)
return volume_bdm
def backup_instance(self, ctxt, instance, image_id, backup_type,
rotation):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'backup_instance',
instance=instance,
image_id=image_id,
backup_type=backup_type,
rotation=rotation)
def snapshot_instance(self, ctxt, instance, image_id):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'snapshot_instance',
instance=instance,
image_id=image_id)
def start_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'start_instance', instance=instance)
def stop_instance(self, ctxt, instance, do_cast=True):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
rpc_method = cctxt.cast if do_cast else cctxt.call
return rpc_method(ctxt, 'stop_instance', instance=instance)
def suspend_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'suspend_instance', instance=instance)
def terminate_instance(self, ctxt, instance, bdms, reservations=None):
version = '3.22'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'terminate_instance',
instance=instance, bdms=bdms,
reservations=reservations)
def unpause_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'unpause_instance', instance=instance)
def unrescue_instance(self, ctxt, instance):
version = '3.11'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'unrescue_instance', instance=instance)
def soft_delete_instance(self, ctxt, instance, reservations=None):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'soft_delete_instance',
instance=instance, reservations=reservations)
def restore_instance(self, ctxt, instance):
version = '3.20'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'restore_instance', instance=instance)
def shelve_instance(self, ctxt, instance, image_id=None):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'shelve_instance',
instance=instance, image_id=image_id)
def shelve_offload_instance(self, ctxt, instance):
version = '3.0'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'shelve_offload_instance', instance=instance)
def unshelve_instance(self, ctxt, instance, host, image=None,
filter_properties=None, node=None):
version = '3.15'
msg_kwargs = {
'instance': instance,
'image': image,
'filter_properties': filter_properties,
'node': node,
}
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'unshelve_instance', **msg_kwargs)
def volume_snapshot_create(self, ctxt, instance, volume_id,
create_info):
version = '3.6'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'volume_snapshot_create', instance=instance,
volume_id=volume_id, create_info=create_info)
def volume_snapshot_delete(self, ctxt, instance, volume_id, snapshot_id,
delete_info):
version = '3.6'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'volume_snapshot_delete', instance=instance,
volume_id=volume_id, snapshot_id=snapshot_id,
delete_info=delete_info)
def external_instance_event(self, ctxt, instances, events):
cctxt = self.client.prepare(
server=_compute_host(None, instances[0]),
version='3.23')
cctxt.cast(ctxt, 'external_instance_event', instances=instances,
events=events)
def build_and_run_instance(self, ctxt, instance, host, image, request_spec,
filter_properties, admin_password=None, injected_files=None,
requested_networks=None, security_groups=None,
block_device_mapping=None, node=None, limits=None):
version = '3.33'
if not self.client.can_send_version(version):
version = '3.23'
if requested_networks is not None:
requested_networks = [(network_id, address, port_id)
for (network_id, address, port_id, _) in
requested_networks.as_tuples()]
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'build_and_run_instance', instance=instance,
image=image, request_spec=request_spec,
filter_properties=filter_properties,
admin_password=admin_password,
injected_files=injected_files,
requested_networks=requested_networks,
security_groups=security_groups,
block_device_mapping=block_device_mapping, node=node,
limits=limits)
class SecurityGroupAPI(object):
'''Client side of the security group rpc API.
API version history:
1.0 - Initial version.
1.41 - Adds refresh_instance_security_rules()
2.0 - Remove 1.x backwards compat
3.0 - Identical to 2.x, but has to be bumped at the same time as the
compute API since it's all together on the server side.
'''
def __init__(self):
super(SecurityGroupAPI, self).__init__()
target = messaging.Target(topic=CONF.compute_topic, version='3.0')
version_cap = ComputeAPI.VERSION_ALIASES.get(
CONF.upgrade_levels.compute, CONF.upgrade_levels.compute)
self.client = rpc.get_client(target, version_cap)
def refresh_security_group_rules(self, ctxt, security_group_id, host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_security_group_rules',
security_group_id=security_group_id)
def refresh_security_group_members(self, ctxt, security_group_id,
host):
version = '3.0'
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_security_group_members',
security_group_id=security_group_id)
def refresh_instance_security_rules(self, ctxt, host, instance):
version = '3.0'
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'refresh_instance_security_rules',
instance=instance_p)
|
{
"content_hash": "a171f2752c3eb716f761b45fc01316c8",
"timestamp": "",
"source": "github",
"line_count": 929,
"max_line_length": 79,
"avg_line_length": 46.16899892357374,
"alnum_prop": 0.5984472266909142,
"repo_name": "vmthunder/nova",
"id": "34466ac3de09a78eb6c3274f9518e729d7e50bc0",
"size": "43497",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/compute/rpcapi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import logging
import tempfile
import tvm
import pytest
from tvm.meta_schedule import ReplayTraceConfig, tune_tir
from tvm.meta_schedule.tune_context import TuneContext
from tvm.meta_schedule import schedule_rule, postproc
from tvm.meta_schedule.space_generator import PostOrderApply
from tvm.script import tir as T
from tvm.target.target import Target
from tvm.te.operation import create_prim_func
from tvm.tir import Schedule
from tvm.meta_schedule.testing import te_workload
logging.basicConfig()
logging.getLogger("tvm.meta_schedule").setLevel(logging.DEBUG)
# pylint: disable=no-member,invalid-name,unused-variable
@T.prim_func
def matmul(a: T.handle, b: T.handle, c: T.handle) -> None:
A = T.match_buffer(a, [128, 128])
B = T.match_buffer(b, [128, 128])
C = T.match_buffer(c, [128, 128])
for i, j, k in T.grid(128, 128, 128):
with T.block("update"):
vi, vj, vk = T.axis.remap("SSR", [i, j, k])
with T.init():
C[vi, vj] = 0.0
C[vi, vj] = C[vi, vj] + A[vi, vk] * B[vj, vk]
# pylint: enable=no-member,invalid-name,unused-variable
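# Reference semantics of the TIR matmul above (a sketch, not part of the
# original test): B is indexed as B[vj, vk], so the kernel computes A
# times B-transposed:
#
#     import numpy as np
#     A = np.random.rand(128, 128).astype("float32")
#     B = np.random.rand(128, 128).astype("float32")
#     C = A @ B.T  # C[i, j] = sum_k A[i, k] * B[j, k]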
@pytest.mark.skip("Integration test")
def test_tune_matmul_cpu():
with tempfile.TemporaryDirectory() as work_dir:
sch: Schedule = tune_tir(
mod=matmul,
target=Target("llvm --num-cores=16"),
config=ReplayTraceConfig(
num_trials_per_iter=32,
num_trials_total=32,
),
work_dir=work_dir,
)
if sch is None:
print("No valid schedule found!")
else:
print(sch.mod.script())
print(sch.trace)
@pytest.mark.skip("Integration test")
def test_tune_matmul_cuda():
with tempfile.TemporaryDirectory() as work_dir:
sch: Schedule = tune_tir(
mod=matmul,
target=Target("nvidia/geforce-rtx-3070"),
config=ReplayTraceConfig(
num_trials_per_iter=32,
num_trials_total=32,
),
work_dir=work_dir,
)
if sch is None:
print("No valid schedule found!")
else:
print(sch.mod.script())
print(sch.trace)
@pytest.mark.skip("Integeration test")
def test_tune_matmul_cuda_tensor_core():
n = 512
mod = create_prim_func(te_workload.matmul_fp16(n, n, n))
target = Target("nvidia/geforce-rtx-3070")
config = ReplayTraceConfig(
num_trials_per_iter=32,
num_trials_total=320,
)
class DefaultTensorCore:
@staticmethod
def _sch_rules():
from tvm.meta_schedule import ( # pylint: disable=import-outside-toplevel
schedule_rule as M,
)
return [
M.AutoInline(
into_producer=False,
into_consumer=True,
# into_cache_only=False,
inline_const_tensor=True,
disallow_if_then_else=False,
require_injective=False,
require_ordered=False,
disallow_op=None,
),
M.MultiLevelTiling(
structure="SSSRRSRS",
tile_binds=["blockIdx.x", "blockIdx.y", "threadIdx.y"],
# use_tensor_core=True,
max_innermost_factor=64,
vector_load_lens=[1, 2, 3, 4],
reuse_read=schedule_rule.ReuseType(
req="must",
levels=[4],
scope="shared",
),
reuse_write=schedule_rule.ReuseType(
req="no",
levels=[],
scope="",
),
),
M.AutoInline(
into_producer=True,
into_consumer=True,
# into_cache_only=True,
inline_const_tensor=True,
disallow_if_then_else=False,
require_injective=False,
require_ordered=False,
disallow_op=None,
),
M.ParallelizeVectorizeUnroll(
max_jobs_per_core=-1, # disable parallelize
max_vectorize_extent=-1, # disable vectorize
unroll_max_steps=[0, 16, 64, 512, 1024],
unroll_explicit=True,
),
]
@staticmethod
def _postproc():
from tvm.meta_schedule import ( # pylint: disable=import-outside-toplevel
postproc as M,
)
return [
# M.RewriteCooperativeFetch(),
M.RewriteParallelVectorizeUnroll(),
M.RewriteReductionBlock(),
# M.RewriteTensorCore(),
M.VerifyGPUCode(),
]
with tempfile.TemporaryDirectory() as work_dir:
sch: Schedule = tune_tir(
mod=mod,
target=target,
config=config,
work_dir=work_dir,
space=PostOrderApply(),
sch_rules=DefaultTensorCore._sch_rules,
postprocs=DefaultTensorCore._postproc,
num_threads=None,
)
if sch is None:
print("No valid schedule found!")
else:
print(sch.mod.script())
print(sch.trace)
from tvm.contrib import nvcc
import numpy as np
ctx = tvm.gpu(0)
if nvcc.have_tensorcore(ctx.compute_version):
with tvm.transform.PassContext():
func = tvm.build(sch.mod["main"], [], "cuda")
print(sch.mod.script())
print(func.imported_modules[0].get_source())
a_np = np.random.uniform(size=(n, n)).astype("float16")
b_np = np.random.uniform(size=(n, n)).astype("float16")
a = tvm.nd.array(a_np, ctx)
b = tvm.nd.array(b_np, ctx)
c = tvm.nd.array(np.zeros((n, n), dtype="float32"), ctx)
evaluator = func.time_evaluator(
func.entry_name, ctx, number=3, repeat=1, min_repeat_ms=40
)
print("matmul with tensor core: %f ms" % (evaluator(a, b, c).mean * 1e3))
np.testing.assert_allclose(
c.asnumpy(),
np.matmul(a_np.astype("float32"), b_np.astype("float32")),
rtol=1e-4,
atol=1e-4,
)
if __name__ == "__main__":
test_tune_matmul_cpu()
test_tune_matmul_cuda()
test_tune_matmul_cuda_tensor_core()
|
{
"content_hash": "ee99b4b2153dca72abe76b9b0b787e99",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 89,
"avg_line_length": 33.80099502487562,
"alnum_prop": 0.5042684721813365,
"repo_name": "dmlc/tvm",
"id": "277fa2407bd15f1c190f5c03ee32cf54501205c6",
"size": "7615",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/python/unittest/test_meta_schedule_tune_tir.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "6112"
},
{
"name": "C",
"bytes": "92947"
},
{
"name": "C++",
"bytes": "5765945"
},
{
"name": "CMake",
"bytes": "74045"
},
{
"name": "Go",
"bytes": "112384"
},
{
"name": "HTML",
"bytes": "8625"
},
{
"name": "Java",
"bytes": "171101"
},
{
"name": "JavaScript",
"bytes": "49803"
},
{
"name": "Makefile",
"bytes": "55807"
},
{
"name": "Objective-C",
"bytes": "15241"
},
{
"name": "Objective-C++",
"bytes": "46673"
},
{
"name": "Python",
"bytes": "7183810"
},
{
"name": "Rust",
"bytes": "181961"
},
{
"name": "Scala",
"bytes": "202148"
},
{
"name": "Shell",
"bytes": "97271"
},
{
"name": "Tcl",
"bytes": "53645"
},
{
"name": "Verilog",
"bytes": "30605"
}
],
"symlink_target": ""
}
|
from translate.convert import convert
import os
import sys
from py import test
try:
import psyco
except Exception:
psyco = None
class TestConvertCommand:
"""Tests running actual commands on files"""
convertmodule = convert
defaultoptions = {"progress": "none"}
if psyco:
defaultoptions["psyco"] = "none"
def setup_method(self, method):
"""creates a clean test directory for the given method"""
self.testdir = "%s_%s" % (self.__class__.__name__, method.__name__)
self.cleardir()
os.mkdir(self.testdir)
self.rundir = os.path.abspath(os.getcwd())
def teardown_method(self, method):
"""removes the test directory for the given method"""
os.chdir(self.rundir)
self.cleardir()
def cleardir(self):
"""removes the test directory"""
if os.path.exists(self.testdir):
for dirpath, subdirs, filenames in os.walk(self.testdir, topdown=False):
for name in filenames:
os.remove(os.path.join(dirpath, name))
for name in subdirs:
os.rmdir(os.path.join(dirpath, name))
        if os.path.exists(self.testdir):
            os.rmdir(self.testdir)
assert not os.path.exists(self.testdir)
def run_command(self, *argv, **kwargs):
"""runs the command via the main function, passing self.defaultoptions and keyword arguments as --long options and argv arguments straight"""
os.chdir(self.testdir)
argv = list(argv)
kwoptions = getattr(self, "defaultoptions", {}).copy()
kwoptions.update(kwargs)
for key, value in kwoptions.iteritems():
if value is True:
argv.append("--%s" % key)
else:
argv.append("--%s=%s" % (key, value))
try:
self.convertmodule.main(argv)
finally:
os.chdir(self.rundir)
def get_testfilename(self, filename):
"""gets the path to the test file"""
return os.path.join(self.testdir, filename)
def open_testfile(self, filename, mode="r"):
"""opens the given filename in the testdirectory in the given mode"""
filename = self.get_testfilename(filename)
if not mode.startswith("r"):
subdir = os.path.dirname(filename)
currentpath = ""
if not os.path.isdir(subdir):
for part in subdir.split(os.sep):
currentpath = os.path.join(currentpath, part)
if not os.path.isdir(currentpath):
os.mkdir(currentpath)
return open(filename, mode)
def create_testfile(self, filename, contents):
"""creates the given file in the testdirectory with the given contents"""
testfile = self.open_testfile(filename, "w")
testfile.write(contents)
def read_testfile(self, filename):
"""reads the given file in the testdirectory and returns the contents"""
testfile = open(self.get_testfilename(filename))
return testfile.read()
def help_check(self, options, option, last=False):
"""check that a help string occurs and remove it"""
assert option in options
newoptions = []
for line in options.splitlines():
if option in line or not line.lstrip().startswith("-"):
continue
newoptions.append(line)
if last:
assert newoptions == []
return "\n".join(newoptions)
def test_help(self):
"""tests getting help (returning the help_string so further tests can be done)"""
stdout = sys.stdout
helpfile = self.open_testfile("help.txt", "w")
sys.stdout = helpfile
try:
test.raises(SystemExit, self.run_command, help=True)
finally:
sys.stdout = stdout
helpfile.close()
help_string = self.read_testfile("help.txt")
print help_string
convertsummary = self.convertmodule.__doc__.split("\n")[0]
# the convertsummary might be wrapped. this will probably unwrap it
assert convertsummary in help_string.replace("\n", " ")
usageline = help_string[:help_string.find("\n")]
# Different versions of optparse might contain either upper or
# lowercase versions of 'Usage:' and 'Options:', so we need to take
# that into account
assert (usageline.startswith("Usage: ") or usageline.startswith("usage: ")) \
and "[--version] [-h|--help]" in usageline
options = help_string[help_string.find("ptions:\n"):]
options = options[options.find("\n")+1:]
options = self.help_check(options, "--progress=PROGRESS")
options = self.help_check(options, "--version")
options = self.help_check(options, "-h, --help")
options = self.help_check(options, "--manpage")
options = self.help_check(options, "--errorlevel=ERRORLEVEL")
if psyco:
options = self.help_check(options, "--psyco=MODE")
options = self.help_check(options, "-i INPUT, --input=INPUT")
options = self.help_check(options, "-x EXCLUDE, --exclude=EXCLUDE")
options = self.help_check(options, "-o OUTPUT, --output=OUTPUT")
return options
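# Usage sketch (not part of the original module; the po2prop converter and
# its --template option are illustrative): concrete converter tests
# subclass TestConvertCommand, point convertmodule at a converter, and
# extend test_help() with their own help_check() calls:
#
#     from translate.convert import po2prop
#
#     class TestPO2Prop(TestConvertCommand):
#         convertmodule = po2prop
#
#         def test_help(self):
#             options = TestConvertCommand.test_help(self)
#             options = self.help_check(options,
#                                       "-t TEMPLATE, --template=TEMPLATE",
#                                       last=True)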
|
{
"content_hash": "bf59eefda939db847b3bba399d5a840b",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 149,
"avg_line_length": 41.04651162790697,
"alnum_prop": 0.5998111425873466,
"repo_name": "dbbhattacharya/kitsune",
"id": "31c1b6694105f3b6d1d866e328222bd5dc752563",
"size": "5342",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "vendor/packages/translate-toolkit/translate/convert/test_convert.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "276585"
},
{
"name": "HTML",
"bytes": "600145"
},
{
"name": "JavaScript",
"bytes": "800276"
},
{
"name": "Python",
"bytes": "2762831"
},
{
"name": "Shell",
"bytes": "6720"
},
{
"name": "Smarty",
"bytes": "1752"
}
],
"symlink_target": ""
}
|
"""
scitran.data.medimg.montage
===========================
Montage provides montage writing capabilities for MR datasets read by any subclass of MedImgReader.
Provides a MedImgWriter subclass for creating image pyramids.
"""
import os
import json
import math
import logging
import zipfile
import cStringIO
import numpy as np
from PIL import Image
import medimg
log = logging.getLogger(__name__)
class MontageError(Exception):
    """Raised when montage or pyramid generation fails.
    (Minimal stand-in definition: this class is referenced below but not
    defined in this excerpt; the original presumably derives from a
    package-specific error type.)
    """
    pass
def generate_montage(imagedata, timepoints=[], bits16=False):
"""Generate a montage."""
# Figure out the image dimensions and make an appropriate montage.
# NIfTI images can have up to 7 dimensions. The fourth dimension is
# by convention always supposed to be time, so some images (RGB, vector, tensor)
# will have 5 dimensions with a single 4th dimension. For our purposes, we
# can usually just collapse all dimensions above the 3rd.
# TODO: we should handle data_type = RGB as a special case.
# TODO: should we use the scaled data (getScaledData())? (We do some auto-windowing below)
# This transpose (usually) makes the resulting images come out in a more standard orientation.
# TODO: we could look at the qto_xyz to infer the optimal transpose for any dataset.
data = imagedata.transpose(np.concatenate(([1, 0], range(2, imagedata.ndim))))
num_images = np.prod(data.shape[2:])
if data.ndim < 2:
raise MontageError('NIfTI file must have at least 2 dimensions')
elif data.ndim == 2:
# a single slice: no need to do anything
num_cols = 1
data = np.atleast_3d(data)
elif data.ndim == 3:
# a simple (x, y, z) volume- set num_cols to produce a square(ish) montage.
rows_to_cols_ratio = float(data.shape[0])/float(data.shape[1])
num_cols = int(math.ceil(math.sqrt(float(num_images)) * math.sqrt(rows_to_cols_ratio)))
elif data.ndim >= 4:
# timeseries (x, y, z, t) or more
num_cols = data.shape[2]
data = data.transpose(np.concatenate(([0, 1, 3, 2], range(4, data.ndim)))).reshape(data.shape[0], data.shape[1], num_images)
if len(timepoints) > 0:
data = data[..., timepoints]
num_rows = int(np.ceil(float(data.shape[2])/float(num_cols)))
montage = np.zeros((data.shape[0] * num_rows, data.shape[1] * num_cols), dtype=data.dtype)
for im_num in range(data.shape[2]):
slice_r, slice_c = im_num / num_cols * data.shape[0], im_num % num_cols * data.shape[1]
montage[slice_r:slice_r + data.shape[0], slice_c:slice_c + data.shape[1]] = data[:, :, im_num]
# montage = montage.copy() # is this necessary? need a deep copy?
    if montage.dtype == np.uint8 and bits16:
        montage = np.cast['uint16'](montage)  # promote the assembled 2-D montage into a 16-bit container
elif montage.dtype != np.uint8 or (montage.dtype != np.uint16 and bits16):
montage = montage.astype(np.float32) # do scaling/clipping with floats
clip_vals = np.percentile(montage, (20.0, 99.0)) # auto-window the data by clipping
montage = montage.clip(clip_vals[0], clip_vals[1]) - clip_vals[0]
if bits16:
montage = np.cast['uint16'](np.round(montage/(clip_vals[1]-clip_vals[0])*65535))
else:
montage = np.cast['uint8'](np.round(montage/(clip_vals[1]-clip_vals[0])*255.0))
return montage
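# Worked example for the grid computation above (comments only, not part
# of the original module): a (128, 128, 20) volume gives num_images = 20
# and rows_to_cols_ratio = 1.0, so num_cols = ceil(sqrt(20)) = 5 and
# num_rows = ceil(20 / 5) = 4, i.e. a 4x5 grid of 128x128 slices and a
# montage of shape (512, 640).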
def generate_pyramid(montage, tile_size):
"""
Slice up a NIfTI file into a multi-res pyramid of tiles.
We use the file name convention suitable for d3tiles
The zoom level (z) is an integer between 0 and n, where 0 is fully zoomed out and n is zoomed in.
E.g., z=0 is for 1 tile covering the whole world, z=1 is for 2x2=4 tiles, ... z=n is the original resolution.
"""
montage_image = Image.fromarray(montage, 'L')
montage_image = montage_image.crop(montage_image.getbbox()) # crop away edges that contain only zeros
sx, sy = montage_image.size
if sx * sy < 1:
raise MontageError('degenerate image size (%d, %d): no tiles will be created' % (sx, sy))
if sx < tile_size and sy < tile_size: # Panojs chokes if the lowest res image is smaller than the tile size.
tile_size = max(sx, sy)
pyramid = {}
pyramid_meta = {
'tile_size': tile_size,
'mimetype': 'image/jpeg',
'real_size': montage_image.size,
'zoom_levels': {},
}
divs = max(1, int(np.ceil(np.log2(float(max(sx, sy))/tile_size))) + 1)
for z in range(divs):
# flip the z label to be d3 friendly
level = (divs - 1) - z
ysize = int(round(float(sy)/pow(2, z)))
xsize = int(round(float(ysize)/sy*sx))
xpieces = int(math.ceil(float(xsize)/tile_size))
ypieces = int(math.ceil(float(ysize)/tile_size))
log.debug('level %s, size %dx%d, splits %d,%d' % (level, xsize, ysize, xpieces, ypieces))
# TODO: we don't need to use 'thumbnail' here. This function always returns a square
# image of the requested size, padding and scaling as needed. Instead, we should resize
# and chop the image up, with no padding, ever. panojs can handle non-square images
# at the edges, so the padding is unnecessary and, in fact, a little wrong.
im = montage_image.copy()
im.thumbnail([xsize, ysize], Image.ANTIALIAS)
im = im.convert('L') # convert to grayscale
for x in range(xpieces):
for y in range(ypieces):
tile = im.copy().crop((x*tile_size, y*tile_size, min((x+1)*tile_size, xsize), min((y+1)*tile_size, ysize)))
log.debug(tile.size)
if tile.size != (tile_size, tile_size):
log.debug('tile is not square...padding')
                    background = Image.new('L', (tile_size, tile_size), 'white')  # pad with white; Image.new defaults to black
background.paste(tile, (0, 0))
tile = background
buf = cStringIO.StringIO()
tile.save(buf, 'JPEG', quality=85)
pyramid[(level, x, y)] = buf
pyramid_meta['zoom_levels'][level] = (xpieces, ypieces)
return pyramid, montage_image.size, pyramid_meta
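# A minimal sketch (not part of the original module) of the zoom-level count
# used above: one level per power-of-two reduction until the longest side
# fits within a single tile, plus the full-resolution level. The helper name
# is illustrative only.
def _example_zoom_levels(sx, sy, tile_size=256):
    """Return the number of pyramid levels generate_pyramid would build."""
    return max(1, int(np.ceil(np.log2(float(max(sx, sy)) / tile_size))) + 1)
# e.g. a 2048x1024 montage with 256-px tiles has ceil(log2(8)) + 1 = 4 levels.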
def generate_dir_pyr(imagedata, outbase, tile_size=256):
"""Generate a panojs image pyramid directory."""
montage = generate_montage(imagedata)
pyramid, pyramid_size, pyramid_meta = generate_pyramid(montage, tile_size)
# write directory pyramid
image_path = os.path.join(outbase, 'images')
if not os.path.exists(image_path):
os.makedirs(image_path)
for idx, tile_buf in pyramid.iteritems():
with open(os.path.join(image_path, ('%03d_%03d_%03d.jpg' % idx)), 'wb') as fp:
fp.write(tile_buf.getvalue())
# check for one image, pyramid file
if not os.path.exists(os.path.join(outbase, 'images', '000_000_000.jpg')):
        raise MontageError('pyramid tile 000_000_000.jpg not generated')
else:
log.debug('generated %s' % outbase)
return outbase
def generate_zip_pyr(imagedata, outbase, tile_size=256):
montage = generate_montage(imagedata)
pyramid, pyramid_size, pyramid_meta = generate_pyramid(montage, tile_size)
zip_name = outbase + '.zip'
with zipfile.ZipFile(zip_name, 'w', compression=zipfile.ZIP_STORED) as zf:
pyramid_meta['dirname'] = os.path.basename(outbase)
zf.comment = json.dumps(pyramid_meta)
montage_jpeg = os.path.join(os.path.basename(outbase), 'montage.jpeg')
buf = cStringIO.StringIO()
Image.fromarray(montage).convert('L').save(buf, format='JPEG', optimize=True)
zf.writestr(montage_jpeg, buf.getvalue())
for idx, tile_buf in pyramid.iteritems():
tilename = 'z%03d/x%03d_y%03d.jpg' % idx
arcname = os.path.join(os.path.basename(outbase), tilename)
zf.writestr(arcname, tile_buf.getvalue())
return zip_name
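# A minimal reader sketch (not part of the original module) for the zip
# layout produced above: tiles live under '<dirname>/z%03d/x%03d_y%03d.jpg'
# and the pyramid metadata is stored as JSON in the zip comment. The helper
# name is illustrative only.
def _example_read_tile(zip_name, level, x, y):
    with zipfile.ZipFile(zip_name) as zf:
        meta = json.loads(zf.comment)
        arcname = '%s/z%03d/x%03d_y%03d.jpg' % (meta['dirname'], level, x, y)
        return meta, zf.read(arcname)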
def generate_flat(imagedata, filepath):
"""Generate a flat png montage."""
montage = generate_montage(imagedata)
Image.fromarray(montage).convert('L').save(filepath, optimize=True)
if not os.path.exists(filepath):
raise MontageError('montage (flat png) not generated')
else:
log.debug('generated %s' % os.path.basename(filepath))
return filepath
class MontageError(medimg.MedImgError):
pass
class Montage(medimg.MedImgReader, medimg.MedImgWriter):
domain = u'mr'
filetype = u'montage'
state = ['orig']
def __init__(self, filepath, load_data=False):
super(Montage, self).__init__(filepath, load_data=load_data)
self.data = None # contains montage
if load_data:
self.load_data(preloaded=True)
def load_data(self, preloaded=False):
super(Montage, self).load_data(preloaded=preloaded)
        log.debug('loading %s' % self.filepath)
if not preloaded:
# read the data
pass
    def get_tile(self, x, y, z):
        return get_tile(self.filepath, x, y, z)
    def get_info(self):
        return get_info(self.filepath)
@classmethod
def write(cls, metadata, imagedata, outbase, voxel_order='LPS', mtype='zip', tilesize=256, multi=False):
"""
Write the metadata and imagedata to image montage pyramid.
Parameters
----------
metadata : object
fully loaded instance of a Reader.
        imagedata : dict
            dictionary of np.ndarrays, with label suffixes as keys.
        outbase : str
            output name prefix.
        voxel_order : str [default 'LPS']
            three character string indicating the voxel order, ex. 'LPS'.
        mtype : str [default 'zip']
            type of montage to create. can be 'zip', 'dir', or 'png'.
        tilesize : int [default 256]
            tilesize for a generated zip or directory pyramid. Has no effect on mtype 'png'.
multi : bool [default False]
True indicates to write multiple files. False only writes primary data in imagedata['']
Returns
-------
results : list
list of files written.
Raises
------
DataError
metadata or data is None.
"""
super(Montage, cls).write(metadata, imagedata, outbase, voxel_order)
results = []
for data_label, data in imagedata.iteritems():
            if not multi and data_label != '':
                continue
            if data is None:
                continue
outname = outbase + data_label
if voxel_order:
data, _ = cls.reorder_voxels(data, metadata.qto_xyz, voxel_order)
if mtype == 'png':
log.debug('type: flat png')
result = generate_flat(data, outname + '.png')
elif mtype == 'dir':
log.debug('type: directory')
result = generate_dir_pyr(data, outname, tilesize)
elif mtype == 'zip':
log.debug('type: zip of tiles')
result = generate_zip_pyr(data, outname, tilesize)
else:
                raise MontageError("montage mtype must be 'zip', 'dir' or 'png', not %s" % mtype)
results.append(result)
return results
write = Montage.write
|
{
"content_hash": "5ee3ddcecf8afb8de84b4a2ea66d1243",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 132,
"avg_line_length": 41.227106227106226,
"alnum_prop": 0.6127054642381163,
"repo_name": "scitran/data",
"id": "4fe7973054dac7f4c84b11e7a8f121eeea868065",
"size": "11335",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scitran/data/medimg/montage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "223864"
},
{
"name": "Shell",
"bytes": "86"
}
],
"symlink_target": ""
}
|
"""Deepstream event handling."""
from __future__ import absolute_import, division, print_function, with_statement
from __future__ import unicode_literals
from deepstreampy.constants import actions
from deepstreampy.constants import topic as topic_constants
from deepstreampy.constants import event as event_constants
from deepstreampy.message import message_parser
from deepstreampy.message import message_builder
from deepstreampy.utils import Listener
from deepstreampy.utils import AckTimeoutRegistry
from deepstreampy.utils import ResubscribeNotifier
from tornado import concurrent
from pyee import EventEmitter
class EventHandler(object):
"""Handles incoming and outgoing messages related to deepstream events.
"""
def __init__(self, connection, client, **options):
self._options = options
self._connection = connection
self._client = client
self._emitter = EventEmitter()
self._listener = {}
subscription_timeout = options.get("subscriptionTimeout", 15)
self._ack_timeout_registry = AckTimeoutRegistry(client,
topic_constants.EVENT,
subscription_timeout)
self._resubscribe_notifier = ResubscribeNotifier(client,
self._resubscribe)
def subscribe(self, name, callback):
"""Subscribe to an event.
        Adds a callback for both locally emitted events as well as events
        emitted by other clients.
Args:
name (str): The name of the event.
callback (callable): The function to call when an event is received.
"""
future = None
if not self._emitter.listeners(name):
self._ack_timeout_registry.add(name, actions.SUBSCRIBE)
future = self._connection.send_message(topic_constants.EVENT,
actions.SUBSCRIBE,
[name])
else:
future = concurrent.Future()
future.set_result(None)
self._emitter.on(name, callback)
return future
def unsubscribe(self, name, callback):
"""Unsubscribe from an event.
Removes the callback for the specified event, and notifies the server
of the change.
Args:
name (str): The name of the event
callback (callable): The callback to remove
"""
self._emitter.remove_listener(name, callback)
if not self._emitter.listeners(name):
self._ack_timeout_registry.add(name, actions.UNSUBSCRIBE)
return self._connection.send_message(topic_constants.EVENT,
actions.UNSUBSCRIBE,
[name])
future = concurrent.Future()
future.set_result(None)
return future
def emit(self, name, data):
"""Emit an event locally, and tell the server to broadcast it.
Other connected clients will also receive the event.
Args:
name (str): The name of the event.
data: JSON serializable data to send along with the event.
"""
future = self._connection.send_message(
topic_constants.EVENT, actions.EVENT,
[name, message_builder.typed(data)])
self._emitter.emit(name, data)
return future
def listen(self, pattern, callback):
"""Register as listener for event subscriptions from other clients.
Args:
pattern (str): Regular expression pattern to match subscriptions to
callback (callable): A function that will be called when an event
has been initially subscribed to or is no longer subscribed.
Expects the following arguments:
event_name (str)
is_subscribed (bool)
response (callable, callable)
"""
if (pattern in self._listener and
not self._listener[pattern].destroy_pending):
self._client._on_error(topic_constants.EVENT,
event_constants.LISTENER_EXISTS,
pattern)
future = concurrent.Future()
future.set_result(None)
return future
elif pattern in self._listener:
self._listener[pattern].destroy()
listener = Listener(topic_constants.EVENT,
pattern,
callback,
self._options,
self._client,
self._connection)
self._listener[pattern] = listener
return listener.send_future
def unlisten(self, pattern):
"""Stop listening to the specified pattern.
Remove a previously registered listening pattern. The client will no
longer be listening for active/inactive subscriptions.
Args:
pattern: The regular expression pattern to remove
"""
if pattern not in self._listener:
self._client._on_error(topic_constants.ERROR,
event_constants.NOT_LISTENING,
pattern)
future = concurrent.Future()
future.set_result(None)
return future
listener = self._listener[pattern]
if not listener.destroy_pending:
listener.send_destroy()
else:
self._ack_timeout_registry.add(pattern, actions.UNLISTEN)
listener.destroy()
del self._listener[pattern]
return listener.send_future
def handle(self, message):
action = message['action']
data = message['data']
if action == actions.ACK:
name = message['data'][1]
else:
name = message['data'][0]
if action == actions.EVENT:
if data and len(data) == 2:
self._emitter.emit(
name, message_parser.convert_typed(data[1], self._client))
else:
self._emitter.emit(name)
return
if (action == actions.ACK and data[0] == actions.UNLISTEN and
(name in self._listener) and
self._listener[name].destroy_pending):
self._listener[name].destroy()
del self._listener[name]
return
elif name in self._listener:
self._listener[name]._on_message(message)
return
elif action in (actions.SUBSCRIPTION_FOR_PATTERN_REMOVED,
actions.SUBSCRIPTION_HAS_PROVIDER):
return
if action == actions.ACK:
self._ack_timeout_registry.clear(message)
return
if action == actions.ERROR:
if data[0] == event_constants.MESSAGE_DENIED:
self._ack_timeout_registry.remove(message['data'][1],
message['data'][2])
elif data[0] == event_constants.NOT_SUBSCRIBED:
self._ack_timeout_registry.remove(message['data'][1],
actions.UNSUBSCRIBE)
message['processedError'] = True
self._client._on_error(topic_constants.EVENT, data[0], data[1])
return
self._client._on_error(topic_constants.EVENT,
event_constants.UNSOLICITED_MESSAGE,
name)
def _resubscribe(self):
for event in self._emitter._events:
self._connection.send_message(topic_constants.EVENT,
actions.SUBSCRIBE,
[event])
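# A minimal usage sketch (not part of the original module). ``client`` is
# assumed to be a connected deepstream client that exposes an instance of
# this EventHandler as ``client.event``; the attribute name and the event
# name are illustrative only.
def _example_usage(client):
    def on_news(data):
        print('received: {0}'.format(data))
    client.event.subscribe(u'news', on_news)
    client.event.emit(u'news', {u'headline': u'hello'})
    client.event.unsubscribe(u'news', on_news)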
|
{
"content_hash": "5cd5f2c12a282ec962e20b92ecec9b35",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 80,
"avg_line_length": 35.455357142857146,
"alnum_prop": 0.5507428859229413,
"repo_name": "YavorPaunov/deepstreampy",
"id": "1bbc16675a2f5aea15cb82cb075fde95938a9438",
"size": "7942",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "deepstreampy/event.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "208710"
}
],
"symlink_target": ""
}
|
"""
rstblog.modules.tags
~~~~~~~~~~~~~~~~~~~~
Implements tagging.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from math import log
from urlparse import urljoin
from jinja2 import contextfunction
from werkzeug.contrib.atom import AtomFeed
from rstblog.signals import after_file_published, \
before_build_finished
class Tag(object):
def __init__(self, name, count):
self.name = name
self.count = count
self.size = 100 + log(count or 1) * 20
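# A short illustration (not part of the original module) of the logarithmic
# tag-cloud scaling above: a tag used once renders at size 100, one used 5
# times at about 132, and one used 20 times at about 160.
def _example_tag_sizes():
    """Illustrative only: show how tag size grows with use count."""
    return [(count, Tag('python', count).size) for count in (1, 5, 20)]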
@contextfunction
def get_tags(context, limit=50):
tags = get_tag_summary(context['builder'])
if limit:
tags.sort(key=lambda x: -x.count)
tags = tags[:limit]
tags.sort(key=lambda x: x.name.lower())
return tags
def get_tag_summary(builder):
storage = builder.get_storage('tags')
by_tag = storage.get('by_tag', {})
result = []
for tag, tagged in by_tag.iteritems():
result.append(Tag(tag, len(tagged)))
result.sort(key=lambda x: x.count)
return result
def get_tagged_entries(builder, tag):
if isinstance(tag, Tag):
tag = tag.name
storage = builder.get_storage('tags')
by_tag = storage.get('by_tag', {})
return by_tag.get(tag) or []
def remember_tags(context):
tags = context.config.merged_get('tags') or []
storage = context.builder.get_storage('tags')
by_file = storage.setdefault('by_file', {})
by_file[context.source_filename] = tags
by_tag = storage.setdefault('by_tag', {})
for tag in tags:
by_tag.setdefault(tag, []).append(context)
context.tags = frozenset(tags)
def write_tagcloud_page(builder):
with builder.open_link_file('tagcloud') as f:
rv = builder.render_template('tagcloud.html')
f.write(rv.encode('utf-8') + '\n')
def write_tag_feed(builder, tag):
blog_author = builder.config.root_get('author')
url = builder.config.root_get('canonical_url') or 'http://localhost/'
name = builder.config.get('feed.name') or u'Recent Blog Posts'
subtitle = builder.config.get('feed.subtitle') or u'Recent blog posts'
feed = AtomFeed(name,
subtitle=subtitle,
feed_url=urljoin(url, builder.link_to('blog_feed')),
url=url)
for entry in get_tagged_entries(builder, tag)[:10]:
feed.add(entry.title, unicode(entry.render_contents()),
content_type='html', author=blog_author,
url=urljoin(url, entry.slug),
updated=entry.pub_date)
with builder.open_link_file('tagfeed', tag=tag.name) as f:
f.write(feed.to_string().encode('utf-8') + '\n')
def write_tag_page(builder, tag):
entries = get_tagged_entries(builder, tag)
entries.sort(key=lambda x: (x.title or '').lower())
with builder.open_link_file('tag', tag=tag.name) as f:
rv = builder.render_template('tag.html', {
'tag': tag,
'entries': entries
})
f.write(rv.encode('utf-8') + '\n')
def write_tag_files(builder):
write_tagcloud_page(builder)
for tag in get_tag_summary(builder):
write_tag_page(builder, tag)
write_tag_feed(builder, tag)
def setup(builder):
after_file_published.connect(remember_tags)
before_build_finished.connect(write_tag_files)
builder.register_url('tag', config_key='modules.tags.tag_url',
config_default='/tags/<tag>/')
builder.register_url('tagfeed', config_key='modules.tags.tag_feed_url',
config_default='/tags/<tag>/feed.atom')
builder.register_url('tagcloud', config_key='modules.tags.cloud_url',
config_default='/tags/')
builder.jinja_env.globals['get_tags'] = get_tags
|
{
"content_hash": "c2de0071589f37b975c8de63e9decd55",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 75,
"avg_line_length": 31.84873949579832,
"alnum_prop": 0.6163588390501319,
"repo_name": "mitsuhiko/rstblog",
"id": "ec10242ea664c2b93dc351d0ca3c6d2f67cc426e",
"size": "3814",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rstblog/modules/tags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "49930"
}
],
"symlink_target": ""
}
|
from autobahn.twisted.wamp import ApplicationSession
from twisted.internet.defer import inlineCallbacks
class Authorizer(ApplicationSession):
def onConnect(self):
self.join(self.config.realm, [u'ticket'], u'authorizer')
def onChallenge(self, challenge):
if challenge.method == u'ticket':
return u'secret123'
else:
raise Exception('Invalid authmethod {}'.format(challenge.method))
@inlineCallbacks
def onJoin(self, details):
yield self.register(self.authorize, u'com.example.auth')
def authorize(self, session, uri, action):
self.log.info('authorize: session={session}, uri={uri}, action={action}', session=session, uri=uri, action=action)
return True
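# A minimal runner sketch (not part of the original file), assuming a local
# Crossbar router; the URL and realm are illustrative only.
if __name__ == '__main__':
    from autobahn.twisted.wamp import ApplicationRunner
    runner = ApplicationRunner(u'ws://127.0.0.1:8080/ws', u'realm1')
    runner.run(Authorizer)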
|
{
"content_hash": "9e1c11001aa67247a08052a3627ab415",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 122,
"avg_line_length": 35.61904761904762,
"alnum_prop": 0.6831550802139037,
"repo_name": "CSF-JH/crossbarexamples",
"id": "843552a1a10d8b7b978c67d36011624117003c49",
"size": "748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "authorization/dynamic/container/authorizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "34842"
},
{
"name": "Batchfile",
"bytes": "5120"
},
{
"name": "C#",
"bytes": "7363"
},
{
"name": "C++",
"bytes": "21426"
},
{
"name": "CSS",
"bytes": "596166"
},
{
"name": "Erlang",
"bytes": "7903"
},
{
"name": "HTML",
"bytes": "1618067"
},
{
"name": "Java",
"bytes": "10442"
},
{
"name": "JavaScript",
"bytes": "1584356"
},
{
"name": "Lua",
"bytes": "1233"
},
{
"name": "Makefile",
"bytes": "18676"
},
{
"name": "PHP",
"bytes": "45760"
},
{
"name": "PLSQL",
"bytes": "401670"
},
{
"name": "Python",
"bytes": "278547"
},
{
"name": "SQLPL",
"bytes": "6303"
},
{
"name": "Shell",
"bytes": "729"
}
],
"symlink_target": ""
}
|
"""
Base codec functions for bson.
"""
import struct
import cStringIO
import calendar, pytz
from datetime import datetime
import warnings
from abc import ABCMeta, abstractmethod
# {{{ Error Classes
class MissingClassDefinition(ValueError):
def __init__(self, class_name):
super(MissingClassDefinition, self).__init__(
"No class definition for class %s" % (class_name,))
class UnknownSerializerError(ValueError):
pass
# }}}
# {{{ Warning Classes
class MissingTimezoneWarning(RuntimeWarning):
def __init__(self, *args):
args = list(args)
if len(args) < 1:
args.append("Input datetime object has no tzinfo, assuming UTC.")
super(MissingTimezoneWarning, self).__init__(*args)
# }}}
# {{{ Traversal Step
class TraversalStep(object):
def __init__(self, parent, key):
self.parent = parent
self.key = key
# }}}
# {{{ Custom Object Codec
class BSONCoding(object):
__metaclass__ = ABCMeta
@abstractmethod
def bson_encode(self):
pass
@abstractmethod
def bson_init(self, raw_values):
pass
classes = {}
def import_class(cls):
if not issubclass(cls, BSONCoding):
return
global classes
classes[cls.__name__] = cls
def import_classes(*args):
for cls in args:
import_class(cls)
def import_classes_from_modules(*args):
    for module in args:
        for item in module.__dict__.itervalues():
            if hasattr(item, "__new__") and hasattr(item, "__name__"):
                import_class(item)
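# An illustrative BSONCoding subclass (not part of the original module).
# It must be registered, e.g. via import_class(_ExamplePoint), before
# decode_object() can reconstruct it from the "$$__CLASS_NAME__$$" marker.
class _ExamplePoint(BSONCoding):
    def __init__(self, x=0, y=0):
        self.x, self.y = x, y
    def bson_encode(self):
        return {"x": self.x, "y": self.y}
    def bson_init(self, raw_values):
        self.x = raw_values["x"]
        self.y = raw_values["y"]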
def encode_object(obj, traversal_stack, generator_func, on_unknown=None):
values = obj.bson_encode()
class_name = obj.__class__.__name__
values["$$__CLASS_NAME__$$"] = class_name
return encode_document(values, traversal_stack, obj, generator_func, on_unknown)
def encode_object_element(name, value, traversal_stack, generator_func, on_unknown):
return "\x03" + encode_cstring(name) + \
encode_object(value, traversal_stack,
generator_func = generator_func,
on_unknown = on_unknown)
class _EmptyClass(object):
pass
def decode_object(raw_values):
global classes
class_name = raw_values["$$__CLASS_NAME__$$"]
cls = None
    try:
        cls = classes[class_name]
    except KeyError:
        raise MissingClassDefinition(class_name)
retval = _EmptyClass()
retval.__class__ = cls
alt_retval = retval.bson_init(raw_values)
return alt_retval or retval
# }}}
# {{{ Codec Logic
def encode_string(value):
value = value.encode("utf8")
length = len(value)
return struct.pack("<i%dsb" % (length,), length + 1, value, 0)
def decode_string(data, base):
length = struct.unpack("<i", data[base:base + 4])[0]
value = data[base + 4: base + 4 + length - 1]
value = value.decode("utf8")
return (base + 4 + length, value)
def encode_cstring(value):
if isinstance(value, unicode):
value = value.encode("utf8")
return value + "\x00"
def decode_cstring(data, base):
length = 0
max_length = len(data) - base
while length < max_length:
character = data[base + length]
length += 1
if character == "\x00":
break
return (base + length, data[base:base + length - 1].decode("utf8"))
def encode_binary(value):
length = len(value)
return struct.pack("<ib", length, 0) + value
def decode_binary(data, base):
length, binary_type = struct.unpack("<ib", data[base:base + 5])
return (base + 5 + length, data[base + 5:base + 5 + length])
def encode_double(value):
return struct.pack("<d", value)
def decode_double(data, base):
return (base + 8, struct.unpack("<d", data[base: base + 8])[0])
ELEMENT_TYPES = {
0x01 : "double",
0x02 : "string",
0x03 : "document",
0x04 : "array",
0x05 : "binary",
0x08 : "boolean",
0x09 : "UTCdatetime",
0x0A : "none",
0x10 : "int32",
0x12 : "int64"
}
def encode_double_element(name, value):
return "\x01" + encode_cstring(name) + encode_double(value)
def decode_double_element(data, base):
base, name = decode_cstring(data, base + 1)
base, value = decode_double(data, base)
return (base, name, value)
def encode_string_element(name, value):
return "\x02" + encode_cstring(name) + encode_string(value)
def decode_string_element(data, base):
base, name = decode_cstring(data, base + 1)
base, value = decode_string(data, base)
return (base, name, value)
def encode_value(name, value, buf, traversal_stack, generator_func,
on_unknown = None):
if isinstance(value, BSONCoding):
buf.write(encode_object_element(name, value, traversal_stack,
generator_func, on_unknown))
elif isinstance(value, float):
buf.write(encode_double_element(name, value))
elif isinstance(value, unicode):
buf.write(encode_string_element(name, value))
elif isinstance(value, dict):
buf.write(encode_document_element(name, value,
traversal_stack, generator_func, on_unknown))
elif isinstance(value, list) or isinstance(value, tuple):
buf.write(encode_array_element(name, value,
traversal_stack, generator_func, on_unknown))
elif isinstance(value, str):
buf.write(encode_binary_element(name, value))
elif isinstance(value, bool):
buf.write(encode_boolean_element(name, value))
elif isinstance(value, datetime):
buf.write(encode_UTCdatetime_element(name, value))
elif value is None:
buf.write(encode_none_element(name, value))
elif isinstance(value, int):
if value < -0x80000000 or value > 0x7fffffff:
buf.write(encode_int64_element(name, value))
else:
buf.write(encode_int32_element(name, value))
elif isinstance(value, long):
buf.write(encode_int64_element(name, value))
else:
if on_unknown is not None:
encode_value(name, on_unknown(value), buf, traversal_stack,
generator_func, on_unknown)
else:
raise UnknownSerializerError()
def encode_document(obj, traversal_stack,
traversal_parent = None,
generator_func = None,
on_unknown = None):
buf = cStringIO.StringIO()
key_iter = obj.iterkeys()
if generator_func is not None:
key_iter = generator_func(obj, traversal_stack)
for name in key_iter:
value = obj[name]
traversal_stack.append(TraversalStep(traversal_parent or obj, name))
encode_value(name, value, buf, traversal_stack, generator_func,
on_unknown)
traversal_stack.pop()
e_list = buf.getvalue()
e_list_length = len(e_list)
return struct.pack("<i%dsb" % (e_list_length,), e_list_length + 4 + 1,
e_list, 0)
def encode_array(array, traversal_stack,
traversal_parent = None,
generator_func = None,
on_unknown = None):
buf = cStringIO.StringIO()
for i in xrange(0, len(array)):
value = array[i]
traversal_stack.append(TraversalStep(traversal_parent or array, i))
encode_value(unicode(i), value, buf, traversal_stack, generator_func,
on_unknown)
traversal_stack.pop()
e_list = buf.getvalue()
e_list_length = len(e_list)
return struct.pack("<i%dsb" % (e_list_length,), e_list_length + 4 + 1,
e_list, 0)
def decode_element(data, base):
element_type = struct.unpack("<b", data[base:base + 1])[0]
element_description = ELEMENT_TYPES[element_type]
decode_func = globals()["decode_" + element_description + "_element"]
return decode_func(data, base)
def decode_document(data, base):
length = struct.unpack("<i", data[base:base + 4])[0]
end_point = base + length
base += 4
retval = {}
while base < end_point - 1:
base, name, value = decode_element(data, base)
retval[name] = value
if "$$__CLASS_NAME__$$" in retval:
retval = decode_object(retval)
return (end_point, retval)
def encode_document_element(name, value, traversal_stack, generator_func, on_unknown):
return "\x03" + encode_cstring(name) + \
encode_document(value, traversal_stack,
generator_func = generator_func,
on_unknown = on_unknown)
def decode_document_element(data, base):
base, name = decode_cstring(data, base + 1)
base, value = decode_document(data, base)
return (base, name, value)
def encode_array_element(name, value, traversal_stack, generator_func, on_unknown):
return "\x04" + encode_cstring(name) + \
encode_array(value, traversal_stack,
generator_func = generator_func,
on_unknown = on_unknown)
def decode_array_element(data, base):
base, name = decode_cstring(data, base + 1)
base, value = decode_document(data, base)
retval = []
try:
i = 0
while True:
retval.append(value[unicode(i)])
i += 1
except KeyError:
pass
return (base, name, retval)
def encode_binary_element(name, value):
return "\x05" + encode_cstring(name) + encode_binary(value)
def decode_binary_element(data, base):
base, name = decode_cstring(data, base + 1)
base, value = decode_binary(data, base)
return (base, name, value)
def encode_boolean_element(name, value):
return "\x08" + encode_cstring(name) + struct.pack("<b", value)
def decode_boolean_element(data, base):
base, name = decode_cstring(data, base + 1)
    value = bool(struct.unpack("<b", data[base:base + 1])[0])
return (base + 1, name, value)
def encode_UTCdatetime_element(name, value):
if value.tzinfo is None:
warnings.warn(MissingTimezoneWarning(), None, 4)
value = int(round(calendar.timegm(value.utctimetuple()) * 1000 +
(value.microsecond / 1000.0)))
return "\x09" + encode_cstring(name) + struct.pack("<q", value)
def decode_UTCdatetime_element(data, base):
base, name = decode_cstring(data, base + 1)
value = datetime.fromtimestamp(struct.unpack("<q",
data[base:base + 8])[0] / 1000.0, pytz.utc)
return (base + 8, name, value)
def encode_none_element(name, value):
return "\x0a" + encode_cstring(name)
def decode_none_element(data, base):
base, name = decode_cstring(data, base + 1)
return (base, name, None)
def encode_int32_element(name, value):
return "\x10" + encode_cstring(name) + struct.pack("<i", value)
def decode_int32_element(data, base):
base, name = decode_cstring(data, base + 1)
value = struct.unpack("<i", data[base:base + 4])[0]
return (base + 4, name, value)
def encode_int64_element(name, value):
return "\x12" + encode_cstring(name) + struct.pack("<q", value)
def decode_int64_element(data, base):
base, name = decode_cstring(data, base + 1)
value = struct.unpack("<q", data[base:base + 8])[0]
return (base + 8, name, value)
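# A minimal round-trip sketch (not part of the original module) showing the
# element convention used throughout this file: a 1-byte type tag, a
# C-string name, then the type-specific payload. The helper name is
# illustrative only.
def _example_int32_roundtrip():
    raw = encode_int32_element(u"answer", 42)
    base, name, value = decode_int32_element(raw, 0)
    assert (name, value) == (u"answer", 42)
    return base  # total bytes consumed: 1 tag + 7 name + 4 payload = 12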
# }}}
|
{
"content_hash": "843cf35972c773069f15e0c4610c4224",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 86,
"avg_line_length": 29.38938053097345,
"alnum_prop": 0.689551339957844,
"repo_name": "andreas/bson",
"id": "a01436382ade5b484db2e726c6873d356d580f41",
"size": "10188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bson/codec.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "25712"
}
],
"symlink_target": ""
}
|
"""
The LayerMapping class provides a way to map the contents of OGR
vector files (e.g. SHP files) to Geographic-enabled Django models.
For more information, please consult the GeoDjango documentation:
https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/
"""
import sys
from decimal import Decimal, InvalidOperation as DecimalInvalidOperation
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.gdal import (
CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType,
SpatialReference,
)
from django.contrib.gis.gdal.field import (
OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString,
OFTTime,
)
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import connections, models, router, transaction
from django.utils.encoding import force_str
# LayerMapping exceptions.
class LayerMapError(Exception):
pass
class InvalidString(LayerMapError):
pass
class InvalidDecimal(LayerMapError):
pass
class InvalidInteger(LayerMapError):
pass
class MissingForeignKey(LayerMapError):
pass
class LayerMapping:
"A class that maps OGR Layers to GeoDjango Models."
# Acceptable 'base' types for a multi-geometry type.
MULTI_TYPES = {
1: OGRGeomType('MultiPoint'),
2: OGRGeomType('MultiLineString'),
3: OGRGeomType('MultiPolygon'),
OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'),
OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'),
OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'),
}
# Acceptable Django field types and corresponding acceptable OGR
# counterparts.
FIELD_TYPES = {
models.AutoField: OFTInteger,
models.BigAutoField: OFTInteger64,
models.SmallAutoField: OFTInteger,
models.BooleanField: (OFTInteger, OFTReal, OFTString),
models.IntegerField: (OFTInteger, OFTReal, OFTString),
models.FloatField: (OFTInteger, OFTReal),
models.DateField: OFTDate,
models.DateTimeField: OFTDateTime,
models.EmailField: OFTString,
models.TimeField: OFTTime,
models.DecimalField: (OFTInteger, OFTReal),
models.CharField: OFTString,
models.SlugField: OFTString,
models.TextField: OFTString,
models.URLField: OFTString,
models.UUIDField: OFTString,
models.BigIntegerField: (OFTInteger, OFTReal, OFTString),
models.SmallIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveBigIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString),
}
def __init__(self, model, data, mapping, layer=0,
source_srs=None, encoding='utf-8',
transaction_mode='commit_on_success',
transform=True, unique=None, using=None):
"""
A LayerMapping object is initialized using the given Model (not an instance),
a DataSource (or string path to an OGR-supported data file), and a mapping
dictionary. See the module level docstring for more details and keyword
argument usage.
"""
# Getting the DataSource and the associated Layer.
if isinstance(data, str):
self.ds = DataSource(data, encoding=encoding)
else:
self.ds = data
self.layer = self.ds[layer]
self.using = using if using is not None else router.db_for_write(model)
self.spatial_backend = connections[self.using].ops
# Setting the mapping & model attributes.
self.mapping = mapping
self.model = model
# Checking the layer -- initialization of the object will fail if
# things don't check out before hand.
self.check_layer()
# Getting the geometry column associated with the model (an
# exception will be raised if there is no geometry column).
if connections[self.using].features.supports_transform:
self.geo_field = self.geometry_field()
else:
transform = False
# Checking the source spatial reference system, and getting
# the coordinate transformation object (unless the `transform`
# keyword is set to False)
if transform:
self.source_srs = self.check_srs(source_srs)
self.transform = self.coord_transform()
else:
self.transform = transform
# Setting the encoding for OFTString fields, if specified.
if encoding:
# Making sure the encoding exists, if not a LookupError
# exception will be thrown.
from codecs import lookup
lookup(encoding)
self.encoding = encoding
else:
self.encoding = None
if unique:
self.check_unique(unique)
transaction_mode = 'autocommit' # Has to be set to autocommit.
self.unique = unique
else:
self.unique = None
# Setting the transaction decorator with the function in the
# transaction modes dictionary.
self.transaction_mode = transaction_mode
if transaction_mode == 'autocommit':
self.transaction_decorator = None
elif transaction_mode == 'commit_on_success':
self.transaction_decorator = transaction.atomic
else:
raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode)
# #### Checking routines used during initialization ####
def check_fid_range(self, fid_range):
"Check the `fid_range` keyword."
if fid_range:
if isinstance(fid_range, (tuple, list)):
return slice(*fid_range)
elif isinstance(fid_range, slice):
return fid_range
else:
raise TypeError
else:
return None
def check_layer(self):
"""
Check the Layer metadata and ensure that it's compatible with the
mapping information and model. Unlike previous revisions, there is no
need to increment through each feature in the Layer.
"""
# The geometry field of the model is set here.
# TODO: Support more than one geometry field / model. However, this
# depends on the GDAL Driver in use.
self.geom_field = False
self.fields = {}
# Getting lists of the field names and the field types available in
# the OGR Layer.
ogr_fields = self.layer.fields
ogr_field_types = self.layer.field_types
# Function for determining if the OGR mapping field is in the Layer.
def check_ogr_fld(ogr_map_fld):
try:
idx = ogr_fields.index(ogr_map_fld)
except ValueError:
raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld)
return idx
# No need to increment through each feature in the model, simply check
# the Layer metadata against what was given in the mapping dictionary.
for field_name, ogr_name in self.mapping.items():
# Ensuring that a corresponding field exists in the model
# for the given field name in the mapping.
try:
model_field = self.model._meta.get_field(field_name)
except FieldDoesNotExist:
raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name)
# Getting the string name for the Django field class (e.g., 'PointField').
fld_name = model_field.__class__.__name__
if isinstance(model_field, GeometryField):
if self.geom_field:
raise LayerMapError('LayerMapping does not support more than one GeometryField per model.')
# Getting the coordinate dimension of the geometry field.
coord_dim = model_field.dim
try:
if coord_dim == 3:
gtype = OGRGeomType(ogr_name + '25D')
else:
gtype = OGRGeomType(ogr_name)
except GDALException:
raise LayerMapError('Invalid mapping for GeometryField "%s".' % field_name)
# Making sure that the OGR Layer's Geometry is compatible.
ltype = self.layer.geom_type
if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)):
raise LayerMapError('Invalid mapping geometry; model has %s%s, '
'layer geometry type is %s.' %
(fld_name, '(dim=3)' if coord_dim == 3 else '', ltype))
# Setting the `geom_field` attribute w/the name of the model field
# that is a Geometry. Also setting the coordinate dimension
# attribute.
self.geom_field = field_name
self.coord_dim = coord_dim
fields_val = model_field
elif isinstance(model_field, models.ForeignKey):
if isinstance(ogr_name, dict):
# Is every given related model mapping field in the Layer?
rel_model = model_field.remote_field.model
for rel_name, ogr_field in ogr_name.items():
idx = check_ogr_fld(ogr_field)
try:
rel_model._meta.get_field(rel_name)
except FieldDoesNotExist:
raise LayerMapError('ForeignKey mapping field "%s" not in %s fields.' %
(rel_name, rel_model.__class__.__name__))
fields_val = rel_model
else:
raise TypeError('ForeignKey mapping must be of dictionary type.')
else:
# Is the model field type supported by LayerMapping?
if model_field.__class__ not in self.FIELD_TYPES:
raise LayerMapError('Django field type "%s" has no OGR mapping (yet).' % fld_name)
# Is the OGR field in the Layer?
idx = check_ogr_fld(ogr_name)
ogr_field = ogr_field_types[idx]
# Can the OGR field type be mapped to the Django field type?
if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]):
raise LayerMapError('OGR field "%s" (of type %s) cannot be mapped to Django %s.' %
(ogr_field, ogr_field.__name__, fld_name))
fields_val = model_field
self.fields[field_name] = fields_val
def check_srs(self, source_srs):
"Check the compatibility of the given spatial reference object."
if isinstance(source_srs, SpatialReference):
sr = source_srs
elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()):
sr = source_srs.srs
elif isinstance(source_srs, (int, str)):
sr = SpatialReference(source_srs)
else:
# Otherwise just pulling the SpatialReference from the layer
sr = self.layer.srs
if not sr:
raise LayerMapError('No source reference system defined.')
else:
return sr
def check_unique(self, unique):
"Check the `unique` keyword parameter -- may be a sequence or string."
if isinstance(unique, (list, tuple)):
# List of fields to determine uniqueness with
for attr in unique:
if attr not in self.mapping:
raise ValueError
elif isinstance(unique, str):
# Only a single field passed in.
if unique not in self.mapping:
raise ValueError
else:
raise TypeError('Unique keyword argument must be set with a tuple, list, or string.')
# Keyword argument retrieval routines ####
def feature_kwargs(self, feat):
"""
Given an OGR Feature, return a dictionary of keyword arguments for
constructing the mapped model.
"""
# The keyword arguments for model construction.
kwargs = {}
# Incrementing through each model field and OGR field in the
# dictionary mapping.
for field_name, ogr_name in self.mapping.items():
model_field = self.fields[field_name]
if isinstance(model_field, GeometryField):
# Verify OGR geometry.
try:
val = self.verify_geom(feat.geom, model_field)
except GDALException:
raise LayerMapError('Could not retrieve geometry from feature.')
elif isinstance(model_field, models.base.ModelBase):
# The related _model_, not a field was passed in -- indicating
# another mapping for the related Model.
val = self.verify_fk(feat, model_field, ogr_name)
else:
# Otherwise, verify OGR Field type.
val = self.verify_ogr_field(feat[ogr_name], model_field)
# Setting the keyword arguments for the field name with the
# value obtained above.
kwargs[field_name] = val
return kwargs
def unique_kwargs(self, kwargs):
"""
Given the feature keyword arguments (from `feature_kwargs`), construct
and return the uniqueness keyword arguments -- a subset of the feature
kwargs.
"""
if isinstance(self.unique, str):
return {self.unique: kwargs[self.unique]}
else:
return {fld: kwargs[fld] for fld in self.unique}
# #### Verification routines used in constructing model keyword arguments. ####
def verify_ogr_field(self, ogr_field, model_field):
"""
Verify if the OGR Field contents are acceptable to the model field. If
they are, return the verified value, otherwise raise an exception.
"""
if (isinstance(ogr_field, OFTString) and
isinstance(model_field, (models.CharField, models.TextField))):
if self.encoding and ogr_field.value is not None:
# The encoding for OGR data sources may be specified here
# (e.g., 'cp437' for Census Bureau boundary files).
val = force_str(ogr_field.value, self.encoding)
else:
val = ogr_field.value
if model_field.max_length and val is not None and len(val) > model_field.max_length:
raise InvalidString('%s model field maximum string length is %s, given %s characters.' %
(model_field.name, model_field.max_length, len(val)))
elif isinstance(ogr_field, OFTReal) and isinstance(model_field, models.DecimalField):
try:
# Creating an instance of the Decimal value to use.
d = Decimal(str(ogr_field.value))
except DecimalInvalidOperation:
raise InvalidDecimal('Could not construct decimal from: %s' % ogr_field.value)
# Getting the decimal value as a tuple.
dtup = d.as_tuple()
digits = dtup[1]
d_idx = dtup[2] # index where the decimal is
# Maximum amount of precision, or digits to the left of the decimal.
max_prec = model_field.max_digits - model_field.decimal_places
# Getting the digits to the left of the decimal place for the
# given decimal.
if d_idx < 0:
n_prec = len(digits[:d_idx])
else:
n_prec = len(digits) + d_idx
# If we have more than the maximum digits allowed, then throw an
# InvalidDecimal exception.
if n_prec > max_prec:
raise InvalidDecimal(
'A DecimalField with max_digits %d, decimal_places %d must '
'round to an absolute value less than 10^%d.' %
(model_field.max_digits, model_field.decimal_places, max_prec)
)
val = d
elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField):
# Attempt to convert any OFTReal and OFTString value to an OFTInteger.
try:
val = int(ogr_field.value)
except ValueError:
raise InvalidInteger('Could not construct integer from: %s' % ogr_field.value)
else:
val = ogr_field.value
return val
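    # Worked example for the precision check above (illustrative): for
    # Decimal('123.45'), as_tuple() yields digits (1, 2, 3, 4, 5) with
    # exponent -2, so n_prec = len(digits[:-2]) = 3 digits left of the
    # decimal point; a DecimalField(max_digits=5, decimal_places=2) allows
    # max_prec = 5 - 2 = 3, so the value just fits.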
def verify_fk(self, feat, rel_model, rel_mapping):
"""
Given an OGR Feature, the related model and its dictionary mapping,
retrieve the related model for the ForeignKey mapping.
"""
# TODO: It is expensive to retrieve a model for every record --
# explore if an efficient mechanism exists for caching related
# ForeignKey models.
# Constructing and verifying the related model keyword arguments.
fk_kwargs = {}
for field_name, ogr_name in rel_mapping.items():
fk_kwargs[field_name] = self.verify_ogr_field(feat[ogr_name], rel_model._meta.get_field(field_name))
# Attempting to retrieve and return the related model.
try:
return rel_model.objects.using(self.using).get(**fk_kwargs)
except ObjectDoesNotExist:
raise MissingForeignKey(
'No ForeignKey %s model found with keyword arguments: %s' %
(rel_model.__name__, fk_kwargs)
)
def verify_geom(self, geom, model_field):
"""
Verify the geometry -- construct and return a GeometryCollection
if necessary (for example if the model field is MultiPolygonField while
the mapped shapefile only contains Polygons).
"""
# Downgrade a 3D geom to a 2D one, if necessary.
if self.coord_dim != geom.coord_dim:
geom.coord_dim = self.coord_dim
if self.make_multi(geom.geom_type, model_field):
# Constructing a multi-geometry type to contain the single geometry
multi_type = self.MULTI_TYPES[geom.geom_type.num]
g = OGRGeometry(multi_type)
g.add(geom)
else:
g = geom
# Transforming the geometry with our Coordinate Transformation object,
# but only if the class variable `transform` is set w/a CoordTransform
# object.
if self.transform:
g.transform(self.transform)
# Returning the WKT of the geometry.
return g.wkt
# #### Other model methods ####
def coord_transform(self):
"Return the coordinate transformation object."
SpatialRefSys = self.spatial_backend.spatial_ref_sys()
try:
# Getting the target spatial reference system
target_srs = SpatialRefSys.objects.using(self.using).get(srid=self.geo_field.srid).srs
# Creating the CoordTransform object
return CoordTransform(self.source_srs, target_srs)
except Exception as exc:
raise LayerMapError(
'Could not translate between the data source and model geometry.'
) from exc
def geometry_field(self):
"Return the GeometryField instance associated with the geographic column."
# Use `get_field()` on the model's options so that we
# get the correct field instance if there's model inheritance.
opts = self.model._meta
return opts.get_field(self.geom_field)
def make_multi(self, geom_type, model_field):
"""
Given the OGRGeomType for a geometry and its associated GeometryField,
determine whether the geometry should be turned into a GeometryCollection.
"""
return (geom_type.num in self.MULTI_TYPES and
model_field.__class__.__name__ == 'Multi%s' % geom_type.django)
def save(self, verbose=False, fid_range=False, step=False,
progress=False, silent=False, stream=sys.stdout, strict=False):
"""
Save the contents from the OGR DataSource Layer into the database
according to the mapping dictionary given at initialization.
Keyword Parameters:
verbose:
If set, information will be printed subsequent to each model save
executed on the database.
fid_range:
May be set with a slice or tuple of (begin, end) feature ID's to map
from the data source. In other words, this keyword enables the user
to selectively import a subset range of features in the geographic
data source.
step:
If set with an integer, transactions will occur at every step
interval. For example, if step=1000, a commit would occur after
the 1,000th feature, the 2,000th feature etc.
progress:
When this keyword is set, status information will be printed giving
the number of features processed and successfully saved. By default,
            progress information will be printed every 1000 features processed;
however, this default may be overridden by setting this keyword with an
integer for the desired interval.
stream:
Status information will be written to this file handle. Defaults to
using `sys.stdout`, but any object with a `write` method is supported.
silent:
By default, non-fatal error notifications are printed to stdout, but
this keyword may be set to disable these notifications.
strict:
Execution of the model mapping will cease upon the first error
encountered. The default behavior is to attempt to continue.
"""
# Getting the default Feature ID range.
default_range = self.check_fid_range(fid_range)
# Setting the progress interval, if requested.
if progress:
if progress is True or not isinstance(progress, int):
progress_interval = 1000
else:
progress_interval = progress
def _save(feat_range=default_range, num_feat=0, num_saved=0):
if feat_range:
layer_iter = self.layer[feat_range]
else:
layer_iter = self.layer
for feat in layer_iter:
num_feat += 1
# Getting the keyword arguments
try:
kwargs = self.feature_kwargs(feat)
except LayerMapError as msg:
# Something borked the validation
if strict:
raise
elif not silent:
stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
else:
# Constructing the model using the keyword args
is_update = False
if self.unique:
# If we want unique models on a particular field, handle the
# geometry appropriately.
try:
# Getting the keyword arguments and retrieving
# the unique model.
u_kwargs = self.unique_kwargs(kwargs)
m = self.model.objects.using(self.using).get(**u_kwargs)
is_update = True
# Getting the geometry (in OGR form), creating
# one from the kwargs WKT, adding in additional
# geometries, and update the attribute with the
# just-updated geometry WKT.
geom_value = getattr(m, self.geom_field)
if geom_value is None:
geom = OGRGeometry(kwargs[self.geom_field])
else:
geom = geom_value.ogr
new = OGRGeometry(kwargs[self.geom_field])
for g in new:
geom.add(g)
setattr(m, self.geom_field, geom.wkt)
except ObjectDoesNotExist:
# No unique model exists yet, create.
m = self.model(**kwargs)
else:
m = self.model(**kwargs)
try:
# Attempting to save.
m.save(using=self.using)
num_saved += 1
if verbose:
stream.write('%s: %s\n' % ('Updated' if is_update else 'Saved', m))
except Exception as msg:
if strict:
# Bailing out if the `strict` keyword is set.
if not silent:
stream.write(
'Failed to save the feature (id: %s) into the '
'model with the keyword arguments:\n' % feat.fid
)
stream.write('%s\n' % kwargs)
raise
elif not silent:
stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))
# Printing progress information, if requested.
if progress and num_feat % progress_interval == 0:
stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))
# Only used for status output purposes -- incremental saving uses the
# values returned here.
return num_saved, num_feat
if self.transaction_decorator is not None:
_save = self.transaction_decorator(_save)
nfeat = self.layer.num_feat
if step and isinstance(step, int) and step < nfeat:
# Incremental saving is requested at the given interval (step)
if default_range:
raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
beg, num_feat, num_saved = (0, 0, 0)
indices = range(step, nfeat, step)
n_i = len(indices)
for i, end in enumerate(indices):
                # Constructing the slice to use for this step; the last slice is
                # special (e.g., [100:] instead of [90:100]).
if i + 1 == n_i:
step_slice = slice(beg, None)
else:
step_slice = slice(beg, end)
try:
num_feat, num_saved = _save(step_slice, num_feat, num_saved)
beg = end
except Exception: # Deliberately catch everything
stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
raise
else:
# Otherwise, just calling the previously defined _save() function.
_save()
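# A minimal usage sketch (not part of this module). The model and the OGR
# field names in the mapping are illustrative only; ``model`` is assumed to
# have a ``name`` CharField and a ``geom`` geometry field matching the
# shapefile's geometry type.
def _example_layer_mapping(model, shapefile_path):
    mapping = {'name': 'NAME', 'geom': 'POLYGON'}
    lm = LayerMapping(model, shapefile_path, mapping, transform=False)
    lm.save(strict=True, verbose=True)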
|
{
"content_hash": "ad413139d5aa7e037c275b78c7a8f1f3",
"timestamp": "",
"source": "github",
"line_count": 635,
"max_line_length": 118,
"avg_line_length": 43.418897637795276,
"alnum_prop": 0.5731384425664647,
"repo_name": "kaedroho/django",
"id": "6908d20ef759ceb5fc6e01937f056b2d37da7034",
"size": "27630",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "django/contrib/gis/utils/layermapping.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53429"
},
{
"name": "JavaScript",
"bytes": "103687"
},
{
"name": "Makefile",
"bytes": "5553"
},
{
"name": "Python",
"bytes": "10225313"
},
{
"name": "Shell",
"bytes": "10452"
}
],
"symlink_target": ""
}
|
"""Common expressions data structures in the IR."""
import tvm._ffi
from .base import Node
from . import _ffi_api
class BaseExpr(Node):
"""Base class of all the expressions."""
class PrimExpr(BaseExpr):
"""Base class of all primitive expressions.
PrimExpr is used in the low-level code
optimizations and integer analysis.
"""
class RelayExpr(BaseExpr):
"""Base class of all non-primitive expressions."""
@property
def checked_type(self):
"""Get the checked type of tvm.relay.Expr.
Returns
-------
checked_type : tvm.relay.Type
The checked type.
"""
ret = self._checked_type_
if ret is None:
raise ValueError("The type checker has not populated" " the checked_type for this node")
return ret
@tvm._ffi.register_object("GlobalVar")
class GlobalVar(RelayExpr):
"""A global variable in the IR.
GlobalVar is used to refer to the global functions
stored in the IRModule.
Parameters
----------
name_hint: str
The name of the variable.
"""
def __init__(self, name_hint):
self.__init_handle_by_constructor__(_ffi_api.GlobalVar, name_hint)
def __call__(self, *args):
"""Call the global variable.
Parameters
----------
args: List[RelayExpr]
The arguments to the call.
Returns
-------
call: BaseExpr
A call taking the variable as a function.
"""
# pylint: disable=import-outside-toplevel
if all(isinstance(x, RelayExpr) for x in args):
from tvm import relay
return relay.Call(self, args)
arg_types = [type(x) for x in args]
raise RuntimeError(
"Do not know how to handle GlobalVar.__call__ for types {}".format(arg_types)
)
@tvm._ffi.register_object
class Range(Node):
"""Represent a range in TVM.
You do not need to create a Range explicitly.
Python lists and tuples will be converted automatically to a Range in API functions.
Parameters
----------
begin : PrimExpr
The begin value of the range when end is None.
Otherwise it is the length of the range.
end : Optional[PrimExpr]
The end value of the range.
Note
----
The constructor creates the range `[begin, end)`
if the end argument is not None. Otherwise, it creates `[0, begin)`.
"""
def __init__(self, begin, end=None):
if end is None:
self.__init_handle_by_constructor__(_ffi_api.Range, 0, begin)
else:
self.__init_handle_by_constructor__(_ffi_api.Range, begin, end)
@staticmethod
def from_min_extent(min_value, extent):
"""Construct a Range by min and extent.
This constructs a range in [min_value, min_value + extent)
Parameters
----------
min_value : PrimExpr
The minimum value of the range.
extent : PrimExpr
The extent of the range.
Returns
-------
rng : Range
The constructed range.
"""
return _ffi_api.Range_from_min_extent(min_value, extent)
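# A minimal usage sketch (not part of this module): the first two calls
# both express the half-open range [2, 10); the single-argument form
# means [0, begin).
def _example_ranges():
    rng_a = Range(2, 10)                 # [2, 10)
    rng_b = Range.from_min_extent(2, 8)  # also [2, 10)
    rng_c = Range(10)                    # [0, 10)
    return rng_a, rng_b, rng_c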
|
{
"content_hash": "1ae0ddfd185e65e3e3012e0e9b7aec22",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 100,
"avg_line_length": 25.696,
"alnum_prop": 0.5868617683686177,
"repo_name": "tqchen/tvm",
"id": "f6bf975621e35c078b7272488cef280d8ccb2395",
"size": "3997",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "python/tvm/ir/expr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4104"
},
{
"name": "C",
"bytes": "205781"
},
{
"name": "C++",
"bytes": "8124041"
},
{
"name": "CMake",
"bytes": "135007"
},
{
"name": "Cuda",
"bytes": "6677"
},
{
"name": "Go",
"bytes": "111558"
},
{
"name": "HTML",
"bytes": "2664"
},
{
"name": "Java",
"bytes": "200193"
},
{
"name": "JavaScript",
"bytes": "15075"
},
{
"name": "Makefile",
"bytes": "48206"
},
{
"name": "Objective-C",
"bytes": "18506"
},
{
"name": "Objective-C++",
"bytes": "56786"
},
{
"name": "Python",
"bytes": "10300435"
},
{
"name": "RenderScript",
"bytes": "1895"
},
{
"name": "Rust",
"bytes": "327078"
},
{
"name": "Shell",
"bytes": "157176"
},
{
"name": "TypeScript",
"bytes": "94435"
}
],
"symlink_target": ""
}
|
from .middleware import create_app
from .config import ServerConfig
from .server import TAXIIServer
from .utils import configure_logging
# This module is also used as a Gunicorn configuration module, i.e. passed
# as ``--config python:opentaxii.http``. ``logconfig_dict`` module-level
# variable is recognised by Gunicorn >= 19.8. The desired effect is to
# remove ``gunicorn.error`` logger's stream handler and restore
# propagation to the root logger, which should follow our
# ``structlog`` configuration.
logconfig_dict = {
'version': 1,
'disable_existing_loggers': False,
'root': {},
'loggers': {
'gunicorn.error': {
'level': 'INFO',
'propagate': True
}
}
}
config_obj = ServerConfig()
configure_logging(config_obj.get('logging', {'': 'info'}))
server = TAXIIServer(config_obj)
app = create_app(server)
app.debug = False
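# A typical invocation (illustrative): serve the WSGI ``app`` defined above
# while reusing this module as the Gunicorn config so that the
# ``logconfig_dict`` above takes effect:
#
#   gunicorn opentaxii.http:app --config python:opentaxii.http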
|
{
"content_hash": "b7d9cca61bf489b548b105a24d5b1d71",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 74,
"avg_line_length": 29.7,
"alnum_prop": 0.6790123456790124,
"repo_name": "EclecticIQ/OpenTAXII",
"id": "7a57b2825966ad3cac9c6ac0e145d1724382e8f7",
"size": "892",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "opentaxii/http.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "237268"
},
{
"name": "Shell",
"bytes": "3174"
}
],
"symlink_target": ""
}
|
"""Zookeeper based queue implementations.
:Maintainer: None
:Status: Possibly Buggy
.. note::
This queue was reported to cause memory leaks over long running periods.
See: https://github.com/python-zk/kazoo/issues/175
"""
import uuid
from kazoo.exceptions import NoNodeError, NodeExistsError
from kazoo.protocol.states import EventType
from kazoo.retry import ForceRetryError
class BaseQueue(object):
"""A common base class for queue implementations."""
def __init__(self, client, path):
"""
:param client: A :class:`~kazoo.client.KazooClient` instance.
:param path: The queue path to use in ZooKeeper.
"""
self.client = client
self.path = path
self._entries_path = path
self.structure_paths = (self.path, )
self.ensured_path = False
def _check_put_arguments(self, value, priority=100):
if not isinstance(value, bytes):
raise TypeError("value must be a byte string")
if not isinstance(priority, int):
raise TypeError("priority must be an int")
elif priority < 0 or priority > 999:
raise ValueError("priority must be between 0 and 999")
def _ensure_paths(self):
if not self.ensured_path:
# make sure our parent / internal structure nodes exists
for path in self.structure_paths:
self.client.ensure_path(path)
self.ensured_path = True
def __len__(self):
self._ensure_paths()
_, stat = self.client.retry(self.client.get, self._entries_path)
return stat.children_count
class Queue(BaseQueue):
"""A distributed queue with optional priority support.
This queue does not offer reliable consumption. An entry is removed
from the queue prior to being processed. So if an error occurs, the
consumer has to re-queue the item or it will be lost.
"""
prefix = "entry-"
def __init__(self, client, path):
"""
:param client: A :class:`~kazoo.client.KazooClient` instance.
:param path: The queue path to use in ZooKeeper.
"""
super(Queue, self).__init__(client, path)
self._children = []
def __len__(self):
"""Return queue size."""
return super(Queue, self).__len__()
def get(self):
"""
Get item data and remove an item from the queue.
:returns: Item data or None.
:rtype: bytes
"""
self._ensure_paths()
return self.client.retry(self._inner_get)
def _inner_get(self):
if not self._children:
self._children = self.client.retry(
self.client.get_children, self.path)
self._children = sorted(self._children)
if not self._children:
return None
name = self._children[0]
try:
data, stat = self.client.get(self.path + "/" + name)
self.client.delete(self.path + "/" + name)
except NoNodeError: # pragma: nocover
# the first node has vanished in the meantime, try to
# get another one
self._children = []
raise ForceRetryError()
self._children.pop(0)
return data
def put(self, value, priority=100):
"""Put an item into the queue.
:param value: Byte string to put into the queue.
:param priority:
An optional priority as an integer with at most 3 digits.
Lower values signify higher priority.
"""
self._check_put_arguments(value, priority)
self._ensure_paths()
path = '{path}/{prefix}{priority:03d}-'.format(
path=self.path, prefix=self.prefix, priority=priority)
self.client.create(path, value, sequence=True)
class LockingQueue(BaseQueue):
"""A distributed queue with priority and locking support.
Upon retrieving an entry from the queue, the entry gets locked with an
    ephemeral node (instead of being deleted). If an error occurs, this lock
    gets released so that others can retake the entry. This adds a small
    penalty compared to the :class:`Queue` implementation.
The user should call the :meth:`LockingQueue.get` method first to lock and
retrieve the next entry. When finished processing the entry, a user should
call the :meth:`LockingQueue.consume` method that will remove the entry
from the queue.
    This queue will not track the connection status with ZooKeeper. If a node
    locks an element, then loses its connection with ZooKeeper and later
    reconnects, the lock will probably be removed by ZooKeeper in the meantime,
    but the node would still think that it holds the lock. The user should
    check the connection status with ZooKeeper, or call the
    :meth:`LockingQueue.holds_lock` method, which checks whether the node
    still holds the lock.
.. note::
:class:`LockingQueue` requires ZooKeeper 3.4 or above, since it is
using transactions.
"""
lock = "/taken"
entries = "/entries"
entry = "entry"
def __init__(self, client, path):
"""
:param client: A :class:`~kazoo.client.KazooClient` instance.
:param path: The queue path to use in ZooKeeper.
"""
super(LockingQueue, self).__init__(client, path)
self.id = uuid.uuid4().hex.encode()
self.processing_element = None
self._lock_path = self.path + self.lock
self._entries_path = self.path + self.entries
self.structure_paths = (self._lock_path, self._entries_path)
def __len__(self):
"""Returns the current length of the queue.
    :returns: queue size (including locked entries).
"""
return super(LockingQueue, self).__len__()
def put(self, value, priority=100):
"""Put an entry into the queue.
:param value: Byte string to put into the queue.
:param priority:
An optional priority as an integer with at most 3 digits.
Lower values signify higher priority.
"""
self._check_put_arguments(value, priority)
self._ensure_paths()
self.client.create(
"{path}/{prefix}-{priority:03d}-".format(
path=self._entries_path,
prefix=self.entry,
priority=priority),
value, sequence=True)
def put_all(self, values, priority=100):
"""Put several entries into the queue. The action only succeeds
    if all entries were put into the queue.
:param values: A list of values to put into the queue.
:param priority:
An optional priority as an integer with at most 3 digits.
Lower values signify higher priority.
"""
if not isinstance(values, list):
raise TypeError("values must be a list of byte strings")
if not isinstance(priority, int):
raise TypeError("priority must be an int")
elif priority < 0 or priority > 999:
raise ValueError("priority must be between 0 and 999")
self._ensure_paths()
with self.client.transaction() as transaction:
for value in values:
if not isinstance(value, bytes):
raise TypeError("value must be a byte string")
transaction.create(
"{path}/{prefix}-{priority:03d}-".format(
path=self._entries_path,
prefix=self.entry,
priority=priority),
value, sequence=True)
def get(self, timeout=None):
"""Locks and gets an entry from the queue. If a previously got entry
was not consumed, this method will return that entry.
:param timeout:
Maximum waiting time in seconds. If None then it will wait
    until an entry appears in the queue.
:returns: A locked entry value or None if the timeout was reached.
:rtype: bytes
"""
self._ensure_paths()
if self.processing_element is not None:
return self.processing_element[1]
else:
return self._inner_get(timeout)
def holds_lock(self):
"""Checks if a node still holds the lock.
:returns: True if a node still holds the lock, False otherwise.
:rtype: bool
"""
if self.processing_element is None:
return False
lock_id, _ = self.processing_element
lock_path = "{path}/{id}".format(path=self._lock_path, id=lock_id)
self.client.sync(lock_path)
value, stat = self.client.retry(self.client.get, lock_path)
return value == self.id
def consume(self):
"""Removes a currently processing entry from the queue.
:returns: True if element was removed successfully, False otherwise.
:rtype: bool
"""
if self.processing_element is not None and self.holds_lock():
id_, value = self.processing_element
with self.client.transaction() as transaction:
transaction.delete("{path}/{id}".format(
path=self._entries_path,
id=id_))
transaction.delete("{path}/{id}".format(
path=self._lock_path,
id=id_))
self.processing_element = None
return True
else:
return False
def release(self):
"""Removes the lock from currently processed item without consuming it.
:returns: True if the lock was removed successfully, False otherwise.
:rtype: bool
"""
if self.processing_element is not None and self.holds_lock():
id_, value = self.processing_element
with self.client.transaction() as transaction:
transaction.delete("{path}/{id}".format(
path=self._lock_path,
id=id_))
self.processing_element = None
return True
else:
return False
def _inner_get(self, timeout):
flag = self.client.handler.event_object()
lock = self.client.handler.lock_object()
canceled = False
value = []
def check_for_updates(event):
if event is not None and event.type != EventType.CHILD:
return
with lock:
            if canceled or flag.is_set():
return
values = self.client.retry(
self.client.get_children,
self._entries_path,
check_for_updates)
taken = self.client.retry(
self.client.get_children,
self._lock_path,
check_for_updates)
available = self._filter_locked(values, taken)
if len(available) > 0:
ret = self._take(available[0])
if ret is not None:
# By this time, no one took the task
value.append(ret)
flag.set()
check_for_updates(None)
retVal = None
flag.wait(timeout)
with lock:
canceled = True
if len(value) > 0:
# We successfully locked an entry
self.processing_element = value[0]
retVal = value[0][1]
return retVal
def _filter_locked(self, values, taken):
taken = set(taken)
available = sorted(values)
return (available if len(taken) == 0 else
[x for x in available if x not in taken])
def _take(self, id_):
try:
self.client.create(
"{path}/{id}".format(
path=self._lock_path,
id=id_),
self.id,
ephemeral=True)
except NodeExistsError:
# Item is already locked
return None
try:
value, stat = self.client.retry(
self.client.get,
"{path}/{id}".format(path=self._entries_path, id=id_))
except NoNodeError:
# Item is already consumed
self.client.delete(
"{path}/{id}".format(
path=self._lock_path,
id=id_))
return None
return (id_, value)
|
{
"content_hash": "fc1da033cbf8805d2cdebda0858d0012",
"timestamp": "",
"source": "github",
"line_count": 353,
"max_line_length": 79,
"avg_line_length": 35.22096317280453,
"alnum_prop": 0.5720260596798842,
"repo_name": "kawamon/hue",
"id": "9cf22b6cf0d0acec014a3a4ebe35fee456b165ae",
"size": "12433",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/kazoo-2.8.0/kazoo/recipe/queue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
from deepdetector import DeepDetector
from simpledetector import SimpleDetector
from facerecognizer import FaceRecognizer
from annotatedphotowriter import AnnotatedPhotoWriter
from annotatedframewriter import AnnotatedFrameWriter
from annotatedvideowriter import AnnotatedVideoWriter
from jsonreportwriter import JSONReportWriter
import imageio
import cv2
import multiprocessing
import os
import os.path
import yaml
import sys
import traceback
class MultiPipelineExecutor(object):
'''
A multiprocess executor that sets up a pipeline for each CPU
    core and distributes the input set of files across these pipelines.
    Each pipeline has its own detector, recognizer and outputter components,
    including the darkflow deep object detector neural networks. This may
    seem inefficient, but darkflow appears to store some state per input
    file and is therefore not thread safe.
'''
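    # A minimal usage sketch (file and directory names are illustrative):
    #
    #     executor = MultiPipelineExecutor()
    #     executor.execute('pipeline.yml', 'input_media/', 'output/')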
def execute(self, pipeline_file, input_directory, output_directory):
# Create a shared file queue across multiple processes.
file_queue = multiprocessing.JoinableQueue()
# Start pipelines.
num_pipeline_processors = multiprocessing.cpu_count()
print('Creating %d pipelines' % num_pipeline_processors)
pipeline_processors = [
PipelineProcessor(pipeline_file, input_directory, output_directory, file_queue)
for i in range(num_pipeline_processors) ]
# Enqueue files in input directory.
for dirpath,dirs,files in os.walk(input_directory):
for f in files:
file_path = os.path.join(dirpath, f)
print("put in queue:", file_path)
file_queue.put(file_path)
for w in pipeline_processors:
w.start()
# Add an end command in each queue
for i in range(num_pipeline_processors):
file_queue.put(None)
# Wait for all of the tasks to finish
file_queue.join()
print("Completed")
class PipelineProcessor(multiprocessing.Process):
def __init__(self, pipeline_file, input_directory, output_directory, file_queue):
multiprocessing.Process.__init__(self)
self.file_queue = file_queue
self.pipeline_file = pipeline_file
self.input_directory = input_directory
self.output_directory = output_directory
def run(self):
self.pipeline = Pipeline(self.pipeline_file, self.input_directory, self.output_directory)
proc_name = self.name
while True:
next_file = self.file_queue.get()
if next_file is None:
# None means shutdown this process.
print('%s: Exiting' % proc_name)
self.file_queue.task_done()
break
print('%s: Executing %s' % (proc_name, next_file))
# Termination of process due to uncaught exception will hold up
# the main process because it'll wait on queue forever.
try:
self.pipeline.execute(next_file)
print('%s: Executed %s' % (proc_name, next_file))
            except Exception:
print("*****************\nException while executing " + next_file)
traceback.print_exc()
finally:
self.file_queue.task_done()
return
class Pipeline(object):
'''
A Pipeline consists of a series of detectors, recognizers and outputters
through which a photo or video is passed in a sequence.
'''
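    # A hypothetical pipeline YAML, inferred from the COMPONENTS mapping
    # below; each entry needs a 'type' key, and the full entry is passed to
    # the component's constructor:
    #
    #     pipeline:
    #       - type: deepdetector
    #       - type: recognizer
    #       - type: photowriter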
COMPONENTS = {
'deepdetector' : DeepDetector,
'simpledetector' : SimpleDetector,
'photowriter' : AnnotatedPhotoWriter,
'framewriter' : AnnotatedFrameWriter,
'videowriter' : AnnotatedVideoWriter,
'recognizer' : FaceRecognizer,
'jsonreportwriter' : JSONReportWriter
}
def __init__(self, pipeline_file, input_directory, output_directory):
with open(pipeline_file, 'r') as f:
            # safe_load avoids executing arbitrary YAML tags
            self.cfg = yaml.safe_load(f)
self.cfg = self.cfg['pipeline']
self.output_directory = output_directory
self.input_directory = input_directory
self.create_components()
def create_components(self):
self.components = []
for comp_cfg in self.cfg:
comp_type = Pipeline.COMPONENTS.get(comp_cfg['type'])
if comp_type:
comp = comp_type(comp_cfg)
self.components.append(comp)
def execute(self, input_file):
isphoto = False
isvideo = False
img = None
# if input file is a photo, read it. Also create a separate
# grayscale image because some of the detectors work on grayscale
        # but annotate on the original color image.
# Send the color image, grayscale image, filepath, and isphoto flag
# through the components of the pipeline.
try:
img = imageio.imread(input_file)
print("Image read")
isphoto = True
        except Exception:
print("Not a photo. Error while attempting to load:", sys.exc_info())
# If input file is a video, open it and setup an iterator over its
# frames. Then for each frame, send image, grayscale image, video filename,
# frame number and isvideo flag through the components of the pipeline.
video = None
try:
video = imageio.get_reader(input_file, 'ffmpeg')
print("Video opened")
isvideo = True
        except Exception:
print("Not a video. Error while attempting to open:", sys.exc_info())
if video:
video.close()
if not isphoto and not isvideo:
print("Ignoring file: ", input_file)
return
if isphoto:
input_data = {
'file' : input_file,
'img' : img,
'isphoto' : True,
'isvideo' : False
}
self._execute_pipeline_on_image(input_data)
self.completed(input_data)
elif isvideo:
for frame_num, img in enumerate(video):
input_data = {
'file' : input_file,
'img' : img,
'isphoto' : False,
'isvideo' : True,
'frame' : frame_num
}
self._execute_pipeline_on_image(input_data)
# Notify components such as video writers that need to know when
# the input stream has completed so they can do their own cleanup.
self.completed(input_data)
def _execute_pipeline_on_image(self, input_data):
if input_data['img'].ndim == 3:
            # imageio's imread returns RGB or RGBA, not BGR (confirmed with a
            # blue filled rectangle); this is the opposite of OpenCV's default
            # BGR. Use RGB consistently everywhere.
if input_data['img'].shape[-1] == 4:
input_data['gray'] = cv2.cvtColor(input_data['img'], cv2.COLOR_RGBA2GRAY)
print("Input image seems to be 4-channel RGBA. Creating 3-channel RGB version")
input_data['img'] = cv2.cvtColor(input_data['img'], cv2.COLOR_RGBA2RGB)
else:
input_data['gray'] = cv2.cvtColor(input_data['img'], cv2.COLOR_RGB2GRAY)
elif input_data['img'].ndim == 2:
# If input is a grayscale image, it'll have just 2 dimensions,
            # but Darkflow code expects 3 dimensions. So always keep 'img' a
            # 3-dimensional image no matter what.
print("Input image is grayscale. Creating RGB version")
input_data['gray'] = input_data['img'].copy()
input_data['img'] = cv2.cvtColor(input_data['img'], cv2.COLOR_GRAY2RGB)
else:
raise "Unknown image format " + input_data['img'].shape
print("Input image:", input_data['img'].shape)
print("Grayscale image:", input_data['gray'].shape)
for comp in self.components:
print("Executing %s on %s frame %d" % (comp.name, input_data['file'], input_data.get('frame', 0)))
comp_outputs = comp.execute(input_data, self.input_directory, self.output_directory)
# At each stage of the pipeline, collect the component's outputs
# and add them to the input data so that they're available for
# downstream components.
input_data[comp.name] = comp_outputs
# Release the image arrays.
input_data['img'] = None
input_data['gray'] = None
def completed(self, input_data):
for comp in self.components:
comp.completed(input_data, self.input_directory, self.output_directory)
|
{
"content_hash": "6ec982d40b8844e89748960cb87f5e0d",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 110,
"avg_line_length": 37.5863453815261,
"alnum_prop": 0.566299818356662,
"repo_name": "pathbreak/deepvisualminer",
"id": "ea7cc8f7ce88e33c4052dffea6141dad67c7e91b",
"size": "9359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipeline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "58446"
},
{
"name": "Shell",
"bytes": "4604"
}
],
"symlink_target": ""
}
|
from zeus.models import ItemStat
def test_build_details(
client, db_session, default_login, default_repo, default_build, default_repo_access
):
db_session.add(ItemStat(item_id=default_build.id, name="tests.count", value="1"))
resp = client.get(
"/api/repos/{}/builds/{}".format(
default_repo.get_full_name(), default_build.number
)
)
assert resp.status_code == 200
data = resp.json()
assert data["id"] == str(default_build.id)
assert data["stats"]["tests"]["count"] == 1
assert data["authors"]
|
{
"content_hash": "737ae9f497e2b81b693b03b64d7e27eb",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 87,
"avg_line_length": 31.166666666666668,
"alnum_prop": 0.6310160427807486,
"repo_name": "getsentry/zeus",
"id": "eb455ec5450fb7a0e99accdbb6087a3f56d43967",
"size": "561",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/zeus/api/resources/test_build_details.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3596"
},
{
"name": "HTML",
"bytes": "13037"
},
{
"name": "JavaScript",
"bytes": "327335"
},
{
"name": "Makefile",
"bytes": "1130"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "822392"
},
{
"name": "Shell",
"bytes": "2564"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth.decorators import login_required
from django.conf.urls import include, url
from django.contrib import admin
from django.shortcuts import redirect
from django.views.generic import TemplateView
from django.contrib.auth import views as auth_views
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from main.views import DashboardView
from main.api.urls import urls as api_endpoints
from main.urls import download_urlpatterns
from publish.urls import publish_urlpatterns
def home_view_selection_view(request):
if request.user.is_authenticated:
return redirect('api/explorer')
else:
return redirect('login')
def admin_view_selection_view(request):
if request.user.is_superuser:
return admin.site.index(request)
elif request.user.is_authenticated:
return redirect('dashboard')
else:
return redirect('login')
web_urls = [
# Authentication URLs
url(r'^logout/$', auth_views.logout, {'next_page': '/login/'}, name='logout'),
# url(r'^login/$', auth_views.login),
url('^', include('django.contrib.auth.urls')),
# Application URLs
url(r'^download/', include(download_urlpatterns, namespace='download')),
url(r'^admin/logout/$', auth_views.logout, {'next_page': '/'}),
# use a function to determine where admin/ will resolve to, based on the user
url(r'^admin/$', admin_view_selection_view),
url(r'^admin/', admin.site.urls),
url(r'^publish/', include(publish_urlpatterns, namespace='publish')),
url(r'^$', home_view_selection_view, name='home'),
url(r'^dashboard/', login_required(DashboardView.as_view()), name='dashboard'),
url(r'^about/', TemplateView.as_view(template_name='main/about.html'), name='about'),
# legacy
url(r'^grappelli/', include('grappelli.urls')), # Grappelli URLS
]
api_urls = [
url(r'^api/', include(api_endpoints, namespace='api')),
]
sso_api_urls = [
url(r'^sso-api/', include(api_endpoints, namespace='sso-api')),
]
media_urls = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
schema_view = get_schema_view(
openapi.Info(
title="Biosys API",
default_version='v1',
description="Biosys API Documentation",
),
public=True,
patterns=api_urls
)
api_doc_urls = [
url(r'^api/swagger(?P<format>\.json|\.yaml)$', schema_view.without_ui(cache_timeout=None), name='doc-json'),
url(r'^api/explorer/$', schema_view.with_ui('swagger', cache_timeout=None), name='doc-swagger'),
url(r'^api/redoc/$', schema_view.with_ui('redoc', cache_timeout=None), name='doc-redoc'),
]
urlpatterns = web_urls + api_urls + api_doc_urls + media_urls + sso_api_urls
|
{
"content_hash": "3eed0a7b5187b75de62d0adb5155e60f",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 112,
"avg_line_length": 34.96341463414634,
"alnum_prop": 0.6916637600279038,
"repo_name": "ropable/biosys",
"id": "85a2edafedc152768c1db9b00d765dd2185300a1",
"size": "2867",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "biosys/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2303"
},
{
"name": "Dockerfile",
"bytes": "1138"
},
{
"name": "HTML",
"bytes": "38249"
},
{
"name": "JavaScript",
"bytes": "8822"
},
{
"name": "Python",
"bytes": "793743"
}
],
"symlink_target": ""
}
|
__author__ = 'jordsti'
import item
import c_library
class decorated_button(item.item):
def __init__(self):
item.item.__init__(self)
self.obj = self.lib.DecoratedButton_new()
def set_caption(self, text):
self.lib.DecoratedButton_setCaption(self.obj, text)
def get_caption(self):
return c_library.to_str(self.lib.DecoratedButton_getCaption(self.obj))
|
{
"content_hash": "d9eac41a26c9afaf22785927f0e5f15b",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 78,
"avg_line_length": 26.4,
"alnum_prop": 0.6641414141414141,
"repo_name": "jordsti/stigame",
"id": "c23753133084d2878cbc7447a7d449d40bad9b7d",
"size": "396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/gui-editor/wrapper/decorated_button.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "631452"
},
{
"name": "CMake",
"bytes": "14021"
},
{
"name": "Python",
"bytes": "2171"
}
],
"symlink_target": ""
}
|
import os
import shutil
import time
from threading import Timer, Event as TEvent
from apptools.preferences.preference_binding import bind_preference
from numpy import copy
from skimage.color import gray2rgb
from skimage.draw import circle_perimeter, line
# ============= enthought library imports =======================
from traits.api import (
Instance,
String,
Property,
Button,
Bool,
Event,
on_trait_change,
Str,
Float,
Enum,
Int,
)
from pychron.canvas.canvas2D.camera import Camera, YamlCamera, BaseCamera
from pychron.core.helpers.binpack import pack, encode_blob
from pychron.core.helpers.filetools import unique_path, unique_path_from_manifest
from pychron.core.ui.stage_component_editor import VideoComponentEditor
from pychron.core.ui.thread import Thread as QThread
from pychron.core.ui.thread import sleep
from pychron.core.yaml import yload
from pychron.image.video import Video, pil_save
from pychron.mv.lumen_detector import LumenDetector
from pychron.paths import paths
from .stage_manager import StageManager
try:
from pychron.canvas.canvas2D.video_laser_tray_canvas import VideoLaserTrayCanvas
except ImportError:
from pychron.canvas.canvas2D.laser_tray_canvas import (
LaserTrayCanvas as VideoLaserTrayCanvas,
)
class VideoStageManager(StageManager):
""" """
video = Instance(Video)
camera = Instance(BaseCamera)
canvas_editor_klass = VideoComponentEditor
camera_zoom_coefficients = Property(
String(enter_set=True, auto_set=False), depends_on="_camera_zoom_coefficients"
)
_camera_zoom_coefficients = String
use_auto_center_interpolation = Bool(False)
configure_camera_device_button = Button
autocenter_button = Button("AutoCenter")
configure_autocenter_button = Button("Configure")
autocenter_manager = Instance("pychron.mv.autocenter_manager.AutoCenterManager")
autofocus_manager = Instance("pychron.mv.focus.autofocus_manager.AutoFocusManager")
# zoom_calibration_manager = Instance(
# 'pychron.mv.zoom.zoom_calibration.ZoomCalibrationManager')
snapshot_button = Button("Snapshot")
snapshot_mode = Enum("Single", "3 Burst", "10 Burst")
auto_save_snapshot = Bool(True)
record = Event
record_label = Property(depends_on="is_recording")
is_recording = Bool
use_db = False
use_video_archiver = Bool(True)
video_archiver = Instance("pychron.core.helpers.archiver.Archiver")
video_identifier = Str
# use_video_server = Bool(False)
# video_server_port = Int
# video_server_quality = Int
# video_server = Instance('pychron.image.video_server.VideoServer')
use_media_storage = Bool(False)
auto_upload = Bool(False)
keep_local_copy = Bool(False)
lumen_detector = Instance(LumenDetector)
render_with_markup = Bool(False)
burst_delay = Int(250)
_auto_correcting = False
stop_timer = Event
pxpermm = Float(23)
_measure_grain_t = None
_measure_grain_evt = None
grain_polygons = None
dimension_multiplier = Float(1)
def motor_event_hook(self, name, value, *args, **kw):
if name == "zoom":
self._update_zoom(value)
def bind_preferences(self, pref_id):
self.debug("binding preferences")
super(VideoStageManager, self).bind_preferences(pref_id)
if self.autocenter_manager:
self.autocenter_manager.bind_preferences(pref_id)
# bind_preference(self.autocenter_manager, 'use_autocenter',
# '{}.use_autocenter'.format(pref_id))
bind_preference(
self, "render_with_markup", "{}.render_with_markup".format(pref_id)
)
bind_preference(self, "burst_delay", "{}.burst_delay".format(pref_id))
bind_preference(self, "auto_upload", "{}.auto_upload".format(pref_id))
bind_preference(
self, "use_media_storage", "{}.use_media_storage".format(pref_id)
)
bind_preference(self, "keep_local_copy", "{}.keep_local_copy".format(pref_id))
bind_preference(
self, "dimension_multiplier", "{}.dimension_multiplier".format(pref_id)
)
bind_preference(
self, "use_video_archiver", "{}.use_video_archiver".format(pref_id)
)
bind_preference(self, "video_identifier", "{}.video_identifier".format(pref_id))
bind_preference(self, "use_video_server", "{}.use_video_server".format(pref_id))
bind_preference(
self.video_archiver,
"archive_months",
"{}.video_archive_months".format(pref_id),
)
bind_preference(
self.video_archiver, "archive_days", "{}.video_archive_days".format(pref_id)
)
bind_preference(
self.video_archiver,
"archive_hours",
"{}.video_archive_hours".format(pref_id),
)
bind_preference(
self.video_archiver, "root", "{}.video_directory".format(pref_id)
)
# bind_preference(self.video, 'output_mode',
# '{}.video_output_mode'.format(pref_id))
# bind_preference(self.video, 'ffmpeg_path',
# '{}.ffmpeg_path'.format(pref_id))
def get_grain_polygon(self):
ld = self.lumen_detector
l, m = ld.lum()
return m.tostring()
def get_grain_polygon_blob(self):
# self.debug('Get grain polygons n={}'.format(len(self.grain_polygons)))
try:
t, md, p = next(self.grain_polygons)
a = pack("ff", ((t, md),))
b = pack("HH", p)
return encode_blob(a + b)
except (StopIteration, TypeError) as e:
self.debug("No more grain polygons. {}".format(e))
def stop_measure_grain_polygon(self):
self.debug("Stop measure polygons {}".format(self._measure_grain_evt))
if self._measure_grain_evt:
self._measure_grain_evt.set()
return True
def start_measure_grain_polygon(self):
self._measure_grain_evt = evt = TEvent()
def _measure_grain_polygon():
ld = self.lumen_detector
dim = self.get_target_dimension()
ld.pxpermm = self.pxpermm
self.debug("Starting measure grain polygon")
masks = []
display_image = self.autocenter_manager.display_image
mask_dim = self.get_mask_dimension()
mask_dim_mm = mask_dim * self.pxpermm
ld.grain_measuring = True
while not evt.is_set():
src = self._get_preprocessed_src()
if src is not None:
targets = ld.find_targets(
display_image,
src,
dim,
mask=mask_dim,
search={"start_offset_scalar": 1},
)
if targets:
t = time.time()
targets = [
(t, mask_dim_mm, ti.poly_points.tolist()) for ti in targets
]
masks.extend(targets)
sleep(0.1)
ld.grain_measuring = False
self.grain_polygons = (m for m in masks)
self.debug("exiting measure grain")
self._measure_grain_t = QThread(target=_measure_grain_polygon)
self._measure_grain_t.start()
return True
def start_recording(
self, path=None, use_dialog=False, basename="vm_recording", **kw
):
""" """
directory = None
if os.path.sep in basename:
args = os.path.split(basename)
directory, basename = os.path.sep.join(args[:-1]), args[-1]
if path is None:
if use_dialog:
path = self.save_file_dialog()
else:
vd = self.video_archiver.root
self.debug("video archiver root {}".format(vd))
if not vd:
vd = paths.video_dir
if directory:
vd = os.path.join(vd, directory)
if not os.path.isdir(vd):
os.mkdir(vd)
path = unique_path_from_manifest(vd, basename, extension="avi")
kw["path"] = path
kw["basename"] = basename
self._start_recording(**kw)
self.is_recording = True
return path
def stop_recording(self, user="remote", delay=None):
""" """
def close():
self.is_recording = False
self.info("stop video recording")
p = self.video.output_path
if self.video.stop_recording(wait=True):
if self.auto_upload:
try:
p = self._upload(p, inform=False)
except BaseException as e:
self.critical("Failed uploading {}. error={}".format(p, e))
return p
if self.video.is_recording():
if delay:
t = Timer(delay, close)
t.start()
else:
return close()
@property
def video_configuration_path(self):
if self.configuration_dir_path:
return os.path.join(self.configuration_dir_path, "camera.yaml")
def initialize_video(self):
if self.video:
identifier = 0
p = self.video_configuration_path
if os.path.isfile(p):
yd = yload(p)
vid = yd["Device"]
identifier = vid.get("identifier", 0)
self.video.open(identifier=identifier)
self.video.load_configuration(p)
self.lumen_detector.pixel_depth = self.video.pixel_depth
def initialize_stage(self):
super(VideoStageManager, self).initialize_stage()
self.initialize_video()
# s = self.stage_controller
# if s.axes:
# xa = s.axes['x'].drive_ratio
# ya = s.axes['y'].drive_ratio
# self._drive_xratio = xa
# self._drive_yratio = ya
self._update_zoom(0)
def autocenter(self, *args, **kw):
return self._autocenter(*args, **kw)
def snapshot(
self,
path=None,
name=None,
auto=False,
inform=True,
return_blob=False,
pic_format=".jpg",
include_raw=True,
):
"""
path: abs path to use
name: base name to use if auto saving in default dir
auto: force auto save
returns:
path: local abs path
upath: remote abs path
"""
if path is None:
if self.auto_save_snapshot or auto:
if name is None:
name = "snapshot"
path = unique_path_from_manifest(paths.snapshot_dir, name, pic_format)
elif name is not None:
if not os.path.isdir(os.path.dirname(name)):
path = unique_path_from_manifest(
paths.snapshot_dir, name, pic_format
)
else:
path = name
else:
path = self.save_file_dialog()
if path:
self.info("saving snapshot {}".format(path))
# play camera shutter sound
# play_sound('shutter')
if include_raw:
frame = self.video.get_cached_frame()
head, _ = os.path.splitext(path)
raw_path = "{}.tif".format(head)
pil_save(frame, raw_path)
self._render_snapshot(path)
if self.auto_upload:
if include_raw:
self._upload(raw_path)
upath = self._upload(path, inform=inform)
if upath is None:
upath = ""
if inform:
if self.keep_local_copy:
self.information_dialog(
'Snapshot saved: "{}".\nUploaded : "{}"'.format(
path, upath
)
)
else:
self.information_dialog(
'Snapshot uploaded to "{}"'.format(upath)
)
else:
upath = None
if inform:
self.information_dialog('Snapshot saved to "{}"'.format(path))
if return_blob:
with open(path, "rb") as rfile:
im = rfile.read()
return path, upath, im
else:
return path, upath
def kill(self):
""" """
super(VideoStageManager, self).kill()
if self.camera:
self.camera.save_calibration()
self.stop_timer = True
self.canvas.close_video()
if self.video:
self.video.close(force=True)
# if self.use_video_server:
# self.video_server.stop()
# if self._stage_maps:
# for s in self._stage_maps:
# s.dump_correction_file()
self.clean_video_archive()
def clean_video_archive(self):
if self.use_video_archiver:
self.info("Cleaning video directory")
self.video_archiver.clean(("manifest.yaml",))
def is_auto_correcting(self):
return self._auto_correcting
def cancel_auto_correcting(self):
self.autocenter_manager.cancel()
return True
crop_width = 5
crop_height = 5
def get_scores(self, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
return ld.get_scores(src, **kw)
def find_lum_peak(self, min_distance, blur, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
dim = self.get_target_dimension()
mask_dim = dim * 1.05
# mask_dim_mm = mask_dim * self.pxpermm
if src is not None and src.ndim >= 2:
return ld.find_lum_peak(
src, dim, mask_dim, blur=blur, min_distance=min_distance, **kw
)
def get_brightness(self, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
dim = self.get_target_dimension()
return ld.get_value(src, dim, **kw)
# src = self.video.get_cached_frame()
# csrc = copy(src)
# src, v = ld.get_value(csrc, **kw)
# return csrc, src, v
def get_frame_size(self):
cw = 2 * self.crop_width * self.pxpermm
ch = 2 * self.crop_height * self.pxpermm
return cw, ch
def close_open_images(self):
if self.autocenter_manager:
self.autocenter_manager.close_open_images()
def finish_move_to_hole(self, user_entry):
self.debug("finish move to hole")
# if user_entry and not self.keep_images_open:
# self.close_open_images()
def get_preprocessed_src(self):
return self._get_preprocessed_src()
def get_target_dimension(self, hole=None):
dim = self.stage_map.g_dimension
if hole:
if isinstance(hole, (int, str)):
hole = self.stage_map.get_hole(hole)
dim = hole.dimension
return dim * self.dimension_multiplier
def get_mask_dimension(self):
return self.get_target_dimension() * 1.05
# private
def _get_preprocessed_src(self):
ld = self.lumen_detector
src = copy(self.video.get_cached_frame())
dim = self.get_target_dimension()
ld.pxpermm = self.pxpermm
offx, offy = self.canvas.get_screen_offset()
cropdim = dim * 2.5
if src is not None:
if len(src.shape):
src = ld.crop(src, cropdim, cropdim, offx, offy, verbose=False)
return src
def _stage_map_changed_hook(self):
self.lumen_detector.hole_radius = self.get_target_dimension()
def _upload(self, src, inform=True):
if not self.use_media_storage:
msg = "Use Media Storage not enabled in Laser preferences"
if inform:
self.warning_dialog(msg)
else:
self.warning(msg)
else:
srv = "pychron.media_storage.manager.MediaStorageManager"
msm = self.parent.application.get_service(srv)
if msm is not None:
d = os.path.split(os.path.dirname(src))[-1]
dest = os.path.join(self.parent.name, d, os.path.basename(src))
msm.put(src, dest)
if not self.keep_local_copy:
self.debug("removing {}".format(src))
if src.endswith(".avi"):
head, ext = os.path.splitext(src)
vd = "{}-images".format(head)
self.debug("removing video build directory {}".format(vd))
shutil.rmtree(vd)
os.remove(src)
dest = "{}/{}".format(msm.get_base_url(), dest)
return dest
else:
msg = "Media Storage Plugin not enabled"
if inform:
self.warning_dialog(msg)
else:
self.warning(msg)
def _render_snapshot(self, path):
from chaco.plot_graphics_context import PlotGraphicsContext
c = self.canvas
p = None
was_visible = False
if not self.render_with_markup:
p = c.show_laser_position
c.show_laser_position = False
if self.points_programmer.is_visible:
c.hide_all()
was_visible = True
gc = PlotGraphicsContext((int(c.outer_width), int(c.outer_height)))
c.do_layout()
gc.render_component(c)
# gc.save(path)
from pychron.core.helpers import save_gc
save_gc.save(gc, path)
if p is not None:
c.show_laser_position = p
if was_visible:
c.show_all()
def _start_recording(self, path, basename):
self.info("start video recording {}".format(path))
d = os.path.dirname(path)
if not os.path.isdir(d):
self.warning("invalid directory {}".format(d))
self.warning("using default directory")
path, _ = unique_path(paths.video_dir, basename, extension="avi")
self.info("saving recording to path {}".format(path))
# if self.use_db:
# db = self.get_video_database()
# db.connect()
#
# v = db.add_video_record(rid=basename)
# db.add_path(v, path)
# self.info('saving {} to database'.format(basename))
# db.commit()
video = self.video
crop_to_hole = True
dim = self.get_target_dimension()
cropdim = dim * 8 * self.pxpermm
color = self.canvas.crosshairs_color.getRgb()[:3]
r = int(self.canvas.get_crosshairs_radius() * self.pxpermm)
# offx, offy = self.canvas.get_screen_offset()
def renderer(p):
# cw, ch = self.get_frame_size()
frame = video.get_cached_frame()
if frame is not None:
if not len(frame.shape):
return
frame = copy(frame)
# ch, cw, _ = frame.shape
# ch, cw = int(ch), int(cw)
if crop_to_hole:
frame = video.crop(frame, 0, 0, cropdim, cropdim)
if self.render_with_markup:
# draw crosshairs
if len(frame.shape) == 2:
frame = gray2rgb(frame)
ch, cw, _ = frame.shape
ch, cw = int(ch), int(cw)
y = ch // 2
x = cw // 2
cp = circle_perimeter(y, x, r, shape=(ch, cw))
frame[cp] = color
frame[line(y, 0, y, x - r)] = color # left
frame[line(y, x + r, y, int(cw) - 1)] = color # right
frame[line(0, x, y - r, x)] = color # bottom
frame[line(y + r, x, int(ch) - 1, x)] = color # top
if frame is not None:
pil_save(frame, p)
self.video.start_recording(path, renderer)
def _move_to_hole_hook(self, holenum, correct, autocentered_position):
args = holenum, correct, autocentered_position
self.debug(
"move to hole hook holenum={}, "
"correct={}, autocentered_position={}".format(*args)
)
if correct:
ntries = 1 if autocentered_position else 3
self._auto_correcting = True
try:
self._autocenter(holenum=holenum, ntries=ntries, save=True)
except BaseException as e:
self.critical("Autocentering failed. {}".format(e))
self._auto_correcting = False
# def find_center(self):
# ox, oy = self.canvas.get_screen_offset()
# rpos, src = self.autocenter_manager.calculate_new_center(
# self.stage_controller.x,
# self.stage_controller.y,
# ox, oy,
# dim=self.stage_map.g_dimension, open_image=False)
#
# return rpos, src
# def find_target(self):
# if self.video:
# ox, oy = self.canvas.get_screen_offset()
# src = self.video.get_cached_frame()
#
# ch = cw = self.pxpermm * self.stage_map.g_dimension * 2.5
# src = self.video.crop(src, ox, oy, cw, ch)
# return self.lumen_detector.find_target(src)
#
# def find_best_target(self):
# if self.video:
# src = self.video.get_cached_frame()
# src = self.autocenter_manager.crop(src)
# return self.lumen_detector.find_best_target(src)
def _autocenter(self, holenum=None, ntries=3, save=False, inform=False):
self.debug("do autocenter")
rpos = None
interp = False
sm = self.stage_map
st = time.time()
if self.autocenter_manager.use_autocenter:
time.sleep(0.1)
dim = self.get_target_dimension()
shape = sm.g_shape
if holenum is not None:
hole = sm.get_hole(holenum)
if hole is not None:
dim = self.get_target_dimension(holenum)
shape = hole.shape
ox, oy = self.canvas.get_screen_offset()
for ti in range(max(1, ntries)):
# use machine vision to calculate positioning error
rpos = self.autocenter_manager.calculate_new_center(
self.stage_controller.x,
self.stage_controller.y,
ox,
oy,
dim=dim,
shape=shape,
)
if rpos is not None:
self.linear_move(
*rpos,
block=True,
source="autocenter",
use_calibration=False,
update_hole=False,
velocity_scalar=0.1
)
time.sleep(0.1)
else:
self.snapshot(
auto=True,
name="pos_err_{}_{}".format(holenum, ti),
inform=inform,
)
break
# if use_interpolation and rpos is None:
# self.info('trying to get interpolated position')
# rpos = sm.get_interpolated_position(holenum)
# if rpos:
# s = '{:0.3f},{:0.3f}'
# interp = True
# else:
# s = 'None'
# self.info('interpolated position= {}'.format(s))
if rpos:
corrected = True
# add an adjustment value to the stage map
if save and holenum is not None:
sm.set_hole_correction(holenum, *rpos)
sm.dump_correction_file()
# f = 'interpolation' if interp else 'correction'
else:
# f = 'uncorrected'
corrected = False
if holenum is not None:
hole = sm.get_hole(holenum)
if hole:
rpos = hole.nominal_position
self.debug("Autocenter duration ={}".format(time.time() - st))
return rpos, corrected, interp
# ===============================================================================
# views
# ===============================================================================
# ===============================================================================
# view groups
# ===============================================================================
# ===============================================================================
# handlers
# ===============================================================================
def _configure_camera_device_button_fired(self):
if self.video:
self.video.load_configuration(self.video_configuration_path)
if hasattr(self.video.cap, "reload_configuration"):
self.video.cap.reload_configuration(self.video_configuration_path)
self.lumen_detector.pixel_depth = self.video.pixel_depth
def _update_zoom(self, v):
if self.camera:
self._update_xy_limits()
@on_trait_change("parent:motor_event")
def _update_motor(self, new):
print("motor event", new, self.canvas, self.canvas.camera)
# s = self.stage_controller
if self.camera:
if not isinstance(new, (int, float)):
args, _ = new
name, v = args[:2]
else:
name = "zoom"
v = new
if name == "zoom":
self._update_xy_limits()
# pxpermm = self.canvas.camera.set_limits_by_zoom(v, s.x, s.y)
# self.pxpermm = pxpermm
elif name == "beam":
self.lumen_detector.beam_radius = v / 2.0
def _pxpermm_changed(self, new):
if self.autocenter_manager:
self.autocenter_manager.pxpermm = new
self.lumen_detector.pxpermm = new
# self.lumen_detector.mask_radius = new*self.stage_map.g_dimension
def _autocenter_button_fired(self):
self.goto_position(self.calibrated_position_entry, autocenter_only=True)
# def _configure_autocenter_button_fired(self):
# info = self.autocenter_manager.edit_traits(view='configure_view',
# kind='livemodal')
# if info.result:
# self.autocenter_manager.dump_detector()
def _snapshot_button_fired(self):
n = 1
if self.snapshot_mode == "3 Burst":
n = 3
elif self.snapshot_mode == "10 Burst":
n = 10
bd = self.burst_delay * 0.001
delay = n > 1
for i in range(n):
st = time.time()
self.snapshot(inform=False)
if delay:
time.sleep(max(0, bd - time.time() + st))
def _record_fired(self):
# time.sleep(4)
# self.stop_recording()
if self.is_recording:
self.stop_recording()
else:
self.start_recording()
def _use_video_server_changed(self):
if self.use_video_server:
self.video_server.start()
else:
self.video_server.stop()
def _get_camera_zoom_coefficients(self):
return self.camera.zoom_coefficients
def _set_camera_zoom_coefficients(self, v):
self.camera.zoom_coefficients = ",".join(map(str, v))
self._update_xy_limits()
def _validate_camera_zoom_coefficients(self, v):
try:
return list(map(float, v.split(",")))
except ValueError:
pass
def _update_xy_limits(self):
z = 0
if self.parent is not None:
zoom = self.parent.get_motor("zoom")
if zoom is not None:
z = zoom.data_position
x = self.stage_controller.get_current_position("x")
y = self.stage_controller.get_current_position("y")
if self.camera:
pxpermm = self.camera.set_limits_by_zoom(z, x, y, self.canvas)
self.pxpermm = pxpermm
self.debug("updated xy limits zoom={}, pxpermm={}".format(z, pxpermm))
self.canvas.request_redraw()
def _get_record_label(self):
return "Start Recording" if not self.is_recording else "Stop"
# ===============================================================================
# factories
# ===============================================================================
def _canvas_factory(self):
""" """
v = VideoLaserTrayCanvas(stage_manager=self, padding=30)
return v
def _canvas_editor_factory(self):
e = super(VideoStageManager, self)._canvas_editor_factory()
e.stop_timer = "stop_timer"
return e
# ===============================================================================
# defaults
# ===============================================================================
def _camera_default(self):
klass = YamlCamera
# p = os.path.join(self.configuration_dir_path, 'camera.yaml')
p = self.video_configuration_path
if p is not None:
if not os.path.isfile(p):
klass = Camera
pp = os.path.join(self.configuration_dir_path, "camera.cfg")
if not os.path.isfile(pp):
self.warning_dialog(
"No Camera configuration file a {} or {}".format(p, pp)
)
p = pp
camera = klass()
camera.load(p)
else:
camera = Camera()
camera.set_limits_by_zoom(0, 0, 0, self.canvas)
self._camera_zoom_coefficients = camera.zoom_coefficients
return camera
def _lumen_detector_default(self):
ld = LumenDetector()
ld.pixel_depth = self.video.pixel_depth
return ld
def _video_default(self):
v = Video()
self.canvas.video = v
return v
def _video_server_default(self):
from pychron.image.video_server import VideoServer
return VideoServer(video=self.video)
def _video_archiver_default(self):
from pychron.core.helpers.archiver import Archiver
return Archiver()
def _autocenter_manager_default(self):
if self.parent.mode != "client":
# from pychron.mv.autocenter_manager import AutoCenterManager
if "co2" in self.parent.name.lower():
from pychron.mv.autocenter_manager import CO2AutocenterManager
klass = CO2AutocenterManager
else:
from pychron.mv.autocenter_manager import DiodeAutocenterManager
klass = DiodeAutocenterManager
return klass(
video=self.video, canvas=self.canvas, application=self.application
)
def _autofocus_manager_default(self):
if self.parent.mode != "client":
from pychron.mv.focus.autofocus_manager import AutoFocusManager
return AutoFocusManager(
video=self.video,
laser_manager=self.parent,
stage_controller=self.stage_controller,
canvas=self.canvas,
application=self.application,
)
# def _zoom_calibration_manager_default(self):
# if self.parent.mode != 'client':
# from pychron.mv.zoom.zoom_calibration import ZoomCalibrationManager
# return ZoomCalibrationManager(laser_manager=self.parent)
# ===============================================================================
    # calculate camera params
# ===============================================================================
# def _calculate_indicator_positions(self, shift=None):
# ccm = self.camera_calibration_manager
#
# zoom = self.parent.zoom
# pychron, name = self.video_manager.snapshot(identifier=zoom)
# ccm.image_factory(pychron=pychron)
#
# ccm.process_image()
# ccm.title = name
#
# cond = Condition()
# ccm.cond = cond
# cond.acquire()
# do_later(ccm.edit_traits, view='snapshot_view')
# if shift:
# self.stage_controller.linear_move(*shift, block=False)
#
# cond.wait()
# cond.release()
#
# def _calculate_camera_parameters(self):
# ccm = self.camera_calibration_manager
# self._calculate_indicator_positions()
# if ccm.result:
# if self.calculate_offsets:
# rdxmm = 5
# rdymm = 5
#
# x = self.stage_controller.x + rdxmm
# y = self.stage_controller.y + rdymm
# self.stage_controller.linear_move(x, y, block=True)
#
# time.sleep(2)
#
# polygons1 = ccm.polygons
# x = self.stage_controller.x - rdxmm
# y = self.stage_controller.y - rdymm
# self._calculate_indicator_positions(shift=(x, y))
#
# polygons2 = ccm.polygons
#
# # compare polygon sets
# # calculate pixel displacement
# dxpx = sum([sum([(pts1.x - pts2.x)
# for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# for p1, p2 in zip(polygons1, polygons2)]) / len(polygons1)
# dypx = sum([sum([(pts1.y - pts2.y)
# for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# for p1, p2 in zip(polygons1, polygons2)]) / len(polygons1)
#
# # convert pixel displacement to mm using defined mapping
# dxmm = dxpx / self.pxpercmx
# dymm = dypx / self.pxpercmy
#
# # calculate drive offset. ratio of request/actual
# try:
# self.drive_xratio = rdxmm / dxmm
# self.drive_yratio = rdymm / dymm
# except ZeroDivisionError:
# self.drive_xratio = 100
#
# def _calibration_manager_default(self):
#
# # self.video.open(user = 'calibration')
# return CalibrationManager(parent = self,
# laser_manager = self.parent,
# video_manager = self.video_manager,
# )
# ============= EOF ====================================
# adxs = []
# adys = []
# for p1, p2 in zip(polygons, polygons2):
# # dxs = []
# # dys = []
# # for pts1, pts2 in zip(p1.points, p2.points):
# #
# # dx = pts1.x - pts2.x
# # dy = pts1.y - pts2.y
# # dxs.append(dx)
# # dys.append(dy)
# # dxs = [(pts1.x - pts2.x) for pts1, pts2 in zip(p1.points, p2.points)]
# # dys = [(pts1.y - pts2.y) for pts1, pts2 in zip(p1.points, p2.points)]
# #
# adx = sum([(pts1.x - pts2.x) for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# ady = sum([(pts1.y - pts2.y) for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
#
# # adx = sum(dxs) / len(dxs)
# # ady = sum(dys) / len(dys)
# adxs.append(adx)
# adys.append(ady)
# print 'xffset', sum(adxs) / len(adxs)
# print 'yffset', sum(adys) / len(adys)
|
{
"content_hash": "2b087c3edffdd0e4fc9e6cbb8b9791e7",
"timestamp": "",
"source": "github",
"line_count": 1063,
"max_line_length": 112,
"avg_line_length": 34.24270931326435,
"alnum_prop": 0.5089285714285714,
"repo_name": "NMGRL/pychron",
"id": "a14ff2afd388ec101e61c966ae676a89cf6a840f",
"size": "37136",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/lasers/stage_managers/video_stage_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
}
|
"""This tutorial introduces restricted boltzmann machines (RBM) using Theano.
Boltzmann Machines (BMs) are a particular form of energy-based model which
contain hidden variables. Restricted Boltzmann Machines further restrict BMs
to those without visible-visible and hidden-hidden connections.
"""
import time
try:
import PIL.Image as Image
except ImportError:
import Image
import numpy
import theano
import theano.tensor as T
import os
from theano.tensor.shared_randomstreams import RandomStreams
from utils import tile_raster_images
from logistic_sgd import load_data
# start-snippet-1
class RBM(object):
"""Restricted Boltzmann Machine (RBM) """
def __init__(
self,
input=None,
n_visible=784,
n_hidden=500,
W=None,
hbias=None,
vbias=None,
numpy_rng=None,
theano_rng=None
):
"""
RBM constructor. Defines the parameters of the model along with
basic operations for inferring hidden from visible (and vice-versa),
as well as for performing CD updates.
:param input: None for standalone RBMs or symbolic variable if RBM is
part of a larger graph.
:param n_visible: number of visible units
:param n_hidden: number of hidden units
:param W: None for standalone RBMs or symbolic variable pointing to a
shared weight matrix in case RBM is part of a DBN network; in a DBN,
the weights are shared between RBMs and layers of a MLP
:param hbias: None for standalone RBMs or symbolic variable pointing
to a shared hidden units bias vector in case RBM is part of a
different network
:param vbias: None for standalone RBMs or a symbolic variable
pointing to a shared visible units bias
"""
self.n_visible = n_visible
self.n_hidden = n_hidden
if numpy_rng is None:
            # create a random number generator
numpy_rng = numpy.random.RandomState(1234)
if theano_rng is None:
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
if W is None:
            # W is initialized with `initial_W` which is uniformly
            # sampled from -4*sqrt(6./(n_visible+n_hidden)) to
            # 4*sqrt(6./(n_hidden+n_visible)); the output of uniform is
            # converted using asarray to dtype theano.config.floatX so
            # that the code is runnable on GPU
initial_W = numpy.asarray(
numpy_rng.uniform(
low=-4 * numpy.sqrt(6. / (n_hidden + n_visible)),
high=4 * numpy.sqrt(6. / (n_hidden + n_visible)),
size=(n_visible, n_hidden)
),
dtype=theano.config.floatX
)
# theano shared variables for weights and biases
W = theano.shared(value=initial_W, name='W', borrow=True)
if hbias is None:
# create shared variable for hidden units bias
hbias = theano.shared(
value=numpy.zeros(
n_hidden,
dtype=theano.config.floatX
),
name='hbias',
borrow=True
)
if vbias is None:
# create shared variable for visible units bias
vbias = theano.shared(
value=numpy.zeros(
n_visible,
dtype=theano.config.floatX
),
name='vbias',
borrow=True
)
# initialize input layer for standalone RBM or layer0 of DBN
self.input = input
if not input:
self.input = T.matrix('input')
self.W = W
self.hbias = hbias
self.vbias = vbias
self.theano_rng = theano_rng
# **** WARNING: It is not a good idea to put things in this list
# other than shared variables created in this function.
self.params = [self.W, self.hbias, self.vbias]
# end-snippet-1
def free_energy(self, v_sample):
''' Function to compute the free energy '''
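        # For visible bias b, hidden bias c and weights W, this computes
        #   F(v) = -v.b - sum_j log(1 + exp((v.W + c)_j))
        # which is exactly what the code below evaluates.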
wx_b = T.dot(v_sample, self.W) + self.hbias
vbias_term = T.dot(v_sample, self.vbias)
hidden_term = T.sum(T.log(1 + T.exp(wx_b)), axis=1)
return -hidden_term - vbias_term
def propup(self, vis):
'''This function propagates the visible units activation upwards to
the hidden units
        Note that we also return the pre-sigmoid activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
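        # This computes p(h_j = 1 | v) = sigmoid((v.W + hbias)_j).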
pre_sigmoid_activation = T.dot(vis, self.W) + self.hbias
return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
def sample_h_given_v(self, v0_sample):
''' This function infers state of hidden units given visible units '''
# compute the activation of the hidden units given a sample of
# the visibles
pre_sigmoid_h1, h1_mean = self.propup(v0_sample)
# get a sample of the hiddens given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
h1_sample = self.theano_rng.binomial(size=h1_mean.shape,
n=1, p=h1_mean,
dtype=theano.config.floatX)
return [pre_sigmoid_h1, h1_mean, h1_sample]
def propdown(self, hid):
'''This function propagates the hidden units activation downwards to
the visible units
        Note that we also return the pre_sigmoid_activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
pre_sigmoid_activation = T.dot(hid, self.W.T) + self.vbias
return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
def sample_v_given_h(self, h0_sample):
''' This function infers state of visible units given hidden units '''
# compute the activation of the visible given the hidden sample
pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)
# get a sample of the visible given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
v1_sample = self.theano_rng.binomial(size=v1_mean.shape,
n=1, p=v1_mean,
dtype=theano.config.floatX)
return [pre_sigmoid_v1, v1_mean, v1_sample]
def gibbs_hvh(self, h0_sample):
''' This function implements one step of Gibbs sampling,
starting from the hidden state'''
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
return [pre_sigmoid_v1, v1_mean, v1_sample,
pre_sigmoid_h1, h1_mean, h1_sample]
def gibbs_vhv(self, v0_sample):
''' This function implements one step of Gibbs sampling,
starting from the visible state'''
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v0_sample)
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h1_sample)
return [pre_sigmoid_h1, h1_mean, h1_sample,
pre_sigmoid_v1, v1_mean, v1_sample]
# start-snippet-2
def get_cost_updates(self, lr=0.1, persistent=None, k=1):
"""This function implements one step of CD-k or PCD-k
:param lr: learning rate used to train the RBM
:param persistent: None for CD. For PCD, shared variable
containing old state of Gibbs chain. This must be a shared
variable of size (batch size, number of hidden units).
:param k: number of Gibbs steps to do in CD-k/PCD-k
Returns a proxy for the cost and the updates dictionary. The
dictionary contains the update rules for weights and biases but
also an update of the shared variable used to store the persistent
chain, if one is used.
"""
# compute positive phase
pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)
# decide how to initialize persistent chain:
# for CD, we use the newly generate hidden sample
# for PCD, we initialize from the old state of the chain
if persistent is None:
chain_start = ph_sample
else:
chain_start = persistent
# end-snippet-2
# perform actual negative phase
# in order to implement CD-k/PCD-k we need to scan over the
# function that implements one gibbs step k times.
# Read the Theano tutorial on scan for more information:
# http://deeplearning.net/software/theano/library/scan.html
# the scan will return the entire Gibbs chain
(
[
pre_sigmoid_nvs,
nv_means,
nv_samples,
pre_sigmoid_nhs,
nh_means,
nh_samples
],
updates
) = theano.scan(
self.gibbs_hvh,
# the None values are placeholders, saying that
# chain_start is the initial state corresponding to the
# 6th output
outputs_info=[None, None, None, None, None, chain_start],
n_steps=k
)
# start-snippet-3
# determine gradients on RBM parameters
# note that we only need the sample at the end of the chain
chain_end = nv_samples[-1]
cost = T.mean(self.free_energy(self.input)) - T.mean(
self.free_energy(chain_end))
# We must not compute the gradient through the gibbs sampling
gparams = T.grad(cost, self.params, consider_constant=[chain_end])
# end-snippet-3 start-snippet-4
# constructs the update dictionary
for gparam, param in zip(gparams, self.params):
# make sure that the learning rate is of the right dtype
updates[param] = param - gparam * T.cast(
lr,
dtype=theano.config.floatX
)
if persistent:
# Note that this works only if persistent is a shared variable
updates[persistent] = nh_samples[-1]
# pseudo-likelihood is a better proxy for PCD
monitoring_cost = self.get_pseudo_likelihood_cost(updates)
else:
# reconstruction cross-entropy is a better proxy for CD
monitoring_cost = self.get_reconstruction_cost(updates,
pre_sigmoid_nvs[-1])
return monitoring_cost, updates
# end-snippet-4
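# Illustrative note (not part of the original file): the cost above encodes
# the usual CD-k/PCD-k approximation of the negative log-likelihood gradient,
#     d/dtheta -log p(v) ~= d/dtheta F(v_data) - d/dtheta F(v_model)
# where v_model (chain_end) is held constant via consider_constant so that
# T.grad does not backpropagate through the Gibbs sampling steps.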
def get_pseudo_likelihood_cost(self, updates):
"""Stochastic approximation to the pseudo-likelihood"""
# index of bit i in expression p(x_i | x_{\i})
bit_i_idx = theano.shared(value=0, name='bit_i_idx')
# binarize the input image by rounding to nearest integer
xi = T.round(self.input)
# calculate free energy for the given bit configuration
fe_xi = self.free_energy(xi)
# flip bit x_i of matrix xi and preserve all other bits x_{\i}
# Equivalent to xi[:,bit_i_idx] = 1-xi[:, bit_i_idx], but assigns
# the result to xi_flip, instead of working in place on xi.
xi_flip = T.set_subtensor(xi[:, bit_i_idx], 1 - xi[:, bit_i_idx])
# calculate free energy with bit flipped
fe_xi_flip = self.free_energy(xi_flip)
# equivalent to e^(-FE(x_i)) / (e^(-FE(x_i)) + e^(-FE(x_{\i})))
cost = T.mean(self.n_visible * T.log(T.nnet.sigmoid(fe_xi_flip -
fe_xi)))
# increment bit_i_idx % number as part of updates
updates[bit_i_idx] = (bit_i_idx + 1) % self.n_visible
return cost
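# Illustrative note (not part of the original file): for a binary RBM,
# p(x_i | x_{\i}) = sigmoid(FE(x with bit i flipped) - FE(x)), so the cost
# above is a stochastic estimate of the log pseudo-likelihood,
#     log PL(x) ~= n_visible * log sigmoid(FE(xi_flip) - FE(xi))
# evaluated at one rotating bit index per update.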
def get_reconstruction_cost(self, updates, pre_sigmoid_nv):
"""Approximation to the reconstruction error
Note that this function requires the pre-sigmoid activation as
input. To understand why this is so you need to understand a
bit about how Theano works. Whenever you compile a Theano
function, the computational graph that you pass as input gets
optimized for speed and stability. This is done by changing
several parts of the subgraphs with others. One such
optimization expresses terms of the form log(sigmoid(x)) in
terms of softplus. We need this optimization for the
cross-entropy since sigmoid of numbers larger than 30. (or
even less than that) turn to 1. and numbers smaller than
-30. turn to 0, which in turn will force Theano to compute
log(0) and therefore we will get either -inf or NaN as
cost. If the value is expressed in terms of softplus we do not
get this undesirable behaviour. This optimization usually
works fine, but here we have a special case. The sigmoid is
applied inside the scan op, while the log is
outside. Therefore Theano will only see log(scan(..)) instead
of log(sigmoid(..)) and will not apply the wanted
optimization. We cannot go and replace the sigmoid in scan
with something else either, because this only needs to be done
on the last step. Therefore the easiest and most efficient way
is to get also the pre-sigmoid activation as an output of
scan, and apply both the log and sigmoid outside scan such
that Theano can catch and optimize the expression.
"""
cross_entropy = T.mean(
T.sum(
self.input * T.log(T.nnet.sigmoid(pre_sigmoid_nv)) +
(1 - self.input) * T.log(1 - T.nnet.sigmoid(pre_sigmoid_nv)),
axis=1
)
)
return cross_entropy
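# Illustrative note (not part of the original file): the rewrite the docstring
# refers to is log(sigmoid(x)) = -softplus(-x) = -log(1 + exp(-x)). In float32,
# sigmoid(x) rounds to exactly 1.0 once x is large enough, so the naive
# log(1 - sigmoid(x)) becomes log(0) = -inf while the softplus form stays
# finite. A quick check (x = 30 is just a sample value):
#     x = numpy.float32(30.0)
#     numpy.log(1 - 1 / (1 + numpy.exp(-x)))  # -inf (sigmoid rounded to 1.0)
#     -numpy.log1p(numpy.exp(x))              # ~ -30.0, the stable equivalent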
def test_rbm(learning_rate=0.1, training_epochs=15,
dataset='mnist.pkl.gz', batch_size=20,
n_chains=20, n_samples=10, output_folder='rbm_plots',
n_hidden=500):
"""
Demonstrate how to train an RBM and afterwards sample from it using Theano.
This is demonstrated on MNIST.
:param learning_rate: learning rate used for training the RBM
:param training_epochs: number of epochs used for training
:param dataset: path to the pickled dataset
:param batch_size: size of a batch used to train the RBM
:param n_chains: number of parallel Gibbs chains to be used for sampling
:param n_samples: number of samples to plot for each chain
:param output_folder: directory in which to save the filter/sample plots
:param n_hidden: number of hidden units in the RBM
"""
datasets = load_data(dataset)
train_set_x, train_set_y = datasets[0]
test_set_x, test_set_y = datasets[2]
# compute number of minibatches for training, validation and testing
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
# allocate symbolic variables for the data
index = T.lscalar() # index to a [mini]batch
x = T.matrix('x') # the data is presented as rasterized images
rng = numpy.random.RandomState(123)
theano_rng = RandomStreams(rng.randint(2 ** 30))
# initialize storage for the persistent chain (state = hidden
# layer of chain)
persistent_chain = theano.shared(numpy.zeros((batch_size, n_hidden),
dtype=theano.config.floatX),
borrow=True)
# construct the RBM class
rbm = RBM(input=x, n_visible=28 * 28,
n_hidden=n_hidden, numpy_rng=rng, theano_rng=theano_rng)
# get the cost and the updates corresponding to one step of PCD-15
cost, updates = rbm.get_cost_updates(lr=learning_rate,
persistent=persistent_chain, k=15)
#################################
# Training the RBM #
#################################
if not os.path.isdir(output_folder):
os.makedirs(output_folder)
os.chdir(output_folder)
# start-snippet-5
# train_rbm returns the monitoring cost, but its main purpose is to
# apply the updates that train the RBM parameters
train_rbm = theano.function(
[index],
cost,
updates=updates,
givens={
x: train_set_x[index * batch_size: (index + 1) * batch_size]
},
name='train_rbm'
)
plotting_time = 0.
start_time = time.clock()
# go through training epochs
for epoch in xrange(training_epochs):
# go through the training set
mean_cost = []
for batch_index in xrange(n_train_batches):
mean_cost += [train_rbm(batch_index)]
print 'Training epoch %d, cost is ' % epoch, numpy.mean(mean_cost)
# Plot filters after each training epoch
plotting_start = time.clock()
# Construct image from the weight matrix
image = Image.fromarray(
tile_raster_images(
X=rbm.W.get_value(borrow=True).T,
img_shape=(28, 28),
tile_shape=(10, 10),
tile_spacing=(1, 1)
)
)
image.save('filters_at_epoch_%i.png' % epoch)
plotting_stop = time.clock()
plotting_time += (plotting_stop - plotting_start)
end_time = time.clock()
pretraining_time = (end_time - start_time) - plotting_time
print ('Training took %f minutes' % (pretraining_time / 60.))
# end-snippet-5 start-snippet-6
#################################
# Sampling from the RBM #
#################################
# find out the number of test samples
number_of_test_samples = test_set_x.get_value(borrow=True).shape[0]
# pick random test examples, with which to initialize the persistent chain
test_idx = rng.randint(number_of_test_samples - n_chains)
persistent_vis_chain = theano.shared(
numpy.asarray(
test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains],
dtype=theano.config.floatX
)
)
# end-snippet-6 start-snippet-7
plot_every = 1000
# define one step of Gibbs sampling (mf = mean-field) and build a
# function that does `plot_every` steps before returning the
# sample for plotting
(
[
presig_hids,
hid_mfs,
hid_samples,
presig_vis,
vis_mfs,
vis_samples
],
updates
) = theano.scan(
rbm.gibbs_vhv,
outputs_info=[None, None, None, None, None, persistent_vis_chain],
n_steps=plot_every
)
# add to updates the shared variable that takes care of our persistent
# chain.
updates.update({persistent_vis_chain: vis_samples[-1]})
# construct the function that implements our persistent chain.
# we generate the "mean field" activations for plotting and the actual
# samples for reinitializing the state of our persistent chain
sample_fn = theano.function(
[],
[
vis_mfs[-1],
vis_samples[-1]
],
updates=updates,
name='sample_fn'
)
# create a space to store the image for plotting (we need to leave
# room for the tile_spacing as well)
image_data = numpy.zeros(
(29 * n_samples + 1, 29 * n_chains - 1),
dtype='uint8'
)
for idx in xrange(n_samples):
# generate `plot_every` intermediate samples that we discard,
# because successive samples in the chain are too correlated
vis_mf, vis_sample = sample_fn()
print ' ... plotting sample ', idx
image_data[29 * idx:29 * idx + 28, :] = tile_raster_images(
X=vis_mf,
img_shape=(28, 28),
tile_shape=(1, n_chains),
tile_spacing=(1, 1)
)
# construct image
image = Image.fromarray(image_data)
image.save('samples.png')
# end-snippet-7
os.chdir('../')
if __name__ == '__main__':
test_rbm()
|
{
"content_hash": "f31498a4cfb3b41ced3194783be4d7cb",
"timestamp": "",
"source": "github",
"line_count": 540,
"max_line_length": 79,
"avg_line_length": 38.16111111111111,
"alnum_prop": 0.5959625370019896,
"repo_name": "h-mayorquin/mnist_dl_ann_project",
"id": "e5d48e08f7ec746a9ffcd491040c1256366df6ab",
"size": "20607",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/rbm.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "188064"
},
{
"name": "TeX",
"bytes": "35314"
}
],
"symlink_target": ""
}
|
from functools import wraps
from waffle.compat import CLASS_TYPES
from waffle.models import Flag, Switch, Sample
__all__ = ['override_flag', 'override_sample', 'override_switch']
class _overrider(object):
def __init__(self, name, active):
self.name = name
self.active = active
def __call__(self, func):
if isinstance(func, CLASS_TYPES):
return self.for_class(func)
else:
return self.for_callable(func)
def for_class(self, obj):
"""Wraps a class's test methods in the decorator"""
for attr in dir(obj):
if not attr.startswith('test_'):
# Ignore non-test functions
continue
attr_value = getattr(obj, attr)
if not callable(attr_value):
# Ignore non-functions
continue
setattr(obj, attr, self.for_callable(attr_value))
return obj
def for_callable(self, func):
"""Wraps a method in the decorator"""
@wraps(func)
def _wrapped(*args, **kwargs):
with self:
return func(*args, **kwargs)
return _wrapped
def get(self):
self.obj, self.created = self.cls.objects.get_or_create(name=self.name)
def update(self, active):
raise NotImplementedError
def get_value(self):
raise NotImplementedError
def __enter__(self):
self.get()
self.old_value = self.get_value()
if self.old_value != self.active:
self.update(self.active)
def __exit__(self, exc_type, exc_val, exc_tb):
if self.created:
self.obj.delete()
else:
self.update(self.old_value)
class override_switch(_overrider):
"""
override_switch is a contextmanager for easier testing of switches.
It accepts two parameters: the name of the switch and its state. Example
usage::
with override_switch('happy_mode', active=True):
...
If the `Switch` already existed, its value is changed inside the context
block, then restored to the original value. If the `Switch` did not exist
before entering the context, it is created, then removed at the end of the
block.
It can also act as a decorator::
@override_switch('happy_mode', active=True)
def test_happy_mode_enabled():
...
"""
cls = Switch
def update(self, active):
self.cls.objects.filter(pk=self.obj.pk).update(active=active)
def get_value(self):
return self.obj.active
class override_flag(_overrider):
cls = Flag
def update(self, active):
self.cls.objects.filter(pk=self.obj.pk).update(everyone=active)
def get_value(self):
return self.obj.everyone
class override_sample(_overrider):
cls = Sample
def get(self):
try:
self.obj = self.cls.objects.get(name=self.name)
self.created = False
except self.cls.DoesNotExist:
self.obj = self.cls.objects.create(name=self.name, percent='0.0')
self.created = True
def update(self, active):
if active is True:
p = 100.0
elif active is False:
p = 0.0
else:
p = active
self.cls.objects.filter(pk=self.obj.pk).update(percent='{0}'.format(p))
def get_value(self):
p = self.obj.percent
if p == 100.0:
return True
if p == 0.0:
return False
return p
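# Illustrative usage sketch (not part of the original module; the sample name
# is hypothetical): active=True maps to percent=100.0, active=False to
# percent=0.0, and a number is stored as the percent directly, e.g.
#     with override_sample('new_feature_rollout', active=50.0):
#         ...  # Sample 'new_feature_rollout' is at 50% inside this block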
|
{
"content_hash": "5bd752696d9e5c29a30b0b3361c46963",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 79,
"avg_line_length": 25.970588235294116,
"alnum_prop": 0.5784258210645526,
"repo_name": "crccheck/django-waffle",
"id": "d4e428205c47ad4e46503d46ee1f09e3d658e242",
"size": "3532",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "waffle/testutils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "929"
},
{
"name": "JavaScript",
"bytes": "1250"
},
{
"name": "Python",
"bytes": "129769"
},
{
"name": "Shell",
"bytes": "627"
}
],
"symlink_target": ""
}
|
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
for IPython 0.11 to 1.0+.
"""
from __future__ import print_function
import os
import sys
import codeop
import traceback
from IPython.core.error import UsageError
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.core.usage import default_banner_parts
from IPython.core.formatters import DisplayFormatter
from IPython.core import release
from IPython.terminal.interactiveshell import TerminalInteractiveShell
from IPython.terminal.ipapp import load_default_config
from IPython import InteractiveShell
from traitlets import Type
from traitlets import CBool, Unicode
from _pydevd_bundle.pydevd_constants import dict_keys, dict_iter_items
from _pydev_bundle.pydev_ipython_rich_output import PyDevDisplayHook, PyDevDisplayPub, \
patch_stdout
from _pydev_bundle.pydev_ipython_completer import init_shell_completer
default_pydev_banner_parts = default_banner_parts
default_pydev_banner = ''.join(default_pydev_banner_parts)
INLINE_OUTPUT_SUPPORTED = os.getenv('INLINE_OUTPUT_SUPPORTED', False)
def show_in_pager(self, strng, *args, **kwargs):
""" Run a string through pager """
# On PyDev we just output the string, there are scroll bars in the console
# to handle "paging". This is the same behaviour as when TERM==dumb (see
# page.py)
# for compatibility with mime-bundle form:
if isinstance(strng, dict):
strng = strng['text/plain']
print(strng)
def create_editor_hook(rpc_client):
def call_editor(filename, line=0, wait=True):
""" Open an editor in PyDev """
if line is None:
line = 0
# Make sure to send an absolute path because unlike most editor hooks
# we don't launch a process. This is more like what happens in the zmqshell
filename = os.path.abspath(filename)
# import sys
# sys.__stderr__.write('Calling editor at: %s:%s\n' % (pydev_host, pydev_client_port))
# Tell PyDev to open the editor
rpc_client.IPythonEditor(filename, str(line))
if wait:
try:
raw_input("Press Enter when done editing:")
except NameError:
input("Press Enter when done editing:")
return call_editor
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
banner1 = Unicode(default_pydev_banner, config=True,
help="""The part of the banner to be printed before the profile"""
)
# TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
# for now, just disable term_title
term_title = CBool(False)
# Note in version 0.11 there is no guard in the IPython code about displaying a
# warning, so with 0.11 you get:
# WARNING: Readline services not available or not loaded.
# WARNING: The auto-indent feature requires the readline library
# Disable readline, readline type code is all handled by PyDev (on Java side)
readline_use = CBool(False)
# autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
# and attempting to enable it will print a warning in the absence of readline.
autoindent = CBool(False)
# Force console to not give warning about color scheme choice and default to NoColor.
# TODO It would be nice to enable colors in PyDev but:
# - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
# effect isn't as nice anyway at the command line
# - If done, the color scheme should default to LightBG, but actually be dependent on
# any settings the user has (such as if a dark theme is in use, then Linux is probably
# a better theme).
colors_force = CBool(True)
colors = Unicode("NoColor")
# Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
# the `simple_prompt` flag is needed
simple_prompt = CBool(True)
if INLINE_OUTPUT_SUPPORTED:
displayhook_class = Type(PyDevDisplayHook)
display_pub_class = Type(PyDevDisplayPub)
def __init__(self, *args, **kwargs):
super(PyDevTerminalInteractiveShell, self).__init__(*args, **kwargs)
if INLINE_OUTPUT_SUPPORTED:
try:
self.enable_matplotlib('inline')
except:
sys.stderr.write("Failed to enable inline matplotlib plots\n")
sys.stderr.flush()
def patch_stdout_if_needed(self):
if INLINE_OUTPUT_SUPPORTED:
patch_stdout()
# In the PyDev Console, GUI control is done via hookable XML-RPC server
@staticmethod
def enable_gui(gui=None, app=None):
"""Switch amongst GUI input hooks by name.
"""
# Deferred import
if not INLINE_OUTPUT_SUPPORTED:
from pydev_ipython.inputhook import enable_gui as real_enable_gui
try:
return real_enable_gui(gui, app)
except ValueError as e:
raise UsageError("%s" % e)
def init_display_formatter(self):
if INLINE_OUTPUT_SUPPORTED:
self.display_formatter = DisplayFormatter(parent=self)
self.configurables.append(self.display_formatter)
self.display_formatter.ipython_display_formatter.enabled = True
else:
super(PyDevTerminalInteractiveShell, self).init_display_formatter()
#-------------------------------------------------------------------------
# Things related to hooks
#-------------------------------------------------------------------------
def init_hooks(self):
super(PyDevTerminalInteractiveShell, self).init_hooks()
self.set_hook('show_in_pager', show_in_pager)
#-------------------------------------------------------------------------
# Things related to exceptions
#-------------------------------------------------------------------------
def showtraceback(self, exc_tuple=None, *args, **kwargs):
# IPython does a lot of clever stuff with Exceptions. However mostly
# it is related to IPython running in a terminal instead of an IDE.
# (e.g. it prints out snippets of code around the stack trace)
# PyDev does a lot of clever stuff too, so leave exception handling
# with default print_exc that PyDev can parse and do its clever stuff
# with (e.g. it puts links back to the original source code)
try:
if exc_tuple is None:
etype, value, tb = sys.exc_info()
else:
etype, value, tb = exc_tuple
except ValueError:
return
if tb is not None:
traceback.print_exception(etype, value, tb)
sys.last_type, sys.last_value, sys.last_traceback = etype, value, tb
def init_completer(self):
init_shell_completer(self)
#-------------------------------------------------------------------------
# Things related to aliases
#-------------------------------------------------------------------------
def init_alias(self):
# InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
# ones we don't. So don't use super and instead go right to InteractiveShell
InteractiveShell.init_alias(self)
#-------------------------------------------------------------------------
# Things related to exiting
#-------------------------------------------------------------------------
def ask_exit(self):
""" Ask the shell to exit. Can be overridden and used as a callback. """
# TODO PyDev's console does not have support from the Python side to exit
# the console. If user forces the exit (with sys.exit()) then the console
# simply reports errors. e.g.:
# >>> import sys
# >>> sys.exit()
# Failed to create input stream: Connection refused
# >>>
# Console already exited with value: 0 while waiting for an answer.
# Error stream:
# Output stream:
# >>>
#
# Alternatively if you use the non-IPython shell this is what happens
# >>> exit()
# <type 'exceptions.SystemExit'>:None
# >>>
# <type 'exceptions.SystemExit'>:None
# >>>
#
super(PyDevTerminalInteractiveShell, self).ask_exit()
print('To exit the PyDev Console, terminate the console within IDE.')
#-------------------------------------------------------------------------
# Things related to magics
#-------------------------------------------------------------------------
def init_magics(self):
super(PyDevTerminalInteractiveShell, self).init_magics()
# TODO Any additional magics for PyDev?
InteractiveShellABC.register(PyDevTerminalInteractiveShell) # @UndefinedVariable
#=======================================================================================================================
# _PyDevIPythonFrontEnd
#=======================================================================================================================
class _PyDevIPythonFrontEnd:
version = release.__version__
def __init__(self):
# Create and initialize our IPython instance.
if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None:
self.ipython = PyDevTerminalInteractiveShell._instance
else:
self.ipython = PyDevTerminalInteractiveShell.instance(config=load_default_config())
self._curr_exec_line = 0
self._curr_exec_lines = []
def show_banner(self):
self.ipython.show_banner()
def update(self, globals, locals):
ns = self.ipython.user_ns
for key in dict_keys(self.ipython.user_ns):
if key not in locals:
locals[key] = ns[key]
self.ipython.user_global_ns.clear()
self.ipython.user_global_ns.update(globals)
# If `globals` and `locals` passed to the method are the same objects, we have to ensure that they are also
# the same in the IPython evaluation context to avoid troubles with some corner-cases such as generator expressions.
# See: `pydevd_console_integration.console_exec()`.
self.ipython.user_ns = self.ipython.user_global_ns if globals is locals else locals
if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'):
self.ipython.history_manager.save_thread.pydev_do_not_trace = True # don't trace ipython history saving thread
def complete(self, string):
try:
if string:
return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
else:
return self.ipython.complete(string, string, 0)
except:
# Silence completer exceptions
pass
def is_complete(self, string):
#Based on IPython 0.10.1
if string in ('', '\n'):
# Prefiltering, eg through ipython0, may return an empty
# string although some operations have been accomplished. We
# thus want to consider an empty string as a complete
# statement.
return True
else:
try:
# Add line returns here, to make sure that the statement is
# complete (except if '\' was used).
# This should probably be done in a different place (like
# maybe 'prefilter_input' method? For now, this works.
clean_string = string.rstrip('\n')
if not clean_string.endswith('\\'):
clean_string += '\n\n'
is_complete = codeop.compile_command(
clean_string,
"<string>",
"exec"
)
except Exception:
# XXX: Hack: return True so that the
# code gets executed and the error captured.
is_complete = True
return is_complete
def getCompletions(self, text, act_tok):
# Get completions from IPython and from PyDev and merge the results
# IPython only gives context free list of completions, while PyDev
# gives detailed information about completions.
try:
TYPE_IPYTHON = '11'
TYPE_IPYTHON_MAGIC = '12'
_line, ipython_completions = self.complete(text)
from _pydev_bundle._pydev_completer import Completer
completer = Completer(self.get_namespace(), None)
ret = completer.complete(act_tok)
append = ret.append
ip = self.ipython
pydev_completions = set([f[0] for f in ret])
for ipython_completion in ipython_completions:
#PyCharm was not expecting completions with '%'...
#Could be fixed in the backend, but it's probably better
#fixing it at PyCharm.
#if ipython_completion.startswith('%'):
# ipython_completion = ipython_completion[1:]
if ipython_completion not in pydev_completions:
pydev_completions.add(ipython_completion)
inf = ip.object_inspect(ipython_completion)
if inf['type_name'] == 'Magic function':
pydev_type = TYPE_IPYTHON_MAGIC
else:
pydev_type = TYPE_IPYTHON
pydev_doc = inf['docstring']
if pydev_doc is None:
pydev_doc = ''
append((ipython_completion, pydev_doc, '', pydev_type))
return ret
except:
import traceback
traceback.print_exc()
return []
def get_namespace(self):
return self.ipython.user_ns
def clear_buffer(self):
del self._curr_exec_lines[:]
def add_exec(self, line):
if self._curr_exec_lines:
self._curr_exec_lines.append(line)
buf = '\n'.join(self._curr_exec_lines)
if self.is_complete(buf):
self._curr_exec_line += 1
res = self.ipython.run_cell(buf)
del self._curr_exec_lines[:]
if res.error_in_exec is not None:
return False, True
else:
return False, False #execute complete (no more)
return True, False #needs more
else:
if not self.is_complete(line):
#Did not execute
self._curr_exec_lines.append(line)
return True, False #needs more
else:
self._curr_exec_line += 1
res = self.ipython.run_cell(line, store_history=True)
if res.error_in_exec is not None:
return False, True
else:
return False, False #execute complete (no more)
#hist = self.ipython.history_manager.output_hist_reprs
#rep = hist.get(self._curr_exec_line, None)
#if rep is not None:
# print(rep)
return False #execute complete (no more)
def is_automagic(self):
return self.ipython.automagic
def get_greeting_msg(self):
return 'PyDev console: using IPython %s\n' % self.version
class _PyDevFrontEndContainer:
_instance = None
_last_rpc_client = None
def get_client():
return _PyDevFrontEndContainer._last_rpc_client
def get_pydev_ipython_frontend(rpc_client):
if _PyDevFrontEndContainer._instance is None:
_PyDevFrontEndContainer._instance = _PyDevIPythonFrontEnd()
if _PyDevFrontEndContainer._last_rpc_client != rpc_client:
_PyDevFrontEndContainer._last_rpc_client = rpc_client
# Back channel to PyDev to open editors (in the future other
# info may go back this way. This is the same channel that is
# used to get stdin, see StdIn in pydev_console_utils)
_PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(rpc_client)
# Note: setting the callback directly because setting it with set_hook would actually create a chain instead
# of overwriting it on each new call.
# _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))
return _PyDevFrontEndContainer._instance
def get_ipython_hidden_vars(ipython_shell):
try:
if hasattr(ipython_shell, 'user_ns_hidden'):
user_ns_hidden = ipython_shell.user_ns_hidden
if isinstance(user_ns_hidden, dict):
# Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
user_hidden_dict = user_ns_hidden.copy()
else:
# In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
user_hidden_dict = dict([(key, val) for key, val in dict_iter_items(ipython_shell.user_ns)
if key in user_ns_hidden])
# while `_`, `__` and `___` are not yet initialized, they are not present in `user_ns_hidden`
user_hidden_dict.setdefault('_', '')
user_hidden_dict.setdefault('__', '')
user_hidden_dict.setdefault('___', '')
return user_hidden_dict
except:
# Getting IPython variables shouldn't break loading frame variables
traceback.print_exc()
|
{
"content_hash": "24351da36d73839cc96f63da3c8dc641",
"timestamp": "",
"source": "github",
"line_count": 434,
"max_line_length": 143,
"avg_line_length": 40.8778801843318,
"alnum_prop": 0.584070796460177,
"repo_name": "smmribeiro/intellij-community",
"id": "0246c6d2e31be69a34bd3aad748a1bc0b6375c1d",
"size": "18469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/helpers/pydev/_pydev_bundle/pydev_ipython_console_011.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""
A docutils node which simply shows a counter.
"""
from docutils.nodes import Text, target
from docutils.parsers.rst import roles
values = {}
class CounterNode(Text):
children = ()
def __init__(self, data, rawsource=''):
if ':' in data:
self.name, value = [s.lower() for s in data.split(':')][:2]
self.value = int(value)
else:
self.name = data.lower()
self.value = values.get(self.name, 1)
values[self.name] = self.value + 1
def astext(self):
return str(self.value)
def counter_fn(name, rawtext, text, lineno, inliner, options=None, content=None):
n = CounterNode(text)
s = '%s-%s' % (n.name, n.value)
return [target(ids=[s]), n], []
counter_fn.content = True
roles.register_canonical_role('counter', counter_fn)
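# Illustrative reST usage (not part of the original module): each use of the
# role increments a per-name counter, and a 'name:value' form resets it.
#     See figure :counter:`figure`.    -> "See figure 1."
#     See figure :counter:`figure`.    -> "See figure 2."
#     See figure :counter:`figure:10`. -> "See figure 10."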
|
{
"content_hash": "94b2d7b073e0a8596defaa8c09090c77",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 81,
"avg_line_length": 23.11111111111111,
"alnum_prop": 0.5949519230769231,
"repo_name": "aquavitae/rst2pdf",
"id": "aaa9b387eaf460530487e7119fd6e32e9cb7a162",
"size": "857",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rst2pdf/counter_role.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35249"
},
{
"name": "Makefile",
"bytes": "94343"
},
{
"name": "Python",
"bytes": "989625"
},
{
"name": "Shell",
"bytes": "1706"
}
],
"symlink_target": ""
}
|
"""
Ansible module for yum-based systems determining if multiple releases
of an OpenShift package are available, and if the release requested
(if any) is available down to the given precision.
For Enterprise, multiple releases available suggest that multiple repos
are enabled for the different releases, which may cause installation
problems. With Origin, however, this is a normal state of affairs as
all the releases are provided in a single repo with the expectation that
only the latest can be installed.
Code in the openshift_version role contains a lot of logic to pin down
the exact package and image version to use and so does some validation
of release availability already. Without duplicating all that, we would
like the user to have a helpful error message if we detect things will
not work out right. Note that if openshift_release is not specified in
the inventory, the version comparison checks just pass.
"""
from ansible.module_utils.basic import AnsibleModule
# NOTE: because of the dependency on yum (Python 2-only), this module does not
# work under Python 3. But since we run unit tests against both Python 2 and
# Python 3, we use six for cross compatibility in this module alone:
from ansible.module_utils.six import string_types
YUM_IMPORT_EXCEPTION = None
DNF_IMPORT_EXCEPTION = None
PKG_MGR = None
try:
import yum # pylint: disable=import-error
PKG_MGR = "yum"
except ImportError as err:
YUM_IMPORT_EXCEPTION = err
try:
import dnf # pylint: disable=import-error
PKG_MGR = "dnf"
except ImportError as err:
DNF_IMPORT_EXCEPTION = err
class AosVersionException(Exception):
"""Base exception class for package version problems"""
def __init__(self, message, problem_pkgs=None):
Exception.__init__(self, message)
self.problem_pkgs = problem_pkgs
def main():
"""Entrypoint for this Ansible module"""
module = AnsibleModule(
argument_spec=dict(
package_list=dict(type="list", required=True),
),
supports_check_mode=True
)
if YUM_IMPORT_EXCEPTION and DNF_IMPORT_EXCEPTION:
module.fail_json(
msg="aos_version module could not import yum or dnf: %s %s" %
(YUM_IMPORT_EXCEPTION, DNF_IMPORT_EXCEPTION)
)
# determine the packages we will look for
package_list = module.params['package_list']
if not package_list:
module.fail_json(msg="package_list must not be empty")
# generate a list with only the names of expected packages
expected_pkg_names = [p["name"] for p in package_list]
# gather packages that require a multi_minor_release check
multi_minor_pkgs = [p for p in package_list if p["check_multi"]]
# generate list of packages with a specified (non-empty) version
# should look like a version string with possibly many segments e.g. "3.4.1"
versioned_pkgs = [p for p in package_list if p["version"]]
# get the list of packages available and complain if anything is wrong
try:
pkgs = _retrieve_available_packages(expected_pkg_names)
if versioned_pkgs:
_check_precise_version_found(pkgs, _to_dict(versioned_pkgs))
_check_higher_version_found(pkgs, _to_dict(versioned_pkgs))
if multi_minor_pkgs:
_check_multi_minor_release(pkgs, _to_dict(multi_minor_pkgs))
except AosVersionException as excinfo:
module.fail_json(msg=str(excinfo))
module.exit_json(changed=False)
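# Illustrative shape of the expected `package_list` input (values here are
# hypothetical, not from the original module); as an Ansible task parameter:
#     package_list:
#       - name: origin
#         version: "3.6"
#         check_multi: false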
def _to_dict(pkg_list):
return {pkg["name"]: pkg for pkg in pkg_list}
def _retrieve_available_packages(expected_pkgs):
# The openshift excluder prevents unintended updates to openshift
# packages by setting yum excludes on those packages. See:
# https://wiki.centos.org/SpecialInterestGroup/PaaS/OpenShift-Origin-Control-Updates
# Excludes are then disabled during an install or upgrade, but
# this check will most likely be running outside either. When we
# attempt to determine what packages are available via yum they may
# be excluded. So, for our purposes here, disable excludes to see
# what will really be available during an install or upgrade.
if PKG_MGR == "yum":
# search for package versions available for openshift pkgs
yb = yum.YumBase() # pylint: disable=invalid-name
yb.conf.disable_excludes = ['all']
try:
pkgs = yb.pkgSack.returnPackages(patterns=expected_pkgs)
except yum.Errors.PackageSackError as excinfo:
# you only hit this if *none* of the packages are available
raise AosVersionException('\n'.join([
'Unable to find any OpenShift packages.',
'Check your subscription and repo settings.',
str(excinfo),
]))
elif PKG_MGR == "dnf":
dbase = dnf.Base()  # pylint: disable=invalid-name
dbase.conf.disable_excludes = ['all']
dbase.read_all_repos()
dbase.fill_sack(load_system_repo=False, load_available_repos=True)
dquery = dbase.sack.query()
aquery = dquery.available()
pkgs = list(aquery.filter(name=expected_pkgs))
if not pkgs:
# pkgs list is empty, raise because no expected packages found
raise AosVersionException('\n'.join([
'Unable to find any OpenShift packages.',
'Check your subscription and repo settings.',
]))
return pkgs
class PreciseVersionNotFound(AosVersionException):
"""Exception for reporting packages not available at given version"""
def __init__(self, not_found):
msg = ['Not all of the required packages are available at their requested version']
msg += ['{}:{} '.format(pkg["name"], pkg["version"]) for pkg in not_found]
msg += ['Please check your subscriptions and enabled repositories.']
AosVersionException.__init__(self, '\n'.join(msg), not_found)
def _check_precise_version_found(pkgs, expected_pkgs_dict):
# see if any packages couldn't be found at the requested release version.
# we want to verify that the latest available pkgs match the requested
# version to however many segments the requested version specifies
# (see the worked example after this function).
pkgs_precise_version_found = set()
for pkg in pkgs:
if pkg.name not in expected_pkgs_dict:
continue
expected_pkg_versions = expected_pkgs_dict[pkg.name]["version"]
if isinstance(expected_pkg_versions, string_types):
expected_pkg_versions = [expected_pkg_versions]
for expected_pkg_version in expected_pkg_versions:
# does the version match, to the precision requested?
match_version = '.'.join(pkg.version.split('.')[:expected_pkg_version.count('.') + 1])
if match_version == expected_pkg_version:
pkgs_precise_version_found.add(pkg.name)
not_found = []
for name, pkg in expected_pkgs_dict.items():
if name not in pkgs_precise_version_found:
not_found.append(pkg)
if not_found:
raise PreciseVersionNotFound(not_found)
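# Illustrative example (not part of the original module): for an expected
# version "3.4" and an available package 3.4.1.5,
#     match_version = '.'.join('3.4.1.5'.split('.')[:2])  # -> '3.4'
# so the check passes; with expected "3.4.1" and only 3.4.0 available,
# match_version is '3.4.0' != '3.4.1' and the package lands in not_found.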
class FoundHigherVersion(AosVersionException):
"""Exception for reporting that a higher version than requested is available"""
def __init__(self, higher_found):
msg = ['Some required package(s) are available at a version',
'that is higher than requested']
msg += [' ' + name for name in higher_found]
msg += ['This will prevent installing the version you requested.']
msg += ['Please check your enabled repositories or adjust openshift_release.']
AosVersionException.__init__(self, '\n'.join(msg), higher_found)
def _check_higher_version_found(pkgs, expected_pkgs_dict):
expected_pkg_names = list(expected_pkgs_dict)
# see if any packages are available in a version higher than requested
higher_version_for_pkg = {}
for pkg in pkgs:
if pkg.name not in expected_pkg_names:
continue
expected_pkg_versions = expected_pkgs_dict[pkg.name]["version"]
if isinstance(expected_pkg_versions, string_types):
expected_pkg_versions = [expected_pkg_versions]
# NOTE: the list of versions is assumed to be sorted so that the highest
# desirable version is the last.
highest_desirable_version = expected_pkg_versions[-1]
req_release_arr = [int(segment) for segment in highest_desirable_version.split(".")]
version = [int(segment) for segment in pkg.version.split(".")]
too_high = version[:len(req_release_arr)] > req_release_arr
higher_than_seen = version > higher_version_for_pkg.get(pkg.name, [])
if too_high and higher_than_seen:
higher_version_for_pkg[pkg.name] = version
if higher_version_for_pkg:
higher_found = []
for name, version in higher_version_for_pkg.items():
higher_found.append(name + '-' + '.'.join(str(segment) for segment in version))
raise FoundHigherVersion(higher_found)
class FoundMultiRelease(AosVersionException):
"""Exception for reporting multiple minor releases found for same package"""
def __init__(self, multi_found):
msg = ['Multiple minor versions of these packages are available']
msg += [' ' + name for name in multi_found]
msg += ["There should only be one OpenShift release repository enabled at a time."]
AosVersionException.__init__(self, '\n'.join(msg), multi_found)
def _check_multi_minor_release(pkgs, expected_pkgs_dict):
# see if any packages are available in more than one minor version
pkgs_by_name_version = {}
for pkg in pkgs:
# keep track of x.y (minor release) versions seen
minor_release = '.'.join(pkg.version.split('.')[:2])
if pkg.name not in pkgs_by_name_version:
pkgs_by_name_version[pkg.name] = set()
pkgs_by_name_version[pkg.name].add(minor_release)
multi_found = []
for name in expected_pkgs_dict:
if name in pkgs_by_name_version and len(pkgs_by_name_version[name]) > 1:
multi_found.append(name)
if multi_found:
raise FoundMultiRelease(multi_found)
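# Illustrative example (not part of the original module): if versions 3.5.1
# and 3.6.0 of the same expected package are both available, the minor
# releases seen are {'3.5', '3.6'}, so the package is reported as multi.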
if __name__ == '__main__':
main()
|
{
"content_hash": "b3aaa45f78239a76086f3bce1aab8fb1",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 102,
"avg_line_length": 41.8582995951417,
"alnum_prop": 0.6697939839442886,
"repo_name": "rhdedgar/openshift-tools",
"id": "c8769b511334af42cc29c8d046b89170202aa4f5",
"size": "10357",
"binary": false,
"copies": "2",
"ref": "refs/heads/stg",
"path": "openshift/installer/vendored/openshift-ansible-3.6.173.0.27/roles/openshift_health_checker/library/aos_version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "588"
},
{
"name": "Groovy",
"bytes": "6322"
},
{
"name": "HTML",
"bytes": "73250"
},
{
"name": "JavaScript",
"bytes": "960"
},
{
"name": "PHP",
"bytes": "35793"
},
{
"name": "Python",
"bytes": "20646861"
},
{
"name": "Shell",
"bytes": "903453"
},
{
"name": "Vim script",
"bytes": "1836"
}
],
"symlink_target": ""
}
|
import flask
import api
import auth
import forms
import model
import task
import util
from main import app
@app.route('/cart/', methods=['GET', 'POST'])
@auth.login_required
def cart():
order, product_dbs = api.v1.recalculate_cart()
if flask.request.method == 'GET' or order['price'] < 5000:
return flask.render_template(
'cart/get.html',
html_class='cart',
title=u'Cart',
product_dbs=product_dbs,
)
user_db = auth.current_user_db()
form = forms.UserOrderForm(obj=user_db)
if form.validate_on_submit():
form.populate_obj(user_db)
user_db.put()
order_ = []
for product_db in product_dbs:
product_db.count = order.get(
'products', {}).get(product_db.key.urlsafe(), {}).get('count', 0)
order_.append(product_db)
order_db = model.Order()
for product in order_:
cp = model.CartProduct()
cp.product_key = product.key
cp.count = product.count
cp.price = product.count * product.price
cp.put()
order_db.products.append(cp)
order_db.user_key = auth.current_user_db().key
order_db.user_email = auth.current_user_db().email
order_db.user_name = auth.current_user_db().name
order_db.put()
order_id = order_db.key.id()
order_url = flask.url_for('admin_order', order_id=order_id, _external=True)
subject = u'New order #%s for a total of %s' % (order_id, order.get('price'))
body = flask.render_template(
'cart/emails/order.txt',
product_dbs=order_,
price=order['price'],
order_id=order_id,
order_url=order_url,
)
html = flask.render_template(
'cart/emails/order.html',
product_dbs=order_,
price=order['price'],
title=subject,
order_id=order_id,
order_url=order_url,
)
reply_to = form.email.data or ''
task.send_mail_notification(
subject, body=body, html=html, reply_to=reply_to)
flask.session['cart'] = {}
api.v1.recalculate_cart()
order_view_url = flask.url_for('order_view', order_id=order_id)
flask.flash(
flask.Markup(u'<a href="%s">Order #%s</a> has been placed successfully; \
our manager will contact you shortly!' % (order_view_url, order_id)),
'success',
)
return flask.redirect(flask.url_for('products'))
return flask.render_template(
'cart/user_update.html',
html_class='cart-user-update',
title=u'Contact details',
form=form,
)
@app.route('/admin/orders/')
@auth.admin_required
def admin_orders():
order_dbs, cursors = model.Order.get_dbs(
is_shipped=util.param('is_shipped', bool),
is_trashed=False,
prev_cursor=True,
)
return flask.render_template(
'cart/admin/orders.html',
html_class='order-list',
title=u'Preorder list',
order_dbs=order_dbs,
next_url=util.generate_next_url(cursors['next']),
prev_url=util.generate_next_url(cursors['prev']),
)
@app.route('/admin/order/<int:order_id>/')
@auth.admin_required
def admin_order(order_id):
order_db = model.Order.get_by_id(order_id)
user_db = order_db.user_key.get()
return flask.render_template(
'cart/admin/order.html',
title=u'Order #%s' % order_db.key.id(),
html_class='order-view admin-order-view',
order_db=order_db,
user_db=user_db,
)
@app.route('/orders/')
@auth.login_required
def order_history():
order_dbs = model.Order.query(
model.Order.user_key == auth.current_user_key()).fetch()
return flask.render_template(
'cart/order_history.html',
title=u'Order history',
order_dbs=order_dbs,
)
@app.route('/order/<int:order_id>/')
@auth.login_required
def order_view(order_id):
order_db = model.Order.get_by_id(order_id)
if order_db.user_key != auth.current_user_key():
return flask.redirect(flask.url_for('products'))
return flask.render_template(
'cart/order_view.html',
title=u'Order #%s' % order_db.key.id(),
order_db=order_db,
html_class='order-view',
)
|
{
"content_hash": "095a1b7e13500208bae64b9283c946de",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 79,
"avg_line_length": 28.270833333333332,
"alnum_prop": 0.6241709653647752,
"repo_name": "gmist/five-studio2",
"id": "3892a06eb176d0ab16637ec819a802ec6521a5f9",
"size": "4236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/control/cart.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "24481"
},
{
"name": "CoffeeScript",
"bytes": "60684"
},
{
"name": "HTML",
"bytes": "270202"
},
{
"name": "JavaScript",
"bytes": "65"
},
{
"name": "Python",
"bytes": "224611"
},
{
"name": "Shell",
"bytes": "1082"
}
],
"symlink_target": ""
}
|
"""WSGI app setup."""
import os
import sys
# Add lib as primary libraries directory, with fallback to lib/dist
# and optionally to lib/dist.zip, loaded using zipimport.
lib_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'lib')
if lib_path not in sys.path:
sys.path[0:0] = [
lib_path,
os.path.join(lib_path, 'dist'),
os.path.join(lib_path, 'dist.zip'),
]
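# Illustrative note (not part of the original file): 'lib/dist.zip' works as a
# sys.path entry because Python's zipimport treats a .zip archive like a
# directory of importable modules; entries earlier in sys.path (lib/, then
# lib/dist/) shadow anything bundled inside the archive.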
from tipfy.app import App
from config import config
from urls import rules
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app.dispatch = appstats_wsgi_middleware(app.dispatch)
def enable_jinja2_debugging():
"""Enables blacklisted modules that help Jinja2 debugging."""
if not debug:
return
from google.appengine.tools.dev_appserver import HardenedModulesHook
HardenedModulesHook._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
# Is this the development server?
debug = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# Instantiate the application.
app = App(rules=rules, config=config, debug=debug)
enable_appstats(app)
enable_jinja2_debugging()
def main():
app.run()
if __name__ == '__main__':
main()
|
{
"content_hash": "1a28b2b3db02b5038c1143ccf41445cd",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 74,
"avg_line_length": 28.155555555555555,
"alnum_prop": 0.6898184688239937,
"repo_name": "moraes/tipfy",
"id": "218cc861aeeb9c8bc1bd8f9da314bdc028c10246",
"size": "1291",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "project/app/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "2954"
},
{
"name": "Python",
"bytes": "818426"
},
{
"name": "Shell",
"bytes": "4509"
}
],
"symlink_target": ""
}
|
from ai.h2o.sparkling.ml.params.H2OTypeConverters import H2OTypeConverters
from ai.h2o.sparkling.ml.params.H2OBaseMOJOParams import H2OBaseMOJOParams
from pyspark.ml.param import *
import warnings
class H2OAlgorithmMOJOParams(H2OBaseMOJOParams):
predictionCol = Param(
Params._dummy(),
"predictionCol",
"Prediction column name",
H2OTypeConverters.toString())
detailedPredictionCol = Param(
Params._dummy(),
"detailedPredictionCol",
"Column containing additional prediction details, its content depends on the model type.",
H2OTypeConverters.toString())
withContributions = Param(
Params._dummy(),
"withContributions",
"Enables or disables generating a sub-column of detailedPredictionCol containing Shapley values.",
H2OTypeConverters.toBoolean())
featuresCols = Param(
Params._dummy(),
"featuresCols",
"Name of feature columns",
H2OTypeConverters.toListString())
namedMojoOutputColumns = Param(
Params._dummy(),
"namedMojoOutputColumns",
"Mojo Output is not stored in the array but in the properly named columns",
H2OTypeConverters.toBoolean())
withLeafNodeAssignments = Param(
Params._dummy(),
"withLeafNodeAssignments",
"Enables or disables computation of leaf node assignments.",
H2OTypeConverters.toBoolean())
withStageResults = Param(
Params._dummy(),
"withStageResults",
"Enables or disables computation of stage results.",
H2OTypeConverters.toBoolean())
##
# Getters
##
def getPredictionCol(self):
return self.getOrDefault(self.predictionCol)
def getDetailedPredictionCol(self):
return self.getOrDefault(self.detailedPredictionCol)
def getWithContributions(self):
return self.getOrDefault(self.withContributions)
def getFeaturesCols(self):
return self.getOrDefault(self.featuresCols)
def getNamedMojoOutputColumns(self):
warnings.warn("The method will be removed without replacement in the version 3.40. "
"Named output columns will always be used.", DeprecationWarning)
return self.getOrDefault(self.namedMojoOutputColumns)
def getWithLeafNodeAssignments(self):
return self.getOrDefault(self.withLeafNodeAssignments)
def getWithStageResults(self):
return self.getOrDefault(self.withStageResults)
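# Illustrative usage sketch (not part of the original file; the default value
# shown is an assumption): these getters are inherited by Sparkling Water MOJO
# models, e.g.
#     model = H2OMOJOModel.createFromMojo("path/to/model.mojo")
#     model.getPredictionCol()  # e.g. "prediction"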
|
{
"content_hash": "1efc8b2e2778b7cceb4ed47b88621f0a",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 106,
"avg_line_length": 33.554054054054056,
"alnum_prop": 0.6967378171566653,
"repo_name": "h2oai/sparkling-water",
"id": "c47d600bc3d05c21de948f6f762c7b0321b58c8c",
"size": "3268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py-scoring/src/ai/h2o/sparkling/ml/params/H2OAlgorithmMOJOParams.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8719"
},
{
"name": "CSS",
"bytes": "4539"
},
{
"name": "Groovy",
"bytes": "122809"
},
{
"name": "HCL",
"bytes": "44156"
},
{
"name": "Java",
"bytes": "35009"
},
{
"name": "Python",
"bytes": "442716"
},
{
"name": "R",
"bytes": "63088"
},
{
"name": "Scala",
"bytes": "1720448"
},
{
"name": "Shell",
"bytes": "29726"
},
{
"name": "TeX",
"bytes": "132310"
}
],
"symlink_target": ""
}
|
import json
from tempfile import NamedTemporaryFile
from json.decoder import JSONDecodeError
from neo.Utils.NeoTestCase import NeoTestCase
from neo.UserPreferences import UserPreferencesHolder, PREFERENCES_DEFAULT
class PreferencesTestCase(NeoTestCase):
def test_prefs_json_nonexistant(self):
prefs = UserPreferencesHolder("/this/does/not/exist.json")
self.assertEqual(prefs._userprefs, {})
self.assertEqual(prefs._prefs, PREFERENCES_DEFAULT)
def test_prefs_json_invalid(self):
with NamedTemporaryFile() as prefs_file:
prefs_file.write(b"xxx")
prefs_file.flush()
print("This test expects an error message. Don't worry about the next line ;)")
with self.assertRaises(JSONDecodeError):
prefs = UserPreferencesHolder(prefs_file.name)
def test_prefs_json_empty(self):
with NamedTemporaryFile("w") as prefs_file:
s = json.dumps({}, indent=4, sort_keys=True)
prefs_file.write(s)
prefs_file.flush()
prefs = UserPreferencesHolder(prefs_file.name)
self.assertEqual(prefs._userprefs, {})
self.assertEqual(prefs._prefs, PREFERENCES_DEFAULT)
# Make sure it's possible to get the theme prefs
current_selected_theme = prefs._prefs['theme']
self.assertEqual(prefs.token_style, PREFERENCES_DEFAULT["themes"][current_selected_theme])
# Cannot set an invalid theme
with self.assertRaises(ValueError):
prefs.set_theme("invalid")
# Can set a valid theme
prefs.set_theme("light")
self.assertEqual(prefs.token_style, PREFERENCES_DEFAULT["themes"]["light"])
# Make sure it persists user-preferences to JSON file
prefs2 = UserPreferencesHolder(prefs_file.name)
self.assertEqual(prefs2._userprefs, {"theme": "light"})
self.assertEqual(prefs2.token_style, PREFERENCES_DEFAULT["themes"]["light"])
|
{
"content_hash": "8289a8f9d689e9db42027e80e34c137d",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 102,
"avg_line_length": 44.04347826086956,
"alnum_prop": 0.6525172754195459,
"repo_name": "hal0x2328/neo-python",
"id": "475162eae7da417548ce46ab2275f4a9e52eece2",
"size": "2026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neo/test_preferences.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2059"
},
{
"name": "Makefile",
"bytes": "1898"
},
{
"name": "Python",
"bytes": "1758220"
},
{
"name": "Shell",
"bytes": "531"
}
],
"symlink_target": ""
}
|
import itertools
from collections import namedtuple
import os.path as p
from pathlib import Path
from flask import safe_join
import abc
import yaml
import os
from landmarkerio import TEMPLATE_DINAME, FileExt
Group = namedtuple('Group', ['label', 'n', 'index'])
def parse_connectivity(index_lst, n):
index = []
for i in index_lst:
if ':' in i:
# User is providing a slice
start, end = (int(x) for x in i.split(':'))
index.extend([x, x+1] for x in xrange(start, end))
else:
# Just a standard pair of numbers
index.append([int(j) for j in i.split(' ')])
indexes = set(itertools.chain.from_iterable(index))
if len(index) > 0 and (min(indexes) < 0 or max(indexes) > n):
raise ValueError("invalid connectivity")
return index
def load_yaml_template(filepath, n_dims):
with open(filepath) as f:
data = yaml.load(f.read())
if 'groups' in data:
raw_groups = data['groups']
else:
raise KeyError(
"Missing 'groups' or 'template' key in yaml file %s"
% filepath)
groups = []
for index, group in enumerate(raw_groups):
label = group.get('label', index) # Allow simple ordered groups
n = group['points'] # Should raise KeyError by design if missing
connectivity = group.get('connectivity', [])
if isinstance(connectivity, list):
index = parse_connectivity(connectivity, n)
elif connectivity == 'cycle':
index = parse_connectivity(
['0:%d' % (n - 1), '%d 0' % (n - 1)], n)
else:
index = [] # Couldn't parse connectivity, safe default
groups.append(Group(label, n, index))
return build_json(groups, n_dims)
def parse_group(group):
# split on \n and strip left and right whitespace.
x = [l.strip() for l in group.split('\n')]
label, n_str = x[0].split(' ')
n = int(n_str)
index_str = x[1:]
if len(index_str) == 0:
return Group(label, n, [])
index = parse_connectivity(index_str, n)
return Group(label, n, index)
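# Illustrative example (not part of the original module): a legacy group block
#     chin 3
#     0 1
#     1 2
# parses to Group(label='chin', n=3, index=[[0, 1], [1, 2]]).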
def group_to_json(group, n_dims):
group_json = {}
lms = [{'point': [None] * n_dims}] * group.n
group_json['landmarks'] = lms
group_json['connectivity'] = group.index
group_json['label'] = group.label
return group_json
def build_json(groups, n_dims):
n_points = sum(g.n for g in groups)
offset = 0
connectivity = []
labels = []
for g in groups:
connectivity += [[j + offset for j in i] for i in g.index]
labels.append({
'label': g.label,
'mask': list(range(offset, offset + g.n))
})
offset += g.n
lm_json = {
'labels': labels,
'landmarks': {
'connectivity': connectivity,
'points': [[None] * n_dims] * n_points
},
'version': 2,
}
return lm_json
def load_legacy_template(path, n_dims):
with open(path) as f:
ta = f.read().strip().split('\n\n')
groups = [parse_group(g) for g in ta]
return build_json(groups, n_dims)
def group_to_dict(g):
data = {'label': g.label, 'points': g.n}
if g.index:
data['connectivity'] = ['{} {}'.format(c[0], c[1]) for c in g.index]
return data
def convert_legacy_template(path):
with open(path) as f:
ta = f.read().strip().split('\n\n')
groups = [parse_group(g) for g in ta]
data = {'groups': [group_to_dict(g) for g in groups]}
new_path = path[:-3] + 'yml'
warning = ''
if p.isfile(new_path):
new_path = path[:-4] + '-converted.yml'
warning = '(appended -converted to avoid collision)'
with open(new_path, 'w') as nf:
yaml.dump(data, nf, indent=4, default_flow_style=False)
os.remove(path)
print " - {} > {} {}".format(path, new_path, warning)
def load_template(path, n_dims):
return load_yaml_template(path, n_dims)
class TemplateAdapter(object):
r"""
Abstract definition of an adapter that can be passed to app_for_adapter in
order to generate a legal Flask implementation of landmarker.io's REST API
for Template retrieval.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def template_ids(self):
pass
@abc.abstractmethod
def load_template(self, lm_id):
pass
class FileTemplateAdapter(TemplateAdapter):
def __init__(self, n_dims, template_dir=None):
self.n_dims = n_dims
if template_dir is None:
# try the user folder
user_templates = p.expanduser(p.join('~', TEMPLATE_DINAME))
if p.isdir(user_templates):
template_dir = user_templates
else:
raise ValueError("No template dir provided and "
"{} doesn't exist".format(user_templates))
self.template_dir = Path(p.abspath(p.expanduser(template_dir)))
        print('templates: {}'.format(self.template_dir))
def handle_old_templates(self, upgrade_templates=False):
old_ids = [t.stem for t
in self.template_dir.glob('*' + FileExt.old_template)]
if len(old_ids) > 0 and upgrade_templates:
print "Converting {} old style templates".format(len(old_ids))
for lm_id in old_ids:
fp = safe_join(str(self.template_dir),
lm_id + FileExt.old_template)
convert_legacy_template(fp)
elif len(old_ids) > 0:
print((
"\nWARNING: ignored {} old style '.txt' templates in '{}' " +
"({}).\n" +
"See https://github.com/menpo/landmarkerio-server#templates " +
"more information. You can restart with the " +
"'--upgrade-templates' flag to convert them automatically " +
"(one time operation)\n"
).format(
len(old_ids),
self.template_dir,
", ".join(['{}.txt'.format(t) for t in old_ids]))
)
def template_ids(self):
return [t.stem for t in self.template_paths()]
def template_paths(self):
return self.template_dir.glob('*' + FileExt.template)
def load_template(self, lm_id):
fp = safe_join(str(self.template_dir), lm_id + FileExt.template)
return load_template(fp, self.n_dims)
class CachedFileTemplateAdapter(FileTemplateAdapter):
def __init__(self, n_dims, template_dir=None, upgrade_templates=False):
super(CachedFileTemplateAdapter, self).__init__(
n_dims,
template_dir=template_dir
)
# Handle those before generating cache as we want to load them if
# upgrade_templates is True
FileTemplateAdapter.handle_old_templates(
self, upgrade_templates=upgrade_templates)
self._cache = {lm_id: FileTemplateAdapter.load_template(self, lm_id)
for lm_id in FileTemplateAdapter.template_ids(self)}
print('cached {} templates ({})'.format(
len(self._cache), ', '.join(self._cache.keys())))
def load_template(self, lm_id):
return self._cache[lm_id]
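if __name__ == '__main__':
    # Hedged demo (not part of the original module): build the JSON payload
    # for a synthetic template without touching the file system.
    demo = build_json([Group('mouth', 3, parse_connectivity(['0:2'], 3))], 2)
    print(demo)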
|
{
"content_hash": "76e01a2dc4471257a4c602e0ace10e1f",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 79,
"avg_line_length": 30.789915966386555,
"alnum_prop": 0.5659115720524017,
"repo_name": "jabooth/landmarkerio-server",
"id": "f835234c91dc5da9f54f76312702df6bb5b494f0",
"size": "7328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "landmarkerio/template.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "145"
},
{
"name": "Python",
"bytes": "130585"
},
{
"name": "Shell",
"bytes": "119"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
default_app_config = 'casepro.msgs.apps.Config'
|
{
"content_hash": "599b6ec1359deb8373baf950686cdc08",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 47,
"avg_line_length": 29.666666666666668,
"alnum_prop": 0.7640449438202247,
"repo_name": "xkmato/casepro",
"id": "bea0575a7f6b1725ddef6bdd4095399bb992c04e",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "casepro/msgs/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3475"
},
{
"name": "CoffeeScript",
"bytes": "220522"
},
{
"name": "HTML",
"bytes": "104527"
},
{
"name": "PLpgSQL",
"bytes": "6012"
},
{
"name": "Python",
"bytes": "878626"
}
],
"symlink_target": ""
}
|
import MySQLdb
import collections
import sys
import json
from flask import Flask, jsonify, abort, make_response, request
from flask_httpauth import HTTPBasicAuth
from Lamp import Lamp
from Config import Config
app = Flask(__name__)
auth = HTTPBasicAuth()
#---------------------------------------------------------------------------#
# Hello World
#---------------------------------------------------------------------------#
@app.route("/")
@auth.login_required
def hello():
return "Hello World!"
#---------------------------------------------------------------------------#
# Get all lamps
#---------------------------------------------------------------------------#
@app.route("/ha/api/v1.0/lamps", methods=['GET'])
@auth.login_required
def get_lamps():
lamps = []
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
try:
        #Execute SQL-Query
cursor.execute("SELECT * FROM ha_lamp_objects ORDER BY LampOrder ASC")
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['RoomId'] = row[1]
d['Name'] = row[2]
d['Type'] = row[3]
d['PowerOn'] = row[4]
d['PowerOnMan'] = row[5]
d['CmdOn'] = row[6]
d['CmdOff'] = row[7]
d['IncInAll'] = row[8]
d['Order'] = row[9]
lamps.append(d)
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'lamps': lamps})
#---------------------------------------------------------------------------#
# Get one lamp
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/lamps/<int:lamp_id>', methods=['GET'])
@auth.login_required
def get_lamp(lamp_id):
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
    cursor = db.cursor()
    d = collections.OrderedDict()  # stays empty if no lamp matches lamp_id
try:
        #Execute SQL-Query
cursor.execute("SELECT * FROM ha_lamp_objects WHERE LampId=%s", lamp_id)
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['RoomId'] = row[1]
d['Name'] = row[2]
d['Type'] = row[3]
d['PowerOn'] = row[4]
d['PowerOnMan'] = row[5]
d['CmdOn'] = row[6]
d['CmdOff'] = row[7]
d['IncInAll'] = row[8]
d['Order'] = row[9]
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'lamps': d})
#---------------------------------------------------------------------------#
# Send power to single lamp
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/lamps/single', methods=['POST'])
@auth.login_required
def powersingle():
if not request.json or not 'id' in request.json or not 'PowerOn' in request.json:
abort(400)
lamp = Lamp()
result = lamp.PowerSingle(request.json['id'], request.json['PowerOn'])
json = {
'id': request.json['id'],
'PowerOn': request.json['PowerOn'],
'result': result
}
return jsonify({'lamp': json})
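#---------------------------------------------------------------------------#
# Example request (illustrative host, port and values, not from the
# original project):
#
#   curl -u <user>:<token> -H "Content-Type: application/json" \
#        -d '{"id": 1, "PowerOn": 1}' \
#        http://localhost:5000/ha/api/v1.0/lamps/single
#---------------------------------------------------------------------------#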
#---------------------------------------------------------------------------#
# Send power to all lamps in room
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/lamps/room', methods=['POST'])
@auth.login_required
def powerroom():
if not request.json or not 'id' in request.json or not 'PowerOn' in request.json:
abort(400)
lamp = Lamp()
result = lamp.PowerRoom(request.json['id'], request.json['PowerOn'])
json = {
'id': request.json['id'],
'PowerOn': request.json['PowerOn'],
'result': result
}
return jsonify({'lamp': json})
#---------------------------------------------------------------------------#
# Send power to lamps in scene
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/lamps/scene', methods=['POST'])
@auth.login_required
def powerscene():
if not request.json or not 'id' in request.json:
abort(400)
lamp = Lamp()
result = lamp.PowerScene(request.json['id'])
json = {
'id': request.json['id'],
'result': result
}
return jsonify({'scene': json})
#---------------------------------------------------------------------------#
# Send power to all lamps
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/lamps/all', methods=['POST'])
@auth.login_required
def powerall():
if not request.json or not 'PowerOn' in request.json:
abort(400)
lamp = Lamp()
result = lamp.PowerAll(request.json['PowerOn'])
json = {
'PowerOn': request.json['PowerOn'],
'result': result
}
return jsonify({'lamp': json})
#---------------------------------------------------------------------------#
# Get dashboard
#---------------------------------------------------------------------------#
@app.route("/ha/api/v1.0/dashboard", methods=['GET'])
@auth.login_required
def get_dashboard():
scenes = []
rooms = []
lamps = []
sensors = []
sun = []
weather = []
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
try:
#---------------------------------------------------------------------------#
# Scenes
#---------------------------------------------------------------------------#
cursor.execute("SELECT * FROM ha_scenes ORDER BY SceneOrder ASC")
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['Name'] = row[1]
d['Description'] = row[2]
d['Favorite'] = row[3]
d['Order'] = row[4]
scenes.append(d)
#---------------------------------------------------------------------------#
# Rooms and Lamps
#---------------------------------------------------------------------------#
cursor.execute("SELECT t1.*, IFNULL(t2.RoomName, 'Lamps'), IFNULL(t2.RoomDescription, ''), IFNULL(t2.RoomOrder, 1000) FROM ha_lamp_objects t1 LEFT JOIN ha_rooms t2 ON LampRoomId = RoomId ORDER BY -LampRoomId DESC, RoomOrder ASC, LampOrder ASC")
results = cursor.fetchall()
nCount = 0
nLampCount = 0
nLampsOn = 0
room = collections.OrderedDict()
#Loop result from database
for row in results:
isLastRow = (nCount+1 == cursor.rowcount)
# Add "Entire room" button on top if it's first in the room
if (row[1] is not None and nLampCount == 0):
d = collections.OrderedDict()
d['Id'] = -1
d['RoomId'] = row[1]
d['Name'] = "Entire room"
d['Type'] = ""
d['PowerOn'] = 0
d['PowerOnMan'] = 0
d['CmdOn'] = "EntireRoom"
d['CmdOff'] = "EntireRoom"
d['IncInAll'] = 1
d['Order'] = 0
lamps.append(d)
nLampCount = nLampCount+1
nLampsOn = 0
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['RoomId'] = row[1]
d['Name'] = row[2]
d['Type'] = row[3]
d['PowerOn'] = row[4]
d['PowerOnMan'] = row[5]
d['CmdOn'] = row[6]
d['CmdOff'] = row[7]
d['IncInAll'] = row[8]
d['Order'] = row[9]
lamps.append(d)
if (row[5] == 1):
nLampsOn = nLampsOn+1
nLampCount = nLampCount+1
# Last row or next row is a new room
            if (isLastRow or row[1] != results[nCount+1][1]):
room = collections.OrderedDict()
room['Id'] = row[1]
room['Name'] = row[10]
room['Description'] = row[11]
room['Order'] = row[12]
room['LampCount'] = nLampCount
room['Lamps'] = lamps
rooms.append(room)
# Update entire room to On if a lamps is powered on
if (row[1] is not None and nLampsOn > 0):
lamps[0]['PowerOn'] = 1
lamps[0]['PowerOnMan'] = 1
lamps = []
nLampCount = 0
nCount = nCount+1
#---------------------------------------------------------------------------#
# Sensors
#---------------------------------------------------------------------------#
cursor.execute("SELECT SensorId, SensorRoomId, SensorName, SensorType, SensorOrder, LogDate, LogValue1, LogValue2 FROM (SELECT * FROM ha_sensors_log ORDER BY LogDate DESC) t1 LEFT JOIN ha_sensors ON SensorId = LogSensorId LEFT JOIN ha_rooms ON RoomId = SensorRoomId GROUP BY LogSensorId")
results = cursor.fetchall()
#Move database row to variables
for row in results:
d = collections.OrderedDict()
d['Id'] = row[0]
d['RoomId'] = row[1]
d['Name'] = row[2]
d['Type'] = row[3]
d['Order'] = row[4]
d['LogDate'] = row[5].strftime("%Y-%m-%d %H:%M:%S")
d['LogValue1'] = "%s" % row[6]
d['LogValue2'] = "%s" % row[7]
d['LogLabel1'] = ""
d['LogLabel2'] = ""
d['LogUnit1'] = ""
d['LogUnit2'] = ""
if (row[3] == "DHT11" or row[3] == "DHT22" or row[3] == "AM2302"):
d['LogValues'] = 2
d['LogLabel1'] = "Temperature"
d['LogLabel2'] = "Humidity"
d['LogUnit1'] = " *C"
d['LogUnit2'] = "%"
elif (row[3] == "DS18S20" or row[3] == "DS1822", row[3] == "DS18B20", row[3] == "MAX31850K"):
d['LogValues'] = 1
d['LogLabel1'] = "Temperature"
d['LogUnit1'] = " *C"
sensors.append(d)
#---------------------------------------------------------------------------#
# Sun
#---------------------------------------------------------------------------#
cursor.execute("SELECT * FROM ha_data WHERE DataName='Sun'")
results = cursor.fetchone()
#Move database row to variables
        if results and results[2]:
d = collections.OrderedDict()
d['Id'] = results[0]
d['Name'] = results[1]
d['Data'] = json.loads(results[2])
d['Status'] = results[3]
d['LastUpdated'] = results[4].strftime("%Y-%m-%d %H:%M:%S")
sun.append(d)
#---------------------------------------------------------------------------#
# Weather
#---------------------------------------------------------------------------#
cursor.execute("SELECT * FROM ha_data WHERE DataName='Weather'")
results = cursor.fetchone()
#Move database row to variables
        if results and results[2]:
d = collections.OrderedDict()
d['Id'] = results[0]
d['Name'] = results[1]
d['Data'] = json.loads(results[2])
d['Status'] = results[3]
d['LastUpdated'] = results[4].strftime("%Y-%m-%d %H:%M:%S")
weather.append(d)
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'scenes': scenes, 'rooms': rooms, 'sensors': sensors, 'sun': sun, 'weather': weather})
#---------------------------------------------------------------------------#
# Get all scenes
#---------------------------------------------------------------------------#
@app.route("/ha/api/v1.0/scenes", methods=['GET'])
@auth.login_required
def get_scenes():
scenes = []
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
try:
        #Execute SQL-Query
cursor.execute("SELECT * FROM ha_scenes ORDER BY SceneOrder ASC")
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['Name'] = row[1]
d['Description'] = row[2]
d['Favorite'] = row[3]
d['Order'] = row[4]
scenes.append(d)
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'scenes': scenes})
#---------------------------------------------------------------------------#
# Get all scenes marked as favorite
#---------------------------------------------------------------------------#
@app.route("/ha/api/v1.0/scenes/favorite", methods=['GET'])
@auth.login_required
def get_scenes_favorite():
scenes = []
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
try:
        #Execute SQL-Query
cursor.execute("SELECT * FROM ha_scenes WHERE SceneFavorite=1 ORDER BY SceneOrder ASC")
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['Name'] = row[1]
d['Description'] = row[2]
d['Favorite'] = row[3]
d['Order'] = row[4]
scenes.append(d)
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'scenes': scenes})
#---------------------------------------------------------------------------#
# Get one scene
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/scenes/<int:scene_id>', methods=['GET'])
@auth.login_required
def get_scene(scene_id):
#Connect to MySQL
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
    cursor = db.cursor()
    d = collections.OrderedDict()  # stays empty if no scene matches scene_id
try:
        #Execute SQL-Query
cursor.execute("SELECT * FROM ha_scenes WHERE SceneId=%s", scene_id)
results = cursor.fetchall()
#Loop result from database
for row in results:
#Move database row to variables
d = collections.OrderedDict()
d['Id'] = row[0]
d['Name'] = row[1]
d['Description'] = row[2]
d['Favorite'] = row[3]
d['Order'] = row[4]
except MySQLdb.Error, e:
#Log exceptions
try:
return make_response(jsonify({'MySQL-Error': e.args[1]}), 500)
except IndexError:
return make_response(jsonify({'MySQL-Error': str(e)}), 500)
finally:
#Close database connection
cursor.close()
db.close()
return jsonify({'scene': d})
#---------------------------------------------------------------------------#
# Login
#---------------------------------------------------------------------------#
@app.route('/ha/api/v1.0/login', methods=['POST'])
def login():
if not request.json or not 'username' in request.json or not 'password' in request.json:
abort(400)
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
cursor.execute("SELECT SHA2(CONCAT(%s, UserSalt), 512) FROM ha_users WHERE UserName=%s AND SHA2(CONCAT(UserPassword, %s), 512) = SHA2(CONCAT(SHA2(CONCAT(%s, UserSalt), 512), %s), 512)", (request.json['password'], request.json['username'], request.user_agent.string, request.json['password'], request.user_agent.string))
data = cursor.fetchone()
if data is None:
abort(401)
else:
json = {
'username': request.json['username'],
'password': data[0]
}
return jsonify({'login': json})
#---------------------------------------------------------------------------#
# Authenticate
#---------------------------------------------------------------------------#
@auth.verify_password
def verify_password(username, password):
db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
cursor = db.cursor()
cursor.execute("SELECT * FROM ha_users WHERE UserName = %s AND SHA2(CONCAT(UserPassword, %s), 512) = %s", (username, request.user_agent.string, password))
data = cursor.fetchone()
if data is None:
return False
else:
return True
#---------------------------------------------------------------------------#
# 404 Error
#---------------------------------------------------------------------------#
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}), 404)
#---------------------------------------------------------------------------#
# Start app
#---------------------------------------------------------------------------#
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000, debug=False)
|
{
"content_hash": "12d47249504ec2de735afc280e2b16a1",
"timestamp": "",
"source": "github",
"line_count": 545,
"max_line_length": 320,
"avg_line_length": 30.146788990825687,
"alnum_prop": 0.5202678027997566,
"repo_name": "crundberg/CR-Smart-Home",
"id": "6c427117bca6bcc9e815806b9429230455f43a1e",
"size": "16430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "API.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "69183"
},
{
"name": "Shell",
"bytes": "883"
}
],
"symlink_target": ""
}
|
from twisted.words.protocols import irc
from txircd.channel import IRCChannel
from txircd.modbase import Command
class SajoinCommand(Command):
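	# SAJOIN <nick> <#channel>: oper-only command that forces another user
	# to join the given channel.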
def onUse(self, user, data):
data["targetuser"].join(data["targetchan"])
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "SAJOIN", ":You have not registered")
return {}
if "o" not in user.mode:
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You do not have the correct operator privileges")
return {}
if not params or len(params) < 2:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "SAJOIN", ":Not enough parameters")
return {}
if params[0] not in self.ircd.users:
user.sendMessage(irc.ERR_NOSUCHNICK, params[0], ":No such nick/channel")
return {}
if params[1][0] != "#":
			user.sendMessage(irc.ERR_BADCHANMASK, params[1], ":Bad Channel Mask")
return {}
if params[1] in self.ircd.channels:
cdata = self.ircd.channels[params[1]]
else:
cdata = IRCChannel(self.ircd, params[1])
return {
"user": user,
"targetuser": self.ircd.users[params[0]],
"targetchan": cdata
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"SAJOIN": SajoinCommand()
}
}
|
{
"content_hash": "bd9b3166747c2bdeadd834ec0a99a92c",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 122,
"avg_line_length": 35.43181818181818,
"alnum_prop": 0.568313021167415,
"repo_name": "DesertBus/txircd",
"id": "171f7169495f33f75124ef2db37312405a714eca",
"size": "1559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "txircd/modules/sajoin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "454329"
}
],
"symlink_target": ""
}
|
import argparse
import sys
from textwrap import fill
from . import generate_barcode
from .data import barcode_types
supported_barcode_types = "Supported barcode types are:\n" + fill(
", ".join(sorted(barcode_types)), initial_indent=" ", subsequent_indent=" "
)
def parse_opt(x):
if "=" in x:
return x.split("=", 1)
else:
# binary option
return [x, True]
parser = argparse.ArgumentParser(epilog=supported_barcode_types)
parser.add_argument(
"-t", "--type", default="qrcode", help="Barcode type (default %(default)s)"
)
parser.add_argument(
"-f",
"--format",
help=(
"Output format (default is based on file extension, or xbm with no "
+ "output file)"
),
)
parser.add_argument("-o", "--output", help="Output file (default is stdout)")
parser.add_argument("data", help="Barcode data")
parser.add_argument(
"options", nargs="*", type=parse_opt, help="List of BWIPP options (e.g. width=1.5)"
)
def main():
args = parser.parse_args()
if args.type not in barcode_types:
parser.error(
'Barcode type "{}" is not supported. {}'.format(
args.type, supported_barcode_types
)
)
try:
stdout_binary = sys.stdout.buffer
except AttributeError:
stdout_binary = sys.stdout # Python 2
if args.output is None:
args.output = stdout_binary
# PIL needs an explicit format when it doesn't have a filename to guess from
if args.output is stdout_binary and args.format is None:
args.format = "xbm"
image = generate_barcode(args.type, args.data, dict(args.options))
try:
image.convert("1").save(args.output, args.format)
except KeyError as e:
if e.args[0] == args.format.upper():
parser.error('Image format "{}" is not supported'.format(args.format))
else:
raise
if __name__ == "__main__":
main()
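# Hedged CLI sketch (illustrative invocations, not from the project docs):
#
#   python -m treepoem -t qrcode -o out.png "https://example.com"
#   python -m treepoem -t code128 "hello world" width=2 > out.xbm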
|
{
"content_hash": "2fbe2558b1a9a43ea01deb98b8b32cc1",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 87,
"avg_line_length": 26.324324324324323,
"alnum_prop": 0.6113963039014374,
"repo_name": "YPlan/treepoem",
"id": "bae0957b745a8ee09390ad574643796f432d7e5d",
"size": "1948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/treepoem/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PostScript",
"bytes": "1837024"
},
{
"name": "Python",
"bytes": "16861"
}
],
"symlink_target": ""
}
|
from resrc.userprofile.models import Profile
def karma_rate(user_pk, diff):
    profile = Profile.objects.get(user__pk=user_pk)
    if profile.karma is None:
        profile.karma = diff
    else:
        profile.karma += diff
    profile.save()
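# e.g. karma_rate(user.pk, 1) on an upvote and karma_rate(user.pk, -1) on a
# downvote (illustrative call sites, not from the original source).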
|
{
"content_hash": "5690ed8fa5abeeaceb16dc776b46e994",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 48,
"avg_line_length": 22.5,
"alnum_prop": 0.6355555555555555,
"repo_name": "vhf/resrc",
"id": "689acb1f81f3ecad919a7c9facf92ca68c0e2884",
"size": "250",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "resrc/utils/karma.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "126893"
},
{
"name": "HTML",
"bytes": "409463"
},
{
"name": "JavaScript",
"bytes": "4135"
},
{
"name": "Makefile",
"bytes": "34"
},
{
"name": "Python",
"bytes": "230941"
},
{
"name": "Ruby",
"bytes": "946"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
}
|
import archinfo
import logging
l = logging.getLogger(__name__)
class MinidumpThreadManager:
def __init__(self, loader, arch, **kwargs): # pylint: disable=unused-argument
self.loader = loader
self.arch = arch
self.threads = [MinidumpThread(loader, arch, loader.main_object.thread_registers(tid)) for tid in loader.main_object.threads]
self.modules = [] # ???
def new_thread(self, insert=False): # pylint: disable=no-self-use
raise TypeError("Cannot create new threads from a minidump file... for now")
def register_object(self, obj):
pass
class MinidumpThread:
def __init__(self, loader, arch: archinfo.Arch, registers):
self.loader = loader
self.arch = arch
self._registers = registers
if arch.name == 'AMD64':
self.teb = registers['gs_const']
self.thread_pointer = loader.main_object.memory.unpack_word(self.teb + 0x58)
        elif arch.name == 'X86':
            self.teb = registers['fs']
            self.thread_pointer = loader.main_object.memory.unpack_word(self.teb + 0x2c)
        else:
            raise ValueError('Unsupported minidump architecture: %s' % arch.name)
self.user_thread_pointer = self.thread_pointer
def get_tls_data_addr(self, tls_idx):
return self.loader.memory.unpack_word(self.thread_pointer + tls_idx * self.arch.bytes)
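# Note: 0x58 (AMD64) and 0x2c (X86) are the offsets of the
# ThreadLocalStoragePointer field inside the Windows TEB, which is why
# get_tls_data_addr() can index that slot array by the module's TLS index.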
|
{
"content_hash": "d3225ed23f4839adda92854d8a8ca1b8",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 133,
"avg_line_length": 38.3235294117647,
"alnum_prop": 0.6431312356101304,
"repo_name": "angr/cle",
"id": "c02db562d57c11c2e46fe18759d32a67572b767f",
"size": "1303",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cle/backends/tls/minidump_tls.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "609361"
}
],
"symlink_target": ""
}
|
"""
parses a file system and returns a data structure of the results in-memory
"""
# TODO: need to come up with a solution to handle common cases where the file-system is larger than given memory.
class ftree(object):
"""
a basic n-dimensional tree that is representative of the file system
"""
node_map = [] # used to get a map of the location of the file
def __init__(self,root):
self.root = root
def walk(self,cnode):
""" walk the tree in depth-first fashion """
pass
def to_json(self):
""" return a json string of the filesystem """
pass
def to_yaml(self):
""" return a yaml string of the filesystem """
pass
class node(object):
"""
an abstract node type that all system nodes inherit from
"""
    def __init__(self, val, children=None, parent=None, siblings=None):
        self.val = val
        # a fresh list per instance; a mutable default argument would be shared
        self.children = children if children is not None else []
        self.parent = parent
        self.siblings = siblings
    def to_json(self):
        """ returns the json version of itself """
        # XXX: this method might be unnecessary given pythons object structure
        return dict(self.__dict__)
class file_node(node):
""" a file node representing a file """
# TODO: add special attributes that can search and query specific types of file_node(s)
def __init__(self,filename,contents="",extension="",size=0,parent=None,path=""):
self.filename = filename
self.contents = contents
self.extension = extension
self.size = size
self.parent = parent
self.path = path
    def load_contents(self, as_string=False):
        try:
            with open(self.path + self.filename, "r") as f:
                self.contents = f.read() if as_string else f.readlines()
        except (IOError, OSError):
            # TODO: add a handler here
            pass
class dir_node(node):
""" a directory node representing a directory """
def __init__(self):
pass
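# Hedged usage sketch (construction only, since walk()/to_json()/to_yaml()
# above are still stubs):
#
#   root = node('/')
#   leaf = file_node('notes.txt', extension='txt', parent=root, path='/tmp/')
#   tree = ftree(root)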
|
{
"content_hash": "f806df6d0023545b437fc5bda298db5b",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 110,
"avg_line_length": 27.015151515151516,
"alnum_prop": 0.683679192372406,
"repo_name": "baallezx/diffos",
"id": "996145fa9b7d2774b27c4ad0e8cd6afe30ffa6ba",
"size": "1783",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ftree/fparser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18093"
},
{
"name": "Ruby",
"bytes": "897"
}
],
"symlink_target": ""
}
|
from lib import config, helpers
class Cymon(object):
def __init__(self):
# lists of values that can be returned
self.ip_list = []
self.domain_list = []
self.hash_list = []
self.url_list = []
self.score_list = []
self.imphash_list = []
# get helping functions
self.api = helpers.Common()
self.error_log = helpers.IO()
# static station settings
self.station_name = 'Cymon'
self.endpoint = 'https://cymon.io/api/nexus/v1/'
self.url_path = ''
self.parameters = {'limit': '1000'}
self.headers = {'content-type': 'application/json',
'accept': 'application/json',
}
self.user_agent = {}
self.return_format = 'json'
# Check for api key
if config.cymon_api_key:
            self.headers.update({'Authorization': 'Token %s' % config.cymon_api_key})
else:
error_msg = 'API Key NOT provided'
self.error_log.error_log(error_msg, self.station_name)
### Station tunes
def domain_to_ipv4(self, domain_name):
self.url_path = '/domain/' + domain_name
response = self.api.session_helper(station_name=self.station_name, endpoint=self.endpoint, method_type='get',
data_to_send=None, url_path=self.url_path, parameters=self.parameters,
headers=self.headers, user_agent=self.user_agent,
response_format=self.return_format)
if response:
for key in response['ips']:
self.ip_list.append(key.split('/')[-1])
return self.ip_list
def ipv4_to_domain(self, ip_address):
self.url_path = '/ip/' + ip_address + '/domains'
response = self.api.session_helper(station_name=self.station_name, endpoint=self.endpoint, method_type='get',
data_to_send=None, url_path=self.url_path, parameters=self.parameters,
headers=self.headers, user_agent=self.user_agent,
response_format=self.return_format)
if response:
for name in response['results']:
self.domain_list.append(name['name'])
return self.domain_list
def ipv4_to_hash(self, ip_address):
self.url_path = '/ip/' + ip_address + '/malware/'
response = self.api.session_helper(station_name=self.station_name, endpoint=self.endpoint, method_type='get',
data_to_send=None, url_path=self.url_path, parameters=self.parameters,
headers=self.headers, user_agent=self.user_agent,
response_format=self.return_format)
if response:
for key in response['results']:
if key['hash_type'] != 'SSDEEP': # Exclude SSDEEP
self.hash_list.append(key['hash_value'])
return self.hash_list
def ipv4_to_url(self, ip_address):
self.url_path = '/ip/' + ip_address + '/events'
response = self.api.session_helper(station_name=self.station_name, endpoint=self.endpoint, method_type='get',
data_to_send=None, url_path=self.url_path, parameters=self.parameters,
headers=self.headers, user_agent=self.user_agent,
response_format=self.return_format)
if response:
for key in response['results']:
if key['details_url']:
self.url_list.append(key['details_url'])
return list(set(self.url_list))
def hash_to_url(self, hash_value):
self.url_path = '/malware/' + hash_value + '/events/'
response = self.api.session_helper(station_name=self.station_name, endpoint=self.endpoint, method_type='get',
data_to_send=None, url_path=self.url_path, parameters=self.parameters,
headers=self.headers, user_agent=self.user_agent,
response_format=self.return_format)
if response:
for key in response['results']:
self.url_list.append(key['details_url'])
return list(set(self.url_list))
### MAIN ###
if __name__ == '__main__':
c = Cymon()
##print c.domain_to_ipv4('google.com')
##print c.ipv4_to_domain('216.58.219.14')
##print c.ipv4_to_hash('216.58.219.14')
##print c.ipv4_to_url('216.58.219.14')
##print c.hash_to_url('c1bed909e40f97a923eda3b738c58a6a8238bd3b')
|
{
"content_hash": "6a0d775a940b5ae5880327f663bdc4e3",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 117,
"avg_line_length": 45.10377358490566,
"alnum_prop": 0.5369169629784564,
"repo_name": "QTek/QRadio",
"id": "a6c6c4d55be50c37c9d203765900135574839559",
"size": "5970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stations/cymon/station.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "160102"
}
],
"symlink_target": ""
}
|
from django import forms
class CommentForm(forms.Form):
username = forms.CharField(max_length=200, required=True, widget=forms.TextInput(attrs={'class': 'form-control'}))
email = forms.EmailField(label="Email", required=True, widget=forms.EmailInput(attrs={'class': 'form-control'}))
website = forms.URLField(required=False, widget=forms.URLInput(attrs={'class': 'form-control'}))
text = forms.CharField(required=True, widget=forms.Textarea(attrs={'class': 'form-control', 'id': 'comment-form-text'}))
parent = forms.CharField(required=False, widget=forms.HiddenInput(attrs={'id': 'comment-form-parent'}))
|
{
"content_hash": "813a29fa3f39503636f7a50be8cade46",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 124,
"avg_line_length": 69.66666666666667,
"alnum_prop": 0.7272727272727273,
"repo_name": "marthaurion/blog_django",
"id": "5dbd8e56c29c68f06bedf5d623a087fedaa92ae0",
"size": "627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "comments/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "593621"
},
{
"name": "HTML",
"bytes": "21989"
},
{
"name": "JavaScript",
"bytes": "13398"
},
{
"name": "Python",
"bytes": "61213"
}
],
"symlink_target": ""
}
|
import sys
print("runmod1: passed %s" % sys.argv[1])
|
{
"content_hash": "53d0d69766cda21dfe299b4c6419a57f",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 41,
"avg_line_length": 26.5,
"alnum_prop": 0.6792452830188679,
"repo_name": "blueyed/coveragepy",
"id": "cb1f7e999152fc5e4f937390b5538afb1d81c065",
"size": "251",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/modules/runmod1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3467"
},
{
"name": "C",
"bytes": "51630"
},
{
"name": "CSS",
"bytes": "13550"
},
{
"name": "HTML",
"bytes": "162518"
},
{
"name": "JavaScript",
"bytes": "30478"
},
{
"name": "Makefile",
"bytes": "3473"
},
{
"name": "PowerShell",
"bytes": "7288"
},
{
"name": "Python",
"bytes": "995953"
},
{
"name": "Shell",
"bytes": "1281"
}
],
"symlink_target": ""
}
|
import urllib2
import json
import sys
import re
def firstautocomp(kw):
"""
Get the first autocomplete result
for kw.
"""
webpage="http://suggestqueries.google.com/complete/search?client=chrome&q="\
+kw
result=json.loads(urllib2.urlopen(webpage).read())
if len(result[1]):
return result[1][0]
else:
return ''
def usage():
"""
Show the usage of the program, then
exit with status 1.
"""
sys.stderr.write("Usage: ./googlecomp.py keyword\n")
sys.stderr.write("\tFind the first Google autocomplete keyword.\n")
sys.stderr.write("\tkeyword: A keyword to find autocomplete results for.\n")
sys.exit(1)
if __name__ == '__main__':
if len(sys.argv) < 2:
usage()
else:
try:
print firstautocomp(re.sub(r'\s','+',sys.argv[1]))
except urllib2.HTTPError as e:
sys.stderr.write("There was an HTTP error. Sorry about that.\n")
sys.exit(1)
|
{
"content_hash": "4a91e8b137c4aa07f15d853a8f4e75cc",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 80,
"avg_line_length": 32.03225806451613,
"alnum_prop": 0.5760322255790534,
"repo_name": "ckjbgames/misc-code",
"id": "20a88dd34d7bbaabe113237aa64ac803dc57723b",
"size": "1254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/googlecomp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "3224"
},
{
"name": "Python",
"bytes": "4384"
}
],
"symlink_target": ""
}
|
"""Save and retrieve the compiler settings into a text file.
Copyright (c) 2017 carlosperate https://github.com/carlosperate/
Licensed under the Apache License, Version 2.0 (the "License"):
http://www.apache.org/licenses/LICENSE-2.0
The ServerCompilerSettings is a singleton class maintained in memory, and
it saves the Ardublockly and Arduino IDE settings into a file.
On first invocation of the singleton it reads the settings from the file.
"""
from __future__ import unicode_literals, absolute_import, print_function
import os
import re
import sys
import codecs
# local-packages imports
import configparser
# This package modules
import ardublocklyserver.serialport
class ServerCompilerSettings(object):
"""Singleton class to store and save the Ardublockly settings.
    On first invocation the class tries to read the settings from the file;
    it keeps them in memory, and every time they are modified the changes
    are written back to the file.
    No compiler is part of the Python code; instead, settings that
    point to the local Arduino IDE and sketch are stored here.
The public settings to set and get are:
compiler_dir
sketch_name
sketch_dir
arduino_board
serial_port
load_ide_option
"""
# Class variables that after initialisation will not change
__singleton_instance = None
__settings_path = None
# Class variable to indicate the settings filename, static content
__settings_filename = 'ServerCompilerSettings.ini'
# Class dictionary to define Arduino board types, static content
# TODO: This content will be moved from here and integrated completely
# into 'blockly\generators\arduino\boards.js', which should then
# send the selected flag to be saved as a single value
__arduino_types = {'Uno': 'arduino:avr:uno',
'Nano 328': 'arduino:avr:nano:cpu=atmega328',
'Nano 168': 'arduino:avr:nano:cpu=atmega168',
'Leonardo': 'arduino:avr:leonardo',
'Yun': 'arduino:avr:leonardo',
'Mega': 'arduino:avr:mega',
'Duemilanove 328p': 'arduino:avr:diecimila',
'Duemilanove 168p':
'arduino:avr:diecimila:cpu=atmega168',
'Atmel atmega328p Xplained mini':
'atmel:avr:atmega328p_xplained_mini',
'Atmel atmega168pb Xplained mini':
'atmel:avr:atmega168pb_xplained_mini',
'Atmel atmega328pb Xplained mini':
'atmel:avr:atmega328pb_xplained_mini',
'ESP8266 Huzzah': 'esp8266:esp8266:generic',
'ESP8266 WeMos D1': 'esp8266:esp8266:generic'}
# Class dictionary to contain the computer COM ports, dynamic content
__serial_ports = {'port0': 'COM1'}
# Class dictionary to define IDE load options, static content
__ide_load_options = {'open': 'Open sketch in IDE',
'verify': 'Verify sketch',
'upload': 'Compile and Upload sketch'}
#
# Singleton creator and destructor
#
def __new__(cls, settings_dir=None, *args, **kwargs):
"""Create or returning the singleton instance.
The argument settings_file_dir is only processed on first
initialisation, and any future calls to the constructor will returned
the already initialised instance with a set settings_file_dir.
"""
if not cls.__singleton_instance:
# Create the singleton instance
cls.__singleton_instance =\
super(ServerCompilerSettings, cls).__new__(
cls, *args, **kwargs)
# Initialise the instance, defaults if file not found
cls.__singleton_instance.__initialise(settings_dir)
return cls.__singleton_instance
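    # Illustration of the singleton behaviour (not in the original source):
    # every call returns the same instance, so later settings_dir arguments
    # are ignored.
    #
    #   a = ServerCompilerSettings('/tmp/ardublockly')
    #   b = ServerCompilerSettings()
    #   assert a is b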
def __initialise(self, settings_dir=None):
# Create variables to be used with accessors
self.__load_ide_option = None
self.__compiler_dir = None
self.__sketch_dir = None
self.__sketch_name = None
self.__arduino_board_key = None
self.__arduino_board_value = None
self.__serial_port_key = None
self.__serial_port_value = None
if settings_dir:
self.__settings_path = os.path.join(settings_dir,
self.__settings_filename)
else:
# If not set, the file path will be same location as the executed
# python code that calls this class
called_script_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
self.__settings_path = os.path.normpath(
os.path.join(called_script_dir, self.__settings_filename))
# Load settings from file
self.read_settings()
@classmethod
def _drop(cls):
"""Drop the instance."""
cls.__singleton_instance = None
#
# Compiler Directory accessors
#
def get_compiler_dir(self):
return self.__compiler_dir
def set_compiler_dir(self, new_compiler_dir):
"""Set the compiler dir, must a valid file or directory."""
# Mac only check, as apps are packaged directories
if sys.platform == 'darwin':
# Arduino version >1.6.0 has changed the binary name, so check both
bundle = os.path.join(new_compiler_dir, 'Contents', 'MacOS')
if os.path.isfile(os.path.join(bundle, 'JavaApplicationStub')):
new_compiler_dir = os.path.join(bundle, 'JavaApplicationStub')
print('Compiler file in OS X located within the app bundle.')
elif os.path.isfile(os.path.join(bundle, 'Arduino')):
new_compiler_dir = os.path.join(bundle, 'Arduino')
print('Compiler file in OS X located within the app bundle.')
else:
print('Could not find Arduino executable in OS X app bundle.')
# Check directory
if os.path.isfile(new_compiler_dir):
self.__compiler_dir = new_compiler_dir
print('Compiler directory set to:\n\t%s' % self.__compiler_dir)
self.save_settings()
else:
print('The provided compiler path is not valid !!!'
'\n\t %s' % new_compiler_dir)
if self.__compiler_dir:
print('Previous compiler path maintained:\n\t%s' %
self.__compiler_dir)
else:
self.set_compiler_dir_default()
print('Default compiler path set:\n\t%s' %
self.__compiler_dir)
self.save_settings()
compiler_dir = property(get_compiler_dir, set_compiler_dir)
def set_compiler_dir_default(self):
self.__compiler_dir = None
def set_compiler_dir_from_file(self, new_compiler_dir):
"""Set the compiler location, must be full path to an existing file."""
if os.path.exists(new_compiler_dir):
self.__compiler_dir = new_compiler_dir
else:
print('The provided compiler path in the settings file is not '
'valid:')
print('\t%s' % new_compiler_dir)
self.set_compiler_dir_default()
print('Default compiler path set:\n\t%s' % self.__compiler_dir)
#
# Sketch name accessors
#
def get_sketch_name(self):
return self.__sketch_name
def set_sketch_name(self, new_sketch_name):
"""Set the Sketch name.
It only accepts letters, numbers, underscores and dashes.
"""
if re.match("^[\w\d_-]*$", new_sketch_name):
self.__sketch_name = new_sketch_name
print('Sketch name set to:\n\t%s' % self.__sketch_name)
self.save_settings()
else:
print('Provided Sketch name contains invalid characters: !!!'
'\n\t%s' % new_sketch_name)
if self.__sketch_name:
print('Previous Sketch name maintained:\n\t%s' %
self.__sketch_name)
else:
self.set_sketch_name_default()
print('Default Sketch name set:\n\t%s' %
self.__sketch_name)
self.save_settings()
sketch_name = property(get_sketch_name, set_sketch_name)
def set_sketch_name_default(self):
self.__sketch_name = 'ArdublocklySketch'
def set_sketch_name_from_file(self, new_sketch_name):
"""Set the Sketch name from a file read.
It only accepts letters, numbers, underscores and dashes.
"""
if re.match("^[\w\d_-]*$", new_sketch_name):
self.__sketch_name = new_sketch_name
else:
            print('Settings file Sketch name contains invalid characters:'
                  '\n\t%s' % new_sketch_name)
self.set_sketch_name_default()
print('Default Sketch name set:\n\t%s' % self.__sketch_name)
#
# Sketch Directory accessors
#
def get_sketch_dir(self):
return self.__sketch_dir
def set_sketch_dir(self, new_sketch_dir):
"""Set the sketch directory, which must be a folder."""
if os.path.isdir(new_sketch_dir):
self.__sketch_dir = new_sketch_dir
print('Sketch directory set to:\n\t%s' % self.__sketch_dir)
self.save_settings()
else:
print('The provided sketch directory is not valid !!!'
'\n\t%s' % new_sketch_dir)
if self.__sketch_dir:
print('Previous Sketch directory maintained:\n\t%s' %
self.__sketch_dir)
else:
self.set_sketch_dir_default()
print('Default Sketch directory set:\n\t%s' %
self.__sketch_dir)
self.save_settings()
sketch_dir = property(get_sketch_dir, set_sketch_dir)
def set_sketch_dir_default(self):
"""Sketch default location the same as the settings file location."""
self.__sketch_dir = os.path.dirname(self.__settings_path)
def set_sketch_dir_from_file(self, new_sketch_dir):
"""Set the sketch directory from settings file, must be a folder."""
if os.path.isdir(new_sketch_dir):
self.__sketch_dir = new_sketch_dir
else:
print('Settings file sketch directory is not valid:'
'\n\t%s' % new_sketch_dir)
self.set_sketch_dir_default()
print('Default Sketch directory set:\n\t%s' % self.__sketch_dir)
#
# Arduino Board and board lists accessors
#
def get_arduino_board(self):
return self.__arduino_board_key
def set_arduino_board(self, new_board):
if new_board in self.__arduino_types:
self.__arduino_board_value = self.__arduino_types[new_board]
self.__arduino_board_key = new_board
print('Arduino Board set to:\n\t%s' % self.__arduino_board_key)
self.save_settings()
else:
print('Provided Arduino Board does not exist: !!!'
'\n\t%s' % new_board)
if self.__arduino_board_key and self.__arduino_board_value:
print('Previous Arduino board type maintained:\n\t%s' %
self.__arduino_board_key)
else:
self.set_arduino_board_default()
print('Default Arduino board type set:\n\t%s' %
self.__arduino_board_key)
self.save_settings()
arduino_board = property(get_arduino_board, set_arduino_board)
def set_arduino_board_default(self):
self.__arduino_board_key = sorted(self.__arduino_types.keys())[0]
self.__arduino_board_value = \
self.__arduino_types[self.__arduino_board_key]
def set_arduino_board_from_file(self, new_board):
if new_board in self.__arduino_types:
self.__arduino_board_value = self.__arduino_types[new_board]
self.__arduino_board_key = new_board
else:
print('Settings file Arduino Board does not exist:\n\t%s'
% new_board)
self.set_arduino_board_default()
print('Default Arduino board type set:\n\t%s' %
self.__arduino_board_key)
def get_arduino_board_flag(self):
return self.__arduino_board_value
def get_arduino_board_types(self):
return [key for key in self.__arduino_types]
#
# Serial Port and lists accessors
# Extra checks of the available Ports are required as states can change
#
def get_serial_port(self):
"""Check available Serial Ports and populates the port dictionary.
Returns currently selected Serial Port key if available.
Returns None if selected Serial Port is not available anymore.
:return: Serial Port dictionary key
"""
self.populate_serial_port_list()
if not self.__serial_ports:
print('There are no available Serial Ports !!!')
self.__serial_port_key = None
self.__serial_port_value = None
self.save_settings()
elif self.__serial_port_value not in self.__serial_ports.values():
print('The selected Serial Port is no longer available !!!')
self.__serial_port_key = None
self.__serial_port_value = None
self.save_settings()
elif self.__serial_ports[self.__serial_port_key] != \
self.__serial_port_value:
# At this point the dictionary is not empty and the value is
# present, but not with the right key. So correct the key.
for key, value in self.__serial_ports.items():
if self.__serial_port_value == value:
self.__serial_port_key = key
# No need to save settings as only value saved and stays the same
return self.__serial_port_key
def set_serial_port(self, new_port):
"""Check available Serial Ports and populates the port dictionary.
If the new serial port is not in the dictionary or the dictionary is
empty it prints an error in the console.
:param new_port: the new port to set
"""
if new_port in self.__serial_ports:
self.__serial_port_value = self.__serial_ports[new_port]
self.__serial_port_key = new_port
# Now we check if the Port is still available
self.populate_serial_port_list()
if not self.__serial_ports:
print('There are no available Serial Ports !!!')
self.__serial_port_key = None
self.__serial_port_value = None
elif self.__serial_port_value not in self.__serial_ports.values():
print('The selected Serial Port is no longer available !!!')
self.__serial_port_key = None
self.__serial_port_value = None
print('Serial Port set to:\n\t%s' % self.__serial_port_value)
self.save_settings()
else:
print('Provided Serial Port is not valid: !!!'
'\n\t%s' % new_port)
if self.__serial_port_key and self.__serial_port_value:
print('Previous Serial Port maintained:\n\t%s' %
self.__serial_port_value)
else:
self.set_serial_port_default()
print('Default Serial Port set:\n\t%s' %
self.__serial_port_value)
self.save_settings()
serial_port = property(get_serial_port, set_serial_port)
def set_serial_port_default(self):
"""Check available Serial Ports and populate the port dictionary.
        If there are no available serial ports it resets the variables.
"""
self.populate_serial_port_list()
if not self.__serial_ports:
self.__serial_port_key = None
self.__serial_port_value = None
else:
self.__serial_port_key = sorted(self.__serial_ports.keys())[0]
self.__serial_port_value = \
self.__serial_ports[self.__serial_port_key]
def set_serial_port_from_file(self, new_port_value):
"""Check available Serial Ports and populate the port dictionary.
If the new serial port is not in the dictionary or the dictionary is
empty it prints an error in the console.
:param new_port_value: the new port to set
"""
# Check if the settings file value is present in available ports list
set_default = True
self.populate_serial_port_list()
if self.__serial_ports:
for key, value in self.__serial_ports.items():
if new_port_value == value:
self.__serial_port_key = key
self.__serial_port_value = value
set_default = False
if set_default:
print('Settings file Serial Port is not currently available:'
'\n\t%s' % new_port_value)
self.set_serial_port_default()
print('Default Serial Port set:\n\t%s' % self.__serial_port_value)
def get_serial_port_flag(self):
"""Check available Serial Ports and populates the port dictionary.
Returns currently selected Serial Port value if available.
Returns None if selected Serial Port is not available anymore.
:return: Serial Port dictionary value
"""
self.populate_serial_port_list()
if not self.__serial_ports:
print('There are no available Serial Ports !!!')
self.__serial_port_key = None
self.__serial_port_value = None
self.save_settings()
elif self.__serial_port_value not in self.__serial_ports.values():
print('The selected Serial Port is no longer available !!!')
self.__serial_port_key = None
self.__serial_port_value = None
self.save_settings()
elif self.__serial_ports[self.__serial_port_key] != \
self.__serial_port_value:
# At this point the dictionary is not empty and the flag
# (dictionary value) is present, but not with the right key.
# So correct the key.
for key, value in self.__serial_ports.items():
if self.__serial_port_value == value:
self.__serial_port_key = key
# No need to save settings as only value saved and stays the same
return self.__serial_port_value
def get_serial_ports(self):
self.populate_serial_port_list()
return self.__serial_ports
def populate_serial_port_list(self):
"""Populate the serial ports dictionary with the available ports."""
port_list = ardublocklyserver.serialport.get_port_list()
self.__serial_ports = {}
if port_list:
port_id = 0
for item in port_list:
id_string = 'port' + str(port_id)
self.__serial_ports.update({id_string: item})
port_id += 1
#
# Load the IDE accessors
#
def get_load_ide(self):
return self.__load_ide_option
def set_load_ide(self, new_load_option):
if new_load_option in self.__ide_load_options:
self.__load_ide_option = new_load_option
print('IDE options set to:\n\t%s' %
self.__ide_load_options[self.__load_ide_option])
self.save_settings()
else:
print('The provided "Load IDE option" is not valid !!!'
'\n\t%s' % new_load_option)
if self.__load_ide_option:
print('Previous "Load IDE option" maintained:\n\t%s' %
self.__ide_load_options[self.__load_ide_option])
else:
self.set_load_ide_default()
print('Default "Load IDE option" set:\n\t%s' %
self.__ide_load_options[self.__load_ide_option])
self.save_settings()
load_ide_option = property(get_load_ide, set_load_ide)
def set_load_ide_default(self):
self.__load_ide_option = \
sorted(self.__ide_load_options.keys())[0]
def set_load_ide_from_file(self, new_load_option):
if new_load_option in self.__ide_load_options:
self.__load_ide_option = new_load_option
else:
print('Settings file "Load IDE option" is not valid:'
'\n\t%s' % new_load_option)
self.set_load_ide_default()
print('Default "Load IDE option" set:\n\t%s' %
self.__load_ide_option)
def get_load_ide_options(self):
return self.__ide_load_options
#
# Sets all the settings to default values
#
def set_default_settings(self):
self.set_load_ide_default()
self.set_compiler_dir_default()
self.set_sketch_dir_default()
self.set_sketch_name_default()
self.set_serial_port_default()
self.set_arduino_board_default()
#
# Settings file
#
def save_settings(self):
"""Save all the settings into a configuration file."""
settings_parser = configparser.ConfigParser()
# IDE Section
settings_parser.add_section('Arduino_IDE')
settings_parser.set(
'Arduino_IDE', 'arduino_exec_path', '%s' % self.compiler_dir)
settings_parser.set(
'Arduino_IDE', 'arduino_board', '%s' % self.arduino_board)
settings_parser.set(
'Arduino_IDE',
'arduino_serial_port',
'%s' % self.__serial_port_value)
# Sketch section
settings_parser.add_section('Arduino_Sketch')
settings_parser.set(
'Arduino_Sketch', 'sketch_name', '%s' % self.sketch_name)
settings_parser.set(
'Arduino_Sketch', 'sketch_directory', '%s' % self.sketch_dir)
# Ardublockly section
settings_parser.add_section('Ardublockly')
settings_parser.set(
'Ardublockly', 'ide_load', '%s' % self.load_ide_option)
# Set the path and create/overwrite the file
try:
with codecs.open(self.__settings_path, 'wb+', encoding='utf-8') as\
config_file:
settings_parser.write(config_file)
except Exception as e:
            print('%s\nUnable to write the settings file to:\n\t%s' %
                  (str(e), self.__settings_path))
else:
print('Settings file saved to:\n\t%s' % self.__settings_path)
sys.stdout.flush()
def read_settings(self):
"""Read the settings from a file and load them into the instance.
If it cannot read the file it sets the variables
to the default value.
"""
settings_dict = self.get_settings_file_data()
if settings_dict:
self.set_compiler_dir_from_file(settings_dict['arduino_exec_path'])
self.set_arduino_board_from_file(settings_dict['arduino_board'])
self.set_serial_port_from_file(
settings_dict['arduino_serial_port'])
self.set_sketch_name_from_file(settings_dict['sketch_name'])
self.set_sketch_dir_from_file(settings_dict['sketch_directory'])
self.set_load_ide_from_file(settings_dict['ide_load'])
else:
print('Settings will be set to the default values.')
self.set_default_settings()
# Printing the settings to be able to easily spot issues at load
print('Final settings loaded:')
print('\tCompiler directory: %s' % self.__compiler_dir)
print('\tArduino Board Key: %s' % self.__arduino_board_key)
print('\tArduino Board Value: %s' % self.__arduino_board_value)
print('\tSerial Port Value: %s' % self.__serial_port_value)
print('\tSketch Name: %s' % self.__sketch_name)
print('\tSketch Directory: %s' % self.__sketch_dir)
print('\tLoad IDE option: %s' % self.__load_ide_option)
# The read X_from_file() functions do not save new settings and neither
# does the set_default_settings() function, so save them either way.
self.save_settings()
def get_settings_file_data(self):
"""Create a dictionary from the settings stored in a file.
:return: A dictionary with all the options and values from the settings
file (sections are ignored during parsing).
"""
settings_dict = {}
settings_parser = configparser.ConfigParser()
try:
with codecs.open(self.__settings_path, 'r', 'utf8') as config_file:
settings_parser.read_file(config_file)
settings_dict['arduino_exec_path'] =\
settings_parser.get('Arduino_IDE', 'arduino_exec_path')
settings_dict['arduino_board'] =\
settings_parser.get('Arduino_IDE', 'arduino_board')
settings_dict['arduino_serial_port'] =\
settings_parser.get('Arduino_IDE', 'arduino_serial_port')
settings_dict['sketch_name'] =\
settings_parser.get('Arduino_Sketch', 'sketch_name')
settings_dict['sketch_directory'] =\
settings_parser.get('Arduino_Sketch', 'sketch_directory')
settings_dict['ide_load'] =\
settings_parser.get('Ardublockly', 'ide_load')
print('Settings loaded from:\n\t%s' % self.__settings_path)
except Exception:
print('Settings file corrupted or not found in:\n\t%s'
% self.__settings_path)
settings_dict = None
return settings_dict
def get_settings_file_path(self):
return self.__settings_path
def delete_settings_file(self):
success = False
if os.path.exists(self.__settings_path):
os.remove(self.__settings_path)
success = True
return success
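# Hedged usage sketch (illustrative, not from the original module):
#
#   settings = ServerCompilerSettings()        # loads or creates the ini file
#   settings.sketch_name = 'BlinkTest'         # validated and saved on assignment
#   print(settings.get_arduino_board_types())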
|
{
"content_hash": "37a60451a59d704af582fbea94fbd5a2",
"timestamp": "",
"source": "github",
"line_count": 624,
"max_line_length": 79,
"avg_line_length": 42.00480769230769,
"alnum_prop": 0.5775819312502385,
"repo_name": "drissfr2017/BlocklyDriss",
"id": "f6beb3843c55f22c0808b753c97ccbfa766fc706",
"size": "26235",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ardublocklyserver/compilersettings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2275"
},
{
"name": "CSS",
"bytes": "179368"
},
{
"name": "Emacs Lisp",
"bytes": "2410"
},
{
"name": "HTML",
"bytes": "1602303"
},
{
"name": "JavaScript",
"bytes": "23798190"
},
{
"name": "NSIS",
"bytes": "8432"
},
{
"name": "Python",
"bytes": "982260"
},
{
"name": "Shell",
"bytes": "6322"
}
],
"symlink_target": ""
}
|
from exabgp.util import coroutine
class UnexpectedData (Exception):
def __init__(self, line, position, token):
super(UnexpectedData, self).__init__('Unexpected data at line %d position %d : "%s"' % (line,position,token))
@coroutine.join
def unescape(s):
start = 0
while start < len(s):
pos = s.find('\\', start)
if pos == -1:
yield s[start:]
break
yield s[start:pos]
pos += 1
esc = s[pos]
if esc == 'b':
yield '\b'
elif esc == 'f':
yield '\f'
elif esc == 'n':
yield '\n'
elif esc == 'r':
yield '\r'
elif esc == 't':
yield '\t'
elif esc == 'u':
yield chr(int(s[pos + 1:pos + 5], 16))
pos += 4
else:
yield esc
start = pos + 1
@coroutine.each
def tokens (stream):
spaces = [' ','\t','\r','\n']
strings = ['"', "'"]
syntax = [',','[',']','{','}',';']
comment = ['#',]
nb_lines = 0
for line in stream:
nb_chars = 0
quoted = ''
word = ''
for char in line:
if char in comment:
if quoted:
word += char
else:
if word:
						yield nb_lines,nb_chars,word
word = ''
break
elif char in syntax:
if quoted:
word += char
else:
if word:
yield nb_lines,nb_chars,word
nb_chars += len(word)
word = ''
yield nb_lines,nb_chars,char
nb_chars += 1
elif char in spaces:
if quoted:
word += char
elif word:
yield nb_lines,nb_chars,word
nb_chars += len(word)
word = ''
nb_chars += 1
elif char in strings:
word += char
if quoted == char:
quoted = ''
yield nb_lines,nb_chars,word
nb_chars += len(word) + 1
word = ''
else:
quoted = char
nb_chars += 1
else:
word += char
nb_chars += 1
class Tokeniser (object):
def __init__ (self,stream):
self.tokeniser = tokens(stream)
self._rewind = []
def __call__ (self):
if self._rewind:
return self._rewind.pop()
return Tokeniser.parser(self.tokeniser)
def rewind (self,token):
self._rewind.append(token)
@staticmethod
def parser (tokeniser):
def content(next):
try:
while True:
line,position,token = next()
if token == '[':
l = []
for element in iterate_list(next):
l.append(element)
return l
elif token[0] in ('"',"'"):
return unescape(token[1:-1])
# elif token == 'true':
# return True
# elif token == 'false':
# return False
# elif token == 'null':
# return None
else:
return token
except ValueError:
raise UnexpectedData(line,position,token)
except StopIteration:
return ''
def iterate_list(next):
token = content(next)
while token != ']':
yield token
token = content(next)
return content(tokeniser)
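# --- Minimal illustrative driver (not part of exabgp). It feeds a fabricated
# configuration fragment through the tokeniser and drains it token by token;
# as Tokeniser.parser above implies, tokens(stream) acts as a callable that
# returns (line, position, token) tuples, and exhaustion surfaces as an
# empty string from the Tokeniser instance.
if __name__ == '__main__':
	import io
	sample = io.StringIO('neighbor 192.0.2.1 { description "test peer"; }\n')
	tokeniser = Tokeniser(sample)
	while True:
		token = tokeniser()
		if token == '':
			break
		print(token)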
|
{
"content_hash": "5525464e20e092e4a24b7ab2d454db13",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 111,
"avg_line_length": 19.868613138686133,
"alnum_prop": 0.5547391623806025,
"repo_name": "dwcarder/sdn-ix-demo",
"id": "05ea41de769f0af05cb755cb65f9b192e19883f3",
"size": "2722",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "exabgp-3.4.3/lib/exabgp/configuration/engine/tokeniser.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Groff",
"bytes": "1554"
},
{
"name": "Makefile",
"bytes": "232"
},
{
"name": "Perl",
"bytes": "8011"
},
{
"name": "Python",
"bytes": "838018"
},
{
"name": "Shell",
"bytes": "26752"
}
],
"symlink_target": ""
}
|
import datetime
from django.utils import timezone
def compute_age(born):
assert isinstance(born, datetime.date)
today = timezone.datetime.today()
age = today.year - born.year - ((today.month, today.day) < (born.month, born.day))
return age
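# Minimal illustrative check (hypothetical birth date): the trailing tuple
# comparison in compute_age() evaluates to True (i.e. 1) when the birthday
# has not yet occurred this year, so one year is subtracted.
if __name__ == '__main__':
    print(compute_age(datetime.date(2000, 6, 15)))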
|
{
"content_hash": "352c5a15596ba6757ebcb5b833072f08",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 86,
"avg_line_length": 23.636363636363637,
"alnum_prop": 0.6961538461538461,
"repo_name": "moas/sketchbadges",
"id": "e34776ea88676f1c46614f15bfcf242dff7bbd16",
"size": "260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exo/studio/helpers/functional.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4263"
},
{
"name": "Python",
"bytes": "44728"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('testreport', '0003_auto_20141023_1423'),
]
operations = [
migrations.AddField(
model_name='launch',
name='started_by',
field=models.URLField(default=None, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='launch',
name='created',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 29, 9, 0, 21, 354221), auto_now_add=True),
preserve_default=True,
),
]
|
{
"content_hash": "1396f62b1ca2fca77db4c4a1397a6ef5",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 117,
"avg_line_length": 27.346153846153847,
"alnum_prop": 0.5907172995780591,
"repo_name": "2gis/badger-api",
"id": "46079f4669e351b0da1c7bbc5bd17a8cbfd5e4a0",
"size": "735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testreport/migrations/0004_auto_20141029_0900.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1597"
},
{
"name": "Python",
"bytes": "241445"
}
],
"symlink_target": ""
}
|
import gettext
import netaddr
import re
import six
import socket
from otopi import base, util
from otopi import plugin as oplugin
from ovirt_setup_lib import dialog
def _(m):
return gettext.dgettext(message=m, domain='ovirt-setup-lib')
@util.export
class Hostname(base.Base):
_DOMAIN_RE = re.compile(
flags=re.VERBOSE,
pattern=r"""
^
[A-Za-z0-9\.\-]+
$
"""
)
_INTERFACE_RE_STR = r'(?P<interface>\w+([-.]\w+)*(\.\w+)?)'
_INTERFACE_RE = re.compile(
pattern=_INTERFACE_RE_STR,
)
_IP_INTERFACE_RE = re.compile(
flags=re.VERBOSE,
pattern='{pref}{iface}{suff}'.format(
pref=r"""
^
\d+
:
\s+
""",
iface=_INTERFACE_RE_STR,
suff=r"""
(@\w+)?
:
\s+
<(?P<options>[^>]+)
.*
""",
),
)
_ADDRESS_RE_STR = '(?P<address>[0-9a-fA-F:.]+)'
_IP_ADDRESS_RE = re.compile(
flags=re.VERBOSE,
pattern='{pref}{addr}{suff}'.format(
pref=r"""
\s+
inet6?
\s
""",
addr=_ADDRESS_RE_STR,
suff=r"""
/\d{1,3}
.*
$
""",
),
)
_ADDRESS_SCOPE_RE = re.compile(
pattern='{addr}%{scope}'.format(
addr=_ADDRESS_RE_STR,
scope='(?P<scope>.*)', # Can't use _INTERFACE_RE_STR
),
)
_DIG_LOOKUP_RE = re.compile(
flags=re.VERBOSE,
pattern=r"""
^
[\w.-]+
\s+
\d+
\s+
IN
\s+
(A|AAAA|CNAME)
\s+
[\w.-]+
"""
)
_DIG_REVLOOKUP_RE = re.compile(
flags=re.VERBOSE,
pattern=r"""
^
[\w/.-]+\.in-addr\.arpa\.
\s+
\d+
\s+
IN
\s+
PTR
\s+
(?P<answer>[\w/.-]+)
\.
$
"""
)
_REQUIRED_CMD = set(['dig', 'ip'])
def __init__(self, plugin):
super(Hostname, self).__init__()
self._plugin = plugin
context = self._plugin.context
if hasattr(context, 'currentStage'):
current_stage = context.currentStage
else:
msg = _(
'{classname} cannot be initialized out of '
'OTOPI stages'
).format(
classname=type(self).__name__,
)
self.logger.error(msg)
raise RuntimeError(msg)
if current_stage < oplugin.Stages.STAGE_PROGRAMS:
for cmd in self._REQUIRED_CMD:
self.command.detect(cmd)
else:
self.logger.debug(
(
'{classname} initialized only at stage {current_stage} '
'so the detection of the required commands is up to '
'the caller object'
).format(
classname=type(self).__name__,
current_stage=current_stage,
)
)
cmd_to_be_detected = set(self.command.enum())
if not self._REQUIRED_CMD.issubset(cmd_to_be_detected):
                msg = _(
                    'Not all of the required commands have been requested '
                    'for command detection; please instantiate this class '
                    'before STAGE_PROGRAMS or detect them externally'
                )
self.logger.error(msg)
raise RuntimeError(msg)
@property
def plugin(self):
return self._plugin
@property
def command(self):
return self._plugin.command
@property
def dialog(self):
return self._plugin.dialog
@property
def execute(self):
return self._plugin.execute
@property
def environment(self):
return self._plugin.environment
@property
def logger(self):
return self._plugin.logger
@staticmethod
def valid_ip_address(address):
return netaddr.valid_ipv4(address)
def getLocalAddresses(
self,
exclude_loopback=False,
device=None,
with_subnet=False
):
interfaces = {}
addresses = {}
if device:
rc, stdout, stderr = self.execute(
args=(
self.command.get('ip'),
'addr',
'show',
device,
),
)
else:
rc, stdout, stderr = self.execute(
args=(
self.command.get('ip'),
'addr',
),
)
for line in stdout:
interfacematch = self._IP_INTERFACE_RE.match(line)
addressmatch = self._IP_ADDRESS_RE.match(line)
if interfacematch is not None:
iface = interfacematch.group('interface')
interfaces[
iface
] = 'LOOPBACK' in interfacematch.group('options')
elif addressmatch is not None:
addresses.setdefault(
iface,
[]
).append(
addressmatch.group('address')
)
iplist = []
for interface, loopback in six.iteritems(interfaces):
if exclude_loopback and loopback:
pass
else:
iplist.extend(addresses.get(interface, []))
if not with_subnet:
iplist = [i.split('/')[0] for i in iplist]
self.logger.debug('addresses: %s' % iplist)
return set(iplist)
def _dig_reverse_lookup(self, addr):
names = set()
args = [
self.command.get('dig'),
'-x',
addr,
]
rc, stdout, stderr = self.execute(
args=args,
raiseOnError=False
)
if rc == 0:
for line in stdout:
found = self._DIG_REVLOOKUP_RE.search(line)
if found:
names.add(found.group('answer'))
return names
def _validateFQDNresolvability(
self,
fqdn,
system,
dns,
reverse_dns,
local_non_loopback,
not_local,
not_local_text,
v6,
v4,
):
if system:
try:
resolvedAddresses = self.getResolvedAddresses(fqdn, v6, v4)
self.logger.debug(
'{fqdn} resolves to: {addresses}'.format(
fqdn=fqdn,
addresses=resolvedAddresses,
)
)
resolvedAddressesAsString = ' '.join(resolvedAddresses)
except socket.error:
raise RuntimeError(
_('{fqdn} did not resolve into an IP address').format(
fqdn=fqdn,
)
)
if dns:
resolvedByDNS = self.isResolvedByDNS(fqdn)
if not resolvedByDNS:
self.logger.warning(
_(
'Failed to resolve {fqdn} using DNS, '
'it can be resolved only locally'
).format(
fqdn=fqdn,
)
)
elif reverse_dns:
revResolved = False
for address in resolvedAddresses:
for name in self._dig_reverse_lookup(address):
revResolved = name.lower() == fqdn.lower()
if revResolved:
break
if revResolved:
break
if not revResolved:
raise RuntimeError(
_(
'The following addresses: {addresses} did not '
'reverse resolve into {fqdn}'
).format(
addresses=resolvedAddressesAsString,
fqdn=fqdn
)
)
if local_non_loopback:
if not resolvedAddresses.issubset(
self.getLocalAddresses(exclude_loopback=True)
):
raise RuntimeError(
_(
'{fqdn} resolves to {addresses} and '
'not all of them can be mapped '
'to non loopback devices on this host'
).format(
fqdn=fqdn,
addresses=resolvedAddressesAsString
)
)
if not_local:
if resolvedAddresses.intersection(
self.getLocalAddresses(exclude_loopback=False)
):
raise RuntimeError(
_(
'{fqdn} resolves to {addresses}, and at least one of '
'them is locally used on this machine. '
'{not_local_text}'
).format(
fqdn=fqdn,
addresses=resolvedAddressesAsString,
not_local_text=not_local_text,
)
)
def _validateFQDN(self, fqdn):
if self.valid_ip_address(fqdn):
raise RuntimeError(
_(
'{fqdn} is an IP address and not a FQDN. '
'A FQDN is needed to be able to generate '
'certificates correctly.'
).format(
fqdn=fqdn,
)
)
if not fqdn:
raise RuntimeError(
_('Please specify host FQDN')
)
if len(fqdn) > 1000:
raise RuntimeError(
_('FQDN has invalid length')
)
components = fqdn.split('.', 1)
if len(components) == 1:
self.logger.warning(
_('Host name {fqdn} has no domain suffix').format(
fqdn=fqdn,
)
)
if not components[0] or not self._DOMAIN_RE.match(fqdn):
raise RuntimeError(
_('Host name {fqdn} is not valid').format(
fqdn=fqdn,
)
)
def isResolvedByDNS(self, fqdn):
args = [
self.command.get('dig'),
'+noall',
'+answer',
fqdn,
'A',
fqdn,
'AAAA',
]
rc, stdout, stderr = self.execute(
args=args,
raiseOnError=False
)
resolved = False
if rc == 0:
for line in stdout:
if self._DIG_LOOKUP_RE.search(line):
resolved = True
return resolved
def getResolvedAddresses(self, fqdn, v6=False, v4=False):
res = set([])
type = 0
# In case both v6 and v4 variables are set to True (unlikely),
# we prioritize IPv6 over IPv4, same as DNS resolution.
# In case both v6 and v4 variables are set to False (default values),
# a set of all IPv6 and IPv4 addresses will be returned.
if v6:
type = socket.AF_INET6
elif v4:
type = socket.AF_INET
for __, __, __, __, sockaddr in socket.getaddrinfo(
fqdn,
None,
type,
):
address = sockaddr[0]
# python's getaddrinfo seems to simply wrap libc's getaddrinfo,
# which (see getaddrinfo(3) manpage):
# "supports the address%scope-id notation for specifying the IPv6
# scope-ID.". See also: https://tools.ietf.org/html/rfc4007
addr_scope_match = self._ADDRESS_SCOPE_RE.match(address)
if addr_scope_match:
address = addr_scope_match.group('address')
res.add(address)
self.logger.debug('getResolvedAddresses: %s', res)
return res
def getHostnameTester(
self,
validate_syntax=False, # Validate fqdn syntax
system=True, # Local resolver
dns=False, # dig against default name server(s)
reverse_dns=True, # dig -x, check only if dns==True
local_non_loopback=False, # matches a local non-loopback address
not_local=False, # If True, refuse a name of the current machine
not_local_text='', # Additional hint if it fails not_local test
allow_empty=False, # Allow empty responses
v6=False, # If True, validate resolved FQDN for IPv6 only
v4=False, # If True, validate resolved FQDN for IPv4 only
):
def test_hostname(name):
res = ''
try:
if not name:
if not allow_empty:
raise RuntimeError(_('An empty name is not allowed'))
else:
# Do nothing - an empty name is allowed and it's empty,
# no need to validate it further.
pass
else:
# If we got a name, need to validate it, regardless of
# whether it's allowed to be empty or not.
if validate_syntax:
self._validateFQDN(name)
self._validateFQDNresolvability(
name,
system,
dns,
reverse_dns,
local_non_loopback,
not_local,
not_local_text,
v6,
v4,
)
except RuntimeError as e:
res = _('Host name is not valid: {e}').format(e=e)
self.logger.debug('test_hostname exception', exc_info=True)
return res
return test_hostname
def fqdnLocalhostValidation(self, fqdn):
res = ''
if fqdn == 'localhost' or fqdn == 'localhost.localdomain':
res = _("Using the name 'localhost' is not recommended, "
"and may cause problems later on.")
return res
def getHostname(
self,
envkey,
whichhost,
supply_default,
prompttext=None,
dialog_name=None,
extra_tests=None,
**tester_kwarg
):
if prompttext is None:
prompttext = _(
'Host fully qualified DNS name of {whichhost} server'
).format(
whichhost=whichhost,
)
if dialog_name is None:
dialog_name = 'OVESETUP_NETWORK_FQDN_{whichhost}'.format(
whichhost=whichhost.replace(' ', '_'),
)
tests = (
{
'test': self.getHostnameTester(**tester_kwarg),
},
{
'test': self.fqdnLocalhostValidation,
'is_error': False,
'warn_note': 'Are you sure?',
},
)
if extra_tests is not None:
            tests = tests + tuple(extra_tests)
if supply_default:
            note = _('{prompttext} [@DEFAULT@]: ').format(
                prompttext=prompttext,
            )
default = socket.getfqdn()
else:
            note = _('{prompttext}: ').format(prompttext=prompttext)
default = None
return dialog.queryEnvKey(
name=dialog_name,
dialog=self.dialog,
logger=self.logger,
env=self.environment,
key=envkey,
note=note,
prompt=True,
default=default,
tests=tests,
store=(True if envkey else False),
)
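# --- Illustrative, self-contained sanity check of the parsing regexes above.
# The sample lines are fabricated (192.0.2.0/24 is a documentation range and
# host.example.org a placeholder), not captured command output; running this
# module directly requires its own otopi/ovirt dependencies to be importable.
if __name__ == '__main__':
    sample_iface = '2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500'
    sample_addr = '    inet 192.0.2.10/24 brd 192.0.2.255 scope global eth0'
    sample_ptr = '10.2.0.192.in-addr.arpa. 86400 IN PTR host.example.org.'
    m = Hostname._IP_INTERFACE_RE.match(sample_iface)
    print(m.group('interface'), m.group('options'))
    m = Hostname._IP_ADDRESS_RE.match(sample_addr)
    print(m.group('address'))
    m = Hostname._DIG_REVLOOKUP_RE.search(sample_ptr)
    print(m.group('answer'))
    m = Hostname._ADDRESS_SCOPE_RE.match('fe80::1%eth0')
    print(m.group('address'), m.group('scope'))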
# vim: expandtab tabstop=4 shiftwidth=4
|
{
"content_hash": "4acc475e02bf46db436d0fac79abee24",
"timestamp": "",
"source": "github",
"line_count": 539,
"max_line_length": 79,
"avg_line_length": 29.92578849721707,
"alnum_prop": 0.44389336639801613,
"repo_name": "oVirt/ovirt-setup-lib",
"id": "b3d54caea095d71b8f7f4f09c00380a0ee3104af",
"size": "16763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ovirt_setup_lib/hostname.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "707"
},
{
"name": "HTML",
"bytes": "269"
},
{
"name": "M4",
"bytes": "1730"
},
{
"name": "Makefile",
"bytes": "2411"
},
{
"name": "Python",
"bytes": "36856"
},
{
"name": "Shell",
"bytes": "3348"
}
],
"symlink_target": ""
}
|
import unittest
import numpy as np
from lintrain import Trainer
from lintrain import ParallelTrainer
class LinTrainTest(unittest.TestCase):
"""
Must be run from outside of the LinTrain folder, otherwise parallel library fails due to relative paths.
"""
def test_trainer(self):
# generate random data
num_entries = 50
num_features = 10
# seed random number generator for consistency
np.random.seed(1238927)
x = np.random.rand(num_entries, num_features)
y = (30 * x[:, 0]) - (10 * x[:, 2]) + np.random.rand(1, num_entries)
        # create the trainer
t = Trainer(x, y)
# run forward selection
t.run_forward_selection([1], None)
# initial column should still be in there when training
self.assertIn(1, t.column_indices)
# should have at least one more column than initial data
self.assertGreater(len(t.column_indices), 1)
# run forward selection
t.run_forward_selection()
self.assertIn(1, t.column_indices)
# run backward selection
t.run_backward_selection(range(0, num_features), None)
# column 0 should still be in there with 99.9% likelihood
self.assertIn(0, t.column_indices)
        # backward selection should have removed at least one column
self.assertLess(len(t.column_indices), num_features)
def test_parallel_trainer(self):
# generate random data
num_entries = 50
num_features = 10
# seed random number generator for consistency
np.random.seed(1238927)
x = np.random.rand(num_entries, num_features)
y = (30 * x[:, 0]) - (10 * x[:, 2]) + np.random.rand(1, num_entries)
        # create the parallel trainer
t = ParallelTrainer(x, y)
# run forward selection
t.run_forward_selection([1], None)
# initial column should still be in there when training
self.assertIn(1, t.column_indices)
# should have at least one more column than initial data
self.assertGreater(len(t.column_indices), 1)
# run forward selection
t.run_forward_selection()
self.assertIn(1, t.column_indices)
# run backward selection
t.run_backward_selection(range(0, num_features), None)
# column 0 should still be in there with 99.9% likelihood
self.assertIn(0, t.column_indices)
        # backward selection should have removed at least one column
self.assertLess(len(t.column_indices), num_features)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "5e2dab071c003a74e7393b850240e0d7",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 108,
"avg_line_length": 31.132530120481928,
"alnum_prop": 0.6261609907120743,
"repo_name": "nathanntg/lin-train",
"id": "5b11bff8712bad88d6d57aeabe1b39435f1c2f9c",
"size": "2584",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lintrain/tests/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27090"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
}
|
"""
Django settings for app project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h$(m+om*(p!6*d6#*sh!!avl_zxyz8ja_q_!i62d5wi^ec*#&8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gunicorn',
'django_nose',
'opbeat.contrib.django',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'app.urls'
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
STATIC_URL = '/static/'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
OPBEAT = {
"ORGANIZATION_ID": "03e37bf3e1ea4c01a9611fef295b172b",
"APP_ID": "ddb17607d3",
"SECRET_TOKEN": "870956d5b4eaee932c99de5760eadc85cac482e4"
}
|
{
"content_hash": "31b49d56a66a6fd84ce0e94295f86434",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 71,
"avg_line_length": 24.31578947368421,
"alnum_prop": 0.7168831168831169,
"repo_name": "kaeawc/django-auth-example",
"id": "574ddcb2bdc8034fa54fc0fa200d563d2f08679a",
"size": "2333",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25901"
}
],
"symlink_target": ""
}
|
"""
Functionalities for cross-validating words-as-classifiers reference resolution (not yet finished!).
"""
__author__ = "Todd Shore <errantlinguist+github@gmail.com>"
__copyright__ = "Copyright 2017 Todd Shore"
__license__ = "Apache License, Version 2.0"
import csv
import itertools
from collections import namedtuple
from typing import Callable, Iterable, Iterator, Mapping, Optional, Tuple
import pandas as pd
from . import game_utterances
from . import iristk
from . import session_data as sd
CATEGORICAL_VAR_COL_NAMES = (
game_utterances.EventColumn.ENTITY_SHAPE.value, game_utterances.EventColumn.EVENT_SUBMITTER.value)
# NOTE: For some reason, "pandas.get_dummies(..., columns=[col_name_1,...])" works with list objects but not with tuples
CATEGORICAL_DEPENDENT_VAR_COL_NAMES = [game_utterances.EventColumn.ENTITY_SHAPE.value]
assert all(col_name in CATEGORICAL_VAR_COL_NAMES for col_name in CATEGORICAL_DEPENDENT_VAR_COL_NAMES)
RESULTS_FILE_ENCODING = "utf-8"
__RESULTS_FILE_DTYPES = {"Cleaning.DISFLUENCIES": bool, "Cleaning.DUPLICATES": bool, "Cleaning.FILLERS": bool}
CrossValidationDataFrames = namedtuple("CrossValidationDataFrames", ("training", "testing"))
class CachingSessionDataFrameFactory(object):
def __init__(self, session_data_frame_factory: Optional[Callable[[sd.SessionData], pd.DataFrame]] = None):
self.session_data_frame_factory = game_utterances.SessionGameRoundUtteranceSequenceFactory() if session_data_frame_factory is None else session_data_frame_factory
self.cache = {}
def __call__(self, infile: str, session: sd.SessionData) -> pd.DataFrame:
try:
result = self.cache[infile]
except KeyError:
result = self.session_data_frame_factory(session)
result[game_utterances.EventColumn.DYAD_ID.value] = infile
self.cache[infile] = result
return result
class CrossValidationData(object):
def __init__(self, testing_data: Tuple[str, sd.SessionData], training_data: Mapping[str, sd.SessionData]):
self.testing_data = testing_data
self.training_data = training_data
@property
def __key(self):
return self.testing_data, self.training_data
def __eq__(self, other):
return (self is other or (isinstance(other, type(self))
and self.__key == other.__key))
def __hash__(self):
return hash(self.__key)
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return self.__class__.__name__ + str(self.__dict__)
class CrossValidationDataFrameFactory(object):
@staticmethod
def __categoricize_data(training_feature_df: pd.DataFrame, testing_feature_df: pd.DataFrame):
for col_name in CATEGORICAL_VAR_COL_NAMES:
unique_values = tuple(sorted(frozenset(
itertools.chain(training_feature_df[col_name].unique(), testing_feature_df[col_name].unique()))))
training_feature_df[col_name] = pd.Categorical(training_feature_df[col_name], categories=unique_values,
ordered=False)
testing_feature_df[col_name] = pd.Categorical(testing_feature_df[col_name], categories=unique_values,
ordered=False)
	def __init__(self, session_data_frame_factory: Optional[Callable[[str, sd.SessionData], pd.DataFrame]] = None):
self.session_data_frame_factory = CachingSessionDataFrameFactory() if session_data_frame_factory is None else session_data_frame_factory
	def __call__(self, named_session_data: Iterable[Tuple[str, sd.SessionData]]) -> Iterator[CrossValidationDataFrames]:
for testing_session_name, testing_session_data in named_session_data:
training_sessions = dict(
(infile, training_session_data) for (infile, training_session_data) in named_session_data if
testing_session_data != training_session_data)
cross_validation_set = CrossValidationData((testing_session_name, testing_session_data),
training_sessions)
yield self.__create_cross_validation_data_frames(cross_validation_set)
def __create_cross_validation_data_frames(self,
cross_validation_data: CrossValidationData) -> CrossValidationDataFrames:
training_feature_df = pd.concat(self.session_data_frame_factory(infile, session) for (infile, session) in
cross_validation_data.training_data.items())
testing_feature_df = self.session_data_frame_factory(*cross_validation_data.testing_data)
# noinspection PyTypeChecker
self.__categoricize_data(training_feature_df, testing_feature_df)
dummified_training_feature_df = pd.get_dummies(training_feature_df, columns=CATEGORICAL_DEPENDENT_VAR_COL_NAMES)
dummified_testing_feature_df = pd.get_dummies(testing_feature_df, columns=CATEGORICAL_DEPENDENT_VAR_COL_NAMES)
return CrossValidationDataFrames(dummified_training_feature_df, dummified_testing_feature_df)
def read_results_file(inpath: str) -> pd.DataFrame:
return pd.read_csv(inpath, sep=csv.excel_tab.delimiter, dialect=csv.excel_tab, float_precision="round_trip",
encoding=RESULTS_FILE_ENCODING, memory_map=True, parse_dates=["TIME", "EVENT_TIME"],
date_parser=iristk.parse_timestamp,
dtype=__RESULTS_FILE_DTYPES)
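if __name__ == "__main__":
	# Self-contained toy illustration (fabricated frames, not real session
	# data) of the categoricize-then-dummify step above: giving both frames
	# the union of observed categories makes pandas.get_dummies emit the
	# same indicator columns for training and testing.
	training = pd.DataFrame({"SHAPE": ["square", "circle"]})
	testing = pd.DataFrame({"SHAPE": ["triangle"]})
	shared_categories = tuple(sorted(frozenset(
		itertools.chain(training["SHAPE"].unique(), testing["SHAPE"].unique()))))
	training["SHAPE"] = pd.Categorical(training["SHAPE"], categories=shared_categories, ordered=False)
	testing["SHAPE"] = pd.Categorical(testing["SHAPE"], categories=shared_categories, ordered=False)
	print(pd.get_dummies(training, columns=["SHAPE"]).columns.tolist())
	print(pd.get_dummies(testing, columns=["SHAPE"]).columns.tolist())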
|
{
"content_hash": "b7ca176763ac6047fc18fe09ad65dfad",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 164,
"avg_line_length": 45.11711711711712,
"alnum_prop": 0.7434105431309904,
"repo_name": "errantlinguist/tangrams-analysis",
"id": "060d1082e4d609ebbb503e1e297bbf40969c868a",
"size": "5008",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tangrams_analysis/cross_validation.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "170843"
},
{
"name": "Python",
"bytes": "90965"
},
{
"name": "R",
"bytes": "43013"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from .models import TestResource, RelatedResource1, RelatedResource2
class NestedFieldsTest(TestCase):
def setUp(self):
self.related_resource_1 = RelatedResource1.objects.create(name="Related-Resource1")
self.nested_related_resource_1_1 = RelatedResource1.objects.create(name="Nested-Related-Resource11")
self.nested_related_resource_1_2 = RelatedResource1.objects.create(name="Nested-Related-Resource12")
self.related_resource_2 = RelatedResource2.objects.create(name="Related-Resource2")
self.related_resource_2.related_resources_1.add(self.nested_related_resource_1_1, self.nested_related_resource_1_2)
self.test_resource_1 = TestResource.objects.create(name="Test-Resource", related_resource_1=self.related_resource_1,
related_resource_2=self.related_resource_2)
def testGetNestedFieldsStatic(self):
resp = self.client.get("/test-resources/")
self.assertEqual(200, resp.status_code, resp.content)
self.assertEqual(1, len(resp.data))
test_resource_data = resp.data[0]
self.assertEqual(4, len(test_resource_data))
self.assertEqual(self.test_resource_1.id, test_resource_data['id'])
self.assertEqual(self.test_resource_1.name, test_resource_data['name'])
self.assertEqual(self.test_resource_1.related_resource_1.id, test_resource_data['related_resource_1'])
related_resource_2_data = test_resource_data['related_resource_2']
self.assertEqual(2, len(related_resource_2_data))
self.assertEqual(self.related_resource_2.name, related_resource_2_data['name'])
nested_related_resources_data = related_resource_2_data['related_resources_1']
self.assertEqual(2, len(nested_related_resources_data))
self.assertEqual(self.nested_related_resource_1_1.id, nested_related_resources_data[0]['id'])
self.assertEqual(self.nested_related_resource_1_1.name, nested_related_resources_data[0]['name'])
self.assertEqual(self.nested_related_resource_1_2.id, nested_related_resources_data[1]['id'])
self.assertEqual(self.nested_related_resource_1_2.name, nested_related_resources_data[1]['name'])
def testGetNestedFieldsDynamic(self):
resp = self.client.get("/test-resources/?fields=related_resource_1.fields(active),related_resource_2.fields("
"related_resources_1.fields(active))")
self.assertEqual(200, resp.status_code, resp.content)
self.assertEqual(1, len(resp.data))
test_resource_data = resp.data[0]
self.assertEqual(3, len(test_resource_data))
self.assertEqual(self.test_resource_1.id, test_resource_data['id'])
related_resource_1_data = test_resource_data['related_resource_1']
self.assertEqual(2, len(related_resource_1_data))
self.assertEqual(self.test_resource_1.related_resource_1.id, related_resource_1_data['id'])
self.assertEqual(self.test_resource_1.related_resource_1.active, related_resource_1_data['active'])
related_resource_2_data = test_resource_data['related_resource_2']
self.assertEqual(2, len(related_resource_2_data))
self.assertEqual(self.related_resource_2.id, related_resource_2_data['id'])
nested_related_resources_data = related_resource_2_data['related_resources_1']
self.assertEqual(2, len(nested_related_resources_data))
self.assertEqual(self.nested_related_resource_1_1.id, nested_related_resources_data[0]['id'])
self.assertEqual(self.nested_related_resource_1_1.active, nested_related_resources_data[0]['active'])
self.assertEqual(self.nested_related_resource_1_2.id, nested_related_resources_data[1]['id'])
self.assertEqual(self.nested_related_resource_1_2.active, nested_related_resources_data[1]['active'])
|
{
"content_hash": "6bf3c1df82c3eb6156d0dfd52e5f7c49",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 124,
"avg_line_length": 70.98181818181818,
"alnum_prop": 0.701844262295082,
"repo_name": "seebass/drf-nested-fields",
"id": "3d34db27aa6b776d472031cd9a9856a7512b8040",
"size": "3904",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/testproject/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18057"
}
],
"symlink_target": ""
}
|
description = '''
Large non-Mersenne prime
Problem 97
The first known prime found to exceed one million digits was discovered in 1999, and is a Mersenne prime of the form 2^6972593−1; it contains exactly 2,098,960 digits. Subsequently other Mersenne primes, of the form 2^p−1, have been found which contain more digits.
However, in 2004 there was found a massive non-Mersenne prime which contains 2,357,207 digits: 28433×2^7830457+1.
Find the last ten digits of this prime number.
'''
x = 28433
for i in range(0, 7830457):
x *= 2
x %= 10**10
x += 1
print(x)
|
{
"content_hash": "0f4051dfee0903915466b095eca24788",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 263,
"avg_line_length": 35.1875,
"alnum_prop": 0.7442273534635879,
"repo_name": "mbuhot/mbuhot-euler-solutions",
"id": "bbac77f4a21d3652e19f81ecad919523eda5b3c1",
"size": "593",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/problem-097.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "58575"
},
{
"name": "Pony",
"bytes": "6146"
},
{
"name": "Python",
"bytes": "135708"
},
{
"name": "Swift",
"bytes": "13417"
}
],
"symlink_target": ""
}
|
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'tensorflow_hmm'
copyright = u'2016, Zach Dwiel'
author = u'Zach Dwiel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'tensorflow_hmmdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'tensorflow_hmm.tex', u'tensorflow\\_hmm Documentation',
u'Zach Dwiel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'tensorflow_hmm', u'tensorflow_hmm Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'tensorflow_hmm', u'tensorflow_hmm Documentation',
author, 'tensorflow_hmm', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
|
{
"content_hash": "c5bdd4c224cd802eb355d00668f0112e",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 79,
"avg_line_length": 32.857142857142854,
"alnum_prop": 0.6978818283166109,
"repo_name": "dwiel/tensorflow_hmm",
"id": "709540cf9049023e734f8e3de606d61ef4e2d15c",
"size": "9397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1004093"
},
{
"name": "Python",
"bytes": "24892"
}
],
"symlink_target": ""
}
|
import importlib
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
def main(args, result_path: Path, scenario_path: Path) -> bool:
error = False
for module in ('seqdiag', 'matplotlib', 'numpy', 'pandas'):
try:
importlib.import_module(module)
except ImportError:
logger.warning("Cannot find {}".format(module))
error = True
try:
import cairo
except ImportError:
try:
import cairocffi
except ImportError:
logger.warning('Cannot find cairo or cairocffi')
error = True
if error:
return error
try:
from .disappearing_node import main
main(args, result_path, scenario_path)
except Exception:
logger.exception('Failed to run scenario')
error = True
return error
|
{
"content_hash": "71157d6dc7e2c4fa258194a3cbb77566",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 63,
"avg_line_length": 24.914285714285715,
"alnum_prop": 0.6009174311926605,
"repo_name": "reisub-de/dmpr-simulator",
"id": "8be95686c0e24ed8640d09d055b388bcdb9a992f",
"size": "872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dmprsim/analyze/check_deps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1513"
},
{
"name": "Python",
"bytes": "74292"
}
],
"symlink_target": ""
}
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class MemoryviewTests(TranspileTestCase):
pass
class BuiltinMemoryviewFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["memoryview"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_list',
'test_None',
'test_NotImplemented',
'test_range',
'test_set',
'test_slice',
'test_str',
'test_tuple',
'test_obj',
]
|
{
"content_hash": "99584d839aa8d04e3ac3df71e5cb1dc6",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 81,
"avg_line_length": 22.2,
"alnum_prop": 0.56006006006006,
"repo_name": "freakboy3742/voc",
"id": "608c5d76b87c869bb9a277ffbc14c78dac47de91",
"size": "666",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/builtins/test_memoryview.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "1016347"
},
{
"name": "Python",
"bytes": "1167243"
}
],
"symlink_target": ""
}
|
from traits.api import HasTraits, List, Str, Instance
from apptools.preferences.api import PreferencesHelper
###############################################################################
# `PreferencesMirror` class.
###############################################################################
class PreferencesMirror(HasTraits):
"""
This class mirrors preferences from a PreferencesHelper such that
    users can read and change them, but nothing is saved to disk until
    the user asks for an explicit save.
"""
# The preferences we mirror.
preferences = Instance(PreferencesHelper)
# Private trait to store names of traits.
_trait_names = List(Str)
######################################################################
# Public interface.
######################################################################
def save(self):
"""Updates the actual preferences and thereby persists them to
disk.
"""
for name in self._trait_names:
setattr(self.preferences, name, getattr(self, name))
######################################################################
# Private interface.
######################################################################
def _preferences_changed(self):
"""Setup traits of our own based on those of the mayavi
preferences.
"""
trait_names = []
opts = self.preferences
for key, value in opts.traits().items():
if key not in ['trait_added', 'trait_modified',
'preferences', 'preferences_path']:
self.add_trait(key, value)
setattr(self, key, getattr(opts, key))
trait_names.append(key)
opts.on_trait_change(self._update, key)
self._trait_names = trait_names
def _update(self, obj, name, old, new):
setattr(self, name, new)
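# Illustrative usage sketch (`SomeHelper` stands in for any concrete
# PreferencesHelper subclass; it is not part of this module):
#
#   mirror = PreferencesMirror(preferences=SomeHelper())
#   mirror.some_option = 'new value'   # edits stay in memory only
#   mirror.save()                      # now written through to the helper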
|
{
"content_hash": "c67907ac14cb0ca51287cccd1787ae49",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 79,
"avg_line_length": 38.74,
"alnum_prop": 0.47083118224057824,
"repo_name": "dmsurti/mayavi",
"id": "68840163ccc9094931b2806f6e9e9cc095083be0",
"size": "2098",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mayavi/tools/preferences_mirror.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1054"
},
{
"name": "GAP",
"bytes": "34817"
},
{
"name": "Python",
"bytes": "2494055"
},
{
"name": "Shell",
"bytes": "147"
}
],
"symlink_target": ""
}
|
"""Support for the Philips Hue lights."""
import asyncio
from datetime import timedelta
import logging
from time import monotonic
import random
import aiohue
import async_timeout
from homeassistant.components import hue
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH,
ATTR_TRANSITION, ATTR_HS_COLOR, EFFECT_COLORLOOP, EFFECT_RANDOM,
FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_COLOR, SUPPORT_TRANSITION,
Light)
from homeassistant.util import color
SCAN_INTERVAL = timedelta(seconds=5)
_LOGGER = logging.getLogger(__name__)
SUPPORT_HUE_ON_OFF = (SUPPORT_FLASH | SUPPORT_TRANSITION)
SUPPORT_HUE_DIMMABLE = (SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS)
SUPPORT_HUE_COLOR_TEMP = (SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP)
SUPPORT_HUE_COLOR = (SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR)
SUPPORT_HUE_EXTENDED = (SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR)
SUPPORT_HUE = {
'Extended color light': SUPPORT_HUE_EXTENDED,
'Color light': SUPPORT_HUE_COLOR,
'Dimmable light': SUPPORT_HUE_DIMMABLE,
'On/Off plug-in unit': SUPPORT_HUE_ON_OFF,
'Color temperature light': SUPPORT_HUE_COLOR_TEMP,
}
ATTR_IS_HUE_GROUP = 'is_hue_group'
GAMUT_TYPE_UNAVAILABLE = 'None'
# Minimum Hue Bridge API version to support groups
# 1.4.0 introduced extended group info
# 1.12 introduced the state object for groups
# 1.13 introduced "any_on" to group state objects
GROUP_MIN_API_VERSION = (1, 13, 0)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up Hue lights.
Can only be called when a user accidentally mentions hue platform in their
config. But even in that case it would have been ignored.
"""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Hue lights from a config entry."""
bridge = hass.data[hue.DOMAIN][config_entry.data['host']]
cur_lights = {}
cur_groups = {}
api_version = tuple(
int(v) for v in bridge.api.config.apiversion.split('.'))
allow_groups = bridge.allow_groups
if allow_groups and api_version < GROUP_MIN_API_VERSION:
_LOGGER.warning('Please update your Hue bridge to support groups')
allow_groups = False
# Hue updates all lights via a single API call.
#
# If we call a service to update 2 lights, we only want the API to be
# called once.
#
# The throttle decorator will return right away if a call is currently
# in progress. This means that if we are updating 2 lights, the first one
# is in the update method, the second one will skip it and assume the
    # update went through and updates its data, not good!
#
# The current mechanism will make sure that all lights will wait till
# the update call is done before writing their data to the state machine.
#
# An alternative approach would be to disable automatic polling by Home
    # Assistant and take control ourselves. This works great for polling, as
    # one update call then refreshes all entities at once. However it gets
# tricky from inside async_turn_on and async_turn_off.
#
# If automatic polling is enabled, Home Assistant will call the entity
# update method after it is done calling all the services. This means that
# when we update, we know all commands have been processed. If we trigger
# the update from inside async_turn_on, the update will not capture the
# changes to the second entity until the next polling update because the
# throttle decorator will prevent the call.
progress = None
light_progress = set()
group_progress = set()
async def request_update(is_group, object_id):
"""Request an update.
We will only make 1 request to the server for updating at a time. If a
request is in progress, we will join the request that is in progress.
This approach is possible because should_poll=True. That means that
Home Assistant will ask lights for updates during a polling cycle or
after it has called a service.
We keep track of the lights that are waiting for the request to finish.
When new data comes in, we'll trigger an update for all non-waiting
lights. This covers the case where a service is called to enable 2
lights but in the meanwhile some other light has changed too.
"""
nonlocal progress
progress_set = group_progress if is_group else light_progress
progress_set.add(object_id)
if progress is not None:
return await progress
progress = asyncio.ensure_future(update_bridge())
result = await progress
progress = None
light_progress.clear()
group_progress.clear()
return result
async def update_bridge():
"""Update the values of the bridge.
Will update lights and, if enabled, groups from the bridge.
"""
tasks = []
tasks.append(async_update_items(
hass, bridge, async_add_entities, request_update,
False, cur_lights, light_progress
))
if allow_groups:
tasks.append(async_update_items(
hass, bridge, async_add_entities, request_update,
True, cur_groups, group_progress
))
await asyncio.wait(tasks)
await update_bridge()
async def async_update_items(hass, bridge, async_add_entities,
request_bridge_update, is_group, current,
progress_waiting):
"""Update either groups or lights from the bridge."""
if is_group:
api_type = 'group'
api = bridge.api.groups
else:
api_type = 'light'
api = bridge.api.lights
try:
start = monotonic()
with async_timeout.timeout(4):
await api.update()
except (asyncio.TimeoutError, aiohue.AiohueException) as err:
_LOGGER.debug('Failed to fetch %s: %s', api_type, err)
if not bridge.available:
return
_LOGGER.error('Unable to reach bridge %s (%s)', bridge.host, err)
bridge.available = False
for light_id, light in current.items():
if light_id not in progress_waiting:
light.async_schedule_update_ha_state()
return
finally:
_LOGGER.debug('Finished %s request in %.3f seconds',
api_type, monotonic() - start)
if not bridge.available:
_LOGGER.info('Reconnected to bridge %s', bridge.host)
bridge.available = True
new_lights = []
for item_id in api:
if item_id not in current:
current[item_id] = HueLight(
api[item_id], request_bridge_update, bridge, is_group)
new_lights.append(current[item_id])
elif item_id not in progress_waiting:
current[item_id].async_schedule_update_ha_state()
if new_lights:
async_add_entities(new_lights)
class HueLight(Light):
"""Representation of a Hue light."""
def __init__(self, light, request_bridge_update, bridge, is_group=False):
"""Initialize the light."""
self.light = light
self.async_request_bridge_update = request_bridge_update
self.bridge = bridge
self.is_group = is_group
if is_group:
self.is_osram = False
self.is_philips = False
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
else:
self.is_osram = light.manufacturername == 'OSRAM'
self.is_philips = light.manufacturername == 'Philips'
self.gamut_typ = self.light.colorgamuttype
self.gamut = self.light.colorgamut
_LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
if self.light.swupdatestate == "readytoinstall":
err = (
"Please check for software updates of the %s "
"bulb in the Philips Hue App."
)
_LOGGER.warning(err, self.name)
if self.gamut:
if not color.check_valid_gamut(self.gamut):
err = (
"Color gamut of %s: %s, not valid, "
"setting gamut to None."
)
_LOGGER.warning(err, self.name, str(self.gamut))
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
@property
def unique_id(self):
"""Return the ID of this Hue light."""
return self.light.uniqueid
@property
def name(self):
"""Return the name of the Hue light."""
return self.light.name
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if self.is_group:
return self.light.action.get('bri')
return self.light.state.get('bri')
@property
def _color_mode(self):
"""Return the hue color mode."""
if self.is_group:
return self.light.action.get('colormode')
return self.light.state.get('colormode')
@property
def hs_color(self):
"""Return the hs color value."""
mode = self._color_mode
source = self.light.action if self.is_group else self.light.state
if mode in ('xy', 'hs') and 'xy' in source:
return color.color_xy_to_hs(*source['xy'], self.gamut)
return None
@property
def color_temp(self):
"""Return the CT color value."""
# Don't return color temperature unless in color temperature mode
if self._color_mode != "ct":
return None
if self.is_group:
return self.light.action.get('ct')
return self.light.state.get('ct')
@property
def is_on(self):
"""Return true if device is on."""
if self.is_group:
return self.light.state['any_on']
return self.light.state['on']
@property
def available(self):
"""Return if light is available."""
return self.bridge.available and (self.is_group or
self.bridge.allow_unreachable or
self.light.state['reachable'])
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_HUE.get(self.light.type, SUPPORT_HUE_EXTENDED)
@property
def effect(self):
"""Return the current effect."""
return self.light.state.get('effect', None)
@property
def effect_list(self):
"""Return the list of supported effects."""
return [EFFECT_COLORLOOP, EFFECT_RANDOM]
@property
def device_info(self):
"""Return the device info."""
if self.light.type in ('LightGroup', 'Room',
'Luminaire', 'LightSource'):
return None
return {
'identifiers': {
(hue.DOMAIN, self.unique_id)
},
'name': self.name,
'manufacturer': self.light.manufacturername,
# productname added in Hue Bridge API 1.24
# (published 03/05/2018)
'model': self.light.productname or self.light.modelid,
# Not yet exposed as properties in aiohue
'sw_version': self.light.raw['swversion'],
'via_hub': (hue.DOMAIN, self.bridge.api.config.bridgeid),
}
async def async_turn_on(self, **kwargs):
"""Turn the specified or all lights on."""
command = {'on': True}
if ATTR_TRANSITION in kwargs:
command['transitiontime'] = int(kwargs[ATTR_TRANSITION] * 10)
if ATTR_HS_COLOR in kwargs:
if self.is_osram:
command['hue'] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
command['sat'] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
else:
# Philips hue bulb models respond differently to hue/sat
# requests, so we convert to XY first to ensure a consistent
# color.
xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR],
self.gamut)
command['xy'] = xy_color
elif ATTR_COLOR_TEMP in kwargs:
temp = kwargs[ATTR_COLOR_TEMP]
command['ct'] = max(self.min_mireds, min(temp, self.max_mireds))
if ATTR_BRIGHTNESS in kwargs:
command['bri'] = kwargs[ATTR_BRIGHTNESS]
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command['alert'] = 'lselect'
del command['on']
elif flash == FLASH_SHORT:
command['alert'] = 'select'
del command['on']
else:
command['alert'] = 'none'
effect = kwargs.get(ATTR_EFFECT)
if effect == EFFECT_COLORLOOP:
command['effect'] = 'colorloop'
elif effect == EFFECT_RANDOM:
command['hue'] = random.randrange(0, 65535)
command['sat'] = random.randrange(150, 254)
elif self.is_philips:
command['effect'] = 'none'
if self.is_group:
await self.light.set_action(**command)
else:
await self.light.set_state(**command)
async def async_turn_off(self, **kwargs):
"""Turn the specified or all lights off."""
command = {'on': False}
if ATTR_TRANSITION in kwargs:
command['transitiontime'] = int(kwargs[ATTR_TRANSITION] * 10)
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command['alert'] = 'lselect'
del command['on']
elif flash == FLASH_SHORT:
command['alert'] = 'select'
del command['on']
else:
command['alert'] = 'none'
if self.is_group:
await self.light.set_action(**command)
else:
await self.light.set_state(**command)
async def async_update(self):
"""Synchronize state with bridge."""
await self.async_request_bridge_update(self.is_group, self.light.id)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
if self.is_group:
attributes[ATTR_IS_HUE_GROUP] = self.is_group
return attributes
|
{
"content_hash": "65cc5c5c1359ca76213548af126cd8fa",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 79,
"avg_line_length": 34.60900473933649,
"alnum_prop": 0.5982197877439233,
"repo_name": "DavidLP/home-assistant",
"id": "a79b0e3ee23a586c86cb2314b454c8624b346df6",
"size": "14605",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/hue/light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "Python",
"bytes": "15309293"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from django.core.urlresolvers import resolve
from django.template.loader import render_to_string
from lists.views import home_page
from django.http import HttpRequest
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('home.html')
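        # Hedged note: the full-HTML comparison below is left disabled,
        # presumably because render_to_string() here lacks request context
        # (e.g. a CSRF token) and would not match the response byte-for-byte.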
# self.assertEqual(response.content.decode(), expected_html)
def test_home_page_can_save_a_POST_request(self):
request = HttpRequest()
request.method = 'POST'
request.POST['item_text'] = 'A new list item'
response = home_page(request)
self.assertIn('A new list item', response.content.decode())
expected_html = render_to_string(
'home.html',
{'new_item_text': 'A new list item'}
)
# self.assertEqual(response.content.decode(), expected_html)
|
{
"content_hash": "7de75c12bbcd6c2ddcfaa217e5954beb",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 68,
"avg_line_length": 30.555555555555557,
"alnum_prop": 0.6509090909090909,
"repo_name": "liulei2015/virtualRobot",
"id": "c136407bede8cf213aa666d4b7e47fcdc2c88ae5",
"size": "1100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myapp/lists/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "401"
},
{
"name": "Python",
"bytes": "8015"
}
],
"symlink_target": ""
}
|
"""
Convenience class for building sequential deep networks.
"""
from __future__ import division
from __future__ import unicode_literals
import warnings
import tensorflow as tf
from deepchem.models.tensorgraph.tensor_graph import TensorGraph
from deepchem.models.tensorgraph.layers import Feature
from deepchem.models.tensorgraph.layers import Label
from deepchem.models.tensorgraph.layers import SoftMaxCrossEntropy
from deepchem.models.tensorgraph.layers import ReduceMean
from deepchem.models.tensorgraph.layers import ReduceSquareDifference
class Sequential(TensorGraph):
"""Sequential models are linear stacks of layers.
  Analogous to the Sequential model from Keras; allows for less verbose
  construction of simple deep learning models.
Example
-------
>>> import deepchem as dc
>>> import numpy as np
>>> from deepchem.models.tensorgraph import layers
>>> # Define Data
>>> X = np.random.rand(20, 2)
>>> y = [[0, 1] for x in range(20)]
>>> dataset = dc.data.NumpyDataset(X, y)
>>> model = dc.models.Sequential(loss='binary_crossentropy', learning_rate=0.01)
>>> model.add(layers.Dense(out_channels=2))
>>> model.add(layers.SoftMax())
Parameters
----------
loss: string
the loss function to use. Supported values are 'binary_crossentropy' and 'mse'.
"""
def __init__(self, loss, **kwargs):
"""Initializes a sequential model
"""
self._loss_function = loss
self.num_layers = 0
self._prev_layer = None
if "use_queue" in kwargs:
if kwargs["use_queue"]:
raise ValueError("Sequential doesn't support queues.")
kwargs["use_queue"] = False
self._layer_list = []
self._built = False
super(Sequential, self).__init__(**kwargs)
def add(self, layer):
"""Adds a new layer to model.
    Parameters
    ----------
    layer: Layer
      The layer to append to this model.
"""
self._layer_list.append(layer)
def fit(self, dataset, **kwargs):
"""Fits on the specified dataset.
If called for the first time, constructs the TensorFlow graph for this
model. Fits this graph on the specified dataset according to the specified
loss.
Parameters
----------
dataset: dc.data.Dataset
Dataset with data
"""
X_shape, y_shape, _, _ = dataset.get_shape()
self._create_graph((None,) + X_shape[1:], (None,) + y_shape[1:])
super(Sequential, self).fit(dataset, **kwargs)
def _create_graph(self, feature_shape, label_shape):
"""This is called to create the full TensorGraph from the added layers."""
if self.built:
return # The graph has already been created.
# Add in features
features = Feature(shape=feature_shape)
# Add in labels
labels = Label(shape=label_shape)
# Add in all layers
prev_layer = features
if len(self._layer_list) == 0:
raise ValueError("No layers have been added to model.")
for ind, layer in enumerate(self._layer_list):
if len(layer.in_layers) > 1:
raise ValueError("Cannot specify more than one "
"in_layer for Sequential.")
layer.in_layers += [prev_layer]
prev_layer = layer
# The last layer is the output of the model
self.outputs.append(prev_layer)
if self._loss_function == "binary_crossentropy":
smce = SoftMaxCrossEntropy(in_layers=[labels, prev_layer])
self.set_loss(ReduceMean(in_layers=[smce]))
elif self._loss_function == "mse":
mse = ReduceSquareDifference(in_layers=[prev_layer, labels])
self.set_loss(mse)
else:
# TODO(rbharath): Add in support for additional
# losses.
raise ValueError("Unsupported loss.")
self.build()
def make_estimator(self,
feature_columns,
weight_column=None,
metrics={},
model_dir=None,
config=None):
self._create_graph((None,) + feature_columns[0].shape, None)
return super(Sequential, self).make_estimator(
feature_columns,
weight_column=weight_column,
metrics=metrics,
model_dir=model_dir,
config=config)
def restore(self, checkpoint=None):
"""Not currently supported.
"""
# TODO(rbharath): The TensorGraph can't be built until
# fit is called since the shapes of features/labels
# not specified. Need to figure out a good restoration
# method for this use case.
raise ValueError("Restore is not yet supported " "for sequential models.")
|
{
"content_hash": "968b06006395a91d22580a2c4e6a967f",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 84,
"avg_line_length": 32.410071942446045,
"alnum_prop": 0.6526082130965594,
"repo_name": "Agent007/deepchem",
"id": "75ac1bb0c9a6940fc64d3134e689d14041e4c89e",
"size": "4505",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "deepchem/models/tensorgraph/sequential.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16453"
},
{
"name": "HTML",
"bytes": "20618"
},
{
"name": "Jupyter Notebook",
"bytes": "59756"
},
{
"name": "Python",
"bytes": "2129306"
},
{
"name": "Shell",
"bytes": "11976"
}
],
"symlink_target": ""
}
|
import pytest
import pymysql
from mycli.main import format_output
from os import getenv
# TODO: should this somehow be divined from the environment?
USER, HOST = 'root', 'localhost'
PASSWORD = getenv('PASSWORD')
def db_connection(dbname=None):
conn = pymysql.connect(user=USER, host=HOST, database=dbname, password=PASSWORD)
conn.autocommit = True
return conn
try:
db_connection()
CAN_CONNECT_TO_DB = True
except Exception:
CAN_CONNECT_TO_DB = False
dbtest = pytest.mark.skipif(
not CAN_CONNECT_TO_DB,
reason="Need a mysql instance at localhost accessible by user 'root'")
def create_db(dbname):
    # Use the requested database name rather than a hard-coded one.
    with db_connection().cursor() as cur:
        try:
            cur.execute('DROP DATABASE IF EXISTS %s' % dbname)
            cur.execute('CREATE DATABASE %s' % dbname)
        except Exception:
            pass
def run(executor, sql, join=False):
" Return string output for the sql to be run "
result = []
for title, rows, headers, status in executor.run(sql):
result.extend(format_output(title, rows, headers, status, 'psql'))
if join:
result = '\n'.join(result)
return result
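if __name__ == '__main__':
    # Hedged sketch (not part of the test suite): run() only needs an object
    # whose .run(sql) yields (title, rows, headers, status) tuples, so a stub
    # executor is enough to see the psql-style formatting.
    class StubExecutor(object):
        def run(self, sql):
            yield (None, [(1,)], ['n'], 'SELECT 1')
    print(run(StubExecutor(), 'select 1', join=True))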
|
{
"content_hash": "e2b9e62f3677352393a0d712fa1911e5",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 84,
"avg_line_length": 28.35,
"alnum_prop": 0.6596119929453262,
"repo_name": "steverobbins/mycli",
"id": "484ba505a3f072dc4edf0ee38413625ce61cbe68",
"size": "1134",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "163187"
}
],
"symlink_target": ""
}
|
import os
import stat
import sys
import tempfile
import unittest
from ds3.ds3 import *
from ds3.libds3 import LibDs3JobStatus
bucketName = "python_test_bucket"
resources = ["beowulf.txt", "sherlock_holmes.txt", "tale_of_two_cities.txt", "ulysses.txt"]
unicodeResources = [unicode(filename) for filename in resources]
def pathForResource(resourceName):
encoding = sys.getfilesystemencoding()
currentPath = os.path.dirname(unicode(__file__, encoding))
return os.path.join(currentPath, "resources", resourceName)
def populateTestData(client, bucketName, resourceList = None, prefix = "", metadata = None):
if not resourceList:
resourceList = resources
def getSize(fileName):
size = os.stat(pathForResource(fileName)).st_size
return (prefix + fileName, size)
client.putBucket(bucketName)
    paths = {prefix + fileName: pathForResource(fileName) for fileName in resourceList}
fileList = map(getSize, resourceList)
bulkResult = client.putBulk(bucketName, fileList)
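    # Bulk puts are chunked: allocate each chunk on the server, then upload
    # every object in it at its assigned offset within the job.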
for chunk in bulkResult.chunks:
allocateChunk = client.allocateChunk(chunk.chunkId)
for obj in allocateChunk.chunk.objects:
            client.putObject(bucketName, obj.name, obj.offset, obj.length, bulkResult.jobId, paths[obj.name], metadata)
return fileList
def clearBucket(client, bucketName):
bucketContents = client.getBucket(bucketName)
for obj in bucketContents.objects:
client.deleteObject(bucketName, obj.name)
client.deleteBucket(bucketName)
def statusCodeList(status):
return [Ds3Error, lambda obj: obj.statusCode, status]
def typeErrorList(badType):
return [TypeError, str, "expected instance of type basestring, got instance of type " + type(badType).__name__]
def reasonErrorList(reason):
return [Ds3Error, str, reason]
class Ds3TestCase(unittest.TestCase):
def setUp(self):
self.client = createClientFromEnv()
def tearDown(self):
try:
clearBucket(self.client, bucketName)
except Ds3Error as e:
pass
def checkBadInputs(self, testFunction, inputs, second_arg_dict = None):
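        # Each inputs entry maps a bad value to an [ExceptionType, extractor,
        # expected] triple (see statusCodeList and friends). When
        # second_arg_dict is given, the two-argument call form is exercised,
        # with the second argument's triple taking precedence when set.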
for test_input, status in inputs.items():
if second_arg_dict:
for arg, second_status in second_arg_dict.items():
if second_status:
try:
testFunction(test_input, arg)
except second_status[0] as e:
self.assertEqual(second_status[1](e), second_status[2])
else:
try:
testFunction(test_input, arg)
except status[0] as e:
self.assertEqual(status[1](e), status[2])
else:
try:
testFunction(test_input)
except status[0] as e:
self.assertEqual(status[1](e), status[2])
class BucketTestCase(Ds3TestCase):
def testPutBucket(self):
"""tests putBucket"""
self.client.putBucket(bucketName)
bucketSet = frozenset(map(lambda service: service.name, self.client.getService()))
self.assertTrue(bucketName in bucketSet)
def testPutBucketUnicode(self):
"""tests putBucket"""
self.client.putBucket(unicode(bucketName))
bucketSet = frozenset(map(lambda service: service.name, self.client.getService()))
self.assertTrue(bucketName in bucketSet)
def testPutBucketBadInput(self):
"""tests putBucket: bad input to function"""
self.client.putBucket(bucketName)
badBuckets = {"": statusCodeList(400), bucketName: statusCodeList(409), 1234: typeErrorList(1234), None:typeErrorList(None)}
self.checkBadInputs(self.client.putBucket, badBuckets)
def testDeleteEmptyBucket(self):
"""tests deleteBucket: deleting an empty bucket"""
self.client.putBucket(bucketName)
self.client.deleteBucket(bucketName)
bucketSet = frozenset(map(lambda service: service.name, self.client.getService()))
self.assertFalse(bucketName in bucketSet)
def testDeleteBucketBadInput(self):
"""tests deleteBucket: bad input to function"""
populateTestData(self.client, bucketName)
badBuckets = {"": statusCodeList(400), bucketName: statusCodeList(409), "not-here": statusCodeList(404), 1234: typeErrorList(1234), None:typeErrorList(None)}
self.checkBadInputs(self.client.deleteBucket, badBuckets)
def testGetEmptyBucket(self):
"""tests getBucket: when bucket is empty"""
self.client.putBucket(bucketName)
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(bucketContents.isTruncated, False)
self.assertEqual(bucketContents.marker, None)
self.assertEqual(bucketContents.delimiter, None)
self.assertEqual(bucketContents.maxKeys, 1000)
self.assertEqual(bucketContents.nextMarker, None)
self.assertEqual(bucketContents.prefix, None)
self.assertEqual(len(bucketContents.commonPrefixes), 0)
self.assertEqual(len(bucketContents.objects), 0)
def testPutBulkUnicode(self):
"""tests getBucket: when bucket has contents"""
fileList = populateTestData(self.client, bucketName, resourceList = unicodeResources)
def testGetFilledBucket(self):
"""tests getBucket: when bucket has contents"""
fileList = populateTestData(self.client, bucketName)
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(bucketContents.isTruncated, False)
self.assertEqual(bucketContents.marker, None)
self.assertEqual(bucketContents.delimiter, None)
self.assertEqual(bucketContents.maxKeys, 1000)
self.assertEqual(bucketContents.nextMarker, None)
self.assertEqual(bucketContents.prefix, None)
self.assertEqual(len(bucketContents.commonPrefixes), 0)
self.assertEqual(len(bucketContents.objects), 4)
returnedFileList = map(lambda obj: (obj.name, obj.size), bucketContents.objects)
self.assertEqual(returnedFileList, fileList)
def testGetBucketBadInput(self):
"""tests getBucket: bad input to function"""
badBuckets = {"": reasonErrorList("Reason: The bucket name parameter is required."), "not-here": statusCodeList(404), 1234: typeErrorList(1234), None:typeErrorList(None)}
self.checkBadInputs(self.client.getBucket, badBuckets)
def testPrefix(self):
"""tests getBucket: prefix parameter"""
populateTestData(self.client, bucketName)
bucketContents = self.client.getBucket(bucketName, prefix = "beo")
self.assertEqual(len(bucketContents.objects), 1)
def testPagination(self):
"""tests getBucket: maxKeys parameter, getBucket: nextMarker parameter"""
fileList = []
for i in xrange(0, 15):
fileList.append(("file" + str(i), 0))
self.client.putBucket(bucketName)
self.client.putBulk(bucketName, fileList)
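        # Keys come back in lexicographic order (file0, file1, file10, ...,
        # file14, file2, ...), which is what the name-slicing assertions
        # below rely on.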
bucketResult = self.client.getBucket(bucketName, maxKeys = 5)
self.assertEqual(len(bucketResult.objects), 5)
self.assertTrue(bucketResult.nextMarker != None)
self.assertEqual(bucketResult.objects[4].name[4:6], "12")
bucketResult = self.client.getBucket(bucketName, maxKeys = 5, nextMarker = bucketResult.nextMarker)
self.assertEqual(len(bucketResult.objects), 5)
self.assertTrue(bucketResult.nextMarker != None)
self.assertEqual(bucketResult.objects[4].name[4], "4")
bucketResult = self.client.getBucket(bucketName, maxKeys = 5, nextMarker = bucketResult.nextMarker)
self.assertEqual(len(bucketResult.objects), 5)
self.assertTrue(bucketResult.nextMarker == None)
self.assertEqual(bucketResult.objects[4].name[4], "9")
def testDelimiter(self):
"""tests getBucket: delimiter parameter"""
fileList = []
for i in xrange(0, 10):
fileList.append(("dir/file" + str(i), 0))
for i in xrange(0, 10):
fileList.append(("file" + str(i), 0))
self.client.putBucket(bucketName)
self.client.putBulk(bucketName, fileList)
bucketResult = self.client.getBucket(bucketName, delimiter = "/")
self.assertEqual(len(bucketResult.objects), 10)
self.assertEqual(len(bucketResult.commonPrefixes), 1)
self.assertEqual(bucketResult.commonPrefixes[0], "dir/")
def testGetService(self):
"""tests getService"""
servicesBefore = map(lambda service: service.name, frozenset(self.client.getService()))
self.assertFalse(bucketName in servicesBefore)
self.client.putBucket(bucketName)
servicesAfter = map(lambda service: service.name, frozenset(self.client.getService()))
self.assertTrue(bucketName in servicesAfter)
def testHeadBucket(self):
self.client.putBucket(bucketName)
self.client.headBucket(bucketName)
def testHeadBucketBadInput(self):
badBuckets = {"": statusCodeList(400), "not-here": statusCodeList(404), 1234: typeErrorList(1234), None:typeErrorList(None)}
self.checkBadInputs(self.client.headBucket, badBuckets)
class JobTestCase(Ds3TestCase):
def testGetJobs(self):
populateTestData(self.client, bucketName)
bucketContents = self.client.getBucket(bucketName)
bulkGetResult = self.client.getBulk(bucketName, map(lambda obj: obj.name, bucketContents.objects))
result = map(lambda obj: obj.jobId, self.client.getJobs())
self.assertTrue(bulkGetResult.jobId in result)
self.client.deleteJob(bulkGetResult.jobId)
result = map(lambda obj: obj.jobId, self.client.getJobs())
self.assertFalse(bulkGetResult.jobId in result)
class ObjectTestCase(Ds3TestCase):
def validateSearchObjects(self, objects, resourceList = resources, objType = "DATA"):
self.assertEqual(len(objects), len(resourceList))
def getSize(fileName):
size = os.stat(pathForResource(fileName)).st_size
return (fileName, size)
fileList = map(getSize, resourceList)
if len(objects)>0:
self.assertEqual(len(set(map(lambda obj: obj.bucketId, objects))), 1)
for index in xrange(0, len(objects)):
self.assertEqual(objects[index].name, fileList[index][0])
# charlesh: in BP 1.2, size returns 0 (will be fixed in 2.4)
# self.assertEqual(objects[index].size, fileList[index][1])
self.assertEqual(objects[index].type, objType)
self.assertEqual(objects[index].version, "1")
def testDeleteObject(self):
"""tests deleteObject: when object exists"""
populateTestData(self.client, bucketName, resourceList = ["beowulf.txt"])
self.client.deleteObject(bucketName, "beowulf.txt")
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(len(bucketContents.objects), 0)
def testDeleteObjectUnicode(self):
"""tests deleteObject: unicode parameter"""
populateTestData(self.client, bucketName, resourceList = ["beowulf.txt"])
self.client.deleteObject(bucketName, unicode("beowulf.txt"))
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(len(bucketContents.objects), 0)
def testDeleteObjectBadInput(self):
"""tests deleteObject: bad input to function"""
self.client.putBucket(bucketName)
badBuckets = {1234:typeErrorList(1234), None:typeErrorList(None)}
self.checkBadInputs(self.client.deleteObject, badBuckets, second_arg_dict = {"":None, "badFile":None, 1234: None, None:None})
badBuckets = {bucketName: statusCodeList(404), "not-here": statusCodeList(404)}
self.checkBadInputs(self.client.deleteObject, badBuckets, second_arg_dict = {"":None, "badFile":None, 1234: typeErrorList(1234), None:typeErrorList(None)})
badBuckets = {"":reasonErrorList("Reason: The bucket name parameter is required.")}
self.checkBadInputs(self.client.deleteObject, badBuckets, second_arg_dict = {"badFile":None})
def testDeleteObjects(self):
"""tests deleteObjects"""
fileList = populateTestData(self.client, bucketName)
deletedResponse = self.client.deleteObjects(bucketName, map(lambda obj: obj[0], fileList))
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(len(bucketContents.objects), 0)
def testDeleteObjectsUnicode(self):
"""tests deleteObjects: unicode parameter"""
fileList = populateTestData(self.client, bucketName)
deletedResponse = self.client.deleteObjects(bucketName, map(lambda obj: unicode(obj[0]), fileList))
bucketContents = self.client.getBucket(bucketName)
self.assertEqual(len(bucketContents.objects), 0)
def testDeleteObjectsEmpty(self):
"""tests deleteObjects: when list passed is empty"""
self.client.putBucket(bucketName)
try:
self.client.deleteObjects(bucketName, [])
except Ds3Error as e:
self.assertEqual(e.reason, "The bulk command requires a list of objects to process")
def testDeleteBadObjects(self):
"""tests deleteObjects: when bucket is empty"""
self.client.putBucket(bucketName)
self.client.deleteObjects(bucketName, ["not-here", "also-not-here"])
def testDeleteObjectsBadBucket(self):
"""tests deleteObjects: when bucket doesn't exist"""
try:
self.client.deleteObjects(bucketName, ["not-here", "also-not-here"])
except Ds3Error as e:
self.assertEqual(e.statusCode, 404)
def testGetPhysicalPlacement(self):
"""tests getPhysicalPlacement: with an empty file"""
populateTestData(self.client, bucketName)
self.assertEqual(len(self.client.getPhysicalPlacement(bucketName, ["bogus.txt"])), 0)
def testGetPhysicalPlacementBadInput(self):
"""tests getPhysicalPlacement: with non-existent bucket"""
try:
self.client.getPhysicalPlacement(bucketName, ["bogus.txt"])
except Ds3Error as e:
self.assertEqual(e.statusCode, 404)
def testGetPhysicalPlacementFull(self):
"""tests getPhysicalPlacement: with an empty file"""
populateTestData(self.client, bucketName)
self.assertEqual(len(self.client.getPhysicalPlacement(bucketName, ["bogus.txt"], fullDetails = True)), 0)
def testGetPhysicalPlacementFullBadInput(self):
"""tests getPhysicalPlacement: with non-existent bucket"""
try:
self.client.getPhysicalPlacement(bucketName, ["bogus.txt"], fullDetails = True)
except Ds3Error as e:
self.assertEqual(e.statusCode, 404)
def testDeleteFolder(self):
"""tests deleteFolder"""
populateTestData(self.client, bucketName, prefix = "folder/")
self.client.deleteFolder(bucketName, "folder")
bucketResult = self.client.getBucket(bucketName)
self.assertEqual(len(bucketResult.objects), 0)
def testDeleteFolderBadInput(self):
"""tests deleteFolder"""
self.client.putBucket(bucketName)
badBuckets = {"": statusCodeList(404), "fakeBucket": statusCodeList(404), bucketName: statusCodeList(404)}
self.checkBadInputs(self.client.deleteFolder, badBuckets, second_arg_dict = {"folder":None})
def testGetObjects(self):
populateTestData(self.client, bucketName)
objects = self.client.getObjects()
self.validateSearchObjects(objects, resources)
def testGetObjectsBucketName(self):
populateTestData(self.client, bucketName)
objects = self.client.getObjects(bucketName = bucketName)
self.validateSearchObjects(objects, resources)
def testGetObjectsObjectName(self):
populateTestData(self.client, bucketName)
objects = self.client.getObjects(bucketName = bucketName, name = "beowulf.txt")
self.validateSearchObjects(objects, ["beowulf.txt"])
def testGetObjectsPageParameters(self):
populateTestData(self.client, bucketName)
first_half = self.client.getObjects(bucketName = bucketName, pageLength = 2)
self.assertEqual(len(first_half), 2)
second_half = self.client.getObjects(bucketName = bucketName, pageLength = 2, pageOffset = 2)
self.assertEqual(len(second_half), 2)
self.validateSearchObjects(first_half+second_half, resources)
def testGetObjectsType(self):
populateTestData(self.client, bucketName)
objects = self.client.getObjects(bucketName = bucketName, objType = "DATA")
self.validateSearchObjects(objects, resources)
objects = self.client.getObjects(bucketName = bucketName, objType = "FOLDER")
self.validateSearchObjects(objects, [], objType = "FOLDER")
def testGetObjectsVersion(self):
populateTestData(self.client, bucketName)
objects = self.client.getObjects(bucketName = bucketName, version = 1)
self.validateSearchObjects(objects, resources)
def testGetBulkUnicode(self):
"""tests getObject: unicode parameter"""
populateTestData(self.client, bucketName, resourceList = unicodeResources)
bucketContents = self.client.getBucket(bucketName)
bulkGetResult = self.client.getBulk(bucketName, map(lambda obj: unicode(obj.name), bucketContents.objects))
tempFiles = []
availableChunks = self.client.getAvailableChunks(bulkGetResult.jobId)
for obj in availableChunks.bulkPlan.chunks[0].objects:
newFile = tempfile.mkstemp()
tempFiles.append(newFile)
metadata_resp = self.client.getObject(bucketName, obj.name, obj.offset, bulkGetResult.jobId, newFile[1])
for tempFile in tempFiles:
os.close(tempFile[0])
os.remove(tempFile[1])
#jobStatusResponse = self.client.getJob(bulkGetResult.jobId)
#self.assertEqual(jobStatusResponse.status, LibDs3JobStatus.COMPLETED)
class ObjectMetadataTestCase(Ds3TestCase):
def testHeadObject(self):
"""tests headObject"""
metadata = {"name1":["value1"], "name2":"value2", "name3":("value3")}
metadata_check = {"name1":["value1"], "name2":["value2"], "name3":["value3"]}
populateTestData(self.client, bucketName, resourceList = ["beowulf.txt"], metadata = metadata)
metadata_resp = self.client.headObject(bucketName, "beowulf.txt")
self.assertEqual(metadata_check, metadata_resp)
def testHeadObjectBadInput(self):
"""tests headObject: bad input to function"""
metadata = {"name1":["value1"], "name2":"value2", "name3":("value3")}
populateTestData(self.client, bucketName, resourceList = ["beowulf.txt"], metadata = metadata)
badBuckets = {"fakeBucket": statusCodeList(404), bucketName: statusCodeList(404)}
self.checkBadInputs(self.client.headObject, badBuckets, second_arg_dict = {"":reasonErrorList("Reason: The object name parameter is required."), "badFile":None, None:typeErrorList(None), 1234:typeErrorList(1234)})
badBuckets = {None:typeErrorList(None), 1234:typeErrorList(1234)}
self.checkBadInputs(self.client.headObject, badBuckets, second_arg_dict = {"":None, "badFile":None, None:None, 1234:None})
badBuckets = {"": reasonErrorList("Reason: The bucket name parameter is required.")}
self.checkBadInputs(self.client.headObject, badBuckets, second_arg_dict = {"badFile":None, None:typeErrorList(None), 1234:typeErrorList(1234)})
def testGetBulkWithMetadata(self):
"""tests getObject: metadata parameter, putObject:metadata parameter"""
metadata = {"name1":["value1"], "name2":["value2"], "name3":["value3"]}
populateTestData(self.client, bucketName, resourceList = ["beowulf.txt"], metadata = metadata)
bucketContents = self.client.getBucket(bucketName)
bulkGetResult = self.client.getBulk(bucketName, map(lambda obj: obj.name, bucketContents.objects))
tempFiles = []
availableChunks = self.client.getAvailableChunks(bulkGetResult.jobId)
for obj in availableChunks.bulkPlan.chunks[0].objects:
newFile = tempfile.mkstemp()
tempFiles.append(newFile)
metadata_resp = self.client.getObject(bucketName, obj.name, obj.offset, bulkGetResult.jobId, newFile[1])
for tempFile in tempFiles:
os.close(tempFile[0])
os.remove(tempFile[1])
jobStatusResponse = self.client.getJob(bulkGetResult.jobId)
self.assertEqual(metadata, metadata_resp)
#self.assertEqual(jobStatusResponse.status, LibDs3JobStatus.COMPLETED)
class BasicClientTestCase(Ds3TestCase):
def testGetSystemInformation(self):
result = self.client.getSystemInformation()
self.assertNotEqual(result.apiVersion, None)
self.assertNotEqual(result.serialNumber, None)
def testVerifySystemHealth(self):
result = self.client.verifySystemHealth()
self.assertTrue(result.msRequiredToVerifyDataPlannerHealth >= 0)
|
{
"content_hash": "16fa311ecca32f25bad52524f31e8342",
"timestamp": "",
"source": "github",
"line_count": 537,
"max_line_length": 221,
"avg_line_length": 40.687150837988824,
"alnum_prop": 0.6608082749782599,
"repo_name": "chanchett/ds3_python_sdk",
"id": "6ce55b660064cede30452d7db1b23c8b00b8799b",
"size": "22429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/clientTests.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "70405"
},
{
"name": "Ruby",
"bytes": "2156"
},
{
"name": "Shell",
"bytes": "297"
}
],
"symlink_target": ""
}
|
"""
__create_time__ = '13-10-17'
__author__ = 'Madre'
"""
from django.contrib.auth.models import User
from django.db import models
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
class Topic(models.Model):
name = models.CharField(max_length=60, unique=True)
slug = models.SlugField(max_length=255)
help_text = models.CharField(max_length=255, null=True, blank=True)
description = models.TextField(null=True, blank=True)
thumbnail = models.ImageField(upload_to='topics', null=True, blank=True)
official_website = models.URLField(null=True, blank=True)
class Meta:
ordering = ['name', ]
verbose_name = "主题/题材"
verbose_name_plural = "主题/题材"
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('topic_detail', kwargs={'pk': self.pk})
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
if self.description and not self.help_text:
self.help_text = self.description.replace("\n", " ")[:220]
super(Topic, self).save(*args, **kwargs)
class ResourceType(models.Model):
name = models.CharField(max_length=60, unique=True)
slug = models.SlugField(max_length=255)
help_text = models.CharField(max_length=255, null=True, blank=True)
class Meta:
ordering = ['name', ]
verbose_name = "代码类型"
verbose_name_plural = "代码类型"
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('resource_list', kwargs={'slug': self.slug})
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(ResourceType, self).save(*args, **kwargs)
LEVELS = ['beginner', 'intermediate', 'advanced']
class Resource(models.Model):
title = models.CharField(max_length=255, unique=True)
slug = models.SlugField(max_length=255, blank=True, default='')
url = models.URLField(unique=True)
    help_text = models.CharField(max_length=255, null=True, blank=True, help_text="When the resource type is a document, this field is the URL of the document cover")
description = models.TextField(null=True, blank=True, default='')
resource_type = models.ForeignKey(ResourceType)
level = models.CharField('Difficulty Level', max_length=30, choices=zip(LEVELS, LEVELS))
topics = models.ManyToManyField(Topic)
user = models.ForeignKey(User)
#rating = RatingField(range=5, weight=10, use_cookies=True, allow_delete=True)
createtime = models.DateTimeField(auto_now_add=True, editable=False)
updatetime = models.DateTimeField(auto_now=True, editable=False)
show = models.BooleanField(default=True)
class Meta:
ordering = ['-createtime', ]
verbose_name = "资源"
verbose_name_plural = "资源"
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('resource_detail', kwargs={'pk': self.id})
    def save(self, *args, **kwargs):
        # INFO: only generate a slug when absent, because admins may edit it.
        if not self.slug:
            self.slug = slugify(self.title)
if self.description and not self.help_text:
self.help_text = self.description.replace("\n", " ")[:220]
super(Resource, self).save(*args, **kwargs)
|
{
"content_hash": "cf72341858ab552ef6f87adf127bec2b",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 108,
"avg_line_length": 35.680851063829785,
"alnum_prop": 0.6535480023852117,
"repo_name": "madre/PersonalWeb",
"id": "c06eddc627f504c9e9635750999c1aefed171b29",
"size": "3448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resource/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "79689"
},
{
"name": "Nginx",
"bytes": "1140"
},
{
"name": "Python",
"bytes": "55561"
}
],
"symlink_target": ""
}
|
import datetime
import mock
from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import Permission
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from timepiece.contracts.models import ProjectContract, ContractHour
from timepiece.entries.models import Entry
from timepiece.tests.base import ViewTestMixin
from timepiece.tests import factories
class ContractListTestCase(ViewTestMixin, TestCase):
url_name = 'list_contracts'
perm_names = [('contracts', 'add_projectcontract')]
def setUp(self):
get_perm = lambda ct, n: Permission.objects.get(
content_type__app_label=ct, codename=n)
self.permissions = [get_perm(*perm) for perm in self.perm_names]
self.user = factories.User()
self.user.user_permissions.add(*self.permissions)
self.login_user(self.user)
self.project1 = factories.Project()
self.project2 = factories.Project()
self.projects = [self.project1, self.project2]
def test_permission(self):
"""Permission is required to see this view."""
response = self._get()
self.assertEqual(response.status_code, 200)
def test_no_permission(self):
"""Permission is required to see this view."""
self.user.user_permissions.remove(*self.permissions)
response = self._get()
self.assertEqual(response.status_code, 302)
def test_no_contracts(self):
"""List should return all current contracts."""
ProjectContract.objects.all().delete()
response = self._get()
self.assertEqual(response.status_code, 200)
contracts = response.context['contracts']
self.assertEqual(len(contracts), 0)
def test_one_contract(self):
"""List should return all current contracts."""
correct_contract = factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_CURRENT)
response = self._get()
self.assertEqual(response.status_code, 200)
contracts = response.context['contracts']
self.assertEqual(len(contracts), 1)
self.assertTrue(correct_contract in contracts)
def test_contracts(self):
"""List should return all current contracts."""
correct_contracts = [factories.ProjectContract(projects=self.projects,
status=ProjectContract.STATUS_CURRENT) for i in range(3)]
response = self._get()
self.assertEqual(response.status_code, 200)
contracts = response.context['contracts']
self.assertEqual(len(contracts), 3)
for i in range(3):
self.assertTrue(correct_contracts[i] in contracts)
def test_non_current_contracts(self):
"""List should return all current contracts."""
factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_COMPLETE)
factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_UPCOMING)
response = self._get()
self.assertEqual(response.status_code, 200)
contracts = response.context['contracts']
self.assertEqual(len(contracts), 0)
class ContractViewTestCase(ViewTestMixin, TestCase):
url_name = 'view_contract'
perm_names = [('contracts', 'add_projectcontract')]
@property
def url_args(self):
return (self.contract.pk,)
def setUp(self):
get_perm = lambda ct, n: Permission.objects.get(
content_type__app_label=ct, codename=n)
self.permissions = [get_perm(*perm) for perm in self.perm_names]
self.user = factories.User()
self.user.user_permissions.add(*self.permissions)
self.login_user(self.user)
self.project1 = factories.Project()
self.project2 = factories.Project()
self.projects = [self.project1, self.project2]
self.contract = factories.ProjectContract(projects=self.projects)
def test_permission(self):
"""Permission is required to view a contract."""
response = self._get()
self.assertEqual(response.status_code, 200)
def test_no_permission(self):
"""Permission is required to view a contract."""
self.user.user_permissions.remove(*self.permissions)
response = self._get()
self.assertEqual(response.status_code, 302)
def test_bad_id(self):
response = self._get(url_args=('12345',))
self.assertEqual(response.status_code, 404)
def test_current_contract(self):
contract = factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_CURRENT)
response = self._get(url_args=(contract.pk,))
self.assertEqual(response.status_code, 200)
self.assertEqual(contract, response.context['contract'])
def test_upcoming_contract(self):
contract = factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_UPCOMING)
response = self._get(url_args=(contract.pk,))
self.assertEqual(response.status_code, 200)
self.assertEqual(contract, response.context['contract'])
def test_complete_contract(self):
contract = factories.ProjectContract(
projects=self.projects, status=ProjectContract.STATUS_COMPLETE)
response = self._get(url_args=(contract.pk,))
self.assertEqual(response.status_code, 200)
self.assertEqual(contract, response.context['contract'])
class ContractHourTestCase(TestCase):
def test_defaults(self):
contract_hour = ContractHour()
self.assertEqual(0, contract_hour.hours)
self.assertEqual(ContractHour.PENDING_STATUS, contract_hour.status)
def test_contracted_hours(self):
# If we create some Contract Hour objects and then go to the
# project contract and get contracted_hours(), it gives the sum
# of the hours
pc = factories.ProjectContract(contract_hours=4)
self.assertEqual(4, pc.contracted_hours())
self.assertEqual(0, pc.pending_hours())
def test_pending_hours(self):
# If we create some pending Contract Hour objects and then go to the
# project contract and get pending_hours(), it gives the sum
# of the hours
pc = factories.ProjectContract(contract_hours=4)
ch = factories.ContractHour(
contract=pc, hours=27, status=ContractHour.PENDING_STATUS)
self.assertEqual(4, pc.contracted_hours())
self.assertEqual(27, pc.pending_hours())
ch.delete()
self.assertEqual(4, pc.contracted_hours())
self.assertEqual(0, pc.pending_hours())
def test_validation(self):
with self.assertRaises(ValidationError):
ch = factories.ContractHour(
status=ContractHour.PENDING_STATUS, date_approved=datetime.date.today())
ch.clean()
def test_default_date_approved(self):
# If saved with status approved and no date approved,
# it sets it to today
ch = factories.ContractHour(
status=ContractHour.APPROVED_STATUS, date_approved=None)
ch = ContractHour.objects.get(pk=ch.pk)
self.assertEqual(datetime.date.today(), ch.date_approved)
def test_fraction_hours(self):
# fraction_hours returns what fraction of the contracted hours
# have been worked
contracted_hours = 0
pc = factories.ProjectContract(contract_hours=contracted_hours)
# If contracted hours 0, return 0 (don't div/0)
self.assertEqual(0.0, pc.fraction_hours)
contracted_hours = 10.0
pc = factories.ProjectContract(contract_hours=contracted_hours)
# If contracted hours non-zero, worked hours 0, return 0
self.assertEqual(0.0, pc.fraction_hours)
# Now do some work
pc._worked = 5.0
self.assertEqual(0.5, pc.fraction_hours)
def test_fraction_schedule(self):
# fraction_schedule returns what fraction of the contract period
# has elapsed - if the contract is current
one_month = datetime.timedelta(days=30)
today = datetime.date.today()
last_month = today - one_month
next_month = today + one_month
pc = factories.ProjectContract(
status=ProjectContract.STATUS_UPCOMING, start_date=last_month,
end_date=next_month
)
self.assertEqual(0.0, pc.fraction_schedule)
pc.status = ProjectContract.STATUS_COMPLETE
self.assertEqual(0.0, pc.fraction_schedule)
pc.status = ProjectContract.STATUS_CURRENT
self.assertEqual(0.5, pc.fraction_schedule)
# Just to be perverse, a contract in current state whose start
# date hasn't arrived yet
pc.start_date = today + datetime.timedelta(days=2)
self.assertEqual(0.0, pc.fraction_schedule)
def test_get_absolute_url(self):
ch = factories.ContractHour.create()
url = reverse(
'admin:contracts_contracthour_change', args=(ch.pk,), current_app='timepiece')
self.assertEqual(url, ch.get_absolute_url())
class ContractHourEmailTestCase(TestCase):
def test_save_pending_calls_send_email(self):
with mock.patch('timepiece.contracts.models.ContractHour._send_mail') as send_mail:
factories.ContractHour(status=ContractHour.PENDING_STATUS)
self.assertTrue(send_mail.called)
(subject, ctx) = send_mail.call_args[0]
self.assertTrue(subject.startswith("New"))
def test_save_approved_does_not_call_send_email(self):
with mock.patch('timepiece.contracts.models.ContractHour._send_mail') as send_mail:
factories.ContractHour(status=ContractHour.APPROVED_STATUS)
self.assertFalse(send_mail.called)
def test_delete_pending_calls_send_email(self):
ch = factories.ContractHour(status=ContractHour.PENDING_STATUS)
with mock.patch('timepiece.contracts.models.ContractHour._send_mail') as send_mail:
ch.delete()
self.assertTrue(send_mail.called)
(subject, ctx) = send_mail.call_args[0]
self.assertTrue(subject.startswith("Deleted"))
def test_change_pending_calls_send_email(self):
ch = factories.ContractHour(status=ContractHour.PENDING_STATUS)
with mock.patch('timepiece.contracts.models.ContractHour._send_mail') as send_mail:
ch.save()
self.assertTrue(send_mail.called)
(subject, ctx) = send_mail.call_args[0]
self.assertTrue(subject.startswith("Changed"))
class ProjectContractEntryTestCase(TestCase):
"""
Set up two projects and two contracts. The relationship diagram looks like a Z,
with the following instances:
Project A ----- Contract 1
/
Project B /______Contract 2
User A logs one hour per day to Project A and user B logs one hour per day to
project B.
"""
def setUp(self):
super(ProjectContractEntryTestCase, self).setUp()
self.user_a = factories.User(username='userA')
self.user_b = factories.User(username='userB')
self.project_a = factories.Project(
type__enable_timetracking=True,
status__enable_timetracking=True,
name='Project A')
self.project_b = factories.Project(
type__enable_timetracking=True,
status__enable_timetracking=True,
name='Project B')
self.contract1 = factories.ProjectContract(
name='Contract 1',
projects=[self.project_a, self.project_b],
status=ProjectContract.STATUS_CURRENT,
start_date=timezone.now().replace(
hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=16),
end_date=timezone.now().replace(
hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=12),
)
self.contract2 = factories.ProjectContract(
name='Contract 2',
projects=[self.project_b],
status=ProjectContract.STATUS_CURRENT,
start_date=timezone.now().replace(
hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=8),
end_date=timezone.now().replace(
hour=0, minute=0, second=0, microsecond=0) - relativedelta(days=4),
)
for x in range(20):
factories.Entry(**{
'user': self.user_a,
'project': self.project_a,
'start_time': timezone.now() - relativedelta(days=x),
'end_time': (timezone.now() - relativedelta(days=x)) + relativedelta(hours=1),
'seconds_paused': 0,
'status': Entry.UNVERIFIED,
})
factories.Entry(**{
'user': self.user_b,
'project': self.project_b,
'start_time': timezone.now() - relativedelta(days=x),
'end_time': (timezone.now() - relativedelta(days=x)) + relativedelta(hours=1),
'seconds_paused': 0,
'status': Entry.UNVERIFIED,
})
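    # Entry arithmetic behind the assertions below: each user logs one hour a
    # day for the last 20 days (x = 0..19 days ago). Contract 1 (both
    # projects) spans 16 to 12 days ago: 3 earlier days x 2 users = 6
    # pre-launch entries, 5 days x 2 = 10 during, 12 later days x 2 = 24
    # post. Contract 2 (project B only) spans 8 to 4 days ago, so only user
    # B's entries count: 11 pre, 5 during, 4 post.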
def testContract1PreValues(self):
self.assertEqual(self.contract1.pre_launch_entries.count(), 6)
self.assertEqual(self.contract1.pre_launch_hours_worked, 6.0)
def testContract1Values(self):
self.assertEqual(self.contract1.entries.count(), 10)
self.assertEqual(self.contract1.hours_worked, 10.0)
def testContract1PostValues(self):
self.assertEqual(self.contract1.post_launch_entries.count(), 24)
self.assertEqual(self.contract1.post_launch_hours_worked, 24.0)
def testContract2PreValues(self):
self.assertEqual(self.contract2.pre_launch_entries.count(), 11)
self.assertEqual(self.contract2.pre_launch_hours_worked, 11.0)
def testContract2Values(self):
self.assertEqual(self.contract2.entries.count(), 5)
self.assertEqual(self.contract2.hours_worked, 5.0)
def testContract2PostValues(self):
self.assertEqual(self.contract2.post_launch_entries.count(), 4)
self.assertEqual(self.contract2.post_launch_hours_worked, 4.0)
|
{
"content_hash": "1cac0163f81c449e75ba31be3256dbcd",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 95,
"avg_line_length": 40.90571428571429,
"alnum_prop": 0.6495075784032968,
"repo_name": "caktus/django-timepiece",
"id": "8157819b4740df623e989477356ac7151b9696b2",
"size": "14317",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "timepiece/contracts/tests/test_contracts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23745"
},
{
"name": "HTML",
"bytes": "235951"
},
{
"name": "JavaScript",
"bytes": "202697"
},
{
"name": "Python",
"bytes": "562382"
}
],
"symlink_target": ""
}
|
import sys
from zoo.pipeline.api.keras2.base import ZooKeras2Layer
if sys.version >= '3':
long = int
unicode = str
class LocallyConnected1D(ZooKeras2Layer):
"""
Locally-connected layer for 1D inputs which works similarly to the TemporalConvolution
layer, except that weights are unshared, that is, a different set of filters is applied
    at each different patch of the input.
Padding currently supported for this layer is 'valid'.
The input of this layer should be 3D.
When you use this layer as the first layer of a model, you need to provide the argument
input_shape (a shape tuple, does not include the batch dimension).
# Arguments
filters: Dimensionality of the output.
kernel_size: The extension (spatial or temporal) of each filter.
strides: Factor by which to subsample output. Int. Default is 1.
padding: Only 'valid' is supported for now.
activation: String representation of the activation function to use
(such as 'relu' or 'sigmoid'). Default is None.
kernel_regularizer: An instance of [[Regularizer]], (eg. L1 or L2 regularization),
applied to the input weights matrices. Default is None.
bias_regularizer: An instance of [[Regularizer]], applied to the bias. Default is None.
use_bias: Whether to include a bias (i.e. make the layer affine rather than linear).
Default is True.
input_shape: A shape tuple, not including batch.
name: String to set the name of the layer.
If not specified, its name will by default to be a generated string.
>>> locallyconnected1d = LocallyConnected1D(6, 3, input_shape=(8, 12))
creating: createZooKeras2LocallyConnected1D
"""
def __init__(self,
filters,
kernel_size,
strides=1,
padding="valid",
activation=None,
kernel_regularizer=None,
bias_regularizer=None,
use_bias=True,
input_shape=None,
**kwargs):
if padding != "valid":
raise ValueError("For LocallyConnected1D, "
"only padding='valid' is supported for now")
super(LocallyConnected1D, self).__init__(None,
filters,
kernel_size,
strides,
padding,
activation,
kernel_regularizer,
bias_regularizer,
use_bias,
list(input_shape) if input_shape else None,
**kwargs)
|
{
"content_hash": "13ed5fd572e4c5a1ef673136e7ad9ce7",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 92,
"avg_line_length": 47.435483870967744,
"alnum_prop": 0.5426725603536212,
"repo_name": "intel-analytics/analytics-zoo",
"id": "63618356302163590e2a17ed9c6fa2b3aa33d24d",
"size": "3531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyzoo/zoo/pipeline/api/keras2/layers/local.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "73165"
},
{
"name": "Groovy",
"bytes": "1613"
},
{
"name": "Java",
"bytes": "209136"
},
{
"name": "Jupyter Notebook",
"bytes": "24437284"
},
{
"name": "Makefile",
"bytes": "11724"
},
{
"name": "PureBasic",
"bytes": "593"
},
{
"name": "Python",
"bytes": "4085490"
},
{
"name": "RobotFramework",
"bytes": "17467"
},
{
"name": "Scala",
"bytes": "3562801"
},
{
"name": "Shell",
"bytes": "413512"
}
],
"symlink_target": ""
}
|
from unittest import mock
from nova import exception
from nova.tests.unit.virt.hyperv import test_base
from nova.virt.hyperv import serialconsolehandler
from nova.virt.hyperv import serialconsoleops
class SerialConsoleOpsTestCase(test_base.HyperVBaseTestCase):
def setUp(self):
super(SerialConsoleOpsTestCase, self).setUp()
serialconsoleops._console_handlers = {}
self._serialops = serialconsoleops.SerialConsoleOps()
self._serialops._pathutils = mock.MagicMock()
def _setup_console_handler_mock(self):
mock_console_handler = mock.Mock()
serialconsoleops._console_handlers = {mock.sentinel.instance_name:
mock_console_handler}
return mock_console_handler
@mock.patch.object(serialconsolehandler, 'SerialConsoleHandler')
@mock.patch.object(serialconsoleops.SerialConsoleOps,
'stop_console_handler_unsync')
def _test_start_console_handler(self, mock_stop_handler,
mock_console_handler,
raise_exception=False):
mock_handler = mock_console_handler.return_value
if raise_exception:
mock_handler.start.side_effect = Exception
self._serialops.start_console_handler(mock.sentinel.instance_name)
mock_stop_handler.assert_called_once_with(mock.sentinel.instance_name)
mock_console_handler.assert_called_once_with(
mock.sentinel.instance_name)
if raise_exception:
mock_handler.stop.assert_called_once_with()
else:
console_handler = serialconsoleops._console_handlers.get(
mock.sentinel.instance_name)
self.assertEqual(mock_handler, console_handler)
def test_start_console_handler(self):
self._test_start_console_handler()
def test_start_console_handler_exception(self):
self._test_start_console_handler(raise_exception=True)
def test_stop_console_handler(self):
mock_console_handler = self._setup_console_handler_mock()
self._serialops.stop_console_handler(mock.sentinel.instance_name)
mock_console_handler.stop.assert_called_once_with()
handler = serialconsoleops._console_handlers.get(
mock.sentinel.instance_name)
self.assertIsNone(handler)
def test_get_serial_console(self):
mock_console_handler = self._setup_console_handler_mock()
ret_val = self._serialops.get_serial_console(
mock.sentinel.instance_name)
self.assertEqual(mock_console_handler.get_serial_console(),
ret_val)
def test_get_serial_console_exception(self):
self.assertRaises(exception.ConsoleTypeUnavailable,
self._serialops.get_serial_console,
mock.sentinel.instance_name)
@mock.patch('builtins.open')
@mock.patch("os.path.exists")
def test_get_console_output_exception(self, fake_path_exists, fake_open):
self._serialops._pathutils.get_vm_console_log_paths.return_value = [
mock.sentinel.log_path_1, mock.sentinel.log_path_2]
fake_open.side_effect = IOError
fake_path_exists.return_value = True
self.assertRaises(exception.ConsoleLogOutputException,
self._serialops.get_console_output,
mock.sentinel.instance_name)
fake_open.assert_called_once_with(mock.sentinel.log_path_2, 'rb')
@mock.patch('os.path.exists')
@mock.patch.object(serialconsoleops.SerialConsoleOps,
'start_console_handler')
    def test_start_console_handlers(self, mock_start_handler, mock_exists):
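        # mock.patch decorators apply bottom-up: the start_console_handler
        # patch is the first mock argument, os.path.exists the second.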
        self._serialops._vmutils.get_active_instances.return_value = [
mock.sentinel.nova_instance_name,
mock.sentinel.other_instance_name]
mock_exists.side_effect = [True, False]
self._serialops.start_console_handlers()
self._serialops._vmutils.get_active_instances.assert_called_once_with()
|
{
"content_hash": "67206d729a1d890019b371c3f99a0ef5",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 41,
"alnum_prop": 0.6521951219512195,
"repo_name": "openstack/nova",
"id": "4a4b7c8e4f28acfaa736af6268be6d8f3e4ba491",
"size": "4739",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/unit/virt/hyperv/test_serialconsoleops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from nose.tools import assert_equal, raises, assert_true
from wlauto.core.extension import Extension, Parameter, Param, ExtensionMeta, Module
from wlauto.utils.types import list_of_ints
from wlauto.exceptions import ConfigError
class MyMeta(ExtensionMeta):
virtual_methods = ['validate', 'virtual1', 'virtual2']
class MyBaseExtension(Extension):
__metaclass__ = MyMeta
name = 'base'
parameters = [
Parameter('base'),
]
def __init__(self, **kwargs):
super(MyBaseExtension, self).__init__(**kwargs)
self.v1 = 0
self.v2 = 0
self.v3 = ''
def virtual1(self):
self.v1 += 1
self.v3 = 'base'
def virtual2(self):
self.v2 += 1
class MyAcidExtension(MyBaseExtension):
name = 'acid'
parameters = [
Parameter('hydrochloric', kind=list_of_ints, default=[1, 2]),
'citric',
('carbonic', int),
]
def __init__(self, **kwargs):
super(MyAcidExtension, self).__init__(**kwargs)
self.vv1 = 0
self.vv2 = 0
def virtual1(self):
self.vv1 += 1
self.v3 = 'acid'
def virtual2(self):
self.vv2 += 1
class MyOtherExtension(MyBaseExtension):
name = 'other'
parameters = [
Param('mandatory', mandatory=True),
Param('optional', allowed_values=['test', 'check']),
]
class MyOtherOtherExtension(MyOtherExtension):
name = 'otherother'
parameters = [
Param('mandatory', override=True),
]
class MyOverridingExtension(MyAcidExtension):
name = 'overriding'
parameters = [
Parameter('hydrochloric', override=True, default=[3, 4]),
]
class MyThirdTeerExtension(MyOverridingExtension):
name = 'thirdteer'
class MultiValueParamExt(Extension):
name = 'multivalue'
parameters = [
Parameter('test', kind=list_of_ints, allowed_values=[42, 7, 73]),
]
class MyCoolModule(Module):
name = 'cool_module'
capabilities = ['fizzle']
def initialize(self):
self.fizzle_factor = 0 # pylint: disable=attribute-defined-outside-init
def fizzle(self):
self.fizzle_factor += 1
class MyEvenCoolerModule(Module):
name = 'even_cooler_module'
capabilities = ['fizzle']
def fizzle(self):
self.owner.self_fizzle_factor += 2
class MyModularExtension(Extension):
name = 'modular'
parameters = [
Parameter('modules', override=True, default=['cool_module']),
]
class MyOtherModularExtension(Extension):
name = 'other_modular'
parameters = [
Parameter('modules', override=True, default=[
'cool_module',
'even_cooler_module',
]),
]
def __init__(self, **kwargs):
super(MyOtherModularExtension, self).__init__(**kwargs)
self.self_fizzle_factor = 0
class FakeLoader(object):
modules = [
MyCoolModule,
MyEvenCoolerModule,
]
def get_module(self, name, owner, **kwargs): # pylint: disable=unused-argument
for module in self.modules:
if module.name == name:
return _instantiate(module, owner)
class ExtensionMetaTest(TestCase):
def test_propagation(self):
acid_params = [p.name for p in MyAcidExtension.parameters]
assert_equal(acid_params, ['modules', 'base', 'hydrochloric', 'citric', 'carbonic'])
@raises(ValueError)
def test_duplicate_param_spec(self):
class BadExtension(MyBaseExtension): # pylint: disable=W0612
parameters = [
Parameter('base'),
]
def test_param_override(self):
class OverridingExtension(MyBaseExtension): # pylint: disable=W0612
parameters = [
Parameter('base', override=True, default='cheese'),
]
assert_equal(OverridingExtension.parameters['base'].default, 'cheese')
@raises(ValueError)
def test_invalid_param_spec(self):
class BadExtension(MyBaseExtension): # pylint: disable=W0612
parameters = [
7,
]
def test_virtual_methods(self):
acid = _instantiate(MyAcidExtension)
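        # One virtual call runs both the base and the overriding
        # implementation; v3 ending up as 'acid' shows the subclass ran last.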
acid.virtual1()
assert_equal(acid.v1, 1)
assert_equal(acid.vv1, 1)
assert_equal(acid.v2, 0)
assert_equal(acid.vv2, 0)
assert_equal(acid.v3, 'acid')
acid.virtual2()
acid.virtual2()
assert_equal(acid.v1, 1)
assert_equal(acid.vv1, 1)
assert_equal(acid.v2, 2)
assert_equal(acid.vv2, 2)
class ParametersTest(TestCase):
def test_setting(self):
myext = _instantiate(MyAcidExtension, hydrochloric=[5, 6], citric=5, carbonic=42)
assert_equal(myext.hydrochloric, [5, 6])
assert_equal(myext.citric, '5')
assert_equal(myext.carbonic, 42)
def test_validation_ok(self):
myext = _instantiate(MyOtherExtension, mandatory='check', optional='check')
myext.validate()
def test_default_override(self):
myext = _instantiate(MyOverridingExtension)
assert_equal(myext.hydrochloric, [3, 4])
myotherext = _instantiate(MyThirdTeerExtension)
assert_equal(myotherext.hydrochloric, [3, 4])
def test_multivalue_param(self):
myext = _instantiate(MultiValueParamExt, test=[7, 42])
myext.validate()
assert_equal(myext.test, [7, 42])
@raises(ConfigError)
def test_bad_multivalue_param(self):
myext = _instantiate(MultiValueParamExt, test=[5])
myext.validate()
@raises(ConfigError)
def test_validation_no_mandatory(self):
myext = _instantiate(MyOtherExtension, optional='check')
myext.validate()
@raises(ConfigError)
def test_validation_no_mandatory_in_derived(self):
_instantiate(MyOtherOtherExtension)
@raises(ConfigError)
def test_validation_bad_value(self):
myext = _instantiate(MyOtherExtension, mandatory=1, optional='invalid')
myext.validate()
class ModuleTest(TestCase):
def test_fizzle(self):
myext = _instantiate(MyModularExtension)
myext.load_modules(FakeLoader())
assert_true(myext.can('fizzle'))
myext.fizzle()
assert_equal(myext.fizzle_factor, 1)
def test_self_fizzle(self):
myext = _instantiate(MyOtherModularExtension)
myext.load_modules(FakeLoader())
myext.fizzle()
assert_equal(myext.self_fizzle_factor, 2)
def _instantiate(cls, *args, **kwargs):
# Needed to get around Extension's __init__ checks
return cls(*args, **kwargs)
|
{
"content_hash": "616e50f36ebb97d01cd734f1003ec103",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 92,
"avg_line_length": 24.650557620817843,
"alnum_prop": 0.6190619816015684,
"repo_name": "rockyzhang/workload-automation",
"id": "41794f93c6c9d9f5e125e383a61a85e3d33bbc97",
"size": "7254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wlauto/tests/test_extension.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "35633"
},
{
"name": "HTML",
"bytes": "8402"
},
{
"name": "Java",
"bytes": "91333"
},
{
"name": "JavaScript",
"bytes": "6578"
},
{
"name": "Makefile",
"bytes": "430"
},
{
"name": "Prolog",
"bytes": "31390"
},
{
"name": "Python",
"bytes": "968893"
},
{
"name": "Shell",
"bytes": "23204"
},
{
"name": "VimL",
"bytes": "901"
}
],
"symlink_target": ""
}
|
import sys
import argparse
from os import getenv
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from taxcli.config import config
CONFIG = config[getenv('TAXCLI_CONFIG', 'default')]
engine = create_engine(CONFIG.sql_uri)
base = declarative_base(bind=engine)
import taxcli.models # noqa
from taxcli.commands import add, delete, lists
from taxcli.commands.analysis import get_month, get_year
def main():
# Specifying commands
parser = argparse.ArgumentParser(description='Taxcli')
    # Initialize subparsers
subparsers = parser.add_subparsers(
title='Subcommands', description='Add data or get an analysis')
# Add
add_types = ['transaction', 'invoice', 'contact', 'file']
add_subcommand = subparsers.add_parser(
'add', help='Add data to the database')
add_subcommand.add_argument(
'type', type=str, choices=add_types,
help='The kind of data you want to add.'
)
add_subcommand.add_argument(
'--file', '-f', type=str,
help='A file you want to attach to an invoice.'
)
add_subcommand.set_defaults(func=add)
# Delete
delete_types = ['transaction', 'invoice', 'contact']
delete_subcommand = subparsers.add_parser(
        'delete', help='Delete data from the database')
delete_subcommand.add_argument(
'type', type=str, choices=delete_types,
        help='The kind of data you want to delete.'
)
delete_subcommand.set_defaults(func=delete)
# List
list_types = ['transaction', 'invoice', 'contact']
list_subcommand = subparsers.add_parser(
        'list', help='List data from the database')
list_subcommand.add_argument(
'type', type=str, choices=list_types,
        help='The kind of data you want to list.'
)
list_subcommand.set_defaults(func=lists)
# Analysis part
get_parser = subparsers.add_parser(
'get', help='Get analysis of data.')
# Analysis for month
get_subparser = get_parser.add_subparsers(
title='`get` subcommands', description='Subcommands for getting analysis')
month_subcommand = get_subparser.add_parser(
'month', help='Get monthly analysis of data.')
month_subcommand.add_argument(
'year', type=int, help='The year you want to look at.')
month_subcommand.add_argument(
'month', type=int, help='The month you want to look at.')
month_subcommand.set_defaults(func=get_month)
# Analysis for years
year_subcommand = get_subparser.add_parser(
'year', help='Get analysis of data for one year.')
year_subcommand.add_argument(
'year', type=int, help='The year you want to look at.')
year_subcommand.set_defaults(func=get_year)
args = parser.parse_args()
if hasattr(args, 'func'):
try:
args.func(vars(args))
except KeyboardInterrupt:
sys.exit(0)
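# Illustrative command lines this parser accepts (assuming `main` is wired up
# as a `taxcli` console entry point; the entry-point name is an assumption):
#   taxcli add invoice --file scan.pdf    # add an invoice with an attachment
#   taxcli list transaction               # list all transactions
#   taxcli get month 2017 3               # monthly analysis for March 2017
#   taxcli get year 2017                  # yearly analysis for 2017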
|
{
"content_hash": "1d25e11af507d7f86bcb04074fcf85bb",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 82,
"avg_line_length": 32.674157303370784,
"alnum_prop": 0.6605914718019257,
"repo_name": "Nukesor/taxcli",
"id": "9b2d24d58bbf1e57567054e47083d1bfc9a10e53",
"size": "2908",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taxcli/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "445"
},
{
"name": "Mako",
"bytes": "493"
},
{
"name": "Python",
"bytes": "44807"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/component/droid/shared_struct_maint_module_2.iff"
result.attribute_template_id = -1
result.stfName("craft_droid_ingredients_n","struct_maint_module_2")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "5323038d0329616a252fb387dd963a62",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 85,
"avg_line_length": 26.384615384615383,
"alnum_prop": 0.7113702623906706,
"repo_name": "anhstudios/swganh",
"id": "af0543c00f30d7275a670c35eee87d051d95fd74",
"size": "488",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/component/droid/shared_struct_maint_module_2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
import logging
import re
import urllib
import netaddr
from neutron.i18n import _LI, _LE
from novaclient import exceptions as nova_exc
from oslo.serialization import jsonutils
from oslo.utils import excutils
import requests
from vyatta.common import config
from vyatta.common import exceptions as v_exc
from vyatta.common import globals as vyatta_globals
from vyatta.common import parsers
from vyatta.common import utils as vyatta_utils
LOG = logging.getLogger(__name__)
class VRouterRestAPIClient(object):
"""Vyatta vRouter REST API Client.
Uses vRouter REST API to configure vRouter.
"""
CFG_FMT_GENERIC = 'generic'
CFG_FMT_COMMANDS = 'commands'
IF_MAC_ADDRESS = 'mac_address'
IF_IP_ADDRESS = 'ip_address'
IF_GATEWAY_IP = 'gateway_ip'
REST_RETRY_LIMIT = 10
REST_RETRY_DELAY = 5
_VROUTER_VSE_MODEL = 54
_VROUTER_VR_MODEL = 56
# Floating ip NAT rules will be prioritized before subnet NAT rules.
# Same rule number is used for both SNAT and DNAT rule.
_MAX_NAT_FLOATING_IP_RULE_NUM = 4000
_MAX_NAT_SUBNET_IP_RULE_NUM = 8000
_EXTERNAL_GATEWAY_DESCR = 'External_Gateway'
_ROUTER_INTERFACE_DESCR = 'Router_Interface'
_external_gw_info = None
_router_if_subnet_dict = {}
_floating_ip_dict = {}
# Floating IP NAT rule number counter.
# It will be incremented in get next method.
_nat_floating_ip_rule_num = 0
# Subnet ip NAT rules are for router interfaces.
# As we want to prioritize floating ip NAT rules first,
# subnet rules will start only after floating ip rules.
# It will be incremented in get next method.
_nat_subnet_ip_rule_num = _MAX_NAT_FLOATING_IP_RULE_NUM
# Stores the vrouter model
_vrouter_model = None
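    # Illustrative rule-number allocation under this scheme (values are a
    # sketch, not real state):
    #   floating-ip NAT rules: 1, 2, ... 4000 (one number shared by the
    #       paired SNAT/DNAT rules of a single floating ip)
    #   subnet SNAT rules:     4001, 4002, ... 8000 (one per router
    #       interface subnet)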
    def __init__(self):
        self.address = None
        # Per-instance caches; mutating the class-level dicts above would
        # leak state between client instances.
        self._router_if_subnet_dict = {}
        self._floating_ip_dict = {}
def connect(self, address):
"""Connects to vRouter using the provided address.
Retrieves the configuration and updates the cache.
"""
self.address = address
LOG.info(_LI("Vyatta vRouter REST API: "
"Connecting to vRouter %s"), address)
self._process_model()
self._sync_cache()
def init_router(self, router_name, admin_state_up):
"""
Configures Router name and Admin State.
"""
cmd_list = []
self._set_router_name_cmd(cmd_list, router_name)
self._set_admin_state_cmd(cmd_list, admin_state_up)
vyatta_utils.retry(
self.exec_cmd_batch,
args=(cmd_list,), exceptions=(v_exc.VRouterOperationError,),
limit=self.REST_RETRY_LIMIT, delay=self.REST_RETRY_DELAY)
def update_router(self, router_name=None,
admin_state_up=None, external_gateway_info=None):
"""Updates Router name, Admin state, External gateway.
All the parameters are optional.
"""
cmd_list = []
if router_name:
self._set_router_name_cmd(cmd_list, router_name)
if admin_state_up is not None:
self._set_admin_state_cmd(cmd_list, admin_state_up)
if external_gateway_info is not None:
given_gw_info = self._get_gw_interface_info(external_gateway_info)
nat_rules = self._update_gw_config_on_change(given_gw_info,
cmd_list)
self._update_gw_cache_info(given_gw_info, nat_rules)
else:
self._clear_gw_configuration(cmd_list)
self._clear_cached_gw_info()
def add_interface_to_router(self, interface_info):
"""Sets ip address of the ethernet interface.
Ethernet interface identifier is derived from the given mac-address.
"""
(if_ip_address,
eth_if_id) = self._get_ethernet_if_info(interface_info)
cmd_list = []
self._set_ethernet_if_cmd(cmd_list,
eth_if_id,
if_ip_address,
self._ROUTER_INTERFACE_DESCR)
router_if_subnet = self._get_subnet_from_ip_address(if_ip_address)
# If external gateway was configured before then
# we need to add SNAT rules
rule_num = None
if self._external_gw_info is not None:
rule_num = self._add_snat_rule_for_router_if_cmd(
cmd_list, router_if_subnet, self._external_gw_info)
self.exec_cmd_batch(cmd_list)
# Cache the router interface info using subnet
if router_if_subnet not in self._router_if_subnet_dict:
self._router_if_subnet_dict[router_if_subnet] = None
if self._external_gw_info is not None:
self._router_if_subnet_dict[router_if_subnet] = rule_num
def remove_interface_from_router(self, interface_info):
"""Removes ip address of the ethernet interface.
Ethernet interface identifier is derived from the given mac-address.
"""
(if_ip_address,
eth_if_id) = self._get_ethernet_if_info(interface_info)
cmd_list = []
self._delete_ethernet_if_cmd(cmd_list,
eth_if_id,
if_ip_address,
self._ROUTER_INTERFACE_DESCR)
# Check the cache for router interface
router_if_subnet = self._get_subnet_from_ip_address(if_ip_address)
if router_if_subnet in self._router_if_subnet_dict:
# We need to delete the SNAT rule
nat_rule = self._router_if_subnet_dict[router_if_subnet]
if nat_rule is not None:
self._delete_snat_rule_cmd(cmd_list, nat_rule)
self.exec_cmd_batch(cmd_list)
# Remove the router interface info from cache
self._router_if_subnet_dict.pop(router_if_subnet, None)
def assign_floating_ip(self, floating_ip, fixed_ip):
"""Creates SNAT and DNAT rules for given floating ip and fixed ip."""
if self._external_gw_info is None:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='External gateway not configured')
cmd_list = []
ext_if_id = self._external_gw_info.get_ethernet_if_id()
# Get the next NAT rule number and add the NAT rule
nat_rule_num = self._get_next_nat_floating_ip_rule_num()
self._add_snat_rule_cmd(cmd_list, nat_rule_num, ext_if_id,
fixed_ip, floating_ip)
self._add_dnat_rule_cmd(cmd_list, nat_rule_num, ext_if_id,
floating_ip, fixed_ip)
# Set the floating ip in external gateway interface
gw_net = netaddr.IPNetwork(self._external_gw_info.get_ip_address())
self._set_ethernet_ip(
cmd_list, self._external_gw_info.get_ethernet_if_id(),
'{0}/{1}'.format(floating_ip, gw_net.prefixlen))
self.exec_cmd_batch(cmd_list)
# Store SNAT and DNAT rule in cache
dict_key = self._get_floating_ip_key(floating_ip, fixed_ip)
self._floating_ip_dict[dict_key] = nat_rule_num
def unassign_floating_ip(self, floating_ip, fixed_ip):
"""Deletes SNAT and DNAT rules for given floating ip and fixed ip."""
if self._external_gw_info is None:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='External gateway not configured')
cmd_list = []
# Check the cache for nat rules
dict_key = self._get_floating_ip_key(floating_ip, fixed_ip)
if dict_key in self._floating_ip_dict:
# Get the NAT rules from the cache and delete them
nat_rule = self._floating_ip_dict[dict_key]
self._delete_snat_rule_cmd(cmd_list, nat_rule)
self._delete_dnat_rule_cmd(cmd_list, nat_rule)
# Delete the floating ip in external gateway interface
gw_net = netaddr.IPNetwork(self._external_gw_info.get_ip_address())
self._delete_ethernet_ip_cmd(
cmd_list, self._external_gw_info.get_ethernet_if_id(),
'{0}/{1}'.format(floating_ip, gw_net.prefixlen))
else:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='NAT rule not found for floating ip {0}'
.format(floating_ip))
self.exec_cmd_batch(cmd_list)
if dict_key in self._floating_ip_dict:
self._floating_ip_dict.pop(dict_key)
def update_static_routes(self, routes_add, routes_del):
def _get_route_type(dest):
            # Use the destination passed in, not the loop variable.
            ip = netaddr.IPNetwork(dest)
if ip.version == 4:
return 'route'
else:
return 'route6'
cmd_list = []
for rule in routes_add:
cmd_list.append(SetCmd(
'protocols/static/{0}/{1}/next-hop/{2}'.format(
_get_route_type(rule.dest_cidr),
urllib.quote_plus(rule.dest_cidr),
urllib.quote_plus(rule.next_hop))))
for rule in routes_del:
cmd_list.append(DeleteCmd(
'protocols/static/{0}/{1}'.format(
_get_route_type(rule.dest_cidr),
urllib.quote_plus(rule.dest_cidr))))
self.exec_cmd_batch(cmd_list)
def disconnect(self):
self.address = None
def _rest_call(self, action, uri, custom_headers=None, session=None):
LOG.debug('Vyatta Router REST Request: {0} {1}'.format(action, uri))
if session is None:
session = requests
auth = tuple(config.VROUTER.vrouter_credentials.split(':'))
if len(auth) != 2:
raise v_exc.InvalidParameter(
cause=_("Invalid vrouter_credentials %s") % len(auth))
headers = {'Accept': 'application/json',
'Content-Length': 0}
if custom_headers:
headers.update(custom_headers)
try:
uri = 'https://{0}{1}'.format(self.address, uri)
return session.request(action, uri, auth=auth,
headers=headers, verify=False)
except requests.ConnectionError:
LOG.error(_LE('Vyatta vRouter REST API: '
'Could not establish HTTP connection to %s'),
self.address)
with excutils.save_and_reraise_exception():
raise v_exc.VRouterConnectFailure(ip_address=self.address)
def _get_ethernet_if_info(self, interface_info):
gw_mac_address = interface_info[self.IF_MAC_ADDRESS]
gw_ip_address = interface_info[self.IF_IP_ADDRESS]
gw_if_id = self.get_ethernet_if_id(gw_mac_address)
return gw_ip_address, gw_if_id
def _get_gw_interface_info(self, external_gateway_info):
(gw_ip_address,
gw_if_id) = self._get_ethernet_if_info(external_gateway_info)
gw_gateway_ip = external_gateway_info[self.IF_GATEWAY_IP]
given_gw_info = InterfaceInfo(gw_if_id, gw_ip_address, gw_gateway_ip)
return given_gw_info
def _update_gw_config_on_change(self, given_gw_info, cmd_list):
# Check if the external gw info is already cached.
# If the given external gw info is not equal to cached gw info
# then we need to update the existing gw info.
# So, clear old gw info and set new gw info.
if (self._external_gw_info is not None and
given_gw_info != self._external_gw_info):
LOG.debug("Vyatta vRouter REST API: Cached Gateway info is "
"not the same as given gateway info")
self._delete_external_gateway_if_cmd(
cmd_list, self._external_gw_info)
nat_rules = self._set_external_gateway_if_cmd(
cmd_list, given_gw_info)
# Execute the configuration commands
self.exec_cmd_batch(cmd_list)
return nat_rules
def _update_gw_cache_info(self, given_gw_info, nat_rules):
# Cache the external gateway info
self._external_gw_info = given_gw_info
# Cache the nat rules
for router_if_subnet, rule_num in nat_rules.iteritems():
self._router_if_subnet_dict[router_if_subnet] = rule_num
def _clear_gw_configuration(self, cmd_list):
# If external gateway info was cached before
# then clear the gateway router info
if self._external_gw_info is not None:
self._delete_external_gateway_if_cmd(
cmd_list, self._external_gw_info)
else:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='External gateway not already configured')
# Execute the configuration commands
self.exec_cmd_batch(cmd_list)
def _clear_cached_gw_info(self):
# Clear the external gateway info from the cache
self._external_gw_info = None
# Remove NAT rules for the existing router interfaces
for router_if_subnet in self._router_if_subnet_dict.keys():
self._router_if_subnet_dict[router_if_subnet] = None
def _set_external_gateway_if_cmd(self, cmd_list, gw_info):
"""Sets the external gateway configuration.
Adds SNAT rules and updates the cache.
"""
# Set the external gateway ip address
self._set_ethernet_if_cmd(cmd_list,
gw_info.get_ethernet_if_id(),
gw_info.get_ip_address(),
self._EXTERNAL_GATEWAY_DESCR)
self._set_system_gateway_cmd(cmd_list, gw_info.get_gateway_ip())
# Add NAT rules for the existing router interfaces
nat_rules = {}
for router_if_subnet in self._router_if_subnet_dict.keys():
rule_num = self._add_snat_rule_for_router_if_cmd(cmd_list,
router_if_subnet,
gw_info)
nat_rules[router_if_subnet] = rule_num
return nat_rules
def _delete_external_gateway_if_cmd(self, cmd_list, gw_info):
"""Sets the external gateway configuration.
Adds SNAT rules and updates the cache.
"""
# Remove default gateway
self._delete_system_gateway_cmd(cmd_list,
gw_info.get_gateway_ip())
# Delete the external gateway ip address
self._delete_ethernet_if_cmd(cmd_list,
gw_info.get_ethernet_if_id(),
gw_info.get_ip_address(),
self._EXTERNAL_GATEWAY_DESCR)
# Remove NAT rules for the existing router interfaces
for nat_rule in self._router_if_subnet_dict.values():
self._delete_snat_rule_cmd(cmd_list, nat_rule)
def _add_snat_rule_for_router_if_cmd(self, cmd_list,
router_if_subnet,
ext_gw_info):
# Get the next SNAT rule number
rule_num = self._get_next_nat_subnet_ip_rule_num()
# Create the SNAT rule and store in the cache
self._add_snat_rule_cmd(cmd_list,
rule_num,
ext_gw_info.get_ethernet_if_id(),
router_if_subnet,
ext_gw_info.get_ip_addr_without_cidr())
return rule_num
def _get_subnet_from_ip_address(self, ip_address):
ip_network = netaddr.IPNetwork(ip_address)
# Return subnet with CIDR format
ip_subnet = str(ip_network.cidr)
return ip_subnet
def _get_floating_ip_key(self, floating_ip, fixed_ip):
"""Returns the key to store floating ip and fixed ip combination."""
return "{0}.{1}".format(floating_ip, fixed_ip)
def _get_next_nat_floating_ip_rule_num(self):
"""Returns the next NAT rule number for floating ip."""
if (self._nat_floating_ip_rule_num >=
self._MAX_NAT_FLOATING_IP_RULE_NUM):
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='Max NAT Floating IP rule count reached')
self._nat_floating_ip_rule_num += 1
return self._nat_floating_ip_rule_num
def _get_next_nat_subnet_ip_rule_num(self):
"""Returns the next NAT rule number for subnet ip."""
if self._nat_subnet_ip_rule_num >= self._MAX_NAT_SUBNET_IP_RULE_NUM:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='Max NAT Subnet IP rule count reached')
self._nat_subnet_ip_rule_num += 1
return self._nat_subnet_ip_rule_num
def _get_admin_state(self):
"""Retrieves Admin State."""
output = self._show_cmd("ip/forwarding")
LOG.info(_LI('Vyatta vRouter status : %s'), output)
return "IP forwarding is on" in output
def _get_nat_cmd(self):
return 'service/nat' if (self._vrouter_model ==
self._VROUTER_VR_MODEL) else 'nat'
def _add_snat_rule_cmd(self, cmd_list, rule_num, ext_if_id,
src_addr, translation_addr):
"""Creates SNAT rule with the given parameters."""
nat_cmd = self._get_nat_cmd()
        # Queue the commands for the batch
cmd_list.append(
SetCmd("{0}/source/rule/{1}".format(nat_cmd, rule_num)))
cmd_list.append(SetCmd("{0}/source/rule/{1}/outbound-interface/{2}"
.format(nat_cmd, rule_num, ext_if_id)))
cmd_list.append(SetCmd("{0}/source/rule/{1}/source/address/{2}"
.format(nat_cmd, rule_num,
urllib.quote_plus(src_addr))))
cmd_list.append(SetCmd("{0}/source/rule/{1}/translation/address/{2}"
.format(nat_cmd, rule_num,
urllib.quote_plus(translation_addr))))
def _add_dnat_rule_cmd(self, cmd_list, rule_num, ext_if_id,
dest_addr, translation_addr):
"""Creates DNAT rule with the given parameters."""
nat_cmd = self._get_nat_cmd()
        # Queue the commands for the batch
cmd_list.append(
SetCmd("{0}/destination/rule/{1}".format(nat_cmd, rule_num)))
cmd_list.append(SetCmd("{0}/destination/rule/{1}/inbound-interface/{2}"
.format(nat_cmd, rule_num, ext_if_id)))
cmd_list.append(SetCmd("{0}/destination/rule/{1}/destination/"
"address/{2}".format(
nat_cmd, rule_num,
urllib.quote_plus(dest_addr))))
cmd_list.append(SetCmd("{0}/destination/rule/{1}/translation/"
"address/{2}".format(
nat_cmd, rule_num,
urllib.quote_plus(translation_addr))))
def _delete_snat_rule_cmd(self, cmd_list, rule_num):
"""Deletes the given SNAT rule."""
cmd_list.append(DeleteCmd("{0}/source/rule/{1}".
format(self._get_nat_cmd(), rule_num)))
def _delete_dnat_rule_cmd(self, cmd_list, rule_num):
"""Deletes the given DNAT rule."""
cmd_list.append(DeleteCmd("{0}/destination/rule/{1}".
format(self._get_nat_cmd(), rule_num)))
def _set_admin_state_cmd(self, cmd_list, admin_state):
"""Sets Admin State using command."""
if admin_state:
if not self._get_admin_state():
cmd_list.append(DeleteCmd("system/ip/disable-forwarding"))
else:
if self._get_admin_state():
cmd_list.append(SetCmd("system/ip/disable-forwarding"))
def get_vrouter_configuration(self, mode=CFG_FMT_GENERIC):
cmd = ['configuration']
if mode == self.CFG_FMT_GENERIC:
pass
elif mode == self.CFG_FMT_COMMANDS:
cmd.append('commands')
else:
raise v_exc.InvalidParameter(
cause='unsupported configuration dump format')
cmd = '/'.join(cmd)
return self._show_cmd(cmd)
def get_vpn_ipsec_sa(self, peer=None, tunnel=None):
assert not tunnel or peer
cmd = ['vpn', 'ipsec', 'sa']
if peer:
cmd.append('peer')
cmd.append(urllib.quote_plus(peer))
if tunnel:
cmd.append('tunnel')
cmd.append(urllib.quote_plus(tunnel))
cmd = '/'.join(cmd)
return self._show_cmd(cmd)
def get_ethernet_if_id(self, mac_address):
"""Uses show command output to find the ethernet interface."""
LOG.debug('Vyatta vRouter:get_ethernet_if_id. Given MAC {0}'
.format(repr(mac_address)))
mac_address = mac_address.strip().lower()
ifaces = self._get_interfaces()
for iface in ifaces:
if iface['mac_address'] == mac_address:
return iface['name']
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='Ethernet interface with Mac-address {0} does not exist'
.format(mac_address))
def _get_interface_cmd(self):
if self._vrouter_model == self._VROUTER_VR_MODEL:
return "dataplane"
else:
return "ethernet"
def _set_ethernet_ip(self, cmd_list, if_id, ip_address):
"""Sets ip address to an ethernet interface."""
if_cmd = self._get_interface_cmd()
cmd_list.append(SetCmd("interfaces/{0}/{1}/address/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(ip_address))))
def _set_ethernet_if_cmd(self, cmd_list, if_id,
ip_address, descr):
"""Sets ip address and description of an ethernet interface."""
if_cmd = self._get_interface_cmd()
        # Queue the commands for the batch
cmd_list.append(SetCmd("interfaces/{0}/{1}/address/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(ip_address))))
cmd_list.append(SetCmd("interfaces/{0}/{1}/description/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(descr))))
def _delete_ethernet_ip_cmd(self, cmd_list, if_id, ip_address):
"""Deletes ip address from an ethernet interface."""
if_cmd = self._get_interface_cmd()
cmd_list.append(DeleteCmd("interfaces/{0}/{1}/address/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(ip_address))))
def _delete_ethernet_if_cmd(self, cmd_list, if_id,
ip_address, descr):
"""Deletes ip address and description of an ethernet interface."""
if_cmd = self._get_interface_cmd()
        # Queue the commands for the batch
cmd_list.append(DeleteCmd("interfaces/{0}/{1}/address/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(ip_address))))
cmd_list.append(DeleteCmd("interfaces/{0}/{1}/description/{2}"
.format(if_cmd, if_id,
urllib.quote_plus(descr))))
cmd_list.append(DeleteCmd("interfaces/{0}/{1}".
format(if_cmd, if_id)))
def _set_router_name_cmd(self, cmd_list, router_name):
"""Configures router name using command."""
cmd_list.append(SetCmd("system/host-name/{0}".
format(urllib.quote_plus(router_name))))
def _set_system_gateway_cmd(self, cmd_list, gateway_ip):
cmd_list.append(SetCmd("protocols/static/route/{0}/next-hop/{1}".
format(urllib.quote_plus('0.0.0.0/0'),
urllib.quote_plus(gateway_ip))))
def _delete_system_gateway_cmd(self, cmd_list, gateway_ip):
cmd_list.append(DeleteCmd("protocols/static/route/{0}".
format(urllib.quote_plus('0.0.0.0/0'))))
def _configure_cmd(self, cmd_type, cmd):
"""Executes the given configuration command.
Commits and Saves the configuration changes to the startup config.
"""
        # Wrap the single command and reuse the batch execution path.
        self.exec_cmd_batch([UserCmd(cmd_type, cmd)])
def exec_cmd_batch(self, user_cmd_list):
"""Executes the given configuration command list.
Commits and Saves the configuration changes to the startup config.
"""
with requests.Session() as session:
response = self._rest_call("POST", "/rest/conf", session=session)
self._check_response(response, session=session)
config_url = response.headers['Location']
if config_url is None:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='REST API configuration URL is null')
config_url = "/" + config_url
for user_cmd in user_cmd_list:
url = user_cmd.make_url(config_url)
LOG.debug(
"Vyatta vRouter REST API: Config command %s", url)
response = self._rest_call("PUT", url, session=session)
self._check_response(response, config_url, session=session)
response = self._rest_call(
"POST", config_url + "/commit", session=session)
LOG.debug("Vyatta vRouter REST API: %s/commit", config_url)
self._check_response(response, config_url, session=session)
response = self._rest_call(
"POST", config_url + "/save", session=session)
LOG.debug("Vyatta vRouter REST API: %s/save", config_url)
self._check_response(response, config_url, session=session)
response = self._rest_call("DELETE", config_url, session=session)
self._check_response(response, session=session)
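    # For reference, the session above walks the vRouter REST configuration
    # lifecycle (URLs relative to https://<address>):
    #   POST   /rest/conf            -> open config session (Location header)
    #   PUT    <session>/set/...     -> stage each Set/Delete command
    #   POST   <session>/commit      -> apply the staged changes
    #   POST   <session>/save        -> persist to the startup config
    #   DELETE <session>             -> close the session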
def _check_response(self, response, config_url=None, session=None):
if session is None:
session = requests
if response.status_code not in (requests.codes.OK,
requests.codes.CREATED):
LOG.error(_LE('Vyatta vRouter REST API: Response Status : '
'%(status)s Reason: %(reason)s') %
{'status': response.status_code,
'reason': response.reason})
if config_url is not None:
self._rest_call("DELETE", config_url, session=session)
raise v_exc.VRouterOperationError(
ip_address=self.address, reason=response.reason)
def _get_config_cmd(self, user_cmd):
"""Executes the given "get config" command."""
response = self._rest_call("POST", "/rest/conf")
self._check_response(response)
config_url = response.headers['Location']
if config_url is None:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='REST API Configuration URL is None')
config_url = "/" + config_url
config_cmd = '{0}/{1}/'.format(config_url, user_cmd)
response = self._rest_call("GET", config_cmd)
self._check_response(response)
data = jsonutils.loads(response.text)
self._rest_call("DELETE", config_url)
return data
def _show_cmd(self, user_cmd):
op_cmd = '/rest/op/show/{0}'.format(user_cmd)
response = self._rest_call("POST", op_cmd)
self._check_response(response)
op_url = response.headers['Location']
if op_url is None:
raise v_exc.VRouterOperationError(
ip_address=self.address, reason='REST API Op URL is None')
op_url = "/" + op_url
def get_response_wrapper():
response = self._rest_call("GET", op_url)
if not response.text.strip():
raise v_exc.VRouterOperationError(
ip_address=self.address, reason='Response is empty')
return response
response = vyatta_utils.retry(
get_response_wrapper, exceptions=(v_exc.VRouterOperationError,),
limit=self.REST_RETRY_LIMIT, delay=self.REST_RETRY_DELAY)
self._rest_call("DELETE", op_url)
return response.text
def _process_model(self):
model = None
show_output = self._show_cmd("version")
LOG.debug('Vyatta vRouter REST API: Version output : %s',
show_output)
if show_output is not None:
ma = re.compile(".+Description.+Brocade Vyatta\D+(\d+).+",
re.DOTALL)
result = ma.match(show_output)
LOG.debug('Vyatta vRouter REST API: Result : %s', result)
if result is not None:
model_str = result.group(1)
LOG.debug('Vyatta vRouter REST API: Result : %s',
model_str)
model = int(model_str) / 100
LOG.debug('Vyatta vRouter REST API: Result : %s',
model)
if model in (self._VROUTER_VSE_MODEL, self._VROUTER_VR_MODEL):
self._vrouter_model = model
LOG.debug('Vyatta vRouter REST API: Version : %s',
self._vrouter_model)
if self._vrouter_model is None:
raise v_exc.VRouterOperationError(
ip_address=self.address,
reason='Unable to process vRouter model info: {0}'
.format(model))
def _sync_cache(self):
show_output = self._show_cmd("configuration/all")
system_gw = None
gateway_str = self._get_config_block("protocols", show_output)
if gateway_str is not None:
system_gw = self._parse_system_gateway(gateway_str)
interfaces_str = self._get_config_block("interfaces", show_output)
if interfaces_str is not None:
self._process_interfaces(interfaces_str, system_gw)
if self._vrouter_model == self._VROUTER_VR_MODEL:
show_output = self._get_config_block("service", show_output)
nat_str = self._get_config_block("nat", show_output)
if nat_str is not None:
self._process_source_nat_rules(nat_str)
LOG.info(_LI("Vyatta vRouter cache ext gw %s"),
self._external_gw_info)
LOG.info(_LI("Vyatta vRouter cache router if dict %s"),
self._router_if_subnet_dict)
LOG.info(_LI("Vyatta vRouter cache floating ip dict %s"),
self._floating_ip_dict)
LOG.info(_LI("Vyatta vRouter cache NAT floating ip %s"),
self._nat_floating_ip_rule_num)
LOG.info(_LI("Vyatta vRouter cache NAT subnet ip %s"),
self._nat_subnet_ip_rule_num)
def _parse_system_gateway(self, search_str):
system_gw_ip = None
ma = re.compile(".+static.+route.+next-hop ([^ \n]+).+", re.DOTALL)
result = ma.match(search_str)
if result is not None:
system_gw_ip = result.group(1)
return system_gw_ip
def _process_interfaces(self, search_str, system_gw_ip):
for paragraph in search_str.split('}'):
ma = re.compile(
".+ethernet (eth\d+).+address ([^ \n]+).+description ([^ \n]+)"
".+", re.DOTALL)
result = ma.match(paragraph)
if result is not None:
eth_if_id = result.group(1)
ip_address = result.group(2)
description = result.group(3)
if description == self._EXTERNAL_GATEWAY_DESCR:
ext_gw_info = InterfaceInfo(eth_if_id,
ip_address, system_gw_ip)
self._external_gw_info = ext_gw_info
elif description == self._ROUTER_INTERFACE_DESCR:
# Cache the router interface info using subnet
router_if_subnet = self._get_subnet_from_ip_address(
ip_address)
self._router_if_subnet_dict[router_if_subnet] = None
def _process_source_nat_rules(self, search_str):
for paragraph in search_str.split('rule'):
ma = re.compile(
".(\d+).+outbound-interface.+source.+address ([^ \n]+)"
".+translation.+address ([^ \n]+).+", re.DOTALL)
result = ma.match(paragraph)
if result is not None:
rule_num = int(result.group(1))
src_addr = result.group(2)
translation_addr = result.group(3)
if (self._MAX_NAT_FLOATING_IP_RULE_NUM < rule_num <
self._MAX_NAT_SUBNET_IP_RULE_NUM and
src_addr in self._router_if_subnet_dict):
# Cache the SNAT rule for router interface
self._router_if_subnet_dict[src_addr] = rule_num
self._nat_subnet_ip_rule_num = rule_num
elif rule_num < self._MAX_NAT_FLOATING_IP_RULE_NUM:
self._nat_floating_ip_rule_num = rule_num
floating_ip = translation_addr
fixed_ip = src_addr
# Store SNAT and DNAT rule in cache
dict_key = self._get_floating_ip_key(floating_ip, fixed_ip)
self._floating_ip_dict[dict_key] = rule_num
def _get_config_block(self, input_str, search_str):
if search_str is not None:
index = search_str.find(input_str)
if index >= 0:
block_start = search_str[index + len(input_str):]
block_str = []
for line in block_start.split('\n'):
if line.startswith('}'):
break
block_str.append(line)
return ''.join(block_str)
return None
def _get_interfaces(self):
output = self._show_cmd('interfaces/detail')
return parsers.parse_interfaces(output)
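# Minimal usage sketch for VRouterRestAPIClient; the address, router name and
# interface values below are illustrative only:
#
#     client = VRouterRestAPIClient()
#     client.connect('192.0.2.10')      # detects the model and syncs caches
#     client.init_router('router1', admin_state_up=True)
#     client.add_interface_to_router({
#         VRouterRestAPIClient.IF_MAC_ADDRESS: 'fa:16:3e:00:00:01',
#         VRouterRestAPIClient.IF_IP_ADDRESS: '10.0.0.1/24',
#     })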
class ClientsPool(object):
_client_factory = VRouterRestAPIClient
def __init__(self, compute_client):
# TODO(dbogun): avoid dependency from nova client
self._compute_client = compute_client
self._active_connections = dict()
def get_by_address(self, router_id, address):
try:
client = self._active_connections[router_id]
except KeyError:
self._active_connections[router_id] = client = \
self._client_factory()
client.connect(address)
return client
def get_by_db_lookup(self, router_id, context):
try:
client = self._active_connections[router_id]
except KeyError:
self._active_connections[router_id] = client = \
self._make_connection(context, router_id)
return client
def _make_connection(self, context, router_id):
LOG.debug('Vyatta vRouter Driver::Get router driver')
try:
router = self._compute_client.servers.get(router_id)
except nova_exc.ClientException as ex:
LOG.error(_LE(
'Unable to find Vyatta vRouter instance {0}. '
'Exception {1}').format(router_id, ex))
raise v_exc.InvalidVRouterInstance(router_id=router_id)
network = vyatta_globals.get_management_network(context)
network = context.session.merge(network)
if network in context.session.new:
raise v_exc.InvalidInstanceConfiguration(
cause='Unable to find management network')
LOG.debug('Vyatta vRouter Management network: {0}'.format(
network['name']))
try:
address_map = router.addresses[network['name']]
address = address_map[0]['addr']
except (KeyError, IndexError):
raise v_exc.InvalidVRouterInstance(router_id=router_id)
return self._client_by_address(address)
def _client_by_address(self, address):
# Initialize vRouter API
try:
client = self._client_factory()
client.connect(address)
except Exception as ex:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Vyatta vRouter Driver: vRouter {0} '
'Connection exception {1}').format(
address, ex))
raise
return client
# REST API commands
class UserCmd(object):
def __init__(self, cmd_type, cmd):
self.cmd_type = cmd_type
self.cmd = cmd
def __repr__(self):
return '{0} {1!r}'.format(self.cmd_type, self.cmd)
def __eq__(self, other):
if not isinstance(other, UserCmd):
return NotImplemented
return (self.cmd_type, self.cmd) == (other.cmd_type, other.cmd)
def __ne__(self, other):
return not self.__eq__(other)
def make_url(self, prefix):
url = (prefix, self.cmd_type, self.cmd)
return '/'.join(url)
class SetCmd(UserCmd):
def __init__(self, cmd):
super(SetCmd, self).__init__("set", cmd)
class DeleteCmd(UserCmd):
def __init__(self, cmd):
super(DeleteCmd, self).__init__("delete", cmd)
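# How a command expands into a REST URL (derived from UserCmd.make_url; the
# session id below is hypothetical):
#   SetCmd('system/host-name/r1').make_url('/rest/conf/A1B2')
#   -> '/rest/conf/A1B2/set/system/host-name/r1'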
class InterfaceInfo(object):
"""Class for storing interface related info."""
def __init__(self, ethernet_if_id, ip_address,
gateway_ip=None):
self._ethernet_if_id = ethernet_if_id
self._ip_address = ip_address
self._gateway_ip = gateway_ip
self._ip_addr_without_cidr = None
def get_ethernet_if_id(self):
return self._ethernet_if_id
def get_ip_address(self):
return self._ip_address
def get_ip_addr_without_cidr(self):
if self._ip_addr_without_cidr is None:
# Find the subnet
ip_network = netaddr.IPNetwork(self._ip_address)
# Without CIDR format
self._ip_addr_without_cidr = str(ip_network.ip)
return self._ip_addr_without_cidr
def get_gateway_ip(self):
return self._gateway_ip
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return 'Eth if:{0} IP:{1} GW:{2}'.format(self._ethernet_if_id,
self._ip_address,
self._gateway_ip)
    def __repr__(self):
return self.__str__()
|
{
"content_hash": "4e2fbb3f4cd88e8b23e961eedd2fd714",
"timestamp": "",
"source": "github",
"line_count": 1032,
"max_line_length": 79,
"avg_line_length": 37.917635658914726,
"alnum_prop": 0.5559019703048734,
"repo_name": "Brocade-OpenSource/vrouter-plugins",
"id": "bfd43fb3af7803f7a7dd4a8c70b5a93dd710e8ec",
"size": "39782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vyatta/vrouter/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "129957"
}
],
"symlink_target": ""
}
|
"""Test the VLC media player Telnet config flow."""
from __future__ import annotations
from typing import Any
from unittest.mock import patch
from aiovlc.exceptions import AuthError, ConnectError
import pytest
from homeassistant import config_entries
from homeassistant.components.hassio import HassioServiceInfo
from homeassistant.components.vlc_telnet.const import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from tests.common import MockConfigEntry
@pytest.mark.parametrize(
"input_data, entry_data",
[
(
{
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
},
{
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
},
),
(
{
"password": "test-password",
},
{
"password": "test-password",
"host": "localhost",
"port": 4212,
},
),
],
)
async def test_user_flow(
hass: HomeAssistant, input_data: dict[str, Any], entry_data: dict[str, Any]
) -> None:
"""Test successful user flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login"
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
), patch(
"homeassistant.components.vlc_telnet.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
input_data,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == entry_data["host"]
assert result["data"] == entry_data
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize("source", [config_entries.SOURCE_USER])
async def test_abort_already_configured(hass: HomeAssistant, source: str) -> None:
"""Test we handle already configured host."""
entry_data = {
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": source},
data=entry_data,
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
@pytest.mark.parametrize("source", [config_entries.SOURCE_USER])
@pytest.mark.parametrize(
"error, connect_side_effect, login_side_effect",
[
("invalid_auth", None, AuthError),
("cannot_connect", ConnectError, None),
("unknown", Exception, None),
],
)
async def test_errors(
hass: HomeAssistant,
error: str,
connect_side_effect: Exception | None,
login_side_effect: Exception | None,
source: str,
) -> None:
"""Test we handle form errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}
)
with patch(
"homeassistant.components.vlc_telnet.config_flow.Client.connect",
side_effect=connect_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login",
side_effect=login_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"password": "test-password"},
)
assert result2["type"] == FlowResultType.FORM
assert result2["errors"] == {"base": error}
async def test_reauth_flow(hass: HomeAssistant) -> None:
"""Test successful reauth flow."""
entry_data: dict[str, Any] = {
"password": "old-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": config_entries.SOURCE_REAUTH,
"entry_id": entry.entry_id,
"unique_id": entry.unique_id,
},
data=entry_data,
)
with patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login"
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
), patch(
"homeassistant.components.vlc_telnet.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"password": "new-password"},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert len(mock_setup_entry.mock_calls) == 1
assert dict(entry.data) == {**entry_data, "password": "new-password"}
@pytest.mark.parametrize(
"error, connect_side_effect, login_side_effect",
[
("invalid_auth", None, AuthError),
("cannot_connect", ConnectError, None),
("unknown", Exception, None),
],
)
async def test_reauth_errors(
hass: HomeAssistant,
error: str,
connect_side_effect: Exception | None,
login_side_effect: Exception | None,
) -> None:
"""Test we handle reauth errors."""
entry_data = {
"password": "old-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": config_entries.SOURCE_REAUTH,
"entry_id": entry.entry_id,
"unique_id": entry.unique_id,
},
data=entry_data,
)
with patch(
"homeassistant.components.vlc_telnet.config_flow.Client.connect",
side_effect=connect_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login",
side_effect=login_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"password": "test-password"},
)
assert result2["type"] == FlowResultType.FORM
assert result2["errors"] == {"base": error}
async def test_hassio_flow(hass: HomeAssistant) -> None:
"""Test successful hassio flow."""
with patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login"
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
), patch(
"homeassistant.components.vlc_telnet.async_setup_entry",
return_value=True,
) as mock_setup_entry:
test_data = HassioServiceInfo(
config={
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
"addon": "VLC",
},
name="VLC",
slug="vlc",
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=test_data,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result2["type"] == FlowResultType.CREATE_ENTRY
assert result2["title"] == test_data.config["name"]
assert result2["data"] == test_data.config
assert len(mock_setup_entry.mock_calls) == 1
async def test_hassio_already_configured(hass: HomeAssistant) -> None:
"""Test successful hassio flow."""
entry_data = {
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
"addon": "vlc",
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data, unique_id="hassio")
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=HassioServiceInfo(config=entry_data, name="VLC", slug="vlc"),
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
@pytest.mark.parametrize(
"error, connect_side_effect, login_side_effect",
[
("invalid_auth", None, AuthError),
("cannot_connect", ConnectError, None),
("unknown", Exception, None),
],
)
async def test_hassio_errors(
hass: HomeAssistant,
error: str,
connect_side_effect: Exception | None,
login_side_effect: Exception | None,
) -> None:
"""Test we handle hassio errors."""
with patch(
"homeassistant.components.vlc_telnet.config_flow.Client.connect",
side_effect=connect_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.login",
side_effect=login_side_effect,
), patch(
"homeassistant.components.vlc_telnet.config_flow.Client.disconnect"
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=HassioServiceInfo(
config={
"password": "test-password",
"host": "1.1.1.1",
"port": 8888,
"name": "custom name",
"addon": "VLC",
},
name="VLC",
slug="vlc",
),
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result2["type"] == FlowResultType.ABORT
assert result2["reason"] == error
|
{
"content_hash": "307565b404a1964c8ff20ce08e3bf4d9",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 88,
"avg_line_length": 31.359649122807017,
"alnum_prop": 0.5940326340326341,
"repo_name": "w1ll1am23/home-assistant",
"id": "f5059517e3e36a6e7de3e6e61211ecf939b88d73",
"size": "10725",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/vlc_telnet/test_config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52277012"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
"""Module grouping tests for the
pydov.types.interpretaties.InformeleStratigrafie class."""
from pydov.types.interpretaties import InformeleStratigrafie
from pydov.util.dovutil import build_dov_url
from tests.abstract import AbstractTestTypes
location_wfs_getfeature = \
'tests/data/types/interpretaties/informele_stratigrafie/wfsgetfeature.xml'
location_wfs_feature = \
'tests/data/types/interpretaties/informele_stratigrafie/feature.xml'
location_dov_xml = \
'tests/data/types/interpretaties/informele_stratigrafie' \
'/informele_stratigrafie.xml'
class TestInformeleStratigrafie(AbstractTestTypes):
"""Class grouping tests for the
pydov.types.interpretaties.InformeleStratigrafie class."""
datatype_class = InformeleStratigrafie
namespace = 'http://dov.vlaanderen.be/ocdov/interpretaties'
pkey_base = build_dov_url('data/interpretatie/')
field_names = [
'pkey_interpretatie', 'pkey_boring',
'pkey_sondering', 'betrouwbaarheid_interpretatie', 'x', 'y',
'start_interpretatie_mtaw',
'diepte_laag_van', 'diepte_laag_tot', 'beschrijving']
field_names_subtypes = [
'diepte_laag_van', 'diepte_laag_tot', 'beschrijving']
field_names_nosubtypes = [
'pkey_interpretatie', 'pkey_boring',
'pkey_sondering', 'betrouwbaarheid_interpretatie', 'x', 'y',
'start_interpretatie_mtaw']
valid_returnfields = ('pkey_interpretatie', 'pkey_boring')
valid_returnfields_subtype = (
'pkey_interpretatie', 'diepte_laag_van', 'diepte_laag_tot')
inexistent_field = 'onbestaand'
|
{
"content_hash": "49cfa8521ce547c4e251797b75cf7775",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 78,
"avg_line_length": 39.75,
"alnum_prop": 0.7163522012578616,
"repo_name": "DOV-Vlaanderen/pydov",
"id": "45686bd5cc73b31adab5cf9832f2850bf85c3268",
"size": "1590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_types_itp_informelestratigrafie.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "724748"
}
],
"symlink_target": ""
}
|
from unittest import main
from json import loads
from qiita_pet.test.tornado_test_base import TestHandlerBase
class TestOntologyHandler(TestHandlerBase):
def test_patch(self):
arguments = {'op': 'add', 'path': 'ENA', 'value': 'new-term'}
response = self.patch('/ontology/', data=arguments)
self.assertEqual(response.code, 200)
exp = {'status': 'success', 'message': ''}
self.assertEqual(loads(response.body), exp)
if __name__ == '__main__':
main()
|
{
"content_hash": "c4077d61f808d1fb3cea57cecc9a9c63",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 69,
"avg_line_length": 31.25,
"alnum_prop": 0.642,
"repo_name": "squirrelo/qiita",
"id": "64eecf4a3b7332f1c703b3b79fbd39a51474d4da",
"size": "850",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qiita_pet/test/test_ontology.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1692"
},
{
"name": "HTML",
"bytes": "449930"
},
{
"name": "JavaScript",
"bytes": "5876"
},
{
"name": "Makefile",
"bytes": "6838"
},
{
"name": "PLSQL",
"bytes": "2359"
},
{
"name": "PLpgSQL",
"bytes": "45311"
},
{
"name": "Python",
"bytes": "1696427"
},
{
"name": "SQLPL",
"bytes": "6192"
},
{
"name": "Shell",
"bytes": "3062"
}
],
"symlink_target": ""
}
|
"""
Sort a linked list in O(n log n) time using constant space complexity.
"""
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
"""
:type head: ListNode
:rtype: ListNode
"""
def sortList(self, head):
# base case
if head == None or head.next == None:
return head
# split the list into 2 halves
left, right = self.split(head)
# sort each half
left = self.sortList(left)
right = self.sortList(right)
        # merge the sorted halves into one list
return self.merge(left, right)
"""
:desc cut a linked list into equal sized halves
:type head: ListNode
:rtype: (ListNode, ListNode)
"""
def split(self, head):
slow = head
fast = head
while fast.next and fast.next.next:
slow = slow.next
fast = fast.next.next
# terminate first half (head)
half_head = slow.next
slow.next = None
return (head, half_head)
"""
:desc merge 2 sorted lists into one
:type left: ListNode
:type right: ListNode
:rtype: ListNode
"""
def merge(self, left, right):
preHead = ListNode(-1)
pointer = preHead
while left and right:
# merge left first
if left.val < right.val:
pointer.next = left
left = left.next
# merge right first
else:
pointer.next = right
right = right.next
# move forward
pointer = pointer.next
# merge remaining parts
if left != None:
pointer.next = left
if right != None:
pointer.next = right
return preHead.next
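if __name__ == "__main__":
    # Quick self-check (illustrative): sort 4 -> 2 -> 1 -> 3.
    head = ListNode(4)
    head.next = ListNode(2)
    head.next.next = ListNode(1)
    head.next.next.next = ListNode(3)
    node = Solution().sortList(head)
    while node:
        print(node.val)  # expected output order: 1, 2, 3, 4
        node = node.next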
|
{
"content_hash": "ea7502fe83c28b40a54b12caf8f639e6",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 70,
"avg_line_length": 17.655172413793103,
"alnum_prop": 0.6373697916666666,
"repo_name": "Ahmed--Mohsen/leetcode",
"id": "a82deecfa5fba8cf87e0b48d4a27abd46a735959",
"size": "1536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sort_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "317482"
}
],
"symlink_target": ""
}
|
"""
Copyright 2014 CSIRO
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contains the VT module for a dataset created from a file pattern.
Copyright CSIRO 2014
"""
from vistrails.core.modules.basic_modules import List
from vistrails.core.modules.vistrails_module import Module, ModuleError, NotCacheable
from cwsl.configuration import configuration
from cwsl.core.pattern_dataset import PatternDataSet
from cwsl.core.constraint import Constraint
import os
import logging
logger = logging.getLogger('cwsl.vtmodules.dataset')
class DataReferenceSyntax(NotCacheable, Module):
"""
"""
# User defined constraints
_input_ports = [('added_constraints', List,
{'defaults': ["[]"]})]
_output_ports = [('out_dataset', '(csiro.au.cwsl:VtDataSet)')]
def __init__(self, pattern, constraints=None):
Module.__init__(self)
self.pattern = pattern
self.constraints = constraints
def get_filepath_patterns(self):
"""
Generate full filepath pattern using global configuration options
and file pattern.
        TODO: determine which paths to use (user, auth or both)
"""
if configuration.check('drs_basepath'):
basepath = configuration.drs_basepath
else:
raise ModuleError(self,
("No authorative path set"
" in the CWSL Configuration"))
        patterns = os.path.join(basepath, self.pattern)
return patterns
def compute(self):
# Determine file path
patterns = self.get_filepath_patterns()
logger.debug('Using pattern %s' % patterns)
# Create constraints
constraints = [Constraint(attribute, [values])
for attribute, values in self.constraints.iteritems()]
        # Add user constraints
user_constraints = self.getInputFromPort("added_constraints")
if user_constraints:
constraints.extend(user_constraints)
else:
raise ModuleError(self, "No constraints set on DataSet - you can not run a workflow on the entire DataSet")
        # Create dataset based on the file search path and constraints
dataset = PatternDataSet(patterns, constraints)
if not dataset.files:
error_string = "No files found for this dataset with constraints: {}".format(constraints)
error_string = error_string.replace('],', '],\n')
logger.error(error_string)
raise ModuleError(self, error_string)
self.setResult('out_dataset', dataset)
class RegionalClimateModel(DataReferenceSyntax):
"""
File path search based on the CORDEX DRS (ref).
Path structure: <path>/<mip>/<product>/<domain>/<institute>/<model>/<experiment>/<ensemble>/<RCMName>/<RCMVersionID>/<frequency>/<variable>/<filename>
where:
<path>: Configured via menu: Packages->CWSL->Configure: authorative_path
<filename>: <variable>_<domain>_<model>_<experiment>_<ensemble>_<RCMName>_<RCMVersionID>_<frequency>_<time_span>
"""
# CORDEX DATA REFERENCE SYNTAX
pattern = '%mip%/%product%/%domain%/%institute%/%model%/%experiment%/%ensemble%/%RCMName%/%RCMVersionID%/%frequency%/%variable%/%variable%_%domain%_%model%_%experiment%_%ensemble%_%RCMName%_%RCMVersionID%_%frequency%_%time_span%.nc'
# Restrict to RCM
constraints = {'product': 'RCM'}
def __init__(self):
super(RegionalClimateModel, self).__init__(self.pattern, self.constraints)
class GlobalClimateModel(DataReferenceSyntax):
"""
File path search based on the CMIP5 GCM DRS (http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf).
Path structure: <path>/<mip>/<product>/<institute>/<model>/<experiment>/<frequency>/<realm>/<variable>/<ensemble>/<filename>
where:
<path>: Configured via menu: Packages->CWSL->Configure: authorative_path
<filename>: <variable>_<mip_table>_<model>_<experiment>_<ensemble>_<time_span>
"""
# DATA REFERENCE SYNTAX
pattern = '%mip%/%product%/%institute%/%model%/%experiment%/%frequency%/%realm%/%variable%/%ensemble%/%variable%_%mip_table%_%model%_%experiment%_%ensemble%_%time_span%.nc'
# Restrict to GCM
constraints = {'product': 'GCM'}
def __init__(self):
super(GlobalClimateModel, self).__init__(self.pattern, self.constraints)
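# A concrete path the GCM pattern above could match (all values below are
# illustrative, not real holdings):
#   CMIP5/GCM/CSIRO-BOM/ACCESS1-0/historical/mon/atmos/tas/r1i1p1/
#       tas_Amon_ACCESS1-0_historical_r1i1p1_195001-200512.nc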
class CMIP5(GlobalClimateModel):
"""
File path search for CMIP5
Path structure: <path>/<mip>/<product>/<institute>/<model>/<experiment>/<frequency>/<realm>/<variable>/<ensemble>/<filename>
where:
<path>: Configured via menu: Packages->CWSL->Configure: authorative_path
<filename>: <variable>_<mip_table>_<model>_<experiment>_<ensemble>_<time_span>
"""
constraints = {'mip': 'CMIP5',
'product': 'GCM',
}
class CMIP3(GlobalClimateModel):
"""
File path search for CMIP3 based on CMIP5 DRS structure (http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf)
Path structure: <path>/<mip>/<product>/<institute>/<model>/<experiment>/<frequency>/<realm>/<variable>/<ensemble>/<filename>
where:
<path>: Configured via menu: Packages->CWSL->Configure: authorative_path
<filename>: <variable>_<mip_table>_<model>_<experiment>_<ensemble>_<time_span>
"""
constraints = {'mip': 'CMIP3',
'product': 'GCM',
}
class RegionalClimateModel_SDMa_NRM(RegionalClimateModel):
"""
Creates a DataSet based on the CORDEX DRS.
"""
constraints = {'mip': 'CMIP5',
'product': 'RCM',
'domain': 'AUS',
'RCMName': 'BOM-SDMa-NRM'}
class RegionalClimateModel_CCAM_NRM(RegionalClimateModel):
"""
Creates a DataSet based on the CORDEX DRS.
"""
constraints = {'mip': 'CMIP5',
'product': 'RCM',
'domain': 'global',
'RCMName': 'CSIRO-CCAM-NRM50'}
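# For reference, user-supplied constraints on the 'added_constraints' port
# are cwsl Constraint objects, e.g. (illustrative values):
#   [Constraint('model', ['ACCESS1-0']), Constraint('experiment', ['rcp85'])]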
|
{
"content_hash": "dfa1f963d5f1a78ebe566c40d4e94e34",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 236,
"avg_line_length": 35.88461538461539,
"alnum_prop": 0.6420150053590568,
"repo_name": "CWSL/cwsl-mas",
"id": "29e8607724de168f836799212f9f140aeb60b5b8",
"size": "6531",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cwsl/vt_modules/drs_dataset.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "246401"
}
],
"symlink_target": ""
}
|
import unittest.mock
# noinspection PyUnresolvedReferences
from test_client import client, flask_app
def test_empty(client):
r = client.get('/')
# print("Got {}".format(r))
assert r is not None
def test_register_validation_valid():
# 3 A's of test: Arrange, Act, then Assert
# Arrange
from pypi_vm.viewmodels.account.register_viewmodel import RegisterViewModel
form_data = {
'name': 'Michael',
'email': 'michael@talkpython.fm',
'password': 'a'
}
with flask_app.app.test_request_context(path='/account/register', data=form_data):
vm = RegisterViewModel()
# Act
target = 'pypi_vm.services.user_service.find_user_by_email'
with unittest.mock.patch(target, return_value=None):
vm.validate()
# Assert:
assert vm.error is None
def test_register_validation_existing_user():
# Arrange
from pypi_vm.viewmodels.account.register_viewmodel import RegisterViewModel
from pypi_vm.data.users import User
form_data = {
'name': 'Michael',
'email': 'michael@talkpython.fm',
'password': 'a'
}
with flask_app.app.test_request_context(path='/account/register', data=form_data):
vm = RegisterViewModel()
# Act
target = 'pypi_vm.services.user_service.find_user_by_email'
with unittest.mock.patch(target, return_value=User()):
vm.validate()
# Assert:
assert vm.error is not None
assert 'exist' in vm.error
def test_register_validation_no_email():
# Arrange
from pypi_vm.viewmodels.account.register_viewmodel import RegisterViewModel
form_data = {
'email': '',
'password': 'a'
}
with flask_app.app.test_request_context(path='/account/register', data=form_data):
vm = RegisterViewModel()
# Act
vm.validate()
# Assert:
assert vm.error is not None
assert 'email' in vm.error
|
{
"content_hash": "730afd4a64cb0ec0686899e092c9880c",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 86,
"avg_line_length": 26.283783783783782,
"alnum_prop": 0.6349614395886889,
"repo_name": "Wintellect/WintellectWebinars",
"id": "25592b48d9ebbb9599c056f63a042b77de0d752a",
"size": "1945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2019-06-06-ten-tips-python-web-devs-kennedy/code/top_10_web_explore/ex07_viewmodels/tests/account_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "47583"
},
{
"name": "CSS",
"bytes": "39803"
},
{
"name": "HTML",
"bytes": "87870"
},
{
"name": "JavaScript",
"bytes": "4383753"
},
{
"name": "Jupyter Notebook",
"bytes": "234737"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "208421"
},
{
"name": "SCSS",
"bytes": "152"
},
{
"name": "Shell",
"bytes": "4251"
},
{
"name": "TypeScript",
"bytes": "142946"
}
],
"symlink_target": ""
}
|
"""Collection of tests around log handling."""
import logging
import pytest
from cookiecutter.log import configure_logger
def create_log_records():
"""Test function, create log entries in expected stage of test."""
cookiecutter_logger = logging.getLogger('cookiecutter')
foo_logger = logging.getLogger('cookiecutter.foo')
foobar_logger = logging.getLogger('cookiecutter.foo.bar')
cookiecutter_logger.info('Welcome to Cookiecutter')
cookiecutter_logger.debug('Generating project from pytest-plugin')
foo_logger.info('Loading user config from home dir')
foobar_logger.debug("I don't know.")
foobar_logger.debug('I wanted to save the world.')
foo_logger.error('Aw, snap! Something went wrong')
cookiecutter_logger.debug('Successfully generated project')
@pytest.fixture
def info_messages():
"""Fixture. List of test info messages."""
return [
'INFO: Welcome to Cookiecutter',
'INFO: Loading user config from home dir',
'ERROR: Aw, snap! Something went wrong',
]
@pytest.fixture
def debug_messages():
"""Fixture. List of test debug messages."""
return [
'INFO cookiecutter: ' 'Welcome to Cookiecutter',
'DEBUG cookiecutter: ' 'Generating project from pytest-plugin',
'INFO cookiecutter.foo: ' 'Loading user config from home dir',
"DEBUG cookiecutter.foo.bar: " "I don't know.",
'DEBUG cookiecutter.foo.bar: ' 'I wanted to save the world.',
'ERROR cookiecutter.foo: ' 'Aw, snap! Something went wrong',
'DEBUG cookiecutter: ' 'Successfully generated project',
]
@pytest.fixture
def info_logger():
"""Fixture. Call cookiecutter logger setup with `info` debug level."""
return configure_logger(stream_level='INFO')
@pytest.fixture
def debug_logger():
"""Fixture. Call cookiecutter logger setup with `debug` debug level."""
return configure_logger(stream_level='DEBUG')
@pytest.fixture
def debug_file(tmpdir):
"""Fixture. Generate debug file location for tests."""
return tmpdir / 'pytest-plugin.log'
@pytest.fixture
def info_logger_with_file(debug_file):
"""Fixture. Call cookiecutter logger setup with `info` debug level + `file`."""
return configure_logger(stream_level='INFO', debug_file=str(debug_file))
def test_info_stdout_logging(caplog, info_logger, info_messages):
"""Test that stdout logs use info format and level."""
[stream_handler] = info_logger.handlers
assert isinstance(stream_handler, logging.StreamHandler)
assert stream_handler.level == logging.INFO
create_log_records()
stream_messages = [
stream_handler.format(r)
for r in caplog.records
if r.levelno >= stream_handler.level
]
assert stream_messages == info_messages
def test_debug_stdout_logging(caplog, debug_logger, debug_messages):
"""Test that stdout logs use debug format and level."""
[stream_handler] = debug_logger.handlers
assert isinstance(stream_handler, logging.StreamHandler)
assert stream_handler.level == logging.DEBUG
create_log_records()
stream_messages = [
stream_handler.format(r)
for r in caplog.records
if r.levelno >= stream_handler.level
]
assert stream_messages == debug_messages
def test_debug_file_logging(caplog, info_logger_with_file, debug_file, debug_messages):
"""Test that logging to stdout uses a different format and level than \
    the file handler."""
[file_handler, stream_handler] = info_logger_with_file.handlers
assert isinstance(file_handler, logging.FileHandler)
assert isinstance(stream_handler, logging.StreamHandler)
assert stream_handler.level == logging.INFO
assert file_handler.level == logging.DEBUG
create_log_records()
assert debug_file.exists()
# Last line in the log file is an empty line
assert debug_file.readlines(cr=False) == debug_messages + ['']
|
{
"content_hash": "a246f59252b62f696afb7efae4759bea",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 87,
"avg_line_length": 32.56198347107438,
"alnum_prop": 0.6941624365482234,
"repo_name": "michaeljoseph/cookiecutter",
"id": "7bcbff733198400cf70514d7504aaf38f3910232",
"size": "3965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_log.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3206"
},
{
"name": "Python",
"bytes": "215886"
},
{
"name": "Shell",
"bytes": "161"
}
],
"symlink_target": ""
}
|
import json
import pyudev
import re
import subprocess # nosec
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = '''
---
module: find_disks
short_description: Return list of devices containing a specified name or label
description:
    - This will return a list of all devices whose GPT partition name
      or filesystem label matches the specified name.
options:
match_mode:
description:
- Label match mode, either strict or prefix
default: 'strict'
required: False
choices: [ "strict", "prefix" ]
type: str
name:
description:
- Partition name or filesystem label
required: True
type: str
aliases: [ 'partition_name' ]
use_udev:
description:
- When True, use Linux udev to read disk info such as partition labels,
uuid, etc. Some older host operating systems have issues using udev to
        get the info this module needs. Set to False to fall back to lower-level
        commands such as blkid to retrieve this information. Most users
should not need to change this.
default: True
required: False
type: bool
author: Sam Yaple
'''
EXAMPLES = '''
- hosts: ceph-osd
tasks:
    - name: Return all valid formatted devices with the name KOLLA_CEPH_OSD
find_disks:
name: 'KOLLA_CEPH_OSD'
register: osds
- hosts: swift-object-server
tasks:
- name: Return all valid devices with the name KOLLA_SWIFT
find_disks:
name: 'KOLLA_SWIFT'
register: swift_disks
- hosts: swift-object-server
tasks:
- name: Return all valid devices with wildcard name 'swift_d*'
find_disks:
        name: 'swift_d'
        match_mode: 'prefix'
register: swift_disks
'''
PREFERRED_DEVICE_LINK_ORDER = [
'/dev/disk/by-uuid',
'/dev/disk/by-partuuid',
'/dev/disk/by-parttypeuuid',
'/dev/disk/by-label',
'/dev/disk/by-partlabel'
]
def get_id_part_entry_name(dev, use_udev):
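    # Read the GPT partition name, from udev properties or (fallback) parsed sgdisk output.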
if use_udev:
dev_name = dev.get('ID_PART_ENTRY_NAME', '')
else:
part = re.sub(r'.*[^\d]', '', dev.device_node)
parent = dev.find_parent('block').device_node
# NOTE(Mech422): Need to use -i as -p truncates the partition name
out = subprocess.Popen(['/usr/sbin/sgdisk', '-i', part, # nosec
parent],
stdout=subprocess.PIPE).communicate()
match = re.search(r'Partition name: \'(\w+)\'', out[0])
if match:
dev_name = match.group(1)
else:
dev_name = ''
return dev_name
def get_id_fs_uuid(dev, use_udev):
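    # Read the filesystem UUID, from udev properties or (fallback) blkid output.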
if use_udev:
id_fs_uuid = dev.get('ID_FS_UUID', '')
else:
out = subprocess.Popen(['/usr/sbin/blkid', '-o', 'export', # nosec
dev.device_node],
stdout=subprocess.PIPE).communicate()
match = re.search(r'\nUUID=([\w-]+)', out[0])
if match:
id_fs_uuid = match.group(1)
else:
id_fs_uuid = ''
return id_fs_uuid
def is_dev_matched_by_name(dev, name, mode, use_udev):
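    # Partitions match on their GPT partition name; whole disks match on filesystem label.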
if dev.get('DEVTYPE', '') == 'partition':
dev_name = get_id_part_entry_name(dev, use_udev)
else:
dev_name = dev.get('ID_FS_LABEL', '')
if mode == 'strict':
return dev_name == name
elif mode == 'prefix':
return dev_name.startswith(name)
else:
return False
def find_disk(ct, name, match_mode, use_udev):
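    # Yield every block device whose name or label matches under the given mode.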
for dev in ct.list_devices(subsystem='block'):
if is_dev_matched_by_name(dev, name, match_mode, use_udev):
yield dev
def get_device_link(dev):
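    # Prefer a stable /dev/disk/by-* symlink; fall back to the raw device node.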
for preferred_link in PREFERRED_DEVICE_LINK_ORDER:
for link in dev.device_links:
if link.startswith(preferred_link):
return link
return dev.device_node
def extract_disk_info(ct, dev, name, use_udev):
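    # Yield a dict describing the matched device; partitions also get journal details
    # (same-disk partition 2, or an external partition labelled <name>_J).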
if not dev:
return
kwargs = dict()
kwargs['fs_uuid'] = get_id_fs_uuid(dev, use_udev)
kwargs['fs_label'] = dev.get('ID_FS_LABEL', '')
if dev.get('DEVTYPE', '') == 'partition':
kwargs['partition_label'] = name
kwargs['device'] = dev.find_parent('block').device_node
kwargs['partition'] = dev.device_node
kwargs['partition_num'] = re.sub(r'.*[^\d]', '', dev.device_node)
if is_dev_matched_by_name(dev, name, 'strict', use_udev):
kwargs['external_journal'] = False
# NOTE(jeffrey4l): this is only used for bootstrap osd stage and
            # there is no journal partition at all. So it is OK to use
# device_node directly.
kwargs['journal'] = dev.device_node[:-1] + '2'
kwargs['journal_device'] = kwargs['device']
kwargs['journal_num'] = 2
else:
kwargs['external_journal'] = True
journal_name = get_id_part_entry_name(dev, use_udev) + '_J'
for journal in find_disk(ct, journal_name, 'strict', use_udev):
kwargs['journal'] = get_device_link(journal)
kwargs['journal_device'] = \
journal.find_parent('block').device_node
kwargs['journal_num'] = \
re.sub(r'.*[^\d]', '', journal.device_node)
break
if 'journal' not in kwargs:
# NOTE(SamYaple): Journal not found, not returning info
return
else:
kwargs['device'] = dev.device_node
yield kwargs
def extract_disk_info_bs(ct, dev, name, use_udev):
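    # Classify BlueStore partitions by label suffix: _B block, _D block.db,
    # _W block.wal, and _BS for the OSD data partition itself.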
if not dev:
return
kwargs = dict(bs_blk_label='', bs_blk_device='', bs_db_label='',
bs_db_device='', bs_wal_label='', bs_wal_device='',
bs_wal_partition_num='', bs_db_partition_num='',
bs_blk_partition_num='', partition='', partition_label='',
partition_num='', device='', partition_usage='')
kwargs['fs_uuid'] = get_id_fs_uuid(dev, use_udev)
kwargs['fs_label'] = dev.get('ID_FS_LABEL', '')
if dev.get('DEVTYPE', '') == 'partition':
actual_name = get_id_part_entry_name(dev, use_udev)
if (('BOOTSTRAP_BS' in name or 'DATA_BS' in name)
and name in actual_name):
if actual_name.endswith("_B"):
kwargs['partition_usage'] = 'block'
kwargs['bs_blk_partition_num'] = \
re.sub(r'.*[^\d]', '', dev.device_node)
kwargs['bs_blk_device'] = dev.find_parent('block').device_node
kwargs['bs_blk_label'] = actual_name
return kwargs
if actual_name.endswith("_D"):
kwargs['partition_usage'] = 'block.db'
kwargs['bs_db_partition_num'] = \
re.sub(r'.*[^\d]', '', dev.device_node)
kwargs['bs_db_device'] = dev.find_parent('block').device_node
kwargs['bs_db_label'] = actual_name
return kwargs
if actual_name.endswith("_W"):
kwargs['partition_usage'] = 'block.wal'
kwargs['bs_wal_partition_num'] = \
re.sub(r'.*[^\d]', '', dev.device_node)
kwargs['bs_wal_device'] = dev.find_parent('block').device_node
kwargs['bs_wal_label'] = actual_name
return kwargs
if '_BS' in actual_name:
kwargs['partition_usage'] = 'osd'
kwargs['partition'] = dev.find_parent('block').device_node
kwargs['partition_label'] = actual_name
kwargs['partition_num'] = \
re.sub(r'.*[^\d]', '', dev.device_node)
kwargs['device'] = dev.find_parent('block').device_node
return kwargs
return 0
def nb_of_osd(disks):
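    # Count OSD data partitions and collect their labels for pairing.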
osd_info = dict()
osd_info['block_label'] = list()
nb_of_osds = 0
for item in disks:
if item['partition_usage'] == 'osd':
osd_info['block_label'].append(item['partition_label'])
nb_of_osds += 1
osd_info['nb_of_osd'] = nb_of_osds
return osd_info
def combine_info(disks):
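    # Fold each OSD's block/block.db/block.wal partner partitions into one record per OSD.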
info = list()
osds = nb_of_osd(disks)
osd_id = 0
while osd_id < osds['nb_of_osd']:
final = dict()
idx = 0
idx_osd = idx_blk = idx_wal = idx_db = -1
for item in disks:
if (item['partition_usage'] == 'osd' and
item['partition_label'] == osds['block_label'][osd_id]):
idx_osd = idx
elif (item['partition_usage'] == 'block' and
item['bs_blk_label'] ==
osds['block_label'][osd_id] + "_B"):
idx_blk = idx
elif (item['partition_usage'] == 'block.wal' and
item['bs_wal_label'] ==
osds['block_label'][osd_id] + "_W"):
idx_wal = idx
elif (item['partition_usage'] == 'block.db' and
item['bs_db_label'] ==
osds['block_label'][osd_id] + "_D"):
idx_db = idx
idx = idx + 1
# write the information of block.db and block.wal to block item
# if block.db and block.wal are found
if idx_blk != -1:
disks[idx_osd]['bs_blk_device'] = disks[idx_blk]['bs_blk_device']
disks[idx_osd]['bs_blk_label'] = disks[idx_blk]['bs_blk_label']
disks[idx_osd]['bs_blk_partition_num'] = \
disks[idx_blk]['bs_blk_partition_num']
disks[idx_blk]['partition_usage'] = ''
if idx_wal != -1:
disks[idx_osd]['bs_wal_device'] = disks[idx_wal]['bs_wal_device']
disks[idx_osd]['bs_wal_partition_num'] = \
disks[idx_wal]['bs_wal_partition_num']
disks[idx_osd]['bs_wal_label'] = disks[idx_wal]['bs_wal_label']
disks[idx_wal]['partition_usage'] = ''
if idx_db != -1:
disks[idx_osd]['bs_db_device'] = disks[idx_db]['bs_db_device']
disks[idx_osd]['bs_db_partition_num'] = \
disks[idx_db]['bs_db_partition_num']
disks[idx_osd]['bs_db_label'] = disks[idx_db]['bs_db_label']
disks[idx_db]['partition_usage'] = ''
final['fs_uuid'] = disks[idx_osd]['fs_uuid']
final['fs_label'] = disks[idx_osd]['fs_label']
final['bs_blk_device'] = disks[idx_osd]['bs_blk_device']
final['bs_blk_label'] = disks[idx_osd]['bs_blk_label']
final['bs_blk_partition_num'] = disks[idx_osd]['bs_blk_partition_num']
final['bs_db_device'] = disks[idx_osd]['bs_db_device']
final['bs_db_partition_num'] = disks[idx_osd]['bs_db_partition_num']
final['bs_db_label'] = disks[idx_osd]['bs_db_label']
final['bs_wal_device'] = disks[idx_osd]['bs_wal_device']
final['bs_wal_partition_num'] = disks[idx_osd]['bs_wal_partition_num']
final['bs_wal_label'] = disks[idx_osd]['bs_wal_label']
final['device'] = disks[idx_osd]['device']
final['partition'] = disks[idx_osd]['partition']
final['partition_label'] = disks[idx_osd]['partition_label']
final['partition_num'] = disks[idx_osd]['partition_num']
final['external_journal'] = False
final['journal'] = ''
final['journal_device'] = ''
final['journal_num'] = 0
info.append(final)
disks[idx_osd]['partition_usage'] = ''
osd_id += 1
return info
def main():
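    # Ansible entry point: collect matching disks and return them as a JSON list.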
argument_spec = dict(
match_mode=dict(required=False, choices=['strict', 'prefix'],
default='strict'),
name=dict(aliases=['partition_name'], required=True, type='str'),
use_udev=dict(required=False, default=True, type='bool')
)
module = AnsibleModule(argument_spec)
match_mode = module.params.get('match_mode')
name = module.params.get('name')
use_udev = module.params.get('use_udev')
try:
ret = list()
ct = pyudev.Context()
for dev in find_disk(ct, name, match_mode, use_udev):
if '_BS' in name:
info = extract_disk_info_bs(ct, dev, name, use_udev)
if info:
ret.append(info)
else:
for info in extract_disk_info(ct, dev, name, use_udev):
if info:
ret.append(info)
if '_BS' in name and len(ret) > 0:
ret = combine_info(ret)
module.exit_json(disks=json.dumps(ret))
except Exception as e:
module.exit_json(failed=True, msg=repr(e))
if __name__ == '__main__':
main()
|
{
"content_hash": "4c4f4c85ea5540243ec628542cb4327c",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 79,
"avg_line_length": 36.96774193548387,
"alnum_prop": 0.5398223068380137,
"repo_name": "openstack/kolla",
"id": "405852f0b8f6a021d2c0a568182a6645f5bc1f3c",
"size": "13335",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docker/kolla-toolbox/find_disks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "235221"
},
{
"name": "Python",
"bytes": "227320"
},
{
"name": "Shell",
"bytes": "85926"
}
],
"symlink_target": ""
}
|
import sublime, sublime_plugin, subprocess, os
class PhpCbfCommand(sublime_plugin.TextCommand):
def run(self, edit):
settings = sublime.load_settings("phpcbf.sublime-settings")
syntaxes = settings.get('syntaxes')
cur_syntax = self.view.settings().get('syntax')
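        # bail out unless the current file's syntax is configured for fixing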
if cur_syntax not in syntaxes:
return
path = settings.get('path', "")
level = settings.get('level', 'psr2')
patch = settings.get('patch', False)
suffix = settings.get('suffix', '')
sniffs = settings.get('sniffs', '')
tab_width = settings.get('tab_width', False)
if not patch:
patch = "--no-patch"
else:
patch = ""
encoding = self.view.encoding()
if encoding == 'Undefined':
encoding = settings.get('encoding', 'UTF-8')
if not path:
path = "phpcbf"
if suffix:
suffix = ' --suffix='+suffix
        sniff_string = ''
        if sniffs:
            # build a comma-separated --sniffs flag from the configured sniffs
            sniff_string = ' --sniffs=' + ','.join(sniffs)
        if tab_width:
            # phpcbf expects the hyphenated --tab-width flag; coerce in case the
            # setting is stored as a number
            tab_width = ' --tab-width=' + str(tab_width)
        else:
            tab_width = ''
file_name = self.view.file_name()
if file_name:
call = path+" "+patch+suffix+sniff_string+tab_width+' --standard='+level+" --encoding="+encoding+" "+file_name
try:
output = subprocess.check_output(call, shell=True,universal_newlines=True)
except subprocess.CalledProcessError as e:
print(e.output)
sublime.status_message("An error occured while fixing, please check the console")
else:
sublime.status_message("All fixable errors have been fixed")
else:
sublime.status_message("Please save the file first")
class PhpCbfListener(sublime_plugin.EventListener):
def on_post_save(self, view):
settings = sublime.load_settings("phpcbf.sublime-settings")
if(settings.get('on_save', True)):
view.run_command('php_cbf')
def on_load(self, view):
settings = sublime.load_settings("phpcbf.sublime-settings")
if(settings.get('on_load', True)):
view.run_command('php_cbf')
|
{
"content_hash": "5ac140ca3d73f10245bec5927d3b3d53",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 122,
"avg_line_length": 35.55384615384615,
"alnum_prop": 0.5629597576806578,
"repo_name": "Ennosuke/PHP-Codebeautifier",
"id": "88a9d56a01b9eeb89e95c872a103d206c9dfabb9",
"size": "2311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phpcbf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4374"
},
{
"name": "Python",
"bytes": "2311"
}
],
"symlink_target": ""
}
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import re
import sys
from colourmaps import viridis_colormap
from pipelines import inMemoryIMS_low_mem
from collections import OrderedDict
# webserver
import bottle
# isotope pattern generation
from pyMS.pyisocalc import pyisocalc
from pyIMS.ion_datacube import ion_datacube
from pyImagingMSpec.image_measures import *
from cpyImagingMSpec import ImzbReader
def get_datacube(reader, mzs, ppm):
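    # Assemble an ion_datacube with one extracted m/z image per requested mass,
    # masking out pixels that carry no data.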
cube = ion_datacube()
cube.xic = []
cube.nRows = reader.height
cube.nColumns = reader.width
cube.pixel_indices = None
for mz in mzs:
img = reader.get_mz_image(mz, ppm)
if cube.pixel_indices is None:
cube.pixel_indices = np.where(img.ravel() >= 0)[0]
img = img.ravel()[cube.pixel_indices]
img[img < 0] = 0.0
cube.xic.append(img)
return cube
class ImageWebserver(bottle.Bottle):
def __init__(self, *args, **kwargs):
super(ImageWebserver, self).__init__(*args, **kwargs)
def run(self, filenames, **kwargs):
self.load_data(filenames)
print "running webserver..."
super(ImageWebserver, self).run(**kwargs)
def load_data(self, filenames):
def prettify_fn(fn):
import os
return os.path.splitext(os.path.basename(fn))[0]
if len(filenames) == 0:
print "usage: python simple_webserver.py <file.imzML>"
print " python simple_webserver.py <file.hdf5>"
print " python simple_webserver.py <file1.imzb> [<file2.imzb> ...]"
sys.exit(0)
if len(filenames) > 1 and not all(fn.endswith(".imzb") for fn in filenames):
print "multiple-file mode is supported only for .imzb files"
sys.exit(2)
if len(filenames) == 1 and not filenames[0].endswith(".imzb"):
filename = filenames[0]
if filename.endswith(".imzML") or filename.endswith(".hdf5"):
print "loading data..."
self.data = inMemoryIMS_low_mem(filename)
self.in_memory = True
self.paths = { prettify_fn(filename) : filename }
else:
print "unsupported format"
sys.exit(3)
else:
self.paths = OrderedDict()
for fn in filenames:
if os.path.exists(fn):
self.paths[prettify_fn(fn)] = ImzbReader(fn)
else:
print "WARNING: file " + fn + " doesn't exist, skipping"
self.in_memory = False
def get_ion_image(self, dataset, mz, tol):
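        # Dispatch to the in-memory dataset or to the per-file ImzbReader.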
if self.in_memory is True:
return self.data.get_ion_image(np.array([mz]), tol).xic_to_image(0)
else:
return self.paths[dataset].get_mz_image(mz, tol)
def get_datacube(self, dataset, mzs, tol):
if self.in_memory is True:
return self.data.get_ion_image(np.array(mzs), tol)
else:
return get_datacube(self.paths[dataset], mzs, tol)
app = ImageWebserver()
@app.route('/', method='GET')
def show_form():
return bottle.template('show_images', hs_removal=True, selected_dataset=app.paths.iterkeys().next(),
isotope_patterns={}, formula="", selected_adduct='H', pretty_formula="", tol=5,
resolution=100000, npeaks=4, datasets=app.paths.keys())
import io
import os
import numpy as np
from matplotlib.colors import Normalize
cmap = viridis_colormap()
@app.route("/show_image/<dataset>/<mz>/<tol>")
def generate_image(dataset, mz, tol):
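    # Render the extracted ion image as a viridis-colored PNG; pixels without data become transparent.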
mz, tol = float(mz), float(tol)
img = app.get_ion_image(dataset, mz, tol)
if img.shape[0] > img.shape[1]:
img = img.T
buf = io.BytesIO()
mask = img >= 0
if bottle.request.query.remove_hotspots:
pc = np.percentile(img[mask], 99)
img[img > pc] = pc
values = img[mask]
norm = Normalize(vmin=np.min(values), vmax=np.max(values))
colorized_img = np.zeros((img.shape[0], img.shape[1], 4))
colorized_img[mask] = cmap(norm(values))
# set alpha channel to 0 for pixels with no data
colorized_img[img < 0, -1] = 0
plt.imsave(buf, colorized_img, format='png')
bottle.response.content_type = 'image/png'
buf.seek(0, os.SEEK_END)
bottle.response.content_length = buf.tell()
buf.seek(0)
return buf
@app.route("/correlation_plot/<dataset>/<formula>/<adduct>/<mzs>/<intensities>/<tol>")
def generate_correlation_plot(dataset, formula, adduct, mzs, intensities, tol):
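    # Left panel: per-pixel isotope pattern agreement; right panel: principal vs. secondary peak intensities.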
mzs = np.array(map(float, mzs.split(",")))
intensities = np.array(map(float, intensities.split(",")))
order = intensities.argsort()[::-1]
mzs = mzs[order]
intensities = intensities[order]
tol = float(tol)
datacube = app.get_datacube(dataset, np.array(mzs), tol)
images = datacube.xic
buf = io.BytesIO()
transform = np.sqrt
base_intensities = images[0]
plt.figure(figsize=(16, 8))
ax1 = plt.subplot(1, 2, 1)
plt.title("per-pixel isotope pattern agreement (higher is better)")
n = min(len(datacube.xic), len(intensities))
full_images = np.array([transform(datacube.xic_to_image(i)) for i in xrange(n)])
full_images /= np.linalg.norm(full_images, ord=2, axis=0)
normalized_ints = transform(intensities[:n])
normalized_ints /= np.linalg.norm(normalized_ints)
#correlations = np.einsum("ijk,i", full_images, normalized_ints)
#plt.imshow(correlations, vmin=0, vmax=1)
deviations = 1 - np.amax(np.abs(np.transpose(full_images, (1, 2, 0)) - normalized_ints), axis=2)
if deviations.shape[0] > deviations.shape[1]:
deviations = deviations.T
plt.imshow(deviations, vmin=0, vmax=1, cmap="gnuplot", interpolation='none')
plt.axis('off')
# http://stackoverflow.com/questions/26034777/matplotlib-same-height-for-colorbar-as-for-plot
divider = make_axes_locatable(ax1)
cax1 = divider.append_axes("right", size="5%", pad="3%")
cbar = plt.colorbar(cax = cax1)
markersize = min(20, (10000.0 / (1 + np.sum(images[1] > 0))) ** 0.5)
plt.subplot(1, 2, 2)
plt.xlabel("sqrt( principal peak intensities )")
plt.ylabel("sqrt( other peak intensities )")
plt.title(formula + " + " + adduct + " (m/z={:.4f})".format(mzs[0]) +\
"\n(lines are based on the predicted isotope pattern)")
colors = ['blue', 'red', 'green', 'purple', 'black']
for i in xrange(1, min(5, len(images))):
ratio = intensities[i] / intensities[0]
observed = images[i]
mask = base_intensities > 0
label = "m/z={0:.4f} {1:.1%}".format(mzs[i], intensities[i] / 100.0)
plt.plot(transform(base_intensities[mask]), transform(observed[mask]), '.', markersize=markersize,
color = colors[i-1], label=label)
xs = transform(base_intensities[mask])
ys = transform(base_intensities[mask] * ratio)
order = xs.argsort()
plt.plot(xs[order], ys[order], color=colors[i-1], linewidth=0.5)
lgnd = plt.legend(loc='upper left', numpoints=10)
# http://stackoverflow.com/questions/24706125/setting-a-fixed-size-for-points-in-legend
for handle in lgnd.legendHandles:
handle._legmarker.set_markersize(6)
plt.tight_layout(w_pad=5.0)
plt.savefig(buf)
plt.close()
bottle.response.content_type = 'image/png'
buf.seek(0, os.SEEK_END)
bottle.response.content_length = buf.tell()
buf.seek(0)
return buf
@app.route("/show")
def show_images_get():
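    # Predict an isotope pattern per adduct, extract the matching ion images, and score them.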
dataset = bottle.request.params.get('dataset', app.paths.iterkeys().next())
formula = bottle.request.params.get('formula', '')
tolerance = float(bottle.request.params.get('tolerance', 5.0))
resolution = float(bottle.request.params.get('resolution', 1e5))
selected_adduct = bottle.request.params.get('adduct', 'H')
hs_removal = bottle.request.GET.get('hs_removal', False)
k = int(bottle.request.params.get('npeaks', 4))
if hs_removal == 'on':
hs_removal = True
pts = float(bottle.request.params.get('pts', 10))
cutoff = float(bottle.request.params.get('pyisocalc_cutoff', 1e-3))
adducts = ['H', 'K', 'Na']
isotope_patterns = {}
for adduct in adducts:
sf = pyisocalc.SumFormulaParser.parse_string(formula + adduct)
raw_pattern = pyisocalc.isodist(sf, cutoff)
fwhm = raw_pattern.get_spectrum()[0][0] / resolution
pattern = pyisocalc.apply_gaussian(raw_pattern, fwhm, pts, exact=True)
mzs, intensities = map(np.array, pattern.get_spectrum(source='centroids'))
if len(mzs) > k:
order = intensities.argsort()[::-1]
mzs = mzs[order][:k]
intensities = intensities[order][:k]
order = mzs.argsort()
mzs = mzs[order]
intensities = intensities[order]
datacube = app.get_datacube(dataset, mzs, tolerance)
if hs_removal:
for img in datacube.xic:
if len(img) > 0:
pc = np.percentile(img, 99)
img[img > pc] = pc
chaos = measure_of_chaos(datacube.xic_to_image(0), 30, overwrite=False)
iso_corr = isotope_pattern_match(datacube.xic, intensities)
img_corr = 1.0 # return 1 if there's a single peak
if len(intensities[1:]) > 1:
img_corr = isotope_image_correlation(datacube.xic, weights=intensities[1:])
stats = {'measure of chaos': chaos,
'image correlation score': img_corr,
'isotope pattern score': iso_corr}
isotope_patterns[adduct] = (mzs, intensities, stats)
return bottle.template('show_images', hs_removal=hs_removal,
isotope_patterns=isotope_patterns, formula=formula, selected_adduct=selected_adduct,
pretty_formula=re.sub(r"(\d+)", r"<sub>\1</sub>", formula),
resolution=resolution, tol=tolerance, datasets=app.paths.keys(),
npeaks=k, selected_dataset=dataset)
app.run(sys.argv[1:], port=8081)
|
{
"content_hash": "d22d7653d24e214e0916157a7f12d754",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 111,
"avg_line_length": 38.21052631578947,
"alnum_prop": 0.612849271940181,
"repo_name": "SpatialMetabolomics/SM_standalone",
"id": "e80128f55005674a3044bc50949c9f067da21ea9",
"size": "10185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simple_webserver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "21"
},
{
"name": "Go",
"bytes": "1102"
},
{
"name": "Python",
"bytes": "68513"
},
{
"name": "Smarty",
"bytes": "5951"
}
],
"symlink_target": ""
}
|