repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
epfl-lts2/pygsp | pygsp/graphs/_io.py | IOMixIn._join_signals | def _join_signals(self):
r"""Join N 1D signals into one N-dimensional signal."""
joined = dict()
for name in self.signals:
name_base = name.rsplit('_', 1)[0]
names = joined.get(name_base, list())
names.append(name)
joined[name_base] = names
for name_base, names in joined.items():
if len(names) > 1:
names = sorted(names) # ensure dim ordering (_0, _1, etc.)
signal_nd = np.stack([self.signals[n] for n in names], axis=1)
self.signals[name_base] = signal_nd
for name in names:
del self.signals[name] | python | def _join_signals(self):
r"""Join N 1D signals into one N-dimensional signal."""
joined = dict()
for name in self.signals:
name_base = name.rsplit('_', 1)[0]
names = joined.get(name_base, list())
names.append(name)
joined[name_base] = names
for name_base, names in joined.items():
if len(names) > 1:
names = sorted(names) # ensure dim ordering (_0, _1, etc.)
signal_nd = np.stack([self.signals[n] for n in names], axis=1)
self.signals[name_base] = signal_nd
for name in names:
del self.signals[name] | [
"def",
"_join_signals",
"(",
"self",
")",
":",
"joined",
"=",
"dict",
"(",
")",
"for",
"name",
"in",
"self",
".",
"signals",
":",
"name_base",
"=",
"name",
".",
"rsplit",
"(",
"'_'",
",",
"1",
")",
"[",
"0",
"]",
"names",
"=",
"joined",
".",
"get... | r"""Join N 1D signals into one N-dimensional signal. | [
"r",
"Join",
"N",
"1D",
"signals",
"into",
"one",
"N",
"-",
"dimensional",
"signal",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L37-L51 | train | 210,900 |
epfl-lts2/pygsp | pygsp/graphs/_io.py | IOMixIn.to_networkx | def to_networkx(self):
r"""Export the graph to NetworkX.
Edge weights are stored as an edge attribute,
under the name "weight".
Signals are stored as node attributes,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`networkx.Graph`
A NetworkX graph object.
See Also
--------
to_graphtool : export to graph-tool
save : save to a file
Examples
--------
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_networkx()
>>> print(nx.info(graph))
Name: Path
Type: DiGraph
Number of nodes: 4
Number of edges: 3
Average in degree: 0.7500
Average out degree: 0.7500
>>> nx.is_directed(graph)
True
>>> graph.nodes()
NodeView((0, 1, 2, 3))
>>> graph.edges()
OutEdgeView([(0, 1), (1, 2), (2, 3)])
>>> graph.nodes()[2]
{'signal': 2.3}
>>> graph.edges()[(0, 1)]
{'weight': 1.0}
>>> # nx.draw(graph, with_labels=True)
Another common goal is to use NetworkX to compute some properties to be
be imported back in the PyGSP as signals.
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_networkx()
>>> betweenness = nx.betweenness_centrality(graph, weight='weight')
>>> nx.set_node_attributes(graph, betweenness, 'betweenness')
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness']))
"""
nx = _import_networkx()
def convert(number):
# NetworkX accepts arbitrary python objects as attributes, but:
# * the GEXF writer does not accept any NumPy types (on signals),
# * the GraphML writer does not accept NumPy ints.
if issubclass(number.dtype.type, (np.integer, np.bool_)):
return int(number)
else:
return float(number)
def edges():
for source, target, weight in zip(*self.get_edge_list()):
yield int(source), int(target), {'weight': convert(weight)}
def nodes():
for vertex in range(self.n_vertices):
signals = {name: convert(signal[vertex])
for name, signal in self.signals.items()}
yield vertex, signals
self._break_signals()
graph = nx.DiGraph() if self.is_directed() else nx.Graph()
graph.add_nodes_from(nodes())
graph.add_edges_from(edges())
graph.name = self.__class__.__name__
return graph | python | def to_networkx(self):
r"""Export the graph to NetworkX.
Edge weights are stored as an edge attribute,
under the name "weight".
Signals are stored as node attributes,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`networkx.Graph`
A NetworkX graph object.
See Also
--------
to_graphtool : export to graph-tool
save : save to a file
Examples
--------
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_networkx()
>>> print(nx.info(graph))
Name: Path
Type: DiGraph
Number of nodes: 4
Number of edges: 3
Average in degree: 0.7500
Average out degree: 0.7500
>>> nx.is_directed(graph)
True
>>> graph.nodes()
NodeView((0, 1, 2, 3))
>>> graph.edges()
OutEdgeView([(0, 1), (1, 2), (2, 3)])
>>> graph.nodes()[2]
{'signal': 2.3}
>>> graph.edges()[(0, 1)]
{'weight': 1.0}
>>> # nx.draw(graph, with_labels=True)
Another common goal is to use NetworkX to compute some properties to be
be imported back in the PyGSP as signals.
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_networkx()
>>> betweenness = nx.betweenness_centrality(graph, weight='weight')
>>> nx.set_node_attributes(graph, betweenness, 'betweenness')
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness']))
"""
nx = _import_networkx()
def convert(number):
# NetworkX accepts arbitrary python objects as attributes, but:
# * the GEXF writer does not accept any NumPy types (on signals),
# * the GraphML writer does not accept NumPy ints.
if issubclass(number.dtype.type, (np.integer, np.bool_)):
return int(number)
else:
return float(number)
def edges():
for source, target, weight in zip(*self.get_edge_list()):
yield int(source), int(target), {'weight': convert(weight)}
def nodes():
for vertex in range(self.n_vertices):
signals = {name: convert(signal[vertex])
for name, signal in self.signals.items()}
yield vertex, signals
self._break_signals()
graph = nx.DiGraph() if self.is_directed() else nx.Graph()
graph.add_nodes_from(nodes())
graph.add_edges_from(edges())
graph.name = self.__class__.__name__
return graph | [
"def",
"to_networkx",
"(",
"self",
")",
":",
"nx",
"=",
"_import_networkx",
"(",
")",
"def",
"convert",
"(",
"number",
")",
":",
"# NetworkX accepts arbitrary python objects as attributes, but:",
"# * the GEXF writer does not accept any NumPy types (on signals),",
"# * the Grap... | r"""Export the graph to NetworkX.
Edge weights are stored as an edge attribute,
under the name "weight".
Signals are stored as node attributes,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`networkx.Graph`
A NetworkX graph object.
See Also
--------
to_graphtool : export to graph-tool
save : save to a file
Examples
--------
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_networkx()
>>> print(nx.info(graph))
Name: Path
Type: DiGraph
Number of nodes: 4
Number of edges: 3
Average in degree: 0.7500
Average out degree: 0.7500
>>> nx.is_directed(graph)
True
>>> graph.nodes()
NodeView((0, 1, 2, 3))
>>> graph.edges()
OutEdgeView([(0, 1), (1, 2), (2, 3)])
>>> graph.nodes()[2]
{'signal': 2.3}
>>> graph.edges()[(0, 1)]
{'weight': 1.0}
>>> # nx.draw(graph, with_labels=True)
Another common goal is to use NetworkX to compute some properties to be
be imported back in the PyGSP as signals.
>>> import networkx as nx
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_networkx()
>>> betweenness = nx.betweenness_centrality(graph, weight='weight')
>>> nx.set_node_attributes(graph, betweenness, 'betweenness')
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness'])) | [
"r",
"Export",
"the",
"graph",
"to",
"NetworkX",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L53-L144 | train | 210,901 |
epfl-lts2/pygsp | pygsp/graphs/_io.py | IOMixIn.to_graphtool | def to_graphtool(self):
r"""Export the graph to graph-tool.
Edge weights are stored as an edge property map,
under the name "weight".
Signals are stored as vertex property maps,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
See Also
--------
to_networkx : export to NetworkX
save : save to a file
Examples
--------
>>> import graph_tool as gt
>>> import graph_tool.draw
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_graphtool()
>>> graph.is_directed()
True
>>> graph.vertex_properties['signal'][2]
2.3
>>> graph.edge_properties['weight'][(0, 1)]
1.0
>>> # gt.draw.graph_draw(graph, vertex_text=graph.vertex_index)
Another common goal is to use graph-tool to compute some properties to
be imported back in the PyGSP as signals.
>>> import graph_tool as gt
>>> import graph_tool.centrality
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_graphtool()
>>> vprop, eprop = gt.centrality.betweenness(
... graph, weight=graph.edge_properties['weight'])
>>> graph.vertex_properties['betweenness'] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness']))
"""
# See gt.value_types() for the list of accepted types.
# See the definition of _type_alias() for a list of aliases.
# Mapping from https://docs.scipy.org/doc/numpy/user/basics.types.html.
convert = {
np.bool_: 'bool',
np.int8: 'int8_t',
np.int16: 'int16_t',
np.int32: 'int32_t',
np.int64: 'int64_t',
np.short: 'short',
np.intc: 'int',
np.uintc: 'unsigned int',
np.long: 'long',
np.longlong: 'long long',
np.uint: 'unsigned long',
np.single: 'float',
np.double: 'double',
np.longdouble: 'long double',
}
gt = _import_graphtool()
graph = gt.Graph(directed=self.is_directed())
sources, targets, weights = self.get_edge_list()
graph.add_edge_list(np.asarray((sources, targets)).T)
try:
dtype = convert[weights.dtype.type]
except KeyError:
raise TypeError("Type {} of the edge weights is not supported."
.format(weights.dtype))
prop = graph.new_edge_property(dtype)
prop.get_array()[:] = weights
graph.edge_properties['weight'] = prop
self._break_signals()
for name, signal in self.signals.items():
try:
dtype = convert[signal.dtype.type]
except KeyError:
raise TypeError("Type {} of signal {} is not supported."
.format(signal.dtype, name))
prop = graph.new_vertex_property(dtype)
prop.get_array()[:] = signal
graph.vertex_properties[name] = prop
return graph | python | def to_graphtool(self):
r"""Export the graph to graph-tool.
Edge weights are stored as an edge property map,
under the name "weight".
Signals are stored as vertex property maps,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
See Also
--------
to_networkx : export to NetworkX
save : save to a file
Examples
--------
>>> import graph_tool as gt
>>> import graph_tool.draw
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_graphtool()
>>> graph.is_directed()
True
>>> graph.vertex_properties['signal'][2]
2.3
>>> graph.edge_properties['weight'][(0, 1)]
1.0
>>> # gt.draw.graph_draw(graph, vertex_text=graph.vertex_index)
Another common goal is to use graph-tool to compute some properties to
be imported back in the PyGSP as signals.
>>> import graph_tool as gt
>>> import graph_tool.centrality
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_graphtool()
>>> vprop, eprop = gt.centrality.betweenness(
... graph, weight=graph.edge_properties['weight'])
>>> graph.vertex_properties['betweenness'] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness']))
"""
# See gt.value_types() for the list of accepted types.
# See the definition of _type_alias() for a list of aliases.
# Mapping from https://docs.scipy.org/doc/numpy/user/basics.types.html.
convert = {
np.bool_: 'bool',
np.int8: 'int8_t',
np.int16: 'int16_t',
np.int32: 'int32_t',
np.int64: 'int64_t',
np.short: 'short',
np.intc: 'int',
np.uintc: 'unsigned int',
np.long: 'long',
np.longlong: 'long long',
np.uint: 'unsigned long',
np.single: 'float',
np.double: 'double',
np.longdouble: 'long double',
}
gt = _import_graphtool()
graph = gt.Graph(directed=self.is_directed())
sources, targets, weights = self.get_edge_list()
graph.add_edge_list(np.asarray((sources, targets)).T)
try:
dtype = convert[weights.dtype.type]
except KeyError:
raise TypeError("Type {} of the edge weights is not supported."
.format(weights.dtype))
prop = graph.new_edge_property(dtype)
prop.get_array()[:] = weights
graph.edge_properties['weight'] = prop
self._break_signals()
for name, signal in self.signals.items():
try:
dtype = convert[signal.dtype.type]
except KeyError:
raise TypeError("Type {} of signal {} is not supported."
.format(signal.dtype, name))
prop = graph.new_vertex_property(dtype)
prop.get_array()[:] = signal
graph.vertex_properties[name] = prop
return graph | [
"def",
"to_graphtool",
"(",
"self",
")",
":",
"# See gt.value_types() for the list of accepted types.",
"# See the definition of _type_alias() for a list of aliases.",
"# Mapping from https://docs.scipy.org/doc/numpy/user/basics.types.html.",
"convert",
"=",
"{",
"np",
".",
"bool_",
":... | r"""Export the graph to graph-tool.
Edge weights are stored as an edge property map,
under the name "weight".
Signals are stored as vertex property maps,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Returns
-------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
See Also
--------
to_networkx : export to NetworkX
save : save to a file
Examples
--------
>>> import graph_tool as gt
>>> import graph_tool.draw
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Path(4, directed=True)
>>> graph.set_signal(np.full(4, 2.3), 'signal')
>>> graph = graph.to_graphtool()
>>> graph.is_directed()
True
>>> graph.vertex_properties['signal'][2]
2.3
>>> graph.edge_properties['weight'][(0, 1)]
1.0
>>> # gt.draw.graph_draw(graph, vertex_text=graph.vertex_index)
Another common goal is to use graph-tool to compute some properties to
be imported back in the PyGSP as signals.
>>> import graph_tool as gt
>>> import graph_tool.centrality
>>> from matplotlib import pyplot as plt
>>> graph = graphs.Sensor(100, seed=42)
>>> graph.set_signal(graph.coords, 'coords')
>>> graph = graph.to_graphtool()
>>> vprop, eprop = gt.centrality.betweenness(
... graph, weight=graph.edge_properties['weight'])
>>> graph.vertex_properties['betweenness'] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.compute_fourier_basis()
>>> graph.set_coordinates(graph.signals['coords'])
>>> fig, axes = plt.subplots(1, 2)
>>> _ = graph.plot(graph.signals['betweenness'], ax=axes[0])
>>> _ = axes[1].plot(graph.e, graph.gft(graph.signals['betweenness'])) | [
"r",
"Export",
"the",
"graph",
"to",
"graph",
"-",
"tool",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L146-L249 | train | 210,902 |
epfl-lts2/pygsp | pygsp/graphs/_io.py | IOMixIn.from_networkx | def from_networkx(cls, graph, weight='weight'):
r"""Import a graph from NetworkX.
Edge weights are retrieved as an edge attribute,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node attributes,
and stored in the :attr:`signals` dictionary under the attribute name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`networkx.Graph`
A NetworkX graph object.
weight : string or None, optional
The edge attribute that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
The nodes are ordered according to :meth:`networkx.Graph.nodes`.
In NetworkX, node attributes need not be set for every node.
If a node attribute is not set for a node, a NaN is assigned to the
corresponding signal for that node.
If the graph is a :class:`networkx.MultiGraph`, multiedges are
aggregated by summation.
See Also
--------
from_graphtool : import from graph-tool
load : load from a file
Examples
--------
>>> import networkx as nx
>>> graph = nx.Graph()
>>> graph.add_edge(1, 2, weight=0.2)
>>> graph.add_edge(2, 3, weight=0.9)
>>> graph.add_node(4, sig=3.1416)
>>> graph.nodes()
NodeView((1, 2, 3, 4))
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': array([ nan, nan, nan, 3.1416])}
"""
nx = _import_networkx()
from .graph import Graph
adjacency = nx.to_scipy_sparse_matrix(graph, weight=weight)
graph_pg = Graph(adjacency)
for i, node in enumerate(graph.nodes()):
for name in graph.nodes[node].keys():
try:
signal = graph_pg.signals[name]
except KeyError:
signal = np.full(graph_pg.n_vertices, np.nan)
graph_pg.set_signal(signal, name)
try:
signal[i] = graph.nodes[node][name]
except KeyError:
pass # attribute not set for node
graph_pg._join_signals()
return graph_pg | python | def from_networkx(cls, graph, weight='weight'):
r"""Import a graph from NetworkX.
Edge weights are retrieved as an edge attribute,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node attributes,
and stored in the :attr:`signals` dictionary under the attribute name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`networkx.Graph`
A NetworkX graph object.
weight : string or None, optional
The edge attribute that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
The nodes are ordered according to :meth:`networkx.Graph.nodes`.
In NetworkX, node attributes need not be set for every node.
If a node attribute is not set for a node, a NaN is assigned to the
corresponding signal for that node.
If the graph is a :class:`networkx.MultiGraph`, multiedges are
aggregated by summation.
See Also
--------
from_graphtool : import from graph-tool
load : load from a file
Examples
--------
>>> import networkx as nx
>>> graph = nx.Graph()
>>> graph.add_edge(1, 2, weight=0.2)
>>> graph.add_edge(2, 3, weight=0.9)
>>> graph.add_node(4, sig=3.1416)
>>> graph.nodes()
NodeView((1, 2, 3, 4))
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': array([ nan, nan, nan, 3.1416])}
"""
nx = _import_networkx()
from .graph import Graph
adjacency = nx.to_scipy_sparse_matrix(graph, weight=weight)
graph_pg = Graph(adjacency)
for i, node in enumerate(graph.nodes()):
for name in graph.nodes[node].keys():
try:
signal = graph_pg.signals[name]
except KeyError:
signal = np.full(graph_pg.n_vertices, np.nan)
graph_pg.set_signal(signal, name)
try:
signal[i] = graph.nodes[node][name]
except KeyError:
pass # attribute not set for node
graph_pg._join_signals()
return graph_pg | [
"def",
"from_networkx",
"(",
"cls",
",",
"graph",
",",
"weight",
"=",
"'weight'",
")",
":",
"nx",
"=",
"_import_networkx",
"(",
")",
"from",
".",
"graph",
"import",
"Graph",
"adjacency",
"=",
"nx",
".",
"to_scipy_sparse_matrix",
"(",
"graph",
",",
"weight"... | r"""Import a graph from NetworkX.
Edge weights are retrieved as an edge attribute,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node attributes,
and stored in the :attr:`signals` dictionary under the attribute name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`networkx.Graph`
A NetworkX graph object.
weight : string or None, optional
The edge attribute that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
The nodes are ordered according to :meth:`networkx.Graph.nodes`.
In NetworkX, node attributes need not be set for every node.
If a node attribute is not set for a node, a NaN is assigned to the
corresponding signal for that node.
If the graph is a :class:`networkx.MultiGraph`, multiedges are
aggregated by summation.
See Also
--------
from_graphtool : import from graph-tool
load : load from a file
Examples
--------
>>> import networkx as nx
>>> graph = nx.Graph()
>>> graph.add_edge(1, 2, weight=0.2)
>>> graph.add_edge(2, 3, weight=0.9)
>>> graph.add_node(4, sig=3.1416)
>>> graph.nodes()
NodeView((1, 2, 3, 4))
>>> graph = graphs.Graph.from_networkx(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': array([ nan, nan, nan, 3.1416])} | [
"r",
"Import",
"a",
"graph",
"from",
"NetworkX",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L252-L330 | train | 210,903 |
epfl-lts2/pygsp | pygsp/graphs/_io.py | IOMixIn.from_graphtool | def from_graphtool(cls, graph, weight='weight'):
r"""Import a graph from graph-tool.
Edge weights are retrieved as an edge property,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node properties,
and stored in the :attr:`signals` dictionary under the property name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
weight : string
The edge property that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
If the graph has multiple edge connecting the same two nodes, a sum
over the edges is taken to merge them.
See Also
--------
from_networkx : import from NetworkX
load : load from a file
Examples
--------
>>> import graph_tool as gt
>>> graph = gt.Graph(directed=False)
>>> e1 = graph.add_edge(0, 1)
>>> e2 = graph.add_edge(1, 2)
>>> v = graph.add_vertex()
>>> eprop = graph.new_edge_property("double")
>>> eprop[e1] = 0.2
>>> eprop[(1, 2)] = 0.9
>>> graph.edge_properties["weight"] = eprop
>>> vprop = graph.new_vertex_property("double", val=np.nan)
>>> vprop[3] = 3.1416
>>> graph.vertex_properties["sig"] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': PropertyArray([ nan, nan, nan, 3.1416])}
"""
gt = _import_graphtool()
import graph_tool.spectral
from .graph import Graph
weight = graph.edge_properties.get(weight, None)
adjacency = gt.spectral.adjacency(graph, weight=weight)
graph_pg = Graph(adjacency.T)
for name, signal in graph.vertex_properties.items():
graph_pg.set_signal(signal.get_array(), name)
graph_pg._join_signals()
return graph_pg | python | def from_graphtool(cls, graph, weight='weight'):
r"""Import a graph from graph-tool.
Edge weights are retrieved as an edge property,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node properties,
and stored in the :attr:`signals` dictionary under the property name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
weight : string
The edge property that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
If the graph has multiple edge connecting the same two nodes, a sum
over the edges is taken to merge them.
See Also
--------
from_networkx : import from NetworkX
load : load from a file
Examples
--------
>>> import graph_tool as gt
>>> graph = gt.Graph(directed=False)
>>> e1 = graph.add_edge(0, 1)
>>> e2 = graph.add_edge(1, 2)
>>> v = graph.add_vertex()
>>> eprop = graph.new_edge_property("double")
>>> eprop[e1] = 0.2
>>> eprop[(1, 2)] = 0.9
>>> graph.edge_properties["weight"] = eprop
>>> vprop = graph.new_vertex_property("double", val=np.nan)
>>> vprop[3] = 3.1416
>>> graph.vertex_properties["sig"] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': PropertyArray([ nan, nan, nan, 3.1416])}
"""
gt = _import_graphtool()
import graph_tool.spectral
from .graph import Graph
weight = graph.edge_properties.get(weight, None)
adjacency = gt.spectral.adjacency(graph, weight=weight)
graph_pg = Graph(adjacency.T)
for name, signal in graph.vertex_properties.items():
graph_pg.set_signal(signal.get_array(), name)
graph_pg._join_signals()
return graph_pg | [
"def",
"from_graphtool",
"(",
"cls",
",",
"graph",
",",
"weight",
"=",
"'weight'",
")",
":",
"gt",
"=",
"_import_graphtool",
"(",
")",
"import",
"graph_tool",
".",
"spectral",
"from",
".",
"graph",
"import",
"Graph",
"weight",
"=",
"graph",
".",
"edge_prop... | r"""Import a graph from graph-tool.
Edge weights are retrieved as an edge property,
under the name specified by the ``weight`` parameter.
Signals are retrieved from node properties,
and stored in the :attr:`signals` dictionary under the property name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
graph : :class:`graph_tool.Graph`
A graph-tool graph object.
weight : string
The edge property that holds the numerical values used as the edge
weights. All edge weights are set to 1 if None, or not found.
Returns
-------
graph : :class:`~pygsp.graphs.Graph`
A PyGSP graph object.
Notes
-----
If the graph has multiple edge connecting the same two nodes, a sum
over the edges is taken to merge them.
See Also
--------
from_networkx : import from NetworkX
load : load from a file
Examples
--------
>>> import graph_tool as gt
>>> graph = gt.Graph(directed=False)
>>> e1 = graph.add_edge(0, 1)
>>> e2 = graph.add_edge(1, 2)
>>> v = graph.add_vertex()
>>> eprop = graph.new_edge_property("double")
>>> eprop[e1] = 0.2
>>> eprop[(1, 2)] = 0.9
>>> graph.edge_properties["weight"] = eprop
>>> vprop = graph.new_vertex_property("double", val=np.nan)
>>> vprop[3] = 3.1416
>>> graph.vertex_properties["sig"] = vprop
>>> graph = graphs.Graph.from_graphtool(graph)
>>> graph.W.toarray()
array([[0. , 0.2, 0. , 0. ],
[0.2, 0. , 0.9, 0. ],
[0. , 0.9, 0. , 0. ],
[0. , 0. , 0. , 0. ]])
>>> graph.signals
{'sig': PropertyArray([ nan, nan, nan, 3.1416])} | [
"r",
"Import",
"a",
"graph",
"from",
"graph",
"-",
"tool",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L333-L403 | train | 210,904 |
def load(cls, path, fmt=None, backend=None):
    r"""Load a graph from a file.

    Edge weights are read back from the "weight" edge attribute, and node
    attributes are collected into the :attr:`signals` dictionary.
    `N`-dimensional signals that were split on export are joined back.

    Parameters
    ----------
    path : string
        Path of the file to read the graph from.
    fmt : {'graphml', 'gml', 'gexf', None}, optional
        Format the graph is stored in.
        Guessed from the filename extension if None.
    backend : {'networkx', 'graph-tool', None}, optional
        Library used to parse the file. Automatically chosen if None.

    Returns
    -------
    graph : :class:`Graph`
        The loaded graph.

    See Also
    --------
    save : save a graph to a file
    from_networkx : load with NetworkX then import in the PyGSP
    from_graphtool : load with graph-tool then import in the PyGSP

    Notes
    -----
    A lossless round-trip is only guaranteed if the graph (and its signals)
    is saved and loaded with the same backend. Other formats can be read by
    loading in NetworkX or graph-tool first, then importing to the PyGSP.

    Examples
    --------
    >>> graph = graphs.Logo()
    >>> graph.save('logo.graphml')
    >>> graph = graphs.Graph.load('logo.graphml')
    >>> import os
    >>> os.remove('logo.graphml')

    """
    if fmt is None:
        # Infer the format from the file extension (e.g. "g.gml" -> "gml").
        fmt = os.path.splitext(path)[1][1:]
    if fmt not in ['graphml', 'gml', 'gexf']:
        raise ValueError('Unsupported format {}.'.format(fmt))

    def read_with_networkx(path, fmt):
        # Delayed import: networkx is an optional dependency.
        nx = _import_networkx()
        reader = getattr(nx, 'read_' + fmt)
        return cls.from_networkx(reader(path))

    def read_with_graphtool(path, fmt):
        # Delayed import: graph-tool is an optional dependency.
        gt = _import_graphtool()
        return cls.from_graphtool(gt.load_graph(path, fmt=fmt))

    if backend == 'networkx':
        return read_with_networkx(path, fmt)
    elif backend == 'graph-tool':
        return read_with_graphtool(path, fmt)
    elif backend is None:
        # Prefer networkx, fall back on graph-tool if it is not installed.
        try:
            return read_with_networkx(path, fmt)
        except ImportError:
            try:
                return read_with_graphtool(path, fmt)
            except ImportError:
                raise ImportError('Cannot import networkx nor graph-tool.')
    else:
        raise ValueError('Unknown backend {}.'.format(backend))
r"""Load a graph from a file.
Edge weights are retrieved as an edge attribute named "weight".
Signals are retrieved from node attributes,
and stored in the :attr:`signals` dictionary under the attribute name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
path : string
Path to the file from which to load the graph.
fmt : {'graphml', 'gml', 'gexf', None}, optional
Format in which the graph is saved.
Guessed from the filename extension if None.
backend : {'networkx', 'graph-tool', None}, optional
Library used to load the graph. Automatically chosen if None.
Returns
-------
graph : :class:`Graph`
The loaded graph.
See Also
--------
save : save a graph to a file
from_networkx : load with NetworkX then import in the PyGSP
from_graphtool : load with graph-tool then import in the PyGSP
Notes
-----
A lossless round-trip is only guaranteed if the graph (and its signals)
is saved and loaded with the same backend.
Loading from other formats is possible by loading in NetworkX or
graph-tool, and importing to the PyGSP.
The proposed formats are however tested for faithful round-trips.
Examples
--------
>>> graph = graphs.Logo()
>>> graph.save('logo.graphml')
>>> graph = graphs.Graph.load('logo.graphml')
>>> import os
>>> os.remove('logo.graphml')
"""
if fmt is None:
fmt = os.path.splitext(path)[1][1:]
if fmt not in ['graphml', 'gml', 'gexf']:
raise ValueError('Unsupported format {}.'.format(fmt))
def load_networkx(path, fmt):
nx = _import_networkx()
load = getattr(nx, 'read_' + fmt)
graph = load(path)
return cls.from_networkx(graph)
def load_graphtool(path, fmt):
gt = _import_graphtool()
graph = gt.load_graph(path, fmt=fmt)
return cls.from_graphtool(graph)
if backend == 'networkx':
return load_networkx(path, fmt)
elif backend == 'graph-tool':
return load_graphtool(path, fmt)
elif backend is None:
try:
return load_networkx(path, fmt)
except ImportError:
try:
return load_graphtool(path, fmt)
except ImportError:
raise ImportError('Cannot import networkx nor graph-tool.')
else:
raise ValueError('Unknown backend {}.'.format(backend)) | [
"def",
"load",
"(",
"cls",
",",
"path",
",",
"fmt",
"=",
"None",
",",
"backend",
"=",
"None",
")",
":",
"if",
"fmt",
"is",
"None",
":",
"fmt",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"path",
")",
"[",
"1",
"]",
"[",
"1",
":",
"]",
"if... | r"""Load a graph from a file.
Edge weights are retrieved as an edge attribute named "weight".
Signals are retrieved from node attributes,
and stored in the :attr:`signals` dictionary under the attribute name.
`N`-dimensional signals that were broken during export are joined.
Parameters
----------
path : string
Path to the file from which to load the graph.
fmt : {'graphml', 'gml', 'gexf', None}, optional
Format in which the graph is saved.
Guessed from the filename extension if None.
backend : {'networkx', 'graph-tool', None}, optional
Library used to load the graph. Automatically chosen if None.
Returns
-------
graph : :class:`Graph`
The loaded graph.
See Also
--------
save : save a graph to a file
from_networkx : load with NetworkX then import in the PyGSP
from_graphtool : load with graph-tool then import in the PyGSP
Notes
-----
A lossless round-trip is only guaranteed if the graph (and its signals)
is saved and loaded with the same backend.
Loading from other formats is possible by loading in NetworkX or
graph-tool, and importing to the PyGSP.
The proposed formats are however tested for faithful round-trips.
Examples
--------
>>> graph = graphs.Logo()
>>> graph.save('logo.graphml')
>>> graph = graphs.Graph.load('logo.graphml')
>>> import os
>>> os.remove('logo.graphml') | [
"r",
"Load",
"a",
"graph",
"from",
"a",
"file",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L406-L485 | train | 210,905 |
def save(self, path, fmt=None, backend=None):
    r"""Save the graph to a file.

    Edge weights are written as the "weight" edge attribute, and each entry
    of the :attr:`signals` dictionary is written as a node attribute under
    its name. `N`-dimensional signals are broken into `N` 1-dimensional
    signals, to be joined back together on import.

    Supported formats are:

    * GraphML_, a comprehensive XML format.
      `Wikipedia <https://en.wikipedia.org/wiki/GraphML>`_.
      Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
      Cytoscape_, SocNetV_.
    * GML_ (Graph Modelling Language), a simple non-XML format.
      `Wikipedia <https://wikipedia.org/wiki/Graph_Modelling_Language>`_.
      Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
      Cytoscape_, SocNetV_, Tulip_.
    * GEXF_ (Graph Exchange XML Format), Gephi's XML format.
      Supported by NetworkX_, NetworKit_, Gephi_, Tulip_, ngraph_.

    If unsure, we recommend GraphML_.

    .. _GraphML: http://graphml.graphdrawing.org
    .. _GML: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
    .. _GEXF: https://gephi.org/gexf/format
    .. _NetworkX: https://networkx.github.io
    .. _graph-tool: https://graph-tool.skewed.de
    .. _NetworKit: https://networkit.github.io
    .. _igraph: https://igraph.org
    .. _ngraph: https://github.com/anvaka/ngraph
    .. _Gephi: https://gephi.org
    .. _Cytoscape: https://cytoscape.org
    .. _SocNetV: https://socnetv.org
    .. _Tulip: http://tulip.labri.fr

    Parameters
    ----------
    path : string
        Path of the file the graph is written to.
    fmt : {'graphml', 'gml', 'gexf', None}, optional
        Format to save the graph in.
        Guessed from the filename extension if None.
    backend : {'networkx', 'graph-tool', None}, optional
        Library used to write the file. Automatically chosen if None.

    See Also
    --------
    load : load a graph from a file
    to_networkx : export as a NetworkX graph, and save with NetworkX
    to_graphtool : export as a graph-tool graph, and save with graph-tool

    Notes
    -----
    A lossless round-trip is only guaranteed if the graph (and its signals)
    is saved and loaded with the same backend. Other formats can be written
    by exporting to NetworkX or graph-tool and using their own savers.
    Edge weights and signal values are rounded at the sixth decimal when
    saving in ``fmt='gml'`` with ``backend='graph-tool'``.

    Examples
    --------
    >>> graph = graphs.Logo()
    >>> graph.save('logo.graphml')
    >>> graph = graphs.Graph.load('logo.graphml')
    >>> import os
    >>> os.remove('logo.graphml')

    """
    if fmt is None:
        # Infer the format from the file extension (e.g. "g.gml" -> "gml").
        fmt = os.path.splitext(path)[1][1:]
    if fmt not in ['graphml', 'gml', 'gexf']:
        raise ValueError('Unsupported format {}.'.format(fmt))

    def write_with_networkx(graph, path, fmt):
        # Delayed import: networkx is an optional dependency.
        nx = _import_networkx()
        writer = getattr(nx, 'write_' + fmt)
        writer(graph.to_networkx(), path)

    def write_with_graphtool(graph, path, fmt):
        # graph-tool graphs know how to save themselves.
        graph.to_graphtool().save(path, fmt=fmt)

    if backend == 'networkx':
        write_with_networkx(self, path, fmt)
    elif backend == 'graph-tool':
        write_with_graphtool(self, path, fmt)
    elif backend is None:
        # Prefer networkx, fall back on graph-tool if it is not installed.
        try:
            write_with_networkx(self, path, fmt)
        except ImportError:
            try:
                write_with_graphtool(self, path, fmt)
            except ImportError:
                raise ImportError('Cannot import networkx nor graph-tool.')
    else:
        raise ValueError('Unknown backend {}.'.format(backend))
r"""Save the graph to a file.
Edge weights are stored as an edge attribute,
under the name "weight".
Signals are stored as node attributes,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Supported formats are:
* GraphML_, a comprehensive XML format.
`Wikipedia <https://en.wikipedia.org/wiki/GraphML>`_.
Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
Cytoscape_, SocNetV_.
* GML_ (Graph Modelling Language), a simple non-XML format.
`Wikipedia <https://wikipedia.org/wiki/Graph_Modelling_Language>`_.
Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
Cytoscape_, SocNetV_, Tulip_.
* GEXF_ (Graph Exchange XML Format), Gephi's XML format.
Supported by NetworkX_, NetworKit_, Gephi_, Tulip_, ngraph_.
If unsure, we recommend GraphML_.
.. _GraphML: http://graphml.graphdrawing.org
.. _GML: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
.. _GEXF: https://gephi.org/gexf/format
.. _NetworkX: https://networkx.github.io
.. _graph-tool: https://graph-tool.skewed.de
.. _NetworKit: https://networkit.github.io
.. _igraph: https://igraph.org
.. _ngraph: https://github.com/anvaka/ngraph
.. _Gephi: https://gephi.org
.. _Cytoscape: https://cytoscape.org
.. _SocNetV: https://socnetv.org
.. _Tulip: http://tulip.labri.fr
Parameters
----------
path : string
Path to the file where the graph is to be saved.
fmt : {'graphml', 'gml', 'gexf', None}, optional
Format in which to save the graph.
Guessed from the filename extension if None.
backend : {'networkx', 'graph-tool', None}, optional
Library used to load the graph. Automatically chosen if None.
See Also
--------
load : load a graph from a file
to_networkx : export as a NetworkX graph, and save with NetworkX
to_graphtool : export as a graph-tool graph, and save with graph-tool
Notes
-----
A lossless round-trip is only guaranteed if the graph (and its signals)
is saved and loaded with the same backend.
Saving in other formats is possible by exporting to NetworkX or
graph-tool, and using their respective saving functionality.
The proposed formats are however tested for faithful round-trips.
Edge weights and signal values are rounded at the sixth decimal when
saving in ``fmt='gml'`` with ``backend='graph-tool'``.
Examples
--------
>>> graph = graphs.Logo()
>>> graph.save('logo.graphml')
>>> graph = graphs.Graph.load('logo.graphml')
>>> import os
>>> os.remove('logo.graphml')
"""
if fmt is None:
fmt = os.path.splitext(path)[1][1:]
if fmt not in ['graphml', 'gml', 'gexf']:
raise ValueError('Unsupported format {}.'.format(fmt))
def save_networkx(graph, path, fmt):
nx = _import_networkx()
graph = graph.to_networkx()
save = getattr(nx, 'write_' + fmt)
save(graph, path)
def save_graphtool(graph, path, fmt):
graph = graph.to_graphtool()
graph.save(path, fmt=fmt)
if backend == 'networkx':
save_networkx(self, path, fmt)
elif backend == 'graph-tool':
save_graphtool(self, path, fmt)
elif backend is None:
try:
save_networkx(self, path, fmt)
except ImportError:
try:
save_graphtool(self, path, fmt)
except ImportError:
raise ImportError('Cannot import networkx nor graph-tool.')
else:
raise ValueError('Unknown backend {}.'.format(backend)) | [
"def",
"save",
"(",
"self",
",",
"path",
",",
"fmt",
"=",
"None",
",",
"backend",
"=",
"None",
")",
":",
"if",
"fmt",
"is",
"None",
":",
"fmt",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"path",
")",
"[",
"1",
"]",
"[",
"1",
":",
"]",
"i... | r"""Save the graph to a file.
Edge weights are stored as an edge attribute,
under the name "weight".
Signals are stored as node attributes,
under their name in the :attr:`signals` dictionary.
`N`-dimensional signals are broken into `N` 1-dimensional signals.
They will eventually be joined back together on import.
Supported formats are:
* GraphML_, a comprehensive XML format.
`Wikipedia <https://en.wikipedia.org/wiki/GraphML>`_.
Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
Cytoscape_, SocNetV_.
* GML_ (Graph Modelling Language), a simple non-XML format.
`Wikipedia <https://wikipedia.org/wiki/Graph_Modelling_Language>`_.
Supported by NetworkX_, graph-tool_, NetworKit_, igraph_, Gephi_,
Cytoscape_, SocNetV_, Tulip_.
* GEXF_ (Graph Exchange XML Format), Gephi's XML format.
Supported by NetworkX_, NetworKit_, Gephi_, Tulip_, ngraph_.
If unsure, we recommend GraphML_.
.. _GraphML: http://graphml.graphdrawing.org
.. _GML: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
.. _GEXF: https://gephi.org/gexf/format
.. _NetworkX: https://networkx.github.io
.. _graph-tool: https://graph-tool.skewed.de
.. _NetworKit: https://networkit.github.io
.. _igraph: https://igraph.org
.. _ngraph: https://github.com/anvaka/ngraph
.. _Gephi: https://gephi.org
.. _Cytoscape: https://cytoscape.org
.. _SocNetV: https://socnetv.org
.. _Tulip: http://tulip.labri.fr
Parameters
----------
path : string
Path to the file where the graph is to be saved.
fmt : {'graphml', 'gml', 'gexf', None}, optional
Format in which to save the graph.
Guessed from the filename extension if None.
backend : {'networkx', 'graph-tool', None}, optional
Library used to load the graph. Automatically chosen if None.
See Also
--------
load : load a graph from a file
to_networkx : export as a NetworkX graph, and save with NetworkX
to_graphtool : export as a graph-tool graph, and save with graph-tool
Notes
-----
A lossless round-trip is only guaranteed if the graph (and its signals)
is saved and loaded with the same backend.
Saving in other formats is possible by exporting to NetworkX or
graph-tool, and using their respective saving functionality.
The proposed formats are however tested for faithful round-trips.
Edge weights and signal values are rounded at the sixth decimal when
saving in ``fmt='gml'`` with ``backend='graph-tool'``.
Examples
--------
>>> graph = graphs.Logo()
>>> graph.save('logo.graphml')
>>> graph = graphs.Graph.load('logo.graphml')
>>> import os
>>> os.remove('logo.graphml') | [
"r",
"Save",
"the",
"graph",
"to",
"a",
"file",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/graphs/_io.py#L487-L593 | train | 210,906 |
def loadmat(path):
    r"""
    Load a MATLAB data file shipped with the pygsp package.

    Parameters
    ----------
    path : string
        Path to the mat file from the data folder, without the .mat
        extension.

    Returns
    -------
    data : dict
        Dictionary with variable names as keys, and loaded matrices as
        values.

    Examples
    --------
    >>> from pygsp import utils
    >>> data = utils.loadmat('pointclouds/bunny')
    >>> data['bunny'].shape
    (2503, 3)

    """
    # Read the raw bytes through pkgutil so the data is found however the
    # package is installed, then hand scipy an in-memory file object.
    raw = pkgutil.get_data('pygsp', 'data/' + path + '.mat')
    return scipy.io.loadmat(io.BytesIO(raw))
r"""
Load a matlab data file.
Parameters
----------
path : string
Path to the mat file from the data folder, without the .mat extension.
Returns
-------
data : dict
dictionary with variable names as keys, and loaded matrices as
values.
Examples
--------
>>> from pygsp import utils
>>> data = utils.loadmat('pointclouds/bunny')
>>> data['bunny'].shape
(2503, 3)
"""
data = pkgutil.get_data('pygsp', 'data/' + path + '.mat')
data = io.BytesIO(data)
return scipy.io.loadmat(data) | [
"def",
"loadmat",
"(",
"path",
")",
":",
"data",
"=",
"pkgutil",
".",
"get_data",
"(",
"'pygsp'",
",",
"'data/'",
"+",
"path",
"+",
"'.mat'",
")",
"data",
"=",
"io",
".",
"BytesIO",
"(",
"data",
")",
"return",
"scipy",
".",
"io",
".",
"loadmat",
"(... | r"""
Load a matlab data file.
Parameters
----------
path : string
Path to the mat file from the data folder, without the .mat extension.
Returns
-------
data : dict
dictionary with variable names as keys, and loaded matrices as
values.
Examples
--------
>>> from pygsp import utils
>>> data = utils.loadmat('pointclouds/bunny')
>>> data['bunny'].shape
(2503, 3) | [
"r",
"Load",
"a",
"matlab",
"data",
"file",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L64-L89 | train | 210,907 |
def distanz(x, y=None):
    r"""
    Compute pairwise Euclidean distances between two sets of column vectors.

    Parameters
    ----------
    x : ndarray
        First set of column vectors, of shape (dimensions, N).
        A 1-D array is interpreted as a single row of scalar coordinates.
    y : ndarray, optional
        Second set of column vectors, of shape (dimensions, M).
        Defaults to `x` itself.

    Returns
    -------
    d : ndarray
        Matrix of shape (N, M) whose entry (i, j) is the Euclidean distance
        between ``x[:, i]`` and ``y[:, j]``.

    Raises
    ------
    ValueError
        If `x` and `y` do not have the same number of rows (dimensions).

    Examples
    --------
    >>> from pygsp import utils
    >>> x = np.arange(3)
    >>> utils.distanz(x, x)
    array([[0., 1., 2.],
           [1., 0., 1.],
           [2., 1., 0.]])

    """
    # Promote 1-D inputs to a single row, as the original reshape did.
    x = np.atleast_2d(x)
    y = x if y is None else np.atleast_2d(y)

    if x.shape[0] != y.shape[0]:
        raise ValueError("The sizes of x and y do not fit")

    # ||xi - yj||^2 = ||xi||^2 + ||yj||^2 - 2 <xi, yj>, computed with
    # broadcasting instead of replicating the norm vectors with np.kron.
    xx = (x * x).sum(axis=0)
    yy = (y * y).sum(axis=0)
    xy = np.dot(x.T, y)
    # abs() guards against tiny negative values caused by rounding.
    d = np.abs(xx[:, np.newaxis] + yy[np.newaxis, :] - 2 * xy)
    return np.sqrt(d)
r"""
Calculate the distance between two colon vectors.
Parameters
----------
x : ndarray
First colon vector
y : ndarray
Second colon vector
Returns
-------
d : ndarray
Distance between x and y
Examples
--------
>>> from pygsp import utils
>>> x = np.arange(3)
>>> utils.distanz(x, x)
array([[0., 1., 2.],
[1., 0., 1.],
[2., 1., 0.]])
"""
try:
x.shape[1]
except IndexError:
x = x.reshape(1, x.shape[0])
if y is None:
y = x
else:
try:
y.shape[1]
except IndexError:
y = y.reshape(1, y.shape[0])
rx, cx = x.shape
ry, cy = y.shape
# Size verification
if rx != ry:
raise ValueError("The sizes of x and y do not fit")
xx = (x * x).sum(axis=0)
yy = (y * y).sum(axis=0)
xy = np.dot(x.T, y)
d = abs(np.kron(np.ones((cy, 1)), xx).T +
np.kron(np.ones((cx, 1)), yy) - 2 * xy)
return np.sqrt(d) | [
"def",
"distanz",
"(",
"x",
",",
"y",
"=",
"None",
")",
":",
"try",
":",
"x",
".",
"shape",
"[",
"1",
"]",
"except",
"IndexError",
":",
"x",
"=",
"x",
".",
"reshape",
"(",
"1",
",",
"x",
".",
"shape",
"[",
"0",
"]",
")",
"if",
"y",
"is",
... | r"""
Calculate the distance between two colon vectors.
Parameters
----------
x : ndarray
First colon vector
y : ndarray
Second colon vector
Returns
-------
d : ndarray
Distance between x and y
Examples
--------
>>> from pygsp import utils
>>> x = np.arange(3)
>>> utils.distanz(x, x)
array([[0., 1., 2.],
[1., 0., 1.],
[2., 1., 0.]]) | [
"r",
"Calculate",
"the",
"distance",
"between",
"two",
"colon",
"vectors",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L92-L146 | train | 210,908 |
def resistance_distance(G):
    r"""
    Compute the resistance distances of a graph.

    Parameters
    ----------
    G : Graph or sparse matrix
        Graph structure or Laplacian matrix (L)

    Returns
    -------
    rd : sparse matrix
        distance matrix

    References
    ----------
    :cite:`klein1993resistance`

    """
    if sparse.issparse(G):
        L = G.tocsc()
    else:
        if G.lap_type != 'combinatorial':
            raise ValueError('Need a combinatorial Laplacian.')
        L = G.L.tocsc()

    # The Laplacian of a connected graph is singular: when the sparse
    # inverse fails, fall back on the dense Moore-Penrose pseudo-inverse.
    try:
        pseudo = sparse.linalg.inv(L)
    except RuntimeError:
        pseudo = sparse.lil_matrix(np.linalg.pinv(L.toarray()))

    n_vertices = L.shape[0]
    diag = sparse.csc_matrix(pseudo.diagonal())
    ones_column = sparse.csc_matrix(np.ones((n_vertices, 1)))
    tiled = sparse.kron(diag, ones_column)  # tiled[i, j] = pseudo[j, j]

    # rd[i, j] = pseudo[i, i] + pseudo[j, j] - pseudo[i, j] - pseudo[j, i]
    return tiled.T + tiled - pseudo - pseudo.T
r"""
Compute the resistance distances of a graph.
Parameters
----------
G : Graph or sparse matrix
Graph structure or Laplacian matrix (L)
Returns
-------
rd : sparse matrix
distance matrix
References
----------
:cite:`klein1993resistance`
"""
if sparse.issparse(G):
L = G.tocsc()
else:
if G.lap_type != 'combinatorial':
raise ValueError('Need a combinatorial Laplacian.')
L = G.L.tocsc()
try:
pseudo = sparse.linalg.inv(L)
except RuntimeError:
pseudo = sparse.lil_matrix(np.linalg.pinv(L.toarray()))
N = np.shape(L)[0]
d = sparse.csc_matrix(pseudo.diagonal())
rd = sparse.kron(d, sparse.csc_matrix(np.ones((N, 1)))).T \
+ sparse.kron(d, sparse.csc_matrix(np.ones((N, 1)))) \
- pseudo - pseudo.T
return rd | [
"def",
"resistance_distance",
"(",
"G",
")",
":",
"if",
"sparse",
".",
"issparse",
"(",
"G",
")",
":",
"L",
"=",
"G",
".",
"tocsc",
"(",
")",
"else",
":",
"if",
"G",
".",
"lap_type",
"!=",
"'combinatorial'",
":",
"raise",
"ValueError",
"(",
"'Need a ... | r"""
Compute the resistance distances of a graph.
Parameters
----------
G : Graph or sparse matrix
Graph structure or Laplacian matrix (L)
Returns
-------
rd : sparse matrix
distance matrix
References
----------
:cite:`klein1993resistance` | [
"r",
"Compute",
"the",
"resistance",
"distances",
"of",
"a",
"graph",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L149-L187 | train | 210,909 |
def symmetrize(W, method='average'):
    r"""
    Symmetrize a square matrix.

    Parameters
    ----------
    W : array_like
        Square matrix to be symmetrized
    method : string
        * 'average' : symmetrize by averaging with the transpose. Most useful
          when transforming a directed graph to an undirected one.
        * 'maximum' : symmetrize by taking the maximum with the transpose.
          Similar to 'fill' except that ambiguous entries are resolved by
          taking the largest value.
        * 'fill' : symmetrize by filling in the zeros in both the upper and
          lower triangular parts. Ambiguous entries are resolved by averaging
          the values.
        * 'tril' : symmetrize by considering the lower triangular part only.
        * 'triu' : symmetrize by considering the upper triangular part only.

    Notes
    -----
    You can have the sum by multiplying the average by two. It is however not
    a good candidate for this function as it modifies an already symmetric
    matrix.

    Examples
    --------
    >>> from pygsp import utils
    >>> W = np.array([[0, 3, 0], [3, 1, 6], [4, 2, 3]], dtype=float)
    >>> W
    array([[0., 3., 0.],
           [3., 1., 6.],
           [4., 2., 3.]])
    >>> utils.symmetrize(W, method='average')
    array([[0., 3., 2.],
           [3., 1., 4.],
           [2., 4., 3.]])
    >>> utils.symmetrize(W, method='maximum')
    array([[0., 3., 4.],
           [3., 1., 6.],
           [4., 6., 3.]])
    >>> utils.symmetrize(W, method='fill')
    array([[0., 3., 4.],
           [3., 1., 4.],
           [4., 4., 3.]])
    >>> utils.symmetrize(W, method='tril')
    array([[0., 3., 4.],
           [3., 1., 2.],
           [4., 2., 3.]])
    >>> utils.symmetrize(W, method='triu')
    array([[0., 3., 0.],
           [3., 1., 6.],
           [0., 6., 3.]])

    """
    if W.shape[0] != W.shape[1]:
        raise ValueError('Matrix must be square.')

    is_sparse = sparse.issparse(W)

    if method == 'average':
        return (W + W.T) / 2

    if method == 'maximum':
        if is_sparse:
            # Only touch entries where the transposed value is strictly
            # larger, to preserve the sparsity pattern.
            larger = (W.T > W)
            return W - W.multiply(larger) + W.T.multiply(larger)
        return np.maximum(W, W.T)

    if method == 'fill':
        A = (W > 0)  # Boolean support of W.
        if is_sparse:
            mask = (A + A.T) - A
            W = W + mask.multiply(W.T)
        else:
            # Entries present in W.T but absent from W. Built with xor/or
            # because NumPy deprecated boolean subtraction.
            mask = np.logical_xor(np.logical_or(A, A.T), A)
            W = W + mask * W.T
        return symmetrize(W, method='average')  # Average ambiguous entries.

    if method in ('tril', 'triu'):
        triangle = getattr(sparse if is_sparse else np, method)
        return symmetrize(triangle(W), method='maximum')

    raise ValueError('Unknown symmetrization method {}.'.format(method))
r"""
Symmetrize a square matrix.
Parameters
----------
W : array_like
Square matrix to be symmetrized
method : string
* 'average' : symmetrize by averaging with the transpose. Most useful
when transforming a directed graph to an undirected one.
* 'maximum' : symmetrize by taking the maximum with the transpose.
Similar to 'fill' except that ambiguous entries are resolved by
taking the largest value.
* 'fill' : symmetrize by filling in the zeros in both the upper and
lower triangular parts. Ambiguous entries are resolved by averaging
the values.
* 'tril' : symmetrize by considering the lower triangular part only.
* 'triu' : symmetrize by considering the upper triangular part only.
Notes
-----
You can have the sum by multiplying the average by two. It is however not a
good candidate for this function as it modifies an already symmetric
matrix.
Examples
--------
>>> from pygsp import utils
>>> W = np.array([[0, 3, 0], [3, 1, 6], [4, 2, 3]], dtype=float)
>>> W
array([[0., 3., 0.],
[3., 1., 6.],
[4., 2., 3.]])
>>> utils.symmetrize(W, method='average')
array([[0., 3., 2.],
[3., 1., 4.],
[2., 4., 3.]])
>>> 2 * utils.symmetrize(W, method='average')
array([[0., 6., 4.],
[6., 2., 8.],
[4., 8., 6.]])
>>> utils.symmetrize(W, method='maximum')
array([[0., 3., 4.],
[3., 1., 6.],
[4., 6., 3.]])
>>> utils.symmetrize(W, method='fill')
array([[0., 3., 4.],
[3., 1., 4.],
[4., 4., 3.]])
>>> utils.symmetrize(W, method='tril')
array([[0., 3., 4.],
[3., 1., 2.],
[4., 2., 3.]])
>>> utils.symmetrize(W, method='triu')
array([[0., 3., 0.],
[3., 1., 6.],
[0., 6., 3.]])
"""
if W.shape[0] != W.shape[1]:
raise ValueError('Matrix must be square.')
if method == 'average':
return (W + W.T) / 2
elif method == 'maximum':
if sparse.issparse(W):
bigger = (W.T > W)
return W - W.multiply(bigger) + W.T.multiply(bigger)
else:
return np.maximum(W, W.T)
elif method == 'fill':
A = (W > 0) # Boolean type.
if sparse.issparse(W):
mask = (A + A.T) - A
W = W + mask.multiply(W.T)
else:
# Numpy boolean subtract is deprecated.
mask = np.logical_xor(np.logical_or(A, A.T), A)
W = W + mask * W.T
return symmetrize(W, method='average') # Resolve ambiguous entries.
elif method in ['tril', 'triu']:
if sparse.issparse(W):
tri = getattr(sparse, method)
else:
tri = getattr(np, method)
W = tri(W)
return symmetrize(W, method='maximum')
else:
raise ValueError('Unknown symmetrization method {}.'.format(method)) | [
"def",
"symmetrize",
"(",
"W",
",",
"method",
"=",
"'average'",
")",
":",
"if",
"W",
".",
"shape",
"[",
"0",
"]",
"!=",
"W",
".",
"shape",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"'Matrix must be square.'",
")",
"if",
"method",
"==",
"'averag... | r"""
Symmetrize a square matrix.
Parameters
----------
W : array_like
Square matrix to be symmetrized
method : string
* 'average' : symmetrize by averaging with the transpose. Most useful
when transforming a directed graph to an undirected one.
* 'maximum' : symmetrize by taking the maximum with the transpose.
Similar to 'fill' except that ambiguous entries are resolved by
taking the largest value.
* 'fill' : symmetrize by filling in the zeros in both the upper and
lower triangular parts. Ambiguous entries are resolved by averaging
the values.
* 'tril' : symmetrize by considering the lower triangular part only.
* 'triu' : symmetrize by considering the upper triangular part only.
Notes
-----
You can have the sum by multiplying the average by two. It is however not a
good candidate for this function as it modifies an already symmetric
matrix.
Examples
--------
>>> from pygsp import utils
>>> W = np.array([[0, 3, 0], [3, 1, 6], [4, 2, 3]], dtype=float)
>>> W
array([[0., 3., 0.],
[3., 1., 6.],
[4., 2., 3.]])
>>> utils.symmetrize(W, method='average')
array([[0., 3., 2.],
[3., 1., 4.],
[2., 4., 3.]])
>>> 2 * utils.symmetrize(W, method='average')
array([[0., 6., 4.],
[6., 2., 8.],
[4., 8., 6.]])
>>> utils.symmetrize(W, method='maximum')
array([[0., 3., 4.],
[3., 1., 6.],
[4., 6., 3.]])
>>> utils.symmetrize(W, method='fill')
array([[0., 3., 4.],
[3., 1., 4.],
[4., 4., 3.]])
>>> utils.symmetrize(W, method='tril')
array([[0., 3., 4.],
[3., 1., 2.],
[4., 2., 3.]])
>>> utils.symmetrize(W, method='triu')
array([[0., 3., 0.],
[3., 1., 6.],
[0., 6., 3.]]) | [
"r",
"Symmetrize",
"a",
"square",
"matrix",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L190-L283 | train | 210,910 |
def compute_log_scales(lmin, lmax, Nscales, t1=1, t2=2):
    r"""
    Compute logarithmically spaced scales for wavelets.

    Parameters
    ----------
    lmin : float
        Smallest non-zero eigenvalue.
    lmax : float
        Largest eigenvalue, i.e. :py:attr:`pygsp.graphs.Graph.lmax`.
    Nscales : int
        Number of scales.

    Returns
    -------
    scales : ndarray
        List of scales of length Nscales, in decreasing order.

    Examples
    --------
    >>> from pygsp import utils
    >>> utils.compute_log_scales(1, 10, 3)
    array([2.       , 0.4472136, 0.1      ])

    """
    # Spread the scales evenly on a logarithmic axis, from the largest
    # scale t2/lmin down to the smallest scale t1/lmax.
    log_limits = np.log([t2 / lmin, t1 / lmax])
    return np.exp(np.linspace(log_limits[0], log_limits[1], Nscales))
r"""
Compute logarithm scales for wavelets.
Parameters
----------
lmin : float
Smallest non-zero eigenvalue.
lmax : float
Largest eigenvalue, i.e. :py:attr:`pygsp.graphs.Graph.lmax`.
Nscales : int
Number of scales.
Returns
-------
scales : ndarray
List of scales of length Nscales.
Examples
--------
>>> from pygsp import utils
>>> utils.compute_log_scales(1, 10, 3)
array([2. , 0.4472136, 0.1 ])
"""
scale_min = t1 / lmax
scale_max = t2 / lmin
return np.exp(np.linspace(np.log(scale_max), np.log(scale_min), Nscales)) | [
"def",
"compute_log_scales",
"(",
"lmin",
",",
"lmax",
",",
"Nscales",
",",
"t1",
"=",
"1",
",",
"t2",
"=",
"2",
")",
":",
"scale_min",
"=",
"t1",
"/",
"lmax",
"scale_max",
"=",
"t2",
"/",
"lmin",
"return",
"np",
".",
"exp",
"(",
"np",
".",
"lins... | r"""
Compute logarithm scales for wavelets.
Parameters
----------
lmin : float
Smallest non-zero eigenvalue.
lmax : float
Largest eigenvalue, i.e. :py:attr:`pygsp.graphs.Graph.lmax`.
Nscales : int
Number of scales.
Returns
-------
scales : ndarray
List of scales of length Nscales.
Examples
--------
>>> from pygsp import utils
>>> utils.compute_log_scales(1, 10, 3)
array([2. , 0.4472136, 0.1 ]) | [
"r",
"Compute",
"logarithm",
"scales",
"for",
"wavelets",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L318-L345 | train | 210,911 |
def import_modules(names, src, dst):
    """Import the listed submodules of *src* and attach them to module *dst*."""
    for name in names:
        submodule = importlib.import_module('.'.join((src, name)))
        setattr(sys.modules[dst], name, submodule)
"""Import modules in package."""
for name in names:
module = importlib.import_module(src + '.' + name)
setattr(sys.modules[dst], name, module) | [
"def",
"import_modules",
"(",
"names",
",",
"src",
",",
"dst",
")",
":",
"for",
"name",
"in",
"names",
":",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"src",
"+",
"'.'",
"+",
"name",
")",
"setattr",
"(",
"sys",
".",
"modules",
"[",
"dst",... | Import modules in package. | [
"Import",
"modules",
"in",
"package",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L348-L352 | train | 210,912 |
def import_classes(names, src, dst):
    """Attach classes from their pygsp implementation modules to a pygsp package module."""
    for name in names:
        # Each class lives in a module named after it, in lowercase.
        impl = importlib.import_module('pygsp.' + src + '.' + name.lower())
        setattr(sys.modules['pygsp.' + dst], name, getattr(impl, name))
"""Import classes in package from their implementation modules."""
for name in names:
module = importlib.import_module('pygsp.' + src + '.' + name.lower())
setattr(sys.modules['pygsp.' + dst], name, getattr(module, name)) | [
"def",
"import_classes",
"(",
"names",
",",
"src",
",",
"dst",
")",
":",
"for",
"name",
"in",
"names",
":",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"'pygsp.'",
"+",
"src",
"+",
"'.'",
"+",
"name",
".",
"lower",
"(",
")",
")",
"setattr"... | Import classes in package from their implementation modules. | [
"Import",
"classes",
"in",
"package",
"from",
"their",
"implementation",
"modules",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L355-L359 | train | 210,913 |
def import_functions(names, src, dst):
    """Attach functions from their pygsp implementation module to a pygsp package module."""
    for name in names:
        source_module = importlib.import_module('pygsp.' + src)
        setattr(sys.modules['pygsp.' + dst], name, getattr(source_module, name))
"""Import functions in package from their implementation modules."""
for name in names:
module = importlib.import_module('pygsp.' + src)
setattr(sys.modules['pygsp.' + dst], name, getattr(module, name)) | [
"def",
"import_functions",
"(",
"names",
",",
"src",
",",
"dst",
")",
":",
"for",
"name",
"in",
"names",
":",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"'pygsp.'",
"+",
"src",
")",
"setattr",
"(",
"sys",
".",
"modules",
"[",
"'pygsp.'",
"+... | Import functions in package from their implementation modules. | [
"Import",
"functions",
"in",
"package",
"from",
"their",
"implementation",
"modules",
"."
] | 8ce5bde39206129287375af24fdbcd7edddca8c5 | https://github.com/epfl-lts2/pygsp/blob/8ce5bde39206129287375af24fdbcd7edddca8c5/pygsp/utils.py#L362-L366 | train | 210,914 |
richardchien/python-aiocqhttp | aiocqhttp/api.py | _handle_api_result | def _handle_api_result(result: Optional[Dict[str, Any]]) -> Any:
"""
Retrieve 'data' field from the API result object.
:param result: API result that received from HTTP API
:return: the 'data' field in result object
:raise ActionFailed: the 'status' field is 'failed'
"""
if isinstance(result, dict):
if result.get('status') == 'failed':
raise ActionFailed(retcode=result.get('retcode'))
return result.get('data') | python | def _handle_api_result(result: Optional[Dict[str, Any]]) -> Any:
"""
Retrieve 'data' field from the API result object.
:param result: API result that received from HTTP API
:return: the 'data' field in result object
:raise ActionFailed: the 'status' field is 'failed'
"""
if isinstance(result, dict):
if result.get('status') == 'failed':
raise ActionFailed(retcode=result.get('retcode'))
return result.get('data') | [
"def",
"_handle_api_result",
"(",
"result",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
")",
"->",
"Any",
":",
"if",
"isinstance",
"(",
"result",
",",
"dict",
")",
":",
"if",
"result",
".",
"get",
"(",
"'status'",
")",
"==",
"'f... | Retrieve 'data' field from the API result object.
:param result: API result that received from HTTP API
:return: the 'data' field in result object
:raise ActionFailed: the 'status' field is 'failed' | [
"Retrieve",
"data",
"field",
"from",
"the",
"API",
"result",
"object",
"."
] | 2ada22c449ab836e560dd945119b77e4e9581311 | https://github.com/richardchien/python-aiocqhttp/blob/2ada22c449ab836e560dd945119b77e4e9581311/aiocqhttp/api.py#L29-L40 | train | 210,915 |
richardchien/python-aiocqhttp | aiocqhttp/message.py | Message.reduce | def reduce(self) -> None:
"""
Remove redundant segments.
Since this class is implemented based on list,
this method may require O(n) time.
"""
idx = 0
while idx < len(self):
if idx > 0 and \
self[idx - 1].type == 'text' and self[idx].type == 'text':
self[idx - 1].data['text'] += self[idx].data['text']
del self[idx]
else:
idx += 1 | python | def reduce(self) -> None:
"""
Remove redundant segments.
Since this class is implemented based on list,
this method may require O(n) time.
"""
idx = 0
while idx < len(self):
if idx > 0 and \
self[idx - 1].type == 'text' and self[idx].type == 'text':
self[idx - 1].data['text'] += self[idx].data['text']
del self[idx]
else:
idx += 1 | [
"def",
"reduce",
"(",
"self",
")",
"->",
"None",
":",
"idx",
"=",
"0",
"while",
"idx",
"<",
"len",
"(",
"self",
")",
":",
"if",
"idx",
">",
"0",
"and",
"self",
"[",
"idx",
"-",
"1",
"]",
".",
"type",
"==",
"'text'",
"and",
"self",
"[",
"idx",... | Remove redundant segments.
Since this class is implemented based on list,
this method may require O(n) time. | [
"Remove",
"redundant",
"segments",
"."
] | 2ada22c449ab836e560dd945119b77e4e9581311 | https://github.com/richardchien/python-aiocqhttp/blob/2ada22c449ab836e560dd945119b77e4e9581311/aiocqhttp/message.py#L260-L274 | train | 210,916 |
richardchien/python-aiocqhttp | aiocqhttp/message.py | Message.extract_plain_text | def extract_plain_text(self, reduce: bool = False) -> str:
"""
Extract text segments from the message, joined by single space.
:param reduce: reduce the message before extracting
:return: the joined string
"""
if reduce:
self.reduce()
result = ''
for seg in self:
if seg.type == 'text':
result += ' ' + seg.data['text']
if result:
result = result[1:]
return result | python | def extract_plain_text(self, reduce: bool = False) -> str:
"""
Extract text segments from the message, joined by single space.
:param reduce: reduce the message before extracting
:return: the joined string
"""
if reduce:
self.reduce()
result = ''
for seg in self:
if seg.type == 'text':
result += ' ' + seg.data['text']
if result:
result = result[1:]
return result | [
"def",
"extract_plain_text",
"(",
"self",
",",
"reduce",
":",
"bool",
"=",
"False",
")",
"->",
"str",
":",
"if",
"reduce",
":",
"self",
".",
"reduce",
"(",
")",
"result",
"=",
"''",
"for",
"seg",
"in",
"self",
":",
"if",
"seg",
".",
"type",
"==",
... | Extract text segments from the message, joined by single space.
:param reduce: reduce the message before extracting
:return: the joined string | [
"Extract",
"text",
"segments",
"from",
"the",
"message",
"joined",
"by",
"single",
"space",
"."
] | 2ada22c449ab836e560dd945119b77e4e9581311 | https://github.com/richardchien/python-aiocqhttp/blob/2ada22c449ab836e560dd945119b77e4e9581311/aiocqhttp/message.py#L276-L292 | train | 210,917 |
pinax/django-mailer | mailer/__init__.py | send_html_mail | def send_html_mail(subject, message, message_html, from_email, recipient_list,
priority=None, fail_silently=False, auth_user=None,
auth_password=None, headers={}):
"""
Function to queue HTML e-mails
"""
from django.utils.encoding import force_text
from django.core.mail import EmailMultiAlternatives
from mailer.models import make_message
priority = get_priority(priority)
# need to do this in case subject used lazy version of ugettext
subject = force_text(subject)
message = force_text(message)
msg = make_message(subject=subject,
body=message,
from_email=from_email,
to=recipient_list,
priority=priority)
email = msg.email
email = EmailMultiAlternatives(
email.subject,
email.body,
email.from_email,
email.to,
headers=headers
)
email.attach_alternative(message_html, "text/html")
msg.email = email
msg.save()
return 1 | python | def send_html_mail(subject, message, message_html, from_email, recipient_list,
priority=None, fail_silently=False, auth_user=None,
auth_password=None, headers={}):
"""
Function to queue HTML e-mails
"""
from django.utils.encoding import force_text
from django.core.mail import EmailMultiAlternatives
from mailer.models import make_message
priority = get_priority(priority)
# need to do this in case subject used lazy version of ugettext
subject = force_text(subject)
message = force_text(message)
msg = make_message(subject=subject,
body=message,
from_email=from_email,
to=recipient_list,
priority=priority)
email = msg.email
email = EmailMultiAlternatives(
email.subject,
email.body,
email.from_email,
email.to,
headers=headers
)
email.attach_alternative(message_html, "text/html")
msg.email = email
msg.save()
return 1 | [
"def",
"send_html_mail",
"(",
"subject",
",",
"message",
",",
"message_html",
",",
"from_email",
",",
"recipient_list",
",",
"priority",
"=",
"None",
",",
"fail_silently",
"=",
"False",
",",
"auth_user",
"=",
"None",
",",
"auth_password",
"=",
"None",
",",
"... | Function to queue HTML e-mails | [
"Function",
"to",
"queue",
"HTML",
"e",
"-",
"mails"
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/__init__.py#L58-L90 | train | 210,918 |
pinax/django-mailer | mailer/models.py | make_message | def make_message(subject="", body="", from_email=None, to=None, bcc=None,
attachments=None, headers=None, priority=None):
"""
Creates a simple message for the email parameters supplied.
The 'to' and 'bcc' lists are filtered using DontSendEntry.
If needed, the 'email' attribute can be set to any instance of EmailMessage
if e-mails with attachments etc. need to be supported.
Call 'save()' on the result when it is ready to be sent, and not before.
"""
to = filter_recipient_list(to)
bcc = filter_recipient_list(bcc)
core_msg = EmailMessage(
subject=subject,
body=body,
from_email=from_email,
to=to,
bcc=bcc,
attachments=attachments,
headers=headers
)
db_msg = Message(priority=priority)
db_msg.email = core_msg
return db_msg | python | def make_message(subject="", body="", from_email=None, to=None, bcc=None,
attachments=None, headers=None, priority=None):
"""
Creates a simple message for the email parameters supplied.
The 'to' and 'bcc' lists are filtered using DontSendEntry.
If needed, the 'email' attribute can be set to any instance of EmailMessage
if e-mails with attachments etc. need to be supported.
Call 'save()' on the result when it is ready to be sent, and not before.
"""
to = filter_recipient_list(to)
bcc = filter_recipient_list(bcc)
core_msg = EmailMessage(
subject=subject,
body=body,
from_email=from_email,
to=to,
bcc=bcc,
attachments=attachments,
headers=headers
)
db_msg = Message(priority=priority)
db_msg.email = core_msg
return db_msg | [
"def",
"make_message",
"(",
"subject",
"=",
"\"\"",
",",
"body",
"=",
"\"\"",
",",
"from_email",
"=",
"None",
",",
"to",
"=",
"None",
",",
"bcc",
"=",
"None",
",",
"attachments",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"priority",
"=",
"None",... | Creates a simple message for the email parameters supplied.
The 'to' and 'bcc' lists are filtered using DontSendEntry.
If needed, the 'email' attribute can be set to any instance of EmailMessage
if e-mails with attachments etc. need to be supported.
Call 'save()' on the result when it is ready to be sent, and not before. | [
"Creates",
"a",
"simple",
"message",
"for",
"the",
"email",
"parameters",
"supplied",
".",
"The",
"to",
"and",
"bcc",
"lists",
"are",
"filtered",
"using",
"DontSendEntry",
"."
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/models.py#L187-L211 | train | 210,919 |
pinax/django-mailer | mailer/models.py | DontSendEntryManager.has_address | def has_address(self, address):
"""
is the given address on the don't send list?
"""
queryset = self.filter(to_address__iexact=address)
return queryset.exists() | python | def has_address(self, address):
"""
is the given address on the don't send list?
"""
queryset = self.filter(to_address__iexact=address)
return queryset.exists() | [
"def",
"has_address",
"(",
"self",
",",
"address",
")",
":",
"queryset",
"=",
"self",
".",
"filter",
"(",
"to_address__iexact",
"=",
"address",
")",
"return",
"queryset",
".",
"exists",
"(",
")"
] | is the given address on the don't send list? | [
"is",
"the",
"given",
"address",
"on",
"the",
"don",
"t",
"send",
"list?"
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/models.py#L216-L221 | train | 210,920 |
pinax/django-mailer | mailer/engine.py | prioritize | def prioritize():
"""
Yield the messages in the queue in the order they should be sent.
"""
while True:
hp_qs = Message.objects.high_priority().using('default')
mp_qs = Message.objects.medium_priority().using('default')
lp_qs = Message.objects.low_priority().using('default')
while hp_qs.count() or mp_qs.count():
while hp_qs.count():
for message in hp_qs.order_by("when_added"):
yield message
while hp_qs.count() == 0 and mp_qs.count():
yield mp_qs.order_by("when_added")[0]
while hp_qs.count() == 0 and mp_qs.count() == 0 and lp_qs.count():
yield lp_qs.order_by("when_added")[0]
if Message.objects.non_deferred().using('default').count() == 0:
break | python | def prioritize():
"""
Yield the messages in the queue in the order they should be sent.
"""
while True:
hp_qs = Message.objects.high_priority().using('default')
mp_qs = Message.objects.medium_priority().using('default')
lp_qs = Message.objects.low_priority().using('default')
while hp_qs.count() or mp_qs.count():
while hp_qs.count():
for message in hp_qs.order_by("when_added"):
yield message
while hp_qs.count() == 0 and mp_qs.count():
yield mp_qs.order_by("when_added")[0]
while hp_qs.count() == 0 and mp_qs.count() == 0 and lp_qs.count():
yield lp_qs.order_by("when_added")[0]
if Message.objects.non_deferred().using('default').count() == 0:
break | [
"def",
"prioritize",
"(",
")",
":",
"while",
"True",
":",
"hp_qs",
"=",
"Message",
".",
"objects",
".",
"high_priority",
"(",
")",
".",
"using",
"(",
"'default'",
")",
"mp_qs",
"=",
"Message",
".",
"objects",
".",
"medium_priority",
"(",
")",
".",
"usi... | Yield the messages in the queue in the order they should be sent. | [
"Yield",
"the",
"messages",
"in",
"the",
"queue",
"in",
"the",
"order",
"they",
"should",
"be",
"sent",
"."
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/engine.py#L31-L49 | train | 210,921 |
pinax/django-mailer | mailer/engine.py | send_all | def send_all():
"""
Send all eligible messages in the queue.
"""
# The actual backend to use for sending, defaulting to the Django default.
# To make testing easier this is not stored at module level.
EMAIL_BACKEND = getattr(
settings,
"MAILER_EMAIL_BACKEND",
"django.core.mail.backends.smtp.EmailBackend"
)
acquired, lock = acquire_lock()
if not acquired:
return
start_time = time.time()
deferred = 0
sent = 0
try:
connection = None
for message in prioritize():
try:
if connection is None:
connection = get_connection(backend=EMAIL_BACKEND)
logging.info("sending message '{0}' to {1}".format(
message.subject,
", ".join(message.to_addresses))
)
email = message.email
if email is not None:
email.connection = connection
if not hasattr(email, 'reply_to'):
# Compatability fix for EmailMessage objects
# pickled when running < Django 1.8 and then
# unpickled under Django 1.8
email.reply_to = []
ensure_message_id(email)
email.send()
# connection can't be stored in the MessageLog
email.connection = None
message.email = email # For the sake of MessageLog
MessageLog.objects.log(message, RESULT_SUCCESS)
sent += 1
else:
logging.warning("message discarded due to failure in converting from DB. Added on '%s' with priority '%s'" % (message.when_added, message.priority)) # noqa
message.delete()
except (socket_error, smtplib.SMTPSenderRefused,
smtplib.SMTPRecipientsRefused,
smtplib.SMTPDataError,
smtplib.SMTPAuthenticationError) as err:
message.defer()
logging.info("message deferred due to failure: %s" % err)
MessageLog.objects.log(message, RESULT_FAILURE, log_message=str(err))
deferred += 1
# Get new connection, it case the connection itself has an error.
connection = None
# Check if we reached the limits for the current run
if _limits_reached(sent, deferred):
break
_throttle_emails()
finally:
release_lock(lock)
logging.info("")
logging.info("%s sent; %s deferred;" % (sent, deferred))
logging.info("done in %.2f seconds" % (time.time() - start_time)) | python | def send_all():
"""
Send all eligible messages in the queue.
"""
# The actual backend to use for sending, defaulting to the Django default.
# To make testing easier this is not stored at module level.
EMAIL_BACKEND = getattr(
settings,
"MAILER_EMAIL_BACKEND",
"django.core.mail.backends.smtp.EmailBackend"
)
acquired, lock = acquire_lock()
if not acquired:
return
start_time = time.time()
deferred = 0
sent = 0
try:
connection = None
for message in prioritize():
try:
if connection is None:
connection = get_connection(backend=EMAIL_BACKEND)
logging.info("sending message '{0}' to {1}".format(
message.subject,
", ".join(message.to_addresses))
)
email = message.email
if email is not None:
email.connection = connection
if not hasattr(email, 'reply_to'):
# Compatability fix for EmailMessage objects
# pickled when running < Django 1.8 and then
# unpickled under Django 1.8
email.reply_to = []
ensure_message_id(email)
email.send()
# connection can't be stored in the MessageLog
email.connection = None
message.email = email # For the sake of MessageLog
MessageLog.objects.log(message, RESULT_SUCCESS)
sent += 1
else:
logging.warning("message discarded due to failure in converting from DB. Added on '%s' with priority '%s'" % (message.when_added, message.priority)) # noqa
message.delete()
except (socket_error, smtplib.SMTPSenderRefused,
smtplib.SMTPRecipientsRefused,
smtplib.SMTPDataError,
smtplib.SMTPAuthenticationError) as err:
message.defer()
logging.info("message deferred due to failure: %s" % err)
MessageLog.objects.log(message, RESULT_FAILURE, log_message=str(err))
deferred += 1
# Get new connection, it case the connection itself has an error.
connection = None
# Check if we reached the limits for the current run
if _limits_reached(sent, deferred):
break
_throttle_emails()
finally:
release_lock(lock)
logging.info("")
logging.info("%s sent; %s deferred;" % (sent, deferred))
logging.info("done in %.2f seconds" % (time.time() - start_time)) | [
"def",
"send_all",
"(",
")",
":",
"# The actual backend to use for sending, defaulting to the Django default.",
"# To make testing easier this is not stored at module level.",
"EMAIL_BACKEND",
"=",
"getattr",
"(",
"settings",
",",
"\"MAILER_EMAIL_BACKEND\"",
",",
"\"django.core.mail.ba... | Send all eligible messages in the queue. | [
"Send",
"all",
"eligible",
"messages",
"in",
"the",
"queue",
"."
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/engine.py#L115-L188 | train | 210,922 |
pinax/django-mailer | mailer/engine.py | send_loop | def send_loop():
"""
Loop indefinitely, checking queue at intervals of EMPTY_QUEUE_SLEEP and
sending messages if any are on queue.
"""
while True:
while not Message.objects.all():
logging.debug("sleeping for %s seconds before checking queue again" % EMPTY_QUEUE_SLEEP)
time.sleep(EMPTY_QUEUE_SLEEP)
send_all() | python | def send_loop():
"""
Loop indefinitely, checking queue at intervals of EMPTY_QUEUE_SLEEP and
sending messages if any are on queue.
"""
while True:
while not Message.objects.all():
logging.debug("sleeping for %s seconds before checking queue again" % EMPTY_QUEUE_SLEEP)
time.sleep(EMPTY_QUEUE_SLEEP)
send_all() | [
"def",
"send_loop",
"(",
")",
":",
"while",
"True",
":",
"while",
"not",
"Message",
".",
"objects",
".",
"all",
"(",
")",
":",
"logging",
".",
"debug",
"(",
"\"sleeping for %s seconds before checking queue again\"",
"%",
"EMPTY_QUEUE_SLEEP",
")",
"time",
".",
... | Loop indefinitely, checking queue at intervals of EMPTY_QUEUE_SLEEP and
sending messages if any are on queue. | [
"Loop",
"indefinitely",
"checking",
"queue",
"at",
"intervals",
"of",
"EMPTY_QUEUE_SLEEP",
"and",
"sending",
"messages",
"if",
"any",
"are",
"on",
"queue",
"."
] | 129a848090d5de8a3e25067048ba6d3091c3b187 | https://github.com/pinax/django-mailer/blob/129a848090d5de8a3e25067048ba6d3091c3b187/mailer/engine.py#L191-L201 | train | 210,923 |
django-getpaid/django-plans | plans/importer.py | import_name | def import_name(name):
""" import module given by str or pass the module if it is not str """
if isinstance(name, str):
components = name.split('.')
mod = __import__('.'.join(components[0:-1]), globals(), locals(), [components[-1]] )
return getattr(mod, components[-1])
else:
return name | python | def import_name(name):
""" import module given by str or pass the module if it is not str """
if isinstance(name, str):
components = name.split('.')
mod = __import__('.'.join(components[0:-1]), globals(), locals(), [components[-1]] )
return getattr(mod, components[-1])
else:
return name | [
"def",
"import_name",
"(",
"name",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"str",
")",
":",
"components",
"=",
"name",
".",
"split",
"(",
"'.'",
")",
"mod",
"=",
"__import__",
"(",
"'.'",
".",
"join",
"(",
"components",
"[",
"0",
":",
"-",
... | import module given by str or pass the module if it is not str | [
"import",
"module",
"given",
"by",
"str",
"or",
"pass",
"the",
"module",
"if",
"it",
"is",
"not",
"str"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/importer.py#L1-L8 | train | 210,924 |
django-getpaid/django-plans | plans/admin.py | copy_plan | def copy_plan(modeladmin, request, queryset):
"""
Admin command for duplicating plans preserving quotas and pricings.
"""
for plan in queryset:
plan_copy = deepcopy(plan)
plan_copy.id = None
plan_copy.available = False
plan_copy.default = False
plan_copy.created = None
plan_copy.save(force_insert=True)
for pricing in plan.planpricing_set.all():
pricing.id = None
pricing.plan = plan_copy
pricing.save(force_insert=True)
for quota in plan.planquota_set.all():
quota.id = None
quota.plan = plan_copy
quota.save(force_insert=True) | python | def copy_plan(modeladmin, request, queryset):
"""
Admin command for duplicating plans preserving quotas and pricings.
"""
for plan in queryset:
plan_copy = deepcopy(plan)
plan_copy.id = None
plan_copy.available = False
plan_copy.default = False
plan_copy.created = None
plan_copy.save(force_insert=True)
for pricing in plan.planpricing_set.all():
pricing.id = None
pricing.plan = plan_copy
pricing.save(force_insert=True)
for quota in plan.planquota_set.all():
quota.id = None
quota.plan = plan_copy
quota.save(force_insert=True) | [
"def",
"copy_plan",
"(",
"modeladmin",
",",
"request",
",",
"queryset",
")",
":",
"for",
"plan",
"in",
"queryset",
":",
"plan_copy",
"=",
"deepcopy",
"(",
"plan",
")",
"plan_copy",
".",
"id",
"=",
"None",
"plan_copy",
".",
"available",
"=",
"False",
"pla... | Admin command for duplicating plans preserving quotas and pricings. | [
"Admin",
"command",
"for",
"duplicating",
"plans",
"preserving",
"quotas",
"and",
"pricings",
"."
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/admin.py#L43-L63 | train | 210,925 |
django-getpaid/django-plans | plans/views.py | CreateOrderView.recalculate | def recalculate(self, amount, billing_info):
"""
Calculates and return pre-filled Order
"""
order = Order(pk=-1)
order.amount = amount
order.currency = self.get_currency()
country = getattr(billing_info, 'country', None)
if not country is None:
country = country.code
tax_number = getattr(billing_info, 'tax_number', None)
# Calculating tax can be complex task (e.g. VIES webservice call)
# To ensure that tax calculated on order preview will be the same on final order
# tax rate is cached for a given billing data (as this value only depends on it)
tax_session_key = "tax_%s_%s" % (tax_number, country)
tax = self.request.session.get(tax_session_key)
if tax is None:
taxation_policy = getattr(settings, 'PLANS_TAXATION_POLICY', None)
if not taxation_policy:
raise ImproperlyConfigured('PLANS_TAXATION_POLICY is not set')
taxation_policy = import_name(taxation_policy)
tax = str(taxation_policy.get_tax_rate(tax_number, country))
# Because taxation policy could return None which clutters with saving this value
# into cache, we use str() representation of this value
self.request.session[tax_session_key] = tax
order.tax = Decimal(tax) if tax != 'None' else None
return order | python | def recalculate(self, amount, billing_info):
"""
Calculates and return pre-filled Order
"""
order = Order(pk=-1)
order.amount = amount
order.currency = self.get_currency()
country = getattr(billing_info, 'country', None)
if not country is None:
country = country.code
tax_number = getattr(billing_info, 'tax_number', None)
# Calculating tax can be complex task (e.g. VIES webservice call)
# To ensure that tax calculated on order preview will be the same on final order
# tax rate is cached for a given billing data (as this value only depends on it)
tax_session_key = "tax_%s_%s" % (tax_number, country)
tax = self.request.session.get(tax_session_key)
if tax is None:
taxation_policy = getattr(settings, 'PLANS_TAXATION_POLICY', None)
if not taxation_policy:
raise ImproperlyConfigured('PLANS_TAXATION_POLICY is not set')
taxation_policy = import_name(taxation_policy)
tax = str(taxation_policy.get_tax_rate(tax_number, country))
# Because taxation policy could return None which clutters with saving this value
# into cache, we use str() representation of this value
self.request.session[tax_session_key] = tax
order.tax = Decimal(tax) if tax != 'None' else None
return order | [
"def",
"recalculate",
"(",
"self",
",",
"amount",
",",
"billing_info",
")",
":",
"order",
"=",
"Order",
"(",
"pk",
"=",
"-",
"1",
")",
"order",
".",
"amount",
"=",
"amount",
"order",
".",
"currency",
"=",
"self",
".",
"get_currency",
"(",
")",
"count... | Calculates and return pre-filled Order | [
"Calculates",
"and",
"return",
"pre",
"-",
"filled",
"Order"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/views.py#L179-L209 | train | 210,926 |
django-getpaid/django-plans | plans/views.py | CreateOrderView.get_all_context | def get_all_context(self):
"""
Retrieves Plan and Pricing for current order creation
"""
self.plan_pricing = get_object_or_404(PlanPricing.objects.all().select_related('plan', 'pricing'),
Q(pk=self.kwargs['pk']) & Q(plan__available=True) & (
Q(plan__customized=self.request.user) | Q(
plan__customized__isnull=True)))
# User is not allowed to create new order for Plan when he has different Plan
# He should use Plan Change View for this kind of action
if not self.request.user.userplan.is_expired() and self.request.user.userplan.plan != self.plan_pricing.plan:
raise Http404
self.plan = self.plan_pricing.plan
self.pricing = self.plan_pricing.pricing | python | def get_all_context(self):
"""
Retrieves Plan and Pricing for current order creation
"""
self.plan_pricing = get_object_or_404(PlanPricing.objects.all().select_related('plan', 'pricing'),
Q(pk=self.kwargs['pk']) & Q(plan__available=True) & (
Q(plan__customized=self.request.user) | Q(
plan__customized__isnull=True)))
# User is not allowed to create new order for Plan when he has different Plan
# He should use Plan Change View for this kind of action
if not self.request.user.userplan.is_expired() and self.request.user.userplan.plan != self.plan_pricing.plan:
raise Http404
self.plan = self.plan_pricing.plan
self.pricing = self.plan_pricing.pricing | [
"def",
"get_all_context",
"(",
"self",
")",
":",
"self",
".",
"plan_pricing",
"=",
"get_object_or_404",
"(",
"PlanPricing",
".",
"objects",
".",
"all",
"(",
")",
".",
"select_related",
"(",
"'plan'",
",",
"'pricing'",
")",
",",
"Q",
"(",
"pk",
"=",
"self... | Retrieves Plan and Pricing for current order creation | [
"Retrieves",
"Plan",
"and",
"Pricing",
"for",
"current",
"order",
"creation"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/views.py#L223-L239 | train | 210,927 |
django-getpaid/django-plans | plans/listeners.py | create_proforma_invoice | def create_proforma_invoice(sender, instance, created, **kwargs):
"""
For every Order if there are defined billing_data creates invoice proforma,
which is an order confirmation document
"""
if created:
Invoice.create(instance, Invoice.INVOICE_TYPES['PROFORMA']) | python | def create_proforma_invoice(sender, instance, created, **kwargs):
"""
For every Order if there are defined billing_data creates invoice proforma,
which is an order confirmation document
"""
if created:
Invoice.create(instance, Invoice.INVOICE_TYPES['PROFORMA']) | [
"def",
"create_proforma_invoice",
"(",
"sender",
",",
"instance",
",",
"created",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"created",
":",
"Invoice",
".",
"create",
"(",
"instance",
",",
"Invoice",
".",
"INVOICE_TYPES",
"[",
"'PROFORMA'",
"]",
")"
] | For every Order if there are defined billing_data creates invoice proforma,
which is an order confirmation document | [
"For",
"every",
"Order",
"if",
"there",
"are",
"defined",
"billing_data",
"creates",
"invoice",
"proforma",
"which",
"is",
"an",
"order",
"confirmation",
"document"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/listeners.py#L12-L18 | train | 210,928 |
django-getpaid/django-plans | plans/validators.py | QuotaValidator.get_quota_value | def get_quota_value(self, user, quota_dict=None):
"""
Returns quota value for a given user
"""
if quota_dict is None:
quota_dict = get_user_quota(user)
return quota_dict.get(self.code, self.default_quota_value) | python | def get_quota_value(self, user, quota_dict=None):
"""
Returns quota value for a given user
"""
if quota_dict is None:
quota_dict = get_user_quota(user)
return quota_dict.get(self.code, self.default_quota_value) | [
"def",
"get_quota_value",
"(",
"self",
",",
"user",
",",
"quota_dict",
"=",
"None",
")",
":",
"if",
"quota_dict",
"is",
"None",
":",
"quota_dict",
"=",
"get_user_quota",
"(",
"user",
")",
"return",
"quota_dict",
".",
"get",
"(",
"self",
".",
"code",
",",... | Returns quota value for a given user | [
"Returns",
"quota",
"value",
"for",
"a",
"given",
"user"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/validators.py#L22-L29 | train | 210,929 |
django-getpaid/django-plans | plans/contrib.py | send_template_email | def send_template_email(recipients, title_template, body_template, context, language):
"""Sends e-mail using templating system"""
send_emails = getattr(settings, 'SEND_PLANS_EMAILS', True)
if not send_emails:
return
site_name = getattr(settings, 'SITE_NAME', 'Please define settings.SITE_NAME')
domain = getattr(settings, 'SITE_URL', None)
if domain is None:
try:
Site = apps.get_model('sites', 'Site')
current_site = Site.objects.get_current()
site_name = current_site.name
domain = current_site.domain
except LookupError:
pass
context.update({'site_name': site_name, 'site_domain': domain})
if language is not None:
translation.activate(language)
mail_title_template = loader.get_template(title_template)
mail_body_template = loader.get_template(body_template)
title = mail_title_template.render(context)
body = mail_body_template.render(context)
try:
email_from = getattr(settings, 'DEFAULT_FROM_EMAIL')
except AttributeError:
raise ImproperlyConfigured('DEFAULT_FROM_EMAIL setting needed for sending e-mails')
mail.send_mail(title, body, email_from, recipients)
if language is not None:
translation.deactivate()
email_logger.info(u"Email (%s) sent to %s\nTitle: %s\n%s\n\n" % (language, recipients, title, body)) | python | def send_template_email(recipients, title_template, body_template, context, language):
"""Sends e-mail using templating system"""
send_emails = getattr(settings, 'SEND_PLANS_EMAILS', True)
if not send_emails:
return
site_name = getattr(settings, 'SITE_NAME', 'Please define settings.SITE_NAME')
domain = getattr(settings, 'SITE_URL', None)
if domain is None:
try:
Site = apps.get_model('sites', 'Site')
current_site = Site.objects.get_current()
site_name = current_site.name
domain = current_site.domain
except LookupError:
pass
context.update({'site_name': site_name, 'site_domain': domain})
if language is not None:
translation.activate(language)
mail_title_template = loader.get_template(title_template)
mail_body_template = loader.get_template(body_template)
title = mail_title_template.render(context)
body = mail_body_template.render(context)
try:
email_from = getattr(settings, 'DEFAULT_FROM_EMAIL')
except AttributeError:
raise ImproperlyConfigured('DEFAULT_FROM_EMAIL setting needed for sending e-mails')
mail.send_mail(title, body, email_from, recipients)
if language is not None:
translation.deactivate()
email_logger.info(u"Email (%s) sent to %s\nTitle: %s\n%s\n\n" % (language, recipients, title, body)) | [
"def",
"send_template_email",
"(",
"recipients",
",",
"title_template",
",",
"body_template",
",",
"context",
",",
"language",
")",
":",
"send_emails",
"=",
"getattr",
"(",
"settings",
",",
"'SEND_PLANS_EMAILS'",
",",
"True",
")",
"if",
"not",
"send_emails",
":"... | Sends e-mail using templating system | [
"Sends",
"e",
"-",
"mail",
"using",
"templating",
"system"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/contrib.py#L13-L52 | train | 210,930 |
django-getpaid/django-plans | plans/plan_change.py | PlanChangePolicy._calculate_day_cost | def _calculate_day_cost(self, plan, period):
"""
Finds most fitted plan pricing for a given period, and calculate day cost
"""
plan_pricings = plan.planpricing_set.order_by('-pricing__period').select_related('pricing')
selected_pricing = None
for plan_pricing in plan_pricings:
selected_pricing = plan_pricing
if plan_pricing.pricing.period <= period:
break
if selected_pricing:
return (selected_pricing.price / selected_pricing.pricing.period).quantize(Decimal('1.00'))
raise ValueError('Plan %s has no pricings.' % plan) | python | def _calculate_day_cost(self, plan, period):
"""
Finds most fitted plan pricing for a given period, and calculate day cost
"""
plan_pricings = plan.planpricing_set.order_by('-pricing__period').select_related('pricing')
selected_pricing = None
for plan_pricing in plan_pricings:
selected_pricing = plan_pricing
if plan_pricing.pricing.period <= period:
break
if selected_pricing:
return (selected_pricing.price / selected_pricing.pricing.period).quantize(Decimal('1.00'))
raise ValueError('Plan %s has no pricings.' % plan) | [
"def",
"_calculate_day_cost",
"(",
"self",
",",
"plan",
",",
"period",
")",
":",
"plan_pricings",
"=",
"plan",
".",
"planpricing_set",
".",
"order_by",
"(",
"'-pricing__period'",
")",
".",
"select_related",
"(",
"'pricing'",
")",
"selected_pricing",
"=",
"None",... | Finds most fitted plan pricing for a given period, and calculate day cost | [
"Finds",
"most",
"fitted",
"plan",
"pricing",
"for",
"a",
"given",
"period",
"and",
"calculate",
"day",
"cost"
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/plan_change.py#L6-L21 | train | 210,931 |
django-getpaid/django-plans | plans/plan_change.py | PlanChangePolicy.get_change_price | def get_change_price(self, plan_old, plan_new, period):
"""
Calculates total price of plan change. Returns None if no payment is required.
"""
if period is None or period < 1:
return None
plan_old_day_cost = self._calculate_day_cost(plan_old, period)
plan_new_day_cost = self._calculate_day_cost(plan_new, period)
if plan_new_day_cost <= plan_old_day_cost:
return self._calculate_final_price(period, None)
else:
return self._calculate_final_price(period, plan_new_day_cost - plan_old_day_cost) | python | def get_change_price(self, plan_old, plan_new, period):
"""
Calculates total price of plan change. Returns None if no payment is required.
"""
if period is None or period < 1:
return None
plan_old_day_cost = self._calculate_day_cost(plan_old, period)
plan_new_day_cost = self._calculate_day_cost(plan_new, period)
if plan_new_day_cost <= plan_old_day_cost:
return self._calculate_final_price(period, None)
else:
return self._calculate_final_price(period, plan_new_day_cost - plan_old_day_cost) | [
"def",
"get_change_price",
"(",
"self",
",",
"plan_old",
",",
"plan_new",
",",
"period",
")",
":",
"if",
"period",
"is",
"None",
"or",
"period",
"<",
"1",
":",
"return",
"None",
"plan_old_day_cost",
"=",
"self",
".",
"_calculate_day_cost",
"(",
"plan_old",
... | Calculates total price of plan change. Returns None if no payment is required. | [
"Calculates",
"total",
"price",
"of",
"plan",
"change",
".",
"Returns",
"None",
"if",
"no",
"payment",
"is",
"required",
"."
] | 6897fcb0ed02bfba0c689292961532fe400b7ba2 | https://github.com/django-getpaid/django-plans/blob/6897fcb0ed02bfba0c689292961532fe400b7ba2/plans/plan_change.py#L29-L42 | train | 210,932 |
k-bx/python-semver | semver.py | comparator | def comparator(operator):
""" Wrap a VersionInfo binary op method in a type-check """
@wraps(operator)
def wrapper(self, other):
if not isinstance(other, (VersionInfo, dict)):
return NotImplemented
return operator(self, other)
return wrapper | python | def comparator(operator):
""" Wrap a VersionInfo binary op method in a type-check """
@wraps(operator)
def wrapper(self, other):
if not isinstance(other, (VersionInfo, dict)):
return NotImplemented
return operator(self, other)
return wrapper | [
"def",
"comparator",
"(",
"operator",
")",
":",
"@",
"wraps",
"(",
"operator",
")",
"def",
"wrapper",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"(",
"VersionInfo",
",",
"dict",
")",
")",
":",
"return",
"NotImp... | Wrap a VersionInfo binary op method in a type-check | [
"Wrap",
"a",
"VersionInfo",
"binary",
"op",
"method",
"in",
"a",
"type",
"-",
"check"
] | c399fe7a2075dd699a79d27d7a16d4bee953d2ed | https://github.com/k-bx/python-semver/blob/c399fe7a2075dd699a79d27d7a16d4bee953d2ed/semver.py#L77-L84 | train | 210,933 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.set_sampled_topics | def set_sampled_topics(self, sampled_topics):
"""
Allocate sampled topics to the documents rather than estimate them.
Automatically generate term-topic and document-topic matrices.
"""
assert sampled_topics.dtype == np.int and \
len(sampled_topics.shape) <= 2
if len(sampled_topics.shape) == 1:
self.sampled_topics = \
sampled_topics.reshape(1, sampled_topics.shape[0])
else:
self.sampled_topics = sampled_topics
self.samples = self.sampled_topics.shape[0]
self.tt = self.tt_comp(self.sampled_topics)
self.dt = self.dt_comp(self.sampled_topics) | python | def set_sampled_topics(self, sampled_topics):
"""
Allocate sampled topics to the documents rather than estimate them.
Automatically generate term-topic and document-topic matrices.
"""
assert sampled_topics.dtype == np.int and \
len(sampled_topics.shape) <= 2
if len(sampled_topics.shape) == 1:
self.sampled_topics = \
sampled_topics.reshape(1, sampled_topics.shape[0])
else:
self.sampled_topics = sampled_topics
self.samples = self.sampled_topics.shape[0]
self.tt = self.tt_comp(self.sampled_topics)
self.dt = self.dt_comp(self.sampled_topics) | [
"def",
"set_sampled_topics",
"(",
"self",
",",
"sampled_topics",
")",
":",
"assert",
"sampled_topics",
".",
"dtype",
"==",
"np",
".",
"int",
"and",
"len",
"(",
"sampled_topics",
".",
"shape",
")",
"<=",
"2",
"if",
"len",
"(",
"sampled_topics",
".",
"shape"... | Allocate sampled topics to the documents rather than estimate them.
Automatically generate term-topic and document-topic matrices. | [
"Allocate",
"sampled",
"topics",
"to",
"the",
"documents",
"rather",
"than",
"estimate",
"them",
".",
"Automatically",
"generate",
"term",
"-",
"topic",
"and",
"document",
"-",
"topic",
"matrices",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L69-L88 | train | 210,934 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.dt_comp | def dt_comp(self, sampled_topics):
"""
Compute document-topic matrix from sampled_topics.
"""
samples = sampled_topics.shape[0]
dt = np.zeros((self.D, self.K, samples))
for s in range(samples):
dt[:, :, s] = \
samplers_lda.dt_comp(self.docid, sampled_topics[s, :],
self.N, self.K, self.D, self.alpha)
return dt | python | def dt_comp(self, sampled_topics):
"""
Compute document-topic matrix from sampled_topics.
"""
samples = sampled_topics.shape[0]
dt = np.zeros((self.D, self.K, samples))
for s in range(samples):
dt[:, :, s] = \
samplers_lda.dt_comp(self.docid, sampled_topics[s, :],
self.N, self.K, self.D, self.alpha)
return dt | [
"def",
"dt_comp",
"(",
"self",
",",
"sampled_topics",
")",
":",
"samples",
"=",
"sampled_topics",
".",
"shape",
"[",
"0",
"]",
"dt",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"D",
",",
"self",
".",
"K",
",",
"samples",
")",
")",
"for",
"s",... | Compute document-topic matrix from sampled_topics. | [
"Compute",
"document",
"-",
"topic",
"matrix",
"from",
"sampled_topics",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L137-L151 | train | 210,935 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.tt_comp | def tt_comp(self, sampled_topics):
"""
Compute term-topic matrix from sampled_topics.
"""
samples = sampled_topics.shape[0]
tt = np.zeros((self.V, self.K, samples))
for s in range(samples):
tt[:, :, s] = \
samplers_lda.tt_comp(self.tokens, sampled_topics[s, :],
self.N, self.V, self.K, self.beta)
return tt | python | def tt_comp(self, sampled_topics):
"""
Compute term-topic matrix from sampled_topics.
"""
samples = sampled_topics.shape[0]
tt = np.zeros((self.V, self.K, samples))
for s in range(samples):
tt[:, :, s] = \
samplers_lda.tt_comp(self.tokens, sampled_topics[s, :],
self.N, self.V, self.K, self.beta)
return tt | [
"def",
"tt_comp",
"(",
"self",
",",
"sampled_topics",
")",
":",
"samples",
"=",
"sampled_topics",
".",
"shape",
"[",
"0",
"]",
"tt",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"V",
",",
"self",
".",
"K",
",",
"samples",
")",
")",
"for",
"s",... | Compute term-topic matrix from sampled_topics. | [
"Compute",
"term",
"-",
"topic",
"matrix",
"from",
"sampled_topics",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L153-L167 | train | 210,936 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.topic_content | def topic_content(self, W, output_file="topic_description.csv"):
"""
Print top W words in each topic to file.
"""
topic_top_probs = []
topic_top_words = []
tt = self.tt_avg(False)
for t in range(self.K):
top_word_indices = list(tt[:, t].argsort()[-W:][::-1])
topic_top_probs.append(np.round(np.sort(tt[:, t])[-W:][::-1], 3))
topic_top_words.append([list(self.token_key.keys())
[list(self.token_key.values()).index(i)]
for i in top_word_indices])
with codecs.open(output_file, "w", "utf-8") as f:
for t in range(self.K):
words = ','.join(topic_top_words[t])
probs = ','.join([str(i) for i in topic_top_probs[t]])
f.write("topic" + str(t) + ',')
f.write("%s\n" % words)
f.write(" " + ',')
f.write("%s\n" % probs) | python | def topic_content(self, W, output_file="topic_description.csv"):
"""
Print top W words in each topic to file.
"""
topic_top_probs = []
topic_top_words = []
tt = self.tt_avg(False)
for t in range(self.K):
top_word_indices = list(tt[:, t].argsort()[-W:][::-1])
topic_top_probs.append(np.round(np.sort(tt[:, t])[-W:][::-1], 3))
topic_top_words.append([list(self.token_key.keys())
[list(self.token_key.values()).index(i)]
for i in top_word_indices])
with codecs.open(output_file, "w", "utf-8") as f:
for t in range(self.K):
words = ','.join(topic_top_words[t])
probs = ','.join([str(i) for i in topic_top_probs[t]])
f.write("topic" + str(t) + ',')
f.write("%s\n" % words)
f.write(" " + ',')
f.write("%s\n" % probs) | [
"def",
"topic_content",
"(",
"self",
",",
"W",
",",
"output_file",
"=",
"\"topic_description.csv\"",
")",
":",
"topic_top_probs",
"=",
"[",
"]",
"topic_top_words",
"=",
"[",
"]",
"tt",
"=",
"self",
".",
"tt_avg",
"(",
"False",
")",
"for",
"t",
"in",
"ran... | Print top W words in each topic to file. | [
"Print",
"top",
"W",
"words",
"in",
"each",
"topic",
"to",
"file",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L169-L194 | train | 210,937 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.perplexity | def perplexity(self):
"""
Compute perplexity for each sample.
"""
return samplers_lda.perplexity_comp(self.docid, self.tokens,
self.tt, self.dt, self.N,
self.K, self.samples) | python | def perplexity(self):
"""
Compute perplexity for each sample.
"""
return samplers_lda.perplexity_comp(self.docid, self.tokens,
self.tt, self.dt, self.N,
self.K, self.samples) | [
"def",
"perplexity",
"(",
"self",
")",
":",
"return",
"samplers_lda",
".",
"perplexity_comp",
"(",
"self",
".",
"docid",
",",
"self",
".",
"tokens",
",",
"self",
".",
"tt",
",",
"self",
".",
"dt",
",",
"self",
".",
"N",
",",
"self",
".",
"K",
",",
... | Compute perplexity for each sample. | [
"Compute",
"perplexity",
"for",
"each",
"sample",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L196-L204 | train | 210,938 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.samples_keep | def samples_keep(self, index):
"""
Keep subset of samples. If index is an integer, keep last N=index
samples. If index is a list, keep the samples
corresponding to the index values in the list.
"""
try:
if isinstance(index, (int, long)):
index = range(self.samples)[-index:]
except(NameError):
if isinstance(index,int):
index = range(self.samples)[-index:]
self.sampled_topics = np.take(self.sampled_topics, index, axis=0)
self.tt = np.take(self.tt, index, axis=2)
self.dt = np.take(self.dt, index, axis=2)
self.samples = len(index) | python | def samples_keep(self, index):
"""
Keep subset of samples. If index is an integer, keep last N=index
samples. If index is a list, keep the samples
corresponding to the index values in the list.
"""
try:
if isinstance(index, (int, long)):
index = range(self.samples)[-index:]
except(NameError):
if isinstance(index,int):
index = range(self.samples)[-index:]
self.sampled_topics = np.take(self.sampled_topics, index, axis=0)
self.tt = np.take(self.tt, index, axis=2)
self.dt = np.take(self.dt, index, axis=2)
self.samples = len(index) | [
"def",
"samples_keep",
"(",
"self",
",",
"index",
")",
":",
"try",
":",
"if",
"isinstance",
"(",
"index",
",",
"(",
"int",
",",
"long",
")",
")",
":",
"index",
"=",
"range",
"(",
"self",
".",
"samples",
")",
"[",
"-",
"index",
":",
"]",
"except",... | Keep subset of samples. If index is an integer, keep last N=index
samples. If index is a list, keep the samples
corresponding to the index values in the list. | [
"Keep",
"subset",
"of",
"samples",
".",
"If",
"index",
"is",
"an",
"integer",
"keep",
"last",
"N",
"=",
"index",
"samples",
".",
"If",
"index",
"is",
"a",
"list",
"keep",
"the",
"samples",
"corresponding",
"to",
"the",
"index",
"values",
"in",
"the",
"... | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L206-L224 | train | 210,939 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.tt_avg | def tt_avg(self, print_output=True, output_file="tt.csv"):
"""
Compute average term-topic matrix, and print to file if
print_output=True.
"""
avg = self.tt.mean(axis=2)
if print_output:
np.savetxt(output_file, avg, delimiter=",")
return avg | python | def tt_avg(self, print_output=True, output_file="tt.csv"):
"""
Compute average term-topic matrix, and print to file if
print_output=True.
"""
avg = self.tt.mean(axis=2)
if print_output:
np.savetxt(output_file, avg, delimiter=",")
return avg | [
"def",
"tt_avg",
"(",
"self",
",",
"print_output",
"=",
"True",
",",
"output_file",
"=",
"\"tt.csv\"",
")",
":",
"avg",
"=",
"self",
".",
"tt",
".",
"mean",
"(",
"axis",
"=",
"2",
")",
"if",
"print_output",
":",
"np",
".",
"savetxt",
"(",
"output_fil... | Compute average term-topic matrix, and print to file if
print_output=True. | [
"Compute",
"average",
"term",
"-",
"topic",
"matrix",
"and",
"print",
"to",
"file",
"if",
"print_output",
"=",
"True",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L226-L236 | train | 210,940 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | LDAGibbs.dict_print | def dict_print(self, output_file="dict.csv"):
"""
Print mapping from tokens to numeric indices.
"""
with codecs.open(output_file, "w", encoding='utf-8') as f:
for (v, k) in self.token_key.items():
f.write("%s,%d\n" % (v, k)) | python | def dict_print(self, output_file="dict.csv"):
"""
Print mapping from tokens to numeric indices.
"""
with codecs.open(output_file, "w", encoding='utf-8') as f:
for (v, k) in self.token_key.items():
f.write("%s,%d\n" % (v, k)) | [
"def",
"dict_print",
"(",
"self",
",",
"output_file",
"=",
"\"dict.csv\"",
")",
":",
"with",
"codecs",
".",
"open",
"(",
"output_file",
",",
"\"w\"",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f",
":",
"for",
"(",
"v",
",",
"k",
")",
"in",
"self",
... | Print mapping from tokens to numeric indices. | [
"Print",
"mapping",
"from",
"tokens",
"to",
"numeric",
"indices",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L250-L258 | train | 210,941 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | QueryGibbs.query | def query(self, query_samples):
"""
Query docs with query_samples number of Gibbs
sampling iterations.
"""
self.sampled_topics = np.zeros((self.samples, self.N),
dtype=np.int)
for s in range(self.samples):
self.sampled_topics[s, :] = \
samplers_lda.sampler_query(self.docid, self.tokens,
self.topic_seed,
np.ascontiguousarray(
self.tt[:, :, s],
dtype=np.float),
self.N, self.K, self.D,
self.alpha, query_samples)
print("Sample %d queried" % s)
self.dt = np.zeros((self.D, self.K, self.samples))
for s in range(self.samples):
self.dt[:, :, s] = \
samplers_lda.dt_comp(self.docid, self.sampled_topics[s, :],
self.N, self.K, self.D, self.alpha) | python | def query(self, query_samples):
"""
Query docs with query_samples number of Gibbs
sampling iterations.
"""
self.sampled_topics = np.zeros((self.samples, self.N),
dtype=np.int)
for s in range(self.samples):
self.sampled_topics[s, :] = \
samplers_lda.sampler_query(self.docid, self.tokens,
self.topic_seed,
np.ascontiguousarray(
self.tt[:, :, s],
dtype=np.float),
self.N, self.K, self.D,
self.alpha, query_samples)
print("Sample %d queried" % s)
self.dt = np.zeros((self.D, self.K, self.samples))
for s in range(self.samples):
self.dt[:, :, s] = \
samplers_lda.dt_comp(self.docid, self.sampled_topics[s, :],
self.N, self.K, self.D, self.alpha) | [
"def",
"query",
"(",
"self",
",",
"query_samples",
")",
":",
"self",
".",
"sampled_topics",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"samples",
",",
"self",
".",
"N",
")",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"for",
"s",
"in",
"range"... | Query docs with query_samples number of Gibbs
sampling iterations. | [
"Query",
"docs",
"with",
"query_samples",
"number",
"of",
"Gibbs",
"sampling",
"iterations",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L325-L353 | train | 210,942 |
alan-turing-institute/topic-modelling-tools | topicmodels/LDA/gibbs.py | QueryGibbs.dt_avg | def dt_avg(self, print_output=True, output_file="dt_query.csv"):
"""
Compute average document-topic matrix,
and print to file if print_output=True.
"""
avg = self.dt.mean(axis=2)
if print_output:
np.savetxt(output_file, avg, delimiter=",")
return avg | python | def dt_avg(self, print_output=True, output_file="dt_query.csv"):
"""
Compute average document-topic matrix,
and print to file if print_output=True.
"""
avg = self.dt.mean(axis=2)
if print_output:
np.savetxt(output_file, avg, delimiter=",")
return avg | [
"def",
"dt_avg",
"(",
"self",
",",
"print_output",
"=",
"True",
",",
"output_file",
"=",
"\"dt_query.csv\"",
")",
":",
"avg",
"=",
"self",
".",
"dt",
".",
"mean",
"(",
"axis",
"=",
"2",
")",
"if",
"print_output",
":",
"np",
".",
"savetxt",
"(",
"outp... | Compute average document-topic matrix,
and print to file if print_output=True. | [
"Compute",
"average",
"document",
"-",
"topic",
"matrix",
"and",
"print",
"to",
"file",
"if",
"print_output",
"=",
"True",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/LDA/gibbs.py#L365-L375 | train | 210,943 |
alan-turing-institute/topic-modelling-tools | topicmodels/preprocess.py | RawDocs.phrase_replace | def phrase_replace(self, replace_dict):
"""
Replace phrases with single token, mapping defined in replace_dict
"""
def r(tokens):
text = ' ' + ' '.join(tokens)
for k, v in replace_dict.items():
text = text.replace(" " + k + " ", " " + v + " ")
return text.split()
self.stems = list(map(r, self.stems)) | python | def phrase_replace(self, replace_dict):
"""
Replace phrases with single token, mapping defined in replace_dict
"""
def r(tokens):
text = ' ' + ' '.join(tokens)
for k, v in replace_dict.items():
text = text.replace(" " + k + " ", " " + v + " ")
return text.split()
self.stems = list(map(r, self.stems)) | [
"def",
"phrase_replace",
"(",
"self",
",",
"replace_dict",
")",
":",
"def",
"r",
"(",
"tokens",
")",
":",
"text",
"=",
"' '",
"+",
"' '",
".",
"join",
"(",
"tokens",
")",
"for",
"k",
",",
"v",
"in",
"replace_dict",
".",
"items",
"(",
")",
":",
"t... | Replace phrases with single token, mapping defined in replace_dict | [
"Replace",
"phrases",
"with",
"single",
"token",
"mapping",
"defined",
"in",
"replace_dict"
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/preprocess.py#L76-L87 | train | 210,944 |
alan-turing-institute/topic-modelling-tools | topicmodels/preprocess.py | RawDocs.stem | def stem(self):
"""
Stem tokens with Porter Stemmer.
"""
def s(tokens):
return [PorterStemmer().stem(t) for t in tokens]
self.stems = list(map(s, self.tokens)) | python | def stem(self):
"""
Stem tokens with Porter Stemmer.
"""
def s(tokens):
return [PorterStemmer().stem(t) for t in tokens]
self.stems = list(map(s, self.tokens)) | [
"def",
"stem",
"(",
"self",
")",
":",
"def",
"s",
"(",
"tokens",
")",
":",
"return",
"[",
"PorterStemmer",
"(",
")",
".",
"stem",
"(",
"t",
")",
"for",
"t",
"in",
"tokens",
"]",
"self",
".",
"stems",
"=",
"list",
"(",
"map",
"(",
"s",
",",
"s... | Stem tokens with Porter Stemmer. | [
"Stem",
"tokens",
"with",
"Porter",
"Stemmer",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/preprocess.py#L108-L116 | train | 210,945 |
alan-turing-institute/topic-modelling-tools | topicmodels/preprocess.py | RawDocs.bigram | def bigram(self, items):
"""
generate bigrams of either items = "tokens" or "stems"
"""
def bigram_join(tok_list):
text = nltk.bigrams(tok_list)
return list(map(lambda x: x[0] + '.' + x[1], text))
if items == "tokens":
self.bigrams = list(map(bigram_join, self.tokens))
elif items == "stems":
self.bigrams = list(map(bigram_join, self.stems))
else:
raise ValueError("Items must be either \'tokens\' or \'stems\'.") | python | def bigram(self, items):
"""
generate bigrams of either items = "tokens" or "stems"
"""
def bigram_join(tok_list):
text = nltk.bigrams(tok_list)
return list(map(lambda x: x[0] + '.' + x[1], text))
if items == "tokens":
self.bigrams = list(map(bigram_join, self.tokens))
elif items == "stems":
self.bigrams = list(map(bigram_join, self.stems))
else:
raise ValueError("Items must be either \'tokens\' or \'stems\'.") | [
"def",
"bigram",
"(",
"self",
",",
"items",
")",
":",
"def",
"bigram_join",
"(",
"tok_list",
")",
":",
"text",
"=",
"nltk",
".",
"bigrams",
"(",
"tok_list",
")",
"return",
"list",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"+",
"'.'... | generate bigrams of either items = "tokens" or "stems" | [
"generate",
"bigrams",
"of",
"either",
"items",
"=",
"tokens",
"or",
"stems"
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/preprocess.py#L118-L133 | train | 210,946 |
alan-turing-institute/topic-modelling-tools | topicmodels/multimix/inference.py | EM.E_step | def E_step(self):
"""
compute type probabilities given current parameter estimates.
"""
# on the first iteration of estimation, this will be called
if not hasattr(self, 'type_prob'):
self.type_prob = np.empty((self.N, self.K))
temp_probs = np.zeros((self.N, self.K))
for i in range(self.N):
for k in range(self.K):
temp_probs[i, k] = \
np.log(self.rho[k]) + np.dot(self.feature_counts[i, :],
np.log(self.mu[k, :]))
temp_probsZ = temp_probs - np.max(temp_probs, axis=1)[:, np.newaxis]
self.type_prob = np.exp(temp_probsZ) / \
np.exp(temp_probsZ).sum(axis=1)[:, np.newaxis]
return np.log(np.exp(temp_probsZ).sum(axis=1)).sum() + \
np.max(temp_probs, axis=1).sum() | python | def E_step(self):
"""
compute type probabilities given current parameter estimates.
"""
# on the first iteration of estimation, this will be called
if not hasattr(self, 'type_prob'):
self.type_prob = np.empty((self.N, self.K))
temp_probs = np.zeros((self.N, self.K))
for i in range(self.N):
for k in range(self.K):
temp_probs[i, k] = \
np.log(self.rho[k]) + np.dot(self.feature_counts[i, :],
np.log(self.mu[k, :]))
temp_probsZ = temp_probs - np.max(temp_probs, axis=1)[:, np.newaxis]
self.type_prob = np.exp(temp_probsZ) / \
np.exp(temp_probsZ).sum(axis=1)[:, np.newaxis]
return np.log(np.exp(temp_probsZ).sum(axis=1)).sum() + \
np.max(temp_probs, axis=1).sum() | [
"def",
"E_step",
"(",
"self",
")",
":",
"# on the first iteration of estimation, this will be called",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'type_prob'",
")",
":",
"self",
".",
"type_prob",
"=",
"np",
".",
"empty",
"(",
"(",
"self",
".",
"N",
",",
"sel... | compute type probabilities given current parameter estimates. | [
"compute",
"type",
"probabilities",
"given",
"current",
"parameter",
"estimates",
"."
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/multimix/inference.py#L37-L60 | train | 210,947 |
alan-turing-institute/topic-modelling-tools | topicmodels/multimix/inference.py | EM.M_step | def M_step(self):
"""
generate new parameter estimates given updated type distribution
"""
for k in range(self.K):
self.rho[k] = self.type_prob[:, k].sum() / self.N
for k in range(self.K):
for m in range(self.M):
temp_prob = np.dot(self.type_prob[:, k],
self.feature_counts[:, m])
if temp_prob < 1e-99:
temp_prob = 1e-99
self.mu[k, m] = temp_prob / np.dot(self.type_prob[:, k],
self.observations) | python | def M_step(self):
"""
generate new parameter estimates given updated type distribution
"""
for k in range(self.K):
self.rho[k] = self.type_prob[:, k].sum() / self.N
for k in range(self.K):
for m in range(self.M):
temp_prob = np.dot(self.type_prob[:, k],
self.feature_counts[:, m])
if temp_prob < 1e-99:
temp_prob = 1e-99
self.mu[k, m] = temp_prob / np.dot(self.type_prob[:, k],
self.observations) | [
"def",
"M_step",
"(",
"self",
")",
":",
"for",
"k",
"in",
"range",
"(",
"self",
".",
"K",
")",
":",
"self",
".",
"rho",
"[",
"k",
"]",
"=",
"self",
".",
"type_prob",
"[",
":",
",",
"k",
"]",
".",
"sum",
"(",
")",
"/",
"self",
".",
"N",
"f... | generate new parameter estimates given updated type distribution | [
"generate",
"new",
"parameter",
"estimates",
"given",
"updated",
"type",
"distribution"
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/multimix/inference.py#L62-L78 | train | 210,948 |
alan-turing-institute/topic-modelling-tools | topicmodels/multimix/inference.py | EM.estimate | def estimate(self, maxiter=250, convergence=1e-7):
"""
run EM algorithm until convergence, or until maxiter reached
"""
self.loglik = np.zeros(maxiter)
iter = 0
while iter < maxiter:
self.loglik[iter] = self.E_step()
if np.isnan(self.loglik[iter]):
print("undefined log-likelihood")
break
self.M_step()
if self.loglik[iter] - self.loglik[iter - 1] < 0 and iter > 0:
print("log-likelihood decreased by %f at iteration %d"
% (self.loglik[iter] - self.loglik[iter - 1],
iter))
elif self.loglik[iter] - self.loglik[iter - 1] < convergence \
and iter > 0:
print("convergence at iteration %d, loglik = %f" %
(iter, self.loglik[iter]))
self.loglik = self.loglik[self.loglik < 0]
break
iter += 1 | python | def estimate(self, maxiter=250, convergence=1e-7):
"""
run EM algorithm until convergence, or until maxiter reached
"""
self.loglik = np.zeros(maxiter)
iter = 0
while iter < maxiter:
self.loglik[iter] = self.E_step()
if np.isnan(self.loglik[iter]):
print("undefined log-likelihood")
break
self.M_step()
if self.loglik[iter] - self.loglik[iter - 1] < 0 and iter > 0:
print("log-likelihood decreased by %f at iteration %d"
% (self.loglik[iter] - self.loglik[iter - 1],
iter))
elif self.loglik[iter] - self.loglik[iter - 1] < convergence \
and iter > 0:
print("convergence at iteration %d, loglik = %f" %
(iter, self.loglik[iter]))
self.loglik = self.loglik[self.loglik < 0]
break
iter += 1 | [
"def",
"estimate",
"(",
"self",
",",
"maxiter",
"=",
"250",
",",
"convergence",
"=",
"1e-7",
")",
":",
"self",
".",
"loglik",
"=",
"np",
".",
"zeros",
"(",
"maxiter",
")",
"iter",
"=",
"0",
"while",
"iter",
"<",
"maxiter",
":",
"self",
".",
"loglik... | run EM algorithm until convergence, or until maxiter reached | [
"run",
"EM",
"algorithm",
"until",
"convergence",
"or",
"until",
"maxiter",
"reached"
] | f0cf90cdd06f1072e824b446f201c7469b9de5df | https://github.com/alan-turing-institute/topic-modelling-tools/blob/f0cf90cdd06f1072e824b446f201c7469b9de5df/topicmodels/multimix/inference.py#L80-L109 | train | 210,949 |
vega/ipyvega | vega/utils.py | get_content | def get_content(path):
"""Get content of file."""
with codecs.open(abs_path(path), encoding='utf-8') as f:
return f.read() | python | def get_content(path):
"""Get content of file."""
with codecs.open(abs_path(path), encoding='utf-8') as f:
return f.read() | [
"def",
"get_content",
"(",
"path",
")",
":",
"with",
"codecs",
".",
"open",
"(",
"abs_path",
"(",
"path",
")",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")"
] | Get content of file. | [
"Get",
"content",
"of",
"file",
"."
] | 80663a734a7eac7482e2fa4684599d68f177b8f3 | https://github.com/vega/ipyvega/blob/80663a734a7eac7482e2fa4684599d68f177b8f3/vega/utils.py#L14-L17 | train | 210,950 |
vega/ipyvega | vega/utils.py | sanitize_dataframe | def sanitize_dataframe(df):
"""Sanitize a DataFrame to prepare it for serialization.
* Make a copy
* Raise ValueError if it has a hierarchical index.
* Convert categoricals to strings.
* Convert np.bool_ dtypes to Python bool objects
* Convert np.int dtypes to Python int objects
* Convert floats to objects and replace NaNs/infs with None.
* Convert DateTime dtypes into appropriate string representations
"""
import pandas as pd
import numpy as np
df = df.copy()
if isinstance(df.index, pd.core.index.MultiIndex):
raise ValueError('Hierarchical indices not supported')
if isinstance(df.columns, pd.core.index.MultiIndex):
raise ValueError('Hierarchical indices not supported')
def to_list_if_array(val):
if isinstance(val, np.ndarray):
return val.tolist()
else:
return val
for col_name, dtype in df.dtypes.iteritems():
if str(dtype) == 'category':
# XXXX: work around bug in to_json for categorical types
# https://github.com/pydata/pandas/issues/10778
df[col_name] = df[col_name].astype(str)
elif str(dtype) == 'bool':
# convert numpy bools to objects; np.bool is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif np.issubdtype(dtype, np.integer):
# convert integers to objects; np.int is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif np.issubdtype(dtype, np.floating):
# For floats, convert to Python float: np.float is not JSON serializable
# Also convert NaN/inf values to null, as they are not JSON serializable
col = df[col_name]
bad_values = col.isnull() | np.isinf(col)
df[col_name] = col.astype(object).where(~bad_values, None)
elif str(dtype).startswith('datetime'):
# Convert datetimes to strings
# astype(str) will choose the appropriate resolution
df[col_name] = df[col_name].astype(str).replace('NaT', '')
elif dtype == object:
# Convert numpy arrays saved as objects to lists
# Arrays are not JSON serializable
col = df[col_name].apply(to_list_if_array, convert_dtype=False)
df[col_name] = col.where(col.notnull(), None)
return df | python | def sanitize_dataframe(df):
"""Sanitize a DataFrame to prepare it for serialization.
* Make a copy
* Raise ValueError if it has a hierarchical index.
* Convert categoricals to strings.
* Convert np.bool_ dtypes to Python bool objects
* Convert np.int dtypes to Python int objects
* Convert floats to objects and replace NaNs/infs with None.
* Convert DateTime dtypes into appropriate string representations
"""
import pandas as pd
import numpy as np
df = df.copy()
if isinstance(df.index, pd.core.index.MultiIndex):
raise ValueError('Hierarchical indices not supported')
if isinstance(df.columns, pd.core.index.MultiIndex):
raise ValueError('Hierarchical indices not supported')
def to_list_if_array(val):
if isinstance(val, np.ndarray):
return val.tolist()
else:
return val
for col_name, dtype in df.dtypes.iteritems():
if str(dtype) == 'category':
# XXXX: work around bug in to_json for categorical types
# https://github.com/pydata/pandas/issues/10778
df[col_name] = df[col_name].astype(str)
elif str(dtype) == 'bool':
# convert numpy bools to objects; np.bool is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif np.issubdtype(dtype, np.integer):
# convert integers to objects; np.int is not JSON serializable
df[col_name] = df[col_name].astype(object)
elif np.issubdtype(dtype, np.floating):
# For floats, convert to Python float: np.float is not JSON serializable
# Also convert NaN/inf values to null, as they are not JSON serializable
col = df[col_name]
bad_values = col.isnull() | np.isinf(col)
df[col_name] = col.astype(object).where(~bad_values, None)
elif str(dtype).startswith('datetime'):
# Convert datetimes to strings
# astype(str) will choose the appropriate resolution
df[col_name] = df[col_name].astype(str).replace('NaT', '')
elif dtype == object:
# Convert numpy arrays saved as objects to lists
# Arrays are not JSON serializable
col = df[col_name].apply(to_list_if_array, convert_dtype=False)
df[col_name] = col.where(col.notnull(), None)
return df | [
"def",
"sanitize_dataframe",
"(",
"df",
")",
":",
"import",
"pandas",
"as",
"pd",
"import",
"numpy",
"as",
"np",
"df",
"=",
"df",
".",
"copy",
"(",
")",
"if",
"isinstance",
"(",
"df",
".",
"index",
",",
"pd",
".",
"core",
".",
"index",
".",
"MultiI... | Sanitize a DataFrame to prepare it for serialization.
* Make a copy
* Raise ValueError if it has a hierarchical index.
* Convert categoricals to strings.
* Convert np.bool_ dtypes to Python bool objects
* Convert np.int dtypes to Python int objects
* Convert floats to objects and replace NaNs/infs with None.
* Convert DateTime dtypes into appropriate string representations | [
"Sanitize",
"a",
"DataFrame",
"to",
"prepare",
"it",
"for",
"serialization",
"."
] | 80663a734a7eac7482e2fa4684599d68f177b8f3 | https://github.com/vega/ipyvega/blob/80663a734a7eac7482e2fa4684599d68f177b8f3/vega/utils.py#L25-L78 | train | 210,951 |
vega/ipyvega | vega/utils.py | prepare_spec | def prepare_spec(spec, data=None):
"""Prepare a Vega-Lite spec for sending to the frontend.
This allows data to be passed in either as part of the spec
or separately. If separately, the data is assumed to be a
pandas DataFrame or object that can be converted to to a DataFrame.
Note that if data is not None, this modifies spec in-place
"""
import pandas as pd
if isinstance(data, pd.DataFrame):
# We have to do the isinstance test first because we can't
# compare a DataFrame to None.
data = sanitize_dataframe(data)
spec['data'] = {'values': data.to_dict(orient='records')}
elif data is None:
# Assume data is within spec & do nothing
# It may be deep in the spec rather than at the top level
pass
else:
# As a last resort try to pass the data to a DataFrame and use it
data = pd.DataFrame(data)
data = sanitize_dataframe(data)
spec['data'] = {'values': data.to_dict(orient='records')}
return spec | python | def prepare_spec(spec, data=None):
"""Prepare a Vega-Lite spec for sending to the frontend.
This allows data to be passed in either as part of the spec
or separately. If separately, the data is assumed to be a
pandas DataFrame or object that can be converted to to a DataFrame.
Note that if data is not None, this modifies spec in-place
"""
import pandas as pd
if isinstance(data, pd.DataFrame):
# We have to do the isinstance test first because we can't
# compare a DataFrame to None.
data = sanitize_dataframe(data)
spec['data'] = {'values': data.to_dict(orient='records')}
elif data is None:
# Assume data is within spec & do nothing
# It may be deep in the spec rather than at the top level
pass
else:
# As a last resort try to pass the data to a DataFrame and use it
data = pd.DataFrame(data)
data = sanitize_dataframe(data)
spec['data'] = {'values': data.to_dict(orient='records')}
return spec | [
"def",
"prepare_spec",
"(",
"spec",
",",
"data",
"=",
"None",
")",
":",
"import",
"pandas",
"as",
"pd",
"if",
"isinstance",
"(",
"data",
",",
"pd",
".",
"DataFrame",
")",
":",
"# We have to do the isinstance test first because we can't",
"# compare a DataFrame to No... | Prepare a Vega-Lite spec for sending to the frontend.
This allows data to be passed in either as part of the spec
or separately. If separately, the data is assumed to be a
pandas DataFrame or object that can be converted to to a DataFrame.
Note that if data is not None, this modifies spec in-place | [
"Prepare",
"a",
"Vega",
"-",
"Lite",
"spec",
"for",
"sending",
"to",
"the",
"frontend",
"."
] | 80663a734a7eac7482e2fa4684599d68f177b8f3 | https://github.com/vega/ipyvega/blob/80663a734a7eac7482e2fa4684599d68f177b8f3/vega/utils.py#L81-L106 | train | 210,952 |
vega/ipyvega | vega/base.py | VegaBase._repr_mimebundle_ | def _repr_mimebundle_(self, include=None, exclude=None):
"""Display the visualization in the Jupyter notebook."""
id = uuid.uuid4()
return (
{'application/javascript': self._generate_js(id)},
{'jupyter-vega': '#{0}'.format(id)},
) | python | def _repr_mimebundle_(self, include=None, exclude=None):
"""Display the visualization in the Jupyter notebook."""
id = uuid.uuid4()
return (
{'application/javascript': self._generate_js(id)},
{'jupyter-vega': '#{0}'.format(id)},
) | [
"def",
"_repr_mimebundle_",
"(",
"self",
",",
"include",
"=",
"None",
",",
"exclude",
"=",
"None",
")",
":",
"id",
"=",
"uuid",
".",
"uuid4",
"(",
")",
"return",
"(",
"{",
"'application/javascript'",
":",
"self",
".",
"_generate_js",
"(",
"id",
")",
"}... | Display the visualization in the Jupyter notebook. | [
"Display",
"the",
"visualization",
"in",
"the",
"Jupyter",
"notebook",
"."
] | 80663a734a7eac7482e2fa4684599d68f177b8f3 | https://github.com/vega/ipyvega/blob/80663a734a7eac7482e2fa4684599d68f177b8f3/vega/base.py#L37-L43 | train | 210,953 |
nylas/nylas-python | examples/native-authentication-gmail/server.py | pass_creds_to_nylas | def pass_creds_to_nylas():
"""
This view loads the credentials from Google and passes them to Nylas,
to set up native authentication.
"""
# If you haven't already connected with Google, this won't work.
if not google.authorized:
return "Error: not yet connected with Google!", 400
if "refresh_token" not in google.token:
# We're missing the refresh token from Google, and the only way to get
# a new one is to force reauthentication. That's annoying.
return (
(
"Error: missing Google refresh token. "
"Uncomment the `reprompt_consent` line in the code to fix this."
),
500,
)
# Look up the user's name and email address from Google.
google_resp = google.get("/oauth2/v2/userinfo?fields=name,email")
assert google_resp.ok, "Received failure response from Google userinfo API"
google_userinfo = google_resp.json()
# Start the connection process by looking up all the information that
# Nylas needs in order to connect, and sending it to the authorize API.
nylas_authorize_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"name": google_userinfo["name"],
"email_address": google_userinfo["email"],
"provider": "gmail",
"settings": {
"google_client_id": app.config["GOOGLE_OAUTH_CLIENT_ID"],
"google_client_secret": app.config["GOOGLE_OAUTH_CLIENT_SECRET"],
"google_refresh_token": google.token["refresh_token"],
},
}
nylas_authorize_resp = requests.post(
"https://api.nylas.com/connect/authorize", json=nylas_authorize_data
)
assert nylas_authorize_resp.ok, "Received failure response from Nylas authorize API"
nylas_code = nylas_authorize_resp.json()["code"]
# Now that we've got the `code` from the authorize response,
# pass it to the token response to complete the connection.
nylas_token_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"client_secret": app.config["NYLAS_OAUTH_CLIENT_SECRET"],
"code": nylas_code,
}
nylas_token_resp = requests.post(
"https://api.nylas.com/connect/token", json=nylas_token_data
)
assert nylas_token_resp.ok, "Received failure response from Nylas token API"
nylas_access_token = nylas_token_resp.json()["access_token"]
# Great, we've connected Google to Nylas! In the process, Nylas gave us
# an OAuth access token, which we'll need in order to make API requests
# to Nylas in the future. We'll save that access token in the Flask session,
# so we can pick it up later and use it when we need it.
session["nylas_access_token"] = nylas_access_token
# We're all done here. Redirect the user back to the home page,
# which will pick up the access token we just saved.
return redirect(url_for("index")) | python | def pass_creds_to_nylas():
"""
This view loads the credentials from Google and passes them to Nylas,
to set up native authentication.
"""
# If you haven't already connected with Google, this won't work.
if not google.authorized:
return "Error: not yet connected with Google!", 400
if "refresh_token" not in google.token:
# We're missing the refresh token from Google, and the only way to get
# a new one is to force reauthentication. That's annoying.
return (
(
"Error: missing Google refresh token. "
"Uncomment the `reprompt_consent` line in the code to fix this."
),
500,
)
# Look up the user's name and email address from Google.
google_resp = google.get("/oauth2/v2/userinfo?fields=name,email")
assert google_resp.ok, "Received failure response from Google userinfo API"
google_userinfo = google_resp.json()
# Start the connection process by looking up all the information that
# Nylas needs in order to connect, and sending it to the authorize API.
nylas_authorize_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"name": google_userinfo["name"],
"email_address": google_userinfo["email"],
"provider": "gmail",
"settings": {
"google_client_id": app.config["GOOGLE_OAUTH_CLIENT_ID"],
"google_client_secret": app.config["GOOGLE_OAUTH_CLIENT_SECRET"],
"google_refresh_token": google.token["refresh_token"],
},
}
nylas_authorize_resp = requests.post(
"https://api.nylas.com/connect/authorize", json=nylas_authorize_data
)
assert nylas_authorize_resp.ok, "Received failure response from Nylas authorize API"
nylas_code = nylas_authorize_resp.json()["code"]
# Now that we've got the `code` from the authorize response,
# pass it to the token response to complete the connection.
nylas_token_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"client_secret": app.config["NYLAS_OAUTH_CLIENT_SECRET"],
"code": nylas_code,
}
nylas_token_resp = requests.post(
"https://api.nylas.com/connect/token", json=nylas_token_data
)
assert nylas_token_resp.ok, "Received failure response from Nylas token API"
nylas_access_token = nylas_token_resp.json()["access_token"]
# Great, we've connected Google to Nylas! In the process, Nylas gave us
# an OAuth access token, which we'll need in order to make API requests
# to Nylas in the future. We'll save that access token in the Flask session,
# so we can pick it up later and use it when we need it.
session["nylas_access_token"] = nylas_access_token
# We're all done here. Redirect the user back to the home page,
# which will pick up the access token we just saved.
return redirect(url_for("index")) | [
"def",
"pass_creds_to_nylas",
"(",
")",
":",
"# If you haven't already connected with Google, this won't work.",
"if",
"not",
"google",
".",
"authorized",
":",
"return",
"\"Error: not yet connected with Google!\"",
",",
"400",
"if",
"\"refresh_token\"",
"not",
"in",
"google",... | This view loads the credentials from Google and passes them to Nylas,
to set up native authentication. | [
"This",
"view",
"loads",
"the",
"credentials",
"from",
"Google",
"and",
"passes",
"them",
"to",
"Nylas",
"to",
"set",
"up",
"native",
"authentication",
"."
] | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/examples/native-authentication-gmail/server.py#L137-L202 | train | 210,954 |
nylas/nylas-python | examples/native-authentication-exchange/server.py | pass_creds_to_nylas | def pass_creds_to_nylas(name, email, password, server_host=None):
"""
Passes Exchange credentials to Nylas, to set up native authentication.
"""
# Start the connection process by looking up all the information that
# Nylas needs in order to connect, and sending it to the authorize API.
nylas_authorize_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"name": name,
"email_address": email,
"provider": "exchange",
"settings": {"username": email, "password": password},
}
if server_host:
nylas_authorize_data["settings"]["eas_server_host"] = server_host
nylas_authorize_resp = requests.post(
"https://api.nylas.com/connect/authorize", json=nylas_authorize_data
)
if not nylas_authorize_resp.ok:
message = nylas_authorize_resp.json()["message"]
raise APIError(message)
nylas_code = nylas_authorize_resp.json()["code"]
# Now that we've got the `code` from the authorize response,
# pass it to the token response to complete the connection.
nylas_token_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"client_secret": app.config["NYLAS_OAUTH_CLIENT_SECRET"],
"code": nylas_code,
}
nylas_token_resp = requests.post(
"https://api.nylas.com/connect/token", json=nylas_token_data
)
if not nylas_token_resp.ok:
message = nylas_token_resp.json()["message"]
raise APIError(message)
nylas_access_token = nylas_token_resp.json()["access_token"]
# Great, we've connected the Exchange account to Nylas!
# In the process, Nylas gave us an OAuth access token, which we'll need
# in order to make API requests to Nylas in the future.
# We'll save that access token in the Flask session, so we can pick it up
# later and use it when we need it.
session["nylas_access_token"] = nylas_access_token
# We're all done here. Redirect the user back to the success page,
# which will pick up the access token we just saved.
return redirect(url_for("success")) | python | def pass_creds_to_nylas(name, email, password, server_host=None):
"""
Passes Exchange credentials to Nylas, to set up native authentication.
"""
# Start the connection process by looking up all the information that
# Nylas needs in order to connect, and sending it to the authorize API.
nylas_authorize_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"name": name,
"email_address": email,
"provider": "exchange",
"settings": {"username": email, "password": password},
}
if server_host:
nylas_authorize_data["settings"]["eas_server_host"] = server_host
nylas_authorize_resp = requests.post(
"https://api.nylas.com/connect/authorize", json=nylas_authorize_data
)
if not nylas_authorize_resp.ok:
message = nylas_authorize_resp.json()["message"]
raise APIError(message)
nylas_code = nylas_authorize_resp.json()["code"]
# Now that we've got the `code` from the authorize response,
# pass it to the token response to complete the connection.
nylas_token_data = {
"client_id": app.config["NYLAS_OAUTH_CLIENT_ID"],
"client_secret": app.config["NYLAS_OAUTH_CLIENT_SECRET"],
"code": nylas_code,
}
nylas_token_resp = requests.post(
"https://api.nylas.com/connect/token", json=nylas_token_data
)
if not nylas_token_resp.ok:
message = nylas_token_resp.json()["message"]
raise APIError(message)
nylas_access_token = nylas_token_resp.json()["access_token"]
# Great, we've connected the Exchange account to Nylas!
# In the process, Nylas gave us an OAuth access token, which we'll need
# in order to make API requests to Nylas in the future.
# We'll save that access token in the Flask session, so we can pick it up
# later and use it when we need it.
session["nylas_access_token"] = nylas_access_token
# We're all done here. Redirect the user back to the success page,
# which will pick up the access token we just saved.
return redirect(url_for("success")) | [
"def",
"pass_creds_to_nylas",
"(",
"name",
",",
"email",
",",
"password",
",",
"server_host",
"=",
"None",
")",
":",
"# Start the connection process by looking up all the information that",
"# Nylas needs in order to connect, and sending it to the authorize API.",
"nylas_authorize_da... | Passes Exchange credentials to Nylas, to set up native authentication. | [
"Passes",
"Exchange",
"credentials",
"to",
"Nylas",
"to",
"set",
"up",
"native",
"authentication",
"."
] | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/examples/native-authentication-exchange/server.py#L98-L148 | train | 210,955 |
nylas/nylas-python | nylas/client/client.py | APIClient._get_resource_raw | def _get_resource_raw(
self, cls, id, extra=None, headers=None, stream=False, **filters
):
"""Get an individual REST resource"""
headers = headers or {}
headers.update(self.session.headers)
postfix = "/{}".format(extra) if extra else ""
if cls.api_root != "a":
url = "{}/{}/{}{}".format(self.api_server, cls.collection_name, id, postfix)
else:
url = "{}/a/{}/{}/{}{}".format(
self.api_server, self.app_id, cls.collection_name, id, postfix
)
converted_filters = convert_datetimes_to_timestamps(
filters, cls.datetime_filter_attrs
)
url = str(URLObject(url).add_query_params(converted_filters.items()))
response = self._get_http_session(cls.api_root).get(
url, headers=headers, stream=stream
)
return _validate(response) | python | def _get_resource_raw(
self, cls, id, extra=None, headers=None, stream=False, **filters
):
"""Get an individual REST resource"""
headers = headers or {}
headers.update(self.session.headers)
postfix = "/{}".format(extra) if extra else ""
if cls.api_root != "a":
url = "{}/{}/{}{}".format(self.api_server, cls.collection_name, id, postfix)
else:
url = "{}/a/{}/{}/{}{}".format(
self.api_server, self.app_id, cls.collection_name, id, postfix
)
converted_filters = convert_datetimes_to_timestamps(
filters, cls.datetime_filter_attrs
)
url = str(URLObject(url).add_query_params(converted_filters.items()))
response = self._get_http_session(cls.api_root).get(
url, headers=headers, stream=stream
)
return _validate(response) | [
"def",
"_get_resource_raw",
"(",
"self",
",",
"cls",
",",
"id",
",",
"extra",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"stream",
"=",
"False",
",",
"*",
"*",
"filters",
")",
":",
"headers",
"=",
"headers",
"or",
"{",
"}",
"headers",
".",
"upd... | Get an individual REST resource | [
"Get",
"an",
"individual",
"REST",
"resource"
] | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/nylas/client/client.py#L285-L307 | train | 210,956 |
nylas/nylas-python | nylas/utils.py | convert_datetimes_to_timestamps | def convert_datetimes_to_timestamps(data, datetime_attrs):
"""
Given a dictionary of data, and a dictionary of datetime attributes,
return a new dictionary that converts any datetime attributes that may
be present to their timestamped equivalent.
"""
if not data:
return data
new_data = {}
for key, value in data.items():
if key in datetime_attrs and isinstance(value, datetime):
new_key = datetime_attrs[key]
new_data[new_key] = timestamp_from_dt(value)
else:
new_data[key] = value
return new_data | python | def convert_datetimes_to_timestamps(data, datetime_attrs):
"""
Given a dictionary of data, and a dictionary of datetime attributes,
return a new dictionary that converts any datetime attributes that may
be present to their timestamped equivalent.
"""
if not data:
return data
new_data = {}
for key, value in data.items():
if key in datetime_attrs and isinstance(value, datetime):
new_key = datetime_attrs[key]
new_data[new_key] = timestamp_from_dt(value)
else:
new_data[key] = value
return new_data | [
"def",
"convert_datetimes_to_timestamps",
"(",
"data",
",",
"datetime_attrs",
")",
":",
"if",
"not",
"data",
":",
"return",
"data",
"new_data",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"data",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"da... | Given a dictionary of data, and a dictionary of datetime attributes,
return a new dictionary that converts any datetime attributes that may
be present to their timestamped equivalent. | [
"Given",
"a",
"dictionary",
"of",
"data",
"and",
"a",
"dictionary",
"of",
"datetime",
"attributes",
"return",
"a",
"new",
"dictionary",
"that",
"converts",
"any",
"datetime",
"attributes",
"that",
"may",
"be",
"present",
"to",
"their",
"timestamped",
"equivalent... | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/nylas/utils.py#L15-L32 | train | 210,957 |
nylas/nylas-python | examples/webhooks/server.py | verify_signature | def verify_signature(message, key, signature):
"""
This function will verify the authenticity of a digital signature.
For security purposes, Nylas includes a digital signature in the headers
of every webhook notification, so that clients can verify that the
webhook request came from Nylas and no one else. The signing key
is your OAuth client secret, which only you and Nylas know.
"""
digest = hmac.new(key, msg=message, digestmod=hashlib.sha256).hexdigest()
return digest == signature | python | def verify_signature(message, key, signature):
"""
This function will verify the authenticity of a digital signature.
For security purposes, Nylas includes a digital signature in the headers
of every webhook notification, so that clients can verify that the
webhook request came from Nylas and no one else. The signing key
is your OAuth client secret, which only you and Nylas know.
"""
digest = hmac.new(key, msg=message, digestmod=hashlib.sha256).hexdigest()
return digest == signature | [
"def",
"verify_signature",
"(",
"message",
",",
"key",
",",
"signature",
")",
":",
"digest",
"=",
"hmac",
".",
"new",
"(",
"key",
",",
"msg",
"=",
"message",
",",
"digestmod",
"=",
"hashlib",
".",
"sha256",
")",
".",
"hexdigest",
"(",
")",
"return",
... | This function will verify the authenticity of a digital signature.
For security purposes, Nylas includes a digital signature in the headers
of every webhook notification, so that clients can verify that the
webhook request came from Nylas and no one else. The signing key
is your OAuth client secret, which only you and Nylas know. | [
"This",
"function",
"will",
"verify",
"the",
"authenticity",
"of",
"a",
"digital",
"signature",
".",
"For",
"security",
"purposes",
"Nylas",
"includes",
"a",
"digital",
"signature",
"in",
"the",
"headers",
"of",
"every",
"webhook",
"notification",
"so",
"that",
... | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/examples/webhooks/server.py#L111-L120 | train | 210,958 |
nylas/nylas-python | examples/webhooks/server.py | process_delta | def process_delta(delta):
"""
This is the part of the code where you would process the information
from the webhook notification. Each delta is one change that happened,
and might require fetching message IDs, updating your database,
and so on.
However, because this is just an example project, we'll just print
out information about the notification, so you can see what
information is being sent.
"""
kwargs = {
"type": delta["type"],
"date": datetime.datetime.utcfromtimestamp(delta["date"]),
"object_id": delta["object_data"]["id"],
}
print(" * {type} at {date} with ID {object_id}".format(**kwargs)) | python | def process_delta(delta):
"""
This is the part of the code where you would process the information
from the webhook notification. Each delta is one change that happened,
and might require fetching message IDs, updating your database,
and so on.
However, because this is just an example project, we'll just print
out information about the notification, so you can see what
information is being sent.
"""
kwargs = {
"type": delta["type"],
"date": datetime.datetime.utcfromtimestamp(delta["date"]),
"object_id": delta["object_data"]["id"],
}
print(" * {type} at {date} with ID {object_id}".format(**kwargs)) | [
"def",
"process_delta",
"(",
"delta",
")",
":",
"kwargs",
"=",
"{",
"\"type\"",
":",
"delta",
"[",
"\"type\"",
"]",
",",
"\"date\"",
":",
"datetime",
".",
"datetime",
".",
"utcfromtimestamp",
"(",
"delta",
"[",
"\"date\"",
"]",
")",
",",
"\"object_id\"",
... | This is the part of the code where you would process the information
from the webhook notification. Each delta is one change that happened,
and might require fetching message IDs, updating your database,
and so on.
However, because this is just an example project, we'll just print
out information about the notification, so you can see what
information is being sent. | [
"This",
"is",
"the",
"part",
"of",
"the",
"code",
"where",
"you",
"would",
"process",
"the",
"information",
"from",
"the",
"webhook",
"notification",
".",
"Each",
"delta",
"is",
"one",
"change",
"that",
"happened",
"and",
"might",
"require",
"fetching",
"mes... | c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8 | https://github.com/nylas/nylas-python/blob/c3e4dfc152b09bb8f886b1c64c6919b1f642cbc8/examples/webhooks/server.py#L124-L140 | train | 210,959 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | FixedErrorMessageMixin.error_respond | def error_respond(self):
"""Converts the error to an error response object.
:return: An error response object ready to be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
response = JSONRPCErrorResponse()
response.error = self.message
response.unique_id = None
response._jsonrpc_error_code = self.jsonrpc_error_code
if hasattr(self, 'data'):
response.data = self.data
return response | python | def error_respond(self):
"""Converts the error to an error response object.
:return: An error response object ready to be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
response = JSONRPCErrorResponse()
response.error = self.message
response.unique_id = None
response._jsonrpc_error_code = self.jsonrpc_error_code
if hasattr(self, 'data'):
response.data = self.data
return response | [
"def",
"error_respond",
"(",
"self",
")",
":",
"response",
"=",
"JSONRPCErrorResponse",
"(",
")",
"response",
".",
"error",
"=",
"self",
".",
"message",
"response",
".",
"unique_id",
"=",
"None",
"response",
".",
"_jsonrpc_error_code",
"=",
"self",
".",
"jso... | Converts the error to an error response object.
:return: An error response object ready to be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse` | [
"Converts",
"the",
"error",
"to",
"an",
"error",
"response",
"object",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L101-L114 | train | 210,960 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | JSONRPCRequest.error_respond | def error_respond(self, error):
"""Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
if self.unique_id is None:
return None
response = JSONRPCErrorResponse()
response.unique_id = None if self.one_way else self.unique_id
code, msg, data = _get_code_message_and_data(error)
response.error = msg
response._jsonrpc_error_code = code
if data:
response.data = data
return response | python | def error_respond(self, error):
"""Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
if self.unique_id is None:
return None
response = JSONRPCErrorResponse()
response.unique_id = None if self.one_way else self.unique_id
code, msg, data = _get_code_message_and_data(error)
response.error = msg
response._jsonrpc_error_code = code
if data:
response.data = data
return response | [
"def",
"error_respond",
"(",
"self",
",",
"error",
")",
":",
"if",
"self",
".",
"unique_id",
"is",
"None",
":",
"return",
"None",
"response",
"=",
"JSONRPCErrorResponse",
"(",
")",
"response",
".",
"unique_id",
"=",
"None",
"if",
"self",
".",
"one_way",
... | Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse` | [
"Create",
"an",
"error",
"response",
"to",
"this",
"request",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L372-L395 | train | 210,961 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | JSONRPCRequest.respond | def respond(self, result):
"""Create a response to this request.
When processing the request completed successfully this method can be used to
create a response object.
:param result: The result of the invoked method.
:type result: Anything that can be encoded by JSON.
:returns: A response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCSuccessResponse`
"""
if self.one_way or self.unique_id is None:
return None
response = JSONRPCSuccessResponse()
response.result = result
response.unique_id = self.unique_id
return response | python | def respond(self, result):
"""Create a response to this request.
When processing the request completed successfully this method can be used to
create a response object.
:param result: The result of the invoked method.
:type result: Anything that can be encoded by JSON.
:returns: A response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCSuccessResponse`
"""
if self.one_way or self.unique_id is None:
return None
response = JSONRPCSuccessResponse()
response.result = result
response.unique_id = self.unique_id
return response | [
"def",
"respond",
"(",
"self",
",",
"result",
")",
":",
"if",
"self",
".",
"one_way",
"or",
"self",
".",
"unique_id",
"is",
"None",
":",
"return",
"None",
"response",
"=",
"JSONRPCSuccessResponse",
"(",
")",
"response",
".",
"result",
"=",
"result",
"res... | Create a response to this request.
When processing the request completed successfully this method can be used to
create a response object.
:param result: The result of the invoked method.
:type result: Anything that can be encoded by JSON.
:returns: A response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCSuccessResponse` | [
"Create",
"a",
"response",
"to",
"this",
"request",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L397-L416 | train | 210,962 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | JSONRPCProtocol.parse_reply | def parse_reply(self, data):
"""Deserializes and validates a response.
Called by the client to reconstruct the serialized :py:class:`JSONRPCResponse`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed response.
:rtype: :py:class:`JSONRPCSuccessResponse` or :py:class:`JSONRPCErrorResponse`
:raises InvalidReplyError: if the response is not valid JSON or does not conform
to the standard.
"""
if isinstance(data, bytes):
data = data.decode()
try:
rep = json.loads(data)
except Exception as e:
raise InvalidReplyError(e)
for k in rep.keys():
if not k in self._ALLOWED_REPLY_KEYS:
raise InvalidReplyError('Key not allowed: %s' % k)
if 'jsonrpc' not in rep:
raise InvalidReplyError('Missing jsonrpc (version) in response.')
if rep['jsonrpc'] != self.JSON_RPC_VERSION:
raise InvalidReplyError('Wrong JSONRPC version')
if 'id' not in rep:
raise InvalidReplyError('Missing id in response')
if ('error' in rep) and ('result' in rep):
raise InvalidReplyError(
'Reply must contain exactly one of result and error.'
)
if 'error' in rep:
response = JSONRPCErrorResponse()
error = rep['error']
response.error = error["message"]
response._jsonrpc_error_code = error["code"]
if "data" in error:
response.data = error["data"]
else:
response = JSONRPCSuccessResponse()
response.result = rep.get('result', None)
response.unique_id = rep['id']
return response | python | def parse_reply(self, data):
"""Deserializes and validates a response.
Called by the client to reconstruct the serialized :py:class:`JSONRPCResponse`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed response.
:rtype: :py:class:`JSONRPCSuccessResponse` or :py:class:`JSONRPCErrorResponse`
:raises InvalidReplyError: if the response is not valid JSON or does not conform
to the standard.
"""
if isinstance(data, bytes):
data = data.decode()
try:
rep = json.loads(data)
except Exception as e:
raise InvalidReplyError(e)
for k in rep.keys():
if not k in self._ALLOWED_REPLY_KEYS:
raise InvalidReplyError('Key not allowed: %s' % k)
if 'jsonrpc' not in rep:
raise InvalidReplyError('Missing jsonrpc (version) in response.')
if rep['jsonrpc'] != self.JSON_RPC_VERSION:
raise InvalidReplyError('Wrong JSONRPC version')
if 'id' not in rep:
raise InvalidReplyError('Missing id in response')
if ('error' in rep) and ('result' in rep):
raise InvalidReplyError(
'Reply must contain exactly one of result and error.'
)
if 'error' in rep:
response = JSONRPCErrorResponse()
error = rep['error']
response.error = error["message"]
response._jsonrpc_error_code = error["code"]
if "data" in error:
response.data = error["data"]
else:
response = JSONRPCSuccessResponse()
response.result = rep.get('result', None)
response.unique_id = rep['id']
return response | [
"def",
"parse_reply",
"(",
"self",
",",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"data",
"=",
"data",
".",
"decode",
"(",
")",
"try",
":",
"rep",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"except",
"Exception",
... | Deserializes and validates a response.
Called by the client to reconstruct the serialized :py:class:`JSONRPCResponse`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed response.
:rtype: :py:class:`JSONRPCSuccessResponse` or :py:class:`JSONRPCErrorResponse`
:raises InvalidReplyError: if the response is not valid JSON or does not conform
to the standard. | [
"Deserializes",
"and",
"validates",
"a",
"response",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L529-L580 | train | 210,963 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | JSONRPCProtocol.parse_request | def parse_request(self, data):
"""Deserializes and validates a request.
Called by the server to reconstruct the serialized :py:class:`JSONRPCRequest`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed request.
:rtype: :py:class:`JSONRPCRequest`
:raises JSONRPCParseError: if the ``data`` cannot be parsed as valid JSON.
:raises JSONRPCInvalidRequestError: if the request does not comply with the standard.
"""
if isinstance(data, bytes):
data = data.decode()
try:
req = json.loads(data)
except Exception as e:
raise JSONRPCParseError()
if isinstance(req, list):
# batch request
requests = JSONRPCBatchRequest()
for subreq in req:
try:
requests.append(self._parse_subrequest(subreq))
except RPCError as e:
requests.append(e)
except Exception as e:
requests.append(JSONRPCInvalidRequestError())
if not requests:
raise JSONRPCInvalidRequestError()
return requests
else:
return self._parse_subrequest(req) | python | def parse_request(self, data):
"""Deserializes and validates a request.
Called by the server to reconstruct the serialized :py:class:`JSONRPCRequest`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed request.
:rtype: :py:class:`JSONRPCRequest`
:raises JSONRPCParseError: if the ``data`` cannot be parsed as valid JSON.
:raises JSONRPCInvalidRequestError: if the request does not comply with the standard.
"""
if isinstance(data, bytes):
data = data.decode()
try:
req = json.loads(data)
except Exception as e:
raise JSONRPCParseError()
if isinstance(req, list):
# batch request
requests = JSONRPCBatchRequest()
for subreq in req:
try:
requests.append(self._parse_subrequest(subreq))
except RPCError as e:
requests.append(e)
except Exception as e:
requests.append(JSONRPCInvalidRequestError())
if not requests:
raise JSONRPCInvalidRequestError()
return requests
else:
return self._parse_subrequest(req) | [
"def",
"parse_request",
"(",
"self",
",",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"data",
"=",
"data",
".",
"decode",
"(",
")",
"try",
":",
"req",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"except",
"Exception"... | Deserializes and validates a request.
Called by the server to reconstruct the serialized :py:class:`JSONRPCRequest`.
:param bytes data: The data stream received by the transport layer containing the
serialized request.
:return: A reconstructed request.
:rtype: :py:class:`JSONRPCRequest`
:raises JSONRPCParseError: if the ``data`` cannot be parsed as valid JSON.
:raises JSONRPCInvalidRequestError: if the request does not comply with the standard. | [
"Deserializes",
"and",
"validates",
"a",
"request",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L582-L617 | train | 210,964 |
mbr/tinyrpc | tinyrpc/protocols/jsonrpc.py | JSONRPCProtocol.raise_error | def raise_error(self, error):
"""Recreates the exception.
Creates a :py:class:`~tinyrpc.protocols.jsonrpc.JSONRPCError` instance
and raises it.
This allows the error, message and data attributes of the original
exception to propagate into the client code.
The :py:attr:`~tinyrpc.protocols.RPCProtocol.raises_error` flag controls if the exception obejct is
raised or returned.
:returns: the exception object if it is not allowed to raise it.
:raises JSONRPCError: when the exception can be raised.
The exception object will contain ``message``, ``code`` and optionally a
``data`` property.
"""
exc = JSONRPCError(error)
if self.raises_errors:
raise exc
return exc | python | def raise_error(self, error):
"""Recreates the exception.
Creates a :py:class:`~tinyrpc.protocols.jsonrpc.JSONRPCError` instance
and raises it.
This allows the error, message and data attributes of the original
exception to propagate into the client code.
The :py:attr:`~tinyrpc.protocols.RPCProtocol.raises_error` flag controls if the exception obejct is
raised or returned.
:returns: the exception object if it is not allowed to raise it.
:raises JSONRPCError: when the exception can be raised.
The exception object will contain ``message``, ``code`` and optionally a
``data`` property.
"""
exc = JSONRPCError(error)
if self.raises_errors:
raise exc
return exc | [
"def",
"raise_error",
"(",
"self",
",",
"error",
")",
":",
"exc",
"=",
"JSONRPCError",
"(",
"error",
")",
"if",
"self",
".",
"raises_errors",
":",
"raise",
"exc",
"return",
"exc"
] | Recreates the exception.
Creates a :py:class:`~tinyrpc.protocols.jsonrpc.JSONRPCError` instance
and raises it.
This allows the error, message and data attributes of the original
exception to propagate into the client code.
The :py:attr:`~tinyrpc.protocols.RPCProtocol.raises_error` flag controls if the exception obejct is
raised or returned.
:returns: the exception object if it is not allowed to raise it.
:raises JSONRPCError: when the exception can be raised.
The exception object will contain ``message``, ``code`` and optionally a
``data`` property. | [
"Recreates",
"the",
"exception",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/jsonrpc.py#L651-L670 | train | 210,965 |
mbr/tinyrpc | tinyrpc/transports/wsgi.py | WsgiServerTransport.handle | def handle(self, environ, start_response):
"""WSGI handler function.
The transport will serve a request by reading the message and putting
it into an internal buffer. It will then block until another
concurrently running function sends a reply using :py:meth:`send_reply`.
The reply will then be sent to the client being handled and handle will
return.
"""
request = Request(environ)
request.max_content_length = self.max_content_length
access_control_headers = {
'Access-Control-Allow-Methods': 'POST',
'Access-Control-Allow-Origin': self.allow_origin,
'Access-Control-Allow-Headers': \
'Content-Type, X-Requested-With, Accept, Origin'
}
if request.method == 'OPTIONS':
response = Response(headers=access_control_headers)
elif request.method == 'POST':
# message is encoded in POST, read it...
msg = request.stream.read()
# create new context
context = self._queue_class()
self.messages.put((context, msg))
# ...and send the reply
response = Response(context.get(), headers=access_control_headers)
else:
# nothing else supported at the moment
response = Response('Only POST supported', 405)
return response(environ, start_response) | python | def handle(self, environ, start_response):
"""WSGI handler function.
The transport will serve a request by reading the message and putting
it into an internal buffer. It will then block until another
concurrently running function sends a reply using :py:meth:`send_reply`.
The reply will then be sent to the client being handled and handle will
return.
"""
request = Request(environ)
request.max_content_length = self.max_content_length
access_control_headers = {
'Access-Control-Allow-Methods': 'POST',
'Access-Control-Allow-Origin': self.allow_origin,
'Access-Control-Allow-Headers': \
'Content-Type, X-Requested-With, Accept, Origin'
}
if request.method == 'OPTIONS':
response = Response(headers=access_control_headers)
elif request.method == 'POST':
# message is encoded in POST, read it...
msg = request.stream.read()
# create new context
context = self._queue_class()
self.messages.put((context, msg))
# ...and send the reply
response = Response(context.get(), headers=access_control_headers)
else:
# nothing else supported at the moment
response = Response('Only POST supported', 405)
return response(environ, start_response) | [
"def",
"handle",
"(",
"self",
",",
"environ",
",",
"start_response",
")",
":",
"request",
"=",
"Request",
"(",
"environ",
")",
"request",
".",
"max_content_length",
"=",
"self",
".",
"max_content_length",
"access_control_headers",
"=",
"{",
"'Access-Control-Allow-... | WSGI handler function.
The transport will serve a request by reading the message and putting
it into an internal buffer. It will then block until another
concurrently running function sends a reply using :py:meth:`send_reply`.
The reply will then be sent to the client being handled and handle will
return. | [
"WSGI",
"handler",
"function",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/transports/wsgi.py#L48-L86 | train | 210,966 |
mbr/tinyrpc | tinyrpc/server/__init__.py | RPCServer.receive_one_message | def receive_one_message(self):
"""Handle a single request.
Polls the transport for a new message.
After a new message has arrived :py:meth:`_spawn` is called with a handler
function and arguments to handle the request.
The handler function will try to decode the message using the supplied
protocol, if that fails, an error response will be sent. After decoding
the message, the dispatcher will be asked to handle the resulting
request and the return value (either an error or a result) will be sent
back to the client using the transport.
"""
context, message = self.transport.receive_message()
if callable(self.trace):
self.trace('-->', context, message)
# assuming protocol is threadsafe and dispatcher is theadsafe, as
# long as its immutable
def handle_message(context, message):
try:
request = self.protocol.parse_request(message)
except tinyrpc.exc.RPCError as e:
response = e.error_respond()
else:
response = self.dispatcher.dispatch(request)
# send reply
if response is not None:
result = response.serialize()
if callable(self.trace):
self.trace('<--', context, result)
self.transport.send_reply(context, result)
self._spawn(handle_message, context, message) | python | def receive_one_message(self):
"""Handle a single request.
Polls the transport for a new message.
After a new message has arrived :py:meth:`_spawn` is called with a handler
function and arguments to handle the request.
The handler function will try to decode the message using the supplied
protocol, if that fails, an error response will be sent. After decoding
the message, the dispatcher will be asked to handle the resulting
request and the return value (either an error or a result) will be sent
back to the client using the transport.
"""
context, message = self.transport.receive_message()
if callable(self.trace):
self.trace('-->', context, message)
# assuming protocol is threadsafe and dispatcher is theadsafe, as
# long as its immutable
def handle_message(context, message):
try:
request = self.protocol.parse_request(message)
except tinyrpc.exc.RPCError as e:
response = e.error_respond()
else:
response = self.dispatcher.dispatch(request)
# send reply
if response is not None:
result = response.serialize()
if callable(self.trace):
self.trace('<--', context, result)
self.transport.send_reply(context, result)
self._spawn(handle_message, context, message) | [
"def",
"receive_one_message",
"(",
"self",
")",
":",
"context",
",",
"message",
"=",
"self",
".",
"transport",
".",
"receive_message",
"(",
")",
"if",
"callable",
"(",
"self",
".",
"trace",
")",
":",
"self",
".",
"trace",
"(",
"'-->'",
",",
"context",
... | Handle a single request.
Polls the transport for a new message.
After a new message has arrived :py:meth:`_spawn` is called with a handler
function and arguments to handle the request.
The handler function will try to decode the message using the supplied
protocol, if that fails, an error response will be sent. After decoding
the message, the dispatcher will be asked to handle the resulting
request and the return value (either an error or a result) will be sent
back to the client using the transport. | [
"Handle",
"a",
"single",
"request",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/server/__init__.py#L71-L107 | train | 210,967 |
mbr/tinyrpc | tinyrpc/protocols/__init__.py | RPCProtocol.raise_error | def raise_error(self, error):
"""Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True.
"""
ex = exc.RPCError('Error calling remote procedure: %s' % error.error['message'])
if self.raises_errors:
raise ex
return ex | python | def raise_error(self, error):
"""Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True.
"""
ex = exc.RPCError('Error calling remote procedure: %s' % error.error['message'])
if self.raises_errors:
raise ex
return ex | [
"def",
"raise_error",
"(",
"self",
",",
"error",
")",
":",
"ex",
"=",
"exc",
".",
"RPCError",
"(",
"'Error calling remote procedure: %s'",
"%",
"error",
".",
"error",
"[",
"'message'",
"]",
")",
"if",
"self",
".",
"raises_errors",
":",
"raise",
"ex",
"retu... | Raises the exception in the client.
Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception
and raise or return it depending on the :py:attr:`raises_errors` attribute.
:param error: The error response received from the server.
:type error: :py:class:`RPCResponse`
:rtype: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is False.
:raises: :py:exc:`~tinyrpc.exc.RPCError` when :py:attr:`raises_errors` is True. | [
"Raises",
"the",
"exception",
"in",
"the",
"client",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/protocols/__init__.py#L292-L306 | train | 210,968 |
mbr/tinyrpc | tinyrpc/transports/zmq.py | ZmqServerTransport.create | def create(cls, zmq_context, endpoint):
"""Create new server transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint clients will connect to.
"""
socket = zmq_context.socket(zmq.ROUTER)
socket.bind(endpoint)
return cls(socket) | python | def create(cls, zmq_context, endpoint):
"""Create new server transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint clients will connect to.
"""
socket = zmq_context.socket(zmq.ROUTER)
socket.bind(endpoint)
return cls(socket) | [
"def",
"create",
"(",
"cls",
",",
"zmq_context",
",",
"endpoint",
")",
":",
"socket",
"=",
"zmq_context",
".",
"socket",
"(",
"zmq",
".",
"ROUTER",
")",
"socket",
".",
"bind",
"(",
"endpoint",
")",
"return",
"cls",
"(",
"socket",
")"
] | Create new server transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint clients will connect to. | [
"Create",
"new",
"server",
"transport",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/transports/zmq.py#L28-L42 | train | 210,969 |
mbr/tinyrpc | tinyrpc/transports/zmq.py | ZmqClientTransport.create | def create(cls, zmq_context, endpoint):
"""Create new client transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint the server is bound to.
"""
socket = zmq_context.socket(zmq.REQ)
socket.connect(endpoint)
return cls(socket) | python | def create(cls, zmq_context, endpoint):
"""Create new client transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint the server is bound to.
"""
socket = zmq_context.socket(zmq.REQ)
socket.connect(endpoint)
return cls(socket) | [
"def",
"create",
"(",
"cls",
",",
"zmq_context",
",",
"endpoint",
")",
":",
"socket",
"=",
"zmq_context",
".",
"socket",
"(",
"zmq",
".",
"REQ",
")",
"socket",
".",
"connect",
"(",
"endpoint",
")",
"return",
"cls",
"(",
"socket",
")"
] | Create new client transport.
Instead of creating the socket yourself, you can call this function and
merely pass the :py:class:`zmq.core.context.Context` instance.
By passing a context imported from :py:mod:`zmq.green`, you can use
green (gevent) 0mq sockets as well.
:param zmq_context: A 0mq context.
:param endpoint: The endpoint the server is bound to. | [
"Create",
"new",
"client",
"transport",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/transports/zmq.py#L62-L76 | train | 210,970 |
mbr/tinyrpc | tinyrpc/transports/cgi.py | CGIServerTransport.receive_message | def receive_message(self):
"""Receive a message from the transport.
Blocks until a message has been received. May return a context
opaque to clients that should be passed to :py:func:`send_reply`
to identify the client later on.
:return: A tuple consisting of ``(context, message)``.
"""
if not ('REQUEST_METHOD' in os.environ
and os.environ['REQUEST_METHOD'] == 'POST'):
print("Status: 405 Method not Allowed; only POST is accepted")
exit(0)
# POST
content_length = int(os.environ['CONTENT_LENGTH'])
request_json = sys.stdin.read(content_length)
request_json = urlparse.unquote(request_json)
# context isn't used with cgi
return None, request_json | python | def receive_message(self):
"""Receive a message from the transport.
Blocks until a message has been received. May return a context
opaque to clients that should be passed to :py:func:`send_reply`
to identify the client later on.
:return: A tuple consisting of ``(context, message)``.
"""
if not ('REQUEST_METHOD' in os.environ
and os.environ['REQUEST_METHOD'] == 'POST'):
print("Status: 405 Method not Allowed; only POST is accepted")
exit(0)
# POST
content_length = int(os.environ['CONTENT_LENGTH'])
request_json = sys.stdin.read(content_length)
request_json = urlparse.unquote(request_json)
# context isn't used with cgi
return None, request_json | [
"def",
"receive_message",
"(",
"self",
")",
":",
"if",
"not",
"(",
"'REQUEST_METHOD'",
"in",
"os",
".",
"environ",
"and",
"os",
".",
"environ",
"[",
"'REQUEST_METHOD'",
"]",
"==",
"'POST'",
")",
":",
"print",
"(",
"\"Status: 405 Method not Allowed; only POST is ... | Receive a message from the transport.
Blocks until a message has been received. May return a context
opaque to clients that should be passed to :py:func:`send_reply`
to identify the client later on.
:return: A tuple consisting of ``(context, message)``. | [
"Receive",
"a",
"message",
"from",
"the",
"transport",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/transports/cgi.py#L26-L46 | train | 210,971 |
mbr/tinyrpc | tinyrpc/transports/cgi.py | CGIServerTransport.send_reply | def send_reply(self, context, reply):
"""Sends a reply to a client.
The client is usually identified by passing ``context`` as returned
from the original :py:func:`receive_message` call.
Messages must be bytes, it is up to the sender to convert the message
beforehand. A non-bytes value raises a :py:exc:`TypeError`.
:param any context: A context returned by :py:func:`receive_message`.
:param bytes reply: A binary to send back as the reply.
"""
# context isn't used with cgi
# Using sys.stdout.buffer.write() fails as stdout is on occasion monkey patched
# to AsyncFile which doesn't support the buffer attribute.
print("Status: 200 OK")
print("Content-Type: application/json")
print("Cache-Control: no-cache")
print("Pragma: no-cache")
print("Content-Length: %d" % len(reply))
print()
print(reply.decode()) | python | def send_reply(self, context, reply):
"""Sends a reply to a client.
The client is usually identified by passing ``context`` as returned
from the original :py:func:`receive_message` call.
Messages must be bytes, it is up to the sender to convert the message
beforehand. A non-bytes value raises a :py:exc:`TypeError`.
:param any context: A context returned by :py:func:`receive_message`.
:param bytes reply: A binary to send back as the reply.
"""
# context isn't used with cgi
# Using sys.stdout.buffer.write() fails as stdout is on occasion monkey patched
# to AsyncFile which doesn't support the buffer attribute.
print("Status: 200 OK")
print("Content-Type: application/json")
print("Cache-Control: no-cache")
print("Pragma: no-cache")
print("Content-Length: %d" % len(reply))
print()
print(reply.decode()) | [
"def",
"send_reply",
"(",
"self",
",",
"context",
",",
"reply",
")",
":",
"# context isn't used with cgi",
"# Using sys.stdout.buffer.write() fails as stdout is on occasion monkey patched",
"# to AsyncFile which doesn't support the buffer attribute.",
"print",
"(",
"\"Status: 200 OK\""... | Sends a reply to a client.
The client is usually identified by passing ``context`` as returned
from the original :py:func:`receive_message` call.
Messages must be bytes, it is up to the sender to convert the message
beforehand. A non-bytes value raises a :py:exc:`TypeError`.
:param any context: A context returned by :py:func:`receive_message`.
:param bytes reply: A binary to send back as the reply. | [
"Sends",
"a",
"reply",
"to",
"a",
"client",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/transports/cgi.py#L48-L70 | train | 210,972 |
mbr/tinyrpc | tinyrpc/server/gevent.py | RPCServerGreenlets._spawn | def _spawn(self, func, *args, **kwargs):
"""Spawn a handler function.
Spawns the supplied ``func`` with ``*args`` and ``**kwargs``
as a gevent greenlet.
:param func: A callable to call.
:param args: Arguments to ``func``.
:param kwargs: Keyword arguments to ``func``.
"""
gevent.spawn(func, *args, **kwargs) | python | def _spawn(self, func, *args, **kwargs):
"""Spawn a handler function.
Spawns the supplied ``func`` with ``*args`` and ``**kwargs``
as a gevent greenlet.
:param func: A callable to call.
:param args: Arguments to ``func``.
:param kwargs: Keyword arguments to ``func``.
"""
gevent.spawn(func, *args, **kwargs) | [
"def",
"_spawn",
"(",
"self",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"gevent",
".",
"spawn",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Spawn a handler function.
Spawns the supplied ``func`` with ``*args`` and ``**kwargs``
as a gevent greenlet.
:param func: A callable to call.
:param args: Arguments to ``func``.
:param kwargs: Keyword arguments to ``func``. | [
"Spawn",
"a",
"handler",
"function",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/server/gevent.py#L21-L31 | train | 210,973 |
mbr/tinyrpc | tinyrpc/client.py | RPCClient.call | def call(self, method, args, kwargs, one_way=False):
"""Calls the requested method and returns the result.
If an error occured, an :py:class:`~tinyrpc.exc.RPCError` instance
is raised.
:param method: Name of the method to call.
:param args: Arguments to pass to the method.
:param kwargs: Keyword arguments to pass to the method.
:param one_way: Whether or not a reply is desired.
"""
req = self.protocol.create_request(method, args, kwargs, one_way)
rep = self._send_and_handle_reply(req, one_way)
if one_way:
return
return rep.result | python | def call(self, method, args, kwargs, one_way=False):
"""Calls the requested method and returns the result.
If an error occured, an :py:class:`~tinyrpc.exc.RPCError` instance
is raised.
:param method: Name of the method to call.
:param args: Arguments to pass to the method.
:param kwargs: Keyword arguments to pass to the method.
:param one_way: Whether or not a reply is desired.
"""
req = self.protocol.create_request(method, args, kwargs, one_way)
rep = self._send_and_handle_reply(req, one_way)
if one_way:
return
return rep.result | [
"def",
"call",
"(",
"self",
",",
"method",
",",
"args",
",",
"kwargs",
",",
"one_way",
"=",
"False",
")",
":",
"req",
"=",
"self",
".",
"protocol",
".",
"create_request",
"(",
"method",
",",
"args",
",",
"kwargs",
",",
"one_way",
")",
"rep",
"=",
"... | Calls the requested method and returns the result.
If an error occured, an :py:class:`~tinyrpc.exc.RPCError` instance
is raised.
:param method: Name of the method to call.
:param args: Arguments to pass to the method.
:param kwargs: Keyword arguments to pass to the method.
:param one_way: Whether or not a reply is desired. | [
"Calls",
"the",
"requested",
"method",
"and",
"returns",
"the",
"result",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/client.py#L64-L82 | train | 210,974 |
mbr/tinyrpc | tinyrpc/client.py | RPCClient.call_all | def call_all(self, requests):
"""Calls the methods in the request in parallel.
When the :py:mod:`gevent` module is already loaded it is assumed to be
correctly initialized, including monkey patching if necessary.
In that case the RPC calls defined by ``requests`` is performed in
parallel otherwise the methods are called sequentially.
:param requests: A listof either :py:class:`~tinyrpc.client.RPCCall` or :py:class:`~tinyrpc.client.RPCCallTo` elements.
When RPCCallTo is used each element defines a transport.
Otherwise the default transport set when RPCClient is
created is used.
:return: A list with replies matching the order of the requests.
"""
threads = []
if 'gevent' in sys.modules:
# assume that gevent is available and functional, make calls in parallel
import gevent
for r in requests:
req = self.protocol.create_request(r.method, r.args, r.kwargs)
tr = r.transport.transport if len(r) == 4 else None
threads.append(
gevent.spawn(
self._send_and_handle_reply, req, False, tr, True
)
)
gevent.joinall(threads)
return [t.value for t in threads]
else:
# call serially
for r in requests:
req = self.protocol.create_request(r.method, r.args, r.kwargs)
tr = r.transport.transport if len(r) == 4 else None
threads.append(
self._send_and_handle_reply(req, False, tr, True)
)
return threads | python | def call_all(self, requests):
"""Calls the methods in the request in parallel.
When the :py:mod:`gevent` module is already loaded it is assumed to be
correctly initialized, including monkey patching if necessary.
In that case the RPC calls defined by ``requests`` is performed in
parallel otherwise the methods are called sequentially.
:param requests: A listof either :py:class:`~tinyrpc.client.RPCCall` or :py:class:`~tinyrpc.client.RPCCallTo` elements.
When RPCCallTo is used each element defines a transport.
Otherwise the default transport set when RPCClient is
created is used.
:return: A list with replies matching the order of the requests.
"""
threads = []
if 'gevent' in sys.modules:
# assume that gevent is available and functional, make calls in parallel
import gevent
for r in requests:
req = self.protocol.create_request(r.method, r.args, r.kwargs)
tr = r.transport.transport if len(r) == 4 else None
threads.append(
gevent.spawn(
self._send_and_handle_reply, req, False, tr, True
)
)
gevent.joinall(threads)
return [t.value for t in threads]
else:
# call serially
for r in requests:
req = self.protocol.create_request(r.method, r.args, r.kwargs)
tr = r.transport.transport if len(r) == 4 else None
threads.append(
self._send_and_handle_reply(req, False, tr, True)
)
return threads | [
"def",
"call_all",
"(",
"self",
",",
"requests",
")",
":",
"threads",
"=",
"[",
"]",
"if",
"'gevent'",
"in",
"sys",
".",
"modules",
":",
"# assume that gevent is available and functional, make calls in parallel",
"import",
"gevent",
"for",
"r",
"in",
"requests",
"... | Calls the methods in the request in parallel.
When the :py:mod:`gevent` module is already loaded it is assumed to be
correctly initialized, including monkey patching if necessary.
In that case the RPC calls defined by ``requests`` is performed in
parallel otherwise the methods are called sequentially.
:param requests: A listof either :py:class:`~tinyrpc.client.RPCCall` or :py:class:`~tinyrpc.client.RPCCallTo` elements.
When RPCCallTo is used each element defines a transport.
Otherwise the default transport set when RPCClient is
created is used.
:return: A list with replies matching the order of the requests. | [
"Calls",
"the",
"methods",
"in",
"the",
"request",
"in",
"parallel",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/client.py#L84-L121 | train | 210,975 |
mbr/tinyrpc | tinyrpc/client.py | RPCClient.batch_call | def batch_call(self, calls):
"""Experimental, use at your own peril."""
req = self.protocol.create_batch_request()
for call_args in calls:
req.append(self.protocol.create_request(*call_args))
return self._send_and_handle_reply(req) | python | def batch_call(self, calls):
"""Experimental, use at your own peril."""
req = self.protocol.create_batch_request()
for call_args in calls:
req.append(self.protocol.create_request(*call_args))
return self._send_and_handle_reply(req) | [
"def",
"batch_call",
"(",
"self",
",",
"calls",
")",
":",
"req",
"=",
"self",
".",
"protocol",
".",
"create_batch_request",
"(",
")",
"for",
"call_args",
"in",
"calls",
":",
"req",
".",
"append",
"(",
"self",
".",
"protocol",
".",
"create_request",
"(",
... | Experimental, use at your own peril. | [
"Experimental",
"use",
"at",
"your",
"own",
"peril",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/client.py#L131-L138 | train | 210,976 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | public | def public(name=None):
"""Decorator. Mark a method as eligible for registration by a dispatcher.
The dispatchers :py:func:`~tinyrpc.dispatch.RPCDispatcher.register_instance` function
will do the actual registration of the marked method.
The difference with :py:func:`~tinyrpc.dispatch.RPCDispatcher.public` is that this decorator does
not register with a dispatcher, therefore binding the marked methods with a dispatcher is delayed
until runtime.
It also becomes possible to bind with multiple dispatchers.
:param name: The name to register the function with.
:type name: str or None
Example:
.. code-block:: python
def class Baz(object):
def not_exposed(self);
# ...
@public('do_something')
def visible_method(arg1):
# ...
baz = Baz()
dispatch = RPCDispatcher()
dispatch.register_instance(baz, 'bazzies`)
# Baz.visible_method is now callable via RPC as bazzies.do_something
``@public`` is a shortcut for ``@public()``.
"""
if callable(name):
f = name
f._rpc_public_name = f.__name__
return f
def _(f):
f._rpc_public_name = name or f.__name__
return f
return _ | python | def public(name=None):
"""Decorator. Mark a method as eligible for registration by a dispatcher.
The dispatchers :py:func:`~tinyrpc.dispatch.RPCDispatcher.register_instance` function
will do the actual registration of the marked method.
The difference with :py:func:`~tinyrpc.dispatch.RPCDispatcher.public` is that this decorator does
not register with a dispatcher, therefore binding the marked methods with a dispatcher is delayed
until runtime.
It also becomes possible to bind with multiple dispatchers.
:param name: The name to register the function with.
:type name: str or None
Example:
.. code-block:: python
def class Baz(object):
def not_exposed(self);
# ...
@public('do_something')
def visible_method(arg1):
# ...
baz = Baz()
dispatch = RPCDispatcher()
dispatch.register_instance(baz, 'bazzies`)
# Baz.visible_method is now callable via RPC as bazzies.do_something
``@public`` is a shortcut for ``@public()``.
"""
if callable(name):
f = name
f._rpc_public_name = f.__name__
return f
def _(f):
f._rpc_public_name = name or f.__name__
return f
return _ | [
"def",
"public",
"(",
"name",
"=",
"None",
")",
":",
"if",
"callable",
"(",
"name",
")",
":",
"f",
"=",
"name",
"f",
".",
"_rpc_public_name",
"=",
"f",
".",
"__name__",
"return",
"f",
"def",
"_",
"(",
"f",
")",
":",
"f",
".",
"_rpc_public_name",
... | Decorator. Mark a method as eligible for registration by a dispatcher.
The dispatchers :py:func:`~tinyrpc.dispatch.RPCDispatcher.register_instance` function
will do the actual registration of the marked method.
The difference with :py:func:`~tinyrpc.dispatch.RPCDispatcher.public` is that this decorator does
not register with a dispatcher, therefore binding the marked methods with a dispatcher is delayed
until runtime.
It also becomes possible to bind with multiple dispatchers.
:param name: The name to register the function with.
:type name: str or None
Example:
.. code-block:: python
def class Baz(object):
def not_exposed(self);
# ...
@public('do_something')
def visible_method(arg1):
# ...
baz = Baz()
dispatch = RPCDispatcher()
dispatch.register_instance(baz, 'bazzies`)
# Baz.visible_method is now callable via RPC as bazzies.do_something
``@public`` is a shortcut for ``@public()``. | [
"Decorator",
".",
"Mark",
"a",
"method",
"as",
"eligible",
"for",
"registration",
"by",
"a",
"dispatcher",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L17-L59 | train | 210,977 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | RPCDispatcher.public | def public(self, name=None):
"""Convenient decorator.
Allows easy registering of functions to this dispatcher. Example:
.. code-block:: python
dispatch = RPCDispatcher()
@dispatch.public
def foo(bar):
# ...
class Baz(object):
def not_exposed(self):
# ...
@dispatch.public(name='do_something')
def visible_method(arg1)
# ...
:param str name: Name to register callable with.
"""
if callable(name):
self.add_method(name)
return name
def _(f):
self.add_method(f, name=name)
return f
return _ | python | def public(self, name=None):
"""Convenient decorator.
Allows easy registering of functions to this dispatcher. Example:
.. code-block:: python
dispatch = RPCDispatcher()
@dispatch.public
def foo(bar):
# ...
class Baz(object):
def not_exposed(self):
# ...
@dispatch.public(name='do_something')
def visible_method(arg1)
# ...
:param str name: Name to register callable with.
"""
if callable(name):
self.add_method(name)
return name
def _(f):
self.add_method(f, name=name)
return f
return _ | [
"def",
"public",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"if",
"callable",
"(",
"name",
")",
":",
"self",
".",
"add_method",
"(",
"name",
")",
"return",
"name",
"def",
"_",
"(",
"f",
")",
":",
"self",
".",
"add_method",
"(",
"f",
",",
... | Convenient decorator.
Allows easy registering of functions to this dispatcher. Example:
.. code-block:: python
dispatch = RPCDispatcher()
@dispatch.public
def foo(bar):
# ...
class Baz(object):
def not_exposed(self):
# ...
@dispatch.public(name='do_something')
def visible_method(arg1)
# ...
:param str name: Name to register callable with. | [
"Convenient",
"decorator",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L69-L100 | train | 210,978 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | RPCDispatcher.add_subdispatch | def add_subdispatch(self, dispatcher, prefix=''):
"""Adds a subdispatcher, possibly in its own namespace.
:param dispatcher: The dispatcher to add as a subdispatcher.
:type dispatcher: RPCDispatcher
:param str prefix: A prefix. All of the new subdispatchers methods will be
available as prefix + their original name.
"""
self.subdispatchers.setdefault(prefix, []).append(dispatcher) | python | def add_subdispatch(self, dispatcher, prefix=''):
"""Adds a subdispatcher, possibly in its own namespace.
:param dispatcher: The dispatcher to add as a subdispatcher.
:type dispatcher: RPCDispatcher
:param str prefix: A prefix. All of the new subdispatchers methods will be
available as prefix + their original name.
"""
self.subdispatchers.setdefault(prefix, []).append(dispatcher) | [
"def",
"add_subdispatch",
"(",
"self",
",",
"dispatcher",
",",
"prefix",
"=",
"''",
")",
":",
"self",
".",
"subdispatchers",
".",
"setdefault",
"(",
"prefix",
",",
"[",
"]",
")",
".",
"append",
"(",
"dispatcher",
")"
] | Adds a subdispatcher, possibly in its own namespace.
:param dispatcher: The dispatcher to add as a subdispatcher.
:type dispatcher: RPCDispatcher
:param str prefix: A prefix. All of the new subdispatchers methods will be
available as prefix + their original name. | [
"Adds",
"a",
"subdispatcher",
"possibly",
"in",
"its",
"own",
"namespace",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L102-L110 | train | 210,979 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | RPCDispatcher.get_method | def get_method(self, name):
"""Retrieve a previously registered method.
Checks if a method matching ``name`` has been registered.
If :py:func:`get_method` cannot find a method, every subdispatcher
with a prefix matching the method name is checked as well.
:param str name: Function to find.
:returns: The callable implementing the function.
:rtype: callable
:raises: :py:exc:`~tinyrpc.exc.MethodNotFoundError`
"""
if name in self.method_map:
return self.method_map[name]
for prefix, subdispatchers in self.subdispatchers.items():
if name.startswith(prefix):
for sd in subdispatchers:
try:
return sd.get_method(name[len(prefix):])
except exc.MethodNotFoundError:
pass
raise exc.MethodNotFoundError(name) | python | def get_method(self, name):
"""Retrieve a previously registered method.
Checks if a method matching ``name`` has been registered.
If :py:func:`get_method` cannot find a method, every subdispatcher
with a prefix matching the method name is checked as well.
:param str name: Function to find.
:returns: The callable implementing the function.
:rtype: callable
:raises: :py:exc:`~tinyrpc.exc.MethodNotFoundError`
"""
if name in self.method_map:
return self.method_map[name]
for prefix, subdispatchers in self.subdispatchers.items():
if name.startswith(prefix):
for sd in subdispatchers:
try:
return sd.get_method(name[len(prefix):])
except exc.MethodNotFoundError:
pass
raise exc.MethodNotFoundError(name) | [
"def",
"get_method",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"in",
"self",
".",
"method_map",
":",
"return",
"self",
".",
"method_map",
"[",
"name",
"]",
"for",
"prefix",
",",
"subdispatchers",
"in",
"self",
".",
"subdispatchers",
".",
"items",... | Retrieve a previously registered method.
Checks if a method matching ``name`` has been registered.
If :py:func:`get_method` cannot find a method, every subdispatcher
with a prefix matching the method name is checked as well.
:param str name: Function to find.
:returns: The callable implementing the function.
:rtype: callable
:raises: :py:exc:`~tinyrpc.exc.MethodNotFoundError` | [
"Retrieve",
"a",
"previously",
"registered",
"method",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L132-L156 | train | 210,980 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | RPCDispatcher.register_instance | def register_instance(self, obj, prefix=''):
"""Create new subdispatcher and register all public object methods on
it.
To be used in conjunction with the :py:func:`public`
decorator (*not* :py:func:`RPCDispatcher.public`).
:param obj: The object whose public methods should be made available.
:type obj: object
:param str prefix: A prefix for the new subdispatcher.
"""
dispatch = self.__class__()
for name, f in inspect.getmembers(
obj, lambda f: callable(f) and hasattr(f, '_rpc_public_name')):
dispatch.add_method(f, f._rpc_public_name)
# add to dispatchers
self.add_subdispatch(dispatch, prefix) | python | def register_instance(self, obj, prefix=''):
"""Create new subdispatcher and register all public object methods on
it.
To be used in conjunction with the :py:func:`public`
decorator (*not* :py:func:`RPCDispatcher.public`).
:param obj: The object whose public methods should be made available.
:type obj: object
:param str prefix: A prefix for the new subdispatcher.
"""
dispatch = self.__class__()
for name, f in inspect.getmembers(
obj, lambda f: callable(f) and hasattr(f, '_rpc_public_name')):
dispatch.add_method(f, f._rpc_public_name)
# add to dispatchers
self.add_subdispatch(dispatch, prefix) | [
"def",
"register_instance",
"(",
"self",
",",
"obj",
",",
"prefix",
"=",
"''",
")",
":",
"dispatch",
"=",
"self",
".",
"__class__",
"(",
")",
"for",
"name",
",",
"f",
"in",
"inspect",
".",
"getmembers",
"(",
"obj",
",",
"lambda",
"f",
":",
"callable"... | Create new subdispatcher and register all public object methods on
it.
To be used in conjunction with the :py:func:`public`
decorator (*not* :py:func:`RPCDispatcher.public`).
:param obj: The object whose public methods should be made available.
:type obj: object
:param str prefix: A prefix for the new subdispatcher. | [
"Create",
"new",
"subdispatcher",
"and",
"register",
"all",
"public",
"object",
"methods",
"on",
"it",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L158-L175 | train | 210,981 |
mbr/tinyrpc | tinyrpc/dispatch/__init__.py | RPCDispatcher.dispatch | def dispatch(self, request):
"""Fully handle request.
The dispatch method determines which method to call, calls it and
returns a response containing a result.
No exceptions will be thrown, rather, every exception will be turned
into a response using :py:func:`~tinyrpc.RPCRequest.error_respond`.
If a method isn't found, a :py:exc:`~tinyrpc.exc.MethodNotFoundError`
response will be returned. If any error occurs outside of the requested
method, a :py:exc:`~tinyrpc.exc.ServerError` without any error
information will be returend.
If the method is found and called but throws an exception, the
exception thrown is used as a response instead. This is the only case
in which information from the exception is possibly propagated back to
the client, as the exception is part of the requested method.
:py:class:`~tinyrpc.RPCBatchRequest` instances are handled by handling
all its children in order and collecting the results, then returning an
:py:class:`~tinyrpc.RPCBatchResponse` with the results.
:param request: The request containing the function to be called and its parameters.
:type request: ~tinyrpc.protocols.RPCRequest
:return: The result produced by calling the requested function.
:rtype: ~tinyrpc.protocols.RPCResponse
:raises ~exc.MethodNotFoundError: If the requested function is not published.
:raises ~exc.ServerError: If some other error occurred.
.. Note::
The :py:exc:`~tinyrpc.exc.ServerError` is raised for any kind of exception not
raised by the called function itself or :py:exc:`~tinyrpc.exc.MethodNotFoundError`.
"""
if hasattr(request, 'create_batch_response'):
results = [self._dispatch(req) for req in request]
response = request.create_batch_response()
if response is not None:
response.extend(results)
return response
else:
return self._dispatch(request) | python | def dispatch(self, request):
"""Fully handle request.
The dispatch method determines which method to call, calls it and
returns a response containing a result.
No exceptions will be thrown, rather, every exception will be turned
into a response using :py:func:`~tinyrpc.RPCRequest.error_respond`.
If a method isn't found, a :py:exc:`~tinyrpc.exc.MethodNotFoundError`
response will be returned. If any error occurs outside of the requested
method, a :py:exc:`~tinyrpc.exc.ServerError` without any error
information will be returend.
If the method is found and called but throws an exception, the
exception thrown is used as a response instead. This is the only case
in which information from the exception is possibly propagated back to
the client, as the exception is part of the requested method.
:py:class:`~tinyrpc.RPCBatchRequest` instances are handled by handling
all its children in order and collecting the results, then returning an
:py:class:`~tinyrpc.RPCBatchResponse` with the results.
:param request: The request containing the function to be called and its parameters.
:type request: ~tinyrpc.protocols.RPCRequest
:return: The result produced by calling the requested function.
:rtype: ~tinyrpc.protocols.RPCResponse
:raises ~exc.MethodNotFoundError: If the requested function is not published.
:raises ~exc.ServerError: If some other error occurred.
.. Note::
The :py:exc:`~tinyrpc.exc.ServerError` is raised for any kind of exception not
raised by the called function itself or :py:exc:`~tinyrpc.exc.MethodNotFoundError`.
"""
if hasattr(request, 'create_batch_response'):
results = [self._dispatch(req) for req in request]
response = request.create_batch_response()
if response is not None:
response.extend(results)
return response
else:
return self._dispatch(request) | [
"def",
"dispatch",
"(",
"self",
",",
"request",
")",
":",
"if",
"hasattr",
"(",
"request",
",",
"'create_batch_response'",
")",
":",
"results",
"=",
"[",
"self",
".",
"_dispatch",
"(",
"req",
")",
"for",
"req",
"in",
"request",
"]",
"response",
"=",
"r... | Fully handle request.
The dispatch method determines which method to call, calls it and
returns a response containing a result.
No exceptions will be thrown, rather, every exception will be turned
into a response using :py:func:`~tinyrpc.RPCRequest.error_respond`.
If a method isn't found, a :py:exc:`~tinyrpc.exc.MethodNotFoundError`
response will be returned. If any error occurs outside of the requested
method, a :py:exc:`~tinyrpc.exc.ServerError` without any error
information will be returend.
If the method is found and called but throws an exception, the
exception thrown is used as a response instead. This is the only case
in which information from the exception is possibly propagated back to
the client, as the exception is part of the requested method.
:py:class:`~tinyrpc.RPCBatchRequest` instances are handled by handling
all its children in order and collecting the results, then returning an
:py:class:`~tinyrpc.RPCBatchResponse` with the results.
:param request: The request containing the function to be called and its parameters.
:type request: ~tinyrpc.protocols.RPCRequest
:return: The result produced by calling the requested function.
:rtype: ~tinyrpc.protocols.RPCResponse
:raises ~exc.MethodNotFoundError: If the requested function is not published.
:raises ~exc.ServerError: If some other error occurred.
.. Note::
The :py:exc:`~tinyrpc.exc.ServerError` is raised for any kind of exception not
raised by the called function itself or :py:exc:`~tinyrpc.exc.MethodNotFoundError`. | [
"Fully",
"handle",
"request",
"."
] | 59ccf62452b3f37e8411ff0309a3a99857d05e19 | https://github.com/mbr/tinyrpc/blob/59ccf62452b3f37e8411ff0309a3a99857d05e19/tinyrpc/dispatch/__init__.py#L177-L221 | train | 210,982 |
mailgun/expiringdict | expiringdict/__init__.py | ExpiringDict.pop | def pop(self, key, default=None):
""" Get item from the dict and remove it.
Return default if expired or does not exist. Never raise KeyError.
"""
with self.lock:
try:
item = OrderedDict.__getitem__(self, key)
del self[key]
return item[0]
except KeyError:
return default | python | def pop(self, key, default=None):
""" Get item from the dict and remove it.
Return default if expired or does not exist. Never raise KeyError.
"""
with self.lock:
try:
item = OrderedDict.__getitem__(self, key)
del self[key]
return item[0]
except KeyError:
return default | [
"def",
"pop",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"with",
"self",
".",
"lock",
":",
"try",
":",
"item",
"=",
"OrderedDict",
".",
"__getitem__",
"(",
"self",
",",
"key",
")",
"del",
"self",
"[",
"key",
"]",
"return",
"i... | Get item from the dict and remove it.
Return default if expired or does not exist. Never raise KeyError. | [
"Get",
"item",
"from",
"the",
"dict",
"and",
"remove",
"it",
"."
] | 750048022cde40d35721253a88fbaa2df1781e94 | https://github.com/mailgun/expiringdict/blob/750048022cde40d35721253a88fbaa2df1781e94/expiringdict/__init__.py#L84-L95 | train | 210,983 |
mailgun/expiringdict | expiringdict/__init__.py | ExpiringDict.get | def get(self, key, default=None, with_age=False):
" Return the value for key if key is in the dictionary, else default. "
try:
return self.__getitem__(key, with_age)
except KeyError:
if with_age:
return default, None
else:
return default | python | def get(self, key, default=None, with_age=False):
" Return the value for key if key is in the dictionary, else default. "
try:
return self.__getitem__(key, with_age)
except KeyError:
if with_age:
return default, None
else:
return default | [
"def",
"get",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
",",
"with_age",
"=",
"False",
")",
":",
"try",
":",
"return",
"self",
".",
"__getitem__",
"(",
"key",
",",
"with_age",
")",
"except",
"KeyError",
":",
"if",
"with_age",
":",
"return... | Return the value for key if key is in the dictionary, else default. | [
"Return",
"the",
"value",
"for",
"key",
"if",
"key",
"is",
"in",
"the",
"dictionary",
"else",
"default",
"."
] | 750048022cde40d35721253a88fbaa2df1781e94 | https://github.com/mailgun/expiringdict/blob/750048022cde40d35721253a88fbaa2df1781e94/expiringdict/__init__.py#L109-L117 | train | 210,984 |
ns1/ns1-python | ns1/records.py | Record.reload | def reload(self, callback=None, errback=None):
"""
Reload record data from the API.
"""
return self.load(reload=True, callback=callback, errback=errback) | python | def reload(self, callback=None, errback=None):
"""
Reload record data from the API.
"""
return self.load(reload=True, callback=callback, errback=errback) | [
"def",
"reload",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
")",
":",
"return",
"self",
".",
"load",
"(",
"reload",
"=",
"True",
",",
"callback",
"=",
"callback",
",",
"errback",
"=",
"errback",
")"
] | Reload record data from the API. | [
"Reload",
"record",
"data",
"from",
"the",
"API",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/records.py#L49-L53 | train | 210,985 |
ns1/ns1-python | ns1/records.py | Record.load | def load(self, callback=None, errback=None, reload=False):
"""
Load record data from the API.
"""
if not reload and self.data:
raise RecordException('record already loaded')
def success(result, *args):
self._parseModel(result)
if callback:
return callback(self)
else:
return self
return self._rest.retrieve(self.parentZone.zone,
self.domain, self.type,
callback=success, errback=errback) | python | def load(self, callback=None, errback=None, reload=False):
"""
Load record data from the API.
"""
if not reload and self.data:
raise RecordException('record already loaded')
def success(result, *args):
self._parseModel(result)
if callback:
return callback(self)
else:
return self
return self._rest.retrieve(self.parentZone.zone,
self.domain, self.type,
callback=success, errback=errback) | [
"def",
"load",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
",",
"reload",
"=",
"False",
")",
":",
"if",
"not",
"reload",
"and",
"self",
".",
"data",
":",
"raise",
"RecordException",
"(",
"'record already loaded'",
")",
"def",
... | Load record data from the API. | [
"Load",
"record",
"data",
"from",
"the",
"API",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/records.py#L55-L70 | train | 210,986 |
ns1/ns1-python | ns1/records.py | Record.delete | def delete(self, callback=None, errback=None):
"""
Delete the record from the zone, including all advanced configuration,
meta data, etc.
"""
if not self.data:
raise RecordException('record not loaded')
def success(result, *args):
if callback:
return callback(result)
else:
return result
return self._rest.delete(self.parentZone.zone,
self.domain, self.type,
callback=success, errback=errback) | python | def delete(self, callback=None, errback=None):
"""
Delete the record from the zone, including all advanced configuration,
meta data, etc.
"""
if not self.data:
raise RecordException('record not loaded')
def success(result, *args):
if callback:
return callback(result)
else:
return result
return self._rest.delete(self.parentZone.zone,
self.domain, self.type,
callback=success, errback=errback) | [
"def",
"delete",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"data",
":",
"raise",
"RecordException",
"(",
"'record not loaded'",
")",
"def",
"success",
"(",
"result",
",",
"*",
"args",
")... | Delete the record from the zone, including all advanced configuration,
meta data, etc. | [
"Delete",
"the",
"record",
"from",
"the",
"zone",
"including",
"all",
"advanced",
"configuration",
"meta",
"data",
"etc",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/records.py#L72-L88 | train | 210,987 |
ns1/ns1-python | ns1/records.py | Record.qps | def qps(self, callback=None, errback=None):
"""
Return the current QPS for this record
:rtype: dict
:return: QPS information
"""
if not self.data:
raise RecordException('record not loaded')
stats = Stats(self.parentZone.config)
return stats.qps(zone=self.parentZone.zone,
domain=self.domain,
type=self.type,
callback=callback,
errback=errback) | python | def qps(self, callback=None, errback=None):
"""
Return the current QPS for this record
:rtype: dict
:return: QPS information
"""
if not self.data:
raise RecordException('record not loaded')
stats = Stats(self.parentZone.config)
return stats.qps(zone=self.parentZone.zone,
domain=self.domain,
type=self.type,
callback=callback,
errback=errback) | [
"def",
"qps",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"data",
":",
"raise",
"RecordException",
"(",
"'record not loaded'",
")",
"stats",
"=",
"Stats",
"(",
"self",
".",
"parentZone",
"... | Return the current QPS for this record
:rtype: dict
:return: QPS information | [
"Return",
"the",
"current",
"QPS",
"for",
"this",
"record"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/records.py#L132-L146 | train | 210,988 |
ns1/ns1-python | ns1/records.py | Record.addAnswers | def addAnswers(self, answers, callback=None, errback=None, **kwargs):
"""
Add answers to the record.
:param answers: answers structure. See the class note on answer format.
"""
if not self.data:
raise RecordException('record not loaded')
orig_answers = self.data['answers']
new_answers = self._rest._getAnswersForBody(answers)
orig_answers.extend(new_answers)
return self.update(answers=orig_answers, callback=callback,
errback=errback, **kwargs) | python | def addAnswers(self, answers, callback=None, errback=None, **kwargs):
"""
Add answers to the record.
:param answers: answers structure. See the class note on answer format.
"""
if not self.data:
raise RecordException('record not loaded')
orig_answers = self.data['answers']
new_answers = self._rest._getAnswersForBody(answers)
orig_answers.extend(new_answers)
return self.update(answers=orig_answers, callback=callback,
errback=errback, **kwargs) | [
"def",
"addAnswers",
"(",
"self",
",",
"answers",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"data",
":",
"raise",
"RecordException",
"(",
"'record not loaded'",
")",
"orig_answ... | Add answers to the record.
:param answers: answers structure. See the class note on answer format. | [
"Add",
"answers",
"to",
"the",
"record",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/records.py#L165-L177 | train | 210,989 |
ns1/ns1-python | ns1/config.py | Config.createFromAPIKey | def createFromAPIKey(self, apikey, maybeWriteDefault=False):
"""
Create a basic config from a single API key
:param str apikey: NS1 API Key, as created in the NS1 portal
:param bool maybeWriteDefault: If True and DEFAULT_CONFIG_FILE doesn't\
exist write out the resulting config there.
"""
self._data = {
'default_key': 'default',
'keys': {
'default': {
'key': apikey,
'desc': 'imported API key'
}
}
}
self._keyID = 'default'
self._doDefaults()
if maybeWriteDefault:
path = os.path.expanduser(self.DEFAULT_CONFIG_FILE)
self.write(path) | python | def createFromAPIKey(self, apikey, maybeWriteDefault=False):
"""
Create a basic config from a single API key
:param str apikey: NS1 API Key, as created in the NS1 portal
:param bool maybeWriteDefault: If True and DEFAULT_CONFIG_FILE doesn't\
exist write out the resulting config there.
"""
self._data = {
'default_key': 'default',
'keys': {
'default': {
'key': apikey,
'desc': 'imported API key'
}
}
}
self._keyID = 'default'
self._doDefaults()
if maybeWriteDefault:
path = os.path.expanduser(self.DEFAULT_CONFIG_FILE)
self.write(path) | [
"def",
"createFromAPIKey",
"(",
"self",
",",
"apikey",
",",
"maybeWriteDefault",
"=",
"False",
")",
":",
"self",
".",
"_data",
"=",
"{",
"'default_key'",
":",
"'default'",
",",
"'keys'",
":",
"{",
"'default'",
":",
"{",
"'key'",
":",
"apikey",
",",
"'des... | Create a basic config from a single API key
:param str apikey: NS1 API Key, as created in the NS1 portal
:param bool maybeWriteDefault: If True and DEFAULT_CONFIG_FILE doesn't\
exist write out the resulting config there. | [
"Create",
"a",
"basic",
"config",
"from",
"a",
"single",
"API",
"key"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L55-L76 | train | 210,990 |
ns1/ns1-python | ns1/config.py | Config.loadFromFile | def loadFromFile(self, path):
"""
Load JSON config file from disk at the given path
:param str path: path to config file
"""
if '~' in path:
path = os.path.expanduser(path)
f = open(path)
body = f.read()
f.close()
self._path = path
self.loadFromString(body) | python | def loadFromFile(self, path):
"""
Load JSON config file from disk at the given path
:param str path: path to config file
"""
if '~' in path:
path = os.path.expanduser(path)
f = open(path)
body = f.read()
f.close()
self._path = path
self.loadFromString(body) | [
"def",
"loadFromFile",
"(",
"self",
",",
"path",
")",
":",
"if",
"'~'",
"in",
"path",
":",
"path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"path",
")",
"f",
"=",
"open",
"(",
"path",
")",
"body",
"=",
"f",
".",
"read",
"(",
")",
"f",
... | Load JSON config file from disk at the given path
:param str path: path to config file | [
"Load",
"JSON",
"config",
"file",
"from",
"disk",
"at",
"the",
"given",
"path"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L100-L112 | train | 210,991 |
ns1/ns1-python | ns1/config.py | Config.write | def write(self, path=None):
"""
Write config data to disk. If this config object already has a path,
it will write to it. If it doesn't, one must be passed during this
call.
:param str path: path to config file
"""
if not self._path and not path:
raise ConfigException('no config path given')
if path:
self._path = path
if '~' in self._path:
self._path = os.path.expanduser(self._path)
f = open(self._path, 'w')
f.write(json.dumps(self._data))
f.close() | python | def write(self, path=None):
"""
Write config data to disk. If this config object already has a path,
it will write to it. If it doesn't, one must be passed during this
call.
:param str path: path to config file
"""
if not self._path and not path:
raise ConfigException('no config path given')
if path:
self._path = path
if '~' in self._path:
self._path = os.path.expanduser(self._path)
f = open(self._path, 'w')
f.write(json.dumps(self._data))
f.close() | [
"def",
"write",
"(",
"self",
",",
"path",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"_path",
"and",
"not",
"path",
":",
"raise",
"ConfigException",
"(",
"'no config path given'",
")",
"if",
"path",
":",
"self",
".",
"_path",
"=",
"path",
"if",
... | Write config data to disk. If this config object already has a path,
it will write to it. If it doesn't, one must be passed during this
call.
:param str path: path to config file | [
"Write",
"config",
"data",
"to",
"disk",
".",
"If",
"this",
"config",
"object",
"already",
"has",
"a",
"path",
"it",
"will",
"write",
"to",
"it",
".",
"If",
"it",
"doesn",
"t",
"one",
"must",
"be",
"passed",
"during",
"this",
"call",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L114-L130 | train | 210,992 |
ns1/ns1-python | ns1/config.py | Config.useKeyID | def useKeyID(self, keyID):
"""
Use the given API key config specified by `keyID` during subsequent
API calls
:param str keyID: an index into the 'keys' maintained in this config
"""
if keyID not in self._data['keys']:
raise ConfigException('keyID does not exist: %s' % keyID)
self._keyID = keyID | python | def useKeyID(self, keyID):
"""
Use the given API key config specified by `keyID` during subsequent
API calls
:param str keyID: an index into the 'keys' maintained in this config
"""
if keyID not in self._data['keys']:
raise ConfigException('keyID does not exist: %s' % keyID)
self._keyID = keyID | [
"def",
"useKeyID",
"(",
"self",
",",
"keyID",
")",
":",
"if",
"keyID",
"not",
"in",
"self",
".",
"_data",
"[",
"'keys'",
"]",
":",
"raise",
"ConfigException",
"(",
"'keyID does not exist: %s'",
"%",
"keyID",
")",
"self",
".",
"_keyID",
"=",
"keyID"
] | Use the given API key config specified by `keyID` during subsequent
API calls
:param str keyID: an index into the 'keys' maintained in this config | [
"Use",
"the",
"given",
"API",
"key",
"config",
"specified",
"by",
"keyID",
"during",
"subsequent",
"API",
"calls"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L132-L141 | train | 210,993 |
ns1/ns1-python | ns1/config.py | Config.getKeyConfig | def getKeyConfig(self, keyID=None):
"""
Get key configuration specified by `keyID`, or current keyID.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: a dict of the request (or current) key config
"""
k = keyID if keyID is not None else self._keyID
if not k or k not in self._data['keys']:
raise ConfigException('request key does not exist: %s' % k)
return self._data['keys'][k] | python | def getKeyConfig(self, keyID=None):
"""
Get key configuration specified by `keyID`, or current keyID.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: a dict of the request (or current) key config
"""
k = keyID if keyID is not None else self._keyID
if not k or k not in self._data['keys']:
raise ConfigException('request key does not exist: %s' % k)
return self._data['keys'][k] | [
"def",
"getKeyConfig",
"(",
"self",
",",
"keyID",
"=",
"None",
")",
":",
"k",
"=",
"keyID",
"if",
"keyID",
"is",
"not",
"None",
"else",
"self",
".",
"_keyID",
"if",
"not",
"k",
"or",
"k",
"not",
"in",
"self",
".",
"_data",
"[",
"'keys'",
"]",
":"... | Get key configuration specified by `keyID`, or current keyID.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: a dict of the request (or current) key config | [
"Get",
"key",
"configuration",
"specified",
"by",
"keyID",
"or",
"current",
"keyID",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L151-L161 | train | 210,994 |
ns1/ns1-python | ns1/config.py | Config.isKeyWriteLocked | def isKeyWriteLocked(self, keyID=None):
"""
Determine if a key config is write locked.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: True if the given (or current) keyID is writeLocked
"""
kcfg = self.getKeyConfig(keyID)
return 'writeLock' in kcfg and kcfg['writeLock'] is True | python | def isKeyWriteLocked(self, keyID=None):
"""
Determine if a key config is write locked.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: True if the given (or current) keyID is writeLocked
"""
kcfg = self.getKeyConfig(keyID)
return 'writeLock' in kcfg and kcfg['writeLock'] is True | [
"def",
"isKeyWriteLocked",
"(",
"self",
",",
"keyID",
"=",
"None",
")",
":",
"kcfg",
"=",
"self",
".",
"getKeyConfig",
"(",
"keyID",
")",
"return",
"'writeLock'",
"in",
"kcfg",
"and",
"kcfg",
"[",
"'writeLock'",
"]",
"is",
"True"
] | Determine if a key config is write locked.
:param str keyID: optional keyID to retrieve, or current if not passed
:return: True if the given (or current) keyID is writeLocked | [
"Determine",
"if",
"a",
"key",
"config",
"is",
"write",
"locked",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L163-L171 | train | 210,995 |
ns1/ns1-python | ns1/config.py | Config.getAPIKey | def getAPIKey(self, keyID=None):
"""
Retrieve the NS1 API Key for the given keyID
:param str keyID: optional keyID to retrieve, or current if not passed
:return: API Key for the given keyID
"""
kcfg = self.getKeyConfig(keyID)
if 'key' not in kcfg:
raise ConfigException('invalid config: missing api key')
return kcfg['key'] | python | def getAPIKey(self, keyID=None):
"""
Retrieve the NS1 API Key for the given keyID
:param str keyID: optional keyID to retrieve, or current if not passed
:return: API Key for the given keyID
"""
kcfg = self.getKeyConfig(keyID)
if 'key' not in kcfg:
raise ConfigException('invalid config: missing api key')
return kcfg['key'] | [
"def",
"getAPIKey",
"(",
"self",
",",
"keyID",
"=",
"None",
")",
":",
"kcfg",
"=",
"self",
".",
"getKeyConfig",
"(",
"keyID",
")",
"if",
"'key'",
"not",
"in",
"kcfg",
":",
"raise",
"ConfigException",
"(",
"'invalid config: missing api key'",
")",
"return",
... | Retrieve the NS1 API Key for the given keyID
:param str keyID: optional keyID to retrieve, or current if not passed
:return: API Key for the given keyID | [
"Retrieve",
"the",
"NS1",
"API",
"Key",
"for",
"the",
"given",
"keyID"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L173-L183 | train | 210,996 |
ns1/ns1-python | ns1/config.py | Config.getEndpoint | def getEndpoint(self):
"""
Retrieve the NS1 API Endpoint URL that will be used for requests.
:return: URL of the NS1 API that will be used for requests
"""
port = ''
endpoint = ''
keyConfig = self.getKeyConfig()
if 'port' in keyConfig:
port = ':' + keyConfig['port']
elif self._data['port'] != self.PORT:
port = ':' + self._data['port']
if 'endpoint' in keyConfig:
endpoint = keyConfig['endpoint']
else:
endpoint = self._data['endpoint']
return 'https://%s%s/%s/' % (endpoint,
port,
self._data['api_version']) | python | def getEndpoint(self):
"""
Retrieve the NS1 API Endpoint URL that will be used for requests.
:return: URL of the NS1 API that will be used for requests
"""
port = ''
endpoint = ''
keyConfig = self.getKeyConfig()
if 'port' in keyConfig:
port = ':' + keyConfig['port']
elif self._data['port'] != self.PORT:
port = ':' + self._data['port']
if 'endpoint' in keyConfig:
endpoint = keyConfig['endpoint']
else:
endpoint = self._data['endpoint']
return 'https://%s%s/%s/' % (endpoint,
port,
self._data['api_version']) | [
"def",
"getEndpoint",
"(",
"self",
")",
":",
"port",
"=",
"''",
"endpoint",
"=",
"''",
"keyConfig",
"=",
"self",
".",
"getKeyConfig",
"(",
")",
"if",
"'port'",
"in",
"keyConfig",
":",
"port",
"=",
"':'",
"+",
"keyConfig",
"[",
"'port'",
"]",
"elif",
... | Retrieve the NS1 API Endpoint URL that will be used for requests.
:return: URL of the NS1 API that will be used for requests | [
"Retrieve",
"the",
"NS1",
"API",
"Endpoint",
"URL",
"that",
"will",
"be",
"used",
"for",
"requests",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/config.py#L185-L204 | train | 210,997 |
ns1/ns1-python | ns1/ipam.py | Network.load | def load(self, callback=None, errback=None, reload=False):
"""
Load network data from the API.
"""
if not reload and self.data:
raise NetworkException('Network already loaded')
def success(result, *args):
self.data = result
self.id = result['id']
self.name = result['name']
self.report = self._rest.report(self.id)
if callback:
return callback(self)
else:
return self
if self.id is None:
if self.name is None:
raise NetworkException('Must at least specify an id or name')
else:
self.id = [network for network in self._rest.list()
if network['name'] == self.name][0]['id']
return self._rest.retrieve(self.id, callback=success,
errback=errback) | python | def load(self, callback=None, errback=None, reload=False):
"""
Load network data from the API.
"""
if not reload and self.data:
raise NetworkException('Network already loaded')
def success(result, *args):
self.data = result
self.id = result['id']
self.name = result['name']
self.report = self._rest.report(self.id)
if callback:
return callback(self)
else:
return self
if self.id is None:
if self.name is None:
raise NetworkException('Must at least specify an id or name')
else:
self.id = [network for network in self._rest.list()
if network['name'] == self.name][0]['id']
return self._rest.retrieve(self.id, callback=success,
errback=errback) | [
"def",
"load",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
",",
"reload",
"=",
"False",
")",
":",
"if",
"not",
"reload",
"and",
"self",
".",
"data",
":",
"raise",
"NetworkException",
"(",
"'Network already loaded'",
")",
"def"... | Load network data from the API. | [
"Load",
"network",
"data",
"from",
"the",
"API",
"."
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/ipam.py#L56-L81 | train | 210,998 |
ns1/ns1-python | ns1/ipam.py | Network.delete | def delete(self, callback=None, errback=None):
"""
Delete the Network and all associated addresses
"""
return self._rest.delete(self.id, callback=callback, errback=errback) | python | def delete(self, callback=None, errback=None):
"""
Delete the Network and all associated addresses
"""
return self._rest.delete(self.id, callback=callback, errback=errback) | [
"def",
"delete",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
")",
":",
"return",
"self",
".",
"_rest",
".",
"delete",
"(",
"self",
".",
"id",
",",
"callback",
"=",
"callback",
",",
"errback",
"=",
"errback",
")"
] | Delete the Network and all associated addresses | [
"Delete",
"the",
"Network",
"and",
"all",
"associated",
"addresses"
] | f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e | https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/ipam.py#L83-L87 | train | 210,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.