hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7ffc70a895ff1fedde4da3b9fc978e6ba6f7910 | 8,030 | py | Python | controllers/mpr.py | unimauro/eden | b739d334e6828d0db14b3790f2f5e2666fc83576 | [
"MIT"
] | null | null | null | controllers/mpr.py | unimauro/eden | b739d334e6828d0db14b3790f2f5e2666fc83576 | [
"MIT"
] | null | null | null | controllers/mpr.py | unimauro/eden | b739d334e6828d0db14b3790f2f5e2666fc83576 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Missing Person Registry
@author: nursix
"""
# `module` and `prefix` both hold this controller's name ("mpr")
module = request.controller
prefix = request.controller
resourcename = request.function

# Bail out early if this module has been disabled in the deployment settings
if prefix not in deployment_settings.modules:
    raise HTTP(404, body="Module disabled: %s" % prefix)

# Pre-translated labels for the row action buttons
MISSING = str(T("Missing"))
FOUND = str(T("Found"))
DETAILS = str(T("Details"))

# Build one action-button descriptor, as consumed by response.s3.actions
action = lambda l, u: dict(label=str(l), url=str(u), _class="action-btn")
# -----------------------------------------------------------------------------
def index():
    """
    Home page of the Missing Persons Registry.

    Defaults plain HTML requests to the person search and decorates the
    result list with "Missing"/"Found" report buttons for logged-in users.
    """
    # Resolve the nice name of this module from the deployment settings.
    # NOTE: use the module-level global `module` here -- the original code
    # read `prefix`, but `prefix` is rebound below, which made it a local
    # variable and turned this lookup into an UnboundLocalError that was
    # silently swallowed by a bare except, so the configured name was
    # never actually used.
    try:
        module_name = deployment_settings.modules[module].name_nice
    except (KeyError, AttributeError):
        module_name = T("Missing Persons Registry")

    # This controller operates on the pr_person resource
    prefix = "pr"
    resourcename = "person"
    tablename = "%s_%s" % (prefix, resourcename)
    table = s3db[tablename]

    # After creating a person record, go straight to the missing-report note
    report_url = URL(c="mpr", f=resourcename,
                     args=["[id]", "note"],
                     vars=dict(status="missing"))
    s3mgr.configure(tablename,
                    create_next=report_url,
                    list_fields=["id",
                                 "first_name",
                                 "middle_name",
                                 "last_name",
                                 "picture",
                                 "gender",
                                 "age_group",
                                 "missing"])

    def prep(r):
        """ Pre-processor: default plain HTML requests to the search method """
        if r.representation == "html":
            if not r.id and not r.method:
                r.method = "search"
        else:
            # Other representations are served by the person controller
            redirect(URL(resourcename, args=request.args))
        return True
    response.s3.prep = prep

    def postp(r, output):
        """ Post-processor: attach the row action buttons """
        response.s3.actions = []
        if not r.component:
            open_button_label = DETAILS
            if auth.s3_logged_in():
                mreport = URL(resourcename,
                              args=["[id]", "note", "create"],
                              vars=dict(status="missing"))
                freport = URL(resourcename,
                              args=["[id]", "note", "create"],
                              vars=dict(status="found"))
                response.s3.actions = [action(MISSING, mreport),
                                       action(FOUND, freport)]
                # Is the current user reported missing themselves?
                if isinstance(output, dict):
                    person = s3_logged_in_person()
                    if person and db.pr_person[person].missing:
                        myself = URL(resourcename,
                                     args=[person, "note", "create"],
                                     vars=dict(status="found"))
                        output.update(myself=myself)
        else:
            # UPDATE is a CRUD label provided by the framework environment
            open_button_label = UPDATE
            linkto = URL(resourcename,
                         args=["[id]", "note"])
            response.s3.actions.append(action(open_button_label, linkto))
        return output
    response.s3.postp = postp

    output = s3_rest_controller(prefix, resourcename,
                                module_name=module_name)
    response.view = "mpr/index.html"
    response.title = module_name
    return output
# -----------------------------------------------------------------------------
def person():
    """
    RESTful CRUD controller for missing persons (pr_person).

    Filters the person list to records flagged as missing and wires up the
    journal ("note") component used for missing/find reports, rendering the
    standard person rheader with MPR-specific tabs.
    """
    prefix = "pr"
    tablename = "%s_%s" % (prefix, resourcename)
    table = s3db[tablename]

    # MPR-specific CRUD strings
    s3.crud_strings[tablename].update(
        title_display = T("Missing Person Details"),
        title_list = T("Missing Persons Registry"),
        subtitle_list = T("Missing Persons"),
        label_list_button = T("List Missing Persons"),
        msg_list_empty = T("No Persons currently reported missing"))

    s3mgr.configure("pr_group_membership",
                    list_fields=["id",
                                 "group_id",
                                 "group_head",
                                 "description"
                                ])
    # After creating a person, go straight to adding a missing report
    s3mgr.configure(tablename,
                    create_next = URL(c="mpr", f="person",
                                      args=["[id]", "note", "create"],
                                      vars=dict(status="missing")),
                    list_fields=["id",
                                 "first_name",
                                 "middle_name",
                                 "last_name",
                                 "picture",
                                 "gender",
                                 "age_group",
                                 "missing"
                                ])

    def prep(r):
        """ Pre-processor: apply the missing-filter and prepare components """
        if r.interactive and not r.id:
            # Only list those who are currently reported missing
            r.resource.add_filter(db.pr_person.missing == True)
        if r.component_name == "config":
            # Pre-populate map-config fields from the site default (id=1)
            _config = s3db.gis_config
            defaults = db(_config.id == 1).select(limitby=(0, 1)).first()
            for key in defaults.keys():
                if key not in ["id",
                               "uuid",
                               "mci",
                               "update_record",
                               "delete_record"]:
                    _config[key].default = defaults[key]
        elif r.component_name == "note":
            ntable = db.pr_note
            status = r.vars.get("status", None)
            if status:
                if status == "missing":
                    # Missing report: note status fixed to 1
                    ntable.status.default = 1
                    ntable.status.writable = False
                    ntable.timestmp.label = T("Date/Time when last seen")
                    ntable.note_text.label = T("Circumstances of disappearance")
                    # Wrapped in T() for translation, consistent with the
                    # other CRUD strings (these were plain untranslated strings)
                    s3.crud_strings[str(ntable)].update(
                        title_create = T("Add Missing Report"),
                        subtitle_create = T("Add Missing Report"))
                elif status == "found":
                    # Find report: note status fixed to 2
                    ntable.status.default = 2
                    ntable.status.writable = False
                    ntable.timestmp.label = T("Date/Time when found")
                    ntable.note_text.label = T("Comments")
                    s3.crud_strings[str(ntable)].update(
                        title_create = T("Add Find Report"),
                        subtitle_create = T("Add Find Report"))
                else:
                    # Unknown status: fall back to a free-form, editable note
                    ntable.status.default = 99
                    ntable.status.writable = True
        return True
    response.s3.prep = prep

    def postp(r, output):
        """ Post-processor: attach the row action buttons """
        if r.interactive:
            if not r.component:
                label = READ
                linkto = URL(f="person",
                             args=("[id]", "note"))
            else:
                label = UPDATE
                linkto = r.resource.crud._linkto(r)("[id]")
            response.s3.actions = [action(label, linkto)]
            if not r.component:
                label = FOUND
                linkto = URL(f="person",
                             args=("[id]", "note", "create"),
                             vars=dict(status="found"))
                response.s3.actions.append(action(label, linkto))
        return output
    response.s3.postp = postp

    # Field visibility tweaks for the MPR context
    ptable = db.pr_person
    ptable.missing.default = True
    ptable.missing.readable = False
    ptable.missing.writable = False
    ptable.pe_label.readable = False
    ptable.pe_label.writable = False
    ptable.occupation.readable = False
    ptable.occupation.writable = False

    mpr_tabs = [(T("Person Details"), None),
                (T("Physical Description"), "physical_description"),
                (T("Images"), "image"),
                (T("Identity"), "identity"),
                (T("Address"), "address"),
                (T("Contact Data"), "contact"),
                (T("Journal"), "note")]
    rheader = lambda r: s3db.pr_rheader(r, tabs=mpr_tabs)

    output = s3_rest_controller("pr", resourcename, rheader=rheader)
    return output
# -----------------------------------------------------------------------------
| 37.699531 | 80 | 0.456787 |
# `module` and `prefix` both hold this controller's name ("mpr")
module = request.controller
prefix = request.controller
resourcename = request.function

# Bail out early if this module has been disabled in the deployment settings
if prefix not in deployment_settings.modules:
    raise HTTP(404, body="Module disabled: %s" % prefix)

# Pre-translated labels for the row action buttons
MISSING = str(T("Missing"))
FOUND = str(T("Found"))
DETAILS = str(T("Details"))

# Build one action-button descriptor, as consumed by response.s3.actions
action = lambda l, u: dict(label=str(l), url=str(u), _class="action-btn")
def index():
    """
    Home page of the Missing Persons Registry.

    Defaults plain HTML requests to the person search and decorates the
    result list with "Missing"/"Found" report buttons for logged-in users.
    """
    # Resolve the nice name of this module from the deployment settings.
    # NOTE: use the module-level global `module` here -- the original code
    # read `prefix`, but `prefix` is rebound below, which made it a local
    # variable and turned this lookup into an UnboundLocalError that was
    # silently swallowed by a bare except, so the configured name was
    # never actually used.
    try:
        module_name = deployment_settings.modules[module].name_nice
    except (KeyError, AttributeError):
        module_name = T("Missing Persons Registry")

    # This controller operates on the pr_person resource
    prefix = "pr"
    resourcename = "person"
    tablename = "%s_%s" % (prefix, resourcename)
    table = s3db[tablename]

    # After creating a person record, go straight to the missing-report note
    report_url = URL(c="mpr", f=resourcename,
                     args=["[id]", "note"],
                     vars=dict(status="missing"))
    s3mgr.configure(tablename,
                    create_next=report_url,
                    list_fields=["id",
                                 "first_name",
                                 "middle_name",
                                 "last_name",
                                 "picture",
                                 "gender",
                                 "age_group",
                                 "missing"])

    def prep(r):
        """ Pre-processor: default plain HTML requests to the search method """
        if r.representation == "html":
            if not r.id and not r.method:
                r.method = "search"
        else:
            # Other representations are served by the person controller
            redirect(URL(resourcename, args=request.args))
        return True
    response.s3.prep = prep

    def postp(r, output):
        """ Post-processor: attach the row action buttons """
        response.s3.actions = []
        if not r.component:
            open_button_label = DETAILS
            if auth.s3_logged_in():
                mreport = URL(resourcename,
                              args=["[id]", "note", "create"],
                              vars=dict(status="missing"))
                freport = URL(resourcename,
                              args=["[id]", "note", "create"],
                              vars=dict(status="found"))
                response.s3.actions = [action(MISSING, mreport),
                                       action(FOUND, freport)]
                # Is the current user reported missing themselves?
                if isinstance(output, dict):
                    person = s3_logged_in_person()
                    if person and db.pr_person[person].missing:
                        myself = URL(resourcename,
                                     args=[person, "note", "create"],
                                     vars=dict(status="found"))
                        output.update(myself=myself)
        else:
            # UPDATE is a CRUD label provided by the framework environment
            open_button_label = UPDATE
            linkto = URL(resourcename,
                         args=["[id]", "note"])
            response.s3.actions.append(action(open_button_label, linkto))
        return output
    response.s3.postp = postp

    output = s3_rest_controller(prefix, resourcename,
                                module_name=module_name)
    response.view = "mpr/index.html"
    response.title = module_name
    return output
def person():
    """
    RESTful CRUD controller for missing persons (pr_person).

    Filters the person list to records flagged as missing and wires up the
    journal ("note") component used for missing/find reports, rendering the
    standard person rheader with MPR-specific tabs.
    """
    prefix = "pr"
    tablename = "%s_%s" % (prefix, resourcename)
    table = s3db[tablename]

    # MPR-specific CRUD strings
    s3.crud_strings[tablename].update(
        title_display = T("Missing Person Details"),
        title_list = T("Missing Persons Registry"),
        subtitle_list = T("Missing Persons"),
        label_list_button = T("List Missing Persons"),
        msg_list_empty = T("No Persons currently reported missing"))

    s3mgr.configure("pr_group_membership",
                    list_fields=["id",
                                 "group_id",
                                 "group_head",
                                 "description"
                                ])
    # After creating a person, go straight to adding a missing report
    s3mgr.configure(tablename,
                    create_next = URL(c="mpr", f="person",
                                      args=["[id]", "note", "create"],
                                      vars=dict(status="missing")),
                    list_fields=["id",
                                 "first_name",
                                 "middle_name",
                                 "last_name",
                                 "picture",
                                 "gender",
                                 "age_group",
                                 "missing"
                                ])

    def prep(r):
        """ Pre-processor: apply the missing-filter and prepare components """
        if r.interactive and not r.id:
            # Only list those who are currently reported missing
            r.resource.add_filter(db.pr_person.missing == True)
        if r.component_name == "config":
            # Pre-populate map-config fields from the site default (id=1)
            _config = s3db.gis_config
            defaults = db(_config.id == 1).select(limitby=(0, 1)).first()
            for key in defaults.keys():
                if key not in ["id",
                               "uuid",
                               "mci",
                               "update_record",
                               "delete_record"]:
                    _config[key].default = defaults[key]
        elif r.component_name == "note":
            ntable = db.pr_note
            status = r.vars.get("status", None)
            if status:
                if status == "missing":
                    # Missing report: note status fixed to 1
                    ntable.status.default = 1
                    ntable.status.writable = False
                    ntable.timestmp.label = T("Date/Time when last seen")
                    ntable.note_text.label = T("Circumstances of disappearance")
                    # Wrapped in T() for translation, consistent with the
                    # other CRUD strings (these were plain untranslated strings)
                    s3.crud_strings[str(ntable)].update(
                        title_create = T("Add Missing Report"),
                        subtitle_create = T("Add Missing Report"))
                elif status == "found":
                    # Find report: note status fixed to 2
                    ntable.status.default = 2
                    ntable.status.writable = False
                    ntable.timestmp.label = T("Date/Time when found")
                    ntable.note_text.label = T("Comments")
                    s3.crud_strings[str(ntable)].update(
                        title_create = T("Add Find Report"),
                        subtitle_create = T("Add Find Report"))
                else:
                    # Unknown status: fall back to a free-form, editable note
                    ntable.status.default = 99
                    ntable.status.writable = True
        return True
    response.s3.prep = prep

    def postp(r, output):
        """ Post-processor: attach the row action buttons """
        if r.interactive:
            if not r.component:
                label = READ
                linkto = URL(f="person",
                             args=("[id]", "note"))
            else:
                label = UPDATE
                linkto = r.resource.crud._linkto(r)("[id]")
            response.s3.actions = [action(label, linkto)]
            if not r.component:
                label = FOUND
                linkto = URL(f="person",
                             args=("[id]", "note", "create"),
                             vars=dict(status="found"))
                response.s3.actions.append(action(label, linkto))
        return output
    response.s3.postp = postp

    # Field visibility tweaks for the MPR context
    ptable = db.pr_person
    ptable.missing.default = True
    ptable.missing.readable = False
    ptable.missing.writable = False
    ptable.pe_label.readable = False
    ptable.pe_label.writable = False
    ptable.occupation.readable = False
    ptable.occupation.writable = False

    mpr_tabs = [(T("Person Details"), None),
                (T("Physical Description"), "physical_description"),
                (T("Images"), "image"),
                (T("Identity"), "identity"),
                (T("Address"), "address"),
                (T("Contact Data"), "contact"),
                (T("Journal"), "note")]
    rheader = lambda r: s3db.pr_rheader(r, tabs=mpr_tabs)

    output = s3_rest_controller("pr", resourcename, rheader=rheader)
    return output
| true | true |
f7ffc903e9f54175818bbd77845748fe3151ac64 | 15,055 | py | Python | jina/types/document/graph.py | ezhaohongwei/jina | 9769f2e35eb8a196304a145409f959a7beac0432 | [
"Apache-2.0"
] | 1 | 2021-06-14T00:35:14.000Z | 2021-06-14T00:35:14.000Z | jina/types/document/graph.py | ezhaohongwei/jina | 9769f2e35eb8a196304a145409f959a7beac0432 | [
"Apache-2.0"
] | null | null | null | jina/types/document/graph.py | ezhaohongwei/jina | 9769f2e35eb8a196304a145409f959a7beac0432 | [
"Apache-2.0"
] | null | null | null | from typing import Optional, Iterator, Tuple, Dict, Iterable
import numpy as np
from . import Document, DocumentSourceType
from ..arrays import ChunkArray
from ..struct import StructView
from ..ndarray.sparse.scipy import SparseNdArray
from ...importer import ImportExtensions
from ...logging.predefined import default_logger
# Public API of this module
__all__ = ['GraphDocument']

# Type-checking-only imports: `if False` prevents importing the optional
# heavy dependencies (scipy, dgl) at runtime while keeping the names
# available to static analyzers for the quoted type annotations below.
if False:
    from scipy.sparse import coo_matrix
    from dgl import DGLGraph
class GraphDocument(Document):
    """
    :class:`GraphDocument` is a data type created based on the Jina primitive data type :class:`Document`.

    It adds functionality that lets you work with a `Document` as a `directed graph` where all its
    chunks are the nodes in the `graph`.

    It exposes functionality to access and manipulate `graph related info` from the `DocumentProto`
    such as adjacency and edge features.

    .. warning::
        - It assumes that every ``chunk`` of a ``document`` is a node of a graph.

    :param document: the document to construct from. If ``bytes`` is given
            then deserialize a :class:`DocumentProto`; ``dict`` is given then
            parse a :class:`DocumentProto` from it; ``str`` is given, then consider
            it as a JSON string and parse a :class:`DocumentProto` from it; finally,
            one can also give `DocumentProto` directly, then depending on the ``copy``,
            it builds a view or a copy from it.
    :param copy: when ``document`` is given as a :class:`DocumentProto` object, build a
            view (i.e. weak reference) from it or a deep copy from it.
    :param kwargs: further key value arguments
    """

    def __init__(
        self,
        document: Optional[DocumentSourceType] = None,
        copy: bool = False,
        **kwargs,
    ):
        self._check_installed_array_packages()
        super().__init__(document=document, copy=copy, **kwargs)
        # Cache mapping node (chunk) id -> offset in the node list.
        # NOTE(review): Document is stateless, so this cache can get out of
        # sync if the underlying proto is modified through another view --
        # prefer working directly with the proto where possible.
        self._node_id_to_offset = {
            node.id: offset for offset, node in enumerate(self.nodes)
        }

    @staticmethod
    def _check_installed_array_packages():
        """ Ensure scipy is importable (needed for sparse adjacency support) """
        from ... import JINA_GLOBAL

        if JINA_GLOBAL.scipy_installed is None:
            JINA_GLOBAL.scipy_installed = False
            with ImportExtensions(
                required=True,
                pkg_name='scipy',
                help_text=f'GraphDocument requires scipy to be installed for sparse matrix support.',
            ):
                import scipy

                JINA_GLOBAL.scipy_installed = True

    def add_node(self, node: 'Document'):
        """
        Add a node to the graph

        :param node: the node to be added to the graph
        """
        if node.id in self._node_id_to_offset:
            default_logger.debug(f'Document {node.id} is already a node of the graph')
            return
        self._node_id_to_offset[node.id] = len(self.nodes)
        self.nodes.append(node)

    def remove_node(self, node: 'Document'):
        """
        Remove a node from the graph along with the edges that may contain it

        :param node: the node to be removed from the graph
        """
        from scipy.sparse import coo_matrix

        if node.id not in self._node_id_to_offset:
            default_logger.debug(
                f'Trying to remove document {node.id} from the graph while is not a node of the graph'
            )
            return
        offset = self._node_id_to_offset[node.id]

        if self.num_edges > 0:
            # Collect every edge touching this node first, then remove them
            # in reverse order so the remaining edge ids stay valid.
            edges_to_remove = []
            for edge_id, (row, col) in enumerate(
                zip(self.adjacency.row, self.adjacency.col)
            ):
                if row.item() == offset or col.item() == offset:
                    # The key must match the one written by `add_edge`
                    # ('{source_id}-{target_id}'); the original code was
                    # missing `.id` on the target node, so the feature entry
                    # was never found and leaked.
                    edge_features_keys = (
                        f'{self.nodes[row.item()].id}-{self.nodes[col.item()].id}'
                    )
                    edges_to_remove.append((edge_id, edge_features_keys))

            for edge_id, edge_features_key in reversed(edges_to_remove):
                self._remove_edge_id(edge_id, edge_features_key)

        if self.num_edges > 0:
            # Shift the offsets of every node that came after the removed one
            row = np.copy(self.adjacency.row)
            col = np.copy(self.adjacency.col)
            data = np.copy(self.adjacency.data)
            for i in range(self.num_edges):
                if self.adjacency.row[i] > offset:
                    row[i] = row[i] - 1
                if self.adjacency.col[i] > offset:
                    col[i] = col[i] - 1
            SparseNdArray(
                self._pb_body.graph.adjacency, sp_format='coo'
            ).value = coo_matrix((data, (row, col)))

        del self.nodes[offset]
        self._node_id_to_offset = {
            node.id: offset for offset, node in enumerate(self.nodes)
        }

    def add_edge(
        self, doc1: 'Document', doc2: 'Document', features: Optional[Dict] = None
    ):
        """
        Add an edge to the graph connecting `doc1` with `doc2`

        :param doc1: the starting node for this edge
        :param doc2: the ending node for this edge
        :param features: Optional features dictionary to be added to this new created edge
        """
        from scipy.sparse import coo_matrix

        self.add_node(doc1)
        self.add_node(doc2)
        current_adjacency = self.adjacency
        doc1_node_offset = self._node_id_to_offset[doc1.id]
        doc2_node_offset = self._node_id_to_offset[doc2.id]
        # Append the new edge to the COO triplets (edge weight is always 1)
        row = (
            np.append(current_adjacency.row, doc1_node_offset)
            if current_adjacency is not None
            else np.array([doc1_node_offset])
        )
        col = (
            np.append(current_adjacency.col, doc2_node_offset)
            if current_adjacency is not None
            else np.array([doc2_node_offset])
        )
        data = (
            np.append(current_adjacency.data, 1)
            if current_adjacency is not None
            else np.array([1])
        )
        SparseNdArray(
            self._pb_body.graph.adjacency, sp_format='coo'
        ).value = coo_matrix((data, (row, col)))
        if features is not None:
            self.edge_features[f'{doc1.id}-{doc2.id}'] = features

    def _remove_edge_id(self, edge_id: int, edge_feature_key: str):
        """
        Remove the edge at position `edge_id` in the edge list, plus its features

        :param edge_id: 0-based index of the edge in the COO arrays
        :param edge_feature_key: key of the edge in `edge_features`
        """
        from scipy.sparse import coo_matrix

        if self.adjacency is not None:
            # Valid ids are 0 .. num_edges - 1 (the original check used `>`,
            # letting `edge_id == num_edges` slip through to np.delete)
            if edge_id >= self.num_edges:
                raise Exception(
                    f'Trying to remove edge {edge_id} while number of edges is {self.num_edges}'
                )
            row = np.delete(self.adjacency.row, edge_id)
            col = np.delete(self.adjacency.col, edge_id)
            data = np.delete(self.adjacency.data, edge_id)
            if row.shape[0] > 0:
                SparseNdArray(
                    self._pb_body.graph.adjacency, sp_format='coo'
                ).value = coo_matrix((data, (row, col)))
            else:
                # No edges left: store an empty matrix
                SparseNdArray(
                    self._pb_body.graph.adjacency, sp_format='coo'
                ).value = coo_matrix((0, 0))
            if edge_feature_key in self.edge_features:
                del self.edge_features[edge_feature_key]

    def remove_edge(self, doc1: 'Document', doc2: 'Document'):
        """
        Remove the edge(s) connecting `doc1` with `doc2`

        :param doc1: the starting node for this edge
        :param doc2: the ending node for this edge
        """
        offset1 = self._node_id_to_offset[doc1.id]
        offset2 = self._node_id_to_offset[doc2.id]
        # NOTE(review): the edge ids enumerated here refer to the adjacency
        # as it was before any removal; with duplicate edges between the
        # same pair of nodes the later ids would be stale after the first
        # removal -- confirm duplicates cannot occur before relying on this.
        for edge_id, (row, col) in enumerate(
            zip(self.adjacency.row, self.adjacency.col)
        ):
            if row.item() == offset1 and col.item() == offset2:
                self._remove_edge_id(edge_id, f'{doc1.id}-{doc2.id}')

    @property
    def edge_features(self):
        """
        The dictionary of edge features, keyed by '{source_id}-{target_id}'

        .. # noqa: DAR201
        """
        return StructView(self._pb_body.graph.edge_features)

    @property
    def adjacency(self):
        """
        The adjacency matrix (scipy COO) for this graph

        .. # noqa: DAR201
        """
        return SparseNdArray(self._pb_body.graph.adjacency, sp_format='coo').value

    @property
    def num_nodes(self) -> int:
        """
        The number of nodes in the graph

        .. # noqa: DAR201
        """
        return len(self.nodes)

    @property
    def num_edges(self) -> int:
        """
        The number of edges in the graph

        .. # noqa: DAR201
        """
        adjacency = self.adjacency
        return adjacency.data.shape[0] if adjacency is not None else 0

    @property
    def nodes(self):
        """
        The nodes list for this graph (alias for `chunks`)

        .. # noqa: DAR201
        """
        return self.chunks

    def get_out_degree(self, doc: 'Document') -> int:
        """
        The out degree of the doc node

        .. # noqa: DAR201
        :param doc: the document node from which to extract the outdegree.
        """
        out_edges = self.get_outgoing_nodes(doc)
        return len(out_edges) if out_edges else 0

    def get_in_degree(self, doc: 'Document') -> int:
        """
        The in degree of the doc node

        .. # noqa: DAR201
        :param doc: the document node from which to extract the indegree.
        """
        in_edges = self.get_incoming_nodes(doc)
        return len(in_edges) if in_edges else 0

    @nodes.setter
    def nodes(self, value: Iterable['Document']):
        """Set all nodes of the current document.

        :param value: the array of nodes of this document
        """
        self.chunks = value

    def get_outgoing_nodes(self, doc: 'Document') -> Optional[ChunkArray]:
        """
        Get all the nodes reachable from `doc` via an outgoing edge

        .. # noqa: DAR201
        :param doc: the document node from which to extract the outgoing nodes.
        """
        if self.adjacency is not None and doc.id in self._node_id_to_offset:
            offset = self._node_id_to_offset[doc.id]
            return ChunkArray(
                [
                    self.nodes[col.item()]
                    for (row, col) in zip(self.adjacency.row, self.adjacency.col)
                    if row.item() == offset
                ],
                reference_doc=self,
            )

    def get_incoming_nodes(self, doc: 'Document') -> Optional[ChunkArray]:
        """
        Get all the nodes with an edge pointing at `doc`

        .. # noqa: DAR201
        :param doc: the document node from which to extract the incoming nodes.
        """
        if self.adjacency is not None and doc.id in self._node_id_to_offset:
            offset = self._node_id_to_offset[doc.id]
            return ChunkArray(
                [
                    self.nodes[row.item()]
                    for (row, col) in zip(self.adjacency.row, self.adjacency.col)
                    if col.item() == offset
                ],
                reference_doc=self,
            )

    @staticmethod
    def load_from_dgl_graph(dgl_graph: 'DGLGraph') -> 'GraphDocument':
        """
        Construct a GraphDocument from a graph of type `DGLGraph`

        .. # noqa: DAR201
        :param dgl_graph: the graph from which to construct a `GraphDocument`.

        .. warning::
            - This method only deals with the graph structure (nodes and connectivity);
              graph features that are task specific are ignored.
        """
        jina_graph = GraphDocument()
        nodeid_to_doc = {}
        for node in dgl_graph.nodes():
            node_doc = Document()
            nodeid_to_doc[int(node)] = node_doc
            jina_graph.add_node(node_doc)

        for node_source, node_destination in zip(*dgl_graph.edges()):
            jina_graph.add_edge(
                nodeid_to_doc[int(node_source)], nodeid_to_doc[int(node_destination)]
            )
        return jina_graph

    def to_dgl_graph(self) -> 'DGLGraph':
        """
        Construct a `dgl.DGLGraph` from this `GraphDocument` instance.

        .. warning::
            - This method only deals with the graph structure (nodes and connectivity);
              graph features that are task specific are ignored.

        .. # noqa: DAR201
        """
        from ... import JINA_GLOBAL

        if JINA_GLOBAL.dgl_installed is None:
            JINA_GLOBAL.dgl_installed = False
            with ImportExtensions(
                required=True,
                pkg_name='dgl',
                help_text=f'to_dgl_graph method requires dgl to be installed',
            ):
                import dgl

                JINA_GLOBAL.dgl_installed = True

        if JINA_GLOBAL.torch_installed is None:
            JINA_GLOBAL.torch_installed = False
            with ImportExtensions(
                required=True,
                pkg_name='torch',
                help_text=f'to_dgl_graph method requires torch to be installed',
            ):
                import torch

                JINA_GLOBAL.torch_installed = True

        import torch
        import dgl

        if self.adjacency is None:
            # Edgeless graph: only the nodes can be transferred
            # (debug message rewritten; the original was garbled by a
            # line continuation inside the f-string)
            default_logger.debug(
                f'Trying to convert GraphDocument.id = {self.id} to dgl graph '
                f'without adjacency matrix'
            )
            dgl_graph = dgl.DGLGraph()
            dgl_graph.add_nodes(self.num_nodes)
            return dgl_graph
        else:
            source_nodes = torch.tensor(self.adjacency.row.copy())
            destination_nodes = torch.tensor(self.adjacency.col.copy())
            return dgl.graph((source_nodes, destination_nodes))

    def __iter__(self) -> Iterator[Tuple['Document', 'Document']]:
        """ Iterate over the edges as `(source_node, target_node)` tuples """
        if self.adjacency is not None:
            for (row, col) in zip(self.adjacency.row, self.adjacency.col):
                yield self.nodes[row.item()], self.nodes[col.item()]
        else:
            default_logger.debug(f'Trying to iterate over a graph without edges')

    def __mermaid_str__(self):
        """ Render the graph (nodes and edges) in mermaid syntax """
        if len(self.nodes) == 0:
            return super().__mermaid_str__()
        results = []
        printed_ids = set()

        _node_id_node_mermaid_id = {}
        for node in self.nodes:
            _node_id_node_mermaid_id[node.id] = node._mermaid_id

        for in_node, out_node in self:
            in_node_mermaid_id = _node_id_node_mermaid_id[in_node.id]
            if in_node_mermaid_id not in printed_ids:
                in_node._mermaid_id = in_node_mermaid_id
                printed_ids.add(in_node_mermaid_id)
                results.append(in_node.__mermaid_str__())
            out_node_mermaid_id = _node_id_node_mermaid_id[out_node.id]
            if out_node_mermaid_id not in printed_ids:
                out_node._mermaid_id = out_node_mermaid_id
                printed_ids.add(out_node_mermaid_id)
                results.append(out_node.__mermaid_str__())
            results.append(f'{in_node_mermaid_id[:3]} --> {out_node_mermaid_id[:3]}')
        return '\n'.join(results)
| 35.09324 | 136 | 0.583527 | from typing import Optional, Iterator, Tuple, Dict, Iterable
import numpy as np
from . import Document, DocumentSourceType
from ..arrays import ChunkArray
from ..struct import StructView
from ..ndarray.sparse.scipy import SparseNdArray
from ...importer import ImportExtensions
from ...logging.predefined import default_logger
# Public API of this module
__all__ = ['GraphDocument']

# Type-checking-only imports: `if False` prevents importing the optional
# heavy dependencies (scipy, dgl) at runtime while keeping the names
# available to static analyzers for the quoted type annotations below.
if False:
    from scipy.sparse import coo_matrix
    from dgl import DGLGraph
class GraphDocument(Document):
def __init__(
self,
document: Optional[DocumentSourceType] = None,
copy: bool = False,
**kwargs,
):
self._check_installed_array_packages()
super().__init__(document=document, copy=copy, **kwargs)
self._node_id_to_offset = {
node.id: offset for offset, node in enumerate(self.nodes)
}
@staticmethod
def _check_installed_array_packages():
from ... import JINA_GLOBAL
if JINA_GLOBAL.scipy_installed is None:
JINA_GLOBAL.scipy_installed = False
with ImportExtensions(
required=True,
pkg_name='scipy',
help_text=f'GraphDocument requires scipy to be installed for sparse matrix support.',
):
import scipy
JINA_GLOBAL.scipy_installed = True
def add_node(self, node: 'Document'):
if node.id in self._node_id_to_offset:
default_logger.debug(f'Document {node.id} is already a node of the graph')
return
self._node_id_to_offset[node.id] = len(self.nodes)
self.nodes.append(node)
def remove_node(self, node: 'Document'):
from scipy.sparse import coo_matrix
if node.id not in self._node_id_to_offset:
default_logger.debug(
f'Trying to remove document {node.id} from the graph while is not a node of the graph'
)
return
offset = self._node_id_to_offset[node.id]
if self.num_edges > 0:
edges_to_remove = []
for edge_id, (row, col) in enumerate(
zip(self.adjacency.row, self.adjacency.col)
):
if row.item() == offset or col.item() == offset:
edge_features_keys = (
f'{self.nodes[row.item()].id}-{self.nodes[col.item()]}'
)
edges_to_remove.append((edge_id, edge_features_keys))
for edge_id, edge_features_key in reversed(edges_to_remove):
self._remove_edge_id(edge_id, edge_features_key)
if self.num_edges > 0:
row = np.copy(self.adjacency.row)
col = np.copy(self.adjacency.col)
data = np.copy(self.adjacency.data)
for i in range(self.num_edges):
if self.adjacency.row[i] > offset:
row[i] = row[i] - 1
if self.adjacency.col[i] > offset:
col[i] = col[i] - 1
SparseNdArray(
self._pb_body.graph.adjacency, sp_format='coo'
).value = coo_matrix((data, (row, col)))
del self.nodes[offset]
self._node_id_to_offset = {
node.id: offset for offset, node in enumerate(self.nodes)
}
def add_edge(
self, doc1: 'Document', doc2: 'Document', features: Optional[Dict] = None
):
from scipy.sparse import coo_matrix
self.add_node(doc1)
self.add_node(doc2)
current_adjacency = self.adjacency
doc1_node_offset = self._node_id_to_offset[doc1.id]
doc2_node_offset = self._node_id_to_offset[doc2.id]
row = (
np.append(current_adjacency.row, doc1_node_offset)
if current_adjacency is not None
else np.array([doc1_node_offset])
)
col = (
np.append(current_adjacency.col, doc2_node_offset)
if current_adjacency is not None
else np.array([doc2_node_offset])
)
data = (
np.append(current_adjacency.data, 1)
if current_adjacency is not None
else np.array([1])
)
SparseNdArray(
self._pb_body.graph.adjacency, sp_format='coo'
).value = coo_matrix((data, (row, col)))
if features is not None:
self.edge_features[f'{doc1.id}-{doc2.id}'] = features
def _remove_edge_id(self, edge_id: int, edge_feature_key: str):
from scipy.sparse import coo_matrix
if self.adjacency is not None:
if edge_id > self.num_edges:
raise Exception(
f'Trying to remove edge {edge_id} while number of edges is {self.num_edges}'
)
row = np.delete(self.adjacency.row, edge_id)
col = np.delete(self.adjacency.col, edge_id)
data = np.delete(self.adjacency.data, edge_id)
if row.shape[0] > 0:
SparseNdArray(
self._pb_body.graph.adjacency, sp_format='coo'
).value = coo_matrix((data, (row, col)))
else:
SparseNdArray(
self._pb_body.graph.adjacency, sp_format='coo'
).value = coo_matrix((0, 0))
if edge_feature_key in self.edge_features:
del self.edge_features[edge_feature_key]
def remove_edge(self, doc1: 'Document', doc2: 'Document'):
offset1 = self._node_id_to_offset[doc1.id]
offset2 = self._node_id_to_offset[doc2.id]
for edge_id, (row, col) in enumerate(
zip(self.adjacency.row, self.adjacency.col)
):
if row.item() == offset1 and col.item() == offset2:
self._remove_edge_id(edge_id, f'{doc1.id}-{doc2.id}')
@property
def edge_features(self):
return StructView(self._pb_body.graph.edge_features)
@property
def adjacency(self):
return SparseNdArray(self._pb_body.graph.adjacency, sp_format='coo').value
@property
def num_nodes(self) -> int:
return len(self.nodes)
@property
def num_edges(self) -> int:
adjacency = self.adjacency
return adjacency.data.shape[0] if adjacency is not None else 0
@property
def nodes(self):
return self.chunks
def get_out_degree(self, doc: 'Document') -> int:
out_edges = self.get_outgoing_nodes(doc)
return len(out_edges) if out_edges else 0
def get_in_degree(self, doc: 'Document') -> int:
in_edges = self.get_incoming_nodes(doc)
return len(in_edges) if in_edges else 0
@nodes.setter
def nodes(self, value: Iterable['Document']):
self.chunks = value
def get_outgoing_nodes(self, doc: 'Document') -> Optional[ChunkArray]:
if self.adjacency is not None and doc.id in self._node_id_to_offset:
offset = self._node_id_to_offset[doc.id]
return ChunkArray(
[
self.nodes[col.item()]
for (row, col) in zip(self.adjacency.row, self.adjacency.col)
if row.item() == offset
],
reference_doc=self,
)
def get_incoming_nodes(self, doc: 'Document') -> Optional[ChunkArray]:
if self.adjacency is not None and doc.id in self._node_id_to_offset:
offset = self._node_id_to_offset[doc.id]
return ChunkArray(
[
self.nodes[row.item()]
for (row, col) in zip(self.adjacency.row, self.adjacency.col)
if col.item() == offset
],
reference_doc=self,
)
@staticmethod
def load_from_dgl_graph(dgl_graph: 'DGLGraph') -> 'GraphDocument':
jina_graph = GraphDocument()
nodeid_to_doc = {}
for node in dgl_graph.nodes():
node_doc = Document()
nodeid_to_doc[int(node)] = node_doc
jina_graph.add_node(node_doc)
for node_source, node_destination in zip(*dgl_graph.edges()):
jina_graph.add_edge(
nodeid_to_doc[int(node_source)], nodeid_to_doc[int(node_destination)]
)
return jina_graph
def to_dgl_graph(self) -> 'DGLGraph':
from ... import JINA_GLOBAL
if JINA_GLOBAL.dgl_installed is None:
JINA_GLOBAL.dgl_installed = False
with ImportExtensions(
required=True,
pkg_name='dgl',
help_text=f'to_dgl_graph method requires dgl to be installed',
):
import dgl
JINA_GLOBAL.dgl_installed = True
if JINA_GLOBAL.torch_installed is None:
JINA_GLOBAL.torch_installed = False
with ImportExtensions(
required=True,
pkg_name='torch',
help_text=f'to_dgl_graph method requires torch to be installed',
):
import torch
JINA_GLOBAL.torch_installed = True
import torch
import dgl
if self.adjacency is None:
default_logger.debug(
f'Trying to convert to dgl graph without \
for GraphDocument.id = {self.id} without adjacency matrix'
)
dgl_graph = dgl.DGLGraph()
dgl_graph.add_nodes(self.num_nodes)
return dgl_graph
else:
source_nodes = torch.tensor(self.adjacency.row.copy())
destination_nodes = torch.tensor(self.adjacency.col.copy())
return dgl.graph((source_nodes, destination_nodes))
def __iter__(self) -> Iterator[Tuple['Document']]:
if self.adjacency is not None:
for (row, col) in zip(self.adjacency.row, self.adjacency.col):
yield self.nodes[row.item()], self.nodes[col.item()]
else:
default_logger.debug(f'Trying to iterate over a graph without edges')
    def __mermaid_str__(self):
        """Render this graph as mermaid flowchart markup.

        Emits each node's own mermaid block once, followed by one
        ``A --> B`` line per edge. Falls back to the parent Document
        rendering when the graph has no nodes.
        """
        if len(self.nodes) == 0:
            return super().__mermaid_str__()
        results = []
        printed_ids = set()
        # Snapshot every node's mermaid id up front so edge lines below use a
        # consistent id even across repeated visits of the same node.
        _node_id_node_mermaid_id = {}
        for node in self.nodes:
            _node_id_node_mermaid_id[node.id] = node._mermaid_id
        for in_node, out_node in self:
            in_node_mermaid_id = _node_id_node_mermaid_id[in_node.id]
            if in_node_mermaid_id not in printed_ids:
                # NOTE(review): re-assigning the same id looks redundant —
                # presumably it guards against __mermaid_str__ regenerating
                # the id on render; confirm against Document's implementation.
                in_node._mermaid_id = in_node_mermaid_id
                printed_ids.add(in_node_mermaid_id)
                results.append(in_node.__mermaid_str__())
            out_node_mermaid_id = _node_id_node_mermaid_id[out_node.id]
            if out_node_mermaid_id not in printed_ids:
                out_node._mermaid_id = out_node_mermaid_id
                printed_ids.add(out_node_mermaid_id)
                results.append(out_node.__mermaid_str__())
            # Edge line; mermaid labels use the first 3 characters of the id.
            results.append(f'{in_node_mermaid_id[:3]} --> {out_node_mermaid_id[:3]}')
        return '\n'.join(results)
| true | true |
f7ffca35834bcc4fdd7810febd9a45485379d3e3 | 16,354 | py | Python | waferscreen/inst_control/Keysight_USB_VNA.py | chw3k5/WaferScreen | c0ca7fe939fe7cd0b722b7d6129b148c03a7505c | [
"Apache-2.0"
] | 1 | 2021-07-30T19:06:07.000Z | 2021-07-30T19:06:07.000Z | waferscreen/inst_control/Keysight_USB_VNA.py | chw3k5/WaferScreen | c0ca7fe939fe7cd0b722b7d6129b148c03a7505c | [
"Apache-2.0"
] | 8 | 2021-04-22T20:47:48.000Z | 2021-07-30T19:06:01.000Z | waferscreen/inst_control/Keysight_USB_VNA.py | chw3k5/WaferScreen | c0ca7fe939fe7cd0b722b7d6129b148c03a7505c | [
"Apache-2.0"
] | null | null | null | import visa
import math
import numpy as np
import time
class USBVNA():
    """ Keysight USB VNA instrument class. """
    # NOTE(review): instrument state (vna_id, trig1/trig2, meas_type, cached
    # query results) is stored as ad-hoc attributes on the pyvisa resource
    # object ``self.ctrl`` rather than on ``self`` — kept as-is for
    # compatibility with existing callers.

    def __init__(self, address="TCPIP0::687UWAVE-TEST::hislip_PXI10_CHASSIS1_SLOT1_INDEX0,4880::INSTR"):
        # Open a VISA session; write_termination matches the newline the VNA
        # expects after every SCPI command.
        self.ResourceManager = visa.ResourceManager()
        self.ctrl = self.ResourceManager.open_resource("%s" % address, write_termination='\n')
        # Very long timeout (1,000,000 ms) because slow averaged sweeps can
        # take minutes before the instrument answers.
        self.ctrl.timeout = 1000000
        self.ctrl.vna_id = self.ctrl.query("*IDN?").rstrip()
        print("Connected to : " + self.ctrl.vna_id)

    def close(self):
        """ closes the VISA instance """
        # Restore continuous triggering so the front panel keeps updating
        # after remote control is released.
        self.ctrl.write("INIT:CONT ON")
        self.ctrl.close()
        print("VNA control closed")

    def preset(self):
        """presets PNA"""
        self.ctrl.write("SYST:FPR")  # full factory preset
        self.ctrl.write("*CLS")  # clear status/event registers
        # print(self.ctrl.query("*STB?"))
        # print(self.ctrl.query("*SRE?"))
        print("VNA Preset")

    def wait(self):
        """Block further command parsing until pending operations finish."""
        self.ctrl.write("*WAI")
    def setup2port(self):
        """sets up 2 port measurement"""
        time.sleep(0.1)
        self.ctrl.meas_type = "FULL"
        self.ctrl.write("DISP:Window1:STATE ON")
        # self.ctrl.write("DISP:Window2:STATE ON")
        # Define all four S-parameter measurements and feed them to traces.
        self.ctrl.write("CALC:PAR:DEL:ALL")
        self.ctrl.write("CALC:PAR:DEF:EXT 'Meas11','S11'")
        self.ctrl.write("CALC:PAR:DEF:EXT 'Meas12','S12'")
        self.ctrl.write("CALC:PAR:DEF:EXT 'Meas21','S21'")
        self.ctrl.write("CALC:PAR:DEF:EXT 'Meas22','S22'")
        self.ctrl.write("DISP:Window1:Trace1:FEED 'Meas11'")
        self.ctrl.write("DISP:Window1:Trace2:FEED 'Meas12'")
        self.ctrl.write("DISP:Window1:Trace3:FEED 'Meas21'")
        self.ctrl.write("DISP:Window1:Trace4:FEED 'Meas22'")
        # self.ctrl.write("CONT:CHAN:INT:CONT 1")
        self.ctrl.write("INIT:CONT OFF")  # turn off continuous triggering
        self.ctrl.write("TRIG:SOUR MAN")  # set trigger to manual
        self.ctrl.write("TRIG:SCOP ALL")  # trigger all channels sequentially
        self.ctrl.write("SENS:SWE:MODE CONT")  # allow channels to trigger repeatedly
        self.ctrl.write("*WAI")
        # Refresh all cached instrument state after the reconfiguration.
        self.reset_sweep()
        self.avg_inquire()
        self.sweep_inquire()
        self.freqs_inquire()
        self.ifbw_inquire()
        self.power_inquire()
        print("2 Port Measurement Set up")

    def setup_thru(self):
        """sets up a simple S21 measurement"""
        time.sleep(0.1)
        self.ctrl.meas_type = "THRU"
        self.ctrl.write("DISP:Window1:STATE ON")
        self.ctrl.write("CALC:PAR:DEL:ALL")
        self.ctrl.write("CALC:PAR:DEF:EXT 'Meas21','S21'")
        # define display format for this measurement? ... CALC:FORM MLOG or MLIN
        self.ctrl.write("DISP:Window1:Trace1:FEED 'Meas21'")
        # self.ctrl.write("CONT:CHAN:INT:CONT 1")
        self.ctrl.write("INIT:CONT OFF")
        self.ctrl.write("TRIG:SOUR MAN")
        self.ctrl.write("*WAI")
        self.reset_sweep()
        self.avg_inquire()
        self.sweep_inquire()
        self.freqs_inquire()
        self.ifbw_inquire()
        self.power_inquire()
        print("Thru Measurement Set Up")
    def set_cal(self, calset="ArbitraryCalSet", calstate='OFF'):
        """Apply a named calibration set (``calstate='ON'``) or disable
        error correction entirely (``calstate='OFF'``).

        After applying a cal set, re-reads and prints the full instrument
        state, since activating a cal set can change sweep/IFBW/power setup.
        """
        if calstate == 'ON':
            self.ctrl.write("SENS:CORR:CSET:ACT \"%s\",1" % calset)
            self.ctrl.write("*WAI")
            time.sleep(0.1)
            self.avg_inquire()
            self.sweep_inquire()
            self.freqs_inquire()
            self.ifbw_inquire()
            self.power_inquire()
            print("Using Cal Set: " + calset)
            print("PNA State post Cal Set Application: ")
            if self.ctrl.avestate == 1:
                print("Averaging ON with " + str(int(self.ctrl.avecount)) + " points")
            else:
                print("Averaging OFF")
            if self.ctrl.sweeptype == "LIN":
                print("Linear Freq. Sweep with " + str(int(self.ctrl.sweeppoints)) + " points")
            elif self.ctrl.sweeptype == "LOG":
                print("Logarithmic Freq. Sweep with " + str(int(self.ctrl.sweeppoints)) + " points")
            else:
                print("Unrecognized Sweep Type")
            print("Sweep time: " + str(self.ctrl.sweeptime) + " seconds")
            print("IF Bandwidth: " + str(self.ctrl.ifbw) + "Hz")
            print("Measurement from " + str(self.ctrl.freqstart / 1e9) + "GHz to " + str(
                float(self.ctrl.freqstop) / 1e9) + "GHz")
            print("Source 1 Power: %.2f dBm" % self.ctrl.powersource1)
            print("Source 2 Power: %.2f dBm" % self.ctrl.powersource2)
            self.reset_sweep()
        elif calstate == 'OFF':
            self.ctrl.write("SENS:CORR OFF")
            print("Taking Un-Calibrated Data")
def set_sweeptype(self, sweeptype="lin"):
test_type = sweeptype.lower().strip()
if test_type == "lin":
self.ctrl.write("SENS:SWE:TYPE LIN")
elif test_type == "log":
self.ctrl.write("SENS:SWE:TYPE LOG")
else:
raise KeyError(F"{sweeptype} is not a recognized sweeptype.")
    def set_num_freq_points(self, num_freq_points):
        """Set the number of frequency points per sweep."""
        self.ctrl.write("SENS:SWE:POIN %d" % num_freq_points)

    def set_sweep(self, num_freq_points, sweeptype="lin"):
        """Configure point count and sweep type, then re-read and print the
        resulting sweep state."""
        self.set_num_freq_points(num_freq_points=num_freq_points)
        self.set_sweeptype(sweeptype)
        self.sweep_inquire()
        print("Sweep type = " + self.ctrl.sweeptype)
        print("Sweep points = " + str(self.ctrl.sweeppoints))
        self.reset_sweep()

    def sweep_inquire(self):
        """Cache the current sweep type, point count and sweep time on ctrl."""
        self.ctrl.write("*WAI")
        self.ctrl.sweeptype = self.ctrl.query("SENS:SWE:TYPE?").rstrip()
        self.ctrl.write("*WAI")
        self.ctrl.sweeppoints = int(self.ctrl.query("SENS:SWE:POIN?"))
        self.ctrl.write("*WAI")
        # NOTE(review): comment said milliseconds, but trig_sweep/set_cal
        # print this value as seconds — confirm the instrument's unit.
        self.ctrl.sweeptime = float(self.ctrl.query("SENS:SWE:TIME?"))  # in milliseconds
        self.ctrl.write("*WAI")
    def set_freq_limits(self, start=0.01, stop=50.0):
        """Set the sweep start/stop frequencies (arguments in GHz)."""
        self.ctrl.write("SENS:FREQ:STAR %fghz " % start)
        self.ctrl.write("SENS:FREQ:STOP %fghz" % stop)
        self.freqs_inquire()
        print("Freq Start = " + str(1e-9 * self.ctrl.freqstart) + "GHz")
        print("Freq Stop = " + str(1e-9 * self.ctrl.freqstop) + "GHz")
        self.sweep_inquire()
        self.reset_sweep()

    def set_freq_center(self, center=21.755, span=43.49):
        """Set the sweep by center frequency and span (arguments in GHz)."""
        self.ctrl.write("SENS:FREQ:CENT %fghz " % center)
        self.ctrl.write("SENS:FREQ:SPAN %fghz " % span)
        self.freqs_inquire()
        print("Freq Center = " + str(1e-9 * self.ctrl.freqcent) + "GHz")
        print("Span = " + str(1e-9 * self.ctrl.freqspan) + "GHz")
        self.sweep_inquire()
        self.reset_sweep()

    def set_center_freq_GHz(self, center_freq_GHz):
        """Set only the center frequency (GHz), without state refresh."""
        self.ctrl.write("SENS:FREQ:CENT %fghz " % center_freq_GHz)

    def set_span_GHz(self, span_GHz):
        """Set only the frequency span (GHz), without state refresh."""
        self.ctrl.write("SENS:FREQ:SPAN %fghz " % span_GHz)

    def freqs_inquire(self):
        """Cache start/stop/center/span frequencies (Hz) on ctrl."""
        self.ctrl.write("*WAI")
        self.ctrl.freqstart = float(self.ctrl.query("SENS:FREQ:STAR?"))
        self.ctrl.write("*WAI")
        self.ctrl.freqstop = float(self.ctrl.query("SENS:FREQ:STOP?"))
        self.ctrl.write("*WAI")
        self.ctrl.freqcent = float(self.ctrl.query("SENS:FREQ:CENT?"))
        self.ctrl.write("*WAI")
        self.ctrl.freqspan = float(self.ctrl.query("SENS:FREQ:SPAN?"))
        self.ctrl.write("*WAI")
    def set_avg(self, count=1, mode="sweep"):
        """Enable sweep averaging with ``count`` > 1, otherwise disable it."""
        if count > 1:
            self.ctrl.write("SENS:AVER ON")
            self.ctrl.write("SENS:AVER:COUN %d" % count)
            self.ctrl.write("SENS:AVER:MODE %s" % mode)
        else:
            self.ctrl.write("SENS:AVER OFF")
        self.avg_inquire()
        if self.ctrl.avestate == 1:
            print("Averaging ON")
            print("Averaging COUNT = " + str(self.ctrl.avecount))
            print("Averaging MODE = " + self.ctrl.avemode.rstrip())
        elif self.ctrl.avestate == 0:
            print("Averaging OFF")
        self.sweep_inquire()
        self.reset_sweep()

    def avg_inquire(self):
        """Cache averaging state (and, if on, mode and count) on ctrl.

        NOTE(review): avemode/avecount are only refreshed when averaging is
        ON, so trig_sweep may read a stale avecount if averaging was toggled
        off and on externally — confirm intended.
        """
        self.ctrl.write("*WAI")
        self.ctrl.avestate = int(self.ctrl.query("SENS:AVER:STAT?"))
        self.ctrl.write("*WAI")
        if self.ctrl.avestate == 1:
            self.ctrl.avemode = self.ctrl.query("SENS:AVER:MODE?")
            self.ctrl.write("*WAI")
            self.ctrl.avecount = int(self.ctrl.query("SENS:AVER:COUN?"))

    def avg_clear(self):
        """Restart the averaging accumulation."""
        self.ctrl.write("SENS:AVER:CLE")
    def set_ifbw(self, ifbw=100, track=None):
        """Set the IF bandwidth in Hz; ``track`` True/False toggles IFBW
        tracking, ``None`` leaves the tracking setting unchanged."""
        self.ctrl.write("SENS:BWID:RES %d " % ifbw)
        self.ctrl.write("*WAI")
        # print("IF Bandwidth set to :" + str(ifbw) + "Hz")
        if track == True:
            self.ctrl.write("SENS:BWID:TRAC ON")
        elif track == False:
            self.ctrl.write("SENS:BWID:TRAC OFF")
        self.ctrl.write("*WAI")
        self.ifbw_inquire()
        print('IF Bandwidth set to: %.1fHz' % self.ctrl.ifbw)
        if self.ctrl.ifbwtrack == 1:
            print("IF Bandwidth Tracking ON")
        elif self.ctrl.ifbwtrack == 0:
            print("IF Bandwidth Tracking OFF")
        self.sweep_inquire()
        self.reset_sweep()

    def set_if_bw_Hz(self, if_bw_Hz):
        """Set the IF bandwidth in Hz without any state refresh."""
        self.ctrl.write(F"SENS:BWID:RES {if_bw_Hz}")

    def ifbw_inquire(self):
        """Cache the IF bandwidth (Hz) and tracking flag on ctrl."""
        self.ctrl.write("*WAI")
        self.ctrl.ifbw = float(self.ctrl.query("SENS:BWID:RES?"))
        self.ctrl.write("*WAI")
        self.ctrl.ifbwtrack = int(self.ctrl.query("SENS:BWID:TRAC?"))
        self.ctrl.write("*WAI")
    def set_port_power_dBm(self, port_power_dBm, port=1):
        """Set the source power level (dBm) for the given port."""
        self.ctrl.write(F"SOUR:POW{port}:LEV {port_power_dBm} ")

    def set_power_on(self):
        """Turn the port-1 source output on."""
        self.ctrl.write("SOUR:POW1:MODE ON")

    def set_power_off(self):
        """Turn the port-1 source output off."""
        self.ctrl.write("SOUR:POW1:MODE OFF")

    def set_power(self, port=1, level=-5, state='ON'):
        """Set source power for a port, or switch its output off.

        NOTE(review): for state='ON' only the level is programmed; the
        MODE ON command is commented out, so a previously OFF output stays
        off — confirm this is intentional.
        """
        if state == 'ON':
            if port == 1:
                self.ctrl.write("SOUR:POW1:LEV %f " % level)
            if port == 2:
                self.ctrl.write("SOUR:POW2:LEV %f " % level)
                # self.ctrl.write("SOUR:POW2:MODE ON")
        elif state == 'OFF':
            if port == 1:
                self.ctrl.write("SOUR:POW1:MODE OFF")
            if port == 2:
                self.ctrl.write("SOUR:POW2:MODE OFF")
        else:
            print("Port " + str(port) + " power state not recognized")
        self.power_inquire()
        print("Port 1 Power set to: %.2fdBm" % self.ctrl.powersource1)
        print("Port 2 Power set to: %.2fdBm" % self.ctrl.powersource2)
        self.sweep_inquire()
        self.reset_sweep()

    def power_inquire(self):
        """Cache the port-1/port-2 source power levels (dBm) on ctrl."""
        self.ctrl.write("*WAI")
        self.ctrl.powersource1 = float(self.ctrl.query("SOUR:POW1:LEV?"))
        self.ctrl.write("*WAI")
        self.ctrl.powersource2 = float(self.ctrl.query("SOUR:POW2:LEV?"))
        self.ctrl.write("*WAI")
    def trig_sweep(self):
        """Trigger a sweep and block until it completes.

        With averaging ON, clears the average and triggers ``avecount``
        sweeps back to back; with averaging OFF, triggers a single sweep
        unless one has already completed since the last settings change
        (tracked via ``ctrl.trig1``).
        """
        self.sweep_inquire()
        print("")
        print("Sweep time is %.2f seconds" % float(self.ctrl.sweeptime))
        if self.ctrl.avestate == 1:  # averaging ON
            self.avg_clear()
            # use stat oper cond ave to check that averaging is done
            for i in range(0, self.ctrl.avecount):
                self.ctrl.write("INIT:IMM")
                self.ctrl.write("*WAI")
                # *OPC? blocks until the sweep is finished.
                self.ctrl.query("*OPC?")
                print("Sweep %d/%d finished" % (i + 1, self.ctrl.avecount))
            self.ctrl.trig1 = True
        else:  # averaging OFF
            if self.ctrl.trig1 == False:
                print("Triggering VNA Sweep")
                self.ctrl.write("INIT:IMM")
                self.ctrl.write("*WAI")
                self.ctrl.query("*OPC?")
                self.ctrl.trig1 = True
                print("Sweep finished")
def get_trace(self, trace=1, format="LM"):
if trace == 1:
self.ctrl.write("CALC:PAR:SEL \'Meas11\'")
elif trace == 2:
self.ctrl.write("CALC:PAR:SEL \'Meas12\'")
elif trace == 3:
self.ctrl.write("CALC:PAR:SEL \'Meas21\'")
elif trace == 4:
self.ctrl.write("CALC:PAR:SEL \'Meas22\'")
else:
print("Not a recognized trace")
return 0
# print("Triggering VNA Sweep")
# self.trig_sweep()
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == "LM":
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
return (traceLM, tracePH)
elif format == "RI":
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return (traceR, traceI)
elif format == "COM":
tracecom = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
tracecom.append(tracesplit[i - 1] + 1j * tracesplit[i])
return tracecom
else:
print("Data Format not recognized")
return 0
def get_S21(self, format='LM'):
self.ctrl.write("CALC:PAR:SEL \'Meas21\'")
# print("Triggering VNA Sweep")
# self.trig_sweep()
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == 'LM':
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10.0 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180.0 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
traceLM = np.array(traceLM)
tracePH = np.array(tracePH)
return (traceLM, tracePH)
elif format == 'RI':
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return traceR, traceI
else:
print('Format not recognized!')
return 0
def get_S12(self, format='LM'):
self.ctrl.write("CALC:PAR:SEL \'Meas12\'")
# print("Triggering VNA Sweep")
# self.trig_sweep()
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == 'LM':
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10.0 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180.0 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
traceLM = np.array(traceLM)
tracePH = np.array(tracePH)
return (traceLM, tracePH)
elif format == 'RI':
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return (traceR, traceI)
else:
print('Format not recognized!')
return 0
    def reset_sweep(self):
        """Mark the trigger-tracking flags stale.

        trig1/trig2 record whether a sweep has completed since the last
        settings change; every method that alters sweep settings calls this
        so the next data request re-triggers a sweep.
        """
        self.ctrl.trig1 = False
        self.ctrl.trig2 = False
import math
import numpy as np
import time
class USBVNA():
def __init__(self, address="TCPIP0::687UWAVE-TEST::hislip_PXI10_CHASSIS1_SLOT1_INDEX0,4880::INSTR"):
self.ResourceManager = visa.ResourceManager()
self.ctrl = self.ResourceManager.open_resource("%s" % address, write_termination='\n')
self.ctrl.timeout = 1000000
self.ctrl.vna_id = self.ctrl.query("*IDN?").rstrip()
print("Connected to : " + self.ctrl.vna_id)
def close(self):
self.ctrl.write("INIT:CONT ON")
self.ctrl.close()
print("VNA control closed")
def preset(self):
self.ctrl.write("SYST:FPR")
self.ctrl.write("*CLS")
print("VNA Preset")
def wait(self):
self.ctrl.write("*WAI")
def setup2port(self):
time.sleep(0.1)
self.ctrl.meas_type = "FULL"
self.ctrl.write("DISP:Window1:STATE ON")
self.ctrl.write("CALC:PAR:DEL:ALL")
self.ctrl.write("CALC:PAR:DEF:EXT 'Meas11','S11'")
self.ctrl.write("CALC:PAR:DEF:EXT 'Meas12','S12'")
self.ctrl.write("CALC:PAR:DEF:EXT 'Meas21','S21'")
self.ctrl.write("CALC:PAR:DEF:EXT 'Meas22','S22'")
self.ctrl.write("DISP:Window1:Trace1:FEED 'Meas11'")
self.ctrl.write("DISP:Window1:Trace2:FEED 'Meas12'")
self.ctrl.write("DISP:Window1:Trace3:FEED 'Meas21'")
self.ctrl.write("DISP:Window1:Trace4:FEED 'Meas22'")
self.ctrl.write("INIT:CONT OFF")
self.ctrl.write("TRIG:SOUR MAN")
self.ctrl.write("TRIG:SCOP ALL")
self.ctrl.write("SENS:SWE:MODE CONT")
self.ctrl.write("*WAI")
self.reset_sweep()
self.avg_inquire()
self.sweep_inquire()
self.freqs_inquire()
self.ifbw_inquire()
self.power_inquire()
print("2 Port Measurement Set up")
def setup_thru(self):
time.sleep(0.1)
self.ctrl.meas_type = "THRU"
self.ctrl.write("DISP:Window1:STATE ON")
self.ctrl.write("CALC:PAR:DEL:ALL")
self.ctrl.write("CALC:PAR:DEF:EXT 'Meas21','S21'")
self.ctrl.write("DISP:Window1:Trace1:FEED 'Meas21'")
self.ctrl.write("INIT:CONT OFF")
self.ctrl.write("TRIG:SOUR MAN")
self.ctrl.write("*WAI")
self.reset_sweep()
self.avg_inquire()
self.sweep_inquire()
self.freqs_inquire()
self.ifbw_inquire()
self.power_inquire()
print("Thru Measurement Set Up")
def set_cal(self, calset="ArbitraryCalSet", calstate='OFF'):
if calstate == 'ON':
self.ctrl.write("SENS:CORR:CSET:ACT \"%s\",1" % calset)
self.ctrl.write("*WAI")
time.sleep(0.1)
self.avg_inquire()
self.sweep_inquire()
self.freqs_inquire()
self.ifbw_inquire()
self.power_inquire()
print("Using Cal Set: " + calset)
print("PNA State post Cal Set Application: ")
if self.ctrl.avestate == 1:
print("Averaging ON with " + str(int(self.ctrl.avecount)) + " points")
else:
print("Averaging OFF")
if self.ctrl.sweeptype == "LIN":
print("Linear Freq. Sweep with " + str(int(self.ctrl.sweeppoints)) + " points")
elif self.ctrl.sweeptype == "LOG":
print("Logarithmic Freq. Sweep with " + str(int(self.ctrl.sweeppoints)) + " points")
else:
print("Unrecognized Sweep Type")
print("Sweep time: " + str(self.ctrl.sweeptime) + " seconds")
print("IF Bandwidth: " + str(self.ctrl.ifbw) + "Hz")
print("Measurement from " + str(self.ctrl.freqstart / 1e9) + "GHz to " + str(
float(self.ctrl.freqstop) / 1e9) + "GHz")
print("Source 1 Power: %.2f dBm" % self.ctrl.powersource1)
print("Source 2 Power: %.2f dBm" % self.ctrl.powersource2)
self.reset_sweep()
elif calstate == 'OFF':
self.ctrl.write("SENS:CORR OFF")
print("Taking Un-Calibrated Data")
def set_sweeptype(self, sweeptype="lin"):
test_type = sweeptype.lower().strip()
if test_type == "lin":
self.ctrl.write("SENS:SWE:TYPE LIN")
elif test_type == "log":
self.ctrl.write("SENS:SWE:TYPE LOG")
else:
raise KeyError(F"{sweeptype} is not a recognized sweeptype.")
def set_num_freq_points(self, num_freq_points):
self.ctrl.write("SENS:SWE:POIN %d" % num_freq_points)
def set_sweep(self, num_freq_points, sweeptype="lin"):
self.set_num_freq_points(num_freq_points=num_freq_points)
self.set_sweeptype(sweeptype)
self.sweep_inquire()
print("Sweep type = " + self.ctrl.sweeptype)
print("Sweep points = " + str(self.ctrl.sweeppoints))
self.reset_sweep()
def sweep_inquire(self):
self.ctrl.write("*WAI")
self.ctrl.sweeptype = self.ctrl.query("SENS:SWE:TYPE?").rstrip()
self.ctrl.write("*WAI")
self.ctrl.sweeppoints = int(self.ctrl.query("SENS:SWE:POIN?"))
self.ctrl.write("*WAI")
self.ctrl.sweeptime = float(self.ctrl.query("SENS:SWE:TIME?"))
self.ctrl.write("*WAI")
def set_freq_limits(self, start=0.01, stop=50.0):
self.ctrl.write("SENS:FREQ:STAR %fghz " % start)
self.ctrl.write("SENS:FREQ:STOP %fghz" % stop)
self.freqs_inquire()
print("Freq Start = " + str(1e-9 * self.ctrl.freqstart) + "GHz")
print("Freq Stop = " + str(1e-9 * self.ctrl.freqstop) + "GHz")
self.sweep_inquire()
self.reset_sweep()
def set_freq_center(self, center=21.755, span=43.49):
self.ctrl.write("SENS:FREQ:CENT %fghz " % center)
self.ctrl.write("SENS:FREQ:SPAN %fghz " % span)
self.freqs_inquire()
print("Freq Center = " + str(1e-9 * self.ctrl.freqcent) + "GHz")
print("Span = " + str(1e-9 * self.ctrl.freqspan) + "GHz")
self.sweep_inquire()
self.reset_sweep()
def set_center_freq_GHz(self, center_freq_GHz):
self.ctrl.write("SENS:FREQ:CENT %fghz " % center_freq_GHz)
def set_span_GHz(self, span_GHz):
self.ctrl.write("SENS:FREQ:SPAN %fghz " % span_GHz)
def freqs_inquire(self):
self.ctrl.write("*WAI")
self.ctrl.freqstart = float(self.ctrl.query("SENS:FREQ:STAR?"))
self.ctrl.write("*WAI")
self.ctrl.freqstop = float(self.ctrl.query("SENS:FREQ:STOP?"))
self.ctrl.write("*WAI")
self.ctrl.freqcent = float(self.ctrl.query("SENS:FREQ:CENT?"))
self.ctrl.write("*WAI")
self.ctrl.freqspan = float(self.ctrl.query("SENS:FREQ:SPAN?"))
self.ctrl.write("*WAI")
def set_avg(self, count=1, mode="sweep"):
if count > 1:
self.ctrl.write("SENS:AVER ON")
self.ctrl.write("SENS:AVER:COUN %d" % count)
self.ctrl.write("SENS:AVER:MODE %s" % mode)
else:
self.ctrl.write("SENS:AVER OFF")
self.avg_inquire()
if self.ctrl.avestate == 1:
print("Averaging ON")
print("Averaging COUNT = " + str(self.ctrl.avecount))
print("Averaging MODE = " + self.ctrl.avemode.rstrip())
elif self.ctrl.avestate == 0:
print("Averaging OFF")
self.sweep_inquire()
self.reset_sweep()
def avg_inquire(self):
self.ctrl.write("*WAI")
self.ctrl.avestate = int(self.ctrl.query("SENS:AVER:STAT?"))
self.ctrl.write("*WAI")
if self.ctrl.avestate == 1:
self.ctrl.avemode = self.ctrl.query("SENS:AVER:MODE?")
self.ctrl.write("*WAI")
self.ctrl.avecount = int(self.ctrl.query("SENS:AVER:COUN?"))
def avg_clear(self):
self.ctrl.write("SENS:AVER:CLE")
def set_ifbw(self, ifbw=100, track=None):
self.ctrl.write("SENS:BWID:RES %d " % ifbw)
self.ctrl.write("*WAI")
if track == True:
self.ctrl.write("SENS:BWID:TRAC ON")
elif track == False:
self.ctrl.write("SENS:BWID:TRAC OFF")
self.ctrl.write("*WAI")
self.ifbw_inquire()
print('IF Bandwidth set to: %.1fHz' % self.ctrl.ifbw)
if self.ctrl.ifbwtrack == 1:
print("IF Bandwidth Tracking ON")
elif self.ctrl.ifbwtrack == 0:
print("IF Bandwidth Tracking OFF")
self.sweep_inquire()
self.reset_sweep()
def set_if_bw_Hz(self, if_bw_Hz):
self.ctrl.write(F"SENS:BWID:RES {if_bw_Hz}")
def ifbw_inquire(self):
self.ctrl.write("*WAI")
self.ctrl.ifbw = float(self.ctrl.query("SENS:BWID:RES?"))
self.ctrl.write("*WAI")
self.ctrl.ifbwtrack = int(self.ctrl.query("SENS:BWID:TRAC?"))
self.ctrl.write("*WAI")
def set_port_power_dBm(self, port_power_dBm, port=1):
self.ctrl.write(F"SOUR:POW{port}:LEV {port_power_dBm} ")
def set_power_on(self):
self.ctrl.write("SOUR:POW1:MODE ON")
def set_power_off(self):
self.ctrl.write("SOUR:POW1:MODE OFF")
def set_power(self, port=1, level=-5, state='ON'):
if state == 'ON':
if port == 1:
self.ctrl.write("SOUR:POW1:LEV %f " % level)
if port == 2:
self.ctrl.write("SOUR:POW2:LEV %f " % level)
elif state == 'OFF':
if port == 1:
self.ctrl.write("SOUR:POW1:MODE OFF")
if port == 2:
self.ctrl.write("SOUR:POW2:MODE OFF")
else:
print("Port " + str(port) + " power state not recognized")
self.power_inquire()
print("Port 1 Power set to: %.2fdBm" % self.ctrl.powersource1)
print("Port 2 Power set to: %.2fdBm" % self.ctrl.powersource2)
self.sweep_inquire()
self.reset_sweep()
def power_inquire(self):
self.ctrl.write("*WAI")
self.ctrl.powersource1 = float(self.ctrl.query("SOUR:POW1:LEV?"))
self.ctrl.write("*WAI")
self.ctrl.powersource2 = float(self.ctrl.query("SOUR:POW2:LEV?"))
self.ctrl.write("*WAI")
def trig_sweep(self):
self.sweep_inquire()
print("")
print("Sweep time is %.2f seconds" % float(self.ctrl.sweeptime))
if self.ctrl.avestate == 1:
self.avg_clear()
for i in range(0, self.ctrl.avecount):
self.ctrl.write("INIT:IMM")
self.ctrl.write("*WAI")
self.ctrl.query("*OPC?")
print("Sweep %d/%d finished" % (i + 1, self.ctrl.avecount))
self.ctrl.trig1 = True
else:
if self.ctrl.trig1 == False:
print("Triggering VNA Sweep")
self.ctrl.write("INIT:IMM")
self.ctrl.write("*WAI")
self.ctrl.query("*OPC?")
self.ctrl.trig1 = True
print("Sweep finished")
def get_trace(self, trace=1, format="LM"):
if trace == 1:
self.ctrl.write("CALC:PAR:SEL \'Meas11\'")
elif trace == 2:
self.ctrl.write("CALC:PAR:SEL \'Meas12\'")
elif trace == 3:
self.ctrl.write("CALC:PAR:SEL \'Meas21\'")
elif trace == 4:
self.ctrl.write("CALC:PAR:SEL \'Meas22\'")
else:
print("Not a recognized trace")
return 0
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == "LM":
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
return (traceLM, tracePH)
elif format == "RI":
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return (traceR, traceI)
elif format == "COM":
tracecom = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
tracecom.append(tracesplit[i - 1] + 1j * tracesplit[i])
return tracecom
else:
print("Data Format not recognized")
return 0
def get_S21(self, format='LM'):
self.ctrl.write("CALC:PAR:SEL \'Meas21\'")
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == 'LM':
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10.0 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180.0 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
traceLM = np.array(traceLM)
tracePH = np.array(tracePH)
return (traceLM, tracePH)
elif format == 'RI':
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return traceR, traceI
else:
print('Format not recognized!')
return 0
def get_S12(self, format='LM'):
self.ctrl.write("CALC:PAR:SEL \'Meas12\'")
self.ctrl.write("*WAI")
self.ctrl.write("CALC:DATA? SDATA")
rawtrace = self.ctrl.read()
self.ctrl.write("*WAI")
tracesplit = rawtrace.split(",")
if format == 'LM':
traceLM = []
tracePH = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceLM.append(10.0 * math.log10(float(tracesplit[i - 1]) ** 2 + float(tracesplit[i]) ** 2))
tracePH.append(180.0 / math.pi * math.atan2(float(tracesplit[i]), float(tracesplit[i - 1])))
traceLM = np.array(traceLM)
tracePH = np.array(tracePH)
return (traceLM, tracePH)
elif format == 'RI':
traceR = []
traceI = []
for i in range(0, len(tracesplit)):
if i % 2 == 1:
traceR.append(float(tracesplit[i - 1]))
traceI.append(float(tracesplit[i]))
traceR = np.array(traceR)
traceI = np.array(traceI)
return (traceR, traceI)
else:
print('Format not recognized!')
return 0
def reset_sweep(self):
self.ctrl.trig1 = False
self.ctrl.trig2 = False | true | true |
f7ffcb16d89f9bf0a3c1691bf12b9bef581801b5 | 96 | py | Python | venv/lib/python3.8/site-packages/numpy/typing/_add_docstring.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/numpy/typing/_add_docstring.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/numpy/typing/_add_docstring.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/7e/f8/99/031081079c35398e4f9e73fd1e3d836bb92b12cf22c698fcca4ff468b3 | 96 | 96 | 0.895833 | /home/runner/.cache/pip/pool/7e/f8/99/031081079c35398e4f9e73fd1e3d836bb92b12cf22c698fcca4ff468b3 | false | true |
f7ffcd40685acb634ad20f9e3382d254b3867683 | 448 | py | Python | cride/betfriends/admin.py | albertoaldanar/betmatcherAPI | c0590025efd79f4e489f9c9433b17554ea6ba23f | [
"MIT"
] | null | null | null | cride/betfriends/admin.py | albertoaldanar/betmatcherAPI | c0590025efd79f4e489f9c9433b17554ea6ba23f | [
"MIT"
] | 7 | 2020-06-05T20:53:27.000Z | 2022-03-11T23:47:12.000Z | cride/betfriends/admin.py | albertoaldanar/betmatcherAPI | c0590025efd79f4e489f9c9433b17554ea6ba23f | [
"MIT"
] | null | null | null | #django
from django.db import models
from django.contrib import admin
#model
from cride.betfriends.models import BetFriend, FriendRequest
@admin.register(BetFriend)
class BetFriendAdmin(admin.ModelAdmin):
    """Admin configuration for BetFriend: list both members of the link."""

    list_display = ("user_a", "user_b")
@admin.register(FriendRequest)
class FriendRequestAdmin(admin.ModelAdmin):
    """Admin configuration for FriendRequest: show status and both parties,
    searchable by sender."""

    list_display = ("is_accepted", "received_by", "sent_by", "id")
    search_fields = ("sent_by",)
| 17.92 | 60 | 0.720982 |
from django.db import models
from django.contrib import admin
from cride.betfriends.models import BetFriend, FriendRequest
@admin.register(BetFriend)
class BetFriendAdmin(admin.ModelAdmin):
list_display= (
"user_a",
"user_b"
)
@admin.register(FriendRequest)
class FriendRequestAdmin(admin.ModelAdmin):
list_display= (
"is_accepted",
"received_by",
"sent_by",
"id"
)
search_fields = ("sent_by",)
| true | true |
f7ffcd7ff9af3f9a9c18348e4ad75cb41e127bd8 | 2,285 | py | Python | examples/example_background_image.py | o0morgan0o/CustomTkinter | f7d4f902a91d87c7f078963d6e8b757d84abd743 | [
"CC0-1.0"
] | 1 | 2022-02-18T03:45:01.000Z | 2022-02-18T03:45:01.000Z | examples/example_background_image.py | o0morgan0o/CustomTkinter | f7d4f902a91d87c7f078963d6e8b757d84abd743 | [
"CC0-1.0"
] | null | null | null | examples/example_background_image.py | o0morgan0o/CustomTkinter | f7d4f902a91d87c7f078963d6e8b757d84abd743 | [
"CC0-1.0"
] | null | null | null | import tkinter
import tkinter.messagebox
import customtkinter
from PIL import Image, ImageTk
import os
# Follow the OS appearance setting for all customtkinter widgets.
customtkinter.set_appearance_mode("System")  # Other: "Light", "Dark"
# Absolute directory of this script, used to locate the bundled test images.
PATH = os.path.dirname(os.path.realpath(__file__))
class App(tkinter.Tk):
    """Example window: a full-size gradient background image with a centered
    customtkinter frame holding two buttons."""

    # NOTE(review): "gardient" typo is part of the displayed window title;
    # left unchanged here since it is runtime text, not a comment.
    APP_NAME = "CustomTkinter background gardient"
    WIDTH = 900
    HEIGHT = 600

    def __init__(self, *args, **kwargs):
        customtkinter.enable_macos_darkmode()
        tkinter.Tk.__init__(self, *args, **kwargs)

        self.title(App.APP_NAME)
        self.geometry(str(App.WIDTH) + "x" + str(App.HEIGHT))
        # min == max: the window is effectively fixed-size.
        self.minsize(App.WIDTH, App.HEIGHT)
        self.maxsize(App.WIDTH, App.HEIGHT)

        # Route window close and the macOS quit shortcuts through on_closing
        # so darkmode support is torn down before the window is destroyed.
        self.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.bind("<Command-q>", self.on_closing)
        self.bind("<Command-w>", self.on_closing)
        self.createcommand('tk::mac::Quit', self.on_closing)

        # Keep references to image/photo on self so Tk does not garbage
        # collect the PhotoImage while the label is displayed.
        self.image = Image.open(PATH + "/test_images/bg_gradient.jpg").resize((self.WIDTH, self.HEIGHT))
        self.photo = ImageTk.PhotoImage(self.image)

        self.image_label = tkinter.Label(master=self, image=self.photo)
        self.image_label.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)

        self.frame = customtkinter.CTkFrame(master=self,
                                            width=300,
                                            height=App.HEIGHT,
                                            corner_radius=0)
        self.frame.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)

        self.button_1 = customtkinter.CTkButton(master=self.frame, text="button 1",
                                                corner_radius=10, command=self.button_event, width=200)
        self.button_1.place(relx=0.5, rely=0.6, anchor=tkinter.CENTER)

        self.button_2 = customtkinter.CTkButton(master=self.frame, text="button 2",
                                                corner_radius=10, command=self.button_event, width=200)
        self.button_2.place(relx=0.5, rely=0.7, anchor=tkinter.CENTER)

    def button_event(self):
        """Shared click handler for both demo buttons."""
        print("Button pressed")

    def on_closing(self, event=0):
        """Tear down darkmode support and destroy the window."""
        customtkinter.disable_macos_darkmode()
        self.destroy()

    def start(self):
        """Enter the Tk main loop (blocks until the window closes)."""
        self.mainloop()
if __name__ == "__main__":
    # Run the demo window when executed as a script.
    App().start()
| 34.104478 | 104 | 0.611379 | import tkinter
import tkinter.messagebox
import customtkinter
from PIL import Image, ImageTk
import os
customtkinter.set_appearance_mode("System")
PATH = os.path.dirname(os.path.realpath(__file__))
class App(tkinter.Tk):
    """Demo window: a full-window background image with a centered
    CustomTkinter frame holding two buttons.
    """
    # NOTE(review): "gardient" looks like a typo for "gradient" in the window
    # title; left untouched here because it is a runtime string.
    APP_NAME = "CustomTkinter background gardient"
    WIDTH = 900
    HEIGHT = 600
    def __init__(self, *args, **kwargs):
        customtkinter.enable_macos_darkmode()
        tkinter.Tk.__init__(self, *args, **kwargs)
        self.title(App.APP_NAME)
        self.geometry(str(App.WIDTH) + "x" + str(App.HEIGHT))
        # min == max size pins the window to a fixed 900x600.
        self.minsize(App.WIDTH, App.HEIGHT)
        self.maxsize(App.WIDTH, App.HEIGHT)
        # Window close button, macOS shortcuts, and the Quit menu entry all
        # funnel into on_closing so the dark-mode state is always restored.
        self.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.bind("<Command-q>", self.on_closing)
        self.bind("<Command-w>", self.on_closing)
        self.createcommand('tk::mac::Quit', self.on_closing)
        # Keep references to the PIL image and the PhotoImage on self:
        # Tk widgets do not hold their own reference, so an unreferenced
        # PhotoImage would be garbage-collected and the label would go blank.
        self.image = Image.open(PATH + "/test_images/bg_gradient.jpg").resize((self.WIDTH, self.HEIGHT))
        self.photo = ImageTk.PhotoImage(self.image)
        self.image_label = tkinter.Label(master=self, image=self.photo)
        self.image_label.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)
        # Centered 300px-wide frame spanning the full window height.
        self.frame = customtkinter.CTkFrame(master=self,
                                            width=300,
                                            height=App.HEIGHT,
                                            corner_radius=0)
        self.frame.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)
        self.button_1 = customtkinter.CTkButton(master=self.frame, text="button 1",
                                                corner_radius=10, command=self.button_event, width=200)
        self.button_1.place(relx=0.5, rely=0.6, anchor=tkinter.CENTER)
        self.button_2 = customtkinter.CTkButton(master=self.frame, text="button 2",
                                                corner_radius=10, command=self.button_event, width=200)
        self.button_2.place(relx=0.5, rely=0.7, anchor=tkinter.CENTER)
    def button_event(self):
        """Shared click handler for both buttons."""
        print("Button pressed")
    def on_closing(self, event=0):
        """Restore macOS appearance state, then destroy the window."""
        customtkinter.disable_macos_darkmode()
        self.destroy()
    def start(self):
        """Enter the Tk main loop (blocks until the window is closed)."""
        self.mainloop()
if __name__ == "__main__":
    # Script entry point: build the window and run the event loop.
    app = App()
    app.start()
| true | true |
f7ffce14bbfc62748b0adfbfb4990a11b8e8de47 | 780 | py | Python | ward_tests/test_entities.py | koichiro8/learning | ffc0785dfeeeee4240aed0a1742a64b86ff28369 | [
"MIT"
] | null | null | null | ward_tests/test_entities.py | koichiro8/learning | ffc0785dfeeeee4240aed0a1742a64b86ff28369 | [
"MIT"
] | null | null | null | ward_tests/test_entities.py | koichiro8/learning | ffc0785dfeeeee4240aed0a1742a64b86ff28369 | [
"MIT"
] | null | null | null | from ward import raises, test
from learning.entities import Todo
from learning.errors import TitleLengthError
@test("create todo")
def _():
todo = Todo("test todo")
assert todo.title == "test todo"
assert not todo.id
assert not todo.done
assert not todo.created_at
@test("title length is less than 0")
def _():
with raises(TitleLengthError) as exc:
Todo("")
assert str(exc.raised) == "title length is not between 1 and 256, length: 0"
def _register_over_length_test(title, case_label):
    """Register one over-length-title test case with ward.

    A factory is used so ``title`` is bound at registration time: a test
    function defined directly in the loop body would close over the loop
    variable (late binding) and every registered test would then run against
    the *last* iteration's title when ward executes the tests later.
    """
    @test(f"title length is greater than 256 [{case_label}]")
    def _():
        with raises(TitleLengthError) as exc:
            Todo(title)
        assert str(exc.raised) == "title length is not between 1 and 256, length: 257"


# ``id`` renamed to ``case_label``: the old loop variable shadowed the
# ``id`` builtin.
for over_length_title, case_label in [("a" * 257, "single byte"), ("あ" * 257, "multi byte")]:
    _register_over_length_test(over_length_title, case_label)
| 23.636364 | 86 | 0.641026 | from ward import raises, test
from learning.entities import Todo
from learning.errors import TitleLengthError
@test("create todo")
def _():
todo = Todo("test todo")
assert todo.title == "test todo"
assert not todo.id
assert not todo.done
assert not todo.created_at
@test("title length is less than 0")
def _():
with raises(TitleLengthError) as exc:
Todo("")
assert str(exc.raised) == "title length is not between 1 and 256, length: 0"
for title, id in [("a" * 257, "single byte"), ("あ" * 257, "multi byte")]:
@test(f"title length is greater than 256 [{id}]")
def _():
with raises(TitleLengthError) as exc:
Todo(title)
assert str(exc.raised) == "title length is not between 1 and 256, length: 257"
| true | true |
f7ffcedcc27c5491cbbdab7873d4aa1a5d0241a3 | 3,648 | py | Python | cortex_DIM_DEMI/models/coordinates.py | RayRuizhiLiao/DIM_DEMI | 6b0cb14f56f1aee232e553e75ea81a722b9e71dd | [
"BSD-3-Clause"
] | null | null | null | cortex_DIM_DEMI/models/coordinates.py | RayRuizhiLiao/DIM_DEMI | 6b0cb14f56f1aee232e553e75ea81a722b9e71dd | [
"BSD-3-Clause"
] | null | null | null | cortex_DIM_DEMI/models/coordinates.py | RayRuizhiLiao/DIM_DEMI | 6b0cb14f56f1aee232e553e75ea81a722b9e71dd | [
"BSD-3-Clause"
] | null | null | null | '''Coordinate task
'''
from cortex.plugins import ModelPlugin
import torch
import torch.nn.functional as F
from cortex_DIM_DEMI.nn_modules.mi_networks import MI1x1ConvNet
class CoordinatePredictor(ModelPlugin):
    '''Coordinate prediction

    Trains a small 1x1-conv head to predict, for every spatial location of a
    local feature map, its own (x, y) coordinates from the concatenation of
    the local features and the broadcast global feature vector.
    '''
    defaults = dict(
        data=dict(batch_size=dict(train=64, test=64),
                  inputs=dict(inputs='data.images'), skip_last_batch=True),
        train=dict(save_on_lowest='losses.encoder', epochs=1000),
        optimizer=dict(learning_rate=1e-4)
    )
    def build(self, encoder, config, task_idx=None):
        '''
        Args:
            encoder: Encoder network; its activations provide the local and
                global features selected by ``task_idx``.
            config (dict): Must contain ``'local_task_idx'`` when ``task_idx``
                is not given explicitly.
            task_idx: Indices for coordinate task.
        '''
        self.nets.encoder = encoder
        if task_idx is not None:
            self.task_idx = task_idx
        elif 'local_task_idx' not in config.keys():
            raise ValueError('')
        else:
            self.task_idx = config['local_task_idx']
        # Create MI nn_modules.
        # Run one batch through the encoder just to discover activation shapes.
        X = self.inputs('data.images')
        outs = self.nets.encoder(X, return_all_activations=True)
        # L: local (spatial) feature map, G: global feature vector.
        L, G = [outs[i] for i in self.task_idx]
        local_size = L.size()[1:]
        dim_x = local_size[1]
        dim_y = local_size[2]
        # One output channel per possible x index plus one per y index.
        n_coords = dim_x + dim_y
        global_size = G.size()[1:]
        n_inputs = global_size[0] + local_size[0]
        if len(global_size) != 1:
            raise NotImplementedError('Global vector must be 1d')
        # Set up ground truth labels
        # labels[c, i, j]: channels [0, dim_x) one-hot encode the x index i,
        # channels [dim_x, dim_x + dim_y) one-hot encode the y index j.
        self.labels = torch.zeros((n_coords, dim_x, dim_y)).float().to(L.device)
        for i in range(dim_x):
            for j in range(dim_y):
                self.labels[i, i, j] = 1.
                self.labels[dim_x + j, i, j] = 1.
        coord_net = MI1x1ConvNet(n_inputs, n_coords).to(X.device)
        def extract(outs, coord_net=None):
            '''Wrapper function to be put in encoder forward for speed.
            Args:
                outs (list): List of activations
                coord_net (nn.Module): Network to predict coordinates of every location.
            Returns:
                tuple: local, global outputs
            '''
            L, G = [outs[i] for i in self.task_idx]
            # Broadcast the global vector over all spatial positions and
            # concatenate with the local map along the channel dim.
            # NOTE(review): ``input`` shadows the builtin of the same name.
            input = torch.cat([L, G[:, :, None, None].expand(-1, -1, L.size(2), L.size(3))], dim=1)
            logits = coord_net(input)
            return logits
        self.nets.encoder.module.add_network(self.name, extract,
                                             networks=dict(coord_net=coord_net))
    def routine(self, outs=None, scale=1.0):
        '''
        Args:
            outs (dict): Activations dict; ``outs[self.name]`` holds the
                coordinate logits produced by ``extract`` above.
            scale: Scaling term for loss on the encoder.
        '''
        logits = outs[self.name]
        labels_ex = self.labels[None, :, :, :].expand(logits.size(0), -1, -1, -1)
        # Split channels into the x-coordinate and y-coordinate halves and
        # apply an independent cross-entropy to each.
        x_logits, y_logits = torch.chunk(logits, 2, dim=1)
        x_labels, y_labels = torch.chunk(labels_ex, 2, dim=1)
        x_sm_out = F.log_softmax(x_logits, dim=1)
        y_sm_out = F.log_softmax(y_logits, dim=1)
        x_loss = -(x_labels * x_sm_out).sum(1).mean()
        y_loss = -(y_labels * y_sm_out).sum(1).mean()
        loss = x_loss + y_loss
        # Computing accuracies.
        # x_labels/y_labels are rebound here from one-hot maps to argmax
        # index tensors before comparison with the predictions.
        x_labels = torch.max(x_labels.data, 1)[1]
        y_labels = torch.max(y_labels.data, 1)[1]
        x_pred = torch.max(x_logits.data, 1)[1]
        y_pred = torch.max(y_logits.data, 1)[1]
        x_correct = 100. * x_pred.eq(x_labels.data).float().cpu().mean()
        y_correct = 100. * y_pred.eq(y_labels.data).float().cpu().mean()
        self.add_losses(encoder=scale * loss)
        self.add_results(x_accuracy=x_correct, y_accuracy=y_correct, total_accuracy=0.5 * (x_correct + y_correct))
from cortex.plugins import ModelPlugin
import torch
import torch.nn.functional as F
from cortex_DIM_DEMI.nn_modules.mi_networks import MI1x1ConvNet
class CoordinatePredictor(ModelPlugin):
defaults = dict(
data=dict(batch_size=dict(train=64, test=64),
inputs=dict(inputs='data.images'), skip_last_batch=True),
train=dict(save_on_lowest='losses.encoder', epochs=1000),
optimizer=dict(learning_rate=1e-4)
)
def build(self, encoder, config, task_idx=None):
self.nets.encoder = encoder
if task_idx is not None:
self.task_idx = task_idx
elif 'local_task_idx' not in config.keys():
raise ValueError('')
else:
self.task_idx = config['local_task_idx']
X = self.inputs('data.images')
outs = self.nets.encoder(X, return_all_activations=True)
L, G = [outs[i] for i in self.task_idx]
local_size = L.size()[1:]
dim_x = local_size[1]
dim_y = local_size[2]
n_coords = dim_x + dim_y
global_size = G.size()[1:]
n_inputs = global_size[0] + local_size[0]
if len(global_size) != 1:
raise NotImplementedError('Global vector must be 1d')
self.labels = torch.zeros((n_coords, dim_x, dim_y)).float().to(L.device)
for i in range(dim_x):
for j in range(dim_y):
self.labels[i, i, j] = 1.
self.labels[dim_x + j, i, j] = 1.
coord_net = MI1x1ConvNet(n_inputs, n_coords).to(X.device)
def extract(outs, coord_net=None):
L, G = [outs[i] for i in self.task_idx]
input = torch.cat([L, G[:, :, None, None].expand(-1, -1, L.size(2), L.size(3))], dim=1)
logits = coord_net(input)
return logits
self.nets.encoder.module.add_network(self.name, extract,
networks=dict(coord_net=coord_net))
def routine(self, outs=None, scale=1.0):
logits = outs[self.name]
labels_ex = self.labels[None, :, :, :].expand(logits.size(0), -1, -1, -1)
x_logits, y_logits = torch.chunk(logits, 2, dim=1)
x_labels, y_labels = torch.chunk(labels_ex, 2, dim=1)
x_sm_out = F.log_softmax(x_logits, dim=1)
y_sm_out = F.log_softmax(y_logits, dim=1)
x_loss = -(x_labels * x_sm_out).sum(1).mean()
y_loss = -(y_labels * y_sm_out).sum(1).mean()
loss = x_loss + y_loss
x_labels = torch.max(x_labels.data, 1)[1]
y_labels = torch.max(y_labels.data, 1)[1]
x_pred = torch.max(x_logits.data, 1)[1]
y_pred = torch.max(y_logits.data, 1)[1]
x_correct = 100. * x_pred.eq(x_labels.data).float().cpu().mean()
y_correct = 100. * y_pred.eq(y_labels.data).float().cpu().mean()
self.add_losses(encoder=scale * loss)
self.add_results(x_accuracy=x_correct, y_accuracy=y_correct, total_accuracy=0.5 * (x_correct + y_correct))
| true | true |
f7ffcf00f72cd8efd21b59834792d7c0a9a8deaf | 12,294 | py | Python | ievv_opensource/utils/choices_with_meta.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | null | null | null | ievv_opensource/utils/choices_with_meta.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | 37 | 2015-10-26T09:14:12.000Z | 2022-02-10T10:35:33.000Z | ievv_opensource/utils/choices_with_meta.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | 1 | 2015-11-06T07:56:34.000Z | 2015-11-06T07:56:34.000Z | from collections import OrderedDict
class Choice(object):
    """
    A choice in a :class:`.ChoicesWithMeta`.

    This basic choice class supports value, label and description, but you
    should subclass this (and possibly also :class:`.ChoicesWithMeta`) if
    you need more metadata.

    .. attribute:: value

        The value which is typically stored in the database, or
        sent as the actual POST data value in forms.

    .. attribute:: label

        A short user-friendly label for the choice.

    .. attribute:: description

        A user-friendly longer description of the choice.
    """
    def __init__(self, value, label=None, description='',
                 attributename=None):
        """
        Args:
            value: The value for the choice. Typically stored in the
                database, or sent as the actual POST data value in forms.
            label: A user-friendly short label, normally marked for
                translation. Defaults to ``value`` if ``bool(label) == False``.
            description: A user-friendly longer description, normally marked
                for translation. Not required.
            attributename: Attribute name used when the choice is attached
                to a :class:`.ChoicesWithMeta`. Derived from ``value`` when
                omitted.
        """
        self.value = value
        self.label = label or value
        self.description = description
        if attributename is None:
            self.attributename = self._value_to_attributename(value)
        else:
            self.attributename = attributename

    @classmethod
    def get_classpath(cls):
        """Return the full dotted import path of this class."""
        return f'{cls.__module__}.{cls.__name__}'

    @property
    def classpath(self):
        """The full dotted import path of this instance's class."""
        return self.__class__.get_classpath()

    def _value_to_attributename(self, value):
        """Convert ``value`` to an uppercased identifier.

        ``-`` and spaces become ``_``; a leading underscore is prepended when
        the result would start with a digit, so it stays a valid attribute
        name.
        """
        valuestring = str(value)
        attributename = valuestring.upper().replace('-', '_').replace(' ', '_')
        if attributename and attributename[0].isdigit():
            attributename = '_{}'.format(attributename)
        return attributename

    def get_short_label(self):
        """
        Get a short label for the choice.

        Defaults to returning :attr:`~.Choice.label`, but you
        can override this in subclasses.
        """
        return self.label

    def get_long_label(self):
        """
        Get a long label for the choice.

        Generated by joining :attr:`~.Choice.label` and
        :attr:`~.Choice.description` with ``" - "``.
        """
        if self.description:
            return '{} - {}'.format(self.label, self.description)
        return self.label

    def __str__(self):
        # BUGFIX: ``value`` is allowed to be a non-string (e.g. an int, see
        # _value_to_attributename), and ``__str__`` must return ``str`` --
        # returning the raw value raised TypeError for such choices.
        return str(self.value)

    @property
    def translated_label(self):
        """:attr:`label` run through ``gettext`` (imported lazily so Django
        is only required when translation is actually used)."""
        from django.utils.translation import gettext
        return gettext(self.label)

    @property
    def translated_description(self):
        """:attr:`description` run through ``gettext``."""
        from django.utils.translation import gettext
        return gettext(self.description)

    def as_serializable_data(self):
        """Return a serializable dict with translated label/description."""
        return {
            'value': self.value,
            'label': self.translated_label,
            'description': self.translated_description
        }
class ChoicesWithMeta(object):
    """
    An object oriented structure for model/form field choices.

    Unlike the simple ``(value, label)`` tuple used in Django, this
    miniframework supports more metadata because the choices are defined
    through :class:`.Choice` (which you can subclass and extend).

    Compatible with the ``choices``-attribute used in Django
    (I.E.: django.forms.ChoiceField, django.forms.CharField, ...)
    through the :meth:`~.ChoicesWithMeta.iter_as_django_choices_short`
    and :meth:`~.ChoicesWithMeta.iter_as_django_choices_long` methods.

    Examples:

        Usage in a Django model::

            class User(models.Model):
                USERTYPE_CHOICES = choices_with_meta.ChoicesWithMeta(
                    choices_with_meta.Choice(value='normal', label='Normal'),
                    choices_with_meta.Choice(value='editor', label='Editor'),
                    choices_with_meta.Choice(value='admin', label='Admin')
                )
                usertype = models.CharField(
                    max_length=255,
                    choices=USERTYPE_CHOICES.iter_as_django_choices_short,
                    default=USERTYPE_CHOICES.NORMAL
                )

        Getting choices by value, generated attribute name or index::

            User.USERTYPE_CHOICES['admin'].label
            User.USERTYPE_CHOICES.ADMIN.label
            User.USERTYPE_CHOICES.get_choice_at_index(1).label
            User.USERTYPE_CHOICES.get_first_choice()

        Membership tests, iteration and value listings::

            'somechoice' in User.USERTYPE_CHOICES
            for choice in User.USERTYPE_CHOICES.iterchoices(): ...
            for value in User.USERTYPE_CHOICES.itervalues(): ...
            User.USERTYPE_CHOICES.get_values_as_list()
            User.USERTYPE_CHOICES.get_values_as_commaseparated_string()
    """
    def __init__(self, *choices):
        # Ordered mapping of value -> Choice, in added order.
        self.choices = OrderedDict()
        for choice in self.get_default_choices():
            self.add(choice)
        for choice in choices:
            self.add(choice)

    def get_by_value(self, value, fallback=None):
        """
        Get the :class:`.Choice` with the provided ``value``.

        Args:
            value: The value to lookup.
            fallback: Fallback value if ``value`` is not registered as a choice value.

        Returns:
            .Choice: The Choice matching the value if it exists, otherwise ``fallback``.
        """
        return self.choices.get(value, fallback)

    def __getitem__(self, value):
        """
        Get the :class:`.Choice` with the provided ``value``.

        Raises:
            KeyError: If no :class:`.Choice` with the provided
                ``value`` is in the ChoicesWithMeta.
        """
        return self.choices[value]

    def __contains__(self, value):
        """
        Check if ``value`` is one of the choices.

        Returns:
            bool: True if a :class:`.Choice` with the provided ``value``
            is in the ChoicesWithMeta.
        """
        return value in self.choices

    def __len__(self):
        """
        Get the number of choices.
        """
        return len(self.choices)

    def get_default_choices(self):
        """
        Hook returning default choices that are added before any constructor
        arguments.

        Subclass and override this when a set of default choices makes sense
        for a field. Developers can still override the method for special
        cases, but the defaults will normally be included -- most useful when
        the choices are a Django setting that users of your app can override.

        Returns:
            iterable: An iterable of :class:`.Choice` objects.
        """
        return []

    def get_choice_at_index(self, index):
        """
        Args:
            index: The numeric index of the choice.

        Raises:
            IndexError: If the index does not correspond to a choice.

        Returns:
            .Choice: The :class:`.Choice` at the provided index.
        """
        keys = list(self.choices.keys())
        value = keys[index]
        return self.choices[value]

    def get_first_choice(self):
        """
        Uses :meth:`.get_choice_at_index` to get the first (index=0) choice.

        If there is no first choice, ``None`` is returned.
        """
        try:
            return self.get_choice_at_index(0)
        except IndexError:
            return None

    def add(self, choice):
        """
        Add a :class:`.Choice`.

        Args:
            choice: A :class:`.Choice` object.

        Raises:
            KeyError: If a choice with the same value was already added.
        """
        if choice.value in self.choices:
            # NOTE(review): "alredy" typo kept verbatim -- callers may match
            # the message text.
            raise KeyError('A choice with value "{}" alredy exists.'.format(choice.value))
        self.choices[choice.value] = choice
        # Also expose the choice as an attribute, e.g. ``self.MY_VALUE``.
        setattr(self, choice.attributename, choice)

    def remove(self, value):
        """
        Remove a choice by value.

        Args:
            value: The value to remove from the ChoicesWithMeta.

        Raises:
            KeyError: If ``value`` is not a registered choice value.
        """
        if value in self.choices:
            attributename = self.choices[value].attributename
            delattr(self, attributename)
            del self.choices[value]
        else:
            raise KeyError('{value} is not a valid choice value.'.format(value=value))

    def itervalues(self):
        """
        Iterate over the choices yielding only the values in the added order.
        """
        return self.choices.keys()

    def iterchoices(self, *extra_choices):
        """
        Iterate over the choices yielding :class:`.Choice` objects in the added order.

        Args:
            *extra_choices: Zero or more extra choices (:class:`.Choice`
                objects) to yield at the end. Useful when an invalid current
                value must still be shown alongside the valid choices.
        """
        for choice in self.choices.values():
            yield choice
        for choice in extra_choices:
            yield choice

    def iter_as_django_choices_short(self, *extra_choices):
        """
        Iterate over the choices as a Django choices list, where each item is
        a ``(value, label)``-tuple. Uses :meth:`.Choice.get_short_label` to
        create the ``label``.

        Args:
            *extra_choices: See :meth:`.iterchoices`.
        """
        for choice in self.iterchoices(*extra_choices):
            yield choice.value, choice.get_short_label()

    def iter_as_django_choices_long(self, *extra_choices):
        """
        Iterate over the choices as a Django choices list, where each item is
        a ``(value, label)``-tuple. Uses :meth:`.Choice.get_long_label` to
        create the ``label``.

        Args:
            *extra_choices: See :meth:`.iterchoices`.
        """
        for choice in self.iterchoices(*extra_choices):
            yield choice.value, choice.get_long_label()

    def get_values_as_list(self):
        """
        Get the values of all choices in the added order as a list.
        """
        return list(self.itervalues())

    def get_values_as_commaseparated_string(self):
        """
        Get the values as a comma-separated string.

        Perfect for showing available choices in error messages.

        Returns:
            String with all the values separated by comma.
        """
        # BUGFIX: choice values are not required to be strings (Choice
        # explicitly supports e.g. int values), and ``str.join`` raises
        # TypeError on non-string items -- stringify each value first.
        return ', '.join(str(value) for value in self.itervalues())

    def __str__(self):
        return 'ChoicesWithMeta({})'.format(self.get_values_as_commaseparated_string())
| 33.048387 | 99 | 0.602652 | from collections import OrderedDict
class Choice(object):
def __init__(self, value, label=None, description='',
attributename=None):
self.value = value
self.label = label or value
self.description = description
if attributename is None:
self.attributename = self._value_to_attributename(value)
else:
self.attributename = attributename
@classmethod
def get_classpath(cls):
return f'{cls.__module__}.{cls.__name__}'
@property
def classpath(self):
return self.__class__.get_classpath()
def _value_to_attributename(self, value):
valuestring = str(value)
attributename = valuestring.upper().replace('-', '_').replace(' ', '_')
if len(attributename) > 0 and attributename[0].isdigit():
attributename = '_{}'.format(attributename)
return attributename
def get_short_label(self):
return self.label
def get_long_label(self):
if self.description:
return '{} - {}'.format(self.label, self.description)
else:
return self.label
def __str__(self):
return self.value
@property
def translated_label(self):
from django.utils.translation import gettext
return gettext(self.label)
@property
def translated_description(self):
from django.utils.translation import gettext
return gettext(self.description)
def as_serializable_data(self):
return {
'value': self.value,
'label': self.translated_label,
'description': self.translated_description
}
class ChoicesWithMeta(object):
def __init__(self, *choices):
self.choices = OrderedDict()
for choice in self.get_default_choices():
self.add(choice)
for choice in choices:
self.add(choice)
def get_by_value(self, value, fallback=None):
return self.choices.get(value, fallback)
def __getitem__(self, value):
return self.choices[value]
def __contains__(self, value):
return value in self.choices
def __len__(self):
return len(self.choices)
def get_default_choices(self):
return []
def get_choice_at_index(self, index):
keys = list(self.choices.keys())
value = keys[index]
return self.choices[value]
def get_first_choice(self):
try:
return self.get_choice_at_index(0)
except IndexError:
return None
def add(self, choice):
if choice.value in self.choices:
raise KeyError('A choice with value "{}" alredy exists.'.format(choice.value))
self.choices[choice.value] = choice
setattr(self, choice.attributename, choice)
def remove(self, value):
if value in self.choices:
attributename = self.choices[value].attributename
delattr(self, attributename)
del self.choices[value]
else:
raise KeyError('{value} is not a valid choice value.'.format(value=value))
def itervalues(self):
return self.choices.keys()
def iterchoices(self, *extra_choices):
for choice in self.choices.values():
yield choice
for choice in extra_choices:
yield choice
def iter_as_django_choices_short(self, *extra_choices):
for choice in self.iterchoices(*extra_choices):
yield choice.value, choice.get_short_label()
def iter_as_django_choices_long(self, *extra_choices):
for choice in self.iterchoices(*extra_choices):
yield choice.value, choice.get_long_label()
def get_values_as_list(self):
return list(self.itervalues())
def get_values_as_commaseparated_string(self):
return ', '.join(self.itervalues())
def __str__(self):
return 'ChoicesWithMeta({})'.format(self.get_values_as_commaseparated_string())
| true | true |
f7ffcf3141c9a7705268082eb3e32b2c0285b192 | 840 | py | Python | SmallObjectAugmentation/Helpers.py | riciche/SimpleCVReproduction | 4075de39f9c61f1359668a413f6a5d98903fcf97 | [
"Apache-2.0"
] | 923 | 2020-01-11T06:36:53.000Z | 2022-03-31T00:26:57.000Z | SmallObjectAugmentation/Helpers.py | riciche/SimpleCVReproduction | 4075de39f9c61f1359668a413f6a5d98903fcf97 | [
"Apache-2.0"
] | 25 | 2020-02-27T08:35:46.000Z | 2022-01-25T08:54:19.000Z | SmallObjectAugmentation/Helpers.py | riciche/SimpleCVReproduction | 4075de39f9c61f1359668a413f6a5d98903fcf97 | [
"Apache-2.0"
] | 262 | 2020-01-02T02:19:40.000Z | 2022-03-23T04:56:16.000Z | import glob
import cv2 as cv2
import numpy as np
import matplotlib.pyplot as plt
# import random
import math
from tqdm import tqdm
def load_images(path):
    """Load every image matching the glob pattern ``path`` as an RGB array.

    Args:
        path: Glob pattern, e.g. ``"images/*.jpg"``.

    Returns:
        list: One RGB image array per matched file, in glob order.
    """
    # Iterate the matched paths directly instead of indexing through
    # range(len(...)) -- same order, clearer intent.  The commented-out
    # resize-to-1280x720 dead code was dropped.
    image_list = []
    for image_path in glob.glob(path):
        # cv2.imread returns BGR; convert so downstream code sees RGB.
        image_list.append(cv2.cvtColor(cv2.imread(image_path), cv2.COLOR_BGR2RGB))
    return image_list
def read_images(path):
    """Return the list of file paths matching the glob pattern ``path``."""
    return glob.glob(path)
def load_images_from_path(path):
    """Load each file in ``path`` (an iterable of file paths) as an RGB image.

    Shows a tqdm progress bar while loading; the loaded data is unchanged.
    """
    return [cv2.cvtColor(cv2.imread(p), cv2.COLOR_BGR2RGB) for p in tqdm(path)]
def replace_labels(path):
    """Map image paths to their label-file paths ('.jpg' -> '.txt').

    Args:
        path: Iterable of image file paths.

    Returns:
        list: Paths with every '.jpg' occurrence replaced by '.txt'.
    """
    # Comprehension replaces the manual append loop (same order, same result).
    # NOTE(review): str.replace swaps *every* '.jpg' occurrence, not only the
    # extension -- preserved as-is in case callers rely on it.
    return [p.replace('.jpg', '.txt') for p in path]
| 21.538462 | 74 | 0.672619 | import glob
import cv2 as cv2
import numpy as np
import matplotlib.pyplot as plt
import math
from tqdm import tqdm
def load_images(path):
image_list = []
images = glob.glob(path)
for index in range(len(images)):
image = cv2.cvtColor(cv2.imread(images[index]), cv2.COLOR_BGR2RGB)
image_list.append(image)
return image_list
def read_images(path):
images = glob.glob(path)
return images
def load_images_from_path(path):
image_list = []
for p in tqdm(path):
image = cv2.cvtColor(cv2.imread(p), cv2.COLOR_BGR2RGB)
image_list.append(image)
return image_list
def replace_labels(path):
labelpath = []
for p in path:
labelpath.append(p.replace('.jpg', '.txt'))
return labelpath
| true | true |
f7ffd0288f1b19a06643b308e542a6f30c83599e | 1,892 | py | Python | tests/python/unittest/test_container.py | jheo4/incubator-tvm | c4c61cb766608fb2f0fd8c9facc480a43afed3f5 | [
"Apache-2.0"
] | 3 | 2020-03-12T10:25:51.000Z | 2020-08-05T05:36:23.000Z | tests/python/unittest/test_container.py | jheo4/incubator-tvm | c4c61cb766608fb2f0fd8c9facc480a43afed3f5 | [
"Apache-2.0"
] | null | null | null | tests/python/unittest/test_container.py | jheo4/incubator-tvm | c4c61cb766608fb2f0fd8c9facc480a43afed3f5 | [
"Apache-2.0"
] | 1 | 2018-10-19T18:11:41.000Z | 2018-10-19T18:11:41.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import nd, relay
from tvm.runtime import container as _container
def test_adt_constructor():
    """An ADT built from two NDArray fields exposes tag, length and fields."""
    arr = nd.array([1, 2, 3])
    y = _container.ADT(0, [arr, arr])
    assert len(y) == 2
    assert isinstance(y, _container.ADT)
    # BUGFIX: this comparison was a bare expression whose result was
    # discarded, so it never actually checked anything.
    # NOTE(review): assumes NDArray equality here is identity/True for the
    # stored handle -- if it is elementwise, compare .asnumpy() instead.
    assert y[0:1][-1] == arr
    assert y.tag == 0
    assert isinstance(arr, nd.NDArray)
def test_tuple_object():
    """A tuple_object ADT can be passed as the argument to a relay function."""
    # Relay function taking a 2-tuple of int32 scalars, returning element 0.
    x = relay.var(
        'x',
        type_annotation=relay.ty.TupleType([
            relay.ty.TensorType((), 'int32'),
            relay.ty.TensorType((), 'int32')
        ]))
    fn = relay.Function([x], relay.expr.TupleGetItem(x, 0))
    mod = tvm.IRModule.from_expr(fn)
    # Execute on the VM executor, CPU context, LLVM target.
    exe = relay.create_executor(
        kind="vm", mod=mod, ctx=nd.cpu(), target="llvm")
    f = exe.evaluate()
    value_tuple = _container.tuple_object(
        [nd.array(np.array(11)),
         nd.array(np.array(12))])
    # pass an ADT object to evaluate
    out = f(value_tuple)
    # TupleGetItem(x, 0) must return the first element, 11.
    tvm.testing.assert_allclose(out.asnumpy(), np.array(11))
if __name__ == "__main__":
    # Allow running this test module directly, outside a test runner.
    test_adt_constructor()
    test_tuple_object()
| 31.016393 | 62 | 0.679704 |
import numpy as np
import tvm
from tvm import nd, relay
from tvm.runtime import container as _container
def test_adt_constructor():
arr = nd.array([1, 2, 3])
fields = [arr, arr]
y = _container.ADT(0, [arr, arr])
assert len(y) == 2
assert isinstance(y, _container.ADT)
y[0:1][-1] == arr
assert y.tag == 0
assert isinstance(arr, nd.NDArray)
def test_tuple_object():
x = relay.var(
'x',
type_annotation=relay.ty.TupleType([
relay.ty.TensorType((), 'int32'),
relay.ty.TensorType((), 'int32')
]))
fn = relay.Function([x], relay.expr.TupleGetItem(x, 0))
mod = tvm.IRModule.from_expr(fn)
exe = relay.create_executor(
kind="vm", mod=mod, ctx=nd.cpu(), target="llvm")
f = exe.evaluate()
value_tuple = _container.tuple_object(
[nd.array(np.array(11)),
nd.array(np.array(12))])
out = f(value_tuple)
tvm.testing.assert_allclose(out.asnumpy(), np.array(11))
if __name__ == "__main__":
test_adt_constructor()
test_tuple_object()
| true | true |
f7ffd02d9ed661742b46d73f7b22da07a388b08e | 435 | py | Python | tests/storage_api_methods/test_exists.py | Stuvros/django-selectel-storage | 076f7e3c58d9391e2e7e27feb0526736d101c2b5 | [
"MIT"
] | 27 | 2015-01-28T09:17:09.000Z | 2021-06-21T20:48:01.000Z | tests/storage_api_methods/test_exists.py | Stuvros/django-selectel-storage | 076f7e3c58d9391e2e7e27feb0526736d101c2b5 | [
"MIT"
] | 9 | 2015-08-07T15:03:00.000Z | 2020-05-01T04:54:02.000Z | tests/storage_api_methods/test_exists.py | Stuvros/django-selectel-storage | 076f7e3c58d9391e2e7e27feb0526736d101c2b5 | [
"MIT"
] | 19 | 2015-05-20T14:16:25.000Z | 2022-03-31T06:31:59.000Z | import uuid
def test_exists_returns_false_when_the_file_does_not_exist(selectel_storage):
    # A random UUID path prefix guarantees the file was never created.
    missing_path = '{0}/non-exist.txt'.format(uuid.uuid4())
    assert not selectel_storage.exists(missing_path)
def test_exists_returns_true_when_the_file_exists(selectel_storage, create_file):
    # ``create_file`` stores the fixture file and returns its storage path.
    uploaded_path = create_file('exists.txt', 'Yup, it\'s exists!')
    assert selectel_storage.exists(uploaded_path)
| 29 | 77 | 0.772414 | import uuid
def test_exists_returns_false_when_the_file_does_not_exist(selectel_storage):
non_existing_file = '{0}/non-exist.txt'.format(uuid.uuid4())
assert not selectel_storage.exists(non_existing_file)
def test_exists_returns_true_when_the_file_exists(
selectel_storage,
create_file
):
existing_file = create_file('exists.txt', 'Yup, it\'s exists!')
assert selectel_storage.exists(existing_file)
| true | true |
f7ffd18e7769c69a9cf1e7e2015ee21596bf701b | 309 | py | Python | Others/code_festival/code-thanks-festival-2018-open/b/main.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | 2 | 2020-06-12T09:54:23.000Z | 2021-05-04T01:34:07.000Z | Others/code_festival/code-thanks-festival-2018-open/b/main.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | 961 | 2020-06-23T07:26:22.000Z | 2022-03-31T21:34:52.000Z | Others/code_festival/code-thanks-festival-2018-open/b/main.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
def main():
    """Read integers ``x y`` from stdin and print "Yes" or "No".

    Prints "Yes" iff both ``3*x - y`` and ``3*y - x`` are non-negative
    multiples of 8 -- the feasibility condition this contest problem
    reduces to (see the original task statement for the derivation).
    """
    import sys
    # Read the single input line directly; the original rebound
    # ``input = sys.stdin.readline``, shadowing the ``input`` builtin.
    x, y = map(int, sys.stdin.readline().split())
    a = 3 * x - y
    b = -x + 3 * y
    if a >= 0 and a % 8 == 0 and b >= 0 and b % 8 == 0:
        print("Yes")
    else:
        print("No")
if __name__ == "__main__":
    # Script entry point (AtCoder-style single-run solution).
    main()
| 14.045455 | 55 | 0.436893 |
def main():
import sys
input = sys.stdin.readline
x, y = map(int, input().split())
a = 3 * x - y
b = -x + 3 * y
if a >= 0 and a % 8 == 0 and b >= 0 and b % 8 == 0:
print("Yes")
else:
print("No")
if __name__ == "__main__":
main()
| true | true |
f7ffd1d561da0917f45b033bfcbc3fd07f8c6797 | 3,983 | py | Python | synapse/events/validator.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | [
"Apache-2.0"
] | null | null | null | synapse/events/validator.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | [
"Apache-2.0"
] | null | null | null | synapse/events/validator.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import string_types
from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import EventFormatVersions
from synapse.types import EventID, RoomID, UserID
class EventValidator(object):
    """Sanity-checks that events have roughly the expected shape before
    further processing."""

    def validate_new(self, event):
        """Validates the event has roughly the right format

        Args:
            event (FrozenEvent)

        Raises:
            SynapseError: (400) if a required key is missing, a string field
                has the wrong type, or an alias is too long.
        """
        self.validate_builder(event)
        if event.format_version == EventFormatVersions.V1:
            # V1-format events carry an event_id attribute; parsing verifies
            # that it is well formed.
            EventID.from_string(event.event_id)
        required = [
            "auth_events",
            "content",
            "hashes",
            "origin",
            "prev_events",
            "sender",
            "type",
        ]
        for k in required:
            if not hasattr(event, k):
                raise SynapseError(400, "Event does not have key %s" % (k,))
        # Check that the following keys have string values
        event_strings = ["origin"]
        for s in event_strings:
            if not isinstance(getattr(event, s), string_types):
                raise SynapseError(400, "'%s' not a string type" % (s,))
        if event.type == EventTypes.Aliases:
            if "aliases" in event.content:
                for alias in event.content["aliases"]:
                    if len(alias) > MAX_ALIAS_LENGTH:
                        raise SynapseError(
                            400,
                            (
                                "Can't create aliases longer than"
                                " %d characters" % (MAX_ALIAS_LENGTH,)
                            ),
                            Codes.INVALID_PARAM,
                        )

    def validate_builder(self, event):
        """Validates that the builder/event has roughly the right format. Only
        checks values that we expect a proto event to have, rather than all the
        fields an event would have

        Args:
            event (EventBuilder|FrozenEvent)
        """
        strings = ["room_id", "sender", "type"]
        if hasattr(event, "state_key"):
            strings.append("state_key")
        for s in strings:
            if not isinstance(getattr(event, s), string_types):
                # FIX: reworded from "Not '%s' a string type" so the message
                # matches the phrasing of the identical checks elsewhere in
                # this class.
                raise SynapseError(400, "'%s' not a string type" % (s,))
        RoomID.from_string(event.room_id)
        UserID.from_string(event.sender)
        if event.type == EventTypes.Message:
            strings = ["body", "msgtype"]
            self._ensure_strings(event.content, strings)
        elif event.type == EventTypes.Topic:
            self._ensure_strings(event.content, ["topic"])
        elif event.type == EventTypes.Name:
            self._ensure_strings(event.content, ["name"])
        elif event.type == EventTypes.Member:
            if "membership" not in event.content:
                # NOTE(review): "has not membership key" reads oddly but is
                # kept verbatim in case callers/tests match this message.
                raise SynapseError(400, "Content has not membership key")
            if event.content["membership"] not in Membership.LIST:
                raise SynapseError(400, "Invalid membership key")

    def _ensure_strings(self, d, keys):
        """Raise SynapseError(400) unless every key in ``keys`` is present in
        ``d`` and maps to a string value."""
        for s in keys:
            if s not in d:
                raise SynapseError(400, "'%s' not in content" % (s,))
            if not isinstance(d[s], string_types):
                raise SynapseError(400, "'%s' not a string type" % (s,))
| 34.634783 | 79 | 0.584233 |
from six import string_types
from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import EventFormatVersions
from synapse.types import EventID, RoomID, UserID
class EventValidator(object):
def validate_new(self, event):
self.validate_builder(event)
if event.format_version == EventFormatVersions.V1:
EventID.from_string(event.event_id)
required = [
"auth_events",
"content",
"hashes",
"origin",
"prev_events",
"sender",
"type",
]
for k in required:
if not hasattr(event, k):
raise SynapseError(400, "Event does not have key %s" % (k,))
event_strings = ["origin"]
for s in event_strings:
if not isinstance(getattr(event, s), string_types):
raise SynapseError(400, "'%s' not a string type" % (s,))
if event.type == EventTypes.Aliases:
if "aliases" in event.content:
for alias in event.content["aliases"]:
if len(alias) > MAX_ALIAS_LENGTH:
raise SynapseError(
400,
(
"Can't create aliases longer than"
" %d characters" % (MAX_ALIAS_LENGTH,)
),
Codes.INVALID_PARAM,
)
def validate_builder(self, event):
strings = ["room_id", "sender", "type"]
if hasattr(event, "state_key"):
strings.append("state_key")
for s in strings:
if not isinstance(getattr(event, s), string_types):
raise SynapseError(400, "Not '%s' a string type" % (s,))
RoomID.from_string(event.room_id)
UserID.from_string(event.sender)
if event.type == EventTypes.Message:
strings = ["body", "msgtype"]
self._ensure_strings(event.content, strings)
elif event.type == EventTypes.Topic:
self._ensure_strings(event.content, ["topic"])
elif event.type == EventTypes.Name:
self._ensure_strings(event.content, ["name"])
elif event.type == EventTypes.Member:
if "membership" not in event.content:
raise SynapseError(400, "Content has not membership key")
if event.content["membership"] not in Membership.LIST:
raise SynapseError(400, "Invalid membership key")
def _ensure_strings(self, d, keys):
for s in keys:
if s not in d:
raise SynapseError(400, "'%s' not in content" % (s,))
if not isinstance(d[s], string_types):
raise SynapseError(400, "'%s' not a string type" % (s,))
| true | true |
f7ffd352b51ea5078d9b782189356b229c3fc8e5 | 858 | py | Python | Mp4ToWav.py | louisondumont/Emotion-Classification-Ravdess | 9c5317cde9d3c440e050018db124f51d1f56245a | [
"MIT"
] | null | null | null | Mp4ToWav.py | louisondumont/Emotion-Classification-Ravdess | 9c5317cde9d3c440e050018db124f51d1f56245a | [
"MIT"
] | 12 | 2020-11-13T19:04:46.000Z | 2022-03-12T00:45:54.000Z | Mp4ToWav.py | louisondumont/Emotion-Classification-Ravdess | 9c5317cde9d3c440e050018db124f51d1f56245a | [
"MIT"
] | null | null | null | import os
import subprocess
# Loop through the filesystem
for root, dirs, files in os.walk("./folder", topdown=False):
# Loop through files
for name in files:
# Consider only mp4
if name.endswith('.mp4'):
# Using ffmpeg to convert the mp4 in wav
# Example command: "ffmpeg -i C:/test.mp4 -ab 160k -ac 2 -ar 44100 -vn audio.wav"
command = "ffmpeg -i /Users/marcogdepinto/Desktop" + root[1:] + "/" + name + " " + "-ab 160k -ac 2 -ar 44100 -vn /Users/marcogdepinto/Desktop/ConvertedFolder/" + name[:-3] + "wav"
#print(command)
# Execute conversion
try:
subprocess.call(command, shell=True)
# Skip the file in case of error
except ValueError:
continue
| 35.75 | 192 | 0.538462 | import os
import subprocess
for root, dirs, files in os.walk("./folder", topdown=False):
for name in files:
if name.endswith('.mp4'):
command = "ffmpeg -i /Users/marcogdepinto/Desktop" + root[1:] + "/" + name + " " + "-ab 160k -ac 2 -ar 44100 -vn /Users/marcogdepinto/Desktop/ConvertedFolder/" + name[:-3] + "wav"
try:
subprocess.call(command, shell=True)
except ValueError:
continue
| true | true |
f7ffd455683de04fed0cdc139e75533fc64de951 | 215 | py | Python | devel/apps/ik/models/__init__.py | riscoscloverleaf/chatcube | a7184ef76108f90a74a88d3183a3d21c1249a0f5 | [
"MIT"
] | null | null | null | devel/apps/ik/models/__init__.py | riscoscloverleaf/chatcube | a7184ef76108f90a74a88d3183a3d21c1249a0f5 | [
"MIT"
] | null | null | null | devel/apps/ik/models/__init__.py | riscoscloverleaf/chatcube | a7184ef76108f90a74a88d3183a3d21c1249a0f5 | [
"MIT"
] | null | null | null | from .members import Member, MemberOnline
from .member_propertis import Country
from .member_settings import MemberSettings
from .globalvars import GlobalVars
from .chat import Chat, ChatMember, Message, OpenedChat
| 35.833333 | 55 | 0.846512 | from .members import Member, MemberOnline
from .member_propertis import Country
from .member_settings import MemberSettings
from .globalvars import GlobalVars
from .chat import Chat, ChatMember, Message, OpenedChat
| true | true |
f7ffd4f2c78c5417120fbdc7b598952e92567bc9 | 3,070 | py | Python | nagnodeconfig.py | DheerajCidda/Nagios-Redfish-API-Integration | 7a433fc670e5367ccfb30711bbdcd964f5e1e4d1 | [
"BSD-3-Clause"
] | null | null | null | nagnodeconfig.py | DheerajCidda/Nagios-Redfish-API-Integration | 7a433fc670e5367ccfb30711bbdcd964f5e1e4d1 | [
"BSD-3-Clause"
] | null | null | null | nagnodeconfig.py | DheerajCidda/Nagios-Redfish-API-Integration | 7a433fc670e5367ccfb30711bbdcd964f5e1e4d1 | [
"BSD-3-Clause"
] | null | null | null | import os
import sys
import time
import configparser
config = configparser.ConfigParser()
config.read('nagios_node_config.conf')
hostList = config['new_nodes']['new_ip_list']
if hostList == "":
print ("\nNo new Redfish enabled node available for Nagios configuration.\n")
#print (config['config_nodes']['config_ip_list'])
sys.exit(0)
else:
print ("\nThe following Redfish-enabled nodes are found for Nagios configuration:\n")
print (hostList)
print("\n")
file = open(r"/usr/local/nagios/etc/objects/hosts.cfg","w")
hosts = hostList.split(',')
for host in hosts:
name, ip = host.split(':')
#HOST Informaton
print ("\n\nConfiguring Redfish node: ["+name+"] for Nagios monitoring\n\n")
file.write("\ndefine host{")
file.write("\n\tuse\tlinux-server")
file.write("\n\thost_name\t"+name)
file.write("\n\talias\tlocalhost")
file.write("\n\taddress\t"+ip)
file.write("\n\t}")
#Services/Hardware components information
# BMC Check
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck bmc health")
file.write("\ncheck_command\tcheck-bmc-health")
file.write("\n\t}")
# CPU Check
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck cpu health")
file.write("\ncheck_command\tcheck-cpu-health")
file.write("\n\t}")
# Memory Check
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck memory health")
file.write("\ncheck_command\tcheck-memory-health")
file.write("\n\t}")
# Storage Check
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck storage health")
file.write("\ncheck_command\tcheck-storage-health")
file.write("\n\t}")
time.sleep(1)
file.close()
# If this is first time that Redfish node(s) are being configured for Nagios
if config['config_nodes']['config_ip_list'] == "":
config['config_nodes']['config_ip_list'] = hostList
else:
config['config_nodes']['config_ip_list']=config['config_nodes']['config_ip_list']+','+new_ip_list
# Set new_ip_list empty
config['new_nodes']['new_ip_list']=''
with open('nagios_node_config.conf', 'w') as configfile:
config.write(configfile)
# stop Apache and Nagios services
os.system("sudo service httpd stop")
os.system("sudo service nagios stop")
# Start Apache and Nagios Services with new configuration
os.system("sudo service httpd start")
os.system("sudo service nagios start")
| 32.315789 | 105 | 0.628013 | import os
import sys
import time
import configparser
config = configparser.ConfigParser()
config.read('nagios_node_config.conf')
hostList = config['new_nodes']['new_ip_list']
if hostList == "":
print ("\nNo new Redfish enabled node available for Nagios configuration.\n")
sys.exit(0)
else:
print ("\nThe following Redfish-enabled nodes are found for Nagios configuration:\n")
print (hostList)
print("\n")
file = open(r"/usr/local/nagios/etc/objects/hosts.cfg","w")
hosts = hostList.split(',')
for host in hosts:
name, ip = host.split(':')
print ("\n\nConfiguring Redfish node: ["+name+"] for Nagios monitoring\n\n")
file.write("\ndefine host{")
file.write("\n\tuse\tlinux-server")
file.write("\n\thost_name\t"+name)
file.write("\n\talias\tlocalhost")
file.write("\n\taddress\t"+ip)
file.write("\n\t}")
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck bmc health")
file.write("\ncheck_command\tcheck-bmc-health")
file.write("\n\t}")
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck cpu health")
file.write("\ncheck_command\tcheck-cpu-health")
file.write("\n\t}")
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck memory health")
file.write("\ncheck_command\tcheck-memory-health")
file.write("\n\t}")
file.write("\ndefine service{")
file.write("\n\tuse\tlocal-service")
file.write("\n\thost_name\t"+name)
file.write("\nservice_description\tcheck storage health")
file.write("\ncheck_command\tcheck-storage-health")
file.write("\n\t}")
time.sleep(1)
file.close()
if config['config_nodes']['config_ip_list'] == "":
config['config_nodes']['config_ip_list'] = hostList
else:
config['config_nodes']['config_ip_list']=config['config_nodes']['config_ip_list']+','+new_ip_list
config['new_nodes']['new_ip_list']=''
with open('nagios_node_config.conf', 'w') as configfile:
config.write(configfile)
os.system("sudo service httpd stop")
os.system("sudo service nagios stop")
os.system("sudo service httpd start")
os.system("sudo service nagios start")
| true | true |
f7ffd5c942d5daba3c1b542dd0e57743b1dab9fb | 796 | py | Python | migrations/versions/2b46478eeb50_.py | feasiblereplica/kindlebox | 84c7b75eea013e04780d7bdbb77dfc387cebf75a | [
"MIT"
] | 136 | 2015-01-17T22:07:18.000Z | 2022-02-20T05:21:57.000Z | migrations/versions/2b46478eeb50_.py | feasiblereplica/kindlebox | 84c7b75eea013e04780d7bdbb77dfc387cebf75a | [
"MIT"
] | 16 | 2015-02-18T21:56:17.000Z | 2020-01-15T11:48:22.000Z | migrations/versions/2b46478eeb50_.py | feasiblereplica/kindlebox | 84c7b75eea013e04780d7bdbb77dfc387cebf75a | [
"MIT"
] | 19 | 2015-02-19T10:43:53.000Z | 2020-04-23T15:03:36.000Z | """empty message
Revision ID: 2b46478eeb50
Revises: d7fc2abf989
Create Date: 2014-08-10 23:38:50.200231
"""
# revision identifiers, used by Alembic.
revision = '2b46478eeb50'
down_revision = 'd7fc2abf989'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Widen ``book.book_hash`` from INTEGER to Text."""
    op.alter_column(
        'book',
        'book_hash',
        existing_type=sa.INTEGER(),
        type_=sa.Text(),
        existing_nullable=True,
    )
def downgrade():
    """Revert ``book.book_hash`` from Text back to INTEGER."""
    op.alter_column(
        'book',
        'book_hash',
        existing_type=sa.Text(),
        type_=sa.INTEGER(),
        existing_nullable=True,
    )
| 24.121212 | 63 | 0.629397 |
revision = '2b46478eeb50'
down_revision = 'd7fc2abf989'
from alembic import op
import sqlalchemy as sa
def upgrade():
existing_nullable=True)
| true | true |
f7ffd63a9278cb16add3189d7ab37fae4f0662fe | 3,408 | py | Python | Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/reports/forms.py | Yeva9/ITC-projects | 19e967d656c86c64f04cc1ffbe03540f97c6eb34 | [
"MIT"
] | null | null | null | Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/reports/forms.py | Yeva9/ITC-projects | 19e967d656c86c64f04cc1ffbe03540f97c6eb34 | [
"MIT"
] | null | null | null | Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/reports/forms.py | Yeva9/ITC-projects | 19e967d656c86c64f04cc1ffbe03540f97c6eb34 | [
"MIT"
] | null | null | null | from datetime import datetime
from django import forms
from apps.reports.models import Report
from apps.tasks.forms import DateInput
from apps.upload.models import Upload
from .helper import get_selected_date_list
from apps.constants import General, Errors, Style, ReportsFields
from apps.audit_trail.models import AuditTrail
class DateForm(forms.ModelForm):
    """
    Report export form: the user picks an uploaded data set and a
    start/end date range.

    Fields:
        - upload_objects_for_selection: ModelChoiceField over Upload records
        - start_date / end_date: model fields rendered with DateInput
          (via Meta.widgets)
    """
    # Dropdown of all Upload records; CSS class/style come from constants
    upload_objects_for_selection = forms.ModelChoiceField(
        queryset=Upload.objects.all(),
        widget=forms.Select(
            attrs={
                Style.CLASS: ReportsFields.CSS_FOR_DROPDOWN,
                Style.STYLE: Style.SELECT_STYLE
            })
    )
    class Meta:
        model = Report
        fields = [General.START_DATE,
                  General.END_DATE]
        widgets = {
            General.START_DATE: DateInput(
                attrs=ReportsFields.MIN_MAX_DATES_OF_DATE_DROPDOWN
            ),
            General.END_DATE: DateInput(
                attrs=ReportsFields.MIN_MAX_DATES_OF_DATE_DROPDOWN
            ),
        }
    def clean(self):
        """
        Cross-field validation for the export form.

        Checks that start_date <= end_date and that the selected range
        overlaps at least one date key present in the chosen Upload's
        data; on failure, records a form error and writes an AuditTrail
        log entry.

        Returns:
            dict: the cleaned form data
        """
        super().clean()
        cleaned_data = self.cleaned_data
        # NOTE(review): .get() may return None if field validation failed;
        # the start_date > end_date comparison below would then raise
        # TypeError — confirm both fields are required upstream.
        start_date = cleaned_data.get(General.START_DATE)
        end_date = cleaned_data.get(General.END_DATE)
        selected_upload_object = cleaned_data[
            ReportsFields.UPLOAD_OBJECTS_FOR_SELECTION]
        # Date keys actually present in the uploaded data set
        existing_date_keys = selected_upload_object.data.keys()
        selected_date_list = get_selected_date_list(selected_upload_object,
                                                    start_date,
                                                    end_date)
        if start_date > end_date:
            # Start after end: flag the form with a conversion error
            self._errors[Errors.CONVERSION_ERROR] = self.error_class(
                [Errors.CONVERSION_ERROR_MESSAGE])
            # NOTE(review): self.name is not a standard ModelForm attribute —
            # verify it is set by the calling view/subclass, otherwise this
            # raises AttributeError.
            log_data_error = AuditTrail.objects.create_log(user=str(self.name),
                                                           event_title="Unsuccesful attempt of exporting report",
                                                           event_description=str(
                                                               self.name) + " tryed to export report with invalide date")
            log_data_error.save()
        elif len(set(selected_date_list) & set(existing_date_keys)) == 0:
            # Selected range does not overlap any date present in the data
            self._errors[Errors.OUT_OF_RANGE_ERROR] = self.error_class(
                [Errors.OUT_OF_RANGE_ERROR_MESSAGE])
            log_data_range_error = AuditTrail.objects.create_log(user=str(self.name),
                                                                 event_title="Unsuccesful attempt of exporting report",
                                                                 event_description=str(
                                                                     self.name) + " tryed to export report with invalide date")
            log_data_range_error.save()
        return cleaned_data
| 42.074074 | 128 | 0.580399 | from datetime import datetime
from django import forms
from apps.reports.models import Report
from apps.tasks.forms import DateInput
from apps.upload.models import Upload
from .helper import get_selected_date_list
from apps.constants import General, Errors, Style, ReportsFields
from apps.audit_trail.models import AuditTrail
class DateForm(forms.ModelForm):
upload_objects_for_selection = forms.ModelChoiceField(
queryset=Upload.objects.all(),
widget=forms.Select(
attrs={
Style.CLASS: ReportsFields.CSS_FOR_DROPDOWN,
Style.STYLE: Style.SELECT_STYLE
})
)
class Meta:
model = Report
fields = [General.START_DATE,
General.END_DATE]
widgets = {
General.START_DATE: DateInput(
attrs=ReportsFields.MIN_MAX_DATES_OF_DATE_DROPDOWN
),
General.END_DATE: DateInput(
attrs=ReportsFields.MIN_MAX_DATES_OF_DATE_DROPDOWN
),
}
def clean(self):
super().clean()
cleaned_data = self.cleaned_data
start_date = cleaned_data.get(General.START_DATE)
end_date = cleaned_data.get(General.END_DATE)
selected_upload_object = cleaned_data[
ReportsFields.UPLOAD_OBJECTS_FOR_SELECTION]
existing_date_keys = selected_upload_object.data.keys()
selected_date_list = get_selected_date_list(selected_upload_object,
start_date,
end_date)
if start_date > end_date:
self._errors[Errors.CONVERSION_ERROR] = self.error_class(
[Errors.CONVERSION_ERROR_MESSAGE])
log_data_error = AuditTrail.objects.create_log(user=str(self.name),
event_title="Unsuccesful attempt of exporting report",
event_description=str(
self.name) + " tryed to export report with invalide date")
log_data_error.save()
elif len(set(selected_date_list) & set(existing_date_keys)) == 0:
self._errors[Errors.OUT_OF_RANGE_ERROR] = self.error_class(
[Errors.OUT_OF_RANGE_ERROR_MESSAGE])
log_data_range_error = AuditTrail.objects.create_log(user=str(self.name),
event_title="Unsuccesful attempt of exporting report",
event_description=str(
self.name) + " tryed to export report with invalide date")
log_data_range_error.save()
return cleaned_data
| true | true |
f7ffd6c33691ba2cf5d9c28f7ee56a9f33f4003a | 56,019 | py | Python | modules/templates/SAFIRE/config.py | waidyanatha/eden | a275ed7d10c2bf8839de86b7ac7c549186fc94b7 | [
"MIT"
] | 1 | 2018-12-25T05:33:36.000Z | 2018-12-25T05:33:36.000Z | modules/templates/SAFIRE/config.py | waidyanatha/eden | a275ed7d10c2bf8839de86b7ac7c549186fc94b7 | [
"MIT"
] | null | null | null | modules/templates/SAFIRE/config.py | waidyanatha/eden | a275ed7d10c2bf8839de86b7ac7c549186fc94b7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from collections import OrderedDict
from gluon import current, URL
from gluon.storage import Storage
def config(settings):
"""
Template settings for SaFiRe: Sahana First Response
http://eden.sahanafoundation.org/wiki/BluePrint/SAFIRE
"""
T = current.T
settings.base.system_name = T("Sahana First Response")
settings.base.system_name_short = T("SAFIRE")
# PrePopulate data
settings.base.prepopulate.append("SAFIRE")
settings.base.prepopulate_demo.append("SAFIRE/Demo")
# Theme (folder to use for views/layout.html)
#settings.base.theme = "SAFIRE"
# Authentication settings
# Should users be allowed to register themselves?
#settings.security.self_registration = False
# Do new users need to verify their email address?
#settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
#settings.auth.registration_requires_approval = True
settings.auth.registration_requests_organisation = True
# Approval emails get sent to all admins
settings.mail.approver = "ADMIN"
settings.auth.registration_link_user_to = {"staff": T("Staff"),
}
settings.auth.registration_link_user_to_default = ["staff"]
# Uncomment to display the Map Legend as a floating DIV
settings.gis.legend = "float"
# Uncomment to Disable the Postcode selector in the LocationSelector
#settings.gis.postcode_selector = False # @ToDo: Vary by country (include in the gis_config!)
# Uncomment to show the Print control:
# http://eden.sahanafoundation.org/wiki/UserGuidelines/Admin/MapPrinting
#settings.gis.print_button = True
# L10n settings
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Security Policy
# http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy
# 1: Simple (default): Global as Reader, Authenticated as Editor
# 2: Editor role required for Update/Delete, unless record owned by session
# 3: Apply Controller ACLs
# 4: Apply both Controller & Function ACLs
# 5: Apply Controller, Function & Table ACLs
# 6: Apply Controller, Function, Table ACLs and Entity Realm
# 7: Apply Controller, Function, Table ACLs and Entity Realm + Hierarchy
# 8: Apply Controller, Function, Table ACLs, Entity Realm + Hierarchy and Delegations
settings.security.policy = 5 # Controller, Function & Table ACLs
# -------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
# Modules menu is defined in modules/eden/menu.py
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = "Home",
restricted = False, # Use ACLs to control access to this module
#access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = "Administration",
#description = "Site Administration",
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = "Administration",
#description = "Site Administration",
module_type = None # No Menu
)),
("errors", Storage(
name_nice = "Ticket Viewer",
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = "Synchronization",
#description = "Synchronization",
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
#("tour", Storage(
# name_nice = T("Guided Tour Functionality"),
# module_type = None,
#)),
#("translate", Storage(
# name_nice = T("Translation Functionality"),
# #description = "Selective translation of strings based on module.",
# module_type = None,
#)),
("gis", Storage(
name_nice = "Map",
#description = "Situation Awareness & Geospatial Analysis",
module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = "Person Registry",
#description = "Central point to record details on People",
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = "Organizations",
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
module_type = 1
)),
("hrm", Storage(
name_nice = "Staff",
#description = "Human Resources Management",
module_type = 2,
)),
("vol", Storage(
name_nice = T("Volunteers"),
#description = "Human Resources Management",
module_type = 2,
)),
("cms", Storage(
name_nice = "Content Management",
#description = "Content Management System",
module_type = 10,
)),
("doc", Storage(
name_nice = "Documents",
#description = "A library of digital resources, such as photos, documents and reports",
module_type = 10,
)),
("msg", Storage(
name_nice = "Messaging",
#description = "Sends & Receives Alerts via Email & SMS",
# The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = "Supply Chain Management",
#description = "Used within Inventory Management, Request Management and Asset Management",
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
module_type = 4
)),
("asset", Storage(
name_nice = "Assets",
#description = "Recording and Assigning Assets",
module_type = 5,
)),
# Vehicle depends on Assets
("vehicle", Storage(
name_nice = "Vehicles",
#description = "Manage Vehicles",
module_type = 10,
)),
#("budget", Storage(
# name_nice = T("Budgets"),
# #description = "Tracks the location, capacity and breakdown of victims in Shelters",
# module_type = 10
#)),
("fin", Storage(
name_nice = T("Finance"),
module_type = 10
)),
("cr", Storage(
name_nice = T("Shelters"),
#description = "Tracks the location, capacity and breakdown of victims in Shelters",
module_type = 10
)),
("project", Storage(
name_nice = "Tasks",
#description = "Tracking of Projects, Activities and Tasks",
module_type = 2
)),
("req", Storage(
name_nice = "Requests",
#description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
module_type = 10,
)),
("hms", Storage(
name_nice = T("Hospitals"),
#description = "Helps to monitor status of hospitals",
module_type = 10
)),
#("dvr", Storage(
# name_nice = T("Disaster Victim Registry"),
# #description = "Allow affected individuals & households to register to receive compensation and distributions",
# module_type = 10,
#)),
("event", Storage(
name_nice = "Events",
#description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
module_type = 10,
)),
#("transport", Storage(
# name_nice = T("Transport"),
# module_type = 10,
#)),
#("stats", Storage(
# name_nice = T("Statistics"),
# #description = "Manages statistics",
# module_type = None,
#)),
])
# -------------------------------------------------------------------------
# CMS
# -------------------------------------------------------------------------
settings.cms.richtext = True
# -------------------------------------------------------------------------
# Organisations
# -------------------------------------------------------------------------
settings.org.documents_tab = True
settings.org.projects_tab = False
# -------------------------------------------------------------------------
# Shelters
# -------------------------------------------------------------------------
settings.cr.people_registration = False
# -------------------------------------------------------------------------
#def customise_cr_shelter_resource(r, tablename):
#table = current.s3db.cr_shelter
#settings.customise_cr_shelter_resource = customise_cr_shelter_resource
# -------------------------------------------------------------------------
# Events
# -------------------------------------------------------------------------
    def event_rheader(r):
        """
        Resource header (rheader) for the event module, rendered only for
        HTML representations. Builds a tabbed header for incidents,
        incident reports, events and scenarios; returns None otherwise.

        Args:
            r: the S3Request

        Returns:
            a web2py DIV (or None) to place above the record's tabs
        """
        rheader = None
        record = r.record
        if record and r.representation == "html":
            from gluon import A, DIV, TABLE, TR, TH
            from s3 import s3_rheader_tabs
            name = r.name
            if name == "incident":
                # Deployment setting decides "Ticket" vs "Incident" wording
                if settings.get_incident_label(): # == "Ticket"
                    label = T("Ticket Details")
                else:
                    label = T("Incident Details")
                tabs = [(label, None),
                        #(T("Tasks"), "task"),
                        #(T("Human Resources"), "human_resource"),
                        #(T("Equipment"), "asset"),
                        (T("Action Plan"), "plan"),
                        (T("Incident Reports"), "incident_report"),
                        (T("Logs"), "log"),
                        (T("Expenses"), "expense"),
                        (T("Situation Reports"), "sitrep"),
                        ]
                rheader_tabs = s3_rheader_tabs(r, tabs)
                record_id = r.id
                incident_type_id = record.incident_type_id
                editable = current.auth.s3_has_permission("UPDATE", "event_incident", record_id)
                if editable and r.method == "plan":
                    # Dropdown of Scenarios to select
                    # @ToDo: Move this to a Popup behind an Action Button, to make it clearer that this isn't a maintained link
                    # @ToDo: Also add 'Clear' button to clear all elements & start from a blank slate
                    stable = current.s3db.event_scenario
                    query = (stable.incident_type_id == incident_type_id) & \
                            (stable.deleted == False)
                    scenarios = current.db(query).select(stable.id,
                                                         stable.name,
                                                         )
                    if len(scenarios) and r.method != "event":
                        from gluon import SELECT, OPTION
                        dropdown = SELECT(_id="scenarios")
                        dropdown["_data-incident_id"] = record_id
                        dappend = dropdown.append
                        dappend(OPTION(T("Select Scenario")))
                        for s in scenarios:
                            dappend(OPTION(s.name, _value=s.id))
                        scenarios = TR(TH("%s: " % T("Apply Scenario")),
                                       dropdown,
                                       )
                        # Client-side handler applies the chosen scenario to
                        # this incident (after a JS confirm prompt)
                        s3 = current.response.s3
                        script = "/%s/static/themes/SAFIRE/js/incident_profile.js" % r.application
                        if script not in s3.scripts:
                            s3.scripts.append(script)
                        s3.js_global.append('''i18n.scenarioConfirm="%s"''' % T("Populate Incident with Tasks, Organizations, Positions and Equipment from the Scenario?"))
                    else:
                        scenarios = ""
                else:
                    scenarios = ""
                # Banner rows for exercise/closed status
                if record.exercise:
                    exercise = TH(T("EXERCISE"))
                else:
                    exercise = TH()
                if record.closed:
                    closed = TH(T("CLOSED"))
                else:
                    closed = TH()
                # Offer an "Assign to Event/Disaster" action while the
                # incident is unassigned and the user may edit it
                if record.event_id or r.method == "event" or not editable:
                    event = ""
                else:
                    if settings.get_event_label(): # == "Disaster"
                        label = T("Assign to Disaster")
                    else:
                        label = T("Assign to Event")
                    event = A(label,
                              _href = URL(c = "event",
                                          f = "incident",
                                          args = [record_id, "event"],
                                          ),
                              _class = "action-btn"
                              )
                table = r.table
                rheader = DIV(TABLE(TR(exercise),
                                    TR(TH("%s: " % table.name.label),
                                       record.name,
                                       ),
                                    TR(TH("%s: " % table.incident_type_id.label),
                                       table.incident_type_id.represent(incident_type_id),
                                       ),
                                    TR(TH("%s: " % table.location_id.label),
                                       table.location_id.represent(record.location_id),
                                       ),
                                    #TR(TH("%s: " % table.severity.label),
                                    #   table.severity.represent(record.severity),
                                    #   ),
                                    #TR(TH("%s: " % table.level.label),
                                    #   table.level.represent(record.level),
                                    #   ),
                                    TR(TH("%s: " % table.organisation_id.label),
                                       table.organisation_id.represent(record.organisation_id),
                                       ),
                                    TR(TH("%s: " % table.person_id.label),
                                       table.person_id.represent(record.person_id),
                                       ),
                                    scenarios,
                                    TR(TH("%s: " % table.comments.label),
                                       record.comments,
                                       ),
                                    TR(TH("%s: " % table.date.label),
                                       table.date.represent(record.date),
                                       ),
                                    TR(closed),
                                    event,
                                    ), rheader_tabs)
            elif name == "incident_report":
                # Show the linked incident, or an action button to assign one
                record_id = r.id
                ltable = current.s3db.event_incident_report_incident
                query = (ltable.incident_report_id == record_id)
                link = current.db(query).select(ltable.incident_id,
                                                limitby = (0, 1)
                                                ).first()
                if link:
                    from s3 import S3Represent
                    represent = S3Represent(lookup="event_incident", show_link=True)
                    rheader = DIV(TABLE(TR(TH("%s: " % ltable.incident_id.label),
                                           represent(link.incident_id),
                                           ),
                                        ))
                else:
                    if settings.get_incident_label(): # == "Ticket"
                        label = T("Assign to Ticket")
                    else:
                        label = T("Assign to Incident")
                    rheader = DIV(A(label,
                                    _href = URL(c = "event",
                                                f = "incident_report",
                                                args = [record_id, "assign"],
                                                ),
                                    _class = "action-btn"
                                    ))
            elif name == "event":
                if settings.get_event_label(): # == "Disaster"
                    label = T("Disaster Details")
                else:
                    label = T("Event Details")
                if settings.get_incident_label(): # == "Ticket"
                    INCIDENTS = T("Tickets")
                else:
                    INCIDENTS = T("Incidents")
                tabs = [(label, None),
                        (INCIDENTS, "incident"),
                        ]
                # Optional tabs controlled by deployment settings
                if settings.get_event_impact_tab():
                    tabs.append((T("Impact"), "impact"))
                if settings.get_event_dc_target_tab():
                    tabs.append((T("Assessment Targets"), "target"))
                tabs += [(T("Documents"), "document"),
                         (T("Photos"), "image"),
                         ]
                rheader_tabs = s3_rheader_tabs(r, tabs)
                table = r.table
                rheader = DIV(TABLE(TR(TH("%s: " % table.event_type_id.label),
                                       table.event_type_id.represent(record.event_type_id),
                                       ),
                                    TR(TH("%s: " % table.name.label),
                                       record.name,
                                       ),
                                    TR(TH("%s: " % table.start_date.label),
                                       table.start_date.represent(record.start_date),
                                       ),
                                    TR(TH("%s: " % table.comments.label),
                                       record.comments,
                                       ),
                                    ), rheader_tabs)
            elif name == "scenario":
                tabs = [(T("Scenario Details"), None),
                        #(T("Tasks"), "task"),
                        #(T("Human Resources"), "human_resource"),
                        #(T("Equipment"), "asset"),
                        (T("Action Plan"), "plan"),
                        (T("Incident Reports"), "incident_report"),
                        ]
                rheader_tabs = s3_rheader_tabs(r, tabs)
                table = r.table
                rheader = DIV(TABLE(TR(TH("%s: " % table.incident_type_id.label),
                                       table.incident_type_id.represent(record.incident_type_id),
                                       ),
                                    TR(TH("%s: " % table.organisation_id.label),
                                       table.organisation_id.represent(record.organisation_id),
                                       ),
                                    TR(TH("%s: " % table.location_id.label),
                                       table.location_id.represent(record.location_id),
                                       ),
                                    TR(TH("%s: " % table.name.label),
                                       record.name,
                                       ),
                                    TR(TH("%s: " % table.comments.label),
                                       record.comments,
                                       ),
                                    ), rheader_tabs)
        return rheader
# -------------------------------------------------------------------------
def customise_dc_target_resource(r, tablename):
    """
        Configure Assessment Targets for standalone use
        - skipped in the Event & HRM (Training Event Evaluation) contexts,
          which carry their own configurations
    """

    # Not for Event or Training Event Evaluations
    if r.controller in ("event", "hrm"):
        return

    from s3 import S3DateFilter, S3LocationFilter, S3OptionsFilter, \
                   S3SQLCustomForm, S3SQLInlineLink

    # Link to a single Event inline
    event_link = S3SQLInlineLink("event",
                                 field = "event_id",
                                 #label = type_label,
                                 multiple = False,
                                 )

    current.s3db.configure(tablename,
                           crud_form = S3SQLCustomForm(event_link,
                                                       "template_id",
                                                       "date",
                                                       "location_id",
                                                       "comments",
                                                       ),
                           filter_widgets = [S3OptionsFilter("event__link.event_id"),
                                             S3LocationFilter(),
                                             S3DateFilter("date"),
                                             ],
                           list_fields = ["event__link.event_id",
                                          "location_id$L1",
                                          "location_id$L2",
                                          "name",
                                          (T("Reporting Date"), "date"),
                                          (T("Reported by"), "created_by"),
                                          ],
                           )

settings.customise_dc_target_resource = customise_dc_target_resource
# -------------------------------------------------------------------------
def customise_event_event_controller(**attr):
    """
        Event controller: use the custom rheader
    """

    attr["rheader"] = event_rheader

    return attr

settings.customise_event_event_controller = customise_event_event_controller
# -------------------------------------------------------------------------
def customise_event_incident_report_resource(r, tablename):
    """
        Incident Reports are presented as a Call Log:
        - relabel the CRUD strings accordingly
        - use a map-based Location Selector with address & polygon support
    """

    from s3 import S3LocationSelector

    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Log Call"),
        title_display = T("Call Log Details"),
        title_list = T("Call Logs"),
        title_update = T("Edit Call Log"),
        label_list_button = T("List Call Logs"),
        label_delete_button = T("Delete Call Log"),
        msg_record_created = T("Call Log added"),
        msg_record_modified = T("Call Log updated"),
        msg_record_deleted = T("Call Log removed"),
        msg_list_empty = T("No Calls currently logged"),
        )

    # Capture an address & optional polygon with the Location
    location_id = current.s3db.event_incident_report.location_id
    location_id.widget = S3LocationSelector(polygons = True,
                                            show_address = True,
                                            )

settings.customise_event_incident_report_resource = customise_event_incident_report_resource
# -------------------------------------------------------------------------
def customise_event_incident_report_controller(**attr):
    """
        Incident Report (Call Log) controller:
        - custom call-taker form on create
        - redirect to the "assign" step after create
        - rheader links the Report to its Incident/Ticket
    """

    from gluon import A

    s3 = current.response.s3

    # Custom prep
    standard_prep = s3.prep
    def custom_prep(r):
        # Run the default prep first
        if callable(standard_prep) and not standard_prep(r):
            return False

        if r.method in (None, "create"):
            current.s3db.gis_location.addr_street.label = T("Street Address or Location Details")
            from s3 import S3SQLCustomForm
            # Conversational labels for the call-taker
            r.resource.configure(create_next = URL(args=["[id]", "assign"]),
                                 crud_form = S3SQLCustomForm(
                                     (T("What is it?"), "name"),
                                     "incident_type_id",
                                     (T("Who am I speaking with?"), "reported_by"),
                                     (T("How can we contact you?"), "contact"),
                                     (T("Where did this Incident take place?"), "location_id"),
                                     (T("Explain the Situation?"), "description"),
                                     (T("What are your immediate needs?"), "needs"),
                                     ),
                                 )
        return True
    s3.prep = custom_prep

    # No sidebar menu
    current.menu.options = None

    req_args = current.request.args
    if len(req_args) > 1 and req_args[1] == "assign":
        # On the assign step: button to create a new Incident/Ticket
        # from this Report
        if settings.get_incident_label(): # == "Ticket"
            label = T("New Ticket")
        else:
            label = T("New Incident")
        attr["rheader"] = A(label,
                            _class = "action-btn",
                            _href = URL(c="event", f="incident",
                                        args = ["create"],
                                        vars = {"incident_report_id": req_args[0]},
                                        ),
                            )
    else:
        attr["rheader"] = event_rheader

    return attr

settings.customise_event_incident_report_controller = customise_event_incident_report_controller
# -------------------------------------------------------------------------
def event_incident_create_onaccept(form):
    """
        Alert Lead Agency
        - SMS to the Organisation's on-call Duty Number when an Incident
          is assigned to them
    """

    organisation_id = form.vars.get("organisation_id")
    if not organisation_id:
        return

    # Look up the Lead Agency's Duty Number
    ttable = current.s3db.org_organisation_tag
    query = (ttable.organisation_id == organisation_id) & \
            (ttable.tag == "duty")
    duty = current.db(query).select(ttable.value,
                                    limitby = (0, 1)
                                    ).first()
    if not duty:
        return

    incident_id = form.vars.get("id")
    # @ToDo: i18n
    message = "You have been assigned an Incident: %s%s" % (settings.get_base_public_url(),
                                                            URL(c="event", f= "incident",
                                                                args = incident_id),
                                                            )
    current.msg.send_sms_via_api(duty.value, message)
# -------------------------------------------------------------------------
def customise_event_incident_resource(r, tablename):
    """
        Incidents:
        - Location Selector with address & polygon support
        - expose the Lead Response Organization
        - Action Plan document only shown on the "plan" method
        - alert the Lead Agency on create
    """

    from s3 import S3LocationSelector

    s3db = current.s3db
    table = s3db.event_incident

    table.location_id.widget = S3LocationSelector(polygons = True,
                                                  show_address = True,
                                                  )

    organisation_id = table.organisation_id
    organisation_id.readable = organisation_id.writable = True
    organisation_id.label = T("Lead Response Organization")

    if r.method == "plan":
        table.action_plan.label = T("Event Action Plan")
    else:
        action_plan = table.action_plan
        action_plan.readable = action_plan.writable = False

    if r.interactive:
        # Notify the Lead Agency's Duty Number
        s3db.add_custom_callback(tablename,
                                 "create_onaccept",
                                 event_incident_create_onaccept,
                                 )

settings.customise_event_incident_resource = customise_event_incident_resource
# -------------------------------------------------------------------------
def customise_event_incident_controller(**attr):
    """
        Incident controller:
        - redirect to the Action Plan after create
        - when created from an Incident Report ("assign"): prepopulate
          fields from the Report & link the Report to the new Incident
        - Ticket-specific empty-list messages on the "plan" method
    """

    s3db = current.s3db

    s3 = current.response.s3

    # Custom prep
    standard_prep = s3.prep
    def custom_prep(r):
        # Call standard postp
        if callable(standard_prep):
            result = standard_prep(r)
            if not result:
                return False

        resource = r.resource

        # Redirect to action plan after create
        resource.configure(create_next = URL(c="event", f="incident",
                                             args = ["[id]", "plan"]
                                             ),
                           )

        method = r.method
        if method == "create":
            incident_report_id = r.get_vars.get("incident_report_id")
            if incident_report_id:
                # Got here from incident report assign => "New Incident"
                # - prepopulate incident name from report title
                # - copy incident type and location from report
                # - onaccept: link the incident report to the incident
                if r.http == "GET":
                    # Prepopulate the create-form defaults from the Report
                    from s3 import s3_truncate
                    rtable = s3db.event_incident_report
                    incident_report = current.db(rtable.id == incident_report_id).select(rtable.name,
                                                                                         rtable.incident_type_id,
                                                                                         rtable.location_id,
                                                                                         limitby = (0, 1),
                                                                                         ).first()
                    table = r.table
                    # Incident names are truncated to 64 chars
                    table.name.default = s3_truncate(incident_report.name, 64)
                    table.incident_type_id.default = incident_report.incident_type_id
                    table.location_id.default = incident_report.location_id

                elif r.http == "POST":
                    def create_onaccept(form):
                        # Link the new Incident back to the Report it came from
                        s3db.event_incident_report_incident.insert(incident_id = form.vars.id,
                                                                   incident_report_id = incident_report_id,
                                                                   )

                    s3db.add_custom_callback("event_incident",
                                             "create_onaccept",
                                             create_onaccept,
                                             )

        elif method == "plan" and settings.get_incident_label(): # == "Ticket"
            # Load the tables before overriding their CRUD strings
            s3db.event_task
            s3db.event_organisation
            crud_strings = s3.crud_strings
            crud_strings.event_task.msg_list_empty = T("No Tasks currently registered for this ticket")
            crud_strings.event_organisation.msg_list_empty = T("No Organizations currently registered in this ticket")

        return True
    s3.prep = custom_prep

    # No sidebar menu
    current.menu.options = None

    attr["rheader"] = event_rheader

    return attr

settings.customise_event_incident_controller = customise_event_incident_controller
# -------------------------------------------------------------------------
def customise_event_asset_resource(r, tablename):
    """
        Equipment assigned to Incidents:
        - relabel the Item fields
        - Equipment-specific CRUD strings

        Only the delete-button & empty-list messages depend on the
        Incident label ("Ticket" vs "Incident"); the T() msgids are
        kept whole so existing translations still match.
    """

    table = current.s3db.event_asset
    table.item_id.label = T("Item Type")
    table.asset_id.label = T("Specific Item")

    if settings.get_incident_label(): # == "Ticket"
        label_delete_button = T("Remove Equipment from this ticket")
        msg_list_empty = T("No Equipment currently registered for this ticket")
    else:
        label_delete_button = T("Remove Equipment from this incident")
        msg_list_empty = T("No Equipment currently registered for this incident")

    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Add Equipment"),
        title_display = T("Equipment Details"),
        title_list = T("Equipment"),
        title_update = T("Edit Equipment"),
        label_list_button = T("List Equipment"),
        label_delete_button = label_delete_button,
        msg_record_created = T("Equipment added"),
        msg_record_modified = T("Equipment updated"),
        msg_record_deleted = T("Equipment removed"),
        msg_list_empty = msg_list_empty,
        )

settings.customise_event_asset_resource = customise_event_asset_resource
# -------------------------------------------------------------------------
def event_human_resource_onaccept(form, create=True):
    """
        When a Position is assigned to an Incident:
         - set_event_from_incident
         - add Log Entry
         - send Notification

        @param form: the CRUD form
        @param create: True for create, False for update
    """

    db = current.db
    s3db = current.s3db

    # Inherit the event_id from the Incident
    from s3db.event import event_set_event_from_incident
    event_set_event_from_incident(form, "event_human_resource")

    table = s3db.event_human_resource

    form_vars = form.vars
    form_vars_get = form_vars.get
    link_id = form_vars_get("id")

    incident_id = form_vars_get("incident_id")
    if not incident_id:
        # Not in form_vars (e.g. component tab) => read from the record
        link = db(table.id == link_id).select(table.incident_id,
                                              limitby = (0, 1)
                                              ).first()
        incident_id = link.incident_id

    pe_id = None
    if create:
        person_id = form_vars_get("person_id")
        if person_id:
            # Notify the assigned Person (below)
            ptable = s3db.pr_person
            person = db(ptable.id == person_id).select(ptable.pe_id,
                                                       limitby = (0, 1)
                                                       ).first()
            pe_id = person.pe_id

        # Log the Position request
        job_title_id = form_vars_get("job_title_id")
        if job_title_id:
            s3db.event_incident_log.insert(incident_id = incident_id,
                                           name = "Person Requested",
                                           comments = s3db.event_human_resource.job_title_id.represent(job_title_id),
                                           )
    else:
        # Update
        record = form.record
        if record: # Not True for a record merger
            from s3dal import Field
            # Diff the submitted values against the old record & log the changes
            changed = {}
            for var in form_vars:
                vvar = form_vars[var]
                if isinstance(vvar, Field):
                    # modified_by/modified_on
                    continue
                rvar = record.get(var, "NOT_PRESENT")
                if rvar != "NOT_PRESENT" and vvar != rvar:
                    f = table[var]
                    if var == "pe_id":
                        # Notify the new Assignee (below)
                        pe_id = vvar
                    type_ = f.type
                    if type_ == "integer" or \
                       type_.startswith("reference"):
                        # Normalise type before comparing
                        # (form values may be strings)
                        if vvar:
                            vvar = int(vvar)
                        if vvar == rvar:
                            continue
                    represent = table[var].represent
                    if represent:
                        if hasattr(represent, "show_link"):
                            # Plain text for the log entry, no hyperlinks
                            represent.show_link = False
                    else:
                        represent = lambda o: o
                    if rvar:
                        changed[var] = "%s changed from %s to %s" % \
                            (f.label, represent(rvar), represent(vvar))
                    else:
                        changed[var] = "%s changed to %s" % \
                            (f.label, represent(vvar))
            if changed:
                # Log the changes, one line per modified field
                table = s3db.event_incident_log
                text = []
                for var in changed:
                    text.append(changed[var])
                text = "\n".join(text)
                table.insert(incident_id = incident_id,
                             #name = "Person Assigned",
                             name = "Person Request Updated",
                             comments = text,
                             )

    if pe_id:
        # Notify Assignee
        if settings.get_incident_label(): # == "Ticket"
            label = T("Ticket")
        else:
            label = T("Incident")
        # @ToDo: i18n
        current.msg.send_by_pe_id(pe_id,
                                  subject = "",
                                  message = "You have been assigned to an %s: %s%s" % \
                                    (label,
                                     settings.get_base_public_url(),
                                     URL(c="event", f= "incident",
                                         args = [incident_id, "human_resource", link_id]),
                                     ),
                                  contact_method = "SMS"
                                  )
# -------------------------------------------------------------------------
def customise_event_human_resource_resource(r, tablename):
    """
        Personnel assigned to Incidents:
        - proper DateTime validation/widget for start/end dates
        - Person-specific CRUD strings
        - log changes & notify assignees via event_human_resource_onaccept

        Only the delete-button & empty-list messages depend on the
        Incident label ("Ticket" vs "Incident"); the T() msgids are
        kept whole so existing translations still match.
    """

    from gluon import IS_EMPTY_OR
    from s3 import IS_UTC_DATETIME, S3CalendarWidget, S3DateTime

    s3db = current.s3db
    table = s3db.event_human_resource

    # DateTime
    for f in (table.start_date, table.end_date):
        f.requires = IS_EMPTY_OR(IS_UTC_DATETIME())
        f.represent = lambda dt: S3DateTime.datetime_represent(dt, utc=True)
        f.widget = S3CalendarWidget(timepicker = True)

    if settings.get_incident_label(): # == "Ticket"
        label_delete_button = T("Remove Person from this ticket")
        msg_list_empty = T("No Persons currently registered for this ticket")
    else:
        label_delete_button = T("Remove Person from this incident")
        msg_list_empty = T("No Persons currently registered for this incident")

    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Add Person"),
        title_display = T("Person Details"),
        title_list = T("Personnel"),
        title_update = T("Edit Person"),
        label_list_button = T("List Personnel"),
        label_delete_button = label_delete_button,
        msg_record_created = T("Person added"),
        msg_record_modified = T("Person updated"),
        msg_record_deleted = T("Person removed"),
        msg_list_empty = msg_list_empty,
        )

    s3db.configure(tablename,
                   # Deliberately over-rides
                   create_onaccept = event_human_resource_onaccept,
                   update_onaccept = lambda form:
                                     event_human_resource_onaccept(form, create=False),
                   )

settings.customise_event_human_resource_resource = customise_event_human_resource_resource
# -------------------------------------------------------------------------
def customise_event_scenario_controller(**attr):
    """
        Scenario controller:
        - Action Plan document only shown on the "plan" method
        - redirect to the Action Plan after create
    """

    s3 = current.response.s3

    # Custom prep
    standard_prep = s3.prep
    def custom_prep(r):
        # Run the default prep first
        if callable(standard_prep) and not standard_prep(r):
            return False

        if r.method != "plan":
            action_plan = r.table.action_plan
            action_plan.readable = action_plan.writable = False

        if r.method == "create" and r.http == "POST":
            r.resource.configure(create_next = URL(c="event", f="scenario",
                                                   args = ["[id]", "plan"]),
                                 )
        return True
    s3.prep = custom_prep

    # No sidebar menu
    current.menu.options = None

    attr["rheader"] = event_rheader

    return attr

settings.customise_event_scenario_controller = customise_event_scenario_controller
# -------------------------------------------------------------------------
def customise_event_scenario_asset_resource(r, tablename):
    """
        Equipment within Scenarios:
        - relabel the Item fields
        - Equipment-specific CRUD strings

        Only the delete-button & empty-list messages depend on the
        Incident label ("Ticket" vs "Incident"); the T() msgids are
        kept whole so existing translations still match.
    """

    table = current.s3db.event_scenario_asset
    table.item_id.label = T("Item Type")
    table.asset_id.label = T("Specific Item")

    if settings.get_incident_label(): # == "Ticket"
        label_delete_button = T("Remove Equipment from this ticket")
        msg_list_empty = T("No Equipment currently registered for this ticket")
    else:
        label_delete_button = T("Remove Equipment from this incident")
        msg_list_empty = T("No Equipment currently registered for this incident")

    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Add Equipment"),
        title_display = T("Equipment Details"),
        title_list = T("Equipment"),
        title_update = T("Edit Equipment"),
        label_list_button = T("List Equipment"),
        label_delete_button = label_delete_button,
        msg_record_created = T("Equipment added"),
        msg_record_modified = T("Equipment updated"),
        msg_record_deleted = T("Equipment removed"),
        msg_list_empty = msg_list_empty,
        )

settings.customise_event_scenario_asset_resource = customise_event_scenario_asset_resource
# -------------------------------------------------------------------------
def customise_event_scenario_human_resource_resource(r, tablename):
    """
        Personnel within Scenarios:
        - Person-specific CRUD strings

        Only the delete-button & empty-list messages depend on the
        Incident label ("Ticket" vs "Incident"); the T() msgids are
        kept whole so existing translations still match.
    """

    if settings.get_incident_label(): # == "Ticket"
        label_delete_button = T("Remove Person from this ticket")
        msg_list_empty = T("No Persons currently registered for this ticket")
    else:
        label_delete_button = T("Remove Person from this incident")
        msg_list_empty = T("No Persons currently registered for this incident")

    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Add Person"),
        title_display = T("Person Details"),
        title_list = T("Personnel"),
        title_update = T("Edit Person"),
        label_list_button = T("List Personnel"),
        label_delete_button = label_delete_button,
        msg_record_created = T("Person added"),
        msg_record_modified = T("Person updated"),
        msg_record_deleted = T("Person removed"),
        msg_list_empty = msg_list_empty,
        )

settings.customise_event_scenario_human_resource_resource = customise_event_scenario_human_resource_resource
# -------------------------------------------------------------------------
# HRM
# -------------------------------------------------------------------------
# Job Titles are usable for Deployments
# NOTE(review): semantics assumed from the setting name — verify against core hrm settings
settings.hrm.job_title_deploy = True
# Job Titles are Organisation-specific
# NOTE(review): semantics assumed from the setting name — verify against core hrm settings
settings.hrm.org_dependent_job_titles = True
# -------------------------------------------------------------------------
# Organisations
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def customise_org_organisation_resource(r, tablename):
    """
        Organisations:
        - add a "duty" tag component for the on-call Duty Number
        - custom CRUD form exposing the Duty Number as a phone field

        Fix: f.represent previously had a trailing comma, which assigned
        a 1-tuple (s3_phone_represent,) instead of the callable, breaking
        field representation.
    """

    s3db = current.s3db

    # Custom Components
    s3db.add_components(tablename,
                        org_organisation_tag = (# On-call Duty Number
                                                {"name": "duty",
                                                 "joinby": "organisation_id",
                                                 "filterby": {"tag": "duty",
                                                              },
                                                 "multiple": False,
                                                 },
                                                ),
                        )

    from s3 import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineLink, \
                   IS_EMPTY_OR, IS_PHONE_NUMBER_MULTI, S3PhoneWidget, s3_phone_represent

    # Individual settings for specific tag components
    components_get = s3db.resource(tablename).components.get

    duty = components_get("duty")
    f = duty.table.value
    # NB no trailing comma: represent must be the callable itself, not a tuple
    f.represent = s3_phone_represent
    f.requires = IS_EMPTY_OR(IS_PHONE_NUMBER_MULTI())
    f.widget = S3PhoneWidget()

    crud_form = S3SQLCustomForm("name",
                                "acronym",
                                S3SQLInlineLink("organisation_type",
                                                field = "organisation_type_id",
                                                # Default 10 options just triggers which adds unnecessary complexity to a commonly-used form & commonly an early one (create Org when registering)
                                                search = False,
                                                label = T("Type"),
                                                multiple = False,
                                                widget = "multiselect",
                                                ),
                                "country",
                                (T("Reception Phone #"), "phone"),
                                S3SQLInlineComponent("duty",
                                                     label = T("On-call Duty Number"),
                                                     fields = [("", "value")],
                                                     multiple = False,
                                                     ),
                                "website",
                                "logo",
                                "comments",
                                )

    s3db.configure(tablename,
                   crud_form = crud_form,
                   )

settings.customise_org_organisation_resource = customise_org_organisation_resource
# -------------------------------------------------------------------------
# Projects
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def project_task_onaccept(form, create=True):
    """
        Send Person a Notification when they are assigned to a Task
        Log changes in Incident Log

        @param form: the CRUD form
        @param create: True for create, False for update
    """

    if current.request.function == "scenario":
        # Must be a Scenario
        # - don't Log
        # - don't send Notification
        return

    db = current.db
    s3db = current.s3db
    ltable = s3db.event_task

    form_vars = form.vars
    form_vars_get = form_vars.get
    task_id = form_vars_get("id")
    # Resolve the Incident this Task belongs to
    link = db(ltable.task_id == task_id).select(ltable.incident_id,
                                                limitby = (0, 1)
                                                ).first()
    if not link:
        # Not attached to an Incident
        # - don't Log
        # - don't send Notification
        return

    incident_id = link.incident_id

    if create:
        pe_id = form_vars_get("pe_id")
        # Log
        name = form_vars_get("name")
        if name:
            s3db.event_incident_log.insert(incident_id = incident_id,
                                           name = "Task Created",
                                           comments = name,
                                           )
    else:
        # Update
        pe_id = None
        record = form.record
        if record: # Not True for a record merger
            from s3dal import Field
            table = s3db.project_task
            # Diff the submitted values against the old record & log the changes
            changed = {}
            for var in form_vars:
                vvar = form_vars[var]
                if isinstance(vvar, Field):
                    # modified_by/modified_on
                    continue
                if var == "pe_id":
                    # Notify the new Assignee (below)
                    pe_id = vvar
                rvar = record.get(var, "NOT_PRESENT")
                if rvar != "NOT_PRESENT" and vvar != rvar:
                    f = table[var]
                    type_ = f.type
                    if type_ == "integer" or \
                       type_.startswith("reference"):
                        # Normalise type before comparing
                        # (form values may be strings)
                        if vvar:
                            vvar = int(vvar)
                        if vvar == rvar:
                            continue
                    represent = table[var].represent
                    if represent:
                        if hasattr(represent, "show_link"):
                            # Plain text for the log entry, no hyperlinks
                            represent.show_link = False
                    else:
                        represent = lambda o: o
                    if rvar:
                        changed[var] = "%s changed from %s to %s" % \
                            (f.label, represent(rvar), represent(vvar))
                    else:
                        changed[var] = "%s changed to %s" % \
                            (f.label, represent(vvar))
            if changed:
                # Log the changes, one line per modified field
                table = s3db.event_incident_log
                text = []
                for var in changed:
                    text.append(changed[var])
                text = "\n".join(text)
                table.insert(incident_id = incident_id,
                             name = "Task Updated",
                             comments = text,
                             )

    if pe_id:
        # Notify Assignee
        # @ToDo: i18n
        message = "You have been assigned a Task: %s%s" % \
            (settings.get_base_public_url(),
             URL(c="event", f= "incident",
                 args = [incident_id, "task", task_id]),
             )
        from s3db.pr import pr_instance_type
        instance_type = pr_instance_type(pe_id)
        if instance_type == "org_organisation":
            # Notify the Duty Number for the Organisation, not everyone in the Organisation!
            otable = s3db.org_organisation
            ottable = s3db.org_organisation_tag
            query = (otable.pe_id == pe_id) & \
                    (ottable.organisation_id == otable.id) & \
                    (ottable.tag == "duty")
            duty = db(query).select(ottable.value,
                                    limitby = (0, 1)
                                    ).first()
            if duty:
                current.msg.send_sms_via_api(duty.value,
                                             message)
        else:
            # Individual assignee: notify via the configured channel (if any)
            task_notification = settings.get_event_task_notification()
            if task_notification:
                current.msg.send_by_pe_id(pe_id,
                                          subject = "%s: Task assigned to you" % settings.get_system_name_short(),
                                          message = message,
                                          contact_method = task_notification)
# -------------------------------------------------------------------------
def customise_project_task_resource(r, tablename):
    """
        Tasks:
        - hide the unused Source field
        - default forms without the time log (KISS)
        - log changes & notify assignees via project_task_onaccept
    """

    s3db = current.s3db

    source = s3db.project_task.source
    source.readable = source.writable = False

    s3db.configure(tablename,
                   # No need to see time log: KISS
                   crud_form = None,
                   # NB We deliberately over-ride the default one
                   create_onaccept = project_task_onaccept,
                   update_onaccept = lambda form:
                                     project_task_onaccept(form, create=False),
                   )

settings.customise_project_task_resource = customise_project_task_resource
# END =========================================================================
| 45.103865 | 198 | 0.441529 |
from collections import OrderedDict
from gluon import current, URL
from gluon.storage import Storage
def config(settings):
T = current.T
settings.base.system_name = T("Sahana First Response")
settings.base.system_name_short = T("SAFIRE")
settings.base.prepopulate.append("SAFIRE")
settings.base.prepopulate_demo.append("SAFIRE/Demo")
settings.auth.registration_requests_organisation = True
settings.mail.approver = "ADMIN"
settings.auth.registration_link_user_to = {"staff": T("Staff"),
}
settings.auth.registration_link_user_to_default = ["staff"]
settings.gis.legend = "float"
imal_separator = "."
settings.L10n.thousands_separator = ","
settings.security.policy = 5
settings.modules = OrderedDict([
("default", Storage(
name_nice = "Home",
restricted = False, # Use ACLs to control access to this module
#access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = "Administration",
#description = "Site Administration",
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = "Administration",
#description = "Site Administration",
module_type = None # No Menu
)),
("errors", Storage(
name_nice = "Ticket Viewer",
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = "Synchronization",
#description = "Synchronization",
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
#("tour", Storage(
# name_nice = T("Guided Tour Functionality"),
# module_type = None,
#)),
#("translate", Storage(
# name_nice = T("Translation Functionality"),
# #description = "Selective translation of strings based on module.",
# module_type = None,
#)),
("gis", Storage(
name_nice = "Map",
#description = "Situation Awareness & Geospatial Analysis",
module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = "Person Registry",
#description = "Central point to record details on People",
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = "Organizations",
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
module_type = 1
)),
("hrm", Storage(
name_nice = "Staff",
#description = "Human Resources Management",
module_type = 2,
)),
("vol", Storage(
name_nice = T("Volunteers"),
#description = "Human Resources Management",
module_type = 2,
)),
("cms", Storage(
name_nice = "Content Management",
#description = "Content Management System",
module_type = 10,
)),
("doc", Storage(
name_nice = "Documents",
#description = "A library of digital resources, such as photos, documents and reports",
module_type = 10,
)),
("msg", Storage(
name_nice = "Messaging",
#description = "Sends & Receives Alerts via Email & SMS",
# The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = "Supply Chain Management",
#description = "Used within Inventory Management, Request Management and Asset Management",
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
module_type = 4
)),
("asset", Storage(
name_nice = "Assets",
#description = "Recording and Assigning Assets",
module_type = 5,
)),
# Vehicle depends on Assets
("vehicle", Storage(
name_nice = "Vehicles",
#description = "Manage Vehicles",
module_type = 10,
)),
#("budget", Storage(
# name_nice = T("Budgets"),
# #description = "Tracks the location, capacity and breakdown of victims in Shelters",
# module_type = 10
#)),
("fin", Storage(
name_nice = T("Finance"),
module_type = 10
)),
("cr", Storage(
name_nice = T("Shelters"),
#description = "Tracks the location, capacity and breakdown of victims in Shelters",
module_type = 10
)),
("project", Storage(
name_nice = "Tasks",
#description = "Tracking of Projects, Activities and Tasks",
module_type = 2
)),
("req", Storage(
name_nice = "Requests",
#description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
module_type = 10,
)),
("hms", Storage(
name_nice = T("Hospitals"),
#description = "Helps to monitor status of hospitals",
module_type = 10
)),
#("dvr", Storage(
# name_nice = T("Disaster Victim Registry"),
# #description = "Allow affected individuals & households to register to receive compensation and distributions",
# module_type = 10,
#)),
("event", Storage(
name_nice = "Events",
#description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
module_type = 10,
)),
#("transport", Storage(
# name_nice = T("Transport"),
# module_type = 10,
#)),
#("stats", Storage(
# name_nice = T("Statistics"),
# #description = "Manages statistics",
# module_type = None,
#)),
])
# -------------------------------------------------------------------------
# CMS
# -------------------------------------------------------------------------
settings.cms.richtext = True
# -------------------------------------------------------------------------
# Organisations
# -------------------------------------------------------------------------
settings.org.documents_tab = True
settings.org.projects_tab = False
# -------------------------------------------------------------------------
# Shelters
# -------------------------------------------------------------------------
settings.cr.people_registration = False
# -------------------------------------------------------------------------
#def customise_cr_shelter_resource(r, tablename):
#table = current.s3db.cr_shelter
#settings.customise_cr_shelter_resource = customise_cr_shelter_resource
# -------------------------------------------------------------------------
# Events
# -------------------------------------------------------------------------
def event_rheader(r):
rheader = None
record = r.record
if record and r.representation == "html":
from gluon import A, DIV, TABLE, TR, TH
from s3 import s3_rheader_tabs
name = r.name
if name == "incident":
if settings.get_incident_label(): # == "Ticket"
label = T("Ticket Details")
else:
label = T("Incident Details")
tabs = [(label, None),
#(T("Tasks"), "task"),
#(T("Human Resources"), "human_resource"),
#(T("Equipment"), "asset"),
(T("Action Plan"), "plan"),
(T("Incident Reports"), "incident_report"),
(T("Logs"), "log"),
(T("Expenses"), "expense"),
(T("Situation Reports"), "sitrep"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
record_id = r.id
incident_type_id = record.incident_type_id
editable = current.auth.s3_has_permission("UPDATE", "event_incident", record_id)
if editable and r.method == "plan":
# Dropdown of Scenarios to select
# @ToDo: Move this to a Popup behind an Action Button, to make it clearer that this isn't a maintained link
stable = current.s3db.event_scenario
query = (stable.incident_type_id == incident_type_id) & \
(stable.deleted == False)
scenarios = current.db(query).select(stable.id,
stable.name,
)
if len(scenarios) and r.method != "event":
from gluon import SELECT, OPTION
dropdown = SELECT(_id="scenarios")
dropdown["_data-incident_id"] = record_id
dappend = dropdown.append
dappend(OPTION(T("Select Scenario")))
for s in scenarios:
dappend(OPTION(s.name, _value=s.id))
scenarios = TR(TH("%s: " % T("Apply Scenario")),
dropdown,
)
s3 = current.response.s3
script = "/%s/static/themes/SAFIRE/js/incident_profile.js" % r.application
if script not in s3.scripts:
s3.scripts.append(script)
s3.js_global.append('''i18n.scenarioConfirm="%s"''' % T("Populate Incident with Tasks, Organizations, Positions and Equipment from the Scenario?"))
else:
scenarios = ""
else:
scenarios = ""
if record.exercise:
exercise = TH(T("EXERCISE"))
else:
exercise = TH()
if record.closed:
closed = TH(T("CLOSED"))
else:
closed = TH()
if record.event_id or r.method == "event" or not editable:
event = ""
else:
if settings.get_event_label():
label = T("Assign to Disaster")
else:
label = T("Assign to Event")
event = A(label,
_href = URL(c = "event",
f = "incident",
args = [record_id, "event"],
),
_class = "action-btn"
)
table = r.table
rheader = DIV(TABLE(TR(exercise),
TR(TH("%s: " % table.name.label),
record.name,
),
TR(TH("%s: " % table.incident_type_id.label),
table.incident_type_id.represent(incident_type_id),
),
TR(TH("%s: " % table.location_id.label),
table.location_id.represent(record.location_id),
),
TR(TH("%s: " % table.organisation_id.label),
table.organisation_id.represent(record.organisation_id),
),
TR(TH("%s: " % table.person_id.label),
table.person_id.represent(record.person_id),
),
scenarios,
TR(TH("%s: " % table.comments.label),
record.comments,
),
TR(TH("%s: " % table.date.label),
table.date.represent(record.date),
),
TR(closed),
event,
), rheader_tabs)
elif name == "incident_report":
record_id = r.id
ltable = current.s3db.event_incident_report_incident
query = (ltable.incident_report_id == record_id)
link = current.db(query).select(ltable.incident_id,
limitby = (0, 1)
).first()
if link:
from s3 import S3Represent
represent = S3Represent(lookup="event_incident", show_link=True)
rheader = DIV(TABLE(TR(TH("%s: " % ltable.incident_id.label),
represent(link.incident_id),
),
))
else:
if settings.get_incident_label():
label = T("Assign to Ticket")
else:
label = T("Assign to Incident")
rheader = DIV(A(label,
_href = URL(c = "event",
f = "incident_report",
args = [record_id, "assign"],
),
_class = "action-btn"
))
elif name == "event":
if settings.get_event_label():
label = T("Disaster Details")
else:
label = T("Event Details")
if settings.get_incident_label():
INCIDENTS = T("Tickets")
else:
INCIDENTS = T("Incidents")
tabs = [(label, None),
(INCIDENTS, "incident"),
]
if settings.get_event_impact_tab():
tabs.append((T("Impact"), "impact"))
if settings.get_event_dc_target_tab():
tabs.append((T("Assessment Targets"), "target"))
tabs += [(T("Documents"), "document"),
(T("Photos"), "image"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % table.event_type_id.label),
table.event_type_id.represent(record.event_type_id),
),
TR(TH("%s: " % table.name.label),
record.name,
),
TR(TH("%s: " % table.start_date.label),
table.start_date.represent(record.start_date),
),
TR(TH("%s: " % table.comments.label),
record.comments,
),
), rheader_tabs)
elif name == "scenario":
tabs = [(T("Scenario Details"), None),
(T("Action Plan"), "plan"),
(T("Incident Reports"), "incident_report"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % table.incident_type_id.label),
table.incident_type_id.represent(record.incident_type_id),
),
TR(TH("%s: " % table.organisation_id.label),
table.organisation_id.represent(record.organisation_id),
),
TR(TH("%s: " % table.location_id.label),
table.location_id.represent(record.location_id),
),
TR(TH("%s: " % table.name.label),
record.name,
),
TR(TH("%s: " % table.comments.label),
record.comments,
),
), rheader_tabs)
return rheader
def customise_dc_target_resource(r, tablename):
    """
    Customise dc_target resources: custom CRUD form (with a single
    inline Event link), filter widgets and list fields.

    Left at the standard configuration when accessed through the
    event or hrm controllers.

    @param r: the current S3Request
    @param tablename: the resource tablename
    """
    if r.controller in ("event",
                        "hrm",
                        ):
        # Keep the standard configuration for these controllers
        return

    s3db = current.s3db

    from s3 import S3DateFilter, S3LocationFilter, S3OptionsFilter, S3SQLCustomForm, S3SQLInlineLink

    # Single (non-multiple) Event selected inline with the target
    crud_form = S3SQLCustomForm(S3SQLInlineLink("event",
                                                field = "event_id",
                                                multiple = False,
                                                ),
                                "template_id",
                                "date",
                                "location_id",
                                "comments",
                                )

    filter_widgets = [S3OptionsFilter("event__link.event_id"),
                      S3LocationFilter(),
                      S3DateFilter("date"),
                      ]

    list_fields = ["event__link.event_id",
                   "location_id$L1",
                   "location_id$L2",
                   "name",
                   (T("Reporting Date"), "date"),
                   (T("Reported by"), "created_by"),
                   ]

    s3db.configure(tablename,
                   crud_form = crud_form,
                   filter_widgets = filter_widgets,
                   list_fields = list_fields,
                   )

settings.customise_dc_target_resource = customise_dc_target_resource
def customise_event_event_controller(**attr):
    """
    Customise the event/event controller:
    attach the template's common event rheader.
    """
    attr.update(rheader = event_rheader)
    return attr

settings.customise_event_event_controller = customise_event_event_controller
def customise_event_incident_report_resource(r, tablename):
    """
    Customise event_incident_report: present records as "Call Logs"
    and use a location selector with polygon & street-address support.

    @param r: the current S3Request
    @param tablename: the resource tablename
    """
    # Relabel all CRUD strings to "Call Log" terminology
    current.response.s3.crud_strings[tablename] = Storage(
        label_create = T("Log Call"),
        title_display = T("Call Log Details"),
        title_list = T("Call Logs"),
        title_update = T("Edit Call Log"),
        label_list_button = T("List Call Logs"),
        label_delete_button = T("Delete Call Log"),
        msg_record_created = T("Call Log added"),
        msg_record_modified = T("Call Log updated"),
        msg_record_deleted = T("Call Log removed"),
        msg_list_empty = T("No Calls currently logged"),
        )

    from s3 import S3LocationSelector
    current.s3db.event_incident_report.location_id.widget = \
        S3LocationSelector(polygons = True,
                           show_address = True,
                           )

settings.customise_event_incident_report_resource = customise_event_incident_report_resource
def customise_event_incident_report_controller(**attr):
    """
    Customise the event/incident_report controller:
    - custom prep: call-taker oriented CRUD form for create, then
      redirect to the "assign" step
    - rheader: "New Ticket/Incident" action button while assigning,
      otherwise the common event rheader
    """
    from gluon import A

    s3 = current.response.s3

    # Custom prep, chained with (and run after) the standard prep
    standard_prep = s3.prep
    def custom_prep(r):
        if callable(standard_prep):
            result = standard_prep(r)
            if not result:
                return False

        method = r.method
        if method in (None, "create"):
            current.s3db.gis_location.addr_street.label = T("Street Address or Location Details")
            from s3 import S3SQLCustomForm
            # Question-style labels to guide the call-taker
            crud_form = S3SQLCustomForm((T("What is it?"), "name"),
                                        "incident_type_id",
                                        (T("Who am I speaking with?"), "reported_by"),
                                        (T("How can we contact you?"), "contact"),
                                        (T("Where did this Incident take place?"), "location_id"),
                                        (T("Explain the Situation?"), "description"),
                                        (T("What are your immediate needs?"), "needs"),
                                        )
            # After creation, proceed to the assign step
            r.resource.configure(create_next = URL(args=["[id]", "assign"]),
                                 crud_form = crud_form,
                                 )
        return True
    s3.prep = custom_prep

    # Remove the side menu
    current.menu.options = None

    req_args = current.request.args
    if len(req_args) > 1 and req_args[1] == "assign":
        # On the assign step: offer creating a new Incident/Ticket
        # pre-linked to this report
        if settings.get_incident_label(): # Incidents presented as "Tickets"
            label = T("New Ticket")
        else:
            label = T("New Incident")
        attr["rheader"] = A(label,
                            _class = "action-btn",
                            _href = URL(c="event", f="incident",
                                        args = ["create"],
                                        vars = {"incident_report_id": req_args[0]},
                                        ),
                            )
    else:
        attr["rheader"] = event_rheader

    return attr

settings.customise_event_incident_report_controller = customise_event_incident_report_controller
def event_incident_create_onaccept(form):
    """
    onaccept for Incident creation: if the lead organisation has a
    "duty" phone number tag, notify that number by SMS with a link
    to the new incident.

    @param form: the FORM with the accepted incident record vars
    """
    organisation_id = form.vars.get("organisation_id")
    if organisation_id:
        # Look up the organisation's on-call duty number
        otable = current.s3db.org_organisation_tag
        query = (otable.organisation_id == organisation_id) & \
                (otable.tag == "duty")
        duty = current.db(query).select(otable.value,
                                        limitby = (0, 1)
                                        ).first()
        if duty:
            incident_id = form.vars.get("id")
            # SMS includes a deep link to the incident record
            current.msg.send_sms_via_api(duty.value,
                "You have been assigned an Incident: %s%s" % (settings.get_base_public_url(),
                                                              URL(c="event", f= "incident",
                                                                  args = incident_id),
                                                              ))
def customise_event_incident_resource(r, tablename):
    """
    Customise event_incident: polygon-capable location selector,
    visible lead organisation field, action plan only on the "plan"
    method, and SMS notification of the lead organisation on create.
    """
    from s3 import S3LocationSelector

    s3db = current.s3db
    table = s3db.event_incident

    table.location_id.widget = S3LocationSelector(polygons = True,
                                                  show_address = True,
                                                  )

    organisation_id = table.organisation_id
    organisation_id.readable = organisation_id.writable = True
    organisation_id.label = T("Lead Response Organization")

    if r.method != "plan":
        # Hide the action plan outside the plan view
        action_plan = table.action_plan
        action_plan.readable = action_plan.writable = False
    else:
        table.action_plan.label = T("Event Action Plan")

    if r.interactive:
        # Notify the lead organisation's duty number on create
        s3db.add_custom_callback(tablename,
                                 "create_onaccept",
                                 event_incident_create_onaccept,
                                 )

settings.customise_event_incident_resource = customise_event_incident_resource
def customise_event_incident_controller(**attr):
    """
    Customise the event/incident controller:
    - after create, redirect to the Action Plan
    - when created from an Incident Report: pre-populate the form from
      the report (GET) and link the report to the new incident (POST)
    - ticket-specific empty-list messages in the plan view
    """
    s3db = current.s3db

    s3 = current.response.s3

    # Custom prep, chained with (and run after) the standard prep
    standard_prep = s3.prep
    def custom_prep(r):
        if callable(standard_prep):
            result = standard_prep(r)
            if not result:
                return False

        resource = r.resource

        # Redirect to the Action Plan after creation
        resource.configure(create_next = URL(c="event", f="incident",
                                             args = ["[id]", "plan"]
                                             ),
                           )

        method = r.method
        if method == "create":
            incident_report_id = r.get_vars.get("incident_report_id")
            if incident_report_id:
                if r.http == "GET":
                    # Pre-populate form defaults from the Incident Report
                    from s3 import s3_truncate
                    rtable = s3db.event_incident_report
                    incident_report = current.db(rtable.id == incident_report_id).select(rtable.name,
                                                                                         rtable.incident_type_id,
                                                                                         rtable.location_id,
                                                                                         limitby = (0, 1),
                                                                                         ).first()
                    table = r.table
                    # Incident names are limited to 64 chars
                    table.name.default = s3_truncate(incident_report.name, 64)
                    table.incident_type_id.default = incident_report.incident_type_id
                    table.location_id.default = incident_report.location_id
                elif r.http == "POST":
                    # Link the new Incident back to the Incident Report
                    def create_onaccept(form):
                        s3db.event_incident_report_incident.insert(incident_id = form.vars.id,
                                                                   incident_report_id = incident_report_id,
                                                                   )
                    s3db.add_custom_callback("event_incident",
                                             "create_onaccept",
                                             create_onaccept,
                                             )
        elif method == "plan" and settings.get_incident_label(): # Incidents presented as "Tickets"
            # Load the tables so their CRUD strings exist before overriding
            s3db.event_task
            s3db.event_organisation
            crud_strings = s3.crud_strings
            crud_strings.event_task.msg_list_empty = T("No Tasks currently registered for this ticket")
            crud_strings.event_organisation.msg_list_empty = T("No Organizations currently registered in this ticket")
        return True
    s3.prep = custom_prep

    # Remove the side menu
    current.menu.options = None

    attr["rheader"] = event_rheader
    return attr

settings.customise_event_incident_controller = customise_event_incident_controller
def customise_event_asset_resource(r, tablename):
    """
    Customise event_asset: present records as "Equipment".

    The two branches differ only in the noun used in the delete/empty
    messages ("ticket" vs "incident") — kept as separate literals so
    each exact string remains a translatable T() key.
    """
    table = current.s3db.event_asset
    table.item_id.label = T("Item Type")
    table.asset_id.label = T("Specific Item")

    if settings.get_incident_label(): # Incidents presented as "Tickets"
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Equipment"),
            title_display = T("Equipment Details"),
            title_list = T("Equipment"),
            title_update = T("Edit Equipment"),
            label_list_button = T("List Equipment"),
            label_delete_button = T("Remove Equipment from this ticket"),
            msg_record_created = T("Equipment added"),
            msg_record_modified = T("Equipment updated"),
            msg_record_deleted = T("Equipment removed"),
            msg_list_empty = T("No Equipment currently registered for this ticket"))
    else:
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Equipment"),
            title_display = T("Equipment Details"),
            title_list = T("Equipment"),
            title_update = T("Edit Equipment"),
            label_list_button = T("List Equipment"),
            label_delete_button = T("Remove Equipment from this incident"),
            msg_record_created = T("Equipment added"),
            msg_record_modified = T("Equipment updated"),
            msg_record_deleted = T("Equipment removed"),
            msg_list_empty = T("No Equipment currently registered for this incident"))

settings.customise_event_asset_resource = customise_event_asset_resource
def event_human_resource_onaccept(form, create=True):
    """
    onaccept for event_human_resource links:
    - ensure the link is associated with the Incident's Event
    - log creation/updates in the incident log
    - notify the assigned person by SMS

    @param form: the FORM
    @param create: True on create, False on update
    """
    db = current.db
    s3db = current.s3db

    from s3db.event import event_set_event_from_incident
    event_set_event_from_incident(form, "event_human_resource")

    table = s3db.event_human_resource
    form_vars = form.vars
    form_vars_get = form_vars.get
    link_id = form_vars_get("id")
    incident_id = form_vars_get("incident_id")
    if not incident_id:
        # Look up the incident from the link record
        link = db(table.id == link_id).select(table.incident_id,
                                              limitby = (0, 1)
                                              ).first()
        incident_id = link.incident_id

    pe_id = None
    if create:
        person_id = form_vars_get("person_id")
        if person_id:
            # The assignee to notify
            ptable = s3db.pr_person
            person = db(ptable.id == person_id).select(ptable.pe_id,
                                                       limitby = (0, 1)
                                                       ).first()
            pe_id = person.pe_id

        job_title_id = form_vars_get("job_title_id")
        if job_title_id:
            # Log the request in the incident log
            s3db.event_incident_log.insert(incident_id = incident_id,
                                           name = "Person Requested",
                                           comments = s3db.event_human_resource.job_title_id.represent(job_title_id),
                                           )
    else:
        # Update: build a human-readable diff of the changed fields
        record = form.record
        if record:
            from s3dal import Field
            changed = {}
            for var in form_vars:
                vvar = form_vars[var]
                if isinstance(vvar, Field):
                    # Skip Field defaults injected into form.vars
                    continue
                rvar = record.get(var, "NOT_PRESENT")
                if rvar != "NOT_PRESENT" and vvar != rvar:
                    f = table[var]
                    if var == "pe_id":
                        pe_id = vvar
                    type_ = f.type
                    if type_ == "integer" or \
                       type_.startswith("reference"):
                        # Normalise string form values for comparison
                        if vvar:
                            vvar = int(vvar)
                        if vvar == rvar:
                            continue
                    represent = table[var].represent
                    if represent:
                        if hasattr(represent, "show_link"):
                            # Plain text for the log, no hyperlinks
                            represent.show_link = False
                    else:
                        represent = lambda o: o
                    if rvar:
                        changed[var] = "%s changed from %s to %s" % \
                            (f.label, represent(rvar), represent(vvar))
                    else:
                        changed[var] = "%s changed to %s" % \
                            (f.label, represent(vvar))
            if changed:
                # Log all changes as a single incident log entry
                table = s3db.event_incident_log
                text = []
                for var in changed:
                    text.append(changed[var])
                text = "\n".join(text)
                table.insert(incident_id = incident_id,
                             name = "Person Request Updated",
                             comments = text,
                             )

    if pe_id:
        # Notify the (newly-)assigned person by SMS with a deep link
        if settings.get_incident_label(): # Incidents presented as "Tickets"
            label = T("Ticket")
        else:
            label = T("Incident")
        current.msg.send_by_pe_id(pe_id,
                                  subject = "",
                                  message = "You have been assigned to an %s: %s%s" % \
                                    (label,
                                     settings.get_base_public_url(),
                                     URL(c="event", f= "incident",
                                         args = [incident_id, "human_resource", link_id]),
                                     ),
                                  contact_method = "SMS"
                                  )
def customise_event_human_resource_resource(r, tablename):
    """
    Customise event_human_resource: datetime pickers for start/end,
    "Person"-oriented CRUD strings, and onaccept hooks for incident
    logging & assignee notification.
    """
    s3db = current.s3db
    table = s3db.event_human_resource

    from gluon import IS_EMPTY_OR
    from s3 import IS_UTC_DATETIME, S3CalendarWidget, S3DateTime
    # UTC validation/representation and calendar widgets for both dates
    for f in (table.start_date, table.end_date):
        f.requires = IS_EMPTY_OR(IS_UTC_DATETIME())
        f.represent = lambda dt: S3DateTime.datetime_represent(dt, utc=True)
        f.widget = S3CalendarWidget(timepicker = True)

    # The two branches differ only in the noun "ticket" vs "incident"
    if settings.get_incident_label(): # Incidents presented as "Tickets"
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Person"),
            title_display = T("Person Details"),
            title_list = T("Personnel"),
            title_update = T("Edit Person"),
            label_list_button = T("List Personnel"),
            label_delete_button = T("Remove Person from this ticket"),
            msg_record_created = T("Person added"),
            msg_record_modified = T("Person updated"),
            msg_record_deleted = T("Person removed"),
            msg_list_empty = T("No Persons currently registered for this ticket"))
    else:
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Person"),
            title_display = T("Person Details"),
            title_list = T("Personnel"),
            title_update = T("Edit Person"),
            label_list_button = T("List Personnel"),
            label_delete_button = T("Remove Person from this incident"),
            msg_record_created = T("Person added"),
            msg_record_modified = T("Person updated"),
            msg_record_deleted = T("Person removed"),
            msg_list_empty = T("No Persons currently registered for this incident"))

    # Log & notify on create/update
    s3db.configure(tablename,
                   create_onaccept = event_human_resource_onaccept,
                   update_onaccept = lambda form:
                                        event_human_resource_onaccept(form, create=False),
                   )

settings.customise_event_human_resource_resource = customise_event_human_resource_resource
def customise_event_scenario_controller(**attr):
    """
    Customise the event/scenario controller:
    - hide the action plan outside the plan view
    - after create, redirect to the Action Plan
    - attach the common event rheader
    """
    s3 = current.response.s3

    standard_prep = s3.prep

    def prep(r):
        # Run the standard prep first, aborting if it fails
        if callable(standard_prep) and not standard_prep(r):
            return False

        method = r.method
        if method != "plan":
            action_plan = r.table.action_plan
            action_plan.readable = action_plan.writable = False

        if method == "create" and r.http == "POST":
            # After creation, proceed straight to the Action Plan
            r.resource.configure(create_next = URL(c="event", f="scenario",
                                                   args = ["[id]", "plan"]),
                                 )
        return True

    s3.prep = prep

    # Remove the side menu
    current.menu.options = None

    attr["rheader"] = event_rheader
    return attr

settings.customise_event_scenario_controller = customise_event_scenario_controller
def customise_event_scenario_asset_resource(r, tablename):
    """
    Customise event_scenario_asset: present records as "Equipment".

    The two branches differ only in the noun "ticket" vs "incident" —
    kept as separate literals so each string remains a T() key.
    """
    table = current.s3db.event_scenario_asset
    table.item_id.label = T("Item Type")
    table.asset_id.label = T("Specific Item")

    if settings.get_incident_label(): # Incidents presented as "Tickets"
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Equipment"),
            title_display = T("Equipment Details"),
            title_list = T("Equipment"),
            title_update = T("Edit Equipment"),
            label_list_button = T("List Equipment"),
            label_delete_button = T("Remove Equipment from this ticket"),
            msg_record_created = T("Equipment added"),
            msg_record_modified = T("Equipment updated"),
            msg_record_deleted = T("Equipment removed"),
            msg_list_empty = T("No Equipment currently registered for this ticket"))
    else:
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Equipment"),
            title_display = T("Equipment Details"),
            title_list = T("Equipment"),
            title_update = T("Edit Equipment"),
            label_list_button = T("List Equipment"),
            label_delete_button = T("Remove Equipment from this incident"),
            msg_record_created = T("Equipment added"),
            msg_record_modified = T("Equipment updated"),
            msg_record_deleted = T("Equipment removed"),
            msg_list_empty = T("No Equipment currently registered for this incident"))

settings.customise_event_scenario_asset_resource = customise_event_scenario_asset_resource
def customise_event_scenario_human_resource_resource(r, tablename):
    """
    Customise event_scenario_human_resource: "Person"-oriented CRUD
    strings; branches differ only in the noun "ticket" vs "incident".
    """
    if settings.get_incident_label(): # Incidents presented as "Tickets"
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Person"),
            title_display = T("Person Details"),
            title_list = T("Personnel"),
            title_update = T("Edit Person"),
            label_list_button = T("List Personnel"),
            label_delete_button = T("Remove Person from this ticket"),
            msg_record_created = T("Person added"),
            msg_record_modified = T("Person updated"),
            msg_record_deleted = T("Person removed"),
            msg_list_empty = T("No Persons currently registered for this ticket"))
    else:
        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Add Person"),
            title_display = T("Person Details"),
            title_list = T("Personnel"),
            title_update = T("Edit Person"),
            label_list_button = T("List Personnel"),
            label_delete_button = T("Remove Person from this incident"),
            msg_record_created = T("Person added"),
            msg_record_modified = T("Person updated"),
            msg_record_deleted = T("Person removed"),
            msg_list_empty = T("No Persons currently registered for this incident"))

settings.customise_event_scenario_human_resource_resource = customise_event_scenario_human_resource_resource
# -----------------------------------------------------------------------------
# HRM configuration for this template
settings.hrm.job_title_deploy = True
settings.hrm.org_dependent_job_titles = True
def customise_org_organisation_resource(r, tablename):
    """
    Customise org_organisation: add an "On-call Duty Number" stored as
    an organisation tag ("duty"), validated/rendered as a phone number,
    and a simplified CRUD form.
    """
    s3db = current.s3db

    # Single "duty" tag component holding the on-call phone number
    s3db.add_components(tablename,
                        org_organisation_tag = (
                            {"name": "duty",
                             "joinby": "organisation_id",
                             "filterby": {"tag": "duty",
                                          },
                             "multiple": False,
                             },
                        ),
                        )

    from s3 import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineLink, \
                   IS_EMPTY_OR, IS_PHONE_NUMBER_MULTI, S3PhoneWidget, s3_phone_represent

    # Individual settings for the duty-number tag's value field
    components_get = s3db.resource(tablename).components.get
    duty = components_get("duty")
    f = duty.table.value
    # Bugfix: a trailing comma here previously assigned a 1-tuple
    # (s3_phone_represent,) as the represent, not the function itself
    f.represent = s3_phone_represent
    f.requires = IS_EMPTY_OR(IS_PHONE_NUMBER_MULTI())
    f.widget = S3PhoneWidget()

    crud_form = S3SQLCustomForm("name",
                                "acronym",
                                S3SQLInlineLink("organisation_type",
                                                field = "organisation_type_id",
                                                search = False,
                                                label = T("Type"),
                                                multiple = False,
                                                widget = "multiselect",
                                                ),
                                "country",
                                (T("Reception Phone #"), "phone"),
                                S3SQLInlineComponent("duty",
                                                     label = T("On-call Duty Number"),
                                                     fields = [("", "value")],
                                                     multiple = False,
                                                     ),
                                "website",
                                "logo",
                                "comments",
                                )

    s3db.configure(tablename,
                   crud_form = crud_form,
                   )

settings.customise_org_organisation_resource = customise_org_organisation_resource
def project_task_onaccept(form, create=True):
    """
    onaccept for project_task:
    - log task creation/updates against the linked Incident
    - notify the assignee: organisations via their duty number by SMS,
      persons via the configured task notification method

    Does nothing when called from the scenario controller, or when the
    task is not linked to an incident.

    @param form: the FORM
    @param create: True on create, False on update
    """
    if current.request.function == "scenario":
        # Scenario task
        # - don't send Notification
        return

    db = current.db
    s3db = current.s3db

    # Find the incident this task belongs to
    ltable = s3db.event_task
    form_vars = form.vars
    form_vars_get = form_vars.get
    task_id = form_vars_get("id")
    link = db(ltable.task_id == task_id).select(ltable.incident_id,
                                                limitby = (0, 1)
                                                ).first()
    if not link:
        # Not linked to an incident
        # - don't send Notification
        return

    incident_id = link.incident_id

    if create:
        pe_id = form_vars_get("pe_id")
        # Log the creation in the incident log
        name = form_vars_get("name")
        if name:
            s3db.event_incident_log.insert(incident_id = incident_id,
                                           name = "Task Created",
                                           comments = name,
                                           )
    else:
        # Update: build a human-readable diff of the changed fields
        pe_id = None
        record = form.record   # pre-update values (when available)
        if record:
            from s3dal import Field
            table = s3db.project_task
            changed = {}
            for var in form_vars:
                vvar = form_vars[var]
                if isinstance(vvar, Field):
                    # Skip Field defaults injected into form.vars
                    continue
                if var == "pe_id":
                    pe_id = vvar
                rvar = record.get(var, "NOT_PRESENT")
                if rvar != "NOT_PRESENT" and vvar != rvar:
                    f = table[var]
                    type_ = f.type
                    if type_ == "integer" or \
                       type_.startswith("reference"):
                        # Normalise string form values for comparison
                        if vvar:
                            vvar = int(vvar)
                        if vvar == rvar:
                            continue
                    represent = table[var].represent
                    if represent:
                        if hasattr(represent, "show_link"):
                            # Plain text for the log, no hyperlinks
                            represent.show_link = False
                    else:
                        represent = lambda o: o
                    if rvar:
                        changed[var] = "%s changed from %s to %s" % \
                            (f.label, represent(rvar), represent(vvar))
                    else:
                        changed[var] = "%s changed to %s" % \
                            (f.label, represent(vvar))
            if changed:
                # Log all changes as a single incident log entry
                table = s3db.event_incident_log
                text = []
                for var in changed:
                    text.append(changed[var])
                text = "\n".join(text)
                table.insert(incident_id = incident_id,
                             name = "Task Updated",
                             comments = text,
                             )

    if pe_id:
        # Notify the assignee with a deep link to the task
        message = "You have been assigned a Task: %s%s" % \
            (settings.get_base_public_url(),
             URL(c="event", f= "incident",
                 args = [incident_id, "task", task_id]),
             )
        from s3db.pr import pr_instance_type
        instance_type = pr_instance_type(pe_id)
        if instance_type == "org_organisation":
            # Organisations are notified via their duty number, by SMS
            otable = s3db.org_organisation
            ottable = s3db.org_organisation_tag
            query = (otable.pe_id == pe_id) & \
                    (ottable.organisation_id == otable.id) & \
                    (ottable.tag == "duty")
            duty = db(query).select(ottable.value,
                                    limitby = (0, 1)
                                    ).first()
            if duty:
                current.msg.send_sms_via_api(duty.value,
                                             message)
        else:
            # Persons are notified via the configured contact method
            task_notification = settings.get_event_task_notification()
            if task_notification:
                current.msg.send_by_pe_id(pe_id,
                                          subject = "%s: Task assigned to you" % settings.get_system_name_short(),
                                          message = message,
                                          contact_method = task_notification)
def customise_project_task_resource(r, tablename):
    """
    Customise project_task: hide the "source" field, use the default
    (non-custom) form, and hook up incident logging & notification.
    """
    s3db = current.s3db

    # Hide the unused "source" field
    source = s3db.project_task.source
    source.readable = source.writable = False

    def update_onaccept(form):
        return project_task_onaccept(form, create=False)

    s3db.configure(tablename,
                   crud_form = None,
                   create_onaccept = project_task_onaccept,
                   update_onaccept = update_onaccept,
                   )

settings.customise_project_task_resource = customise_project_task_resource
| true | true |
f7ffd7eb79bf3ba2a89c9da9051d078b6be0c139 | 1,735 | py | Python | simdeblur/model/loss/perceptual_loss.py | ljzycmd/SimDeblur | dd2f60c41176b75c4eaf80d740f547c206aa8227 | [
"MIT"
] | 190 | 2021-03-22T13:59:42.000Z | 2022-03-08T21:14:41.000Z | simdeblur/model/loss/perceptual_loss.py | ljzycmd/SimDeblur | dd2f60c41176b75c4eaf80d740f547c206aa8227 | [
"MIT"
] | 9 | 2021-04-26T06:44:40.000Z | 2022-03-25T07:48:30.000Z | simdeblur/model/loss/perceptual_loss.py | ljzycmd/SimDeblur | dd2f60c41176b75c4eaf80d740f547c206aa8227 | [
"MIT"
] | 27 | 2021-03-23T03:11:00.000Z | 2022-03-19T21:26:02.000Z | """ ************************************************
* fileName: perceptual_loss.py
* desc: Perceptual loss using vggnet with conv1_2, conv2_2, conv3_3 feature,
before relu layer.
* author: mingdeng_cao
* date: 2021/07/09 11:08
* last revised: None
************************************************ """
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import vgg19, vgg16
from ..build import LOSS_REGISTRY
@LOSS_REGISTRY.register()
class PerceptualLossVGG19(nn.Module):
    """
    VGG19-based perceptual loss.

    Feature maps are taken at the given indices of ``vgg19().features``
    (defaults 2, 7, 14 = conv1_2, conv2_2, conv3_3, before the ReLU)
    and compared with an MSE loss, weighted per layer.

    Args:
        layer_idx: indices into ``vgg19().features`` at which to tap
            activations (must be ascending; iteration stops after the
            last one).
        layer_weights: per-layer weights for the MSE terms; must have
            the same length as ``layer_idx``.
        reduction: reduction mode passed to ``F.mse_loss``.
    """

    # NOTE: defaults are tuples (immutable) rather than lists to avoid
    # the shared-mutable-default pitfall; lists are still accepted.
    def __init__(self, layer_idx=(2, 7, 14), layer_weights=(1, 0.2, 0.04), reduction="sum"):
        super().__init__()
        assert len(layer_idx) == len(layer_weights), \
            "layer_idx and layer_weights must have the same length!"
        self.layer_idx = list(layer_idx)
        self.layer_weights = list(layer_weights)
        self.vggnet_feats_layers = vgg19(pretrained=True).features
        # The VGG backbone is a fixed feature extractor: no gradients
        # for its weights (saves memory/compute; gradients still flow
        # to the input images).
        self.vggnet_feats_layers.eval()
        for param in self.vggnet_feats_layers.parameters():
            param.requires_grad_(False)
        self.reduction = reduction

    def vgg_forward(self, img):
        """Run img through the VGG stack, collecting the tapped feature maps."""
        selected_feats = []
        out = img
        # Keep the backbone on the same device/dtype as the input
        self.vggnet_feats_layers = self.vggnet_feats_layers.to(img)
        for i, layer in enumerate(self.vggnet_feats_layers):
            out = layer(out)
            if i in self.layer_idx:
                selected_feats.append(out)
            if i == self.layer_idx[-1]:
                # No need to run the deeper layers
                break
        assert len(selected_feats) == len(self.layer_idx)
        return selected_feats

    def forward(self, img1, img2):
        """Weighted sum of MSE between corresponding VGG features of img1 and img2."""
        selected_feats1 = self.vgg_forward(img1)
        selected_feats2 = self.vgg_forward(img2)
        loss = 0
        for i, (feat1, feat2) in enumerate(zip(selected_feats1, selected_feats2)):
            assert feat1.shape == feat2.shape, "The input tensor should be in same shape!"
            loss += F.mse_loss(feat1, feat2, reduction=self.reduction) * self.layer_weights[i]
        return loss
| 32.735849 | 94 | 0.617291 |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import vgg19, vgg16
from ..build import LOSS_REGISTRY
@LOSS_REGISTRY.register()
class PerceptualLossVGG19(nn.Module):
    """
    Perceptual loss computed from intermediate VGG19 feature maps.

    Feature maps are taken at the (0-based) indices in ``layer_idx`` of
    ``vgg19().features`` and compared with an MSE loss, weighted per
    layer by ``layer_weights``.
    """

    def __init__(self, layer_idx=[2, 7, 14], layer_weights=[1, 0.2, 0.04], reduction="sum"):
        # layer_idx: indices into vgg19().features to tap (ascending;
        #            iteration stops after the last one)
        # layer_weights: per-layer weights for the MSE terms
        # reduction: reduction mode passed to F.mse_loss
        super().__init__()
        self.layer_idx = layer_idx
        self.layer_weights = layer_weights
        self.vggnet_feats_layers = vgg19(pretrained=True).features
        self.reduction = reduction

    def vgg_forward(self, img):
        """Run img through the VGG stack, collecting the tapped feature maps."""
        selected_feats = []
        out = img
        # Move the backbone to the input's device/dtype on every call
        self.vggnet_feats_layers = self.vggnet_feats_layers.to(img)
        for i, layer in enumerate(self.vggnet_feats_layers):
            out = layer(out)
            if i in self.layer_idx:
                selected_feats.append(out)
            if i == self.layer_idx[-1]:
                # No need to run the deeper layers
                break
        assert len(selected_feats) == len(self.layer_idx)
        return selected_feats

    def forward(self, img1, img2):
        """Weighted sum of MSE between corresponding VGG features of img1 and img2."""
        selected_feats1 = self.vgg_forward(img1)
        selected_feats2 = self.vgg_forward(img2)
        loss = 0
        for i, (feat1, feat2) in enumerate(zip(selected_feats1, selected_feats2)):
            assert feat1.shape == feat2.shape, "The input tensor should be in same shape!"
            loss += F.mse_loss(feat1, feat2, reduction=self.reduction) * self.layer_weights[i]
        return loss
| true | true |
f7ffd8d7f5b9f1db0f196ac262eff03184566218 | 63,299 | py | Python | napalm/nxos_ssh/nxos_ssh.py | remingu/napalm | 884ac086e614c20bdfe2238876d70832d33d3a27 | [
"Apache-2.0"
] | null | null | null | napalm/nxos_ssh/nxos_ssh.py | remingu/napalm | 884ac086e614c20bdfe2238876d70832d33d3a27 | [
"Apache-2.0"
] | null | null | null | napalm/nxos_ssh/nxos_ssh.py | remingu/napalm | 884ac086e614c20bdfe2238876d70832d33d3a27 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2015 Spotify AB. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# import stdlib
from builtins import super
import re
import socket
# import third party lib
from netaddr import IPAddress, IPNetwork
from netaddr.core import AddrFormatError
# import NAPALM Base
from napalm.base import helpers
from napalm.base.exceptions import CommandErrorException, ReplaceConfigException
from napalm.nxos import NXOSDriverBase
# Easier to store these as constants
HOUR_SECONDS = 3600
DAY_SECONDS = 24 * HOUR_SECONDS
WEEK_SECONDS = 7 * DAY_SECONDS
YEAR_SECONDS = 365 * DAY_SECONDS
# STD REGEX PATTERNS
IP_ADDR_REGEX = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
IPV4_ADDR_REGEX = IP_ADDR_REGEX
IPV6_ADDR_REGEX_1 = r"::"
IPV6_ADDR_REGEX_2 = r"[0-9a-fA-F:]{1,39}::[0-9a-fA-F:]{1,39}"
IPV6_ADDR_REGEX_3 = (
r"[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:"
r"[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}"
)
# Should validate IPv6 address using an IP address library after matching with this regex
IPV6_ADDR_REGEX = r"(?:{}|{}|{})".format(
IPV6_ADDR_REGEX_1, IPV6_ADDR_REGEX_2, IPV6_ADDR_REGEX_3
)
IPV4_OR_IPV6_REGEX = r"(?:{}|{})".format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX)
MAC_REGEX = r"[a-fA-F0-9]{4}\.[a-fA-F0-9]{4}\.[a-fA-F0-9]{4}"
VLAN_REGEX = r"\d{1,4}"
RE_IPADDR = re.compile(r"{}".format(IP_ADDR_REGEX))
RE_MAC = re.compile(r"{}".format(MAC_REGEX))
# Period needed for 32-bit AS Numbers
ASN_REGEX = r"[\d\.]+"
RE_IP_ROUTE_VIA_REGEX = re.compile(
r" (?P<used>[\*| ])via ((?P<ip>" + IPV4_ADDR_REGEX + r")"
r"(%(?P<vrf>\S+))?, )?"
r"((?P<int>[\w./:]+), )?\[(\d+)/(?P<metric>\d+)\]"
r", (?P<age>[\d\w:]+), (?P<source>[\w]+)(-(?P<procnr>\d+))?"
r"(?P<rest>.*)"
)
RE_RT_VRF_NAME = re.compile(r"VRF \"(\S+)\"")
RE_RT_IPV4_ROUTE_PREF = re.compile(r"(" + IPV4_ADDR_REGEX + r"/\d{1,2}), ubest.*")
RE_BGP_PROTO_TAG = re.compile(r"BGP Protocol Tag\s+: (\d+)")
RE_BGP_REMOTE_AS = re.compile(r"remote AS (" + ASN_REGEX + r")")
RE_BGP_COMMUN = re.compile(r"[ ]{10}([\S ]+)")
def parse_intf_section(interface):
    """Parse a single entry from show interfaces output.

    Returns {intf_name: {description, is_enabled, is_up, last_flapped,
    mac_address, mtu, speed}} — speed converted from Kbit to Mbit
    (-1 when unavailable), last_flapped always -1.0 (not parsed here).

    Different cases:
    mgmt0 is up
    admin state is up

    Ethernet2/1 is up
    admin state is up, Dedicated Interface

    Vlan1 is down (Administratively down), line protocol is down, autostate enabled

    Ethernet154/1/48 is up (with no 'admin state')
    """
    interface = interface.strip()
    re_protocol = (
        r"^(?P<intf_name>\S+?)\s+is\s+(?P<status>.+?)"
        r",\s+line\s+protocol\s+is\s+(?P<protocol>\S+).*$"
    )
    re_intf_name_state = r"^(?P<intf_name>\S+) is (?P<intf_state>\S+).*"
    re_is_enabled_1 = r"^admin state is (?P<is_enabled>\S+)$"
    re_is_enabled_2 = r"^admin state is (?P<is_enabled>\S+), "
    re_is_enabled_3 = r"^.* is down.*Administratively down.*$"
    re_mac = r"^\s+Hardware:\s+(?P<hardware>.*),\s+address:\s+(?P<mac_address>\S+) "
    re_speed = (
        r"\s+MTU (?P<mtu>\S+)\s+bytes,\s+BW\s+(?P<speed>\S+)\s+(?P<speed_unit>\S+).*$"
    )
    re_mtu_nve = r"\s+MTU (?P<mtu_nve>\S+)\s+bytes.*$"
    re_description_1 = r"^\s+Description:\s+(?P<description>.*) (?:MTU|Internet)"
    re_description_2 = r"^\s+Description:\s+(?P<description>.*)$"
    re_hardware = r"^.* Hardware: (?P<hardware>\S+)$"

    # Check for 'protocol is ' lines
    match = re.search(re_protocol, interface, flags=re.M)
    if match:
        intf_name = match.group("intf_name")
        status = match.group("status")
        protocol = match.group("protocol")

        if "admin" in status.lower():
            # e.g. "(Administratively down)" in the status field
            is_enabled = False
        else:
            is_enabled = True
        is_up = bool("up" in protocol)

    else:
        # More standard is up, next line admin state is lines
        match = re.search(re_intf_name_state, interface)
        intf_name = helpers.canonical_interface_name(match.group("intf_name"))
        intf_state = match.group("intf_state").strip()
        is_up = True if intf_state == "up" else False

        admin_state_present = re.search("admin state is", interface)
        if admin_state_present:
            # Parse cases where 'admin state' string exists
            for x_pattern in [re_is_enabled_1, re_is_enabled_2]:
                match = re.search(x_pattern, interface, flags=re.M)
                if match:
                    is_enabled = match.group("is_enabled").strip()
                    is_enabled = True if re.search("up", is_enabled) else False
                    break
            else:
                msg = "Error parsing intf, 'admin state' never detected:\n\n{}".format(
                    interface
                )
                raise ValueError(msg)
        else:
            # No 'admin state' should be 'is up' or 'is down' strings
            # If interface is up; it is enabled
            is_enabled = True
            if not is_up:
                match = re.search(re_is_enabled_3, interface, flags=re.M)
                if match:
                    is_enabled = False

    match = re.search(re_mac, interface, flags=re.M)
    if match:
        mac_address = match.group("mac_address")
        mac_address = helpers.mac(mac_address)
    else:
        # Not all interfaces report a MAC (e.g. logical interfaces)
        mac_address = ""

    match = re.search(re_hardware, interface, flags=re.M)
    speed_exist = True
    if match:
        if match.group("hardware") == "NVE":
            # NVE interfaces have no BW field; only parse the MTU
            match = re.search(re_mtu_nve, interface, flags=re.M)
            mtu = int(match.group("mtu_nve"))
            speed_exist = False

    if speed_exist:
        match = re.search(re_speed, interface, flags=re.M)
        speed = int(match.group("speed"))
        mtu = int(match.group("mtu"))
        speed_unit = match.group("speed_unit")
        speed_unit = speed_unit.rstrip(",")
        # This was alway in Kbit (in the data I saw)
        if speed_unit != "Kbit":
            msg = "Unexpected speed unit in show interfaces parsing:\n\n{}".format(
                interface
            )
            raise ValueError(msg)
        speed = int(round(speed / 1000.0))  # Kbit -> Mbit
    else:
        speed = -1

    description = ""
    for x_pattern in [re_description_1, re_description_2]:
        match = re.search(x_pattern, interface, flags=re.M)
        if match:
            description = match.group("description")
            break

    return {
        intf_name: {
            "description": description,
            "is_enabled": is_enabled,
            "is_up": is_up,
            "last_flapped": -1.0,
            "mac_address": mac_address,
            "mtu": mtu,
            "speed": speed,
        }
    }
def convert_hhmmss(hhmmss):
    """Convert an HH:MM:SS duration string to a total number of seconds.

    Raises ValueError when the string does not have exactly three
    colon-separated integer fields.
    """
    parts = hhmmss.split(":")
    if len(parts) != 3:
        raise ValueError("Received invalid HH:MM:SS data: {}".format(hhmmss))
    hours, minutes, seconds = (int(part) for part in parts)
    return hours * 3600 + minutes * 60 + seconds
def bgp_time_conversion(bgp_uptime):
    """Convert a BGP uptime string to seconds.

    Handles the three formats emitted by NX-OS:
    HH:MM:SS (e.g. 00:14:23), two-unit abbreviations (1d17h, 8w5d,
    1y28w), and "never" (returns -1).

    Raises ValueError for any unrecognized format.
    """
    bgp_uptime = bgp_uptime.strip()

    if "never" in bgp_uptime:
        return -1

    if ":" in bgp_uptime:
        hours, minutes, seconds = (int(field) for field in bgp_uptime.split(":"))
        return hours * 3600 + minutes * 60 + seconds

    # Check if any letters 'w', 'h', 'd' are in the time string
    if set("whd") & set(bgp_uptime):
        forms = (
            (r"(\d+)d(\d+)h", DAY_SECONDS, 3600),           # 1d17h
            (r"(\d+)w(\d+)d", WEEK_SECONDS, DAY_SECONDS),   # 8w5d
            (r"(\d+)y(\d+)w", YEAR_SECONDS, WEEK_SECONDS),  # 1y28w
        )
        for pattern, unit_major, unit_minor in forms:
            match = re.search(pattern, bgp_uptime)
            if match:
                major = int(match.group(1))
                minor = int(match.group(2))
                return major * unit_major + minor * unit_minor

    raise ValueError("Unexpected value for BGP uptime string: {}".format(bgp_uptime))
def bgp_normalize_table_data(bgp_table):
    """The 'show bgp all summary vrf all' table can have entries that wrap multiple lines.

    2001:db8:4:701::2
                4 65535  163664  163693      145    0    0     3w2d 3
    2001:db8:e0:dd::1
                4    10  327491  327278      145    0    0     3w1d 4

    Rejoin such wrapped rows so each entry occupies one line.
    """
    # A neighbor address at end-of-line followed by the wrapped remainder:
    # drop the newline (and trailing whitespace) after the address
    wrapped_row = r"({})\s*\n".format(IPV4_OR_IPV6_REGEX)
    return re.sub(wrapped_row, r"\1", bgp_table.strip())
def bgp_table_parser(bgp_table):
    """Yield NAPALM-shaped dicts, one per line of a BGP summary table.

    Each yielded dict maps the peer IP to its attributes.
    Example line:
    10.2.1.14       4    10 472516 472238      361    0    0     3w1d 9
    """
    for row in bgp_table.strip().splitlines():
        fields = row.split()
        try:
            if re.search(r"Shut.*Admin", row):
                # Administratively-shut peers report their state as two
                # words, giving the row one extra column.
                (peer_ip, bgp_version, remote_as, msg_rcvd, msg_sent,
                 _, _, _, uptime, state_1, state_2) = fields
                state_pfxrcd = "{} {}".format(state_1, state_2)
            else:
                (peer_ip, bgp_version, remote_as, msg_rcvd, msg_sent,
                 _, _, _, uptime, state_pfxrcd) = fields
        except ValueError:
            raise ValueError(
                "Unexpected entry ({}) in BGP summary table".format(fields)
            )
        # A purely numeric state column means Established, and the value is
        # the count of received prefixes.
        try:
            received_prefixes = int(state_pfxrcd)
            is_up = True
        except ValueError:
            received_prefixes = -1
            is_up = False
        is_enabled = re.search(r"Shut.*Admin", state_pfxrcd) is None
        if not is_up:
            uptime = -1
        if uptime != -1:
            uptime = bgp_time_conversion(uptime)
        yield {
            peer_ip: {
                "is_enabled": is_enabled,
                "uptime": uptime,
                "remote_as": helpers.as_number(remote_as),
                "is_up": is_up,
                "description": "",
                "received_prefixes": received_prefixes,
            }
        }
def bgp_summary_parser(bgp_summary):
    """Parse one 'show bgp all summary vrf' section from an NX-OS device.

    Returns {vrf: {"router_id": ..., "peers": {ip: {...}}}} in the NAPALM
    get_bgp_neighbors shape.  Raises ValueError on an unsupported AFI, an
    invalid router-id, or an unrecognized table layout.
    """
    bgp_summary_dict = {}
    # Check for BGP summary information lines that have no data
    if len(bgp_summary.strip().splitlines()) <= 1:
        return {}
    allowed_afi = ["ipv4", "ipv6", "l2vpn"]
    vrf_regex = r"^BGP summary information for VRF\s+(?P<vrf>\S+),"
    afi_regex = (
        r"^BGP summary information.*address family (?P<afi>\S+ (?:Unicast|EVPN))"
    )
    local_router_regex = (
        r"^BGP router identifier\s+(?P<router_id>\S+)"
        r",\s+local AS number\s+(?P<local_as>\S+)"
    )
    # Collect vrf/afi/router_id/local_as from the section header.  Note
    # groupdict(1) substitutes 1 for groups that did not participate.
    for pattern in [vrf_regex, afi_regex, local_router_regex]:
        match = re.search(pattern, bgp_summary, flags=re.M)
        if match:
            bgp_summary_dict.update(match.groupdict(1))
    # Some post regex cleanup and validation
    vrf = bgp_summary_dict["vrf"]
    if vrf.lower() == "default":
        # NAPALM calls the default VRF "global".
        bgp_summary_dict["vrf"] = "global"
    afi = bgp_summary_dict["afi"]
    afi = afi.split()[0].lower()
    if afi not in allowed_afi:
        raise ValueError("AFI ({}) is invalid and not supported.".format(afi))
    bgp_summary_dict["afi"] = afi
    local_as = bgp_summary_dict["local_as"]
    local_as = helpers.as_number(local_as)
    match = re.search(IPV4_ADDR_REGEX, bgp_summary_dict["router_id"])
    if not match:
        raise ValueError(
            "BGP router_id ({}) is not valid".format(bgp_summary_dict["router_id"])
        )
    vrf = bgp_summary_dict["vrf"]
    bgp_return_dict = {vrf: {"router_id": bgp_summary_dict["router_id"], "peers": {}}}
    # Extract and process the tabular data
    tabular_divider = r"^Neighbor\s+.*PfxRcd$"
    tabular_data = re.split(tabular_divider, bgp_summary, flags=re.M)
    if len(tabular_data) != 2:
        msg = "Unexpected data processing BGP summary information:\n\n{}".format(
            bgp_summary
        )
        raise ValueError(msg)
    tabular_data = tabular_data[1]
    # Re-join wrapped rows, then parse each neighbor line into a dict.
    bgp_table = bgp_normalize_table_data(tabular_data)
    for bgp_entry in bgp_table_parser(bgp_table):
        bgp_return_dict[vrf]["peers"].update(bgp_entry)
    # Reshape each peer entry into the per-address-family NAPALM structure.
    bgp_new_dict = {}
    for neighbor, bgp_data in bgp_return_dict[vrf]["peers"].items():
        received_prefixes = bgp_data.pop("received_prefixes")
        bgp_data["address_family"] = {}
        prefixes_dict = {
            "sent_prefixes": -1,
            "accepted_prefixes": -1,
            "received_prefixes": received_prefixes,
        }
        bgp_data["address_family"][afi] = prefixes_dict
        bgp_data["local_as"] = local_as
        # FIX, hard-coding -- remote router-id is not parsed from output.
        bgp_data["remote_id"] = "0.0.0.0"
        bgp_new_dict[neighbor] = bgp_data
    bgp_return_dict[vrf]["peers"] = bgp_new_dict
    return bgp_return_dict
class NXOSSSHDriver(NXOSDriverBase):
def __init__(self, hostname, username, password, timeout=60, optional_args=None):
super().__init__(
hostname, username, password, timeout=timeout, optional_args=optional_args
)
self.platform = "nxos_ssh"
self.connector_type_map = {
"1000base-LH": "LC_CONNECTOR",
"1000base-SX": "LC_CONNECTOR",
"1000base-T": "Unknown",
"10Gbase-LR": "LC_CONNECTOR",
"10Gbase-SR": "LC_CONNECTOR",
"SFP-H10GB-CU1M": "DAC_CONNECTOR",
"SFP-H10GB-CU1.45M": "DAC_CONNECTOR",
"SFP-H10GB-CU3M": "DAC_CONNECTOR",
"SFP-H10GB-CU3.45M": "DAC_CONNECTOR",
}
def open(self):
self.device = self._netmiko_open(
device_type="cisco_nxos", netmiko_optional_args=self.netmiko_optional_args
)
    def close(self):
        """Close the underlying Netmiko SSH connection."""
        self._netmiko_close()
    def _send_command(self, command, raw_text=False, cmd_verify=True):
        """
        Wrapper for Netmiko's send_command method.

        The raw_text argument is not used here; it exists only so this
        method's signature matches the NX-API driver (code sharing).
        """
        return self.device.send_command(command, cmd_verify=cmd_verify)
def _send_command_list(self, commands, expect_string=None):
"""Wrapper for Netmiko's send_command method (for list of commands."""
output = ""
for command in commands:
output += self.device.send_command(
command,
strip_prompt=False,
strip_command=False,
expect_string=expect_string,
)
return output
def _send_config(self, commands):
if isinstance(commands, str):
commands = (command for command in commands.splitlines() if command)
return self.device.send_config_set(commands)
@staticmethod
def parse_uptime(uptime_str):
"""
Extract the uptime string from the given Cisco IOS Device.
Return the uptime in seconds as an integer
"""
# Initialize to zero
(years, weeks, days, hours, minutes) = (0, 0, 0, 0, 0)
uptime_str = uptime_str.strip()
time_list = uptime_str.split(",")
for element in time_list:
if re.search("year", element):
years = int(element.split()[0])
elif re.search("week", element):
weeks = int(element.split()[0])
elif re.search("day", element):
days = int(element.split()[0])
elif re.search("hour", element):
hours = int(element.split()[0])
elif re.search("minute", element):
minutes = int(element.split()[0])
elif re.search("second", element):
seconds = int(element.split()[0])
uptime_sec = (
(years * YEAR_SECONDS)
+ (weeks * WEEK_SECONDS)
+ (days * DAY_SECONDS)
+ (hours * 3600)
+ (minutes * 60)
+ seconds
)
return uptime_sec
def is_alive(self):
"""Returns a flag with the state of the SSH connection."""
null = chr(0)
try:
if self.device is None:
return {"is_alive": False}
else:
# Try sending ASCII null byte to maintain the connection alive
self._send_command(null, cmd_verify=False)
except (socket.error, EOFError):
# If unable to send, we can tell for sure that the connection is unusable,
# hence return False.
return {"is_alive": False}
return {"is_alive": self.device.remote_conn.transport.is_active()}
def _copy_run_start(self):
output = self.device.save_config()
if "complete" in output.lower():
return True
else:
msg = "Unable to save running-config to startup-config!"
raise CommandErrorException(msg)
def _load_cfg_from_checkpoint(self):
commands = [
"terminal dont-ask",
"rollback running-config file {}".format(self.candidate_cfg),
"no terminal dont-ask",
]
try:
rollback_result = self._send_command_list(commands, expect_string=r"[#>]")
finally:
self.changed = True
msg = rollback_result
if "Rollback failed." in msg:
raise ReplaceConfigException(msg)
def rollback(self):
if self.changed:
commands = [
"terminal dont-ask",
"rollback running-config file {}".format(self.rollback_cfg),
"no terminal dont-ask",
]
result = self._send_command_list(commands, expect_string=r"[#>]")
if "completed" not in result.lower():
raise ReplaceConfigException(result)
# If hostname changes ensure Netmiko state is updated properly
self._netmiko_device.set_base_prompt()
self._copy_run_start()
self.changed = False
def _apply_key_map(self, key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
new_dict[new_key] = str(value)
return new_dict
def _convert_uptime_to_seconds(self, uptime_facts):
seconds = int(uptime_facts["up_days"]) * 24 * 60 * 60
seconds += int(uptime_facts["up_hours"]) * 60 * 60
seconds += int(uptime_facts["up_mins"]) * 60
seconds += int(uptime_facts["up_secs"])
return seconds
    def get_facts(self):
        """Return a set of facts from the devices.

        Facts gathered: uptime, vendor, os_version, serial_number, model,
        hostname, fqdn and interface_list, scraped from 'show version',
        'show hosts', 'show interface status', 'show hostname' and
        'show inventory'.
        """
        # default values.
        vendor = "Cisco"
        uptime = -1
        serial_number, fqdn, os_version, hostname, domain_name, model = ("",) * 6
        # obtain output from device
        show_ver = self._send_command("show version")
        show_hosts = self._send_command("show hosts")
        show_int_status = self._send_command("show interface status")
        show_hostname = self._send_command("show hostname")
        # Prefer structured "| json" inventory output; fall back to screen
        # scraping when the device cannot return JSON (ValueError).
        try:
            show_inventory_table = self._get_command_table(
                "show inventory | json", "TABLE_inv", "ROW_inv"
            )
            # A single inventory row comes back as a bare dict, not a list.
            if isinstance(show_inventory_table, dict):
                show_inventory_table = [show_inventory_table]
            for row in show_inventory_table:
                # The name may or may not be quoted depending on platform.
                if row["name"] == '"Chassis"' or row["name"] == "Chassis":
                    serial_number = row.get("serialnum", "")
                    break
        except ValueError:
            show_inventory = self._send_command("show inventory")
            find_regexp = r"^NAME:\s+\"(.*)\",.*\n^PID:.*SN:\s+(\w*)"
            find = re.findall(find_regexp, show_inventory, re.MULTILINE)
            for row in find:
                if row[0] == "Chassis":
                    serial_number = row[1]
                    break
        # uptime/serial_number/IOS version
        for line in show_ver.splitlines():
            if " uptime is " in line:
                _, uptime_str = line.split(" uptime is ")
                uptime = self.parse_uptime(uptime_str)
            if "system: " in line or "NXOS: " in line:
                line = line.strip()
                os_version = line.split()[2]
                os_version = os_version.strip()
            if "cisco" in line and "hassis" in line:
                # Try "cisco <model> (" first, then "cisco <model> [cC]hassis";
                # the second match wins if both apply.
                match = re.search(r".cisco (.*) \(", line)
                if match:
                    model = match.group(1).strip()
                match = re.search(r".cisco (.* [cC]hassis)", line)
                if match:
                    model = match.group(1).strip()
        hostname = show_hostname.strip()
        # Determine domain_name and fqdn
        for line in show_hosts.splitlines():
            if "Default domain" in line:
                _, domain_name = re.split(r".*Default domain.*is ", line)
                domain_name = domain_name.strip()
                break
        if hostname.count(".") >= 2:
            # Hostname already looks fully qualified.
            fqdn = hostname
            # Remove domain name from hostname
            if domain_name:
                hostname = re.sub(re.escape(domain_name) + "$", "", hostname)
                hostname = hostname.strip(".")
        elif domain_name:
            fqdn = "{}.{}".format(hostname, domain_name)
        # interface_list filter
        interface_list = []
        show_int_status = show_int_status.strip()
        # Remove the header information
        show_int_status = re.sub(
            r"(?:^---------+$|^Port .*$|^ .*$)", "", show_int_status, flags=re.M
        )
        for line in show_int_status.splitlines():
            if not line:
                continue
            interface = line.split()[0]
            # Return canonical interface name
            interface_list.append(helpers.canonical_interface_name(interface))
        return {
            "uptime": int(uptime),
            "vendor": vendor,
            "os_version": str(os_version),
            "serial_number": str(serial_number),
            "model": str(model),
            "hostname": str(hostname),
            "fqdn": fqdn,
            "interface_list": interface_list,
        }
    def get_interfaces(self):
        """
        Get interface details.
        last_flapped is not implemented
        Example Output:
        { u'Vlan1': { 'description': u'',
                      'is_enabled': True,
                      'is_up': True,
                      'last_flapped': -1.0,
                      'mac_address': u'a493.4cc1.67a7',
                      'speed': 100},
          u'Vlan100': { 'description': u'Data Network',
                        'is_enabled': True,
                        'is_up': True,
                        'last_flapped': -1.0,
                        'mac_address': u'a493.4cc1.67a7',
                        'speed': 100},
          u'Vlan200': { 'description': u'Voice Network',
                        'is_enabled': True,
                        'is_up': True,
                        'last_flapped': -1.0,
                        'mac_address': u'a493.4cc1.67a7',
                        'speed': 100}}
        """
        interfaces = {}
        command = "show interface"
        output = self._send_command(command)
        if not output:
            return {}
        # Break output into per-interface sections (note, separator text is retained).
        # Because the separators are a capture group, re.split alternates
        # separator / section in the resulting list.
        separator1 = r"^\S+\s+is \S+.*\nadmin state is.*$"
        separator2 = r"^.* is .*, line protocol is .*$"
        separator3 = r"^.* is (?:down|up).*$"
        separators = r"({}|{}|{})".format(separator1, separator2, separator3)
        interface_lines = re.split(separators, output, flags=re.M)
        if len(interface_lines) == 1:
            msg = "Unexpected output data in '{}':\n\n{}".format(
                command, interface_lines
            )
            raise ValueError(msg)
        # Get rid of the blank data at the beginning
        interface_lines.pop(0)
        # Must be pairs of data (the separator and section corresponding to it)
        if len(interface_lines) % 2 != 0:
            msg = "Unexpected output data in '{}':\n\n{}".format(
                command, interface_lines
            )
            raise ValueError(msg)
        # Combine the separator and section into one string by consuming the
        # iterator two items at a time.
        intf_iter = iter(interface_lines)
        try:
            new_interfaces = [line + next(intf_iter, "") for line in intf_iter]
        except TypeError:
            raise ValueError()
        for entry in new_interfaces:
            interfaces.update(parse_intf_section(entry))
        return interfaces
def get_bgp_neighbors(self):
"""BGP neighbor information.
Supports VRFs and IPv4 and IPv6 AFIs
{
"global": {
"router_id": "1.1.1.103",
"peers": {
"10.99.99.2": {
"is_enabled": true,
"uptime": -1,
"remote_as": 22,
"address_family": {
"ipv4": {
"sent_prefixes": -1,
"accepted_prefixes": -1,
"received_prefixes": -1
}
},
"remote_id": "0.0.0.0",
"local_as": 22,
"is_up": false,
"description": ""
}
}
}
"""
bgp_dict = {}
# get summary output from device
cmd_bgp_all_sum = "show bgp all summary vrf all"
bgp_summary_output = self._send_command(cmd_bgp_all_sum).strip()
section_separator = r"BGP summary information for "
bgp_summary_sections = re.split(section_separator, bgp_summary_output)
if len(bgp_summary_sections):
bgp_summary_sections.pop(0)
for bgp_section in bgp_summary_sections:
bgp_section = section_separator + bgp_section
bgp_dict.update(bgp_summary_parser(bgp_section))
# FIX -- look up logical or behavior we did in Cisco IOS bgp parser (make consistent here)
# FIX -- need to merge IPv6 and IPv4 AFI for same neighbor
return bgp_dict
def cli(self, commands):
cli_output = {}
if type(commands) is not list:
raise TypeError("Please enter a valid list of commands!")
for command in commands:
output = self._send_command(command)
cli_output[str(command)] = output
return cli_output
    def get_environment(self):
        """
        Get environment facts.
        power and fan are currently not implemented
        cpu is using 1-minute average
        """
        environment = {}
        # sys_resources contains cpu and mem output
        sys_resources = self._send_command("show system resources")
        temp_cmd = "show environment temperature"
        # cpu -- seed a placeholder entry, then overwrite/extend with the
        # per-core figures parsed by textfsm.
        environment.setdefault("cpu", {})
        environment["cpu"]["0"] = {}
        environment["cpu"]["0"]["%usage"] = -1.0
        system_resources_cpu = helpers.textfsm_extractor(
            self, "system_resources", sys_resources
        )
        for cpu in system_resources_cpu:
            # Usage is 100 minus the reported idle percentage.
            cpu_dict = {
                cpu.get("cpu_id"): {
                    "%usage": round(100 - float(cpu.get("cpu_idle")), 2)
                }
            }
            environment["cpu"].update(cpu_dict)
        # memory
        environment.setdefault("memory", {})
        for line in sys_resources.splitlines():
            # Memory usage: 16401224K total, 4798280K used, 11602944K free
            if "Memory usage:" in line:
                proc_total_mem, proc_used_mem, _ = line.split(",")
                proc_used_mem = re.search(r"\d+", proc_used_mem).group(0)
                proc_total_mem = re.search(r"\d+", proc_total_mem).group(0)
                break
        else:
            # for/else: no "Memory usage:" line was found.  NOTE(review):
            # 'line' here is the last line iterated, not the offending one.
            raise ValueError("Unexpected output from: {}".format(line))
        environment["memory"]["used_ram"] = int(proc_used_mem)
        environment["memory"]["available_ram"] = int(proc_total_mem)
        # temperature
        output = self._send_command(temp_cmd)
        environment.setdefault("temperature", {})
        for line in output.splitlines():
            # Module  Sensor  MajorThresh  MinorThres  CurTemp  Status
            # 1       Intake  70           42          28       Ok
            if re.match(r"^[0-9]", line):
                module, sensor, is_critical, is_alert, temp, _ = line.split()
                is_critical = float(is_critical)
                is_alert = float(is_alert)
                temp = float(temp)
                env_value = {
                    "is_alert": temp >= is_alert,
                    "is_critical": temp >= is_critical,
                    "temperature": temp,
                }
                location = "{0}-{1}".format(sensor, module)
                environment["temperature"][location] = env_value
        # Initialize 'power' and 'fan' to default values (not implemented)
        environment.setdefault("power", {})
        environment["power"]["invalid"] = {
            "status": True,
            "output": -1.0,
            "capacity": -1.0,
        }
        environment.setdefault("fans", {})
        environment["fans"]["invalid"] = {"status": True}
        return environment
    def get_arp_table(self, vrf=""):
        """
        Get arp table information.
        Return a list of dictionaries having the following set of keys:
            * interface (string)
            * mac (string)
            * ip (string)
            * age (float)
        For example::
            [
                {
                    'interface' : 'MgmtEth0/RSP0/CPU0/0',
                    'mac'       : '5c:5e:ab:da:3c:f0',
                    'ip'        : '172.17.17.1',
                    'age'       : 12.0
                },
                {
                    'interface': 'MgmtEth0/RSP0/CPU0/0',
                    'mac'       : '66:0e:94:96:e0:ff',
                    'ip'        : '172.17.17.2',
                    'age'       : 14.0
                }
            ]
        """
        arp_table = []
        # Default to all VRFs; INCOMPLETE entries carry no MAC and are skipped.
        command = "show ip arp vrf {} | exc INCOMPLETE".format(vrf or "all")
        output = self._send_command(command)
        # Everything after the column-header line is table data.
        separator = r"^Address\s+Age.*Interface.*$"
        arp_list = re.split(separator, output, flags=re.M)
        if len(arp_list) != 2:
            raise ValueError("Error processing arp table output:\n\n{}".format(output))
        arp_entries = arp_list[1].strip()
        for line in arp_entries.splitlines():
            if len(line.split()) >= 4:
                # Search for extra characters to strip, currently strip '*', '+', '#', 'D'
                line = re.sub(r"\s+[\*\+\#D]{1,4}\s*$", "", line, flags=re.M)
                address, age, mac, interface = line.split()
            else:
                raise ValueError("Unexpected output from: {}".format(line.split()))
            if age == "-":
                # No age reported for this entry.
                age = -1.0
            elif ":" not in age:
                # Cisco sometimes returns a sub second arp time 0.411797
                try:
                    age = float(age)
                except ValueError:
                    age = -1.0
            else:
                age = convert_hhmmss(age)
            age = float(age)
            age = round(age, 1)
            # Validate we matched correctly
            if not re.search(RE_IPADDR, address):
                raise ValueError("Invalid IP Address detected: {}".format(address))
            if not re.search(RE_MAC, mac):
                raise ValueError("Invalid MAC Address detected: {}".format(mac))
            entry = {
                "interface": interface,
                "mac": helpers.mac(mac),
                "ip": address,
                "age": age,
            }
            arp_table.append(entry)
        return arp_table
def _get_ntp_entity(self, peer_type):
ntp_entities = {}
command = "show ntp peers"
output = self._send_command(command)
for line in output.splitlines():
# Skip first two lines and last line of command output
if line == "" or "-----" in line or "Peer IP Address" in line:
continue
elif IPAddress(len(line.split()[0])).is_unicast:
peer_addr = line.split()[0]
ntp_entities[peer_addr] = {}
else:
raise ValueError("Did not correctly find a Peer IP Address")
return ntp_entities
    def get_ntp_peers(self):
        """Return configured NTP peers as {address: {}}."""
        return self._get_ntp_entity("Peer")
    def get_ntp_servers(self):
        """Return configured NTP servers as {address: {}}."""
        return self._get_ntp_entity("Server")
    def get_interfaces_ip(self):
        """
        Get interface IP details. Returns a dictionary of dictionaries.
        Sample output:
        {
            "Ethernet2/3": {
                "ipv4": {
                    "4.4.4.4": {
                        "prefix_length": 16
                    }
                },
                "ipv6": {
                    "2001:db8::1": {
                        "prefix_length": 10
                    },
                    "fe80::2ec2:60ff:fe4f:feb2": {
                        "prefix_length": "128"
                    }
                }
            },
            "Ethernet2/2": {
                "ipv4": {
                    "2.2.2.2": {
                        "prefix_length": 27
                    }
                }
            }
        }
        """
        interfaces_ip = {}
        ipv4_command = "show ip interface vrf all"
        ipv6_command = "show ipv6 interface vrf all"
        output_v4 = self._send_command(ipv4_command)
        output_v6 = self._send_command(ipv6_command)
        # Parse IPv4 first; 'interface' persists across lines until the next
        # "Interface status" header line resets it.
        v4_interfaces = {}
        for line in output_v4.splitlines():
            # Ethernet2/2, Interface status: protocol-up/link-up/admin-up, iod: 38,
            # IP address: 2.2.2.2, IP subnet: 2.2.2.0/27 route-preference: 0, tag: 0
            # IP address: 3.3.3.3, IP subnet: 3.3.3.0/25 secondary route-preference: 0, tag: 0
            if "Interface status" in line:
                interface = line.split(",")[0]
                continue
            if "IP address" in line:
                ip_address = line.split(",")[0].split()[2]
                try:
                    prefix_len = int(line.split()[5].split("/")[1])
                except (ValueError, IndexError):
                    # Subnet field missing or malformed.
                    prefix_len = "N/A"
                if ip_address == "none":
                    # Interface has no address; record it with no entries.
                    v4_interfaces.setdefault(interface, {})
                else:
                    val = {"prefix_length": prefix_len}
                    v4_interfaces.setdefault(interface, {})[ip_address] = val
        v6_interfaces = {}
        for line in output_v6.splitlines():
            # Ethernet2/4, Interface status: protocol-up/link-up/admin-up, iod: 40
            # IPv6 address:
            #   2001:11:2233::a1/24 [VALID]
            #   2001:cc11:22bb:0:2ec2:60ff:fe4f:feb2/64 [VALID]
            # IPv6 subnet:  2001::/24
            # IPv6 link-local address: fe80::2ec2:60ff:fe4f:feb2 (default) [VALID]
            # IPv6 address: fe80::a293:51ff:fe5f:5ce9 [VALID]
            if "Interface status" in line:
                interface = line.split(",")[0]
                continue
            if "VALID" in line:
                line = line.strip()
                if "link-local address" in line:
                    # match the following format:
                    # IPv6 link-local address: fe80::2ec2:60ff:fe4f:feb2 (default) [VALID]
                    ip_address = line.split()[3]
                    prefix_len = "64"
                elif "IPv6 address" in line:
                    # match the following format:
                    # IPv6 address: fe80::a293:51ff:fe5f:5ce9 [VALID]
                    ip_address = line.split()[2]
                    prefix_len = "64"
                else:
                    ip_address, prefix_len = line.split()[0].split("/")
                    prefix_len = int(prefix_len)
                val = {"prefix_length": prefix_len}
                v6_interfaces.setdefault(interface, {})[ip_address] = val
            else:
                # match the following format:
                # IPv6 address: none
                v6_interfaces.setdefault(interface, {})
        # Join data from intermediate dictionaries.
        for interface, data in v4_interfaces.items():
            interfaces_ip.setdefault(interface, {"ipv4": {}})["ipv4"] = data
        for interface, data in v6_interfaces.items():
            interfaces_ip.setdefault(interface, {"ipv6": {}})["ipv6"] = data
        return interfaces_ip
def get_mac_address_table(self):
"""
Returns a lists of dictionaries. Each dictionary represents an entry in the MAC Address
Table, having the following keys
* mac (string)
* interface (string)
* vlan (int)
* active (boolean)
* static (boolean)
* moves (int)
* last_move (float)
Format1:
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False
VLAN MAC Address Type age Secure NTFY Ports/SWID.SSID.LID
---------+-----------------+--------+---------+------+----+------------------
* 27 0026.f064.0000 dynamic - F F po1
* 27 001b.54c2.2644 dynamic - F F po1
* 27 0000.0c9f.f2bc dynamic - F F po1
* 27 0026.980a.df44 dynamic - F F po1
* 16 0050.56bb.0164 dynamic - F F po2
* 13 90e2.ba5a.9f30 dynamic - F F eth1/2
* 13 90e2.ba4b.fc78 dynamic - F F eth1/1
39 0100.5e00.4b4b igmp 0 F F Po1 Po2 Po22
110 0100.5e00.0118 igmp 0 F F Po1 Po2
Eth142/1/3 Eth112/1/5
Eth112/1/6 Eth122/1/5
"""
# The '*' is stripped out later
RE_MACTABLE_FORMAT1 = r"^\s+{}\s+{}\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+".format(
VLAN_REGEX, MAC_REGEX
)
RE_MACTABLE_FORMAT2 = r"^\s+{}\s+{}\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+".format(
"-", MAC_REGEX
)
# REGEX dedicated for lines with only interfaces (suite of the previous MAC address)
RE_MACTABLE_FORMAT3 = r"^\s+\S+"
mac_address_table = []
command = "show mac address-table"
output = self._send_command(command)
def remove_prefix(s, prefix):
return s[len(prefix) :] if s.startswith(prefix) else s
def process_mac_fields(vlan, mac, mac_type, interface):
"""Return proper data for mac address fields."""
if mac_type.lower() in ["self", "static", "system"]:
static = True
if vlan.lower() == "all":
vlan = 0
elif vlan == "-":
vlan = 0
if (
interface.lower() == "cpu"
or re.search(r"router", interface.lower())
or re.search(r"switch", interface.lower())
):
interface = ""
else:
static = False
if mac_type.lower() in ["dynamic"]:
active = True
else:
active = False
return {
"mac": helpers.mac(mac),
"interface": helpers.canonical_interface_name(interface),
"vlan": int(vlan),
"static": static,
"active": active,
"moves": -1,
"last_move": -1.0,
}
# Skip the header lines
output = re.split(r"^----.*", output, flags=re.M)[1:]
output = "\n".join(output).strip()
# Strip any leading characters
output = re.sub(r"^[\*\+GOCE]", "", output, flags=re.M)
output = re.sub(r"^\(R\)", "", output, flags=re.M)
output = re.sub(r"^\(T\)", "", output, flags=re.M)
output = re.sub(r"^\(F\)", "", output, flags=re.M)
output = re.sub(r"vPC Peer-Link", "vPC-Peer-Link", output, flags=re.M)
for line in output.splitlines():
# Every 500 Mac's Legend is reprinted, regardless of terminal length
if re.search(r"^Legend", line):
continue
elif re.search(r"^\s+\* \- primary entry", line):
continue
elif re.search(r"^\s+age \-", line):
continue
elif re.search(r"^\s+VLAN", line):
continue
elif re.search(r"^------", line):
continue
elif re.search(r"^\s*$", line):
continue
for pattern in [
RE_MACTABLE_FORMAT1,
RE_MACTABLE_FORMAT2,
RE_MACTABLE_FORMAT3,
]:
if re.search(pattern, line):
fields = line.split()
if len(fields) >= 7:
vlan, mac, mac_type, _, _, _, interface = fields[:7]
mac_address_table.append(
process_mac_fields(vlan, mac, mac_type, interface)
)
# there can be multiples interfaces for the same MAC on the same line
for interface in fields[7:]:
mac_address_table.append(
process_mac_fields(vlan, mac, mac_type, interface)
)
break
# interfaces can overhang to the next line (line only contains interfaces)
elif len(fields) < 7:
for interface in fields:
mac_address_table.append(
process_mac_fields(vlan, mac, mac_type, interface)
)
break
else:
raise ValueError("Unexpected output from: {}".format(repr(line)))
return mac_address_table
    def _get_bgp_route_attr(self, destination, vrf, next_hop, ip_version=4):
        """
        BGP protocol attributes for get_route_to.
        Only IPv4 supported

        Returns a dict with as_path, remote_address, local_preference,
        communities, local_as and remote_as for the best BGP path to
        *destination* via *next_hop*, or an empty dict if no matching
        path is found.
        """
        # Command used to find the neighbor definition for a given next-hop.
        CMD_SHIBNV = 'show ip bgp neighbors vrf {vrf} | include "is {neigh}"'
        # Each entry: the regex, the capture group to take, and the default
        # used when the attribute is absent from the path output.
        search_re_dict = {
            "aspath": {
                "re": r"AS-Path: ([\d\(\)]([\d\(\) ])*)",
                "group": 1,
                "default": "",
            },
            "bgpnh": {
                "re": r"[^|\\n][ ]{4}(" + IP_ADDR_REGEX + r")",
                "group": 1,
                "default": "",
            },
            "bgpfrom": {
                "re": r"from (" + IP_ADDR_REGEX + r")",
                "group": 1,
                "default": "",
            },
            "bgpcomm": {
                "re": r" Community: ([\w\d\-\: ]+)",
                "group": 1,
                "default": "",
            },
            "bgplp": {"re": r"localpref (\d+)", "group": 1, "default": ""},
            # external, internal, redist
            "bgpie": {"re": r"^: (\w+),", "group": 1, "default": ""},
            "vrfimp": {
                "re": r"Imported from [\S]+ \(VRF (\S+)\)",
                "group": 1,
                "default": "",
            },
        }
        bgp_attr = {}
        # get BGP AS number
        outbgp = self._send_command('show bgp process | include "BGP Protocol Tag"')
        matchbgpattr = RE_BGP_PROTO_TAG.match(outbgp)
        if not matchbgpattr:
            # BGP not running; nothing to report.
            return bgp_attr
        bgpas = matchbgpattr.group(1)
        if ip_version == 4:
            bgpcmd = "show ip bgp vrf {vrf} {destination}".format(
                vrf=vrf, destination=destination
            )
            outbgp = self._send_command(bgpcmd)
            outbgpsec = outbgp.split("Path type")
            # this should not happen (zero BGP paths)...
            if len(outbgpsec) == 1:
                return bgp_attr
            # process all bgp paths
            for bgppath in outbgpsec[1:]:
                if "is best path" not in bgppath:
                    # only best path is added to protocol attributes
                    continue
                # find BGP attributes
                for key in search_re_dict:
                    matchre = re.search(search_re_dict[key]["re"], bgppath)
                    if matchre:
                        groupnr = int(search_re_dict[key]["group"])
                        search_re_dict[key]["result"] = matchre.group(groupnr)
                    else:
                        search_re_dict[key]["result"] = search_re_dict[key]["default"]
                bgpnh = search_re_dict["bgpnh"]["result"]
                # if route is not leaked next hops have to match
                if (
                    not (search_re_dict["bgpie"]["result"] in ["redist", "local"])
                ) and (bgpnh != next_hop):
                    # this is not the right route
                    continue
                # find remote AS nr. of this neighbor
                bgpcmd = CMD_SHIBNV.format(vrf=vrf, neigh=bgpnh)
                outbgpnei = self._send_command(bgpcmd)
                matchbgpras = RE_BGP_REMOTE_AS.search(outbgpnei)
                if matchbgpras:
                    bgpras = matchbgpras.group(1)
                else:
                    # next-hop is not known in this vrf, route leaked from
                    # other vrf or from vpnv4 table?
                    # get remote AS nr. from as-path if it is ebgp neighbor
                    # if locally sourced remote AS if undefined
                    bgpie = search_re_dict["bgpie"]["result"]
                    if bgpie == "external":
                        bgpras = bgpie.split(" ")[0].replace("(", "")
                    elif bgpie == "internal":
                        bgpras = bgpas
                    else:  # redist, local
                        bgpras = ""
                # community -- standard communities come from the Community
                # line; extended ones are listed after "Extcommunity:".
                bothcomm = []
                extcomm = []
                stdcomm = search_re_dict["bgpcomm"]["result"].split()
                commsplit = bgppath.split("Extcommunity:")
                if len(commsplit) == 2:
                    for line in commsplit[1].split("\n")[1:]:
                        #  RT:65004:22
                        matchcommun = RE_BGP_COMMUN.match(line)
                        if matchcommun:
                            extcomm.append(matchcommun.group(1))
                        else:
                            # we've reached end of the extended community section
                            break
                bothcomm = stdcomm + extcomm
                # NOTE(review): int() below assumes a localpref was matched;
                # an absent localpref (default "") would raise ValueError.
                bgp_attr = {
                    "as_path": search_re_dict["aspath"]["result"].strip(),
                    "remote_address": search_re_dict["bgpfrom"]["result"],
                    "local_preference": int(search_re_dict["bgplp"]["result"]),
                    "communities": bothcomm,
                    "local_as": helpers.as_number(bgpas),
                }
                if bgpras:
                    bgp_attr["remote_as"] = helpers.as_number(bgpras)
                else:
                    bgp_attr["remote_as"] = 0  # 0? , locally sourced
        return bgp_attr
    def get_route_to(self, destination="", protocol="", longer=False):
        """
        Only IPv4 supported, vrf aware, longer_prefixes parameter ready

        Returns {prefix: [route_entry, ...]} parsed from
        'show ip route vrf all'.  NOTE(review): for a valid IPv6
        destination this method currently returns None (only the
        ip_version == 4 branch produces a result), and an unparsable
        destination returns an error *string* rather than raising.
        """
        if longer:
            raise NotImplementedError("Longer prefixes not yet supported for NXOS")
        longer_pref = ""  # longer_prefixes support, for future use
        vrf = ""  # always empty today: the vrf-specific branch below is latent
        ip_version = None
        try:
            ip_version = IPNetwork(destination).version
        except AddrFormatError:
            return "Please specify a valid destination!"
        if ip_version == 4:  # process IPv4 routing table
            routes = {}
            if vrf:
                send_cmd = "show ip route vrf {vrf} {destination} {longer}".format(
                    vrf=vrf, destination=destination, longer=longer_pref
                ).rstrip()
            else:
                send_cmd = "show ip route vrf all {destination} {longer}".format(
                    destination=destination, longer=longer_pref
                ).rstrip()
            out_sh_ip_rou = self._send_command(send_cmd)
            # IP Route Table for VRF "TEST"
            # Process the output one VRF section at a time.
            for vrfsec in out_sh_ip_rou.split("IP Route Table for ")[1:]:
                if "Route not found" in vrfsec:
                    continue
                vrffound = False
                preffound = False
                nh_list = []
                cur_prefix = ""
                for line in vrfsec.split("\n"):
                    if not vrffound:
                        # First line of the section names the VRF.
                        vrfstr = RE_RT_VRF_NAME.match(line)
                        if vrfstr:
                            curvrf = vrfstr.group(1)
                            vrffound = True
                    else:
                        # 10.10.56.0/24, ubest/mbest: 2/0
                        prefstr = RE_RT_IPV4_ROUTE_PREF.match(line)
                        if prefstr:
                            if preffound:  # precess previous prefix
                                # Flush accumulated next hops for the
                                # previous prefix before starting this one.
                                if cur_prefix not in routes:
                                    routes[cur_prefix] = []
                                for nh in nh_list:
                                    routes[cur_prefix].append(nh)
                                nh_list = []
                            else:
                                preffound = True
                            cur_prefix = prefstr.group(1)
                            continue
                        #    *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct
                        #     via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000,
                        #         type-2, tag 2112
                        #    *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2,
                        #         tag 2121
                        #    *via 10.17.207.73, [1/0], 1y18w, static
                        #    *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000,
                        #         external, tag 65000
                        #    *via Vlan596, [1/0], 1y18w, static
                        viastr = RE_IP_ROUTE_VIA_REGEX.match(line)
                        if viastr:
                            nh_used = viastr.group("used") == "*"
                            nh_ip = viastr.group("ip") or ""
                            # when next hop is leaked from other vrf, for future use
                            # nh_vrf = viastr.group('vrf')
                            nh_int = viastr.group("int")
                            nh_metric = viastr.group("metric")
                            nh_age = bgp_time_conversion(viastr.group("age"))
                            nh_source = viastr.group("source")
                            # for future use
                            # rest_of_line = viastr.group('rest')
                            # use only routes from specified protocol
                            if protocol and protocol != nh_source:
                                continue
                            # routing protocol process number, for future use
                            # nh_source_proc_nr = viastr.group('procnr)
                            if nh_int:
                                nh_int_canon = helpers.canonical_interface_name(nh_int)
                            else:
                                nh_int_canon = ""
                            route_entry = {
                                "protocol": nh_source,
                                "outgoing_interface": nh_int_canon,
                                "age": nh_age,
                                "current_active": nh_used,
                                "routing_table": curvrf,
                                "last_active": nh_used,
                                "next_hop": nh_ip,
                                "selected_next_hop": nh_used,
                                "inactive_reason": "",
                                "preference": int(nh_metric),
                            }
                            if nh_source == "bgp":
                                route_entry[
                                    "protocol_attributes"
                                ] = self._get_bgp_route_attr(cur_prefix, curvrf, nh_ip)
                            else:
                                route_entry["protocol_attributes"] = {}
                            nh_list.append(route_entry)
                # process last next hop entries
                if preffound:
                    if cur_prefix not in routes:
                        routes[cur_prefix] = []
                    for nh in nh_list:
                        routes[cur_prefix].append(nh)
            return routes
def get_snmp_information(self):
snmp_information = {}
command = "show running-config"
output = self._send_command(command)
snmp_config = helpers.textfsm_extractor(self, "snmp_config", output)
if not snmp_config:
return snmp_information
snmp_information = {
"contact": str(""),
"location": str(""),
"community": {},
"chassis_id": str(""),
}
for snmp_entry in snmp_config:
contact = str(snmp_entry.get("contact", ""))
if contact:
snmp_information["contact"] = contact
location = str(snmp_entry.get("location", ""))
if location:
snmp_information["location"] = location
community_name = str(snmp_entry.get("community", ""))
if not community_name:
continue
if community_name not in snmp_information["community"].keys():
snmp_information["community"][community_name] = {
"acl": str(snmp_entry.get("acl", "")),
"mode": str(snmp_entry.get("mode", "").lower()),
}
else:
acl = str(snmp_entry.get("acl", ""))
if acl:
snmp_information["community"][community_name]["acl"] = acl
mode = str(snmp_entry.get("mode", "").lower())
if mode:
snmp_information["community"][community_name]["mode"] = mode
return snmp_information
def get_users(self):
_CISCO_TO_CISCO_MAP = {"network-admin": 15, "network-operator": 5}
_DEFAULT_USER_DICT = {"password": "", "level": 0, "sshkeys": []}
users = {}
command = "show running-config"
output = self._send_command(command)
section_username_tabled_output = helpers.textfsm_extractor(
self, "users", output
)
for user in section_username_tabled_output:
username = user.get("username", "")
if not username:
continue
if username not in users:
users[username] = _DEFAULT_USER_DICT.copy()
password = user.get("password", "")
if password:
users[username]["password"] = str(password.strip())
level = 0
role = user.get("role", "")
if role.startswith("priv"):
level = int(role.split("-")[-1])
else:
level = _CISCO_TO_CISCO_MAP.get(role, 0)
if level > users.get(username).get("level"):
# unfortunately on Cisco you can set different priv levels for the same user
# Good news though: the device will consider the highest level
users[username]["level"] = level
sshkeytype = user.get("sshkeytype", "")
sshkeyvalue = user.get("sshkeyvalue", "")
if sshkeytype and sshkeyvalue:
if sshkeytype not in ["ssh-rsa", "ssh-dsa"]:
continue
users[username]["sshkeys"].append(str(sshkeyvalue))
return users
    def get_vlans(self):
        """Return VLANs as ``{vlan_id: {"name": ..., "interfaces": [...]}}``
        parsed from the JSON output of ``show vlan brief``."""
        vlans = {}
        command = "show vlan brief | json"
        vlan_table_raw = self._get_command_table(
            command, "TABLE_vlanbriefxbrief", "ROW_vlanbriefxbrief"
        )
        # A single VLAN comes back as a bare dict instead of a list of rows.
        if isinstance(vlan_table_raw, dict):
            vlan_table_raw = [vlan_table_raw]
        for vlan in vlan_table_raw:
            # VLANs with no member ports lack the port-list key entirely.
            if "vlanshowplist-ifidx" not in vlan.keys():
                vlan["vlanshowplist-ifidx"] = []
            vlans[vlan["vlanshowbr-vlanid"]] = {
                "name": vlan["vlanshowbr-vlanname"],
                "interfaces": self._parse_vlan_ports(vlan["vlanshowplist-ifidx"]),
            }
        return vlans
def get_optics(self):
command = "show interface transceiver details"
output = self._send_command(command)
# Formatting data into return data structure
optics_detail = {}
# Extraction Regexps
port_ts_re = re.compile(r"^Ether.*?(?=\nEther|\Z)", re.M | re.DOTALL)
port_re = re.compile(r"^(Ether.*)[ ]*?$", re.M)
vendor_re = re.compile("name is (.*)$", re.M)
vendor_part_re = re.compile("part number is (.*)$", re.M)
vendor_rev_re = re.compile("revision is (.*)$", re.M)
serial_no_re = re.compile("serial number is (.*)$", re.M)
type_no_re = re.compile("type is (.*)$", re.M)
rx_instant_re = re.compile(r"Rx Power[ ]+(?:(\S+?)[ ]+dBm|(N.A))", re.M)
tx_instant_re = re.compile(r"Tx Power[ ]+(?:(\S+?)[ ]+dBm|(N.A))", re.M)
current_instant_re = re.compile(r"Current[ ]+(?:(\S+?)[ ]+mA|(N.A))", re.M)
port_ts_l = port_ts_re.findall(output)
for port_ts in port_ts_l:
port = port_re.search(port_ts).group(1)
# No transceiver is present in those case
if "transceiver is not present" in port_ts:
continue
if "transceiver is not applicable" in port_ts:
continue
port_detail = {"physical_channels": {"channel": []}}
# No metric present
vendor = vendor_re.search(port_ts).group(1)
vendor_part = vendor_part_re.search(port_ts).group(1)
vendor_rev = vendor_rev_re.search(port_ts).group(1)
serial_no = serial_no_re.search(port_ts).group(1)
type_s = type_no_re.search(port_ts).group(1)
state = {
"vendor": vendor.strip(),
"vendor_part": vendor_part.strip(),
"vendor_rev": vendor_rev.strip(),
"serial_no": serial_no.strip(),
"connector_type": self.connector_type_map.get(type_s, "Unknown"),
}
if "DOM is not supported" not in port_ts:
res = rx_instant_re.search(port_ts)
input_power = res.group(1) or res.group(2)
res = tx_instant_re.search(port_ts)
output_power = res.group(1) or res.group(2)
res = current_instant_re.search(port_ts)
current = res.group(1) or res.group(2)
# If interface is shutdown it returns "N/A" as output power
# or "N/A" as input power
# Converting that to -100.0 float
try:
float(output_power)
except ValueError:
output_power = -100.0
try:
float(input_power)
except ValueError:
input_power = -100.0
try:
float(current)
except ValueError:
current = -100.0
# Defaulting avg, min, max values to -100.0 since device does not
# return these values
optic_states = {
"index": 0,
"state": {
"input_power": {
"instant": (
float(input_power) if "input_power" else -100.0
),
"avg": -100.0,
"min": -100.0,
"max": -100.0,
},
"output_power": {
"instant": (
float(output_power) if "output_power" else -100.0
),
"avg": -100.0,
"min": -100.0,
"max": -100.0,
},
"laser_bias_current": {
"instant": (float(current) if "current" else -100.0),
"avg": 0.0,
"min": 0.0,
"max": 0.0,
},
},
}
port_detail["physical_channels"]["channel"].append(optic_states)
port_detail["state"] = state
optics_detail[port] = port_detail
return optics_detail
| 38.503041 | 99 | 0.501825 |
from builtins import super
import re
import socket
from netaddr import IPAddress, IPNetwork
from netaddr.core import AddrFormatError
from napalm.base import helpers
from napalm.base.exceptions import CommandErrorException, ReplaceConfigException
from napalm.nxos import NXOSDriverBase
# Time-unit helpers used by the uptime/BGP-uptime parsers below.
HOUR_SECONDS = 3600
DAY_SECONDS = 24 * HOUR_SECONDS
WEEK_SECONDS = 7 * DAY_SECONDS
YEAR_SECONDS = 365 * DAY_SECONDS
# Address-matching fragments (loose: octet ranges are not validated here).
IP_ADDR_REGEX = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
IPV4_ADDR_REGEX = IP_ADDR_REGEX
IPV6_ADDR_REGEX_1 = r"::"
IPV6_ADDR_REGEX_2 = r"[0-9a-fA-F:]{1,39}::[0-9a-fA-F:]{1,39}"
IPV6_ADDR_REGEX_3 = (
    r"[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:"
    r"[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}:[0-9a-fA-F]{1,3}"
)
IPV6_ADDR_REGEX = r"(?:{}|{}|{})".format(
    IPV6_ADDR_REGEX_1, IPV6_ADDR_REGEX_2, IPV6_ADDR_REGEX_3
)
IPV4_OR_IPV6_REGEX = r"(?:{}|{})".format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX)
# Cisco dotted-quad MAC format, e.g. "0050.5680.0001".
MAC_REGEX = r"[a-fA-F0-9]{4}\.[a-fA-F0-9]{4}\.[a-fA-F0-9]{4}"
VLAN_REGEX = r"\d{1,4}"
RE_IPADDR = re.compile(r"{}".format(IP_ADDR_REGEX))
RE_MAC = re.compile(r"{}".format(MAC_REGEX))
# AS numbers may be plain or asdot ("65000.100") notation.
ASN_REGEX = r"[\d\.]+"
# One "via ..." next-hop line of "show ip route": leading '*' marks the
# next hop in use; ip/vrf/interface parts are each optional.
RE_IP_ROUTE_VIA_REGEX = re.compile(
    r" (?P<used>[\*| ])via ((?P<ip>" + IPV4_ADDR_REGEX + r")"
    r"(%(?P<vrf>\S+))?, )?"
    r"((?P<int>[\w./:]+), )?\[(\d+)/(?P<metric>\d+)\]"
    r", (?P<age>[\d\w:]+), (?P<source>[\w]+)(-(?P<procnr>\d+))?"
    r"(?P<rest>.*)"
)
RE_RT_VRF_NAME = re.compile(r"VRF \"(\S+)\"")
RE_RT_IPV4_ROUTE_PREF = re.compile(r"(" + IPV4_ADDR_REGEX + r"/\d{1,2}), ubest.*")
RE_BGP_PROTO_TAG = re.compile(r"BGP Protocol Tag\s+: (\d+)")
RE_BGP_REMOTE_AS = re.compile(r"remote AS (" + ASN_REGEX + r")")
# Extended-community continuation lines are indented by exactly 10 spaces.
RE_BGP_COMMUN = re.compile(r"[ ]{10}([\S ]+)")
def parse_intf_section(interface):
    """Parse one "show interface" section into NAPALM's interface dict.

    Returns ``{intf_name: {description, is_enabled, is_up, last_flapped,
    mac_address, mtu, speed}}``.  ``last_flapped`` is fixed at -1.0 (not
    available here) and speed is converted from Kbit to Mbit.  Two header
    styles are handled: "X is up, line protocol is up" and "X is up" with a
    separate "admin state is ..." line.
    """
    interface = interface.strip()
    re_protocol = (
        r"^(?P<intf_name>\S+?)\s+is\s+(?P<status>.+?)"
        r",\s+line\s+protocol\s+is\s+(?P<protocol>\S+).*$"
    )
    re_intf_name_state = r"^(?P<intf_name>\S+) is (?P<intf_state>\S+).*"
    re_is_enabled_1 = r"^admin state is (?P<is_enabled>\S+)$"
    re_is_enabled_2 = r"^admin state is (?P<is_enabled>\S+), "
    re_is_enabled_3 = r"^.* is down.*Administratively down.*$"
    re_mac = r"^\s+Hardware:\s+(?P<hardware>.*),\s+address:\s+(?P<mac_address>\S+) "
    re_speed = (
        r"\s+MTU (?P<mtu>\S+)\s+bytes,\s+BW\s+(?P<speed>\S+)\s+(?P<speed_unit>\S+).*$"
    )
    re_mtu_nve = r"\s+MTU (?P<mtu_nve>\S+)\s+bytes.*$"
    re_description_1 = r"^\s+Description:\s+(?P<description>.*) (?:MTU|Internet)"
    re_description_2 = r"^\s+Description:\s+(?P<description>.*)$"
    re_hardware = r"^.* Hardware: (?P<hardware>\S+)$"
    # Style 1: "line protocol" present on the header line.
    match = re.search(re_protocol, interface, flags=re.M)
    if match:
        intf_name = match.group("intf_name")
        status = match.group("status")
        protocol = match.group("protocol")
        if "admin" in status.lower():
            is_enabled = False
        else:
            is_enabled = True
        is_up = bool("up" in protocol)
    else:
        # Style 2: plain "X is up/down" header, admin state on its own line.
        match = re.search(re_intf_name_state, interface)
        intf_name = helpers.canonical_interface_name(match.group("intf_name"))
        intf_state = match.group("intf_state").strip()
        is_up = True if intf_state == "up" else False
        admin_state_present = re.search("admin state is", interface)
        if admin_state_present:
            # Try both "admin state is X" and "admin state is X, ..." forms.
            for x_pattern in [re_is_enabled_1, re_is_enabled_2]:
                match = re.search(x_pattern, interface, flags=re.M)
                if match:
                    is_enabled = match.group("is_enabled").strip()
                    is_enabled = True if re.search("up", is_enabled) else False
                    break
            else:
                msg = "Error parsing intf, 'admin state' never detected:\n\n{}".format(
                    interface
                )
                raise ValueError(msg)
        else:
            # No explicit admin state: assume enabled unless the section
            # says "Administratively down".
            is_enabled = True
            if not is_up:
                match = re.search(re_is_enabled_3, interface, flags=re.M)
                if match:
                    is_enabled = False
    match = re.search(re_mac, interface, flags=re.M)
    if match:
        mac_address = match.group("mac_address")
        mac_address = helpers.mac(mac_address)
    else:
        # Some interfaces (loopbacks, tunnels) report no MAC.
        mac_address = ""
    match = re.search(re_hardware, interface, flags=re.M)
    speed_exist = True
    if match:
        # NVE interfaces report an MTU but no bandwidth.
        if match.group("hardware") == "NVE":
            match = re.search(re_mtu_nve, interface, flags=re.M)
            mtu = int(match.group("mtu_nve"))
            speed_exist = False
    if speed_exist:
        match = re.search(re_speed, interface, flags=re.M)
        speed = int(match.group("speed"))
        mtu = int(match.group("mtu"))
        speed_unit = match.group("speed_unit")
        speed_unit = speed_unit.rstrip(",")
        if speed_unit != "Kbit":
            msg = "Unexpected speed unit in show interfaces parsing:\n\n{}".format(
                interface
            )
            raise ValueError(msg)
        # Kbit -> Mbit.
        speed = int(round(speed / 1000.0))
    else:
        speed = -1
    description = ""
    for x_pattern in [re_description_1, re_description_2]:
        match = re.search(x_pattern, interface, flags=re.M)
        if match:
            description = match.group("description")
            break
    return {
        intf_name: {
            "description": description,
            "is_enabled": is_enabled,
            "is_up": is_up,
            "last_flapped": -1.0,
            "mac_address": mac_address,
            "mtu": mtu,
            "speed": speed,
        }
    }
def convert_hhmmss(hhmmss):
    """Convert an ``HH:MM:SS`` string into a number of seconds.

    Raises ValueError when the string does not contain exactly three
    colon-separated fields.
    """
    parts = hhmmss.split(":")
    if len(parts) != 3:
        raise ValueError("Received invalid HH:MM:SS data: {}".format(hhmmss))
    hours, minutes, seconds = (int(part) for part in parts)
    return hours * 3600 + minutes * 60 + seconds
def bgp_time_conversion(bgp_uptime):
    """Normalize a BGP uptime string to seconds.

    Accepted forms: "never" (returns -1), "HH:MM:SS", "XdYh", "XwYd" and
    "XyYw".  Anything else raises ValueError.
    """
    bgp_uptime = bgp_uptime.strip()
    if "never" in bgp_uptime:
        return -1
    if ":" in bgp_uptime:
        hours, minutes, seconds = (int(field) for field in bgp_uptime.split(":"))
        return hours * 3600 + minutes * 60 + seconds
    if set("whd") & set(bgp_uptime):
        # (pattern, weight of first group, weight of second group)
        unit_forms = (
            (r"(\d+)d(\d+)h", DAY_SECONDS, 3600),
            (r"(\d+)w(\d+)d", WEEK_SECONDS, DAY_SECONDS),
            (r"(\d+)y(\d+)w", YEAR_SECONDS, WEEK_SECONDS),
        )
        for pattern, big_weight, small_weight in unit_forms:
            found = re.search(pattern, bgp_uptime)
            if found:
                return (
                    int(found.group(1)) * big_weight
                    + int(found.group(2)) * small_weight
                )
    raise ValueError("Unexpected value for BGP uptime string: {}".format(bgp_uptime))
def bgp_normalize_table_data(bgp_table):
    """Rejoin BGP summary rows that wrapped after a long neighbor address.

    When the peer address overflows its column the device continues the row
    on the next line; remove the newline (and trailing spaces) that directly
    follows an IPv4/IPv6 address so every entry occupies a single line.
    """
    wrapped_neighbor = r"({})\s*\n".format(IPV4_OR_IPV6_REGEX)
    return re.sub(wrapped_neighbor, r"\1", bgp_table.strip())
def bgp_table_parser(bgp_table):
    """Yield ``{peer_ip: neighbor_dict}`` for each row of a normalized BGP
    summary table (output of ``bgp_normalize_table_data``)."""
    bgp_table = bgp_table.strip()
    for bgp_entry in bgp_table.splitlines():
        bgp_table_fields = bgp_entry.split()
        try:
            # Admin-shut peers print the state in two words ("Shut (Admin)"),
            # producing eleven whitespace-separated columns instead of ten.
            if re.search(r"Shut.*Admin", bgp_entry):
                (
                    peer_ip,
                    bgp_version,
                    remote_as,
                    msg_rcvd,
                    msg_sent,
                    _,
                    _,
                    _,
                    uptime,
                    state_1,
                    state_2,
                ) = bgp_table_fields
                state_pfxrcd = "{} {}".format(state_1, state_2)
            else:
                (
                    peer_ip,
                    bgp_version,
                    remote_as,
                    msg_rcvd,
                    msg_sent,
                    _,
                    _,
                    _,
                    uptime,
                    state_pfxrcd,
                ) = bgp_table_fields
        except ValueError:
            raise ValueError(
                "Unexpected entry ({}) in BGP summary table".format(bgp_table_fields)
            )
        is_enabled = True
        try:
            # Established sessions show the received-prefix count in the last
            # column; anything non-numeric is a state name (session down).
            received_prefixes = int(state_pfxrcd)
            is_up = True
        except ValueError:
            received_prefixes = -1
            is_up = False
            if re.search(r"Shut.*Admin", state_pfxrcd):
                is_enabled = False
        if not is_up:
            uptime = -1
        if uptime != -1:
            uptime = bgp_time_conversion(uptime)
        yield {
            peer_ip: {
                "is_enabled": is_enabled,
                "uptime": uptime,
                "remote_as": helpers.as_number(remote_as),
                "is_up": is_up,
                "description": "",
                "received_prefixes": received_prefixes,
            }
        }
def bgp_summary_parser(bgp_summary):
    """Parse one "BGP summary information for ..." section into NAPALM's
    ``{vrf: {"router_id": ..., "peers": {...}}}`` structure.

    Raises ValueError for unsupported address families, an invalid
    router-id, or a section whose neighbor table cannot be isolated.
    """
    bgp_summary_dict = {}
    # A section with only the header line carries no neighbor data.
    if len(bgp_summary.strip().splitlines()) <= 1:
        return {}
    allowed_afi = ["ipv4", "ipv6", "l2vpn"]
    vrf_regex = r"^BGP summary information for VRF\s+(?P<vrf>\S+),"
    afi_regex = (
        r"^BGP summary information.*address family (?P<afi>\S+ (?:Unicast|EVPN))"
    )
    local_router_regex = (
        r"^BGP router identifier\s+(?P<router_id>\S+)"
        r",\s+local AS number\s+(?P<local_as>\S+)"
    )
    for pattern in [vrf_regex, afi_regex, local_router_regex]:
        match = re.search(pattern, bgp_summary, flags=re.M)
        if match:
            # groupdict(1) substitutes 1 for any named group that didn't match.
            bgp_summary_dict.update(match.groupdict(1))
    vrf = bgp_summary_dict["vrf"]
    if vrf.lower() == "default":
        bgp_summary_dict["vrf"] = "global"
    afi = bgp_summary_dict["afi"]
    afi = afi.split()[0].lower()
    if afi not in allowed_afi:
        raise ValueError("AFI ({}) is invalid and not supported.".format(afi))
    bgp_summary_dict["afi"] = afi
    local_as = bgp_summary_dict["local_as"]
    local_as = helpers.as_number(local_as)
    match = re.search(IPV4_ADDR_REGEX, bgp_summary_dict["router_id"])
    if not match:
        raise ValueError(
            "BGP router_id ({}) is not valid".format(bgp_summary_dict["router_id"])
        )
    vrf = bgp_summary_dict["vrf"]
    bgp_return_dict = {vrf: {"router_id": bgp_summary_dict["router_id"], "peers": {}}}
    # Everything after the column-header line is the neighbor table.
    tabular_divider = r"^Neighbor\s+.*PfxRcd$"
    tabular_data = re.split(tabular_divider, bgp_summary, flags=re.M)
    if len(tabular_data) != 2:
        msg = "Unexpected data processing BGP summary information:\n\n{}".format(
            bgp_summary
        )
        raise ValueError(msg)
    tabular_data = tabular_data[1]
    bgp_table = bgp_normalize_table_data(tabular_data)
    for bgp_entry in bgp_table_parser(bgp_table):
        bgp_return_dict[vrf]["peers"].update(bgp_entry)
    # Reshape each peer into the NAPALM per-address-family structure.
    bgp_new_dict = {}
    for neighbor, bgp_data in bgp_return_dict[vrf]["peers"].items():
        received_prefixes = bgp_data.pop("received_prefixes")
        bgp_data["address_family"] = {}
        prefixes_dict = {
            "sent_prefixes": -1,
            "accepted_prefixes": -1,
            "received_prefixes": received_prefixes,
        }
        bgp_data["address_family"][afi] = prefixes_dict
        bgp_data["local_as"] = local_as
        # Remote router-id is not present in this output.
        bgp_data["remote_id"] = "0.0.0.0"
        bgp_new_dict[neighbor] = bgp_data
    bgp_return_dict[vrf]["peers"] = bgp_new_dict
    return bgp_return_dict
class NXOSSSHDriver(NXOSDriverBase):
    def __init__(self, hostname, username, password, timeout=60, optional_args=None):
        """SSH (netmiko) transport variant of the NX-OS driver."""
        super().__init__(
            hostname, username, password, timeout=timeout, optional_args=optional_args
        )
        self.platform = "nxos_ssh"
        # Maps "show interface transceiver" media types to the connector
        # names reported by get_optics(); unlisted types map to "Unknown".
        self.connector_type_map = {
            "1000base-LH": "LC_CONNECTOR",
            "1000base-SX": "LC_CONNECTOR",
            "1000base-T": "Unknown",
            "10Gbase-LR": "LC_CONNECTOR",
            "10Gbase-SR": "LC_CONNECTOR",
            "SFP-H10GB-CU1M": "DAC_CONNECTOR",
            "SFP-H10GB-CU1.45M": "DAC_CONNECTOR",
            "SFP-H10GB-CU3M": "DAC_CONNECTOR",
            "SFP-H10GB-CU3.45M": "DAC_CONNECTOR",
        }
    def open(self):
        """Open the SSH session (netmiko, device_type ``cisco_nxos``)."""
        self.device = self._netmiko_open(
            device_type="cisco_nxos", netmiko_optional_args=self.netmiko_optional_args
        )
    def close(self):
        """Close the underlying netmiko SSH session."""
        self._netmiko_close()
    def _send_command(self, command, raw_text=False, cmd_verify=True):
        """Send a single CLI command and return its output.

        NOTE(review): ``raw_text`` is accepted for signature compatibility
        with the NX-API driver but is ignored on this SSH transport.
        """
        return self.device.send_command(command, cmd_verify=cmd_verify)
def _send_command_list(self, commands, expect_string=None):
output = ""
for command in commands:
output += self.device.send_command(
command,
strip_prompt=False,
strip_command=False,
expect_string=expect_string,
)
return output
    def _send_config(self, commands):
        """Push configuration *commands* (one string or an iterable of lines)
        through netmiko's config mode and return the combined output."""
        if isinstance(commands, str):
            # Split a config blob into its non-empty lines (lazy generator).
            commands = (command for command in commands.splitlines() if command)
        return self.device.send_config_set(commands)
@staticmethod
def parse_uptime(uptime_str):
(years, weeks, days, hours, minutes) = (0, 0, 0, 0, 0)
uptime_str = uptime_str.strip()
time_list = uptime_str.split(",")
for element in time_list:
if re.search("year", element):
years = int(element.split()[0])
elif re.search("week", element):
weeks = int(element.split()[0])
elif re.search("day", element):
days = int(element.split()[0])
elif re.search("hour", element):
hours = int(element.split()[0])
elif re.search("minute", element):
minutes = int(element.split()[0])
elif re.search("second", element):
seconds = int(element.split()[0])
uptime_sec = (
(years * YEAR_SECONDS)
+ (weeks * WEEK_SECONDS)
+ (days * DAY_SECONDS)
+ (hours * 3600)
+ (minutes * 60)
+ seconds
)
return uptime_sec
    def is_alive(self):
        """Return ``{"is_alive": bool}`` reflecting whether the SSH session
        is still usable."""
        null = chr(0)
        try:
            if self.device is None:
                return {"is_alive": False}
            else:
                # Send a NUL byte: a cheap probe that exercises the channel
                # without producing output.
                self._send_command(null, cmd_verify=False)
        except (socket.error, EOFError):
            # Connection dropped while probing.
            return {"is_alive": False}
        return {"is_alive": self.device.remote_conn.transport.is_active()}
def _copy_run_start(self):
output = self.device.save_config()
if "complete" in output.lower():
return True
else:
msg = "Unable to save running-config to startup-config!"
raise CommandErrorException(msg)
    def _load_cfg_from_checkpoint(self):
        """Replace the running-config by rolling back to the candidate
        checkpoint file; raises ReplaceConfigException on failure."""
        commands = [
            "terminal dont-ask",
            "rollback running-config file {}".format(self.candidate_cfg),
            "no terminal dont-ask",
        ]
        try:
            rollback_result = self._send_command_list(commands, expect_string=r"[#>]")
        finally:
            # Even a failed rollback may have altered device state, so mark
            # the session as changed regardless of the outcome.
            self.changed = True
        msg = rollback_result
        if "Rollback failed." in msg:
            raise ReplaceConfigException(msg)
    def rollback(self):
        """Revert to the saved rollback checkpoint (if a change was applied),
        then persist the result to startup-config."""
        if self.changed:
            commands = [
                "terminal dont-ask",
                "rollback running-config file {}".format(self.rollback_cfg),
                "no terminal dont-ask",
            ]
            result = self._send_command_list(commands, expect_string=r"[#>]")
            if "completed" not in result.lower():
                raise ReplaceConfigException(result)
            # Rollback can leave the prompt in an unexpected state; re-learn it.
            self._netmiko_device.set_base_prompt()
            self._copy_run_start()
            self.changed = False
def _apply_key_map(self, key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
new_dict[new_key] = str(value)
return new_dict
def _convert_uptime_to_seconds(self, uptime_facts):
seconds = int(uptime_facts["up_days"]) * 24 * 60 * 60
seconds += int(uptime_facts["up_hours"]) * 60 * 60
seconds += int(uptime_facts["up_mins"]) * 60
seconds += int(uptime_facts["up_secs"])
return seconds
    def get_facts(self):
        """Return the standard NAPALM facts dict (uptime, vendor, os_version,
        serial_number, model, hostname, fqdn, interface_list) gathered from
        several "show" commands."""
        vendor = "Cisco"
        uptime = -1
        serial_number, fqdn, os_version, hostname, domain_name, model = ("",) * 6
        show_ver = self._send_command("show version")
        show_hosts = self._send_command("show hosts")
        show_int_status = self._send_command("show interface status")
        show_hostname = self._send_command("show hostname")
        try:
            # Prefer structured JSON inventory; fall back to text parsing
            # below when the JSON table cannot be extracted.
            show_inventory_table = self._get_command_table(
                "show inventory | json", "TABLE_inv", "ROW_inv"
            )
            if isinstance(show_inventory_table, dict):
                show_inventory_table = [show_inventory_table]
            for row in show_inventory_table:
                # Some platforms quote the name field, some don't.
                if row["name"] == '"Chassis"' or row["name"] == "Chassis":
                    serial_number = row.get("serialnum", "")
                    break
        except ValueError:
            show_inventory = self._send_command("show inventory")
            find_regexp = r"^NAME:\s+\"(.*)\",.*\n^PID:.*SN:\s+(\w*)"
            find = re.findall(find_regexp, show_inventory, re.MULTILINE)
            for row in find:
                if row[0] == "Chassis":
                    serial_number = row[1]
                    break
        for line in show_ver.splitlines():
            if " uptime is " in line:
                _, uptime_str = line.split(" uptime is ")
                uptime = self.parse_uptime(uptime_str)
            if "system: " in line or "NXOS: " in line:
                line = line.strip()
                os_version = line.split()[2]
                os_version = os_version.strip()
            if "cisco" in line and "hassis" in line:
                # Two chassis-line layouts; the second match wins if both hit.
                match = re.search(r".cisco (.*) \(", line)
                if match:
                    model = match.group(1).strip()
                match = re.search(r".cisco (.* [cC]hassis)", line)
                if match:
                    model = match.group(1).strip()
        hostname = show_hostname.strip()
        for line in show_hosts.splitlines():
            if "Default domain" in line:
                _, domain_name = re.split(r".*Default domain.*is ", line)
                domain_name = domain_name.strip()
                break
        # Derive hostname/fqdn: a dotted hostname already is the FQDN.
        if hostname.count(".") >= 2:
            fqdn = hostname
            if domain_name:
                hostname = re.sub(re.escape(domain_name) + "$", "", hostname)
                hostname = hostname.strip(".")
        elif domain_name:
            fqdn = "{}.{}".format(hostname, domain_name)
        interface_list = []
        show_int_status = show_int_status.strip()
        # Strip header, separator and wrapped continuation lines.
        show_int_status = re.sub(
            r"(?:^---------+$|^Port .*$|^ .*$)", "", show_int_status, flags=re.M
        )
        for line in show_int_status.splitlines():
            if not line:
                continue
            interface = line.split()[0]
            interface_list.append(helpers.canonical_interface_name(interface))
        return {
            "uptime": int(uptime),
            "vendor": vendor,
            "os_version": str(os_version),
            "serial_number": str(serial_number),
            "model": str(model),
            "hostname": str(hostname),
            "fqdn": fqdn,
            "interface_list": interface_list,
        }
    def get_interfaces(self):
        """Return per-interface details parsed from ``show interface``.

        The output is split on the three possible interface header styles;
        re.split with a capturing group keeps the headers, so entries come
        back as (header, body) pairs that are re-joined before parsing.
        """
        interfaces = {}
        command = "show interface"
        output = self._send_command(command)
        if not output:
            return {}
        separator1 = r"^\S+\s+is \S+.*\nadmin state is.*$"
        separator2 = r"^.* is .*, line protocol is .*$"
        separator3 = r"^.* is (?:down|up).*$"
        separators = r"({}|{}|{})".format(separator1, separator2, separator3)
        interface_lines = re.split(separators, output, flags=re.M)
        if len(interface_lines) == 1:
            msg = "Unexpected output data in '{}':\n\n{}".format(
                command, interface_lines
            )
            raise ValueError(msg)
        # Drop any preamble before the first interface header.
        interface_lines.pop(0)
        # Entries must alternate header/body, i.e. come in pairs.
        if len(interface_lines) % 2 != 0:
            msg = "Unexpected output data in '{}':\n\n{}".format(
                command, interface_lines
            )
            raise ValueError(msg)
        intf_iter = iter(interface_lines)
        try:
            # Pair each header with the body that follows it.
            new_interfaces = [line + next(intf_iter, "") for line in intf_iter]
        except TypeError:
            raise ValueError()
        for entry in new_interfaces:
            interfaces.update(parse_intf_section(entry))
        return interfaces
def get_bgp_neighbors(self):
bgp_dict = {}
cmd_bgp_all_sum = "show bgp all summary vrf all"
bgp_summary_output = self._send_command(cmd_bgp_all_sum).strip()
section_separator = r"BGP summary information for "
bgp_summary_sections = re.split(section_separator, bgp_summary_output)
if len(bgp_summary_sections):
bgp_summary_sections.pop(0)
for bgp_section in bgp_summary_sections:
bgp_section = section_separator + bgp_section
bgp_dict.update(bgp_summary_parser(bgp_section))
return bgp_dict
def cli(self, commands):
cli_output = {}
if type(commands) is not list:
raise TypeError("Please enter a valid list of commands!")
for command in commands:
output = self._send_command(command)
cli_output[str(command)] = output
return cli_output
    def get_environment(self):
        """Return environment data (cpu, memory, temperature, power, fans).

        Power and fan sensors are not parsed on this platform and are
        reported as a single "invalid" placeholder entry.
        """
        environment = {}
        sys_resources = self._send_command("show system resources")
        temp_cmd = "show environment temperature"
        environment.setdefault("cpu", {})
        # Placeholder entry in case the textfsm template yields nothing.
        environment["cpu"]["0"] = {}
        environment["cpu"]["0"]["%usage"] = -1.0
        system_resources_cpu = helpers.textfsm_extractor(
            self, "system_resources", sys_resources
        )
        for cpu in system_resources_cpu:
            # Usage = 100 - idle, rounded to two decimals.
            cpu_dict = {
                cpu.get("cpu_id"): {
                    "%usage": round(100 - float(cpu.get("cpu_idle")), 2)
                }
            }
            environment["cpu"].update(cpu_dict)
        environment.setdefault("memory", {})
        for line in sys_resources.splitlines():
            if "Memory usage:" in line:
                proc_total_mem, proc_used_mem, _ = line.split(",")
                proc_used_mem = re.search(r"\d+", proc_used_mem).group(0)
                proc_total_mem = re.search(r"\d+", proc_total_mem).group(0)
                break
        else:
            # for/else: no "Memory usage:" line was found at all.
            raise ValueError("Unexpected output from: {}".format(line))
        environment["memory"]["used_ram"] = int(proc_used_mem)
        environment["memory"]["available_ram"] = int(proc_total_mem)
        output = self._send_command(temp_cmd)
        environment.setdefault("temperature", {})
        for line in output.splitlines():
            # Sensor rows start with the numeric module id.
            if re.match(r"^[0-9]", line):
                module, sensor, is_critical, is_alert, temp, _ = line.split()
                is_critical = float(is_critical)
                is_alert = float(is_alert)
                temp = float(temp)
                env_value = {
                    "is_alert": temp >= is_alert,
                    "is_critical": temp >= is_critical,
                    "temperature": temp,
                }
                location = "{0}-{1}".format(sensor, module)
                environment["temperature"][location] = env_value
        environment.setdefault("power", {})
        environment["power"]["invalid"] = {
            "status": True,
            "output": -1.0,
            "capacity": -1.0,
        }
        environment.setdefault("fans", {})
        environment["fans"]["invalid"] = {"status": True}
        return environment
def get_arp_table(self, vrf=""):
arp_table = []
command = "show ip arp vrf {} | exc INCOMPLETE".format(vrf or "all")
output = self._send_command(command)
separator = r"^Address\s+Age.*Interface.*$"
arp_list = re.split(separator, output, flags=re.M)
if len(arp_list) != 2:
raise ValueError("Error processing arp table output:\n\n{}".format(output))
arp_entries = arp_list[1].strip()
for line in arp_entries.splitlines():
if len(line.split()) >= 4:
line = re.sub(r"\s+[\*\+\#D]{1,4}\s*$", "", line, flags=re.M)
address, age, mac, interface = line.split()
else:
raise ValueError("Unexpected output from: {}".format(line.split()))
if age == "-":
age = -1.0
elif ":" not in age:
try:
age = float(age)
except ValueError:
age = -1.0
else:
age = convert_hhmmss(age)
age = float(age)
age = round(age, 1)
if not re.search(RE_IPADDR, address):
raise ValueError("Invalid IP Address detected: {}".format(address))
if not re.search(RE_MAC, mac):
raise ValueError("Invalid MAC Address detected: {}".format(mac))
entry = {
"interface": interface,
"mac": helpers.mac(mac),
"ip": address,
"age": age,
}
arp_table.append(entry)
return arp_table
def _get_ntp_entity(self, peer_type):
ntp_entities = {}
command = "show ntp peers"
output = self._send_command(command)
for line in output.splitlines():
if line == "" or "-----" in line or "Peer IP Address" in line:
continue
elif IPAddress(len(line.split()[0])).is_unicast:
peer_addr = line.split()[0]
ntp_entities[peer_addr] = {}
else:
raise ValueError("Did not correctly find a Peer IP Address")
return ntp_entities
    def get_ntp_peers(self):
        """Return configured NTP peers (``{ip: {}}``)."""
        return self._get_ntp_entity("Peer")
    def get_ntp_servers(self):
        """Return configured NTP servers (``{ip: {}}``)."""
        return self._get_ntp_entity("Server")
    def get_interfaces_ip(self):
        """Return IPv4/IPv6 addresses per interface:
        ``{intf: {"ipv4": {ip: {"prefix_length": n}}, "ipv6": {...}}}``."""
        interfaces_ip = {}
        ipv4_command = "show ip interface vrf all"
        ipv6_command = "show ipv6 interface vrf all"
        output_v4 = self._send_command(ipv4_command)
        output_v6 = self._send_command(ipv6_command)
        v4_interfaces = {}
        for line in output_v4.splitlines():
            # "Interface status" lines start a new interface section;
            # remember which interface the following address lines belong to.
            if "Interface status" in line:
                interface = line.split(",")[0]
                continue
            if "IP address" in line:
                ip_address = line.split(",")[0].split()[2]
                try:
                    prefix_len = int(line.split()[5].split("/")[1])
                except (ValueError, IndexError):
                    prefix_len = "N/A"
                if ip_address == "none":
                    # Interface has no address: record it with an empty map.
                    v4_interfaces.setdefault(interface, {})
                else:
                    val = {"prefix_length": prefix_len}
                    v4_interfaces.setdefault(interface, {})[ip_address] = val
        v6_interfaces = {}
        for line in output_v6.splitlines():
            if "Interface status" in line:
                interface = line.split(",")[0]
                continue
            if "VALID" in line:
                line = line.strip()
                # Link-local and "IPv6 address" lines carry no prefix; /64 is
                # assumed for both.
                if "link-local address" in line:
                    ip_address = line.split()[3]
                    prefix_len = "64"
                elif "IPv6 address" in line:
                    ip_address = line.split()[2]
                    prefix_len = "64"
                else:
                    ip_address, prefix_len = line.split()[0].split("/")
                    prefix_len = int(prefix_len)
                val = {"prefix_length": prefix_len}
                v6_interfaces.setdefault(interface, {})[ip_address] = val
            else:
                v6_interfaces.setdefault(interface, {})
        for interface, data in v4_interfaces.items():
            interfaces_ip.setdefault(interface, {"ipv4": {}})["ipv4"] = data
        for interface, data in v6_interfaces.items():
            interfaces_ip.setdefault(interface, {"ipv6": {}})["ipv6"] = data
        return interfaces_ip
    def get_mac_address_table(self):
        """Return the MAC table as a list of NAPALM mac-entry dicts parsed
        from ``show mac address-table``."""
        # Three row shapes: numeric VLAN, "-" VLAN, and continuation lines.
        RE_MACTABLE_FORMAT1 = r"^\s+{}\s+{}\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+".format(
            VLAN_REGEX, MAC_REGEX
        )
        RE_MACTABLE_FORMAT2 = r"^\s+{}\s+{}\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+".format(
            "-", MAC_REGEX
        )
        RE_MACTABLE_FORMAT3 = r"^\s+\S+"
        mac_address_table = []
        command = "show mac address-table"
        output = self._send_command(command)
        # NOTE(review): helper appears unused; kept for interface stability.
        def remove_prefix(s, prefix):
            return s[len(prefix) :] if s.startswith(prefix) else s
        def process_mac_fields(vlan, mac, mac_type, interface):
            # Normalize one parsed row into the NAPALM mac-entry dict.
            if mac_type.lower() in ["self", "static", "system"]:
                static = True
                # "all"/"-" VLAN markers map to VLAN 0.
                if vlan.lower() == "all":
                    vlan = 0
                elif vlan == "-":
                    vlan = 0
                if (
                    interface.lower() == "cpu"
                    or re.search(r"router", interface.lower())
                    or re.search(r"switch", interface.lower())
                ):
                    interface = ""
            else:
                static = False
            if mac_type.lower() in ["dynamic"]:
                active = True
            else:
                active = False
            return {
                "mac": helpers.mac(mac),
                "interface": helpers.canonical_interface_name(interface),
                "vlan": int(vlan),
                "static": static,
                "active": active,
                "moves": -1,
                "last_move": -1.0,
            }
        # Keep only the table body and strip per-row flag characters.
        output = re.split(r"^----.*", output, flags=re.M)[1:]
        output = "\n".join(output).strip()
        output = re.sub(r"^[\*\+GOCE]", "", output, flags=re.M)
        output = re.sub(r"^\(R\)", "", output, flags=re.M)
        output = re.sub(r"^\(T\)", "", output, flags=re.M)
        output = re.sub(r"^\(F\)", "", output, flags=re.M)
        # Make "vPC Peer-Link" one token so column splitting stays aligned.
        output = re.sub(r"vPC Peer-Link", "vPC-Peer-Link", output, flags=re.M)
        for line in output.splitlines():
            if re.search(r"^Legend", line):
                continue
            elif re.search(r"^\s+\* \- primary entry", line):
                continue
            elif re.search(r"^\s+age \-", line):
                continue
            elif re.search(r"^\s+VLAN", line):
                continue
            elif re.search(r"^------", line):
                continue
            elif re.search(r"^\s*$", line):
                continue
            for pattern in [
                RE_MACTABLE_FORMAT1,
                RE_MACTABLE_FORMAT2,
                RE_MACTABLE_FORMAT3,
            ]:
                if re.search(pattern, line):
                    fields = line.split()
                    if len(fields) >= 7:
                        vlan, mac, mac_type, _, _, _, interface = fields[:7]
                        mac_address_table.append(
                            process_mac_fields(vlan, mac, mac_type, interface)
                        )
                        # there can be multiples interfaces for the same MAC on the same line
                        for interface in fields[7:]:
                            mac_address_table.append(
                                process_mac_fields(vlan, mac, mac_type, interface)
                            )
                        break
                    # interfaces can overhang to the next line (line only contains interfaces)
                    # NOTE(review): vlan/mac/mac_type intentionally carry over
                    # from the previous full row here.
                    elif len(fields) < 7:
                        for interface in fields:
                            mac_address_table.append(
                                process_mac_fields(vlan, mac, mac_type, interface)
                            )
                        break
            else:
                raise ValueError("Unexpected output from: {}".format(repr(line)))
        return mac_address_table
    def _get_bgp_route_attr(self, destination, vrf, next_hop, ip_version=4):
        """Return BGP protocol attributes (as-path, communities, local-pref,
        remote AS/address, local AS) for the best path to *destination* via
        *next_hop* in *vrf*; empty dict when nothing can be determined."""
        CMD_SHIBNV = 'show ip bgp neighbors vrf {vrf} | include "is {neigh}"'
        # Regex catalog: each entry holds the pattern, the capture group to
        # extract, and a default used when the pattern doesn't match.
        search_re_dict = {
            "aspath": {
                "re": r"AS-Path: ([\d\(\)]([\d\(\) ])*)",
                "group": 1,
                "default": "",
            },
            "bgpnh": {
                "re": r"[^|\\n][ ]{4}(" + IP_ADDR_REGEX + r")",
                "group": 1,
                "default": "",
            },
            "bgpfrom": {
                "re": r"from (" + IP_ADDR_REGEX + r")",
                "group": 1,
                "default": "",
            },
            "bgpcomm": {
                "re": r" Community: ([\w\d\-\: ]+)",
                "group": 1,
                "default": "",
            },
            "bgplp": {"re": r"localpref (\d+)", "group": 1, "default": ""},
            # external, internal, redist
            "bgpie": {"re": r"^: (\w+),", "group": 1, "default": ""},
            "vrfimp": {
                "re": r"Imported from [\S]+ \(VRF (\S+)\)",
                "group": 1,
                "default": "",
            },
        }
        bgp_attr = {}
        # get BGP AS number
        outbgp = self._send_command('show bgp process | include "BGP Protocol Tag"')
        matchbgpattr = RE_BGP_PROTO_TAG.match(outbgp)
        if not matchbgpattr:
            return bgp_attr
        bgpas = matchbgpattr.group(1)
        if ip_version == 4:
            bgpcmd = "show ip bgp vrf {vrf} {destination}".format(
                vrf=vrf, destination=destination
            )
            outbgp = self._send_command(bgpcmd)
        outbgpsec = outbgp.split("Path type")
        # this should not happen (zero BGP paths)...
        if len(outbgpsec) == 1:
            return bgp_attr
        # process all bgp paths
        for bgppath in outbgpsec[1:]:
            if "is best path" not in bgppath:
                # only best path is added to protocol attributes
                continue
            # find BGP attributes
            for key in search_re_dict:
                matchre = re.search(search_re_dict[key]["re"], bgppath)
                if matchre:
                    groupnr = int(search_re_dict[key]["group"])
                    search_re_dict[key]["result"] = matchre.group(groupnr)
                else:
                    search_re_dict[key]["result"] = search_re_dict[key]["default"]
            bgpnh = search_re_dict["bgpnh"]["result"]
            # if route is not leaked next hops have to match
            if (
                not (search_re_dict["bgpie"]["result"] in ["redist", "local"])
            ) and (bgpnh != next_hop):
                # this is not the right route
                continue
            # find remote AS nr. of this neighbor
            bgpcmd = CMD_SHIBNV.format(vrf=vrf, neigh=bgpnh)
            outbgpnei = self._send_command(bgpcmd)
            matchbgpras = RE_BGP_REMOTE_AS.search(outbgpnei)
            if matchbgpras:
                bgpras = matchbgpras.group(1)
            else:
                # next-hop is not known in this vrf, route leaked from
                # other vrf or from vpnv4 table?
                # get remote AS nr. from as-path if it is ebgp neighbor
                # if locally sourced remote AS if undefined
                bgpie = search_re_dict["bgpie"]["result"]
                if bgpie == "external":
                    bgpras = bgpie.split(" ")[0].replace("(", "")
                elif bgpie == "internal":
                    bgpras = bgpas
                else:  # redist, local
                    bgpras = ""
            # community
            bothcomm = []
            extcomm = []
            stdcomm = search_re_dict["bgpcomm"]["result"].split()
            commsplit = bgppath.split("Extcommunity:")
            if len(commsplit) == 2:
                for line in commsplit[1].split("\n")[1:]:
                    # RT:65004:22
                    matchcommun = RE_BGP_COMMUN.match(line)
                    if matchcommun:
                        extcomm.append(matchcommun.group(1))
                    else:
                        # we've reached end of the extended community section
                        break
            bothcomm = stdcomm + extcomm
            bgp_attr = {
                "as_path": search_re_dict["aspath"]["result"].strip(),
                "remote_address": search_re_dict["bgpfrom"]["result"],
                "local_preference": int(search_re_dict["bgplp"]["result"]),
                "communities": bothcomm,
                "local_as": helpers.as_number(bgpas),
            }
            if bgpras:
                bgp_attr["remote_as"] = helpers.as_number(bgpras)
            else:
                bgp_attr["remote_as"] = 0  # 0? , locally sourced
        return bgp_attr
    def get_route_to(self, destination="", protocol="", longer=False):
        """Return routes to ``destination`` across all VRFs.

        Parses ``show ip route vrf all`` output into a dict mapping each
        matched prefix to a list of next-hop entry dicts (protocol,
        outgoing interface, age, preference, active flags, ...).  BGP
        next hops are enriched via ``self._get_bgp_route_attr``.

        Args:
            destination: prefix/host to look up; must parse as an IP network,
                otherwise an error string is returned instead of a dict.
            protocol: if non-empty, keep only next hops from this protocol.
            longer: not supported on NXOS; raises NotImplementedError if True.

        Note: only the IPv4 branch is implemented here; for an IPv6
        destination this function falls through and implicitly returns None.
        """
        if longer:
            raise NotImplementedError("Longer prefixes not yet supported for NXOS")
        longer_pref = ""
        # vrf stays empty here, so the "vrf all" command branch below is
        # always the one taken.
        vrf = ""
        ip_version = None
        try:
            ip_version = IPNetwork(destination).version
        except AddrFormatError:
            return "Please specify a valid destination!"
        if ip_version == 4:
            routes = {}
            if vrf:
                send_cmd = "show ip route vrf {vrf} {destination} {longer}".format(
                    vrf=vrf, destination=destination, longer=longer_pref
                ).rstrip()
            else:
                send_cmd = "show ip route vrf all {destination} {longer}".format(
                    destination=destination, longer=longer_pref
                ).rstrip()
            out_sh_ip_rou = self._send_command(send_cmd)
            # Each VRF's table is printed under its own "IP Route Table for"
            # header; process one section per VRF.
            for vrfsec in out_sh_ip_rou.split("IP Route Table for ")[1:]:
                if "Route not found" in vrfsec:
                    continue
                # Simple state machine over the section's lines: first find
                # the VRF name, then prefix lines, then their "via" lines.
                vrffound = False
                preffound = False
                nh_list = []
                cur_prefix = ""
                for line in vrfsec.split("\n"):
                    if not vrffound:
                        vrfstr = RE_RT_VRF_NAME.match(line)
                        if vrfstr:
                            curvrf = vrfstr.group(1)
                            vrffound = True
                    else:
                        prefstr = RE_RT_IPV4_ROUTE_PREF.match(line)
                        if prefstr:
                            if preffound:
                                # A new prefix starts: flush the next hops
                                # collected for the previous prefix.
                                if cur_prefix not in routes:
                                    routes[cur_prefix] = []
                                for nh in nh_list:
                                    routes[cur_prefix].append(nh)
                                nh_list = []
                            else:
                                preffound = True
                            cur_prefix = prefstr.group(1)
                            continue
                        viastr = RE_IP_ROUTE_VIA_REGEX.match(line)
                        if viastr:
                            # "*" marks the next hop currently in use.
                            nh_used = viastr.group("used") == "*"
                            nh_ip = viastr.group("ip") or ""
                            nh_int = viastr.group("int")
                            nh_metric = viastr.group("metric")
                            nh_age = bgp_time_conversion(viastr.group("age"))
                            nh_source = viastr.group("source")
                            if protocol and protocol != nh_source:
                                continue
                            if nh_int:
                                nh_int_canon = helpers.canonical_interface_name(nh_int)
                            else:
                                nh_int_canon = ""
                            route_entry = {
                                "protocol": nh_source,
                                "outgoing_interface": nh_int_canon,
                                "age": nh_age,
                                "current_active": nh_used,
                                "routing_table": curvrf,
                                "last_active": nh_used,
                                "next_hop": nh_ip,
                                "selected_next_hop": nh_used,
                                "inactive_reason": "",
                                "preference": int(nh_metric),
                            }
                            if nh_source == "bgp":
                                route_entry[
                                    "protocol_attributes"
                                ] = self._get_bgp_route_attr(cur_prefix, curvrf, nh_ip)
                            else:
                                route_entry["protocol_attributes"] = {}
                            nh_list.append(route_entry)
                # process last next hop entries
                if preffound:
                    if cur_prefix not in routes:
                        routes[cur_prefix] = []
                    for nh in nh_list:
                        routes[cur_prefix].append(nh)
            return routes
def get_snmp_information(self):
snmp_information = {}
command = "show running-config"
output = self._send_command(command)
snmp_config = helpers.textfsm_extractor(self, "snmp_config", output)
if not snmp_config:
return snmp_information
snmp_information = {
"contact": str(""),
"location": str(""),
"community": {},
"chassis_id": str(""),
}
for snmp_entry in snmp_config:
contact = str(snmp_entry.get("contact", ""))
if contact:
snmp_information["contact"] = contact
location = str(snmp_entry.get("location", ""))
if location:
snmp_information["location"] = location
community_name = str(snmp_entry.get("community", ""))
if not community_name:
continue
if community_name not in snmp_information["community"].keys():
snmp_information["community"][community_name] = {
"acl": str(snmp_entry.get("acl", "")),
"mode": str(snmp_entry.get("mode", "").lower()),
}
else:
acl = str(snmp_entry.get("acl", ""))
if acl:
snmp_information["community"][community_name]["acl"] = acl
mode = str(snmp_entry.get("mode", "").lower())
if mode:
snmp_information["community"][community_name]["mode"] = mode
return snmp_information
def get_users(self):
_CISCO_TO_CISCO_MAP = {"network-admin": 15, "network-operator": 5}
_DEFAULT_USER_DICT = {"password": "", "level": 0, "sshkeys": []}
users = {}
command = "show running-config"
output = self._send_command(command)
section_username_tabled_output = helpers.textfsm_extractor(
self, "users", output
)
for user in section_username_tabled_output:
username = user.get("username", "")
if not username:
continue
if username not in users:
users[username] = _DEFAULT_USER_DICT.copy()
password = user.get("password", "")
if password:
users[username]["password"] = str(password.strip())
level = 0
role = user.get("role", "")
if role.startswith("priv"):
level = int(role.split("-")[-1])
else:
level = _CISCO_TO_CISCO_MAP.get(role, 0)
if level > users.get(username).get("level"):
# unfortunately on Cisco you can set different priv levels for the same user
# Good news though: the device will consider the highest level
users[username]["level"] = level
sshkeytype = user.get("sshkeytype", "")
sshkeyvalue = user.get("sshkeyvalue", "")
if sshkeytype and sshkeyvalue:
if sshkeytype not in ["ssh-rsa", "ssh-dsa"]:
continue
users[username]["sshkeys"].append(str(sshkeyvalue))
return users
def get_vlans(self):
vlans = {}
command = "show vlan brief | json"
vlan_table_raw = self._get_command_table(
command, "TABLE_vlanbriefxbrief", "ROW_vlanbriefxbrief"
)
if isinstance(vlan_table_raw, dict):
vlan_table_raw = [vlan_table_raw]
for vlan in vlan_table_raw:
if "vlanshowplist-ifidx" not in vlan.keys():
vlan["vlanshowplist-ifidx"] = []
vlans[vlan["vlanshowbr-vlanid"]] = {
"name": vlan["vlanshowbr-vlanname"],
"interfaces": self._parse_vlan_ports(vlan["vlanshowplist-ifidx"]),
}
return vlans
def get_optics(self):
command = "show interface transceiver details"
output = self._send_command(command)
# Formatting data into return data structure
optics_detail = {}
# Extraction Regexps
port_ts_re = re.compile(r"^Ether.*?(?=\nEther|\Z)", re.M | re.DOTALL)
port_re = re.compile(r"^(Ether.*)[ ]*?$", re.M)
vendor_re = re.compile("name is (.*)$", re.M)
vendor_part_re = re.compile("part number is (.*)$", re.M)
vendor_rev_re = re.compile("revision is (.*)$", re.M)
serial_no_re = re.compile("serial number is (.*)$", re.M)
type_no_re = re.compile("type is (.*)$", re.M)
rx_instant_re = re.compile(r"Rx Power[ ]+(?:(\S+?)[ ]+dBm|(N.A))", re.M)
tx_instant_re = re.compile(r"Tx Power[ ]+(?:(\S+?)[ ]+dBm|(N.A))", re.M)
current_instant_re = re.compile(r"Current[ ]+(?:(\S+?)[ ]+mA|(N.A))", re.M)
port_ts_l = port_ts_re.findall(output)
for port_ts in port_ts_l:
port = port_re.search(port_ts).group(1)
# No transceiver is present in those case
if "transceiver is not present" in port_ts:
continue
if "transceiver is not applicable" in port_ts:
continue
port_detail = {"physical_channels": {"channel": []}}
# No metric present
vendor = vendor_re.search(port_ts).group(1)
vendor_part = vendor_part_re.search(port_ts).group(1)
vendor_rev = vendor_rev_re.search(port_ts).group(1)
serial_no = serial_no_re.search(port_ts).group(1)
type_s = type_no_re.search(port_ts).group(1)
state = {
"vendor": vendor.strip(),
"vendor_part": vendor_part.strip(),
"vendor_rev": vendor_rev.strip(),
"serial_no": serial_no.strip(),
"connector_type": self.connector_type_map.get(type_s, "Unknown"),
}
if "DOM is not supported" not in port_ts:
res = rx_instant_re.search(port_ts)
input_power = res.group(1) or res.group(2)
res = tx_instant_re.search(port_ts)
output_power = res.group(1) or res.group(2)
res = current_instant_re.search(port_ts)
current = res.group(1) or res.group(2)
# If interface is shutdown it returns "N/A" as output power
# or "N/A" as input power
# Converting that to -100.0 float
try:
float(output_power)
except ValueError:
output_power = -100.0
try:
float(input_power)
except ValueError:
input_power = -100.0
try:
float(current)
except ValueError:
current = -100.0
# Defaulting avg, min, max values to -100.0 since device does not
# return these values
optic_states = {
"index": 0,
"state": {
"input_power": {
"instant": (
float(input_power) if "input_power" else -100.0
),
"avg": -100.0,
"min": -100.0,
"max": -100.0,
},
"output_power": {
"instant": (
float(output_power) if "output_power" else -100.0
),
"avg": -100.0,
"min": -100.0,
"max": -100.0,
},
"laser_bias_current": {
"instant": (float(current) if "current" else -100.0),
"avg": 0.0,
"min": 0.0,
"max": 0.0,
},
},
}
port_detail["physical_channels"]["channel"].append(optic_states)
port_detail["state"] = state
optics_detail[port] = port_detail
return optics_detail
| true | true |
f7ffdadfd08c532d4ddb10b50d5697bfd1838ea6 | 1,622 | py | Python | modules/signatures/windows/antidbg_debuggercheck.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 24 | 2021-06-21T07:35:37.000Z | 2022-03-22T03:33:59.000Z | modules/signatures/windows/antidbg_debuggercheck.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 3 | 2021-07-01T08:09:05.000Z | 2022-01-28T03:38:36.000Z | modules/signatures/windows/antidbg_debuggercheck.py | Yuanmessi/Bold-Falcon | 00fcaba0b3d9c462b9d20ecb256ff85db5d119e2 | [
"BSD-3-Clause"
] | 6 | 2021-06-22T05:32:57.000Z | 2022-02-11T02:05:45.000Z | # Copyright (C) 2018 Kevin Ross
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class ChecksDebugger(Signature):
    """Cuckoo signature: flags processes probing for an attached user-mode
    debugger via CheckRemoteDebuggerPresent / IsDebuggerPresent."""

    name = "checks_debugger"
    description = "Checks if process is being debugged by a debugger"
    severity = 1
    categories = ["anti-debug"]
    authors = ["Kevin Ross"]
    minimum = "2.0"  # presumably the minimum supported Cuckoo version

    # Only these API calls are delivered to on_call for this signature.
    filter_apinames = [
        "CheckRemoteDebuggerPresent",
        "IsDebuggerPresent",
    ]

    def on_call(self, call, process):
        # Record every matching API call as evidence.
        self.mark_call()

    def on_complete(self):
        # The signature matches if at least one call was marked.
        return self.has_marks()
class ChecksKernelDebugger(Signature):
    """Cuckoo signature: flags processes querying for a kernel debugger
    via SystemKernelDebuggerInformation; rated more severe (2) than the
    user-mode debugger check in this file (1)."""

    name = "checks_kernel_debugger"
    description = "Checks if process is being debugged by a kernel debugger"
    severity = 2
    categories = ["anti-debug"]
    authors = ["Kevin Ross"]
    minimum = "2.0"  # presumably the minimum supported Cuckoo version

    # Only this API call is delivered to on_call for this signature.
    filter_apinames = [
        "SystemKernelDebuggerInformation",
    ]

    def on_call(self, call, process):
        # Record every matching API call as evidence.
        self.mark_call()

    def on_complete(self):
        # The signature matches if at least one call was marked.
        return self.has_marks()
| 29.490909 | 76 | 0.696054 |
from lib.cuckoo.common.abstracts import Signature
class ChecksDebugger(Signature):
    """Detects user-mode debugger-presence checks
    (CheckRemoteDebuggerPresent / IsDebuggerPresent)."""

    name = "checks_debugger"
    description = "Checks if process is being debugged by a debugger"
    severity = 1
    categories = ["anti-debug"]
    authors = ["Kevin Ross"]
    minimum = "2.0"

    # API calls this signature subscribes to.
    filter_apinames = [
        "CheckRemoteDebuggerPresent",
        "IsDebuggerPresent",
    ]

    def on_call(self, call, process):
        # mark each observed debugger-presence API call
        self.mark_call()

    def on_complete(self):
        # match if any call was marked during the analysis
        return self.has_marks()
class ChecksKernelDebugger(Signature):
    """Detects kernel-debugger queries via SystemKernelDebuggerInformation."""

    name = "checks_kernel_debugger"
    description = "Checks if process is being debugged by a kernel debugger"
    severity = 2
    categories = ["anti-debug"]
    authors = ["Kevin Ross"]
    minimum = "2.0"

    # API calls this signature subscribes to.
    filter_apinames = [
        "SystemKernelDebuggerInformation",
    ]

    def on_call(self, call, process):
        # mark each observed kernel-debugger API call
        self.mark_call()

    def on_complete(self):
        # match if any call was marked during the analysis
        return self.has_marks()
| true | true |
f7ffdb4a44411e66e6e5e0ae554d733f23bab2b6 | 655 | py | Python | migrations/versions/b8bfbb8170b6_add_mood.py | edementyev/wakeupbot | 975b95efe6845589046cf185da241a4aa255caf7 | [
"MIT"
] | null | null | null | migrations/versions/b8bfbb8170b6_add_mood.py | edementyev/wakeupbot | 975b95efe6845589046cf185da241a4aa255caf7 | [
"MIT"
] | null | null | null | migrations/versions/b8bfbb8170b6_add_mood.py | edementyev/wakeupbot | 975b95efe6845589046cf185da241a4aa255caf7 | [
"MIT"
] | null | null | null | """add_mood
Revision ID: b8bfbb8170b6
Revises: d459222e4af3
Create Date: 2020-07-19 11:11:16.102951
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "b8bfbb8170b6"
down_revision = "d459222e4af3"
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add the 'mood' column to sleep_records."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Nullable, so existing rows need no backfill or server default.
    op.add_column("sleep_records", sa.Column("mood", sa.String(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the 'mood' column (its data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("sleep_records", "mood")
    # ### end Alembic commands ###
| 23.392857 | 81 | 0.694656 | import sqlalchemy as sa
from alembic import op
revision = "b8bfbb8170b6"
down_revision = "d459222e4af3"
branch_labels = None
depends_on = None
def upgrade():
| true | true |
f7ffdc7d44321f951095bf2994ac47ace0753d9f | 4,240 | py | Python | tests/python/pants_test/tasks/test_thrift_linter_integration.py | square/pants | 28a018c7f47900aec4f576c81a52e0e4b41d9fec | [
"Apache-2.0"
] | 11 | 2015-01-20T01:39:41.000Z | 2019-08-08T07:27:44.000Z | tests/python/pants_test/tasks/test_thrift_linter_integration.py | fakeNetflix/square-repo-pants | 28a018c7f47900aec4f576c81a52e0e4b41d9fec | [
"Apache-2.0"
] | 1 | 2019-08-21T07:29:26.000Z | 2019-08-21T07:29:26.000Z | tests/python/pants_test/tasks/test_thrift_linter_integration.py | fakeNetflix/square-repo-pants | 28a018c7f47900aec4f576c81a52e0e4b41d9fec | [
"Apache-2.0"
] | 5 | 2015-03-30T02:46:53.000Z | 2018-03-08T20:10:43.000Z | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class ThriftLinterTest(PantsRunIntegrationTest):
  """Integration tests for the thrift-linter goal.

  The tests exercise the interaction of three strictness sources: the BUILD
  target (thrift_linter_strict), the command line (--[no-]thrift-linter-strict)
  and pants.ini ([thrift-linter] strict). In strict mode lint errors fail the
  run; otherwise they are only reported. The previous version repeated the
  same run-and-assert boilerplate in all ten tests; it is factored into
  private helpers here.
  """

  _TARGET_BASE = 'testprojects/src/thrift/com/pants/thrift_linter'

  def _run_linter(self, target_name, extra_args=None, config=None):
    """Run the thrift-linter goal on one test target; return the pants run."""
    cmd = ['goal',
           'thrift-linter',
           '{0}:{1}'.format(self._TARGET_BASE, target_name)]
    if extra_args:
      cmd += extra_args
    if config is None:
      return self.run_pants(cmd)
    return self.run_pants(cmd, config=config)

  def _assert_lint_errors(self, pants_run):
    """Assert that the run reported lint errors."""
    self.assertTrue('Lint errors found!' in pants_run.stdout_data)

  def test_good(self):
    # thrift-linter should pass without warnings with correct thrift files.
    pants_run = self._run_linter('good-thrift')
    self.assert_success(pants_run)
    self.assertFalse('Lint errors found!' in pants_run.stdout_data)

  def test_bad_default(self):
    # Non-strict by default: lint errors are reported but do not fail the run.
    pants_run = self._run_linter('bad-thrift-default')
    self.assert_success(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_strict(self):
    # BUILD target defines thrift_linter_strict=True, so lint errors fail.
    pants_run = self._run_linter('bad-thrift-strict')
    self.assert_failure(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_non_strict(self):
    # BUILD target defines thrift_linter_strict=False: report only.
    pants_run = self._run_linter('bad-thrift-non-strict')
    self.assert_success(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_default_override(self):
    # Command-line --thrift-linter-strict overrides the BUILD default.
    pants_run = self._run_linter('bad-thrift-default',
                                 extra_args=['--thrift-linter-strict'])
    self.assert_failure(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_strict_override(self):
    # Command-line --no-thrift-linter-strict overrides a strict BUILD target.
    pants_run = self._run_linter('bad-thrift-strict',
                                 extra_args=['--no-thrift-linter-strict'])
    self.assert_success(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_non_strict_override(self):
    # Command-line strict flag overrides a non-strict BUILD target.
    pants_run = self._run_linter('bad-thrift-non-strict',
                                 extra_args=['--thrift-linter-strict'])
    self.assert_failure(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_pants_ini_strict(self):
    # pants.ini [thrift-linter] strict=True makes lint errors fail the run.
    pants_run = self._run_linter('bad-thrift-default',
                                 config={'thrift-linter': {'strict': True}})
    self.assert_failure(pants_run)
    self._assert_lint_errors(pants_run)

  def test_bad_pants_ini_strict_overridden(self):
    # Command-line --no-thrift-linter-strict beats pants.ini strict=True.
    pants_run = self._run_linter('bad-thrift-default',
                                 extra_args=['--no-thrift-linter-strict'],
                                 config={'thrift-linter': {'strict': True}})
    self.assert_success(pants_run)
    self._assert_lint_errors(pants_run)
| 42.828283 | 93 | 0.706368 |
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class ThriftLinterTest(PantsRunIntegrationTest):
    """Integration tests for the thrift-linter goal: strictness can come from
    the BUILD target, the command line, or pants.ini; strict runs fail on
    lint errors while non-strict runs only report them."""

    def test_good(self):
        # Correct thrift files: run succeeds with no lint errors reported.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:good-thrift']
        pants_run = self.run_pants(cmd)
        self.assert_success(pants_run)
        self.assertFalse('Lint errors found!' in pants_run.stdout_data)

    def test_bad_default(self):
        # Default (non-strict): lint errors reported, run still succeeds.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-default']
        pants_run = self.run_pants(cmd)
        self.assert_success(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_strict(self):
        # BUILD target is strict: lint errors fail the run.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-strict']
        pants_run = self.run_pants(cmd)
        self.assert_failure(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_non_strict(self):
        # BUILD target is explicitly non-strict: report only.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-non-strict']
        pants_run = self.run_pants(cmd)
        self.assert_success(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_default_override(self):
        # Command-line strict flag overrides the BUILD default.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-default',
               '--thrift-linter-strict']
        pants_run = self.run_pants(cmd)
        self.assert_failure(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_strict_override(self):
        # Command-line non-strict flag overrides a strict BUILD target.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-strict',
               '--no-thrift-linter-strict']
        pants_run = self.run_pants(cmd)
        self.assert_success(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_non_strict_override(self):
        # Command-line strict flag overrides a non-strict BUILD target.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-non-strict',
               '--thrift-linter-strict']
        pants_run = self.run_pants(cmd)
        self.assert_failure(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_pants_ini_strict(self):
        # pants.ini strict=True makes lint errors fail the run.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-default',]
        pants_ini_config = {'thrift-linter': {'strict': True}}
        pants_run = self.run_pants(cmd, config = pants_ini_config)
        self.assert_failure(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)

    def test_bad_pants_ini_strict_overridden(self):
        # Command-line non-strict flag beats pants.ini strict=True.
        cmd = ['goal',
               'thrift-linter',
               'testprojects/src/thrift/com/pants/thrift_linter:bad-thrift-default',
               '--no-thrift-linter-strict']
        pants_ini_config = {'thrift-linter': {'strict': True}}
        pants_run = self.run_pants(cmd, config = pants_ini_config)
        self.assert_success(pants_run)
        self.assertTrue('Lint errors found!' in pants_run.stdout_data)
| true | true |
f7ffdd1859f3c607028e46364794c66139d2c888 | 628 | py | Python | manage.py | Bchizi/instajam | 1a1669fa28acfaf5158715be3b7bd27ac1dc407a | [
"MIT"
] | null | null | null | manage.py | Bchizi/instajam | 1a1669fa28acfaf5158715be3b7bd27ac1dc407a | [
"MIT"
] | 9 | 2021-03-19T04:37:19.000Z | 2022-03-12T00:10:39.000Z | manage.py | Bchizi/instajam | 1a1669fa28acfaf5158715be3b7bd27ac1dc407a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line administrative utility for this project."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'instajam.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        # Chain the original failure with a hint about the two usual
        # causes: Django missing from the environment, or an inactive venv.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from err
    execute_from_command_line(sys.argv)
# Invoke the CLI only when this file is executed directly, not on import.
if __name__ == '__main__':
    main()
| 28.545455 | 73 | 0.683121 |
import os
import sys
def main():
    """Entry point: select the settings module and dispatch to Django's CLI."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'instajam.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Chain the original error with a hint about common setup problems.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


# Run the CLI only when this file is executed directly.
if __name__ == '__main__':
    main()
| true | true |
f7ffdd4c9a7a25cd70129b8f2b4b7827e0ace372 | 346 | py | Python | OOP/10_properties.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | [
"MIT"
] | null | null | null | OOP/10_properties.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | [
"MIT"
] | null | null | null | OOP/10_properties.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | [
"MIT"
] | null | null | null | class Product:
def __init__(self, price):
self.price = price
@property
def price(self):
return self.__price
@price.setter
def price(self, value):
if value < 0:
raise ValueError("Price cannot be negative.")
self.__price = value
# Demo: the property gives attribute-style access with validation.
product = Product(10)
product.price = 9  # goes through the setter; a negative value would raise
print(product.price)  # read through the getter
| 19.222222 | 51 | 0.67052 | class Product:
    def __init__(self, price):
        # Routed through the property setter, so the initial price is validated.
        self.price = price
    @property
    def price(self):
        """Current price (read via the getter)."""
        return self.__price
    @price.setter
    def price(self, value):
        # Reject negative prices before storing.
        if value < 0:
            raise ValueError("Price cannot be negative.")
        self.__price = value
# Demo: construct, update via the setter, read via the getter.
product = Product(10)
product.price = 9
print(product.price)
| true | true |
f7ffdd60b8aaa2c0752e32a70abef6541234574f | 460 | py | Python | bin/rehex.py | advantage-development/masternode-sentinel | d809d52a37d5d7bc15b02f9491396ec5bd269822 | [
"MIT"
] | null | null | null | bin/rehex.py | advantage-development/masternode-sentinel | d809d52a37d5d7bc15b02f9491396ec5bd269822 | [
"MIT"
] | null | null | null | bin/rehex.py | advantage-development/masternode-sentinel | d809d52a37d5d7bc15b02f9491396ec5bd269822 | [
"MIT"
] | null | null | null | import simplejson
import binascii
import sys
import pdb
from pprint import pprint
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import advantagelib
# ============================================================================
usage = "%s <hex>" % sys.argv[0]
obj = None
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
else:
obj = advantagelib.deserialise(sys.argv[1])
pdb.set_trace()
1
| 20.909091 | 84 | 0.591304 | import simplejson
import binascii
import sys
import pdb
from pprint import pprint
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import advantagelib
usage = "%s <hex>" % sys.argv[0]
obj = None
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
else:
obj = advantagelib.deserialise(sys.argv[1])
pdb.set_trace()
1
| true | true |
f7ffe1d22350a43524089bad9076cac3b1e79224 | 4,076 | py | Python | Collections-a-installer/community-general-2.4.0/scripts/inventory/freeipa.py | d-amien-b/simple-getwordpress | da90d515a0aa837b633d50db4d91d22b031c04a2 | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | Collections-a-installer/community-general-2.4.0/scripts/inventory/freeipa.py | d-amien-b/simple-getwordpress | da90d515a0aa837b633d50db4d91d22b031c04a2 | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | Collections-a-installer/community-general-2.4.0/scripts/inventory/freeipa.py | d-amien-b/simple-getwordpress | da90d515a0aa837b633d50db4d91d22b031c04a2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import argparse
from distutils.version import LooseVersion
import json
import os
import sys
from ipalib import api, errors, __version__ as IPA_VERSION
from ansible.module_utils.six import u
def initialize():
    '''
    Bootstrap, finalize and connect the FreeIPA/IPA API; return the api
    object. Takes no arguments. A kerberos key must be present in the
    user's keyring for this to work. The IPA default configuration
    directory is /etc/ipa; this path can be overridden with the
    IPA_CONFDIR environment variable.
    '''
    api.bootstrap(context='cli')
    if not os.path.isdir(api.env.confdir):
        print("WARNING: IPA configuration directory (%s) is missing. "
              "Environment variable IPA_CONFDIR could be used to override "
              "default path." % api.env.confdir)
    if LooseVersion(IPA_VERSION) >= LooseVersion('4.6.2'):
        # With ipalib < 4.6.0 'server' and 'domain' have default values
        # ('localhost:8888', 'example.com'), newer versions don't and
        # DNS autodiscovery is broken, then one of jsonrpc_uri / xmlrpc_uri is
        # required.
        # ipalib 4.6.0 is unusable (https://pagure.io/freeipa/issue/7132)
        # that's why 4.6.2 is explicitely tested.
        if 'server' not in api.env or 'domain' not in api.env:
            sys.exit("ERROR: ('jsonrpc_uri' or 'xmlrpc_uri') or 'domain' are not "
                     "defined in '[global]' section of '%s' nor in '%s'." %
                     (api.env.conf, api.env.conf_default))
    # Finish configuration before opening the RPC connection.
    api.finalize()
    try:
        api.Backend.rpcclient.connect()
    except AttributeError:
        # FreeIPA < 4.0 compatibility
        api.Backend.xmlclient.connect()
    return api
def list_groups(api):
    '''
    Print a JSON Ansible inventory of every IPA host group. Requires one
    argument, the FreeIPA/IPA API object. Always returns None.
    '''
    inventory = {}
    hostvars = {}
    for hostgroup in api.Command.hostgroup_find(all=True)['result']:
        # Direct members plus indirect ones contributed by nested hostgroups.
        members = list(hostgroup.get('member_host', []))
        members.extend(hostgroup.get('memberindirect_host', []))
        inventory[hostgroup['cn'][0]] = {'hosts': list(members)}
        for member in members:
            hostvars[member] = {}
    # '_meta' carries per-host variables (empty here), per the dynamic
    # inventory protocol.
    inventory['_meta'] = {'hostvars': hostvars}
    print(json.dumps(inventory, indent=1, sort_keys=True))
    return None
def parse_args():
    '''
    Parse the command line options. Exactly one of --list or --host is
    required (they are mutually exclusive). Expects no arguments.
    '''
    parser = argparse.ArgumentParser(
        description='Ansible FreeIPA/IPA inventory module')
    mode = parser.add_mutually_exclusive_group(required=True)
    mode.add_argument('--list', action='store_true', help='List active servers')
    mode.add_argument('--host', help='List details about the specified host')
    return parser.parse_args()
def get_host_attributes(api, host):
    """
    Look up a single host in FreeIPA.

    Args:
        api: FreeIPA API Object
        host: hostname to look up

    Returns: a JSON string of the host's attributes if found, otherwise
    an empty dict (note the mixed return types).
    """
    try:
        result = api.Command.host_show(u(host))['result']
        # Certificates are bulky blobs; drop them from the output.
        result.pop('usercertificate', None)
        return json.dumps(result, indent=1)
    except errors.NotFound:
        return {}
# Script entry point: --host prints one host's attributes, --list prints
# the full host-group inventory as JSON.
if __name__ == '__main__':
    args = parse_args()
    api = initialize()
    if args.host:
        print(get_host_attributes(api, args.host))
    elif args.list:
        list_groups(api)
| 32.094488 | 92 | 0.648675 |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import argparse
from distutils.version import LooseVersion
import json
import os
import sys
from ipalib import api, errors, __version__ as IPA_VERSION
from ansible.module_utils.six import u
def initialize():
    """Bootstrap, finalize and connect the FreeIPA API; return the api object."""
    api.bootstrap(context='cli')
    if not os.path.isdir(api.env.confdir):
        print("WARNING: IPA configuration directory (%s) is missing. "
              "Environment variable IPA_CONFDIR could be used to override "
              "default path." % api.env.confdir)
    if LooseVersion(IPA_VERSION) >= LooseVersion('4.6.2'):
        # DNS autodiscovery is broken, then one of jsonrpc_uri / xmlrpc_uri is
        # required.
        # ipalib 4.6.0 is unusable (https://pagure.io/freeipa/issue/7132)
        # that's why 4.6.2 is explicitely tested.
        if 'server' not in api.env or 'domain' not in api.env:
            sys.exit("ERROR: ('jsonrpc_uri' or 'xmlrpc_uri') or 'domain' are not "
                     "defined in '[global]' section of '%s' nor in '%s'." %
                     (api.env.conf, api.env.conf_default))
    api.finalize()
    try:
        api.Backend.rpcclient.connect()
    except AttributeError:
        # older FreeIPA (< 4.0) exposes xmlclient instead of rpcclient
        api.Backend.xmlclient.connect()
    return api
def list_groups(api):
    """Print all IPA host groups as a JSON Ansible inventory; returns None."""
    inventory = {}
    hostvars = {}
    result = api.Command.hostgroup_find(all=True)['result']
    for hostgroup in result:
        # direct members plus indirect members from nested hostgroups
        members = []
        if 'member_host' in hostgroup:
            members = list(hostgroup['member_host'])
        if 'memberindirect_host' in hostgroup:
            members += (host for host in hostgroup['memberindirect_host'])
        inventory[hostgroup['cn'][0]] = {'hosts': list(members)}
        for member in members:
            hostvars[member] = {}
    # '_meta' carries per-host variables (empty here) per the dynamic
    # inventory protocol
    inventory['_meta'] = {'hostvars': hostvars}
    inv_string = json.dumps(inventory, indent=1, sort_keys=True)
    print(inv_string)
    return None
def parse_args():
    """Parse CLI options: exactly one of --list or --host is required."""
    parser = argparse.ArgumentParser(description='Ansible FreeIPA/IPA '
                                                 'inventory module')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--list', action='store_true',
                       help='List active servers')
    group.add_argument('--host', help='List details about the specified host')
    return parser.parse_args()
def get_host_attributes(api, host):
    """Return a host's attributes as a JSON string, or {} if not found.

    Note the mixed return types: a str on success, a dict when the host
    is unknown.
    """
    try:
        result = api.Command.host_show(u(host))['result']
        # strip the bulky certificate blob from the output
        if 'usercertificate' in result:
            del result['usercertificate']
        return json.dumps(result, indent=1)
    except errors.NotFound as e:
        return {}
# Entry point: print a single host's attributes or the full group inventory.
if __name__ == '__main__':
    args = parse_args()
    api = initialize()
    if args.host:
        print(get_host_attributes(api, args.host))
    elif args.list:
        list_groups(api)
| true | true |
f7ffe21bd0f2206db7cb425b138213367a16205d | 18,287 | py | Python | bootleg/dataloaders/wiki_dataset.py | mleszczy/bootleg | 162d74001cdfbbe146753393641d549e0328acb1 | [
"Apache-2.0"
] | 1 | 2021-01-11T18:40:09.000Z | 2021-01-11T18:40:09.000Z | bootleg/dataloaders/wiki_dataset.py | mleszczy/bootleg | 162d74001cdfbbe146753393641d549e0328acb1 | [
"Apache-2.0"
] | null | null | null | bootleg/dataloaders/wiki_dataset.py | mleszczy/bootleg | 162d74001cdfbbe146753393641d549e0328acb1 | [
"Apache-2.0"
] | null | null | null | import os
import time
import ujson as json
import torch
import sys
import pickle
import numpy as np
from torch.utils.data import Dataset
import torch.distributed as dist
import torch.nn.functional as F
from bootleg.symbols.alias_entity_table import AliasEntityTable
from bootleg.symbols.constants import *
from bootleg.prep import prep_data
from bootleg.utils import logging_utils, data_utils, train_utils
from bootleg.utils.utils import import_class
from bootleg.utils import utils
# https://github.com/pytorch/pytorch/issues/37581#issuecomment-624516586
import warnings
warnings.filterwarnings("ignore", message=".*The given NumPy array is not writeable.*")
class WikiDataset(Dataset):
    """
    Main dataset class that handles preparing a batch of input.

    Things to note
    **Input is a sentence with mentions that are both true and false golds. A true gold is one that was directly
    mined with Wikipedia. A false gold is one that was generated by weak labelling.
    **We determine entities that are in a slice by if the true entity index is -1 or not. During train, if use_weak_label is true,
    we allow the model to leverage true and false golds. During eval, we only score true anchors.
    **Some embeddings require more expensive processing. E.g., extracting the pairs of candidate entities that are connected
    in a KG. When this processing is done in the dataloader where it can benefit from multiple dataloader threads,
    the embedding is stored in batch_on_the_fly. This embedding must have a batch_prep method.
    When this processing is done during data prep, the embedding is stored in batch_prep.
    **If training a NIL model, we support randomly removing the true entity from the candidate list and setting the true
    entity index to be the NIL entity.
    **We support data slices (subsets of data) for both training (if slice model) and eval. If using slices for training model,
    we support probabilistic slice indices.

    Attributes:
        batch_prepped_emb_file_names: embeddings that are batch prepped in advance
        batch_on_the_fly_embs: embeddings where the batch_prep method is called in the __getitem__ method
        random_nil: whether to do NIL candidate random generation

    Batch Inputs:
        start_idx_in_sent: first token index of a mention,
        end_idx_in_sent: last token index of a mention,
        alias_idx: the alias (mention) index in our alias dictionary,
        word_indices: word indexes into the word embeddings (e.g., BERT token indices),
        sent_idx: unique sentence index,
        subsent_idx: unique subsentence index in the case of sentence windowing,
        entity_indices: the entity indices in our entity dictionary,
        alias_list_pos: keeps track of the original alias position in the list of all aliases in case the sentence
        is split via windowing
        true_entity_idx_for_train: entity indices for true and false golds, as seen during train
        slice_indices (optional): if slice dataset, we pass in matrix where each row is alias and each column
        is 0/1 if that mention is in the slice or not
        <ind_task_name> (optional): probabilistic labels of if a mention is in a slice or not (used in slicing model)
        <pred_task_name>: NED prediction labels; for slice model, predictions of aliases not in the slice are masked
        <embs>: all batch prep or batch on the fly embeddings
    """
    def __init__(self, args, use_weak_label, input_src, dataset_name,
                 is_writer, distributed, word_symbols, entity_symbols,
                 slice_dataset=None, dataset_is_eval=False):
        # Need to save args to reinstantiate logger
        self.args = args
        self.logger = logging_utils.get_logger(args)
        # Number of candidates, including NIL if a NIL model (train_in_candidates is False)
        self.K = entity_symbols.max_candidates + (not args.data_config.train_in_candidates)
        self.num_entities_with_pad_and_nocand = entity_symbols.num_entities_with_pad_and_nocand
        self.dataset_name = dataset_name
        self.slice_dataset = slice_dataset
        self.dataset_is_eval = dataset_is_eval
        # Slice names used for eval slices and a slicing model
        self.slice_names = train_utils.get_data_slices(args, dataset_is_eval)
        self.storage_type_file = data_utils.get_storage_file(self.dataset_name)
        # Mappings from sent_idx to row_id in dataset
        self.sent_idx_file = os.path.splitext(dataset_name)[0] + "_sent_idx.json"
        self.type_pred = False
        if args.data_config.type_prediction.use_type_pred:
            self.type_pred = True
            self.eid2typeid, self.num_types_with_pad = self.load_coarse_type_table(args, entity_symbols)
        # Load memory mapped file
        self.logger.info("Loading dataset...")
        self.logger.debug("Seeing if " + dataset_name + " exists")
        if (args.data_config.overwrite_preprocessed_data or
            (not os.path.exists(self.dataset_name)) or
            (not os.path.exists(self.sent_idx_file)) or
            (not os.path.exists(self.storage_type_file)) or
            (not os.path.exists(data_utils.get_batch_prep_config(self.dataset_name)))):
            start = time.time()
            self.logger.debug(f"Building dataset with {input_src}")
            # Only prep data once per node
            if is_writer:
                prep_data(args, use_weak_label=use_weak_label, dataset_is_eval=self.dataset_is_eval,
                    input_src=input_src, dataset_name=dataset_name,
                    prep_dir=data_utils.get_data_prep_dir(args))
            if distributed:
                # Make sure all processes wait for data to be created
                dist.barrier()
            self.logger.debug(f"Finished building and saving dataset in {round(time.time() - start, 2)}s.")
        start = time.time()
        # Storage type for loading memory mapped file of dataset
        self.storage_type = pickle.load(open(self.storage_type_file, 'rb'))
        self.data = np.memmap(self.dataset_name, dtype=self.storage_type, mode='r')
        self.data_len = len(self.data)
        # Mapping from sentence idx to rows in the dataset (indices).
        # Needed when sampling sentence indices from slices for evaluation.
        sent_idx_to_idx_str = utils.load_json_file(self.sent_idx_file)
        self.sent_idx_to_idx = {int(i):val for i,val in sent_idx_to_idx_str.items()}
        self.logger.info("Finished loading dataset.")
        # Stores info about the batch prepped embedding memory mapped files and their shapes and datatypes
        # so we can load them
        self.batch_prep_config = utils.load_json_file(data_utils.get_batch_prep_config(self.dataset_name))
        self.batch_prepped_emb_files = {}
        self.batch_prepped_emb_file_names = {}
        for emb in args.data_config.ent_embeddings:
            if 'batch_prep' in emb and emb['batch_prep']:
                assert emb.key in self.batch_prep_config, f'Need to prep {emb.key}. Please call prep instead of run with batch_prep_embeddings set to true.'
                self.batch_prepped_emb_file_names[emb.key] = os.path.join(os.path.dirname(self.dataset_name),
                    os.path.basename(self.batch_prep_config[emb.key]['file_name']))
                self.batch_prepped_emb_files[emb.key] = np.memmap(
                    self.batch_prepped_emb_file_names[emb.key],
                    dtype=self.batch_prep_config[emb.key]['dtype'],
                    shape=tuple(self.batch_prep_config[emb.key]['shape']),
                    mode='r')
                assert len(self.batch_prepped_emb_files[emb.key]) == self.data_len,\
                    f'Preprocessed emb data file {self.batch_prep_config[emb.key]["file_name"]} does not match length of main data file.'
        # Stores embeddings that we compute on the fly; these are embeddings where batch_on_the_fly is set to true.
        self.batch_on_the_fly_embs = {}
        for emb in args.data_config.ent_embeddings:
            if 'batch_on_the_fly' in emb and emb['batch_on_the_fly'] is True:
                mod, load_class = import_class("bootleg.embeddings", emb.load_class)
                try:
                    self.batch_on_the_fly_embs[emb.key] = getattr(mod, load_class)(main_args=args,
                        emb_args=emb['args'], entity_symbols=entity_symbols,
                        model_device=None, word_symbols=None, key=emb.key)
                except AttributeError as e:
                    self.logger.warning(f'No prep method found for {emb.load_class} with error {e}')
                except Exception as e:
                    # Fix: was a bare print("ERROR", e); route through the
                    # module logger like the rest of the class (still
                    # best-effort: the embedding is skipped, not fatal).
                    self.logger.error(f'Error loading batch_on_the_fly embedding {emb.key}: {e}')
        # The data in this table shouldn't be pickled since we delete it in the class __getstate__
        self.alias2entity_table = AliasEntityTable(args=args, entity_symbols=entity_symbols)
        # Random NIL percent
        self.mask_perc = args.train_config.random_nil_perc
        self.random_nil = False
        # Don't want to random mask for eval
        if not dataset_is_eval:
            # Whether to use a random NIL training regime
            self.random_nil = args.train_config.random_nil
            if self.random_nil:
                self.logger.info(f'Using random nils during training with {self.mask_perc} percent')

    def __len__(self):
        return self.data_len

    def __getitem__(self, key):
        example = self.data[key]
        entity_indices = self.alias2entity_table(example['alias_idx'])
        # True entities will be true and false golds for train (if use_weak_label in config is true) and just true golds for eval
        true_entities = torch.from_numpy(example['true_entity_idx'])
        M = true_entities.shape
        if self.random_nil:
            # example['true_entity_idx'] is M -> we want to sample some % of these and set them to not in candidate list
            # randomly mask each entity embedding
            bern_prob = (torch.ones(M) * self.mask_perc)
            keep_mask = torch.bernoulli(bern_prob) < 1
            # whichever we sample, we want to set corresponding true candidate to -1 and mask it out
            # to simulate not being in the candidate list
            # can't have negatives for one hot so we temporarily cast padded values to 0
            padded_entities = true_entities == -1
            true_entities = true_entities.masked_fill(padded_entities, 0)
            one_hot_true_entities = F.one_hot(true_entities, num_classes=self.K)
            one_hot_true_entities[keep_mask.unsqueeze(-1).expand_as(one_hot_true_entities)] = 0
            one_hot_true_entities[padded_entities.unsqueeze(-1).expand_as(one_hot_true_entities)] = 0
            # Fix: masked_fill requires a boolean mask on PyTorch >= 1.2;
            # F.one_hot returns int64, so cast before masking.
            entity_indices = entity_indices.masked_fill(one_hot_true_entities.bool(), -1)
            # set new true label to 0 ('not in candidate')
            true_entities = true_entities.masked_fill(~keep_mask, 0)
            # make sure original padded entities are padded
            true_entities = true_entities.masked_fill(padded_entities, -1)
        start_idx_in_sent = example['start_idx_in_sent']
        end_idx_in_sent = example['end_idx_in_sent']
        example_dict = {'start_idx_in_sent': start_idx_in_sent,
                        'end_idx_in_sent': end_idx_in_sent,
                        'alias_idx': example['alias_idx'],
                        'word_indices': example['word_indices'],
                        'sent_idx': example['sent_idx'],
                        'subsent_idx': example['subsent_idx'],
                        'entity_indices': entity_indices,
                        # due to subsentence split, we need to keep track of the original alias position in the list
                        # to do eval over slices when distributed
                        # (examples from a sentence may be distributed across different GPUs)
                        'alias_list_pos': example['alias_list_pos'],
                        # true entities of the mentions seen during train (true and false golds); in eval, we only keep
                        # true entities of true golds
                        'true_entity_idx_for_train': example['true_entity_idx_for_train']}
        # If this dataset is associated with slices, slice_indices is a incidence matrix indicating
        # for each alias in the batch, which ones participate in which slice (slices keep track of sentence indexes and aliases to predict)
        # Slices are not windowed like that are for training data.
        if self.slice_dataset is not None:
            # -1 is pad and should not be in the mapping from sentence index to row in array.
            assert -1 != self.slice_dataset.sent_idx_arr[example["sent_idx"]]
            # One row per mention and one column per slice
            slice_indices = np.hstack([self.slice_dataset.data[slice_name][self.slice_dataset.sent_idx_arr[example["sent_idx"]]].alias_to_predict.T
                for slice_name in self.slice_names])
            prob_labels_arr = np.hstack([self.slice_dataset.data[slice_name][self.slice_dataset.sent_idx_arr[example["sent_idx"]]].prob_labels.T
                for slice_name in self.slice_names])
            # alias_list_pos will have -1 for no alias; we want these to become zero in slice_indices.
            # Therefore we add a pad row to the bottom of slice_indices
            slice_indices = np.vstack([slice_indices, np.zeros(slice_indices.shape[1])]).astype(int)
            slice_indices = slice_indices[example['alias_list_pos']]
            # Probabilistic slice labels for slice indicator head training
            prob_labels_arr = np.vstack([prob_labels_arr, np.zeros(prob_labels_arr.shape[1])]).astype(float)
            prob_labels_arr = prob_labels_arr[example['alias_list_pos']]
            # If this is an eval dataset, keep slice indices intact for eval_wrapper
            example_dict['slice_indices'] = slice_indices
            # Assign true entity idx to -1 if example alias doesn't participate in slice
            for i, slice_name in enumerate(self.slice_names):
                prob_labels = prob_labels_arr[:,i]
                bin_in_slice_labels = slice_indices[:,i]
                # NED prediction labels; set predictions to be -1 for masking for mentions not in a slice
                pred_labels = np.copy(true_entities)
                pred_labels[~(bin_in_slice_labels).astype(bool)] = -1
                # Mask out slice alias labels for which we don't want to make a prediction
                # We need to use true_entity_idx to account for subsentences which indicate
                # which alias to predict
                prob_labels[true_entities == -1] = -1
                ind_task_name = train_utils.get_slice_head_ind_name(slice_name)
                pred_task_name = train_utils.get_slice_head_pred_name(slice_name)
                # Add indicator head and prediction head labels
                example_dict[ind_task_name] = prob_labels
                example_dict[pred_task_name] = pred_labels
        else:
            example_dict[train_utils.get_slice_head_pred_name(FINAL_LOSS)] = example['true_entity_idx']
        # Add type preds
        if self.type_pred:
            example_dict["type_labels"] = self.eid2typeid[true_entities]
        # Add embeddings to example forward
        for emb_name in self.batch_prepped_emb_files:
            example_dict[emb_name] = np.asarray(self.batch_prepped_emb_files[emb_name][key])
        # Prep the embeddings (this will call the batch_prep method for the embedding)
        for emb_name, emb in self.batch_on_the_fly_embs.items():
            example_dict[emb_name] = emb.batch_prep(example['alias_idx'], entity_indices)
        return example_dict

    def __getstate__(self):
        state = self.__dict__.copy()
        # Not picklable
        del state['data']
        del state['logger']
        # the sent_idx mapping is expensive to pickle so remove
        # also not needed in dataloader workers so we don't need to setstate for it
        del state['sent_idx_to_idx']
        del state['batch_prepped_emb_files']
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.data = np.memmap(self.dataset_name, dtype=self.storage_type, mode='r')
        self.batch_prepped_emb_files = {}
        for emb_name, file_name in self.batch_prepped_emb_file_names.items():
            self.batch_prepped_emb_files[emb_name] = np.memmap(self.batch_prepped_emb_file_names[emb_name],
                dtype=self.batch_prep_config[emb_name]['dtype'],
                shape=tuple(self.batch_prep_config[emb_name]['shape']),
                mode='r')
        self.logger = logging_utils.get_logger(self.args)

    def __repr__(self):
        return f"Dataset {self.dataset_name}"

    def load_coarse_type_table(self, args, entity_symbols):
        """Build the eid -> coarse-type lookup tensor.

        Returns a tensor of length num_entities_with_pad_and_nocand mapping
        each entity id to a type index (0 is the UNK type, -1 marks the pad
        eid), plus the number of types including the UNK/pad offset.
        """
        emb_dir = args.data_config.emb_dir
        coarse_type_file = args.data_config.type_prediction.file
        with open(os.path.join(emb_dir, coarse_type_file)) as in_f:
            # take the first type; UNK type is 0
            qid2type = {}
            max_type = 0
            for k, v in json.load(in_f).items():
                if len(v) > 0:
                    qid2type[k] = v[0]+1
                else:
                    qid2type[k] = 0
                max_type = max(max_type, qid2type[k])
        # We assume types are indexed from 0. So, 6 types will have indices 0 - 5. Max type will get 5+1 = 6.
        assert max_type == args.data_config.type_prediction.num_types,\
            f"{args.data_config.type_prediction.num_types} from args.data_config.type_prediction.num_types must match our computed number {max_type}"
        # All qids get unk types
        values = [0 for _ in range(self.num_entities_with_pad_and_nocand)]
        for qid in qid2type:
            if entity_symbols.qid_exists(qid):
                values[entity_symbols.get_eid(qid)] = qid2type[qid]
        # Padded eid gets -1
        values[-1] = -1
        num_types_with_pad = max_type+1
        eid2coarsetype = torch.tensor(values)
        return eid2coarsetype, num_types_with_pad
| 58.238854 | 156 | 0.667359 | import os
import time
import ujson as json
import torch
import sys
import pickle
import numpy as np
from torch.utils.data import Dataset
import torch.distributed as dist
import torch.nn.functional as F
from bootleg.symbols.alias_entity_table import AliasEntityTable
from bootleg.symbols.constants import *
from bootleg.prep import prep_data
from bootleg.utils import logging_utils, data_utils, train_utils
from bootleg.utils.utils import import_class
from bootleg.utils import utils
gs.filterwarnings("ignore", message=".*The given NumPy array is not writeable.*")
class WikiDataset(Dataset):
    """Dataset over a memory-mapped, pre-processed NED corpus.

    Each row holds one (sub)sentence with its mention spans, alias indices,
    candidate-entity indices and gold labels.  Optional extras prepared per
    batch: batch-prepped embedding files (read from np.memmap), on-the-fly
    embeddings (their batch_prep is called in __getitem__), slice labels,
    coarse-type labels, and random-NIL masking during training.
    """
    def __init__(self, args, use_weak_label, input_src, dataset_name,
                 is_writer, distributed, word_symbols, entity_symbols,
                 slice_dataset=None, dataset_is_eval=False):
        """Load (building first if needed) the memory-mapped dataset.

        args: global config object; use_weak_label: include weak-labelled
        mentions; input_src: raw input file; dataset_name: mmap output path;
        is_writer: this process does the one-time prep; distributed: other
        ranks wait on a barrier; slice_dataset: optional slice labels;
        dataset_is_eval: disables random-NIL masking.
        """
        # Keep args so __setstate__ can re-create the (unpicklable) logger.
        self.args = args
        self.logger = logging_utils.get_logger(args)
        # Candidate count; +1 adds the NIL slot when not train_in_candidates.
        self.K = entity_symbols.max_candidates + (not args.data_config.train_in_candidates)
        self.num_entities_with_pad_and_nocand = entity_symbols.num_entities_with_pad_and_nocand
        self.dataset_name = dataset_name
        self.slice_dataset = slice_dataset
        self.dataset_is_eval = dataset_is_eval
        # Slice names used for eval slices and for a slicing model.
        self.slice_names = train_utils.get_data_slices(args, dataset_is_eval)
        self.storage_type_file = data_utils.get_storage_file(self.dataset_name)
        # Sidecar file mapping sent_idx -> row ids in the mmap.
        self.sent_idx_file = os.path.splitext(dataset_name)[0] + "_sent_idx.json"
        self.type_pred = False
        if args.data_config.type_prediction.use_type_pred:
            self.type_pred = True
            self.eid2typeid, self.num_types_with_pad = self.load_coarse_type_table(args, entity_symbols)
        self.logger.info("Loading dataset...")
        self.logger.debug("Seeing if " + dataset_name + " exists")
        # (Re)build the pre-processed files when forced or when any is missing.
        if (args.data_config.overwrite_preprocessed_data or
            (not os.path.exists(self.dataset_name)) or
            (not os.path.exists(self.sent_idx_file)) or
            (not os.path.exists(self.storage_type_file)) or
            (not os.path.exists(data_utils.get_batch_prep_config(self.dataset_name)))):
            start = time.time()
            self.logger.debug(f"Building dataset with {input_src}")
            # Only one process per node writes the prepped data.
            if is_writer:
                prep_data(args, use_weak_label=use_weak_label, dataset_is_eval=self.dataset_is_eval,
                    input_src=input_src, dataset_name=dataset_name,
                    prep_dir=data_utils.get_data_prep_dir(args))
            if distributed:
                # All ranks wait until the writer has finished prepping.
                dist.barrier()
            self.logger.debug(f"Finished building and saving dataset in {round(time.time() - start, 2)}s.")
        start = time.time()
        # Numpy structured dtype used to interpret the memory-mapped file.
        self.storage_type = pickle.load(open(self.storage_type_file, 'rb'))
        self.data = np.memmap(self.dataset_name, dtype=self.storage_type, mode='r')
        self.data_len = len(self.data)
        # sent_idx -> dataset row indices; needed when sampling from slices.
        sent_idx_to_idx_str = utils.load_json_file(self.sent_idx_file)
        self.sent_idx_to_idx = {int(i):val for i,val in sent_idx_to_idx_str.items()}
        self.logger.info(f"Finished loading dataset.")
        # Shapes/dtypes of the batch-prepped embedding mmap files.
        self.batch_prep_config = utils.load_json_file(data_utils.get_batch_prep_config(self.dataset_name))
        self.batch_prepped_emb_files = {}
        self.batch_prepped_emb_file_names = {}
        for emb in args.data_config.ent_embeddings:
            if 'batch_prep' in emb and emb['batch_prep']:
                assert emb.key in self.batch_prep_config, f'Need to prep {emb.key}. Please call prep instead of run with batch_prep_embeddings set to true.'
                self.batch_prepped_emb_file_names[emb.key] = os.path.join(os.path.dirname(self.dataset_name),
                    os.path.basename(self.batch_prep_config[emb.key]['file_name']))
                self.batch_prepped_emb_files[emb.key] = np.memmap(
                    self.batch_prepped_emb_file_names[emb.key],
                    dtype=self.batch_prep_config[emb.key]['dtype'],
                    shape=tuple(self.batch_prep_config[emb.key]['shape']),
                    mode='r')
                assert len(self.batch_prepped_emb_files[emb.key]) == self.data_len,\
                    f'Preprocessed emb data file {self.batch_prep_config[emb.key]["file_name"]} does not match length of main data file.'
        # Embeddings whose batch_prep runs inside __getitem__ (dataloader workers).
        self.batch_on_the_fly_embs = {}
        for emb in args.data_config.ent_embeddings:
            if 'batch_on_the_fly' in emb and emb['batch_on_the_fly'] is True:
                mod, load_class = import_class("bootleg.embeddings", emb.load_class)
                try:
                    self.batch_on_the_fly_embs[emb.key] = getattr(mod, load_class)(main_args=args,
                        emb_args=emb['args'], entity_symbols=entity_symbols,
                        model_device=None, word_symbols=None, key=emb.key)
                except AttributeError as e:
                    self.logger.warning(f'No prep method found for {emb.load_class} with error {e}')
                except Exception as e:
                    # NOTE(review): debug print instead of the logger; the
                    # embedding is silently skipped — consider logging.
                    print("ERROR", e)
        # Dropped in __getstate__ (not picklable / rebuilt per worker).
        self.alias2entity_table = AliasEntityTable(args=args, entity_symbols=entity_symbols)
        # Random NIL percent
        self.mask_perc = args.train_config.random_nil_perc
        self.random_nil = False
        # Don't want to random mask for eval
        if not dataset_is_eval:
            self.random_nil = args.train_config.random_nil
            if self.random_nil:
                self.logger.info(f'Using random nils during training with {self.mask_perc} percent')

    def __len__(self):
        """Number of (sub)sentence rows in the memory-mapped dataset."""
        return self.data_len

    def __getitem__(self, key):
        """Assemble the model-input dict for dataset row *key*."""
        example = self.data[key]
        entity_indices = self.alias2entity_table(example['alias_idx'])
        # Gold entities: true+false golds at train, true golds only at eval.
        true_entities = torch.from_numpy(example['true_entity_idx'])
        M = true_entities.shape
        if self.random_nil:
            # Randomly drop a mask_perc fraction of true entities from the
            # candidate list to simulate NIL (not-in-candidates) mentions.
            bern_prob = (torch.ones(M) * self.mask_perc)
            keep_mask = torch.bernoulli(bern_prob) < 1
            # one_hot cannot take negatives, so temporarily zero the pads.
            padded_entities = true_entities == -1
            true_entities = true_entities.masked_fill(padded_entities, 0)
            one_hot_true_entities = F.one_hot(true_entities, num_classes=self.K)
            one_hot_true_entities[keep_mask.unsqueeze(-1).expand_as(one_hot_true_entities)] = 0
            one_hot_true_entities[padded_entities.unsqueeze(-1).expand_as(one_hot_true_entities)] = 0
            # NOTE(review): one_hot returns int64 but masked_fill needs a bool
            # mask on PyTorch >= 1.2 — confirm the pinned torch version.
            entity_indices = entity_indices.masked_fill(one_hot_true_entities, -1)
            # set new true label to 0 ('not in candidate')
            true_entities = true_entities.masked_fill(~keep_mask, 0)
            # make sure original padded entities are padded
            true_entities = true_entities.masked_fill(padded_entities, -1)
        start_idx_in_sent = example['start_idx_in_sent']
        end_idx_in_sent = example['end_idx_in_sent']
        example_dict = {'start_idx_in_sent': start_idx_in_sent,
                        'end_idx_in_sent': end_idx_in_sent,
                        'alias_idx': example['alias_idx'],
                        'word_indices': example['word_indices'],
                        'sent_idx': example['sent_idx'],
                        'subsent_idx': example['subsent_idx'],
                        'entity_indices': entity_indices,
                        # due to subsentence split, we need to keep track of the original alias position in the list
                        # to do eval over slices when distributed
                        # (examples from a sentence may be distributed across different GPUs)
                        'alias_list_pos': example['alias_list_pos'],
                        # true entities of the mentions seen during train (true and false golds); in eval, we only keep
                        # true entities of true golds
                        'true_entity_idx_for_train': example['true_entity_idx_for_train']}
        # If this dataset is associated with slices, slice_indices is a incidence matrix indicating
        # for each alias in the batch, which ones participate in which slice (slices keep track of sentence indexes and aliases to predict)
        # Slices are not windowed like that are for training data.
        if self.slice_dataset is not None:
            # -1 is pad and should not be in the mapping from sentence index to row in array.
            assert -1 != self.slice_dataset.sent_idx_arr[example["sent_idx"]]
            # One row per mention and one column per slice
            slice_indices = np.hstack([self.slice_dataset.data[slice_name][self.slice_dataset.sent_idx_arr[example["sent_idx"]]].alias_to_predict.T
                for slice_name in self.slice_names])
            prob_labels_arr = np.hstack([self.slice_dataset.data[slice_name][self.slice_dataset.sent_idx_arr[example["sent_idx"]]].prob_labels.T
                for slice_name in self.slice_names])
            # alias_list_pos will have -1 for no alias; we want these to become zero in slice_indices.
            # Therefore we add a pad row to the bottom of slice_indices
            slice_indices = np.vstack([slice_indices, np.zeros(slice_indices.shape[1])]).astype(int)
            slice_indices = slice_indices[example['alias_list_pos']]
            # Probabilistic slice labels for slice indicator head training
            prob_labels_arr = np.vstack([prob_labels_arr, np.zeros(prob_labels_arr.shape[1])]).astype(float)
            prob_labels_arr = prob_labels_arr[example['alias_list_pos']]
            # If this is an eval dataset, keep slice indices intact for eval_wrapper
            example_dict['slice_indices'] = slice_indices
            # Assign true entity idx to -1 if example alias doesn't participate in slice
            for i, slice_name in enumerate(self.slice_names):
                prob_labels = prob_labels_arr[:,i]
                bin_in_slice_labels = slice_indices[:,i]
                # NED labels: -1 masks mentions outside this slice.
                pred_labels = np.copy(true_entities)
                pred_labels[~(bin_in_slice_labels).astype(bool)] = -1
                # We need to use true_entity_idx to account for subsentences which indicate
                # which alias to predict
                prob_labels[true_entities == -1] = -1
                ind_task_name = train_utils.get_slice_head_ind_name(slice_name)
                pred_task_name = train_utils.get_slice_head_pred_name(slice_name)
                # Add indicator head and prediction head labels
                example_dict[ind_task_name] = prob_labels
                example_dict[pred_task_name] = pred_labels
        else:
            example_dict[train_utils.get_slice_head_pred_name(FINAL_LOSS)] = example['true_entity_idx']
        # Add type preds
        if self.type_pred:
            example_dict["type_labels"] = self.eid2typeid[true_entities]
        # Add embeddings to example forward
        for emb_name in self.batch_prepped_emb_files:
            example_dict[emb_name] = np.asarray(self.batch_prepped_emb_files[emb_name][key])
        # Prep the embeddings (this will call the batch_prep method for the embedding)
        for emb_name, emb in self.batch_on_the_fly_embs.items():
            example_dict[emb_name] = emb.batch_prep(example['alias_idx'], entity_indices)
        return example_dict

    def __getstate__(self):
        """Drop unpicklable / heavy members before pickling to workers."""
        state = self.__dict__.copy()
        # Not picklable
        del state['data']
        del state['logger']
        # the sent_idx mapping is expensive to pickle so remove
        # also not needed in dataloader workers so we don't need to setstate for it
        del state['sent_idx_to_idx']
        del state['batch_prepped_emb_files']
        return state

    def __setstate__(self, state):
        """Re-open the mmap files and logger dropped in __getstate__."""
        self.__dict__.update(state)
        self.data = np.memmap(self.dataset_name, dtype=self.storage_type, mode='r')
        self.batch_prepped_emb_files = {}
        for emb_name, file_name in self.batch_prepped_emb_file_names.items():
            self.batch_prepped_emb_files[emb_name] = np.memmap(self.batch_prepped_emb_file_names[emb_name],
                dtype=self.batch_prep_config[emb_name]['dtype'],
                shape=tuple(self.batch_prep_config[emb_name]['shape']),
                mode='r')
        self.logger = logging_utils.get_logger(self.args)

    def __repr__(self):
        return f"Dataset {self.dataset_name}"

    def load_coarse_type_table(self, args, entity_symbols):
        """Build the eid -> coarse-type tensor for type prediction.

        Reads the qid->types JSON (first listed type wins; shifted by +1 so
        0 is UNK), places each type at the entity's eid, marks the pad eid
        with -1, and returns (lookup tensor, number of types incl. UNK).
        """
        emb_dir = args.data_config.emb_dir
        coarse_type_file = args.data_config.type_prediction.file
        with open(os.path.join(emb_dir, coarse_type_file)) as in_f:
            qid2type = {}
            max_type = 0
            for k, v in json.load(in_f).items():
                if len(v) > 0:
                    # First type wins; +1 reserves index 0 for UNK.
                    qid2type[k] = v[0]+1
                else:
                    qid2type[k] = 0
                max_type = max(max_type, qid2type[k])
        # Sanity check: configured type count must match the data.
        assert max_type == args.data_config.type_prediction.num_types,\
            f"{args.data_config.type_prediction.num_types} from args.data_config.type_prediction.num_types must match our computed number {max_type}"
        # Every eid defaults to the UNK type (0).
        values = [0 for _ in range(self.num_entities_with_pad_and_nocand)]
        for qid in qid2type:
            if entity_symbols.qid_exists(qid):
                values[entity_symbols.get_eid(qid)] = qid2type[qid]
        # Padded eid gets -1.
        values[-1] = -1
        num_types_with_pad = max_type+1
        eid2coarsetype = torch.tensor(values)
        return eid2coarsetype, num_types_with_pad
| true | true |
f7ffe2d124f030817d7d121b6510b244546c2572 | 1,741 | py | Python | src/globus_cli/commands/group/member/approve.py | globusonline/globus-cli | 696857baafac198141edc3c1c29c72215f217df1 | [
"Apache-2.0"
] | null | null | null | src/globus_cli/commands/group/member/approve.py | globusonline/globus-cli | 696857baafac198141edc3c1c29c72215f217df1 | [
"Apache-2.0"
] | 1 | 2016-04-09T17:26:05.000Z | 2016-04-11T16:13:50.000Z | src/globus_cli/commands/group/member/approve.py | globusonline/globus-cli | 696857baafac198141edc3c1c29c72215f217df1 | [
"Apache-2.0"
] | null | null | null | import click
from globus_cli.login_manager import LoginManager
from globus_cli.parsing import IdentityType, ParsedIdentity, command
from globus_cli.termio import FORMAT_TEXT_RECORD, formatted_print
from globus_cli.types import FIELD_LIST_T
APPROVED_USER_FIELDS: FIELD_LIST_T = [
("Group ID", "group_id"),
("Approved User ID", "identity_id"),
("Approved User Username", "username"),
]
@command("approve", short_help="Approve a member to join a group")
@click.argument("group_id", type=click.UUID)
@click.argument("user", type=IdentityType())
@LoginManager.requires_login(LoginManager.GROUPS_RS)
def member_approve(group_id: str, user: ParsedIdentity, login_manager):
"""
Approve a pending member to join a group, changing their status from 'invited'
to 'active'.
The USER argument may be an identity ID or username (whereas the group must be
specified with an ID).
"""
auth_client = login_manager.get_auth_client()
groups_client = login_manager.get_groups_client()
identity_id = auth_client.maybe_lookup_identity_id(user.value)
if not identity_id:
raise click.UsageError(f"Couldn't determine identity from user value: {user}")
actions = {"approve": [{"identity_id": identity_id}]}
response = groups_client.batch_membership_action(group_id, actions)
if not response.get("approve", None):
try:
raise ValueError(response["errors"]["approve"][0]["detail"])
except (IndexError, KeyError):
raise ValueError("Could not approve the user to join the group")
formatted_print(
response,
text_format=FORMAT_TEXT_RECORD,
fields=APPROVED_USER_FIELDS,
response_key=lambda data: data["approve"][0],
)
| 38.688889 | 86 | 0.721424 | import click
from globus_cli.login_manager import LoginManager
from globus_cli.parsing import IdentityType, ParsedIdentity, command
from globus_cli.termio import FORMAT_TEXT_RECORD, formatted_print
from globus_cli.types import FIELD_LIST_T
APPROVED_USER_FIELDS: FIELD_LIST_T = [
("Group ID", "group_id"),
("Approved User ID", "identity_id"),
("Approved User Username", "username"),
]
@command("approve", short_help="Approve a member to join a group")
@click.argument("group_id", type=click.UUID)
@click.argument("user", type=IdentityType())
@LoginManager.requires_login(LoginManager.GROUPS_RS)
def member_approve(group_id: str, user: ParsedIdentity, login_manager):
auth_client = login_manager.get_auth_client()
groups_client = login_manager.get_groups_client()
identity_id = auth_client.maybe_lookup_identity_id(user.value)
if not identity_id:
raise click.UsageError(f"Couldn't determine identity from user value: {user}")
actions = {"approve": [{"identity_id": identity_id}]}
response = groups_client.batch_membership_action(group_id, actions)
if not response.get("approve", None):
try:
raise ValueError(response["errors"]["approve"][0]["detail"])
except (IndexError, KeyError):
raise ValueError("Could not approve the user to join the group")
formatted_print(
response,
text_format=FORMAT_TEXT_RECORD,
fields=APPROVED_USER_FIELDS,
response_key=lambda data: data["approve"][0],
)
| true | true |
f7ffe32f2174b14ab36b60734d3122f28c18bae8 | 17,424 | py | Python | synchronize.py | calizarr/PhenoPiSight | 9ee01dfa5e11a16e43ae1c71f1effcf58f22435a | [
"MIT"
] | 4 | 2017-09-03T14:12:42.000Z | 2019-02-27T00:24:40.000Z | synchronize.py | calizarr/PhenoPiSight | 9ee01dfa5e11a16e43ae1c71f1effcf58f22435a | [
"MIT"
] | null | null | null | synchronize.py | calizarr/PhenoPiSight | 9ee01dfa5e11a16e43ae1c71f1effcf58f22435a | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012-2013, Timothy Appnel <tim@appnel.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: synchronize
version_added: "1.4"
short_description: Uses rsync to make synchronizing file paths in your playbooks quick and easy.
description:
- C(synchronize) is a wrapper around the rsync command, meant to make common tasks with rsync easier. It is run and originates on the local host where Ansible is being run. Of course, you could just use the command action to call rsync yourself, but you also have to add a fair number of boilerplate options and host facts. You `still` may need to call rsync directly via C(command) or C(shell) depending on your use case. C(synchronize) does not provide access to the full power of rsync, but does make most invocations easier to follow.
options:
src:
description:
- Path on the source host that will be synchronized to the destination; The path can be absolute or relative.
required: true
dest:
description:
- Path on the destination host that will be synchronized from the source; The path can be absolute or relative.
required: true
dest_port:
description:
- Port number for ssh on the destination host. Prior to ansible 2.0, the ansible_ssh_port inventory var took precedence over this value.
default: Value of ansible_ssh_port for this host, remote_port config setting, or 22 if none of those are set
version_added: "1.5"
mode:
description:
- Specify the direction of the synchronization. In push mode the localhost or delegate is the source; In pull mode the remote host in context is the source.
required: false
choices: [ 'push', 'pull' ]
default: 'push'
archive:
description:
- Mirrors the rsync archive flag, enables recursive, links, perms, times, owner, group flags and -D.
choices: [ 'yes', 'no' ]
default: 'yes'
required: false
checksum:
description:
      - Skip based on checksum, rather than mod-time & size; Note that the "archive" option is still enabled by default - the "checksum" option will not disable it.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
version_added: "1.6"
compress:
description:
- Compress file data during the transfer. In most cases, leave this enabled unless it causes problems.
choices: [ 'yes', 'no' ]
default: 'yes'
required: false
version_added: "1.7"
existing_only:
description:
- Skip creating new files on receiver.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
version_added: "1.5"
delete:
description:
- Delete files that don't exist (after transfer, not before) in the C(src) path. This option requires C(recursive=yes).
choices: [ 'yes', 'no' ]
default: 'no'
required: false
dirs:
description:
- Transfer directories without recursing
choices: [ 'yes', 'no' ]
default: 'no'
required: false
recursive:
description:
- Recurse into directories.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
links:
description:
- Copy symlinks as symlinks.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
copy_links:
description:
- Copy symlinks as the item that they point to (the referent) is copied, rather than the symlink.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
perms:
description:
- Preserve permissions.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
times:
description:
- Preserve modification times
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
owner:
description:
- Preserve owner (super user only)
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
group:
description:
- Preserve group
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
rsync_path:
description:
- Specify the rsync command to run on the remote host. See C(--rsync-path) on the rsync man page.
required: false
rsync_timeout:
description:
- Specify a --timeout for the rsync command in seconds.
default: 0
required: false
set_remote_user:
description:
- put user@ for the remote paths. If you have a custom ssh config to define the remote user for a host
that does not match the inventory user, you should set this parameter to "no".
default: yes
use_ssh_args:
description:
- Use the ssh_args specified in ansible.cfg
default: "no"
choices:
- "yes"
- "no"
version_added: "2.0"
rsync_opts:
description:
- Specify additional rsync options by passing in an array.
default:
required: false
version_added: "1.6"
partial:
description:
- Tells rsync to keep the partial file which should make a subsequent transfer of the rest of the file much faster.
default: no
required: false
version_added: "2.0"
verify_host:
description:
- Verify destination host key.
default: no
required: false
version_added: "2.0"
notes:
- rsync must be installed on both the local and remote host.
- For the C(synchronize) module, the "local host" is the host `the synchronize task originates on`, and the "destination host" is the host `synchronize is connecting to`.
- The "local host" can be changed to a different host by using `delegate_to`. This enables copying between two remote hosts or entirely on one remote machine.
- "The user and permissions for the synchronize `src` are those of the user running the Ansible task on the local host (or the remote_user for a delegate_to host when delegate_to is used)."
- The user and permissions for the synchronize `dest` are those of the `remote_user` on the destination host or the `become_user` if `become=yes` is active.
- In 2.0.0.0 a bug in the synchronize module made become occur on the "local host". This was fixed in 2.0.1.
- Expect that dest=~/x will be ~<remote_user>/x even if using sudo.
- Inspect the verbose output to validate the destination user/host/path
are what was expected.
- To exclude files and directories from being synchronized, you may add
C(.rsync-filter) files to the source directory.
- rsync daemon must be up and running with correct permission when using
rsync protocol in source or destination path.
author: "Timothy Appnel (@tima)"
'''
EXAMPLES = '''
# Synchronization of src on the control machine to dest on the remote hosts
synchronize: src=some/relative/path dest=/some/absolute/path
# Synchronization using rsync protocol (push)
synchronize: src=some/relative/path/ dest=rsync://somehost.com/path/
# Synchronization using rsync protocol (pull)
synchronize: mode=pull src=rsync://somehost.com/path/ dest=/some/absolute/path/
# Synchronization using rsync protocol on delegate host (push)
synchronize: >
src=/some/absolute/path/ dest=rsync://somehost.com/path/
delegate_to: delegate.host
# Synchronization using rsync protocol on delegate host (pull)
synchronize: >
mode=pull src=rsync://somehost.com/path/ dest=/some/absolute/path/
delegate_to: delegate.host
# Synchronization without any --archive options enabled
synchronize: src=some/relative/path dest=/some/absolute/path archive=no
# Synchronization with --archive options enabled except for --recursive
synchronize: src=some/relative/path dest=/some/absolute/path recursive=no
# Synchronization with --archive options enabled except for --times, with --checksum option enabled
synchronize: src=some/relative/path dest=/some/absolute/path checksum=yes times=no
# Synchronization without --archive options enabled except use --links
synchronize: src=some/relative/path dest=/some/absolute/path archive=no links=yes
# Synchronization of two paths both on the control machine
local_action: synchronize src=some/relative/path dest=/some/absolute/path
# Synchronization of src on the inventory host to the dest on the localhost in pull mode
synchronize: mode=pull src=some/relative/path dest=/some/absolute/path
# Synchronization of src on delegate host to dest on the current inventory host.
synchronize:
src: /first/absolute/path
dest: /second/absolute/path
delegate_to: delegate.host
# Synchronize two directories on one remote host.
synchronize:
src: /first/absolute/path
dest: /second/absolute/path
delegate_to: "{{ inventory_hostname }}"
# Synchronize and delete files in dest on the remote host that are not found in src of localhost.
synchronize: src=some/relative/path dest=/some/absolute/path delete=yes
# Synchronize using an alternate rsync command
# This specific command is granted su privileges on the destination
synchronize: src=some/relative/path dest=/some/absolute/path rsync_path="su -c rsync"
# Example .rsync-filter file in the source directory
- var # exclude any path whose last part is 'var'
- /var # exclude any path starting with 'var' starting at the source directory
+ /var/conf # include /var/conf even though it was previously excluded
# Synchronize passing in extra rsync options
synchronize:
src: /tmp/helloworld
    dest: /var/www/helloworld
rsync_opts:
- "--no-motd"
- "--exclude=.git"
'''
# Cached address of the SSH client (i.e. the Ansible controller).
# Resolved from the environment once, on first use.
client_addr = None

def substitute_controller(path):
    """Replace a leading 'localhost' in *path* with the SSH client address.

    The controller's address is taken from the first whitespace-separated
    field of the SSH_CLIENT or SSH_CONNECTION environment variable and
    cached in the module-level ``client_addr``.

    :param path: rsync source/destination path, possibly 'localhost:...'
    :returns: *path* with a leading 'localhost' replaced by the controller
        address; other paths are returned unchanged.
    :raises ValueError: if the controller address cannot be determined.
    """
    global client_addr
    if not client_addr:
        # Try SSH_CLIENT first, then SSH_CONNECTION; in both, the first
        # field is the client's address.  Unlike the previous unpacking
        # approach, indexing [0] also handles a single-field value.
        for env_var in ('SSH_CLIENT', 'SSH_CONNECTION'):
            ssh_env_string = os.environ.get(env_var)
            if ssh_env_string:
                client_addr = ssh_env_string.split(None, 1)[0]
                break
        if not client_addr:
            raise ValueError(
                'Could not determine controller address: neither SSH_CLIENT '
                'nor SSH_CONNECTION is set')
    if path.startswith('localhost:'):
        path = path.replace('localhost', client_addr, 1)
    return path
def main():
    """
    Ansible module entry point: build and run an rsync command.

    Assembles the rsync command line from the module parameters, executes it
    with ``module.run_command``, and derives the changed/unchanged result by
    scanning the output for a marker injected via rsync's ``--out-format``.
    """
    module = AnsibleModule(
        argument_spec = dict(
            src = dict(required=True),
            dest = dict(required=True),
            dest_port = dict(default=22, type='int'),
            delete = dict(default='no', type='bool'),
            private_key = dict(default=None),
            rsync_path = dict(default=None),
            _local_rsync_path = dict(default='rsync', type='path'),
            _substitute_controller = dict(default='no', type='bool'),
            archive = dict(default='yes', type='bool'),
            checksum = dict(default='no', type='bool'),
            compress = dict(default='yes', type='bool'),
            existing_only = dict(default='no', type='bool'),
            dirs = dict(default='no', type='bool'),
            recursive = dict(type='bool'),
            links = dict(type='bool'),
            copy_links = dict(type='bool'),
            perms = dict(type='bool'),
            times = dict(type='bool'),
            owner = dict(type='bool'),
            group = dict(type='bool'),
            set_remote_user = dict(default='yes', type='bool'),
            rsync_timeout = dict(type='int', default=0),
            rsync_opts = dict(type='list'),
            ssh_args = dict(type='str'),
            partial = dict(default='no', type='bool'),
            verify_host = dict(default='no', type='bool'),
            mode = dict(default='push', choices=['push', 'pull']),
        ),
        supports_check_mode = True
    )

    # Paths are double-quoted so that spaces survive the shell command below.
    if module.params['_substitute_controller']:
        try:
            source = '"' + substitute_controller(module.params['src']) + '"'
            dest = '"' + substitute_controller(module.params['dest']) + '"'
        except ValueError:
            module.fail_json(msg='Could not determine controller hostname for rsync to send to')
    else:
        source = '"' + module.params['src'] + '"'
        dest = '"' + module.params['dest'] + '"'
    dest_port = module.params['dest_port']
    delete = module.params['delete']
    private_key = module.params['private_key']
    rsync_path = module.params['rsync_path']
    rsync = module.params['_local_rsync_path']
    # Fixed: the fallback default here used to be the literal string
    # 'rsync_timeout'; the key always exists (argument_spec declares
    # default=0), so index it directly.
    rsync_timeout = module.params['rsync_timeout']
    archive = module.params['archive']
    checksum = module.params['checksum']
    compress = module.params['compress']
    existing_only = module.params['existing_only']
    dirs = module.params['dirs']
    partial = module.params['partial']
    # the default of these params depends on the value of archive
    recursive = module.params['recursive']
    links = module.params['links']
    copy_links = module.params['copy_links']
    perms = module.params['perms']
    times = module.params['times']
    owner = module.params['owner']
    group = module.params['group']
    rsync_opts = module.params['rsync_opts']
    ssh_args = module.params['ssh_args']
    verify_host = module.params['verify_host']

    if '/' not in rsync:
        rsync = module.get_bin_path(rsync, required=True)
    # Result not used directly ('ssh' is spelled literally in --rsh below),
    # but required=True makes the module fail early when ssh is missing.
    ssh = module.get_bin_path('ssh', required=True)

    cmd = '%s --delay-updates -F' % rsync
    if compress:
        cmd = cmd + ' --compress'
    if rsync_timeout:
        cmd = cmd + ' --timeout=%s' % rsync_timeout
    if module.check_mode:
        cmd = cmd + ' --dry-run'
    if delete:
        cmd = cmd + ' --delete-after'
    if existing_only:
        cmd = cmd + ' --existing'
    if checksum:
        cmd = cmd + ' --checksum'
    if archive:
        cmd = cmd + ' --archive'
        # --archive implies these flags; emit the --no-* form only when the
        # user explicitly disabled one of them.
        if recursive is False:
            cmd = cmd + ' --no-recursive'
        if links is False:
            cmd = cmd + ' --no-links'
        if copy_links is True:
            cmd = cmd + ' --copy-links'
        if perms is False:
            cmd = cmd + ' --no-perms'
        if times is False:
            cmd = cmd + ' --no-times'
        if owner is False:
            cmd = cmd + ' --no-owner'
        if group is False:
            cmd = cmd + ' --no-group'
    else:
        # Without --archive, emit each flag only when explicitly enabled.
        if recursive is True:
            cmd = cmd + ' --recursive'
        if links is True:
            cmd = cmd + ' --links'
        if copy_links is True:
            cmd = cmd + ' --copy-links'
        if perms is True:
            cmd = cmd + ' --perms'
        if times is True:
            cmd = cmd + ' --times'
        if owner is True:
            cmd = cmd + ' --owner'
        if group is True:
            cmd = cmd + ' --group'
    if dirs:
        cmd = cmd + ' --dirs'
    if private_key is None:
        private_key = ''
    else:
        private_key = '-i ' + private_key

    ssh_opts = ""
    # NOTE(review): the hard-coded user home in this ControlPath looks
    # machine-specific -- confirm whether it should be configurable.
    control_path = "-o ControlPath=/home/clizarraga/.ansible/cp/ansible-ssh-%h-%p-%r"
    if not verify_host:
        ssh_opts = '-o StrictHostKeyChecking=no'
    if ssh_args:
        ssh_opts = '%s %s' % (ssh_opts, ssh_args)

    # rsync-daemon URLs on both ends cannot work: one side must be local.
    if source.startswith('rsync://') and dest.startswith('rsync://'):
        module.fail_json(msg='either src or dest must be a localhost', rc=1)
    if not source.startswith('rsync://') and not dest.startswith('rsync://'):
        if dest_port != 22:
            cmd += " --rsh 'ssh %s %s -o Port=%s'" % (private_key, ssh_opts, dest_port)
        else:
            cmd += " --rsh 'ssh %s %s %s'" % (private_key, ssh_opts, control_path)  # need ssh param
    if rsync_path:
        cmd = cmd + " --rsync-path=%s" % (rsync_path)
    if rsync_opts:
        cmd = cmd + " " + " ".join(rsync_opts)
    if partial:
        cmd = cmd + " --partial"

    # Prefix every transferred item with a marker so we can tell from the
    # output whether rsync actually changed anything.
    changed_marker = '<<CHANGED>>'
    cmd = cmd + " --out-format='" + changed_marker + "%i %n%L'"

    # expand the paths
    if '@' not in source:
        source = os.path.expanduser(source)
    if '@' not in dest:
        dest = os.path.expanduser(dest)
    cmd = ' '.join([cmd, source, dest])
    cmdstr = cmd
    (rc, out, err) = module.run_command(cmd)
    if rc:
        return module.fail_json(msg=err, rc=rc, cmd=cmdstr)
    changed = changed_marker in out
    out_clean = out.replace(changed_marker, '')
    # Drop empty lines in one O(n) pass (was a quadratic while/remove loop).
    out_lines = [line for line in out_clean.split('\n') if line != '']
    if module._diff:
        return module.exit_json(changed=changed, msg=out_clean,
                                rc=rc, cmd=cmdstr, stdout_lines=out_lines,
                                diff={'prepared': out_clean})
    return module.exit_json(changed=changed, msg=out_clean,
                            rc=rc, cmd=cmdstr, stdout_lines=out_lines)
# import module snippets
from ansible.module_utils.basic import *
main()
| 37.470968 | 542 | 0.646809 |
DOCUMENTATION = '''
---
module: synchronize
version_added: "1.4"
short_description: Uses rsync to make synchronizing file paths in your playbooks quick and easy.
description:
- C(synchronize) is a wrapper around the rsync command, meant to make common tasks with rsync easier. It is run and originates on the local host where Ansible is being run. Of course, you could just use the command action to call rsync yourself, but you also have to add a fair number of boilerplate options and host facts. You `still` may need to call rsync directly via C(command) or C(shell) depending on your use case. C(synchronize) does not provide access to the full power of rsync, but does make most invocations easier to follow.
options:
src:
description:
- Path on the source host that will be synchronized to the destination; The path can be absolute or relative.
required: true
dest:
description:
- Path on the destination host that will be synchronized from the source; The path can be absolute or relative.
required: true
dest_port:
description:
- Port number for ssh on the destination host. Prior to ansible 2.0, the ansible_ssh_port inventory var took precedence over this value.
default: Value of ansible_ssh_port for this host, remote_port config setting, or 22 if none of those are set
version_added: "1.5"
mode:
description:
- Specify the direction of the synchronization. In push mode the localhost or delegate is the source; In pull mode the remote host in context is the source.
required: false
choices: [ 'push', 'pull' ]
default: 'push'
archive:
description:
- Mirrors the rsync archive flag, enables recursive, links, perms, times, owner, group flags and -D.
choices: [ 'yes', 'no' ]
default: 'yes'
required: false
checksum:
description:
      - Skip based on checksum, rather than mod-time & size; Note that the "archive" option is still enabled by default - the "checksum" option will not disable it.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
version_added: "1.6"
compress:
description:
- Compress file data during the transfer. In most cases, leave this enabled unless it causes problems.
choices: [ 'yes', 'no' ]
default: 'yes'
required: false
version_added: "1.7"
existing_only:
description:
- Skip creating new files on receiver.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
version_added: "1.5"
delete:
description:
- Delete files that don't exist (after transfer, not before) in the C(src) path. This option requires C(recursive=yes).
choices: [ 'yes', 'no' ]
default: 'no'
required: false
dirs:
description:
- Transfer directories without recursing
choices: [ 'yes', 'no' ]
default: 'no'
required: false
recursive:
description:
- Recurse into directories.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
links:
description:
- Copy symlinks as symlinks.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
copy_links:
description:
- Copy symlinks as the item that they point to (the referent) is copied, rather than the symlink.
choices: [ 'yes', 'no' ]
default: 'no'
required: false
perms:
description:
- Preserve permissions.
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
times:
description:
- Preserve modification times
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
owner:
description:
- Preserve owner (super user only)
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
group:
description:
- Preserve group
choices: [ 'yes', 'no' ]
default: the value of the archive option
required: false
rsync_path:
description:
- Specify the rsync command to run on the remote host. See C(--rsync-path) on the rsync man page.
required: false
rsync_timeout:
description:
- Specify a --timeout for the rsync command in seconds.
default: 0
required: false
set_remote_user:
description:
- put user@ for the remote paths. If you have a custom ssh config to define the remote user for a host
that does not match the inventory user, you should set this parameter to "no".
default: yes
use_ssh_args:
description:
- Use the ssh_args specified in ansible.cfg
default: "no"
choices:
- "yes"
- "no"
version_added: "2.0"
rsync_opts:
description:
- Specify additional rsync options by passing in an array.
default:
required: false
version_added: "1.6"
partial:
description:
- Tells rsync to keep the partial file which should make a subsequent transfer of the rest of the file much faster.
default: no
required: false
version_added: "2.0"
verify_host:
description:
- Verify destination host key.
default: no
required: false
version_added: "2.0"
notes:
- rsync must be installed on both the local and remote host.
- For the C(synchronize) module, the "local host" is the host `the synchronize task originates on`, and the "destination host" is the host `synchronize is connecting to`.
- The "local host" can be changed to a different host by using `delegate_to`. This enables copying between two remote hosts or entirely on one remote machine.
- "The user and permissions for the synchronize `src` are those of the user running the Ansible task on the local host (or the remote_user for a delegate_to host when delegate_to is used)."
- The user and permissions for the synchronize `dest` are those of the `remote_user` on the destination host or the `become_user` if `become=yes` is active.
- In 2.0.0.0 a bug in the synchronize module made become occur on the "local host". This was fixed in 2.0.1.
- Expect that dest=~/x will be ~<remote_user>/x even if using sudo.
- Inspect the verbose output to validate the destination user/host/path
are what was expected.
- To exclude files and directories from being synchronized, you may add
C(.rsync-filter) files to the source directory.
- rsync daemon must be up and running with correct permission when using
rsync protocol in source or destination path.
author: "Timothy Appnel (@tima)"
'''
EXAMPLES = '''
# Synchronization of src on the control machine to dest on the remote hosts
synchronize: src=some/relative/path dest=/some/absolute/path
# Synchronization using rsync protocol (push)
synchronize: src=some/relative/path/ dest=rsync://somehost.com/path/
# Synchronization using rsync protocol (pull)
synchronize: mode=pull src=rsync://somehost.com/path/ dest=/some/absolute/path/
# Synchronization using rsync protocol on delegate host (push)
synchronize: >
src=/some/absolute/path/ dest=rsync://somehost.com/path/
delegate_to: delegate.host
# Synchronization using rsync protocol on delegate host (pull)
synchronize: >
mode=pull src=rsync://somehost.com/path/ dest=/some/absolute/path/
delegate_to: delegate.host
# Synchronization without any --archive options enabled
synchronize: src=some/relative/path dest=/some/absolute/path archive=no
# Synchronization with --archive options enabled except for --recursive
synchronize: src=some/relative/path dest=/some/absolute/path recursive=no
# Synchronization with --archive options enabled except for --times, with --checksum option enabled
synchronize: src=some/relative/path dest=/some/absolute/path checksum=yes times=no
# Synchronization without --archive options enabled except use --links
synchronize: src=some/relative/path dest=/some/absolute/path archive=no links=yes
# Synchronization of two paths both on the control machine
local_action: synchronize src=some/relative/path dest=/some/absolute/path
# Synchronization of src on the inventory host to the dest on the localhost in pull mode
synchronize: mode=pull src=some/relative/path dest=/some/absolute/path
# Synchronization of src on delegate host to dest on the current inventory host.
synchronize:
src: /first/absolute/path
dest: /second/absolute/path
delegate_to: delegate.host
# Synchronize two directories on one remote host.
synchronize:
src: /first/absolute/path
dest: /second/absolute/path
delegate_to: "{{ inventory_hostname }}"
# Synchronize and delete files in dest on the remote host that are not found in src of localhost.
synchronize: src=some/relative/path dest=/some/absolute/path delete=yes
# Synchronize using an alternate rsync command
# This specific command is granted su privileges on the destination
synchronize: src=some/relative/path dest=/some/absolute/path rsync_path="su -c rsync"
# Example .rsync-filter file in the source directory
- var # exclude any path whose last part is 'var'
- /var # exclude any path starting with 'var' starting at the source directory
+ /var/conf # include /var/conf even though it was previously excluded
# Synchronize passing in extra rsync options
synchronize:
src: /tmp/helloworld
    dest: /var/www/helloworld
rsync_opts:
- "--no-motd"
- "--exclude=.git"
'''
# Cached address of the SSH client (i.e. the Ansible controller);
# resolved from the environment once, on first use.
client_addr = None
def substitute_controller(path):
    """Replace a leading 'localhost' in *path* with the SSH client address.

    The address is taken from the first whitespace-separated field of the
    SSH_CLIENT or SSH_CONNECTION environment variable and cached in the
    module-level ``client_addr``.

    :raises ValueError: when neither environment variable yields an address.
    """
    global client_addr
    if not client_addr:
        ssh_env_string = os.environ.get('SSH_CLIENT', None)
        try:
            client_addr, _ = ssh_env_string.split(None, 1)
        except AttributeError:
            # SSH_CLIENT unset (ssh_env_string is None) -- fall back to
            # SSH_CONNECTION, which also starts with the client address.
            ssh_env_string = os.environ.get('SSH_CONNECTION', None)
            try:
                client_addr, _ = ssh_env_string.split(None, 1)
            except AttributeError:
                pass
    if not client_addr:
        raise ValueError
    if path.startswith('localhost:'):
        path = path.replace('localhost', client_addr, 1)
    return path
def main():
    """
    Ansible module entry point: build and run an rsync command.

    Assembles the rsync command line from the module parameters, executes it
    with ``module.run_command``, and derives the changed/unchanged result by
    scanning the output for a marker injected via rsync's ``--out-format``.
    """
    module = AnsibleModule(
        argument_spec = dict(
            src = dict(required=True),
            dest = dict(required=True),
            dest_port = dict(default=22, type='int'),
            delete = dict(default='no', type='bool'),
            private_key = dict(default=None),
            rsync_path = dict(default=None),
            _local_rsync_path = dict(default='rsync', type='path'),
            _substitute_controller = dict(default='no', type='bool'),
            archive = dict(default='yes', type='bool'),
            checksum = dict(default='no', type='bool'),
            compress = dict(default='yes', type='bool'),
            existing_only = dict(default='no', type='bool'),
            dirs = dict(default='no', type='bool'),
            recursive = dict(type='bool'),
            links = dict(type='bool'),
            copy_links = dict(type='bool'),
            perms = dict(type='bool'),
            times = dict(type='bool'),
            owner = dict(type='bool'),
            group = dict(type='bool'),
            set_remote_user = dict(default='yes', type='bool'),
            rsync_timeout = dict(type='int', default=0),
            rsync_opts = dict(type='list'),
            ssh_args = dict(type='str'),
            partial = dict(default='no', type='bool'),
            verify_host = dict(default='no', type='bool'),
            mode = dict(default='push', choices=['push', 'pull']),
        ),
        supports_check_mode = True
    )
    # Paths are double-quoted so that spaces survive the shell command below.
    if module.params['_substitute_controller']:
        try:
            source = '"' + substitute_controller(module.params['src']) + '"'
            dest = '"' + substitute_controller(module.params['dest']) + '"'
        except ValueError:
            module.fail_json(msg='Could not determine controller hostname for rsync to send to')
    else:
        source = '"' + module.params['src'] + '"'
        dest = '"' + module.params['dest'] + '"'
    dest_port = module.params['dest_port']
    delete = module.params['delete']
    private_key = module.params['private_key']
    rsync_path = module.params['rsync_path']
    rsync = module.params.get('_local_rsync_path', 'rsync')
    # NOTE(review): the fallback default 'rsync_timeout' (a string) is dead
    # code -- the key always exists because argument_spec declares default=0.
    rsync_timeout = module.params.get('rsync_timeout', 'rsync_timeout')
    archive = module.params['archive']
    checksum = module.params['checksum']
    compress = module.params['compress']
    existing_only = module.params['existing_only']
    dirs = module.params['dirs']
    partial = module.params['partial']
    # the default of these params depends on the value of archive
    recursive = module.params['recursive']
    links = module.params['links']
    copy_links = module.params['copy_links']
    perms = module.params['perms']
    times = module.params['times']
    owner = module.params['owner']
    group = module.params['group']
    rsync_opts = module.params['rsync_opts']
    ssh_args = module.params['ssh_args']
    verify_host = module.params['verify_host']
    if '/' not in rsync:
        rsync = module.get_bin_path(rsync, required=True)
    # Result not used directly ('ssh' is spelled literally in --rsh below),
    # but required=True makes the module fail early when ssh is missing.
    ssh = module.get_bin_path('ssh', required=True)
    cmd = '%s --delay-updates -F' % rsync
    if compress:
        cmd = cmd + ' --compress'
    if rsync_timeout:
        cmd = cmd + ' --timeout=%s' % rsync_timeout
    if module.check_mode:
        cmd = cmd + ' --dry-run'
    if delete:
        cmd = cmd + ' --delete-after'
    if existing_only:
        cmd = cmd + ' --existing'
    if checksum:
        cmd = cmd + ' --checksum'
    # --archive implies recursive/links/perms/times/owner/group; in that case
    # emit the --no-* form only when the user explicitly disabled a flag.
    if archive:
        cmd = cmd + ' --archive'
        if recursive is False:
            cmd = cmd + ' --no-recursive'
        if links is False:
            cmd = cmd + ' --no-links'
        if copy_links is True:
            cmd = cmd + ' --copy-links'
        if perms is False:
            cmd = cmd + ' --no-perms'
        if times is False:
            cmd = cmd + ' --no-times'
        if owner is False:
            cmd = cmd + ' --no-owner'
        if group is False:
            cmd = cmd + ' --no-group'
    else:
        if recursive is True:
            cmd = cmd + ' --recursive'
        if links is True:
            cmd = cmd + ' --links'
        if copy_links is True:
            cmd = cmd + ' --copy-links'
        if perms is True:
            cmd = cmd + ' --perms'
        if times is True:
            cmd = cmd + ' --times'
        if owner is True:
            cmd = cmd + ' --owner'
        if group is True:
            cmd = cmd + ' --group'
    if dirs:
        cmd = cmd + ' --dirs'
    if private_key is None:
        private_key = ''
    else:
        private_key = '-i '+ private_key
    # ssh_opts = '-S none'
    ssh_opts = ""
    # NOTE(review): the hard-coded user home in this ControlPath looks
    # machine-specific -- confirm whether it should be configurable.
    control_path = "-o ControlPath=/home/clizarraga/.ansible/cp/ansible-ssh-%h-%p-%r"
    if not verify_host:
        # ssh_opts = '%s -o StrictHostKeyChecking=no' % ssh_opts
        ssh_opts = '-o StrictHostKeyChecking=no'
    if ssh_args:
        ssh_opts = '%s %s' % (ssh_opts, ssh_args)
    if source.startswith('rsync://') and dest.startswith('rsync://'):
        module.fail_json(msg='either src or dest must be a localhost', rc=1)
    if not source.startswith('rsync://') and not dest.startswith('rsync://'):
        if dest_port != 22:
            cmd += " --rsh 'ssh %s %s -o Port=%s'" % (private_key, ssh_opts, dest_port)
        else:
            cmd += " --rsh 'ssh %s %s %s'" % (private_key, ssh_opts, control_path)  # need ssh param
    if rsync_path:
        cmd = cmd + " --rsync-path=%s" % (rsync_path)
    if rsync_opts:
        cmd = cmd + " " + " ".join(rsync_opts)
    if partial:
        cmd = cmd + " --partial"
    # Prefix every transferred item with a marker so we can tell from the
    # output whether rsync actually changed anything.
    changed_marker = '<<CHANGED>>'
    cmd = cmd + " --out-format='" + changed_marker + "%i %n%L'"
    # expand the paths
    if '@' not in source:
        source = os.path.expanduser(source)
    if '@' not in dest:
        dest = os.path.expanduser(dest)
    cmd = ' '.join([cmd, source, dest])
    cmdstr = cmd
    (rc, out, err) = module.run_command(cmd)
    if rc:
        return module.fail_json(msg=err, rc=rc, cmd=cmdstr)
    else:
        changed = changed_marker in out
        out_clean=out.replace(changed_marker,'')
        out_lines=out_clean.split('\n')
        while '' in out_lines:
            out_lines.remove('')
        if module._diff:
            diff = {'prepared': out_clean}
            return module.exit_json(changed=changed, msg=out_clean,
                    rc=rc, cmd=cmdstr, stdout_lines=out_lines,
                    diff=diff)
        else:
            return module.exit_json(changed=changed, msg=out_clean,
                    rc=rc, cmd=cmdstr, stdout_lines=out_lines)
# import module snippets
from ansible.module_utils.basic import *
main()
| true | true |
f7ffe43478e59d4e76f10ecc71e03ff0041074d2 | 927 | py | Python | src/utils/print_config.py | jabra/PrivacyFL | 93dd68cc7dcd52f1f95e28c514d21a1e1ea081cb | [
"Unlicense",
"MIT"
] | null | null | null | src/utils/print_config.py | jabra/PrivacyFL | 93dd68cc7dcd52f1f95e28c514d21a1e1ea081cb | [
"Unlicense",
"MIT"
] | null | null | null | src/utils/print_config.py | jabra/PrivacyFL | 93dd68cc7dcd52f1f95e28c514d21a1e1ea081cb | [
"Unlicense",
"MIT"
] | null | null | null | import sys
sys.path.append('..')
import config
def print_config(len_per_iteration):
    """
    Prints simulation parameters at the start of a simulation.

    Most values come from the ``config`` module; the argument is computed
    dynamically at runtime and hence not available in config.

    :param len_per_iteration: length of training dataset for each client for each iteration
    """
    print('\n')
    print(
        'Running simulation with: \n{} clients \n{} iterations \n{}differential privacy \nand {}security \n'.format(
            config.NUM_CLIENTS, config.ITERATIONS, 'no ' if not config.USE_DP_PRIVACY else '',
            'no ' if not config.USE_SECURITY else ''))
    print('Training length per client per iteration is {}\n'.format((len_per_iteration)))
    print(
        'Simulation parameters are: \nTolerance for weight convergence = {} \nEpsilon for DP privacy is {}'.format(
            config.tolerance, config.epsilon))
| 42.136364 | 116 | 0.692557 | import sys
sys.path.append('..')
import config
def print_config(len_per_iteration):
print('\n')
print(
'Running simulation with: \n{} clients \n{} iterations \n{}differential privacy \nand {}security \n'.format(
config.NUM_CLIENTS, config.ITERATIONS, 'no ' if not config.USE_DP_PRIVACY else '',
'no ' if not config.USE_SECURITY else ''))
print('Training length per client per iteration is {}\n'.format((len_per_iteration)))
print(
'Simulation parameters are: \nTolerance for weight convergence = {} \nEpsilon for DP privacy is {}'.format(
config.tolerance, config.epsilon))
| true | true |
f7ffe45b0b680cfeefb1ddf830a61bdf34ba9439 | 1,033 | py | Python | server/dva/celery.py | Yinqingwen/Dva | 3b8d1d1435f6a804a9c370006b931f9dc50a7462 | [
"BSD-3-Clause",
"Apache-2.0",
"MIT"
] | 1 | 2021-04-10T20:19:35.000Z | 2021-04-10T20:19:35.000Z | server/dva/celery.py | Yinqingwen/Dva | 3b8d1d1435f6a804a9c370006b931f9dc50a7462 | [
"BSD-3-Clause",
"Apache-2.0",
"MIT"
] | null | null | null | server/dva/celery.py | Yinqingwen/Dva | 3b8d1d1435f6a804a9c370006b931f9dc50a7462 | [
"BSD-3-Clause",
"Apache-2.0",
"MIT"
] | 3 | 2021-07-13T10:52:48.000Z | 2022-03-11T03:31:45.000Z | from __future__ import absolute_import
import os
from celery import Celery
from kombu.common import Broadcast
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dva.settings')
from django.conf import settings # noqa
app = Celery('dva')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Serialize everything as JSON; prefetch multiplier of 1 keeps a worker
# from reserving extra tasks while running a long one.  Result backend
# 'django-db' stores task state via Django (presumably the
# django-celery-results backend -- confirm it is installed).
app.conf.update(
    CELERYD_PREFETCH_MULTIPLIER=1,
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_RESULT_BACKEND='django-db',
)
app.conf.task_queue_max_priority = 10
# Broadcast queues fan each message out to every worker bound to the queue;
# used here for host management and retriever-refresh messages.
app.conf.task_queues = (Broadcast('qmanager'),Broadcast('qrefresher'),)
app.conf.task_routes = {
    'manage_host': {'queue': 'qmanager'},
    'refresh_retriever': {'queue': 'qrefresher'},
}
# Discover tasks modules in every installed Django app (lazy lambda so
# settings are only read once Django is configured).
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # Sanity-check task: prints its own request context on the worker.
    print('Request: {0!r}'.format(self.request))
| 26.487179 | 71 | 0.752178 | from __future__ import absolute_import
import os
from celery import Celery
from kombu.common import Broadcast
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dva.settings')
from django.conf import settings
app = Celery('dva')
app.config_from_object('django.conf:settings')
app.conf.update(
CELERYD_PREFETCH_MULTIPLIER=1,
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_RESULT_BACKEND='django-db',
)
app.conf.task_queue_max_priority = 10
app.conf.task_queues = (Broadcast('qmanager'),Broadcast('qrefresher'),)
app.conf.task_routes = {
'manage_host': {'queue': 'qmanager'},
'refresh_retriever': {'queue': 'qrefresher'},
}
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| true | true |
f7ffe4a8e96616b9a39388581dae3d307ee35ac0 | 823 | py | Python | src/attack_flow/scripts/graphviz.py | center-for-threat-informed-defense/attack-flow | 93a39a29edca19be979f1a8df4b95564bba49e8d | [
"Apache-2.0"
] | 165 | 2022-03-03T14:41:10.000Z | 2022-03-31T10:36:41.000Z | src/attack_flow/scripts/graphviz.py | center-for-threat-informed-defense/attack-flow | 93a39a29edca19be979f1a8df4b95564bba49e8d | [
"Apache-2.0"
] | 3 | 2022-03-03T14:55:30.000Z | 2022-03-25T14:22:57.000Z | src/attack_flow/scripts/graphviz.py | center-for-threat-informed-defense/attack-flow | 93a39a29edca19be979f1a8df4b95564bba49e8d | [
"Apache-2.0"
] | 18 | 2022-03-03T16:56:05.000Z | 2022-03-24T21:05:26.000Z | """
Convert ATT&CK Flow documents to GraphViz format.
"""
import argparse
import json
import attack_flow.graphviz
def parse_args():
    """Build the command-line parser and parse ``sys.argv``.

    Returns a namespace with ``attack_flow`` (input path), ``graphviz``
    (output path) and the ``verbose`` flag.
    """
    cli = argparse.ArgumentParser(description=__doc__)
    # Two required positional paths, declared table-style.
    positionals = (
        ('attack_flow', 'The Attack Flow document to convert.'),
        ('graphviz', 'The path to write the converted file to.'),
    )
    for arg_name, help_text in positionals:
        cli.add_argument(arg_name, help=help_text)
    cli.add_argument('--verbose', action='store_true',
                     help='Display verbose errors.')
    return cli.parse_args()
if __name__ == '__main__':
    args = parse_args()
    # Read the Attack Flow document (a JSON file) from the input path.
    with open(args.attack_flow, 'r') as af:
        attack_flow_doc = json.load(af)
    # Convert to GraphViz DOT text and write it to the output path.
    converted = attack_flow.graphviz.convert(attack_flow_doc)
    with open(args.graphviz, 'w') as gv:
        gv.write(converted)
| 29.392857 | 72 | 0.652491 | import argparse
import json
import attack_flow.graphviz
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('attack_flow',
help='The Attack Flow document to convert.')
parser.add_argument('graphviz',
help='The path to write the converted file to.')
parser.add_argument('--verbose', action='store_true',
help='Display verbose errors.')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
with open(args.attack_flow, 'r') as af:
attack_flow_doc = json.load(af)
converted = attack_flow.graphviz.convert(attack_flow_doc)
with open(args.graphviz, 'w') as gv:
gv.write(converted)
| true | true |
f7ffe4b88557ef5caff135ba51749eb640ce96ca | 1,096 | py | Python | setup.py | AlexandrMov/sqlbehave | 587e71a7cf1307867fb848daf98c3990bb554165 | [
"MIT"
] | null | null | null | setup.py | AlexandrMov/sqlbehave | 587e71a7cf1307867fb848daf98c3990bb554165 | [
"MIT"
] | 2 | 2017-03-24T13:58:52.000Z | 2017-12-01T06:54:39.000Z | setup.py | AlexandrMov/sqlbehave | 587e71a7cf1307867fb848daf98c3990bb554165 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
from codecs import open
from os import path
# Directory containing this setup.py, so README.md resolves regardless of
# the working directory pip/setuptools invokes us from.
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name="sqlbehave",
    version="0.0.2.dev2",
    description='sqlbehave is behaviour-driven development based on Behave package',
    long_description=long_description,
    url="https://github.com/AlexandrMov/sqlbehave",
    author='Alexandr Movsunov',
    author_email='sqlfuse@gmail.com',
    license='MIT',
    classifiers=[
        "Development Status :: 1 - Planning",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Testing",
        "Programming Language :: SQL",
        "Programming Language :: Python :: 3 :: Only"
    ],
    keywords='bdd behave sql development testing',
    install_requires=["behave>=1.2.5", "sqlalchemy>=1.1"],
    packages=find_packages(),
    # Ship the bundled helper scripts alongside the package.
    package_data={
        'scripts': ['mssql'],
    },
    include_package_data=True,
    scripts=['sqlbehave/bin/sqlbehave-admin.py'],
)
| 31.314286 | 84 | 0.663321 | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name="sqlbehave",
version="0.0.2.dev2",
description='sqlbehave is behaviour-driven development based on Behave package',
long_description=long_description,
url="https://github.com/AlexandrMov/sqlbehave",
author='Alexandr Movsunov',
author_email='sqlfuse@gmail.com',
license='MIT',
classifiers=[
"Development Status :: 1 - Planning",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Testing",
"Programming Language :: SQL",
"Programming Language :: Python :: 3 :: Only"
],
keywords='bdd behave sql development testing',
install_requires=["behave>=1.2.5", "sqlalchemy>=1.1"],
packages=find_packages(),
package_data={
'scripts': ['mssql'],
},
include_package_data=True,
scripts=['sqlbehave/bin/sqlbehave-admin.py'],
)
| true | true |
f7ffe59dcff0102f4855cc0bce68c23e8115095b | 278 | py | Python | projects/code_combat/4_Backwoods_Forest/159-Blind_Distance/blind.py | only-romano/junkyard | b60a25b2643f429cdafee438d20f9966178d6f36 | [
"MIT"
] | null | null | null | projects/code_combat/4_Backwoods_Forest/159-Blind_Distance/blind.py | only-romano/junkyard | b60a25b2643f429cdafee438d20f9966178d6f36 | [
"MIT"
] | null | null | null | projects/code_combat/4_Backwoods_Forest/159-Blind_Distance/blind.py | only-romano/junkyard | b60a25b2643f429cdafee438d20f9966178d6f36 | [
"MIT"
] | null | null | null | def nearestEnemyDistance():
enemy = hero.findNearestEnemy()
result = 0
if enemy:
result = hero.distanceTo(enemy)
return result
# Forever: announce the distance to the nearest enemy whenever one is
# in sight (distance 0 means no enemy was found).
while True:
    enemyDistance = nearestEnemyDistance()
    if enemyDistance > 0:
        hero.say(enemyDistance)
| 21.384615 | 43 | 0.636691 | def nearestEnemyDistance():
enemy = hero.findNearestEnemy()
result = 0
if enemy:
result = hero.distanceTo(enemy)
return result
while True:
enemyDistance = nearestEnemyDistance()
if enemyDistance > 0:
hero.say(enemyDistance)
| true | true |
f7ffe5ee6efd884d5b57ef0d7b05ecab3c6be7b4 | 408 | py | Python | paypalpayoutssdk/config.py | truthiswill/Payouts-Python-SDK | ba04ffafb8165a1b7cdfd5841f08a96dccdd190b | [
"BSD-Source-Code"
] | 23 | 2020-03-02T13:31:55.000Z | 2022-03-06T11:25:21.000Z | paypalpayoutssdk/config.py | truthiswill/Payouts-Python-SDK | ba04ffafb8165a1b7cdfd5841f08a96dccdd190b | [
"BSD-Source-Code"
] | 4 | 2020-09-26T08:40:26.000Z | 2022-03-01T17:29:51.000Z | paypalpayoutssdk/config.py | truthiswill/Payouts-Python-SDK | ba04ffafb8165a1b7cdfd5841f08a96dccdd190b | [
"BSD-Source-Code"
] | 21 | 2020-02-07T10:02:57.000Z | 2021-09-09T18:05:02.000Z | __version__ = "1.0.0"
# Distribution metadata for the PayPal Payouts SDK package.
__pypi_username__ = "paypal"
__pypi_packagename__ = "paypal-payouts-sdk"
__github_username__ = "paypal"
__github_reponame__ = "Payouts-Python-SDK"
import re
import os
def find_packages():
    """Discover importable packages below the current working directory.

    Walks ``.`` and returns a dotted package name for every directory
    that contains an ``__init__.py`` (e.g. ``pkg/sub`` -> ``pkg.sub``).

    :returns: list of dotted package names, in ``os.walk`` order
    """
    ret = []
    for root, dirs, files in os.walk('.'):
        if '__init__.py' in files:
            # Turn the filesystem path into a dotted module path, then
            # strip the leading './' artifact.  Use os.sep so this also
            # works on Windows, and an explicit [A-Za-z] class instead of
            # the buggy [A-z] range (which also matches [ \ ] ^ _ `).
            dotted = root.replace(os.sep, '.')
            ret.append(re.sub(r'^[^A-Za-z0-9_]+', '', dotted))
    return ret
__pypi_username__ = "paypal"
__pypi_packagename__ = "paypal-payouts-sdk"
__github_username__ = "paypal"
__github_reponame__ = "Payouts-Python-SDK"
import re
import os
def find_packages():
path = "."
ret = []
for root, dirs, files in os.walk(path):
if '__init__.py' in files:
ret.append(re.sub('^[^A-z0-9_]+', '', root.replace('/', '.')))
return ret | true | true |
f7ffe64fb0ae085e941308aab7b3c4ec8cad6e53 | 362 | py | Python | homepage/tests/test_misc_stuff.py | ralphqq/dc-alerts-service | f00131dd7b2a0ee72ae5476b40f8552480c655b6 | [
"MIT"
] | null | null | null | homepage/tests/test_misc_stuff.py | ralphqq/dc-alerts-service | f00131dd7b2a0ee72ae5476b40f8552480c655b6 | [
"MIT"
] | 7 | 2020-06-05T22:45:18.000Z | 2021-06-09T18:32:43.000Z | homepage/tests/test_misc_stuff.py | ralphqq/dc-alerts-service | f00131dd7b2a0ee72ae5476b40f8552480c655b6 | [
"MIT"
] | null | null | null | from django.conf import settings
from django.test import TestCase
class MiscStuffTest(TestCase):
    """Checks the settings used to build absolute external URLs."""

    def test_external_url_building_settings(self):
        # Same two equality checks as before, driven from a table.
        expected = (
            ('EXTERNAL_URL_SCHEME', 'http'),
            ('EXTERNAL_URL_HOST', 'testserver'),
        )
        for setting_name, setting_value in expected:
            self.assertEqual(getattr(settings, setting_name), setting_value)
from django.test import TestCase
class MiscStuffTest(TestCase):
def test_external_url_building_settings(self):
self.assertEqual(
settings.EXTERNAL_URL_SCHEME,
'http'
)
self.assertEqual(
settings.EXTERNAL_URL_HOST,
'testserver'
) | true | true |
f7ffe660a98ad0e182716b9ac152e211afd97eb2 | 345 | py | Python | shark/utils/dynamic_callable.py | bikeshedder/shark | 36c5baecee6f5d8967aea8385b509d84dd9affa6 | [
"BSD-2-Clause"
] | null | null | null | shark/utils/dynamic_callable.py | bikeshedder/shark | 36c5baecee6f5d8967aea8385b509d84dd9affa6 | [
"BSD-2-Clause"
] | 1 | 2022-03-12T00:23:43.000Z | 2022-03-12T00:23:43.000Z | shark/utils/dynamic_callable.py | bikeshedder/shark | 36c5baecee6f5d8967aea8385b509d84dd9affa6 | [
"BSD-2-Clause"
] | null | null | null | from importlib import import_module
def dynamic_callable(name):
module_name, function_name = name.rsplit('.', 1)
def wrapped(*args, **kwargs):
module = import_module(module_name)
function = getattr(module, function_name)
return function(*args, **kwargs)
wrapped.__name__ == function_name
return wrapped
| 28.75 | 52 | 0.692754 | from importlib import import_module
def dynamic_callable(name):
module_name, function_name = name.rsplit('.', 1)
def wrapped(*args, **kwargs):
module = import_module(module_name)
function = getattr(module, function_name)
return function(*args, **kwargs)
wrapped.__name__ == function_name
return wrapped
| true | true |
f7ffe86fc7b8c773b2ef4a9eef3ba5381fd2da5c | 2,151 | py | Python | robotice/conf/managers/actions.py | robotice/robotice | aa8af3ac357021a8884bdbdaae760c7a368bfd13 | [
"Apache-2.0"
] | 2 | 2015-02-19T23:39:28.000Z | 2015-03-27T21:59:50.000Z | robotice/conf/managers/actions.py | robotice/robotice | aa8af3ac357021a8884bdbdaae760c7a368bfd13 | [
"Apache-2.0"
] | 1 | 2015-01-17T12:22:42.000Z | 2015-01-17T12:22:42.000Z | robotice/conf/managers/actions.py | robotice/robotice | aa8af3ac357021a8884bdbdaae760c7a368bfd13 | [
"Apache-2.0"
] | null | null | null |
"""
base object managers
"""
import sys
import logging
from robotice.conf.managers import base
from celery import states
from celery import Celery
from celery.result import AsyncResult
from celery.backends.base import DisabledBackend
import glob
from yaml import load, dump, safe_dump
LOG = logging.getLogger(__name__)
class ActionManager(base.BaseConfigManager, base.CeleryManager):
    """Loads action definitions from YAML and dispatches them as Celery tasks.

    NOTE(review): this module is Python 2 only (``except Exception, e`` and
    ``basestring``), and it references ``json`` and ``exc`` which are not
    imported in this file -- confirm they are provided elsewhere before
    relying on the error paths.
    """
    # move to config
    config_path = "actions/*.yml"
    def do(self, action_id):
        """Execute a action by name(uuid) (doesn't require task sources)
        """
        # Look up the action definition via the module-level manager below.
        action = actions.get(action_id)
        if not action:
            return {"status": 404, "error": "Action %s not found." % action_id}
        LOG.debug(action)
        app = self.capp()
        args, kwargs, options = self._get_task_args(action.get("options"))
        command = action.get("command", None)
        LOG.error(args)
        LOG.debug("Invoking task '%s' with '%s' and '%s'",
                  command, args, kwargs)
        # Dispatch by task name only; the worker must have the task
        # registered (no task sources needed on this side).
        result = app.send_task(
            command, args=args, kwargs=kwargs, **options)
        response = {'task-id': result.task_id}
        response["action"] = action
        if self.backend_configured(result):
            response.update(state=result.state)
        return response
    def backend_configured(self, result):
        # True when a real result backend is configured, i.e. task state
        # can be queried.
        return not isinstance(result.backend, DisabledBackend)
    def _get_task_args(self, body):
        """helper which return task args, kwargs and options
        """
        try:
            options = body
            if isinstance(body, basestring):
                options = json.loads(body)
            args = options.pop('args', [])
            kwargs = options.pop('kwargs', {})
        except Exception, e:
            raise exc.HTTPBadRequest(str(e))
        if not isinstance(args, (list, tuple)):
            try:
                args = args.values()
                return args, kwargs, options
            except Exception, e:
                raise e
        # NOTE(review): this raise sits OUTSIDE the isinstance guard above,
        # so it fires even when args IS a list/tuple, and the final return
        # below is unreachable.  It looks like the raise was meant to be
        # inside the if-block -- confirm the intended control flow.
        raise exc.HTTPBadRequest('args must be an array')
        return args, kwargs, options
actions = ActionManager() | 25.305882 | 79 | 0.599721 |
"""
base object managers
"""
import sys
import logging
from robotice.conf.managers import base
from celery import states
from celery import Celery
from celery.result import AsyncResult
from celery.backends.base import DisabledBackend
import glob
from yaml import load, dump, safe_dump
LOG = logging.getLogger(__name__)
class ActionManager(base.BaseConfigManager, base.CeleryManager):
config_path = "actions/*.yml"
def do(self, action_id):
"""Execute a action by name(uuid) (doesn't require task sources)
"""
action = actions.get(action_id)
if not action:
return {"status": 404, "error": "Action %s not found." % action_id}
LOG.debug(action)
app = self.capp()
args, kwargs, options = self._get_task_args(action.get("options"))
command = action.get("command", None)
LOG.error(args)
LOG.debug("Invoking task '%s' with '%s' and '%s'",
command, args, kwargs)
result = app.send_task(
command, args=args, kwargs=kwargs, **options)
response = {'task-id': result.task_id}
response["action"] = action
if self.backend_configured(result):
response.update(state=result.state)
return response
def backend_configured(self, result):
return not isinstance(result.backend, DisabledBackend)
def _get_task_args(self, body):
"""helper which return task args, kwargs and options
"""
try:
options = body
if isinstance(body, basestring):
options = json.loads(body)
args = options.pop('args', [])
kwargs = options.pop('kwargs', {})
except Exception, e:
raise exc.HTTPBadRequest(str(e))
if not isinstance(args, (list, tuple)):
try:
args = args.values()
return args, kwargs, options
except Exception, e:
raise e
raise exc.HTTPBadRequest('args must be an array')
return args, kwargs, options
actions = ActionManager() | false | true |
f7ffe881e3d6c7584439f1385c3b5a42c4b05e99 | 9,907 | py | Python | pylibup/static.py | trisongz/pylibup | 456c082032cb14e7b2f12f115b4033237a0b1d1f | [
"MIT"
] | null | null | null | pylibup/static.py | trisongz/pylibup | 456c082032cb14e7b2f12f115b4033237a0b1d1f | [
"MIT"
] | null | null | null | pylibup/static.py | trisongz/pylibup | 456c082032cb14e7b2f12f115b4033237a0b1d1f | [
"MIT"
] | null | null | null | setup_py_template = """
import os
import sys
from pathlib import Path
from setuptools import setup, find_packages
{% if require_py3 %}
if sys.version_info.major != 3:
raise RuntimeError("This package requires Python 3+")
{% endif %}
version = '{{ pkg_version }}'
pkg_name = '{{ pkg_name }}'
gitrepo = '{{ git_repo }}/{{ pkg_name }}'
root = Path(__file__).parent
requirements = [
{%- for item in requirements %}
'{{ item }}',
{%- endfor %}
]
args = {
'packages': find_packages(include = ['{{ lib_name }}', '{{ lib_name }}.*']),
'install_requires': requirements,
'long_description': root.joinpath('README.md').read_text(encoding='utf-8'),
{%- if require_py3_version %}
'python_requires': '>={{ require_py3_version }}',
{%- endif %}
{%- if kwargs %}
{%- for key, value in kwargs|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
{%- if include_pkg_files %}
'include_package_data': True,
{%- endif %}
{%- if data_files %}
{%- for key, value in data_files|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
'entry_points': {
{%- if cli_cmds %}
'console_scripts': [
{%- for item in cli_cmds %}
'{{ item }}',
{%- endfor %}
],
{%- endif %}
{%- if entry_points %}
{%- for key, value in entry_points|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
}
}
setup(
name=pkg_name,
version=version,
url='https://github.com/{{ git_repo }}/{{ pkg_name }}',
license='MIT Style',
description='{{ description }}',
author='{{ author }}',
author_email='{{ email }}',
long_description_content_type="text/markdown",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: {% if require_py3_version %}{{ require_py3_version }}{% endif %}',
'Topic :: Software Development :: Libraries',
],
**args
)
"""
# Jinja2 template: one requirement per line (rendered requirements.txt).
install_requirements_template = """
{%- for item in requirements %}
{{ item }}
{%- endfor %}
"""
# Jinja2 template: one ignore pattern per line (rendered .gitignore).
# NOTE(review): this loops over `gitignore` (singular) while the metadata
# key defined later in this module is `gitignores` -- confirm the render
# context supplies the singular name.
gitignores_template = """
{%- for item in gitignore %}
{{ item }}
{%- endfor %}
"""
# Jinja2 template: package __init__.py importing each scaffolded submodule.
pyinit_template = """
{%- for item in modules %}
from . import {{ item }}
{%- endfor %}
"""
readme_template = """
# {{ pkg_name }}
{{ description }}
{%- if readme_text %}
---
{{ readme_text }}
{%- endif %}
---
## Quickstart
```bash
# From pypi
pip install --upgrade {{ pkg_name }}
# From source
pip install --upgrade git+https://github.com/{{ git_repo }}/{{ pkg_name }}
```
## Usage
```python
import {{ lib_name }}
## Do something here
```
---
## Libraries & Dependencies
{%- if require_py3_version %}
- `Python {{ require_py3_version }}`
{%- endif %}
{%- for item in requirements %}
- `{{ item }}`
{% endfor %}
---
This Python Library has been autogenerated with [pylibup](https://github.com/trisongz/pylibup).
"""
build_sh_template = """
#!/bin/bash
## Autogenerated from Pylibup
MODE=$1
if [[ "$MODE" == "prod" || "$MODE" == "dist" ]]; then
UPLOAD="dist"
else
UPLOAD="test"
fi
rm -r dist/*
rm -r build/*
python setup.py sdist bdist_wheel
if [[ "$UPLOAD" == "dist" ]]; then
echo "Uploading to Dist Pypi"
python -m twine upload --repository pypi dist/*
else
echo "Uploading to Test Pypi"
python -m twine upload --repository testpypi dist/*
fi
"""
github_action_template = """
## Autogenerated from Pylibup
name: Upload Python Package
on:
push:
paths:
- 'setup.py'
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
"""
github_action_template_pypi_publish = """
## Autogenerated from Pylibup
name: Upload Python Package
on:
push:
paths:
- 'setup.py'
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
"""
github_action_template_docker_build = """
## Autogenerated from Pylibup
name: Build {{ app_name }} Docker Image
on:
push:
paths:
- 'app/**/**'
- '{{ lib_name }}/**'
- 'Dockerfile'
- 'requirements.txt'
- '.github/workflows/docker-build.yaml'
env:
{%- if require_ecr %}
IMG_REPO: {{ ecr_options.repo }}
{% else %}
IMG_REPO: {{ docker_options.repo }}
{%- endif %}
jobs:
build-latest-docker-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
{%- if require_ecr %}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v1
- name: Create ECR Repo if not Exists
uses: int128/create-ecr-repository-action@v1
id: ecr
with:
repository: ${{ env.IMG_REPO }}
- name: Build and Push Docker Image: {{ app_name }}
uses: docker/build-push-action@v2
with:
file: Dockerfile
platforms: linux/amd64
push: true
tags: |
${{ steps.ecr.outputs.repository-uri }}:latest
cache-from: type=gha
cache-to: type=gha,mode=max
{% else %}
- name: Build and Push Docker Image: {{ app_name }}
uses: docker/build-push-action@v2
with:
file: Dockerfile
platforms: linux/amd64
push: true
tags: |
${{ env.IMG_REPO }}:latest
cache-from: type=gha
cache-to: type=gha,mode=max
{%- endif %}
"""
dockerfile_fastapi_template = """
## Autogenerated from Pylibup
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.9
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
COPY ./app /app
WORKDIR /app
ENV PYTHONPATH=/app:$PYTHONPATH
"""
pylib_metadata_template = """
# Autogenerated by Pylib
{%- if setup %}
setup:
{%- for key, value in setup %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if repo %}
repo: {{ repo }}
{%- endif %}
{%- if readme_text %}
readme_text: {{ readme_text }}
{%- endif %}
{%- if project_description %}
project_description: {{ project_description }}
{%- endif %}
{%- if gitignores %}
gitignores:
{%- for item in gitignores %}
- '{{ item }}'
{%- endfor %}
{%- endif %}
{%- if structure %}
structure:
{%- if structure.modules %}
modules:
{%- for item in structure.modules %}
- {{ item }}
{%- endfor %}
{%- endif %}
{%- endif %}
{%- if secrets %}
secrets:
{%- for key, value in secrets %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if options %}
options:
{%- for key, value in options|dictsort %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if workflows %}
workflows:
{%- for key, value in workflows %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
"""
# Defaults for a freshly generated pylib metadata file; assembled into
# ``default_pylib_metadata`` at the bottom of this block.
default_metadata_setup = {
    'author': 'Tri Songz',
    'email': '',
    'git_repo': '',
    'description': '',
    'pkg_version': '0.0.0a',
    'pkg_name': 'pylibup',
    'lib_name': 'pylibup',
    'require_py3': True,
    'require_py3_version': 3.7,
    'requirements': ['lazycls', 'pylogz'],
    'kwargs': {},
    'cli_cmds': [],
}
# Patterns written to the generated .gitignore.
default_metadata_gitignores = [
    'cache*',
    '*.DS_Store',
    'tests*',
    '__pycache__*',
    '*logs',
    '*dist',
    '*build',
    '**build.sh',
    '*test.py',
    '*.egg-info*',
    '*.vscode',
    '**.ipynb',
    '**meta.yaml',
    '**metadata.yaml',
    '**state.yaml'
]
# Submodules scaffolded inside the generated library package.
default_metadata_modules = [
    'classes',
    'client',
    'config',
    'utils',
]
# Workflow secrets: either a literal value or {'from': <source secret name>}.
default_metadata_secrets = {
    'AWS_ACCESS_KEY_ID': {'from': 'AWS_ACCESS_KEY_ID_SVC_ACCOUNT'},
    'AWS_SECRET_ACCESS_KEY': {'from': 'AWS_SECRET_ACCESS_KEY_SVC_ACCOUNT'},
    'AWS_REGION': 'us-east-1',
}
# Feature toggles for the generated repository layout.
default_metadata_options = {
    'default_branch': 'main',
    'include_init': True,
    'include_app': True,
    'include_dockerfile': True,
    'include_buildscript': True,
    'include_reqtext': True,
    'private': True,
}
# Options for the docker-build workflow template.
# NOTE(review): the docker-build workflow template reads
# ``ecr_options.repo`` / ``docker_options.repo`` but these defaults define
# ``img_repo`` -- confirm which key the renderer expects.
default_metadata_dockerbuild_options = {
    'app_name':'',
    'require_ecr': True,
    'ecr_options': {
        'img_repo': ''
    },
    'docker_options':{
        'img_repo': ''
    }
}
default_metadata_workflows = {
    'pypi_publish': True,
    'docker_build': False,
    'docker_build_options': default_metadata_dockerbuild_options
}
# Top-level default metadata document assembled from the pieces above.
default_pylib_metadata = {
    'setup': default_metadata_setup,
    'repo': '',
    'readme_text': '',
    'project_description': '',
    'gitignores': default_metadata_gitignores,
    'structure': {'modules': default_metadata_modules},
    'secrets': default_metadata_secrets,
    'options': default_metadata_options,
    'workflows': default_metadata_workflows
}
| 20.989407 | 109 | 0.589179 | setup_py_template = """
import os
import sys
from pathlib import Path
from setuptools import setup, find_packages
{% if require_py3 %}
if sys.version_info.major != 3:
raise RuntimeError("This package requires Python 3+")
{% endif %}
version = '{{ pkg_version }}'
pkg_name = '{{ pkg_name }}'
gitrepo = '{{ git_repo }}/{{ pkg_name }}'
root = Path(__file__).parent
requirements = [
{%- for item in requirements %}
'{{ item }}',
{%- endfor %}
]
args = {
'packages': find_packages(include = ['{{ lib_name }}', '{{ lib_name }}.*']),
'install_requires': requirements,
'long_description': root.joinpath('README.md').read_text(encoding='utf-8'),
{%- if require_py3_version %}
'python_requires': '>={{ require_py3_version }}',
{%- endif %}
{%- if kwargs %}
{%- for key, value in kwargs|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
{%- if include_pkg_files %}
'include_package_data': True,
{%- endif %}
{%- if data_files %}
{%- for key, value in data_files|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
'entry_points': {
{%- if cli_cmds %}
'console_scripts': [
{%- for item in cli_cmds %}
'{{ item }}',
{%- endfor %}
],
{%- endif %}
{%- if entry_points %}
{%- for key, value in entry_points|dictsort %}
'{{ key }}': {{ value }},
{%- endfor %}
{%- endif %}
}
}
setup(
name=pkg_name,
version=version,
url='https://github.com/{{ git_repo }}/{{ pkg_name }}',
license='MIT Style',
description='{{ description }}',
author='{{ author }}',
author_email='{{ email }}',
long_description_content_type="text/markdown",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: {% if require_py3_version %}{{ require_py3_version }}{% endif %}',
'Topic :: Software Development :: Libraries',
],
**args
)
"""
install_requirements_template = """
{%- for item in requirements %}
{{ item }}
{%- endfor %}
"""
gitignores_template = """
{%- for item in gitignore %}
{{ item }}
{%- endfor %}
"""
pyinit_template = """
{%- for item in modules %}
from . import {{ item }}
{%- endfor %}
"""
readme_template = """
# {{ pkg_name }}
{{ description }}
{%- if readme_text %}
---
{{ readme_text }}
{%- endif %}
---
## Quickstart
```bash
# From pypi
pip install --upgrade {{ pkg_name }}
# From source
pip install --upgrade git+https://github.com/{{ git_repo }}/{{ pkg_name }}
```
## Usage
```python
import {{ lib_name }}
## Do something here
```
---
## Libraries & Dependencies
{%- if require_py3_version %}
- `Python {{ require_py3_version }}`
{%- endif %}
{%- for item in requirements %}
- `{{ item }}`
{% endfor %}
---
This Python Library has been autogenerated with [pylibup](https://github.com/trisongz/pylibup).
"""
build_sh_template = """
#!/bin/bash
## Autogenerated from Pylibup
MODE=$1
if [[ "$MODE" == "prod" || "$MODE" == "dist" ]]; then
UPLOAD="dist"
else
UPLOAD="test"
fi
rm -r dist/*
rm -r build/*
python setup.py sdist bdist_wheel
if [[ "$UPLOAD" == "dist" ]]; then
echo "Uploading to Dist Pypi"
python -m twine upload --repository pypi dist/*
else
echo "Uploading to Test Pypi"
python -m twine upload --repository testpypi dist/*
fi
"""
github_action_template = """
## Autogenerated from Pylibup
name: Upload Python Package
on:
push:
paths:
- 'setup.py'
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
"""
github_action_template_pypi_publish = """
## Autogenerated from Pylibup
name: Upload Python Package
on:
push:
paths:
- 'setup.py'
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
"""
github_action_template_docker_build = """
## Autogenerated from Pylibup
name: Build {{ app_name }} Docker Image
on:
push:
paths:
- 'app/**/**'
- '{{ lib_name }}/**'
- 'Dockerfile'
- 'requirements.txt'
- '.github/workflows/docker-build.yaml'
env:
{%- if require_ecr %}
IMG_REPO: {{ ecr_options.repo }}
{% else %}
IMG_REPO: {{ docker_options.repo }}
{%- endif %}
jobs:
build-latest-docker-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
{%- if require_ecr %}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v1
- name: Create ECR Repo if not Exists
uses: int128/create-ecr-repository-action@v1
id: ecr
with:
repository: ${{ env.IMG_REPO }}
- name: Build and Push Docker Image: {{ app_name }}
uses: docker/build-push-action@v2
with:
file: Dockerfile
platforms: linux/amd64
push: true
tags: |
${{ steps.ecr.outputs.repository-uri }}:latest
cache-from: type=gha
cache-to: type=gha,mode=max
{% else %}
- name: Build and Push Docker Image: {{ app_name }}
uses: docker/build-push-action@v2
with:
file: Dockerfile
platforms: linux/amd64
push: true
tags: |
${{ env.IMG_REPO }}:latest
cache-from: type=gha
cache-to: type=gha,mode=max
{%- endif %}
"""
dockerfile_fastapi_template = """
## Autogenerated from Pylibup
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.9
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
COPY ./app /app
WORKDIR /app
ENV PYTHONPATH=/app:$PYTHONPATH
"""
pylib_metadata_template = """
# Autogenerated by Pylib
{%- if setup %}
setup:
{%- for key, value in setup %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if repo %}
repo: {{ repo }}
{%- endif %}
{%- if readme_text %}
readme_text: {{ readme_text }}
{%- endif %}
{%- if project_description %}
project_description: {{ project_description }}
{%- endif %}
{%- if gitignores %}
gitignores:
{%- for item in gitignores %}
- '{{ item }}'
{%- endfor %}
{%- endif %}
{%- if structure %}
structure:
{%- if structure.modules %}
modules:
{%- for item in structure.modules %}
- {{ item }}
{%- endfor %}
{%- endif %}
{%- endif %}
{%- if secrets %}
secrets:
{%- for key, value in secrets %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if options %}
options:
{%- for key, value in options|dictsort %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
{%- if workflows %}
workflows:
{%- for key, value in workflows %}
{{ key }}: {{ value }}
{%- endfor %}
{%- endif %}
"""
default_metadata_setup = {
'author': 'Tri Songz',
'email': '',
'git_repo': '',
'description': '',
'pkg_version': '0.0.0a',
'pkg_name': 'pylibup',
'lib_name': 'pylibup',
'require_py3': True,
'require_py3_version': 3.7,
'requirements': ['lazycls', 'pylogz'],
'kwargs': {},
'cli_cmds': [],
}
default_metadata_gitignores = [
'cache*',
'*.DS_Store',
'tests*',
'__pycache__*',
'*logs',
'*dist',
'*build',
'**build.sh',
'*test.py',
'*.egg-info*',
'*.vscode',
'**.ipynb',
'**meta.yaml',
'**metadata.yaml',
'**state.yaml'
]
default_metadata_modules = [
'classes',
'client',
'config',
'utils',
]
default_metadata_secrets = {
'AWS_ACCESS_KEY_ID': {'from': 'AWS_ACCESS_KEY_ID_SVC_ACCOUNT'},
'AWS_SECRET_ACCESS_KEY': {'from': 'AWS_SECRET_ACCESS_KEY_SVC_ACCOUNT'},
'AWS_REGION': 'us-east-1',
}
default_metadata_options = {
'default_branch': 'main',
'include_init': True,
'include_app': True,
'include_dockerfile': True,
'include_buildscript': True,
'include_reqtext': True,
'private': True,
}
default_metadata_dockerbuild_options = {
'app_name':'',
'require_ecr': True,
'ecr_options': {
'img_repo': ''
},
'docker_options':{
'img_repo': ''
}
}
default_metadata_workflows = {
'pypi_publish': True,
'docker_build': False,
'docker_build_options': default_metadata_dockerbuild_options
}
default_pylib_metadata = {
'setup': default_metadata_setup,
'repo': '',
'readme_text': '',
'project_description': '',
'gitignores': default_metadata_gitignores,
'structure': {'modules': default_metadata_modules},
'secrets': default_metadata_secrets,
'options': default_metadata_options,
'workflows': default_metadata_workflows
}
| true | true |
f7ffe8ade72a8d8c7bd87a0e343c39fbb201ee82 | 792 | py | Python | kaggle-classification/keras_trainer/custom_metrics.py | CyberFlameGO/conversationai-models | f82f66398b221d9fe3bcfd7641610af454b3db46 | [
"Apache-2.0"
] | 139 | 2018-03-05T16:34:41.000Z | 2022-03-09T01:36:13.000Z | kaggle-classification/keras_trainer/custom_metrics.py | CyberFlameGO/conversationai-models | f82f66398b221d9fe3bcfd7641610af454b3db46 | [
"Apache-2.0"
] | 125 | 2018-03-05T21:19:31.000Z | 2020-11-13T17:50:12.000Z | kaggle-classification/keras_trainer/custom_metrics.py | CyberFlameGO/conversationai-models | f82f66398b221d9fe3bcfd7641610af454b3db46 | [
"Apache-2.0"
] | 47 | 2018-03-21T19:37:14.000Z | 2022-03-09T01:36:21.000Z | """Custom metrics used by Keras models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def auc_roc(y_true, y_pred):
# any tensorflow metric
y_true = tf.to_int32(tf.greater(y_true, 0.5))
value, update_op = tf.metrics.auc(y_true, y_pred)
# find all variables created for this metric
metric_vars = [
i for i in tf.local_variables() if 'auc_roc' in i.name.split('/')[1]
]
# Add metric variables to GLOBAL_VARIABLES collection.
# They will be initialized for new session.
for v in metric_vars:
tf.add_to_collection(tf.GraphKeys.GLOBAL_VARIABLES, v)
# force update metric values
with tf.control_dependencies([update_op]):
value = tf.identity(value)
return value
| 27.310345 | 74 | 0.738636 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def auc_roc(y_true, y_pred):
y_true = tf.to_int32(tf.greater(y_true, 0.5))
value, update_op = tf.metrics.auc(y_true, y_pred)
metric_vars = [
i for i in tf.local_variables() if 'auc_roc' in i.name.split('/')[1]
]
for v in metric_vars:
tf.add_to_collection(tf.GraphKeys.GLOBAL_VARIABLES, v)
with tf.control_dependencies([update_op]):
value = tf.identity(value)
return value
| true | true |
f7ffe92c13c3f191c4b7709360a2d1ca4865e69c | 18,296 | py | Python | libcloud/dns/drivers/gandi_live.py | Matir/libcloud | a496af770e91905e286f9565f84acfcabe2db7d5 | [
"Apache-2.0"
] | 1,435 | 2015-01-07T05:32:51.000Z | 2022-03-25T19:39:34.000Z | libcloud/dns/drivers/gandi_live.py | Matir/libcloud | a496af770e91905e286f9565f84acfcabe2db7d5 | [
"Apache-2.0"
] | 1,158 | 2015-01-04T18:08:42.000Z | 2022-03-24T14:34:57.000Z | libcloud/dns/drivers/gandi_live.py | Matir/libcloud | a496af770e91905e286f9565f84acfcabe2db7d5 | [
"Apache-2.0"
] | 832 | 2015-01-05T09:20:21.000Z | 2022-03-24T19:22:19.000Z | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import copy
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import RecordError
from libcloud.dns.types import ZoneDoesNotExistError, \
RecordDoesNotExistError, ZoneAlreadyExistsError, RecordAlreadyExistsError
from libcloud.dns.base import DNSDriver, Zone, Record
from libcloud.common.gandi_live import ResourceNotFoundError, \
ResourceConflictError, GandiLiveResponse, GandiLiveConnection, \
BaseGandiLiveDriver
__all__ = [
'GandiLiveDNSDriver',
]
TTL_MIN = 300
TTL_MAX = 2592000 # 30 days
API_BASE = '/api/v5'
class GandiLiveDNSResponse(GandiLiveResponse):
pass
class GandiLiveDNSConnection(GandiLiveConnection):
responseCls = GandiLiveDNSResponse
class GandiLiveDNSDriver(BaseGandiLiveDriver, DNSDriver):
"""
API reference can be found at:
https://doc.livedns.gandi.net/
Please note that the Libcloud paradigm of one zone per domain does not
match exactly with Gandi LiveDNS. For Gandi, a "zone" can apply to
multiple domains. This driver behaves as if the domain is a zone, but be
warned that modifying a domain means modifying the zone. Iif you have a
zone associated with mutiple domains, all of those domains will be
modified as well.
"""
type = Provider.GANDI
name = 'Gandi LiveDNS'
website = 'http://www.gandi.net/domain'
connectionCls = GandiLiveDNSConnection
# also supports CAA, CDS
RECORD_TYPE_MAP = {
RecordType.A: 'A',
RecordType.AAAA: 'AAAA',
RecordType.ALIAS: 'ALIAS',
RecordType.CNAME: 'CNAME',
RecordType.DNAME: 'DNAME',
RecordType.DS: 'DS',
RecordType.KEY: 'KEY',
RecordType.LOC: 'LOC',
RecordType.MX: 'MX',
RecordType.NS: 'NS',
RecordType.PTR: 'PTR',
RecordType.SPF: 'SPF',
RecordType.SRV: 'SRV',
RecordType.SSHFP: 'SSHFP',
RecordType.TLSA: 'TLSA',
RecordType.TXT: 'TXT',
RecordType.WKS: 'WKS',
RecordType.CAA: 'CAA',
}
def list_zones(self):
zones = self.connection.request(action='%s/domains' % API_BASE,
method='GET')
return self._to_zones(zones.object)
def get_zone(self, zone_id):
action = '%s/domains/%s' % (API_BASE, zone_id)
try:
zone = self.connection.request(action=action, method='GET')
except ResourceNotFoundError:
raise ZoneDoesNotExistError(value='',
driver=self.connection.driver,
zone_id=zone_id)
return self._to_zone(zone.object)
"""
:param extra: (optional) Extra attribute ('name'); if not provided, name
is based on domain.
:return: :class:`Zone` with attribute zone_uuid set in extra ``dict``
"""
def create_zone(self, domain, type='master', ttl=None, extra=None):
if extra and 'name' in extra:
zone_name = extra['name']
else:
zone_name = '%s zone' % domain
zone_data = {
'name': zone_name,
}
try:
new_zone = self.connection.request(action='%s/zones' % API_BASE,
method='POST',
data=zone_data)
except ResourceConflictError:
raise ZoneAlreadyExistsError(value='',
driver=self.connection.driver,
zone_id=zone_name)
new_zone_uuid = new_zone.headers['location'].split('/')[-1]
self.ex_switch_domain_gandi_zone(domain, new_zone_uuid)
return self._to_zone({'fqdn': domain, 'zone_uuid': new_zone_uuid})
def list_records(self, zone):
action = '%s/domains/%s/records' % (API_BASE, zone.id)
records = self.connection.request(action=action, method='GET')
return self._to_records(records.object, zone)
"""
:return: :class:`Record` with the extra ``dict`` containing attribute
other_values ``list`` of ``str`` for other values; the first
value is returned through Record.data.
"""
def get_record(self, zone_id, record_id):
record_type, name = record_id.split(':', 1)
action = '%s/domains/%s/records/%s/%s' % (API_BASE,
zone_id,
name,
record_type)
try:
record = self.connection.request(action=action, method='GET')
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record_id)
return self._to_record(record.object, self.get_zone(zone_id))[0]
def create_record(self, name, zone, type, data, extra=None):
self._validate_record(None, name, type, data, extra)
action = '%s/domains/%s/records' % (API_BASE, zone.id)
if type == 'MX':
data = '%s %s' % (extra['priority'], data)
record_data = {
'rrset_name': name,
'rrset_type': self.RECORD_TYPE_MAP[type],
'rrset_values': [data],
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='POST',
data=record_data)
except ResourceConflictError:
raise RecordAlreadyExistsError(value='',
driver=self.connection.driver,
record_id='%s:%s' % (
self.RECORD_TYPE_MAP[type],
name))
return self._to_record_sub(record_data, zone, data)
"""
Ignores name and type, not allowed in an update call to the service.
The Gandi service requires all values for a record when doing an update.
Not providing all values during an update means the service will interpret
it as replacing all values with the one data value. The easiest way to
accomplish this is to make sure the value of a get_record is used as the
value of the record parameter.
This method will change the value when only one exists. When more than
one exists, it will combine the data parameter value with the extra dict
values contained in the list extra['_other_records']. This method should
only be used to make single value updates.
To change the number of values in the value set or to change several at
once, delete and recreate, potentially using ex_create_multi_value_record.
"""
def update_record(self, record, name, type, data, extra):
self._validate_record(record.id, record.name, record.type, data, extra)
action = '%s/domains/%s/records/%s/%s' % (
API_BASE,
record.zone.id,
record.name,
self.RECORD_TYPE_MAP[record.type]
)
multiple_value_record = record.extra.get('_multi_value', False)
other_records = record.extra.get('_other_records', [])
if record.type == RecordType.MX:
data = '%s %s' % (extra['priority'], data)
if multiple_value_record and len(other_records) > 0:
rvalue = [data]
for other_record in other_records:
if record.type == RecordType.MX:
rvalue.append('%s %s' %
(other_record['extra']['priority'],
other_record['data']))
else:
rvalue.append(other_record['data'])
else:
rvalue = [data]
record_data = {
'rrset_values': rvalue
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='PUT',
data=record_data)
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record.id)
record_data['rrset_name'] = record.name
record_data['rrset_type'] = self.RECORD_TYPE_MAP[record.type]
return self._to_record(record_data, record.zone)[0]
"""
The Gandi service considers all values for a name-type combination to be
one record. Deleting that name-type record means deleting all values for
it.
"""
def delete_record(self, record):
action = '%s/domains/%s/records/%s/%s' % (
API_BASE,
record.zone.id,
record.name,
self.RECORD_TYPE_MAP[record.type]
)
try:
self.connection.request(action=action, method='DELETE')
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record.id)
# Originally checked for success here, but it should never reach
# this point with anything other than HTTP 200
return True
def export_zone_to_bind_format(self, zone):
action = '%s/domains/%s/records' % (API_BASE, zone.id)
headers = {
'Accept': 'text/plain'
}
resp = self.connection.request(action=action, method='GET',
headers=headers, raw=True)
return resp.body
# There is nothing you can update about a domain; you can update zones'
# names and which zone a domain is associated with, but the domain itself
# is basically immutable. Instead, some ex_ methods for dealing with
# Gandi zones.
"""
Update the name of a Gandi zone.
Note that a Gandi zone is not the same as a Libcloud zone. A Gandi zone
is a separate object type from a Gandi domain; a Gandi zone can be reused
by multiple Gandi domains, and the actual records are associated with the
zone directly. This is mostly masked in this driver to make it look like
records are associated with domains. If you need to step out of that
masking, use these extension methods.
:param zone_uuid: Identifier for the Gandi zone.
:type zone_uuid: ``str``
:param name: New name for the Gandi zone.
:type name: ``str``
:return: ``bool``
"""
def ex_update_gandi_zone_name(self, zone_uuid, name):
action = '%s/zones/%s' % (API_BASE, zone_uuid)
data = {
'name': name,
}
self.connection.request(action=action, method='PATCH',
data=data)
return True
# There is no concept of deleting domains in this API, not even to
# disassociate a domain from a zone. You can delete a zone, though.
"""
Delete a Gandi zone. This may raise a ResourceConflictError if you
try to delete a zone that has domains still using it.
:param zone_uuid: Identifier for the Gandi zone
:type zone_uuid: ``str``
:return: ``bool``
"""
def ex_delete_gandi_zone(self, zone_uuid):
self.connection.request(action='%s/zones/%s' % (API_BASE, zone_uuid),
method='DELETE')
return True
"""
Change the Gandi zone a domain is asociated with.
:param domain: Domain name to switch zones.
:type domain: ``str``
:param zone_uuid: Identifier for the new Gandi zone to switch to.
:type zone_uuid: ``str``
:return: ``bool``
"""
def ex_switch_domain_gandi_zone(self, domain, zone_uuid):
domain_data = {
'zone_uuid': zone_uuid,
}
self.connection.request(action='%s/domains/%s' % (API_BASE, domain),
method='PATCH',
data=domain_data)
return True
"""
Create a new record with multiple values.
:param data: Record values (depends on the record type)
:type data: ``list`` (of ``str``)
:return: ``list`` of :class:`Record`s
"""
def ex_create_multi_value_record(self, name, zone, type, data, extra=None):
self._validate_record(None, name, type, data, extra)
action = '%s/domains/%s/records' % (API_BASE, zone.id)
record_data = {
'rrset_name': name,
'rrset_type': self.RECORD_TYPE_MAP[type],
'rrset_values': data,
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='POST',
data=record_data)
except ResourceConflictError:
raise RecordAlreadyExistsError(value='',
driver=self.connection.driver,
record_id='%s:%s' % (
self.RECORD_TYPE_MAP[type],
name))
return self._to_record(record_data, zone)
def _to_record(self, data, zone):
records = []
rrset_values = data['rrset_values']
multiple_value_record = len(rrset_values) > 1
for index, rrset_value in enumerate(rrset_values):
record = self._to_record_sub(data, zone, rrset_value)
record.extra['_multi_value'] = multiple_value_record
if multiple_value_record:
record.extra['_other_records'] = []
records.append(record)
if multiple_value_record:
for index in range(0, len(records)):
record = records[index]
for other_index, other_record in enumerate(records):
if index == other_index:
continue
extra = copy.deepcopy(other_record.extra)
extra.pop('_multi_value')
extra.pop('_other_records')
item = {
'name': other_record.name,
'data': other_record.data,
'type': other_record.type,
'extra': extra
}
record.extra['_other_records'].append(item)
return records
def _to_record_sub(self, data, zone, value):
extra = {}
ttl = data.get('rrset_ttl', None)
if ttl is not None:
extra['ttl'] = int(ttl)
if data['rrset_type'] == 'MX':
priority, value = value.split()
extra['priority'] = priority
return Record(
id='%s:%s' % (data['rrset_type'], data['rrset_name']),
name=data['rrset_name'],
type=self._string_to_record_type(data['rrset_type']),
data=value,
zone=zone,
driver=self,
ttl=ttl,
extra=extra)
def _to_records(self, data, zone):
records = []
for r in data:
records += self._to_record(r, zone)
return records
def _to_zone(self, zone):
extra = {}
if 'zone_uuid' in zone:
extra = {
'zone_uuid': zone['zone_uuid']
}
return Zone(
id=str(zone['fqdn']),
domain=zone['fqdn'],
type='master',
ttl=0,
driver=self,
extra=extra,
)
def _to_zones(self, zones):
ret = []
for z in zones:
ret.append(self._to_zone(z))
return ret
def _validate_record(self, record_id, name, record_type, data, extra):
if len(data) > 1024:
raise RecordError('Record data must be <= 1024 characters',
driver=self, record_id=record_id)
if type == 'MX' or type == RecordType.MX:
if extra is None or 'priority' not in extra:
raise RecordError('MX record must have a priority',
driver=self, record_id=record_id)
if extra is not None and '_other_records' in extra:
for other_value in extra.get('_other_records', []):
if len(other_value['data']) > 1024:
raise RecordError('Record data must be <= 1024 characters',
driver=self, record_id=record_id)
if type == 'MX' or type == RecordType.MX:
if (other_value['extra'] is None
or 'priority' not in other_value['extra']):
raise RecordError('MX record must have a priority',
driver=self, record_id=record_id)
if extra is not None and 'ttl' in extra:
if extra['ttl'] < TTL_MIN:
raise RecordError('TTL must be at least 300 seconds',
driver=self, record_id=record_id)
if extra['ttl'] > TTL_MAX:
raise RecordError('TTL must not exceed 30 days',
driver=self, record_id=record_id)
| 37.723711 | 79 | 0.565643 |
from __future__ import with_statement
import copy
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import RecordError
from libcloud.dns.types import ZoneDoesNotExistError, \
RecordDoesNotExistError, ZoneAlreadyExistsError, RecordAlreadyExistsError
from libcloud.dns.base import DNSDriver, Zone, Record
from libcloud.common.gandi_live import ResourceNotFoundError, \
ResourceConflictError, GandiLiveResponse, GandiLiveConnection, \
BaseGandiLiveDriver
__all__ = [
'GandiLiveDNSDriver',
]
TTL_MIN = 300
TTL_MAX = 2592000
API_BASE = '/api/v5'
class GandiLiveDNSResponse(GandiLiveResponse):
pass
class GandiLiveDNSConnection(GandiLiveConnection):
responseCls = GandiLiveDNSResponse
class GandiLiveDNSDriver(BaseGandiLiveDriver, DNSDriver):
type = Provider.GANDI
name = 'Gandi LiveDNS'
website = 'http://www.gandi.net/domain'
connectionCls = GandiLiveDNSConnection
RECORD_TYPE_MAP = {
RecordType.A: 'A',
RecordType.AAAA: 'AAAA',
RecordType.ALIAS: 'ALIAS',
RecordType.CNAME: 'CNAME',
RecordType.DNAME: 'DNAME',
RecordType.DS: 'DS',
RecordType.KEY: 'KEY',
RecordType.LOC: 'LOC',
RecordType.MX: 'MX',
RecordType.NS: 'NS',
RecordType.PTR: 'PTR',
RecordType.SPF: 'SPF',
RecordType.SRV: 'SRV',
RecordType.SSHFP: 'SSHFP',
RecordType.TLSA: 'TLSA',
RecordType.TXT: 'TXT',
RecordType.WKS: 'WKS',
RecordType.CAA: 'CAA',
}
def list_zones(self):
zones = self.connection.request(action='%s/domains' % API_BASE,
method='GET')
return self._to_zones(zones.object)
def get_zone(self, zone_id):
action = '%s/domains/%s' % (API_BASE, zone_id)
try:
zone = self.connection.request(action=action, method='GET')
except ResourceNotFoundError:
raise ZoneDoesNotExistError(value='',
driver=self.connection.driver,
zone_id=zone_id)
return self._to_zone(zone.object)
def create_zone(self, domain, type='master', ttl=None, extra=None):
if extra and 'name' in extra:
zone_name = extra['name']
else:
zone_name = '%s zone' % domain
zone_data = {
'name': zone_name,
}
try:
new_zone = self.connection.request(action='%s/zones' % API_BASE,
method='POST',
data=zone_data)
except ResourceConflictError:
raise ZoneAlreadyExistsError(value='',
driver=self.connection.driver,
zone_id=zone_name)
new_zone_uuid = new_zone.headers['location'].split('/')[-1]
self.ex_switch_domain_gandi_zone(domain, new_zone_uuid)
return self._to_zone({'fqdn': domain, 'zone_uuid': new_zone_uuid})
def list_records(self, zone):
action = '%s/domains/%s/records' % (API_BASE, zone.id)
records = self.connection.request(action=action, method='GET')
return self._to_records(records.object, zone)
def get_record(self, zone_id, record_id):
record_type, name = record_id.split(':', 1)
action = '%s/domains/%s/records/%s/%s' % (API_BASE,
zone_id,
name,
record_type)
try:
record = self.connection.request(action=action, method='GET')
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record_id)
return self._to_record(record.object, self.get_zone(zone_id))[0]
def create_record(self, name, zone, type, data, extra=None):
self._validate_record(None, name, type, data, extra)
action = '%s/domains/%s/records' % (API_BASE, zone.id)
if type == 'MX':
data = '%s %s' % (extra['priority'], data)
record_data = {
'rrset_name': name,
'rrset_type': self.RECORD_TYPE_MAP[type],
'rrset_values': [data],
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='POST',
data=record_data)
except ResourceConflictError:
raise RecordAlreadyExistsError(value='',
driver=self.connection.driver,
record_id='%s:%s' % (
self.RECORD_TYPE_MAP[type],
name))
return self._to_record_sub(record_data, zone, data)
def update_record(self, record, name, type, data, extra):
self._validate_record(record.id, record.name, record.type, data, extra)
action = '%s/domains/%s/records/%s/%s' % (
API_BASE,
record.zone.id,
record.name,
self.RECORD_TYPE_MAP[record.type]
)
multiple_value_record = record.extra.get('_multi_value', False)
other_records = record.extra.get('_other_records', [])
if record.type == RecordType.MX:
data = '%s %s' % (extra['priority'], data)
if multiple_value_record and len(other_records) > 0:
rvalue = [data]
for other_record in other_records:
if record.type == RecordType.MX:
rvalue.append('%s %s' %
(other_record['extra']['priority'],
other_record['data']))
else:
rvalue.append(other_record['data'])
else:
rvalue = [data]
record_data = {
'rrset_values': rvalue
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='PUT',
data=record_data)
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record.id)
record_data['rrset_name'] = record.name
record_data['rrset_type'] = self.RECORD_TYPE_MAP[record.type]
return self._to_record(record_data, record.zone)[0]
def delete_record(self, record):
action = '%s/domains/%s/records/%s/%s' % (
API_BASE,
record.zone.id,
record.name,
self.RECORD_TYPE_MAP[record.type]
)
try:
self.connection.request(action=action, method='DELETE')
except ResourceNotFoundError:
raise RecordDoesNotExistError(value='',
driver=self.connection.driver,
record_id=record.id)
return True
def export_zone_to_bind_format(self, zone):
action = '%s/domains/%s/records' % (API_BASE, zone.id)
headers = {
'Accept': 'text/plain'
}
resp = self.connection.request(action=action, method='GET',
headers=headers, raw=True)
return resp.body
# names and which zone a domain is associated with, but the domain itself
# is basically immutable. Instead, some ex_ methods for dealing with
# Gandi zones.
def ex_update_gandi_zone_name(self, zone_uuid, name):
action = '%s/zones/%s' % (API_BASE, zone_uuid)
data = {
'name': name,
}
self.connection.request(action=action, method='PATCH',
data=data)
return True
# There is no concept of deleting domains in this API, not even to
# disassociate a domain from a zone. You can delete a zone, though.
def ex_delete_gandi_zone(self, zone_uuid):
self.connection.request(action='%s/zones/%s' % (API_BASE, zone_uuid),
method='DELETE')
return True
def ex_switch_domain_gandi_zone(self, domain, zone_uuid):
domain_data = {
'zone_uuid': zone_uuid,
}
self.connection.request(action='%s/domains/%s' % (API_BASE, domain),
method='PATCH',
data=domain_data)
return True
def ex_create_multi_value_record(self, name, zone, type, data, extra=None):
self._validate_record(None, name, type, data, extra)
action = '%s/domains/%s/records' % (API_BASE, zone.id)
record_data = {
'rrset_name': name,
'rrset_type': self.RECORD_TYPE_MAP[type],
'rrset_values': data,
}
if extra is not None and 'ttl' in extra:
record_data['rrset_ttl'] = extra['ttl']
try:
self.connection.request(action=action, method='POST',
data=record_data)
except ResourceConflictError:
raise RecordAlreadyExistsError(value='',
driver=self.connection.driver,
record_id='%s:%s' % (
self.RECORD_TYPE_MAP[type],
name))
return self._to_record(record_data, zone)
def _to_record(self, data, zone):
records = []
rrset_values = data['rrset_values']
multiple_value_record = len(rrset_values) > 1
for index, rrset_value in enumerate(rrset_values):
record = self._to_record_sub(data, zone, rrset_value)
record.extra['_multi_value'] = multiple_value_record
if multiple_value_record:
record.extra['_other_records'] = []
records.append(record)
if multiple_value_record:
for index in range(0, len(records)):
record = records[index]
for other_index, other_record in enumerate(records):
if index == other_index:
continue
extra = copy.deepcopy(other_record.extra)
extra.pop('_multi_value')
extra.pop('_other_records')
item = {
'name': other_record.name,
'data': other_record.data,
'type': other_record.type,
'extra': extra
}
record.extra['_other_records'].append(item)
return records
def _to_record_sub(self, data, zone, value):
extra = {}
ttl = data.get('rrset_ttl', None)
if ttl is not None:
extra['ttl'] = int(ttl)
if data['rrset_type'] == 'MX':
priority, value = value.split()
extra['priority'] = priority
return Record(
id='%s:%s' % (data['rrset_type'], data['rrset_name']),
name=data['rrset_name'],
type=self._string_to_record_type(data['rrset_type']),
data=value,
zone=zone,
driver=self,
ttl=ttl,
extra=extra)
def _to_records(self, data, zone):
records = []
for r in data:
records += self._to_record(r, zone)
return records
def _to_zone(self, zone):
extra = {}
if 'zone_uuid' in zone:
extra = {
'zone_uuid': zone['zone_uuid']
}
return Zone(
id=str(zone['fqdn']),
domain=zone['fqdn'],
type='master',
ttl=0,
driver=self,
extra=extra,
)
def _to_zones(self, zones):
ret = []
for z in zones:
ret.append(self._to_zone(z))
return ret
def _validate_record(self, record_id, name, record_type, data, extra):
if len(data) > 1024:
raise RecordError('Record data must be <= 1024 characters',
driver=self, record_id=record_id)
if type == 'MX' or type == RecordType.MX:
if extra is None or 'priority' not in extra:
raise RecordError('MX record must have a priority',
driver=self, record_id=record_id)
if extra is not None and '_other_records' in extra:
for other_value in extra.get('_other_records', []):
if len(other_value['data']) > 1024:
raise RecordError('Record data must be <= 1024 characters',
driver=self, record_id=record_id)
if type == 'MX' or type == RecordType.MX:
if (other_value['extra'] is None
or 'priority' not in other_value['extra']):
raise RecordError('MX record must have a priority',
driver=self, record_id=record_id)
if extra is not None and 'ttl' in extra:
if extra['ttl'] < TTL_MIN:
raise RecordError('TTL must be at least 300 seconds',
driver=self, record_id=record_id)
if extra['ttl'] > TTL_MAX:
raise RecordError('TTL must not exceed 30 days',
driver=self, record_id=record_id)
| true | true |
f7ffe96eddd38d835bb2151aa9593ac17d333095 | 728 | py | Python | visualization/train.py | norveclibalikci/MyDeepLearning | d72e3d70aa11c8d8b05f0f72744a9cafa30e808a | [
"MIT"
] | 2 | 2018-05-15T10:28:48.000Z | 2018-09-28T13:38:11.000Z | visualization/train.py | norveclibalikci/MyDeepLearning | d72e3d70aa11c8d8b05f0f72744a9cafa30e808a | [
"MIT"
] | null | null | null | visualization/train.py | norveclibalikci/MyDeepLearning | d72e3d70aa11c8d8b05f0f72744a9cafa30e808a | [
"MIT"
] | 1 | 2019-10-25T13:24:05.000Z | 2019-10-25T13:24:05.000Z | from model import *
from utils import *
from dataloader import *
from torch.autograd import Variable
import pandas as pd
import torchvision.datasets as dset
from torch.utils.data import Dataset, DataLoader
NUM_TRAIN = 1000
NUM_VAL = 128
mnist_train = dset.MNIST('../datasets/MNIST', train=True, transform=T.ToTensor())
loader_train = DataLoader(mnist_train, batch_size=batch_size,
sampler=ChunkSampler(NUM_TRAIN, 0))
mnist_val = dset.MNIST('./cs231n/datasets/MNIST_data', train=True, download=True,
transform=T.ToTensor())
loader_val = DataLoader(mnist_val, batch_size=batch_size,
sampler=ChunkSampler(NUM_VAL, NUM_TRAIN)) | 40.444444 | 91 | 0.68544 | from model import *
from utils import *
from dataloader import *
from torch.autograd import Variable
import pandas as pd
import torchvision.datasets as dset
from torch.utils.data import Dataset, DataLoader
NUM_TRAIN = 1000
NUM_VAL = 128
mnist_train = dset.MNIST('../datasets/MNIST', train=True, transform=T.ToTensor())
loader_train = DataLoader(mnist_train, batch_size=batch_size,
sampler=ChunkSampler(NUM_TRAIN, 0))
mnist_val = dset.MNIST('./cs231n/datasets/MNIST_data', train=True, download=True,
transform=T.ToTensor())
loader_val = DataLoader(mnist_val, batch_size=batch_size,
sampler=ChunkSampler(NUM_VAL, NUM_TRAIN)) | true | true |
f7ffe9946f6207a94db033e357cc28b56d519864 | 199 | py | Python | server/favorites/apps.py | tosmak16/favorite-things | bf8e464c26f2fd2b595265d4de8153a7f14ed16e | [
"MIT"
] | 1 | 2019-08-12T18:15:11.000Z | 2019-08-12T18:15:11.000Z | server/favorites/apps.py | tosmak16/favorite-things | bf8e464c26f2fd2b595265d4de8153a7f14ed16e | [
"MIT"
] | 14 | 2020-06-05T22:44:47.000Z | 2022-02-26T15:01:20.000Z | server/favorites/apps.py | tosmak16/favorite-things | bf8e464c26f2fd2b595265d4de8153a7f14ed16e | [
"MIT"
] | 1 | 2020-02-09T01:53:26.000Z | 2020-02-09T01:53:26.000Z | from django.apps import AppConfig
class FavoritesConfig(AppConfig):
name = 'favorites'
def ready(self):
# everytime server restarts
import favorites.signals # noqa: F401
| 18.090909 | 46 | 0.683417 | from django.apps import AppConfig
class FavoritesConfig(AppConfig):
name = 'favorites'
def ready(self):
import favorites.signals
| true | true |
f7ffea4a142cffd73d6b28e0bc91eba7626e48fb | 3,378 | py | Python | configs/ttfnet/ttfnet_d53_beta02_3lr_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | configs/ttfnet/ttfnet_d53_beta02_3lr_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | configs/ttfnet/ttfnet_d53_beta02_3lr_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | # model settings
model = dict(
type='TTFNet',
pretrained='./pretrain/darknet53.pth',
backbone=dict(
type='DarknetV3',
layers=[1, 2, 8, 8, 4],
inplanes=[3, 32, 64, 128, 256, 512],
planes=[32, 64, 128, 256, 512, 1024],
norm_cfg=dict(type='BN'),
out_indices=(1, 2, 3, 4),
frozen_stages=1,
norm_eval=False),
neck=None,
bbox_head=dict(
type='TTFHead',
inplanes=(128, 256, 512, 1024),
head_conv=128,
wh_conv=64,
hm_head_conv_num=2,
wh_head_conv_num=2,
num_classes=81,
wh_offset_base=16,
wh_agnostic=True,
wh_gaussian=True,
shortcut_cfg=(1, 2, 3),
norm_cfg=dict(type='BN'),
alpha=0.54,
beta=0.2,
hm_weight=1.,
wh_weight=5.))
cudnn_benchmark = True
# training and testing settings
train_cfg = dict(
vis_every_n_iters=100,
debug=False)
test_cfg = dict(
score_thr=0.01,
max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(512, 512), keep_ratio=False),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(512, 512),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=False),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=12,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.003, momentum=0.9, weight_decay=0.0004,
paramwise_options=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 5,
step=[9, 11])
checkpoint_config = dict(save_every_n_steps=200, max_to_keep=1, keep_every_n_epochs=9)
# yapf:disable
log_config = dict(interval=20)
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'ttfnet53_beta02_3lr_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| 30.432432 | 86 | 0.625518 |
model = dict(
type='TTFNet',
pretrained='./pretrain/darknet53.pth',
backbone=dict(
type='DarknetV3',
layers=[1, 2, 8, 8, 4],
inplanes=[3, 32, 64, 128, 256, 512],
planes=[32, 64, 128, 256, 512, 1024],
norm_cfg=dict(type='BN'),
out_indices=(1, 2, 3, 4),
frozen_stages=1,
norm_eval=False),
neck=None,
bbox_head=dict(
type='TTFHead',
inplanes=(128, 256, 512, 1024),
head_conv=128,
wh_conv=64,
hm_head_conv_num=2,
wh_head_conv_num=2,
num_classes=81,
wh_offset_base=16,
wh_agnostic=True,
wh_gaussian=True,
shortcut_cfg=(1, 2, 3),
norm_cfg=dict(type='BN'),
alpha=0.54,
beta=0.2,
hm_weight=1.,
wh_weight=5.))
cudnn_benchmark = True
train_cfg = dict(
vis_every_n_iters=100,
debug=False)
test_cfg = dict(
score_thr=0.01,
max_per_img=100)
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(512, 512), keep_ratio=False),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(512, 512),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=False),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=12,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
optimizer = dict(type='SGD', lr=0.003, momentum=0.9, weight_decay=0.0004,
paramwise_options=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 5,
step=[9, 11])
checkpoint_config = dict(save_every_n_steps=200, max_to_keep=1, keep_every_n_epochs=9)
log_config = dict(interval=20)
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'ttfnet53_beta02_3lr_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| true | true |
f7ffead94838ee66fcacafa656afbc8bfe584c7b | 626 | py | Python | intouch/queryset_csv/views.py | intouch-smartwater/queryset-csv | ca390f677ff206609c6e559b8d2fdaef5a1f13dd | [
"MIT"
] | 1 | 2019-04-25T13:38:15.000Z | 2019-04-25T13:38:15.000Z | intouch/queryset_csv/views.py | intouch-smartwater/queryset-csv | ca390f677ff206609c6e559b8d2fdaef5a1f13dd | [
"MIT"
] | 1 | 2019-04-25T08:54:56.000Z | 2019-04-26T08:45:37.000Z | intouch/queryset_csv/views.py | intouch-smartwater/queryset-csv | ca390f677ff206609c6e559b8d2fdaef5a1f13dd | [
"MIT"
] | 1 | 2020-01-13T09:37:39.000Z | 2020-01-13T09:37:39.000Z | from django.apps.registry import apps
from django.views.generic.base import View
from intouch.queryset_csv.shortcuts import queryset_as_csv_response
from django.core.exceptions import PermissionDenied
class ModelAsCsvView(View):
    """Stream every row of an arbitrary model as a CSV response."""

    def get(self, request, app, model):
        # Resolve the model class from the (app label, model name) URL parts.
        model_cls = apps.get_model(app, model)
        if not self.has_export_permission(request.user, model_cls):
            raise PermissionDenied
        return queryset_as_csv_response(model_cls.objects.all(), is_stream=True)

    def has_export_permission(self, user, Model):
        # TODO check for permission
        return True
| 31.3 | 81 | 0.710863 | from django.apps.registry import apps
from django.views.generic.base import View
from intouch.queryset_csv.shortcuts import queryset_as_csv_response
from django.core.exceptions import PermissionDenied
class ModelAsCsvView(View):
def get(self, request, app, model):
Model = apps.get_model(app, model)
if self.has_export_permission(request.user, Model):
return queryset_as_csv_response(Model.objects.all(), is_stream=True)
else:
raise PermissionDenied
def has_export_permission(self, user, Model):
return True
| true | true |
f7ffeaea9f8cebf4897d90d9cab71ff4dae91d81 | 127 | py | Python | copyxnat/__init__.py | tomdoel/copyxnat | 9fcce0ef584151feefbfc7b5c259f6a7bcd7b5f3 | [
"MIT"
] | null | null | null | copyxnat/__init__.py | tomdoel/copyxnat | 9fcce0ef584151feefbfc7b5c259f6a7bcd7b5f3 | [
"MIT"
] | 1 | 2021-02-22T14:13:21.000Z | 2021-02-23T22:27:29.000Z | copyxnat/__init__.py | tomdoel/copyxnat | 9fcce0ef584151feefbfc7b5c259f6a7bcd7b5f3 | [
"MIT"
] | null | null | null | # coding=utf-8
"""copyxnat"""
from copyxnat.utils.versioning import get_version
__version__ = get_version()
del get_version
| 14.111111 | 49 | 0.771654 |
from copyxnat.utils.versioning import get_version
__version__ = get_version()
del get_version
| true | true |
f7ffebeac00b00d4280016ea33be60bd01035b11 | 463 | py | Python | week1/ex7.py | bkwin66/pyclass | e3c8359a7f7461627b70c3f3819df3f6405e5dd6 | [
"Apache-2.0"
] | null | null | null | week1/ex7.py | bkwin66/pyclass | e3c8359a7f7461627b70c3f3819df3f6405e5dd6 | [
"Apache-2.0"
] | null | null | null | week1/ex7.py | bkwin66/pyclass | e3c8359a7f7461627b70c3f3819df3f6405e5dd6 | [
"Apache-2.0"
] | null | null | null | #Brian windle
import yaml
import json
from pprint import pprint
def output_format(my_list, my_str):
    """Pretty-print *my_list* beneath a small '###' banner labelled *my_str*."""
    # print() call form works identically on Python 2 and 3 for single
    # arguments; the original 'print x' statements were Python-2-only.
    print('\n\n')
    print('#' * 3)
    print('#' * 3 + my_str)
    print('#' * 3)
    pprint(my_list)
yaml_file = 'myyaml.yml'
json_file = 'myjson.json'

# safe_load parses plain YAML data without instantiating arbitrary Python
# objects; bare yaml.load(f) (no Loader) can execute constructors embedded in
# the input and is deprecated in PyYAML.
with open(yaml_file) as f:
    yaml_list = yaml.safe_load(f)

with open(json_file) as f:
    json_list = json.load(f)

output_format(yaml_list, ' YAML')
output_format(json_list, ' JSON')
print('\n')
| 15.965517 | 37 | 0.641469 |
import yaml
import json
from pprint import pprint
def output_format(my_list, my_str):
print '\n\n'
print '#' * 3
print '#' * 3 + my_str
print '#' * 3
pprint(my_list)
yaml_file = 'myyaml.yml'
json_file = 'myjson.json'
with open(yaml_file) as f:
yaml_list = yaml.load(f)
with open(json_file) as f:
json_list = json.load(f)
output_format(yaml_list, ' YAML')
output_format(json_list, ' JSON')
print '\n'
| false | true |
f7fff00def5778f70d4d972ab111d5fd66951d71 | 467 | py | Python | Languages/Python/power_in_logn.py | Nandini2901/Hacktoberfest-1 | ac5eff7c8678f3ce00041bdba20c63c416dac690 | [
"MIT"
] | 1 | 2021-07-14T19:04:48.000Z | 2021-07-14T19:04:48.000Z | Languages/Python/power_in_logn.py | Nandini2901/Hacktoberfest-1 | ac5eff7c8678f3ce00041bdba20c63c416dac690 | [
"MIT"
] | 1 | 2020-10-13T06:01:45.000Z | 2020-10-13T06:01:45.000Z | Languages/Python/power_in_logn.py | Nandini2901/Hacktoberfest-1 | ac5eff7c8678f3ce00041bdba20c63c416dac690 | [
"MIT"
def power(x, y):
    """Compute x**y by binary (square-and-multiply) exponentiation in O(log |y|)."""
    negative = y < 0
    exponent = -y if negative else y
    result = 1
    base = x
    while exponent:
        if exponent & 1:
            result = result * base
        exponent >>= 1
        base = base * base
    # A negative exponent means the reciprocal of the positive-power result.
    return 1 / result if negative else result


print("Power is ", power(2, 0))
print("Power is ", power(2, 1))
print("Power is ", power(2, 2))
print("Power is ", power(2, 5))
print("Power is ", power(2, -3))
| 16.103448 | 32 | 0.372591 | def power(x, y):
res = 1
sign = y<0
if sign:
y = y* -1
while (y > 0):
if ((y & 1) == 1) :
res = res * x
y = y >> 1
x = x * x
if sign:
return 1/res
return res
print("Power is ",power(2,0))
print("Power is ",power(2,1))
print("Power is ",power(2,2))
print("Power is ",power(2,5))
print("Power is ",power(2,-3))
| true | true |
f7fff12c9de7d965576542633e24101a52624955 | 16,310 | py | Python | parlai/tasks/blended_skill_talk/agents.py | erekgit/parlai | 5d11848316b0b4fe7bf28ca19b215a37004ec668 | [
"MIT"
] | 41 | 2019-06-07T17:36:10.000Z | 2021-11-16T06:26:16.000Z | parlai/tasks/blended_skill_talk/agents.py | erekgit/parlai | 5d11848316b0b4fe7bf28ca19b215a37004ec668 | [
"MIT"
] | null | null | null | parlai/tasks/blended_skill_talk/agents.py | erekgit/parlai | 5d11848316b0b4fe7bf28ca19b215a37004ec668 | [
"MIT"
] | 11 | 2019-06-06T01:19:08.000Z | 2020-07-23T07:34:56.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import ast
import copy
import json
import os
import re
from collections import defaultdict
from typing import Dict, List, Optional, Tuple
from tqdm import tqdm
from parlai.core.opt import Opt
from parlai.core.teachers import (
ParlAIDialogTeacher,
create_task_agent_from_taskname,
MultiTaskTeacher,
)
from parlai.tasks.convai2.agents import DefaultTeacher as Convai2DefaultTeacher
from parlai.tasks.empathetic_dialogues.agents import EmpatheticDialoguesTeacher
from parlai.tasks.wizard_of_wikipedia.agents import WizardDialogKnowledgeTeacher
from parlai.utils.misc import warn_once
from .build import build
##################################################
#### Teacher for the BlendedSkillTalk Dataset ####
##################################################
def raw_data_path(opt: Opt) -> str:
    """Return the path of the raw BlendedSkillTalk json for the current datatype."""
    build(opt)  # make sure the dataset files exist locally
    datatype = opt['datatype'].split(':')[0]
    filename = datatype + '.json'
    return os.path.join(opt['datapath'], 'blended_skill_talk', filename)
def _processed_data_path(opt: Opt) -> str:
    """Return the path of the processed ParlAI-format .txt for the current datatype."""
    build(opt)  # make sure the dataset files exist locally
    datatype = opt['datatype'].split(':')[0]
    filename = datatype + '.txt'
    return os.path.join(opt['datapath'], 'blended_skill_talk', filename)
def _cached_data_path(opt: Opt, experiencer_side_only: bool) -> str:
    """Return the cache path for ED data with personas/topics already attached."""
    build(opt)  # make sure the dataset files exist locally
    datatype = opt['datatype'].split(':')[0]
    side = 'experiencer_only' if experiencer_side_only else 'both_sides'
    filename = f'ed_persona_topicifier__{datatype}__{side}.json'
    return os.path.join(opt['datapath'], 'blended_skill_talk', filename)
def safe_personas_path(opt: Opt) -> str:
    """Return the path of the vetted ("safe") personas list."""
    build(opt)  # make sure the dataset files exist locally
    task_dir = os.path.join(opt['datapath'], 'blended_skill_talk')
    return os.path.join(task_dir, 'safe_personas.txt')
class BlendedSkillTalkTeacher(ParlAIDialogTeacher):
    """
    Dialog teacher for BlendedSkillTalk, reading the processed ParlAI-format
    .txt file produced by the build step.
    """

    def __init__(self, opt, shared=None):
        opt = copy.deepcopy(opt)  # don't mutate the caller's opt
        opt['parlaidialogteacher_datafile'] = _processed_data_path(opt)
        super().__init__(opt, shared)
class InteractiveTeacher(BlendedSkillTalkTeacher):
    # Dummy subclass used only to hang command-line arguments for the
    # interactive world on; behavior is identical to the parent teacher.
    pass
class SelfchatTeacher(BlendedSkillTalkTeacher):
    # Dummy subclass used only to hang command-line arguments for the
    # interactive world on; behavior is identical to the parent teacher.
    pass
class DefaultTeacher(BlendedSkillTalkTeacher):
    # Default alias for the plain BlendedSkillTalk teacher.
    pass
def create_agents(opt):
    """Create the task agents, or none when running as an interactive task."""
    if opt.get('interactive_task', False):
        # Interactive tasks have no task agents (they are attached as user agents).
        return []
    return create_task_agent_from_taskname(opt)
################################################################################
## Teachers for adding ConvAI2 personas and WoW topics to existing datasets ##
################################################################################
class ConvAI2PersonaTopicifierTeacher(Convai2DefaultTeacher):
    """
    Adds WoW topics to ConvAI2 data.

    ConvAI2 contexts already carry persona lines, so only a topic string is
    added to each episode's first example.
    """

    def __init__(self, opt, shared=None):
        if 'stream' in opt['datatype']:
            warn_once('Warning: this teacher is not compatible with StreamDialogData!')
            # StreamDialogData works by reading directly from a text file without any
            # alteration, but this teacher must append a WoW topic string to the context
            # of the first example of each episode.
            assert opt['datatype'].endswith(':stream')
            opt['datatype'] = opt['datatype'][: -len(':stream')]  # strip ':stream'
        self.persona_topicifier = PersonaTopicifier(
            opt=opt, should_have_personas=True, should_have_topics=False
        )
        super().__init__(opt, shared=shared)

    def get(self, episode_idx, entry_idx=None):
        gotten = super().get(episode_idx, entry_idx=entry_idx)
        if entry_idx == 0:
            # Only the first turn of an episode carries the context to modify.
            modified_text = self.persona_topicifier.get_modified_text(gotten['text'])
            gotten['text'] = modified_text
        return gotten
class WoWPersonaTopicifierTeacher(WizardDialogKnowledgeTeacher):
    """
    Adds personas to WoW data.

    WoW contexts already carry a topic line, so only ConvAI2-style persona
    strings are added to each episode's first example.
    """

    def __init__(self, opt, shared=None):
        self.persona_topicifier = PersonaTopicifier(
            opt=opt, should_have_personas=False, should_have_topics=True
        )
        super().__init__(opt, shared=shared)

    def get(self, episode_idx, entry_idx=None):
        gotten = super().get(episode_idx, entry_idx=entry_idx)
        if entry_idx == 0:
            # Only the first turn of an episode carries the context to modify.
            modified_text = self.persona_topicifier.get_modified_text(gotten['text'])
            gotten['text'] = modified_text
        return gotten
class EDPersonaTopicifierTeacher(EmpatheticDialoguesTeacher):
    """
    Adds persona and WoW topic to ED context strings.
    """

    # Default for --recompile-persona-topic-data: load the cached file rather
    # than re-deriving personas/topics, which is slow (see __init__).
    RECOMPILE_DEFAULT = False

    @classmethod
    def add_cmdline_args(cls, argparser):
        EmpatheticDialoguesTeacher.add_cmdline_args(argparser)
        agent = argparser.add_argument_group('EDPersonaTopicifierTeacher arguments')
        agent.add_argument(
            '--recompile-persona-topic-data',
            type='bool',
            default=cls.RECOMPILE_DEFAULT,
            help='Re-compile data with ConvAI2 personas and WoW topics added. Only useful for demonstrating how data was produced.',
        )

    def __init__(self, opt, shared=None):
        # ED contexts carry neither personas nor topics yet; both get added.
        self.persona_topicifier = PersonaTopicifier(
            opt=opt, should_have_personas=False, should_have_topics=False
        )
        super().__init__(opt, shared=shared)

        # These upstream ED options are not supported by this teacher.
        if (
            self.remove_political_convos is True
            or self.opt.get('deepmoji') is not None
            or self.opt.get('fasttextloc') is not None
            or self.opt.get('prepend', -1) > 0
        ):
            raise NotImplementedError(
                'Removing political conversations or using deepmoji, fasttextloc, or '
                'prepend not supported with this teacher.'
            )

        # Running over all examples is really slow because the process of finding a WoW
        # topic is expensive, so let's load cached data with personas and topics unless
        # --recompile-persona-topic-data is True
        if opt.get('recompile_persona_topic_data', self.RECOMPILE_DEFAULT):
            self.data_path = (
                _cached_data_path(
                    opt=self.opt, experiencer_side_only=self.experiencer_side_only
                )
                + '.recompiled'
            )
            warn_once(f'Compiling data file for {self.data_path}.')
            self.persona_topic_data = self._compile_data()
            warn_once(f'Saving data to {self.data_path}.')
            with open(self.data_path, 'w') as f_write:
                json.dump(self.persona_topic_data, f_write)
        else:
            self.data_path = _cached_data_path(
                opt=self.opt, experiencer_side_only=self.experiencer_side_only
            )
            warn_once(f'Loading cached data from {self.data_path}.')
            with open(self.data_path, 'r') as f_read:
                self.persona_topic_data = json.load(f_read)

    def _compile_data(self) -> List[List[dict]]:
        """
        Compile data to be saved for faster future use.

        Returns one list of example dicts per episode.
        """
        warn_once(f'Starting to compile {self.num_episodes():d} episodes.')
        all_data = []
        for episode_idx in tqdm(range(self.num_episodes())):
            episode_data = []
            entry_idx = 0
            while True:
                example_data = self._get_example(
                    episode_idx=episode_idx, entry_idx=entry_idx
                )
                episode_data.append(example_data)
                # 'episode_done' marks the last example of the episode.
                if example_data['episode_done']:
                    all_data.append(episode_data)
                    break
                else:
                    entry_idx += 1
        return all_data

    def _get_example(self, episode_idx: int, entry_idx: Optional[int] = None):
        """
        Get example from the base ED teacher and add persona and WoW topic strings.
        """
        gotten = super().get(episode_idx, entry_idx=entry_idx)
        if entry_idx == 0:
            # Only the first turn of an episode carries the context to modify.
            modified_text = self.persona_topicifier.get_modified_text(gotten['text'])
            gotten['text'] = modified_text
        return gotten

    def get(self, episode_idx: int, entry_idx: Optional[int] = None) -> dict:
        """
        Get example from the final data with personas and WoW topic strings.
        """
        if entry_idx is None:
            entry_idx = 0
        return self.persona_topic_data[episode_idx][entry_idx]
class PersonaTopicifier:
    """
    Attaches ConvAI2-style persona strings and/or WoW topic strings to a
    context utterance, so that contexts from different datasets all follow
    the <persona> \\n <topic> \\n <utterance> layout.
    """

    def __init__(
        self,
        opt: Opt,
        should_have_personas: bool = False,
        should_have_topics: bool = False,
        no_persona_is_error: bool = False,
    ):
        """
        :param should_have_personas: input contexts are expected to already
            contain persona lines (e.g. ConvAI2).
        :param should_have_topics: input contexts are expected to already
            contain a WoW topic line (e.g. Wizard of Wikipedia).
        :param no_persona_is_error: raise if no persona is found for an input
            WoW topic, instead of falling back to the first persona.
        """
        self.datapath = opt['datapath']
        self.utterance_to_persona_map = {}  # cache: utterance -> chosen persona
        self.should_have_personas = should_have_personas
        self.should_have_topics = should_have_topics
        self.no_persona_is_error = no_persona_is_error

        # Make sure the mapping files exist locally.
        build(opt)

        # Bidirectional maps between WoW topics and persona strings.
        (
            self.wow_topics_to_persona_strings_map,
            self.persona_strings_to_wow_topics_map,
        ) = self._setup_personas_to_wow_topics()
        self.personas_file_path = os.path.join(
            self.datapath, 'blended_skill_talk', 'persona_list.txt'
        )
        with open(self.personas_file_path, 'r') as f:
            self.personas = f.read().strip().split('||')
        # There's an extra line at the end of the file which is ''
        self.personas = [p for p in self.personas if p]

    def _setup_personas_to_wow_topics(
        self,
    ) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
        """
        Parse topic_to_persona_list.txt into two maps and return them as
        (topic -> persona strings, persona string -> topics).

        NOTE: previously annotated as returning a single dict, but two maps
        are returned.
        """
        topic_to_persona_path = os.path.join(
            self.datapath, 'blended_skill_talk', 'topic_to_persona_list.txt'
        )
        persona_strings_to_topics = defaultdict(list)
        topics_to_persona_strings = defaultdict(list)
        with open(topic_to_persona_path, 'r') as f:
            for line in f:
                match = re.fullmatch(r'([^[]+): (\[.+\])\n', line)
                topic = match.group(1)
                # literal_eval safely parses the "[...]" list literal; unlike
                # eval(), it cannot execute arbitrary code from the data file.
                persona_strings = ast.literal_eval(match.group(2))
                assert isinstance(persona_strings, list)
                topics_to_persona_strings[topic] = persona_strings
                for str_ in persona_strings:
                    persona_strings_to_topics[str_].append(topic)
        warn_once(
            f'FINISHED MAPPING personas to topics, got: {len(list(persona_strings_to_topics.keys()))} persona strings to map to topics.'
        )
        return topics_to_persona_strings, persona_strings_to_topics

    def __calculate_word_overlap(self, a, b):
        """
        Very rudimentary way to calculate word overlap.

        Counts substring hits of each >=5-character token of one string
        inside the other, in both directions.
        """
        score = 0
        tokens_a = a.split(' ')
        tokens_a = [ta for ta in tokens_a if len(ta) >= 5]
        for ta in tokens_a:
            if ta in b:
                score += 1
        tokens_b = b.split(' ')
        tokens_b = [tb for tb in tokens_b if len(tb) >= 5]
        for tb in tokens_b:
            if tb in a:
                score += 1
        return score

    def __choose_persona_from_text(self, utt):
        """Pick (and cache) the persona with the highest word overlap with *utt*."""
        utt = utt.strip()
        if utt not in self.utterance_to_persona_map:
            best_word_overlap = 0
            best_persona = None
            for p in self.personas:
                word_overlap = self.__calculate_word_overlap(utt, p)
                if word_overlap >= best_word_overlap:
                    best_word_overlap = word_overlap
                    best_persona = p
            if not best_persona:
                raise Exception(
                    f'No persona found for utterance: \"{utt}\". This should not happen.'
                )
            self.utterance_to_persona_map[utt] = best_persona
            # Should have a \n at the end of it already
            return best_persona
        return self.utterance_to_persona_map[utt]

    def __choose_persona_from_topic(self, topic):
        """Return the first full persona containing a persona string for *topic*."""
        topic = topic.strip()
        persona_strings = self.wow_topics_to_persona_strings_map[topic]
        for p in persona_strings:
            for persona in self.personas:
                if p in persona:
                    return persona
        if self.no_persona_is_error:
            raise ValueError(f'ERROR: Found no persona for topic: {topic}.')
        else:
            warn_once(f'Found no persona for topic: {topic}. Returning first persona.')
            return self.personas[0]

    def __choose_topic(self, persona):
        """Return a topic string (with trailing newline) matched to *persona*."""
        # First try an exact lookup of each persona line.
        persona_lines = persona.strip().split('\n')
        for p in persona_lines:
            p_str = p.replace('your persona:', '')
            p_str = p_str.strip()
            if p_str in self.persona_strings_to_wow_topics_map:
                topics = self.persona_strings_to_wow_topics_map[p_str]
                topic = topics[0] + '\n'
                return topic
        # Fall back to a fuzzy match on long (>6 char) words; if nothing
        # matches, the final return uses the last mapping entry iterated.
        for utt, topics in self.persona_strings_to_wow_topics_map.items():
            utt_words = utt.split()
            utt_words_long = [utt for utt in utt_words if len(utt) > 6]
            for long_utt in utt_words_long:
                if long_utt in persona:
                    return topics[0] + '\n'
        return topics[0] + '\n'

    def get_modified_text(self, text):
        """
        Return *text* rewritten as <persona> \\n <topic> \\n <utterance>,
        deriving whichever of persona/topic the context is missing.

        Should be used for entry_idx == 0 only (all first utterances only).
        """
        has_neither = not self.should_have_personas and not self.should_have_topics
        has_wow_topic_only = not self.should_have_personas and self.should_have_topics
        has_persona_only = not self.should_have_topics and self.should_have_personas
        if (self.should_have_personas and (has_neither or has_wow_topic_only)) or (
            self.should_have_topics and (has_neither or has_persona_only)
        ):
            raise Exception(
                f'Malformed text: {text}, should_have_personas: {self.should_have_personas}, should_have_topics: {self.should_have_topics}, has_neither: {has_neither}, has_wow_topic_only: {has_wow_topic_only}, has_persona_only: {has_persona_only}'
            )
        if has_neither:
            # Will occur with ED
            persona = self.__choose_persona_from_text(text)
            topic = self.__choose_topic(persona)
            utt = text
        elif has_wow_topic_only:
            # Will occur with Wizard
            parts = text.strip().split('\n')
            if len(parts) > 1:
                topic = parts[0] + '\n'
                utt = parts[1]
                persona = self.__choose_persona_from_topic(topic)
            else:
                # Only has a topic, no utterance
                topic = parts[0] + '\n'
                utt = ''
                persona = self.__choose_persona_from_topic(topic)
        elif has_persona_only:
            # Will occur with Convai2
            lines = text.strip().split('\n')
            utt = lines[-1]
            persona = ''.join(l + '\n' for l in lines[:-1])
            topic = self.__choose_topic(persona)
        else:
            raise Exception(f'Unknown structure of utterance: {text}')
        modified_utterance = persona + topic + utt
        return modified_utterance
class AllTeacher(MultiTaskTeacher):
    """
    Multitask teacher that combines all "Persona Topicifier" teachers.
    """

    def __init__(self, opt, shared=None):
        topicifier_tasks = [
            'blended_skill_talk:ConvAI2PersonaTopicifier',  # ConvAI2
            'blended_skill_talk:EDPersonaTopicifier',  # Empathetic Dialogues
            'blended_skill_talk:WoWPersonaTopicifier',  # Wizard of Wikipedia
            'blended_skill_talk:BlendedSkillTalk',  # Blended Skill Talk
        ]
        opt = copy.deepcopy(opt)  # don't mutate the caller's opt
        opt['task'] = ','.join(topicifier_tasks)
        super().__init__(opt, shared)
| 38.018648 | 247 | 0.614715 |
import copy
import json
import os
import re
from collections import defaultdict
from typing import List, Optional, Dict
from tqdm import tqdm
from parlai.core.opt import Opt
from parlai.core.teachers import (
ParlAIDialogTeacher,
create_task_agent_from_taskname,
MultiTaskTeacher,
)
from parlai.tasks.convai2.agents import DefaultTeacher as Convai2DefaultTeacher
from parlai.tasks.empathetic_dialogues.agents import EmpatheticDialoguesTeacher
from parlai.tasks.wizard_of_wikipedia.agents import WizardDialogKnowledgeTeacher
from parlai.utils.misc import warn_once
from .build import build
core += 1
return score
def __choose_persona_from_text(self, utt):
utt = utt.strip()
if utt not in self.utterance_to_persona_map:
best_word_overlap = 0
best_persona = None
for p in self.personas:
word_overlap = self.__calculate_word_overlap(utt, p)
if word_overlap >= best_word_overlap:
best_word_overlap = word_overlap
best_persona = p
if not best_persona:
raise Exception(
f'No persona found for utterance: \"{utt}\". This should not happen.'
)
self.utterance_to_persona_map[utt] = best_persona
return best_persona
return self.utterance_to_persona_map[utt]
def __choose_persona_from_topic(self, topic):
topic = topic.strip()
persona_strings = self.wow_topics_to_persona_strings_map[topic]
for p in persona_strings:
for persona in self.personas:
if p in persona:
return persona
if self.no_persona_is_error:
raise ValueError(f'ERROR: Found no persona for topic: {topic}.')
else:
warn_once(f'Found no persona for topic: {topic}. Returning first persona.')
return self.personas[0]
def __choose_topic(self, persona):
persona_lines = persona.strip().split('\n')
for p in persona_lines:
p_str = p.replace('your persona:', '')
p_str = p_str.strip()
if p_str in self.persona_strings_to_wow_topics_map:
topics = self.persona_strings_to_wow_topics_map[p_str]
topic = topics[0] + '\n'
return topic
for utt, topics in self.persona_strings_to_wow_topics_map.items():
utt_words = utt.split()
utt_words_long = [utt for utt in utt_words if len(utt) > 6]
for long_utt in utt_words_long:
if long_utt in persona:
return topics[0] + '\n'
return topics[0] + '\n'
def get_modified_text(self, text):
has_neither = not self.should_have_personas and not self.should_have_topics
has_wow_topic_only = not self.should_have_personas and self.should_have_topics
has_persona_only = not self.should_have_topics and self.should_have_personas
if (self.should_have_personas and (has_neither or has_wow_topic_only)) or (
self.should_have_topics and (has_neither or has_persona_only)
):
raise Exception(
f'Malformed text: {text}, should_have_personas: {self.should_have_personas}, should_have_topics: {self.should_have_topics}, has_neither: {has_neither}, has_wow_topic_only: {has_wow_topic_only}, has_persona_only: {has_persona_only}'
)
if has_neither:
persona = self.__choose_persona_from_text(text)
topic = self.__choose_topic(persona)
utt = text
elif has_wow_topic_only:
parts = text.strip().split('\n')
if len(parts) > 1:
topic = parts[0] + '\n'
utt = parts[1]
persona = self.__choose_persona_from_topic(topic)
else:
topic = parts[0] + '\n'
utt = ''
persona = self.__choose_persona_from_topic(topic)
elif has_persona_only:
lines = text.strip().split('\n')
utt = lines[-1]
persona = ''.join(l + '\n' for l in lines[:-1])
topic = self.__choose_topic(persona)
else:
raise Exception(f'Unknown structure of utterance: {text}')
modified_utterance = persona + topic + utt
return modified_utterance
class AllTeacher(MultiTaskTeacher):
def __init__(self, opt, shared=None):
topicifier_tasks = [
'blended_skill_talk:ConvAI2PersonaTopicifier',
'blended_skill_talk:EDPersonaTopicifier',
'blended_skill_talk:WoWPersonaTopicifier',
'blended_skill_talk:BlendedSkillTalk',
]
opt = copy.deepcopy(opt)
opt['task'] = ','.join(topicifier_tasks)
super().__init__(opt, shared)
| true | true |
f7fff262cc16ee1944b9c7c01e1a4feef815e52e | 14,803 | py | Python | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_file_services_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-09T08:59:13.000Z | 2022-03-09T08:59:13.000Z | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_file_services_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/operations/_file_services_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-04T06:21:56.000Z | 2022-03-04T06:21:56.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
# JSON payload type and the callable signature accepted via the 'cls' kwarg.
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Module-level serializer shared by the request builders below; msrest
# client-side validation is disabled for it.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request for listing file services of a storage account."""
    api_version = "2019-04-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_set_service_properties_request(
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request for setting file service properties (e.g. CORS rules)."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    api_version = "2019-04-01"
    # The service exposes a single, fixed-name file service per account.
    file_services_name = "default"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
        "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="PUT",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
def build_get_service_properties_request(
    resource_group_name: str,
    account_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request for fetching file service properties."""
    api_version = "2019-04-01"
    # The service exposes a single, fixed-name file service per account.
    file_services_name = "default"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
        "FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
class FileServicesOperations(object):
"""FileServicesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client, configuration, and (de)serializers are injected by
        # the generated service client that instantiates this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs: Any
    ) -> "_models.FileServiceItems":
        """List all file services in storage accounts.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FileServiceItems, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2019_04_01.models.FileServiceItems
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FileServiceItems"]
        # Map auth/not-found/conflict status codes onto typed azure-core errors.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_list_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            template_url=self.list.metadata['url'],
        )
        # Adapt the request for the running pipeline (helper from .._vendor).
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('FileServiceItems', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices'}  # type: ignore
    @distributed_trace
    def set_service_properties(
        self,
        resource_group_name: str,
        account_name: str,
        parameters: "_models.FileServiceProperties",
        **kwargs: Any
    ) -> "_models.FileServiceProperties":
        """Sets the properties of file services in storage accounts, including CORS (Cross-Origin Resource
        Sharing) rules.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :param parameters: The properties of file services in storage accounts, including CORS
         (Cross-Origin Resource Sharing) rules.
        :type parameters: ~azure.mgmt.storage.v2019_04_01.models.FileServiceProperties
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FileServiceProperties, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2019_04_01.models.FileServiceProperties
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FileServiceProperties"]
        # Map auth/not-found/conflict status codes onto typed azure-core errors.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the model into the JSON request body.
        _json = self._serialize.body(parameters, 'FileServiceProperties')

        request = build_set_service_properties_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.set_service_properties.metadata['url'],
        )
        # Adapt the request for the running pipeline (helper from .._vendor).
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('FileServiceProperties', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'}  # type: ignore
@distributed_trace
def get_service_properties(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.FileServiceProperties":
"""Gets the properties of file services in storage accounts, including CORS (Cross-Origin Resource
Sharing) rules.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FileServiceProperties, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.FileServiceProperties
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FileServiceProperties"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_service_properties_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.get_service_properties.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FileServiceProperties', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'} # type: ignore
| 43.538235 | 221 | 0.694656 |
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-04-01"
accept = "application/json"
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
query_parameters = kwargs.pop("params", {})
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
header_parameters = kwargs.pop("headers", {})
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_set_service_properties_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None)
api_version = "2019-04-01"
file_services_name = "default"
accept = "application/json"
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
query_parameters = kwargs.pop("params", {})
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
header_parameters = kwargs.pop("headers", {})
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_service_properties_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2019-04-01"
file_services_name = "default"
accept = "application/json"
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"FileServicesName": _SERIALIZER.url("file_services_name", file_services_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
query_parameters = kwargs.pop("params", {})
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
header_parameters = kwargs.pop("headers", {})
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class FileServicesOperations(object):
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.FileServiceItems":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FileServiceItems', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices'}
@distributed_trace
def set_service_properties(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.FileServiceProperties",
**kwargs: Any
) -> "_models.FileServiceProperties":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json")
_json = self._serialize.body(parameters, 'FileServiceProperties')
request = build_set_service_properties_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.set_service_properties.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FileServiceProperties', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'}
@distributed_trace
def get_service_properties(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.FileServiceProperties":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_service_properties_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.get_service_properties.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FileServiceProperties', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/{FileServicesName}'}
| true | true |
f7fff6388d20cf11a663e38b2a0096c3fefada1f | 977 | py | Python | blog/migrations/0006_gin_indexes.py | simonw/simonwillisonblog | fd3170cd5473dbd6520d4e48c57923d2b894972d | [
"Apache-2.0"
] | 45 | 2017-10-01T23:21:22.000Z | 2022-03-31T08:20:46.000Z | blog/migrations/0006_gin_indexes.py | simonw/simonwillisonblog | fd3170cd5473dbd6520d4e48c57923d2b894972d | [
"Apache-2.0"
] | 109 | 2017-10-05T06:40:00.000Z | 2022-03-31T13:13:44.000Z | blog/migrations/0006_gin_indexes.py | simonw/simonwillisonblog | fd3170cd5473dbd6520d4e48c57923d2b894972d | [
"Apache-2.0"
] | 11 | 2017-10-17T15:16:26.000Z | 2022-02-20T07:22:32.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-01 01:48
import django.contrib.postgres.indexes
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("blog", "0005_search_document"),
]
operations = [
migrations.AddIndex(
model_name="blogmark",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_blogma_search__45eeb9_gin"
),
),
migrations.AddIndex(
model_name="quotation",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_quotat_search__aa2d47_gin"
),
),
migrations.AddIndex(
model_name="entry",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_entry_search__d62c3b_gin"
),
),
]
| 27.914286 | 81 | 0.592631 |
import django.contrib.postgres.indexes
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("blog", "0005_search_document"),
]
operations = [
migrations.AddIndex(
model_name="blogmark",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_blogma_search__45eeb9_gin"
),
),
migrations.AddIndex(
model_name="quotation",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_quotat_search__aa2d47_gin"
),
),
migrations.AddIndex(
model_name="entry",
index=django.contrib.postgres.indexes.GinIndex(
fields=["search_document"], name="blog_entry_search__d62c3b_gin"
),
),
]
| true | true |
f7fff664ca3755ab704f3cd271d7ec016e3ca1e2 | 483 | py | Python | dpybot/cogs/samplecog/converters.py | jack1142/DpyBot | 6be031c4556de8fb570ad99aac375580f04f947c | [
"Apache-2.0"
] | null | null | null | dpybot/cogs/samplecog/converters.py | jack1142/DpyBot | 6be031c4556de8fb570ad99aac375580f04f947c | [
"Apache-2.0"
] | null | null | null | dpybot/cogs/samplecog/converters.py | jack1142/DpyBot | 6be031c4556de8fb570ad99aac375580f04f947c | [
"Apache-2.0"
] | null | null | null | from discord.ext import commands
from .enums import Color
class SpecialColor(commands.Converter):
async def convert(self, ctx: commands.Context, arg: str) -> Color:
try:
number = int(arg)
except ValueError:
raise commands.BadArgument(f"`{arg}` is not a valid number!")
try:
return Color(int(number))
except ValueError:
raise commands.BadArgument(f"We don't have a color with number `{number}`.")
| 30.1875 | 88 | 0.625259 | from discord.ext import commands
from .enums import Color
class SpecialColor(commands.Converter):
async def convert(self, ctx: commands.Context, arg: str) -> Color:
try:
number = int(arg)
except ValueError:
raise commands.BadArgument(f"`{arg}` is not a valid number!")
try:
return Color(int(number))
except ValueError:
raise commands.BadArgument(f"We don't have a color with number `{number}`.")
| true | true |
f7fff89b764c635cc196776558a821e6aa38bdad | 824 | py | Python | plyer/platforms/linux/uniqueid.py | malverick/noti_improve | c8b379285e0de041f2133ec4d22b0affdf4a993e | [
"MIT"
] | 3 | 2020-07-17T16:23:14.000Z | 2021-05-25T21:00:49.000Z | plyer/platforms/linux/uniqueid.py | malverick/noti_improve | c8b379285e0de041f2133ec4d22b0affdf4a993e | [
"MIT"
] | 1 | 2021-05-25T20:54:22.000Z | 2021-05-26T20:22:30.000Z | plyer/platforms/linux/uniqueid.py | malverick/noti_improve | c8b379285e0de041f2133ec4d22b0affdf4a993e | [
"MIT"
] | 1 | 2019-05-17T09:45:00.000Z | 2019-05-17T09:45:00.000Z | from subprocess import Popen, PIPE
from plyer.facades import UniqueID
from plyer.utils import whereis_exe
from os import environ
class LinuxUniqueID(UniqueID):
def _get_uid(self):
old_lang = environ.get('LANG')
environ['LANG'] = 'C'
lshw_process = Popen(["lshw", "-quiet"], stdout=PIPE, stderr=PIPE)
grep_process = Popen(["grep", "-m1", "serial:"],
stdin=lshw_process.stdout, stdout=PIPE)
lshw_process.stdout.close()
output = grep_process.communicate()[0]
environ['LANG'] = old_lang
if output:
return output.split()[1]
else:
return None
def instance():
import sys
if whereis_exe('lshw'):
return LinuxUniqueID()
sys.stderr.write("lshw not found.")
return UniqueID()
| 26.580645 | 74 | 0.606796 | from subprocess import Popen, PIPE
from plyer.facades import UniqueID
from plyer.utils import whereis_exe
from os import environ
class LinuxUniqueID(UniqueID):
def _get_uid(self):
old_lang = environ.get('LANG')
environ['LANG'] = 'C'
lshw_process = Popen(["lshw", "-quiet"], stdout=PIPE, stderr=PIPE)
grep_process = Popen(["grep", "-m1", "serial:"],
stdin=lshw_process.stdout, stdout=PIPE)
lshw_process.stdout.close()
output = grep_process.communicate()[0]
environ['LANG'] = old_lang
if output:
return output.split()[1]
else:
return None
def instance():
import sys
if whereis_exe('lshw'):
return LinuxUniqueID()
sys.stderr.write("lshw not found.")
return UniqueID()
| true | true |
f7fffa531ddbbd10451622d9cb24b825c131564b | 662 | py | Python | setup.py | sapcc/osc-ccloud-server-group-member-changes | e9be3ecc963deb7e76c318da56af8fc2d45c398f | [
"Apache-2.0"
] | null | null | null | setup.py | sapcc/osc-ccloud-server-group-member-changes | e9be3ecc963deb7e76c318da56af8fc2d45c398f | [
"Apache-2.0"
] | null | null | null | setup.py | sapcc/osc-ccloud-server-group-member-changes | e9be3ecc963deb7e76c318da56af8fc2d45c398f | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021 SAP SE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
| 31.52381 | 69 | 0.744713 |
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
| true | true |
f7fffaca3ceb9b4063aef2c03e704c87cfe9e3c2 | 223 | py | Python | Chapter10/Ch10/server/create_database.py | henrryyanez/Tkinter-GUI-Programming-by-Example | c8a326d6034b5e54f77605a8ec840cb8fac89412 | [
"MIT"
] | 127 | 2018-08-27T16:34:43.000Z | 2022-03-22T19:20:53.000Z | Chapter10/Ch10/server/create_database.py | PiotrAdaszewski/Tkinter-GUI-Programming-by-Example | c8a326d6034b5e54f77605a8ec840cb8fac89412 | [
"MIT"
] | 8 | 2019-04-11T06:47:36.000Z | 2022-03-11T23:23:42.000Z | Chapter10/Ch10/server/create_database.py | PiotrAdaszewski/Tkinter-GUI-Programming-by-Example | c8a326d6034b5e54f77605a8ec840cb8fac89412 | [
"MIT"
] | 85 | 2018-04-30T19:42:21.000Z | 2022-03-30T01:22:54.000Z | import sqlite3
database = sqlite3.connect("chat.db")
cursor = database.cursor()
create_users_sql = "CREATE TABLE users (username TEXT, real_name TEXT)"
cursor.execute(create_users_sql)
database.commit()
database.close()
| 20.272727 | 71 | 0.780269 | import sqlite3
database = sqlite3.connect("chat.db")
cursor = database.cursor()
create_users_sql = "CREATE TABLE users (username TEXT, real_name TEXT)"
cursor.execute(create_users_sql)
database.commit()
database.close()
| true | true |
f7fffadb472bb27e2b2411f95f54edd36b346e4c | 12,505 | py | Python | tmglow/nn/modules/flowUtils.py | zabaras/deep-turbulence | 0daca5daada449d4ba16bce37b703e20b444b6bc | [
"MIT"
] | 20 | 2020-12-01T14:58:01.000Z | 2022-03-15T07:40:10.000Z | tmglow/nn/modules/flowUtils.py | zabaras/deep-turbulence | 0daca5daada449d4ba16bce37b703e20b444b6bc | [
"MIT"
] | 2 | 2021-06-05T14:29:42.000Z | 2022-03-04T15:57:40.000Z | tmglow/nn/modules/flowUtils.py | zabaras/deep-turbulence | 0daca5daada449d4ba16bce37b703e20b444b6bc | [
"MIT"
] | 4 | 2020-09-04T06:11:04.000Z | 2021-09-05T10:47:16.000Z | '''
=====
Distributed by: Notre Dame SCAI Lab (MIT Liscense)
- Associated publication:
url: http://aimsciences.org//article/id/3a9f3d14-3421-4947-a45f-a9cc74edd097
doi: https://dx.doi.org/10.3934/fods.2020019
github: https://github.com/zabaras/deep-turbulence
=====
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import math
from torch.autograd import Variable
from torch.nn.modules.utils import _pair, _quadruple
class Squeeze(nn.Module):
"""Squeezes feature map by reducing the dimensions of the feature
and increasing channel number in chunks.
:param factor: factor to reduce feature dimensions by, defaults to 2
:type factor: int, optional
:note: This is the squeeze approached used in "Glow: Generative flow with invertible 1x1 convolutions"
by Kingma et al. https://arxiv.org/abs/1807.03039
"""
def __init__(self, factor=2):
"""Constructor method
"""
super(Squeeze, self).__init__()
assert factor >= 1
if factor == 1:
Warning('Squeeze factor is 1, this is identity function')
self.factor = factor
def forward(self, x):
"""Forward pass
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:returns:
- y: [B, factor**2 * in_features, H/factor, W/factor] Squeezed output feature tensor
:rtype: torch.Tensor
"""
if self.factor == 1:
return x
# n_channels, height, width
B, C, H, W = x.shape[:]
assert H % self.factor == 0 and W % self.factor == 0
x = x.reshape(-1, C, self.factor, H//self.factor, self.factor, W//self.factor)
x = x.transpose(3, 4)
y = x.reshape(-1, C * self.factor ** 2, H//self.factor, W//self.factor)
return y
def reverse(self, y):
"""Backward pass
:param y: [B, factor**2 * in_features, H/factor, W/factor] Squeezed input feature tensor
:type y: torch.Tensor
:returns:
- x: [B, in_features, H, W] Output feature tensor
:rtype: torch.Tensor
"""
if self.factor == 1:
return y
B, C, H, W = y.shape[:]
assert C >= self.factor ** 2 and C % self.factor ** 2 == 0
y = y.reshape(-1, C // self.factor ** 2, self.factor, self.factor, H, W)
y = y.transpose(3, 4)
x = y.reshape(-1, C // self.factor ** 2, H * self.factor, W * self.factor)
return x
class CheckerSqueeze(nn.Module):
"""Squeezes feature map by reducing the dimensions of the feature
and increasing channel number in a checkered pattern.
See Fig. 8 of paper: https://arxiv.org/abs/2006.04731
:param factor: factor to reduce feature dimensions by, defaults to 2
:type factor: int, optional
:note: This is the squeeze approached used in "Density estimation using real nvp"
by Dinh et al. https://arxiv.org/abs/1605.08803
"""
def __init__(self, factor=2):
"""Constructor method
"""
super(CheckerSqueeze, self).__init__()
assert factor >= 1
if factor == 1:
Warning('Squeeze factor is 1, this is identity function')
# Not tested for other factor values
factor = 2
self.factor = factor
def forward(self, x):
"""Forward pass
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:returns:
- y: [B, factor**2 * in_features, H/factor, W/factor] Squeezed output feature tensor
:rtype: torch.Tensor
"""
if self.factor == 1:
return x
# n_channels, height, width
B, C, H, W = x.shape[:]
assert H % self.factor == 0 and W % self.factor == 0
y = torch.zeros(B, C * self.factor ** 2, H//self.factor, W//self.factor).type(x.type())
c0 = C
y[:,:c0,:,:] = x[:,:,::self.factor,::self.factor]
y[:,c0:2*c0,:,:] = x[:,:,1::self.factor,::self.factor]
y[:,2*c0:3*c0,:,:] = x[:,:,1::self.factor,1::self.factor]
y[:,3*c0:,:,:] = x[:,:,::self.factor,1::self.factor]
return y
def reverse(self, y):
"""Backward pass
:param y: [B, factor**2 * in_features, H/factor, W/factor] Squeezed input feature tensor
:type y: torch.Tensor
:returns:
- x: [B, in_features, H, W] Output feature tensor
:rtype: torch.Tensor
"""
if self.factor == 1:
return y
B, C, H, W = y.shape[:]
assert C >= self.factor ** 2 and C % self.factor ** 2 == 0
x = torch.zeros(B, C//self.factor ** 2, H* self.factor, W* self.factor).type(y.type())
c0 = C//self.factor ** 2
x[:,:,::self.factor,::self.factor] = y[:,:c0,:,:]
x[:,:,1::self.factor,::self.factor] = y[:,c0:2*c0,:,:]
x[:,:,1::self.factor,1::self.factor] = y[:,2*c0:3*c0,:,:]
x[:,:,::self.factor,1::self.factor] = y[:,3*c0:,:,:]
return x
class GaussianDiag(object):
"""Multi-variate Gaussian class with diagonal covariance
for representing the latent variables
:param mean: [B, in_features, H, W] tensor of mean values
:type mean: torch.Tensor
:param log_stddev: [B, in_features, H, W] tensor of log sandard deviations
:type log_stddev: torch.Tensor
"""
Log2PI = float(np.log(2 * np.pi))
def __init__(self, mean, log_stddev):
"""Constructor method
"""
super().__init__()
self.mean = mean
self.log_stddev = log_stddev.clamp_(min=-10., max=math.log(5.))
# self._backward_hook = self.log_stddev.register_hook(
# lambda grad: torch.clamp_(grad, -10., 10.))
def likelihood(self, x):
"""Computes the Gaussian log-likelihood of each element
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:return:
- like: [B, in_features, H, W] log-likelihood tensor
:rtype: torch.Tensor
"""
like = -0.5 * (GaussianDiag.Log2PI + self.log_stddev * 2. \
+ (x - self.mean) ** 2 / (self.log_stddev * 2.).exp())
return like
def log_prob(self, x):
"""Computes the log product (sum) of Gaussian likelihoods
over the entire input feature tensor
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:return:
- likelihood: [B] sum log-likelihood over features
:rtype: torch.Tensor
"""
likelihood = self.likelihood(x)
return likelihood.view(x.shape[0], -1).sum(1)
def sample(self, eps=None):
"""Samples latent variables from learned Gaussian density
:param eps: [B, in_features, H, W] Latent samples from the unit Gaussian to reconstruct specific latent variables.
If none are provided latent variables are sampled randomly from learned density, defaults to None
:type eps: torch.Tensor, optional
:return:
- z: [B, in_features, H, W] sum log-likelihood over features
:rtype: torch.Tensor
"""
self.log_stddev.data.clamp_(min=-10., max=math.log(5.))
if eps is None:
eps = torch.randn_like(self.log_stddev)
# print(eps, self.log_stddev.data )
z = self.mean + self.log_stddev.exp() * eps
return z
class Conv2dZeros(nn.Module):
"""Convolution with weight and bias initialized to zero followed by channel-wise scaling
:math:`x*exp(scale * logscale\_factor)`
:param in_features: Number of input feature channels
:type in_features: int
:param out_features: Number of output feature channels
:type out_features: int
:param logscale_factor: log factor to scale output tensor by, defaults to 1
:type logscale_factor: int, optional
:note: This is proposed in "Glow: Generative flow with invertible 1x1 convolutions"
by Kingma et al. https://arxiv.org/abs/1807.03039. Appears to help with stability.
"""
def __init__(self, in_features, out_features, logscale_factor=1):
"""Constructor method
"""
super(Conv2dZeros, self).__init__()
self.conv = nn.Conv2d(in_features, out_features, kernel_size=3,
stride=1, padding=0, bias=True)
self.conv.weight.data.zero_()
self.conv.bias.data.zero_()
self.scale = nn.Parameter(torch.zeros(1, 1, 1, 1))
self.logscale_factor = logscale_factor
# self.sigmoid = nn.Sigmoid()
def forward(self, x):
"""Forward pass.
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:return:
- out: [B, out_features, H, W] output feature tensor
:rtype: torch.Tensor
"""
x = self.conv(F.pad(x, _quadruple(1), mode='replicate'))
return x * torch.exp(torch.clamp(self.scale, -4., np.log(4)) * self.logscale_factor)
class LatentEncoder(nn.Module):
"""Latent encoder used to compute mu and std for Gaussian density
from split feature map. See NN block in Fig. 8 of paper:
https://arxiv.org/abs/2006.04731
:param in_features: Number of input feature channels
:type in_features: int
"""
def __init__(self, in_features):
"""Constructor method
"""
super(LatentEncoder, self).__init__()
self.conv2d = Conv2dZeros(in_features, in_features * 2)
self.hardtanh = nn.Hardtanh(min_val=-2.0, max_val=np.log(5.0), inplace=False)
# self.hardtanh = nn.Sigmoid()
def forward(self, x):
"""Forward pass
:param x: [B, in_features, H, W] input feature tensor
:type x: torch.Tensor
:return:
- gauss_diag: Gaussian prior
:rtype: :class:`nn.modules.flowUtils.GaussianDiag`
"""
mean, log_stddev = (self.hardtanh(self.conv2d(x))).chunk(2, 1)
gauss_diag = GaussianDiag(mean, log_stddev)
return gauss_diag
class Split(nn.Module):
"""Splits input features into half features that are passed deeper in the model
and the other half modeled as a Gaussian density.
See NN block in Fig. 8 of paper: https://arxiv.org/abs/2006.04731
:param in_features: Number of input feature channels
:type in_features: int
"""
def __init__(self, in_features):
"""Constructor method
"""
super(Split, self).__init__()
self.latent_encoder = LatentEncoder(in_features // 2)
def forward(self, z, return_eps=False):
"""Forward split
:param z: [B, in_features, H, W] input feature tensor
:type z: torch.Tensor
:param return_eps: Return samples from latent densities, defaults to False
:type return_eps: bool, optional
:return:
- z1: [B, in_features//2, H, W] output feature tensor
- log_prob_prior: [B] log-likelihood of split features
- eps: [B, in_features//2, H, W] tensor of sampled latent variables from unit gaussian
:rtype: (torch.Tensor, torch.Tensor, torch.Tensor)
"""
# split out z2, and evalute log prob at z2 which takes the form of
# diagonal Gaussian are reparameterized by latent_encoder
z1, z2 = z.chunk(2, 1)
prior = self.latent_encoder(z1)
log_prob_prior = prior.log_prob(z2)
if return_eps:
eps = (z2 - prior.mean) / prior.log_stddev.exp()
else:
eps = None
return z1, log_prob_prior, eps
def reverse(self, z1, eps=None):
"""Backward split
:param z1: [B, in_features//2, H, W] input split feature tensor
:type z1: torch.Tensor
:param eps: [B, in_features//2, H, W] Latent samples from the unit Gaussian to reconstruct specific latent variables.
If none are provided latent variables are sampled randomly from learned density, defaults to None
:type eps: torch.Tensor, optional
:return:
- z: [B, in_features, H, W] output reconstructed feature tensor
- log_prob_prior: [B] log-likelihood of split features
:rtype: (torch.Tensor, torch.Tensor)
"""
# sample z2, then concat with z1
# intermediate flow, z2 is the split-out latent
prior = self.latent_encoder(z1)
z2 = prior.sample(eps)
z = torch.cat((z1, z2), 1)
log_prob_prior = prior.log_prob(z2)
return z, log_prob_prior | 37.328358 | 125 | 0.601359 | import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import math
from torch.autograd import Variable
from torch.nn.modules.utils import _pair, _quadruple
class Squeeze(nn.Module):
def __init__(self, factor=2):
super(Squeeze, self).__init__()
assert factor >= 1
if factor == 1:
Warning('Squeeze factor is 1, this is identity function')
self.factor = factor
def forward(self, x):
if self.factor == 1:
return x
B, C, H, W = x.shape[:]
assert H % self.factor == 0 and W % self.factor == 0
x = x.reshape(-1, C, self.factor, H//self.factor, self.factor, W//self.factor)
x = x.transpose(3, 4)
y = x.reshape(-1, C * self.factor ** 2, H//self.factor, W//self.factor)
return y
def reverse(self, y):
if self.factor == 1:
return y
B, C, H, W = y.shape[:]
assert C >= self.factor ** 2 and C % self.factor ** 2 == 0
y = y.reshape(-1, C // self.factor ** 2, self.factor, self.factor, H, W)
y = y.transpose(3, 4)
x = y.reshape(-1, C // self.factor ** 2, H * self.factor, W * self.factor)
return x
class CheckerSqueeze(nn.Module):
def __init__(self, factor=2):
super(CheckerSqueeze, self).__init__()
assert factor >= 1
if factor == 1:
Warning('Squeeze factor is 1, this is identity function')
factor = 2
self.factor = factor
def forward(self, x):
if self.factor == 1:
return x
B, C, H, W = x.shape[:]
assert H % self.factor == 0 and W % self.factor == 0
y = torch.zeros(B, C * self.factor ** 2, H//self.factor, W//self.factor).type(x.type())
c0 = C
y[:,:c0,:,:] = x[:,:,::self.factor,::self.factor]
y[:,c0:2*c0,:,:] = x[:,:,1::self.factor,::self.factor]
y[:,2*c0:3*c0,:,:] = x[:,:,1::self.factor,1::self.factor]
y[:,3*c0:,:,:] = x[:,:,::self.factor,1::self.factor]
return y
def reverse(self, y):
if self.factor == 1:
return y
B, C, H, W = y.shape[:]
assert C >= self.factor ** 2 and C % self.factor ** 2 == 0
x = torch.zeros(B, C//self.factor ** 2, H* self.factor, W* self.factor).type(y.type())
c0 = C//self.factor ** 2
x[:,:,::self.factor,::self.factor] = y[:,:c0,:,:]
x[:,:,1::self.factor,::self.factor] = y[:,c0:2*c0,:,:]
x[:,:,1::self.factor,1::self.factor] = y[:,2*c0:3*c0,:,:]
x[:,:,::self.factor,1::self.factor] = y[:,3*c0:,:,:]
return x
class GaussianDiag(object):
Log2PI = float(np.log(2 * np.pi))
def __init__(self, mean, log_stddev):
super().__init__()
self.mean = mean
self.log_stddev = log_stddev.clamp_(min=-10., max=math.log(5.))
def likelihood(self, x):
like = -0.5 * (GaussianDiag.Log2PI + self.log_stddev * 2. \
+ (x - self.mean) ** 2 / (self.log_stddev * 2.).exp())
return like
def log_prob(self, x):
likelihood = self.likelihood(x)
return likelihood.view(x.shape[0], -1).sum(1)
def sample(self, eps=None):
self.log_stddev.data.clamp_(min=-10., max=math.log(5.))
if eps is None:
eps = torch.randn_like(self.log_stddev)
z = self.mean + self.log_stddev.exp() * eps
return z
class Conv2dZeros(nn.Module):
def __init__(self, in_features, out_features, logscale_factor=1):
super(Conv2dZeros, self).__init__()
self.conv = nn.Conv2d(in_features, out_features, kernel_size=3,
stride=1, padding=0, bias=True)
self.conv.weight.data.zero_()
self.conv.bias.data.zero_()
self.scale = nn.Parameter(torch.zeros(1, 1, 1, 1))
self.logscale_factor = logscale_factor
def forward(self, x):
x = self.conv(F.pad(x, _quadruple(1), mode='replicate'))
return x * torch.exp(torch.clamp(self.scale, -4., np.log(4)) * self.logscale_factor)
class LatentEncoder(nn.Module):
def __init__(self, in_features):
super(LatentEncoder, self).__init__()
self.conv2d = Conv2dZeros(in_features, in_features * 2)
self.hardtanh = nn.Hardtanh(min_val=-2.0, max_val=np.log(5.0), inplace=False)
def forward(self, x):
mean, log_stddev = (self.hardtanh(self.conv2d(x))).chunk(2, 1)
gauss_diag = GaussianDiag(mean, log_stddev)
return gauss_diag
class Split(nn.Module):
def __init__(self, in_features):
super(Split, self).__init__()
self.latent_encoder = LatentEncoder(in_features // 2)
def forward(self, z, return_eps=False):
z1, z2 = z.chunk(2, 1)
prior = self.latent_encoder(z1)
log_prob_prior = prior.log_prob(z2)
if return_eps:
eps = (z2 - prior.mean) / prior.log_stddev.exp()
else:
eps = None
return z1, log_prob_prior, eps
def reverse(self, z1, eps=None):
prior = self.latent_encoder(z1)
z2 = prior.sample(eps)
z = torch.cat((z1, z2), 1)
log_prob_prior = prior.log_prob(z2)
return z, log_prob_prior | true | true |
f7fffc5072519c36d33dbea2959605cf69be1cab | 1,509 | py | Python | build/lib/oddstream/kde_estimation.py | tartaruszen/oddstream | c3f2a4d6cba9753052acf8be03e5df038d40b745 | [
"MIT"
] | 1 | 2021-11-15T08:47:52.000Z | 2021-11-15T08:47:52.000Z | oddstream/kde_estimation.py | tartaruszen/oddstream | c3f2a4d6cba9753052acf8be03e5df038d40b745 | [
"MIT"
] | null | null | null | oddstream/kde_estimation.py | tartaruszen/oddstream | c3f2a4d6cba9753052acf8be03e5df038d40b745 | [
"MIT"
] | null | null | null | from fastkde import fastKDE
import numpy as np
"""
Fast 2D Kernel Density Estimation with simple point evaluation
"""
class KDEEstimation2D(object):
def __init__(self, X):
self.pdf, self.axes = fastKDE.pdf(X[:, 0], X[:, 1])
def evaluate_points(self, X):
m = X.shape[0]
values = np.array(range(0, m), dtype=float)
for i in range(0, m):
values[i] = self.evaluate_pdf_value(X[i, :])
return values
def evaluate_pdf_value(self, s):
x_up = s[0] <= self.axes[0]
index_up_x = self.get_index_upper(x_up, 0)
x_low = s[0] >= self.axes[0]
index_low_x = self.get_index_lower(x_low)
y_up = s[1] <= self.axes[1]
index_up_y = self.get_index_upper(y_up, 1)
y_low = s[1] >= self.axes[1]
index_low_y = self.get_index_lower(y_low)
# TODO
value = 0.0
for i in range(index_low_x, index_up_x + 1):
for j in range(index_low_y, index_up_y + 1):
value += self.pdf.T[i, j]
value /= 4
return value
def get_index_upper(self, values, index):
c = [i for i in range(0, len(values)) if values[i]]
if len(c) == 0:
up = self.pdf.shape[index] - 2
else:
up = np.min(c)
return up
def get_index_lower(self, values):
c = [i for i in range(0, len(values)) if values[i]]
if len(c) == 0:
up = 0
else:
up = np.max(c)
return up | 29.588235 | 66 | 0.542081 | from fastkde import fastKDE
import numpy as np
class KDEEstimation2D(object):
def __init__(self, X):
self.pdf, self.axes = fastKDE.pdf(X[:, 0], X[:, 1])
def evaluate_points(self, X):
m = X.shape[0]
values = np.array(range(0, m), dtype=float)
for i in range(0, m):
values[i] = self.evaluate_pdf_value(X[i, :])
return values
def evaluate_pdf_value(self, s):
x_up = s[0] <= self.axes[0]
index_up_x = self.get_index_upper(x_up, 0)
x_low = s[0] >= self.axes[0]
index_low_x = self.get_index_lower(x_low)
y_up = s[1] <= self.axes[1]
index_up_y = self.get_index_upper(y_up, 1)
y_low = s[1] >= self.axes[1]
index_low_y = self.get_index_lower(y_low)
value = 0.0
for i in range(index_low_x, index_up_x + 1):
for j in range(index_low_y, index_up_y + 1):
value += self.pdf.T[i, j]
value /= 4
return value
def get_index_upper(self, values, index):
c = [i for i in range(0, len(values)) if values[i]]
if len(c) == 0:
up = self.pdf.shape[index] - 2
else:
up = np.min(c)
return up
def get_index_lower(self, values):
c = [i for i in range(0, len(values)) if values[i]]
if len(c) == 0:
up = 0
else:
up = np.max(c)
return up | true | true |
f7fffca807f6e130c47099320eb8a860afe7e2d3 | 1,567 | py | Python | tests/test_sheet.py | tomviner/dojo-monthly-emailer | 08c0bf9a28384cd3d8df8289c310a3028de7259d | [
"MIT"
] | null | null | null | tests/test_sheet.py | tomviner/dojo-monthly-emailer | 08c0bf9a28384cd3d8df8289c310a3028de7259d | [
"MIT"
] | 3 | 2017-08-29T02:43:57.000Z | 2018-02-12T15:04:34.000Z | tests/test_sheet.py | tomviner/dojo-monthly-emailer | 08c0bf9a28384cd3d8df8289c310a3028de7259d | [
"MIT"
] | null | null | null | "Integration tests. Makes actual calls to Google Spreadsheets API."
import re
from datetime import date
from dojo_emailer.app import (
GDOCS_URL_PREFIX,
GDOCS_URL_SHEET_NAME,
SPREADSHEET_NAME
)
from dojo_emailer.sheet import (
get_dojo_data_from_date,
get_first_cell_by_prefix,
get_spreadsheet_id
)
def test_get_spreadsheet_id():
# check start and end of id without divulging entire value here
expected_id_regex = r'18yYx.{34}q39DM'
spreadsheet_id = get_spreadsheet_id(SPREADSHEET_NAME)
assert re.match(expected_id_regex, spreadsheet_id)
def test_get_first_cell_by_prefix():
expected_url_regex = \
r'https://docs.google.com/document/d/1WyDe.{34}Z0z9s/.*'
url = get_first_cell_by_prefix(
SPREADSHEET_NAME, GDOCS_URL_SHEET_NAME, GDOCS_URL_PREFIX)
assert re.match(expected_url_regex, url)
def test_get_first_cell_by_prefix_with_missing_prefix():
sheet_not_containing_a_gdocs_url = 'Email Addresses'
url = get_first_cell_by_prefix(
SPREADSHEET_NAME, sheet_not_containing_a_gdocs_url, GDOCS_URL_PREFIX)
assert url is None
def test_get_dojo_data_from_date():
expected_data = {
'S': 8,
'E': 5,
'Cat-Herder': 'Tom',
'Day': 'Thursday 5th January',
'Month': 'January 2017',
'Person on the Inside': 'Marcus',
'Venue': 'SohoNet',
}
date_ = date(2017, 1, 1)
data = get_dojo_data_from_date(SPREADSHEET_NAME, date_)
for key, expected_value in expected_data.items():
assert data[key] == expected_value
| 29.566038 | 77 | 0.716018 | import re
from datetime import date
from dojo_emailer.app import (
GDOCS_URL_PREFIX,
GDOCS_URL_SHEET_NAME,
SPREADSHEET_NAME
)
from dojo_emailer.sheet import (
get_dojo_data_from_date,
get_first_cell_by_prefix,
get_spreadsheet_id
)
def test_get_spreadsheet_id():
expected_id_regex = r'18yYx.{34}q39DM'
spreadsheet_id = get_spreadsheet_id(SPREADSHEET_NAME)
assert re.match(expected_id_regex, spreadsheet_id)
def test_get_first_cell_by_prefix():
expected_url_regex = \
r'https://docs.google.com/document/d/1WyDe.{34}Z0z9s/.*'
url = get_first_cell_by_prefix(
SPREADSHEET_NAME, GDOCS_URL_SHEET_NAME, GDOCS_URL_PREFIX)
assert re.match(expected_url_regex, url)
def test_get_first_cell_by_prefix_with_missing_prefix():
sheet_not_containing_a_gdocs_url = 'Email Addresses'
url = get_first_cell_by_prefix(
SPREADSHEET_NAME, sheet_not_containing_a_gdocs_url, GDOCS_URL_PREFIX)
assert url is None
def test_get_dojo_data_from_date():
expected_data = {
'S': 8,
'E': 5,
'Cat-Herder': 'Tom',
'Day': 'Thursday 5th January',
'Month': 'January 2017',
'Person on the Inside': 'Marcus',
'Venue': 'SohoNet',
}
date_ = date(2017, 1, 1)
data = get_dojo_data_from_date(SPREADSHEET_NAME, date_)
for key, expected_value in expected_data.items():
assert data[key] == expected_value
| true | true |
f7fffd719a5cd8692bc4e44cdc86e3420276144d | 10,949 | py | Python | docs/conf.py | Pyco7/django-ajax-views | 5361936867b8c9b9b444d7e33bfc6ef1268b4611 | [
"MIT"
] | 6 | 2016-10-27T11:09:35.000Z | 2021-06-05T18:19:51.000Z | docs/conf.py | Pyco7/django-ajax-views | 5361936867b8c9b9b444d7e33bfc6ef1268b4611 | [
"MIT"
] | 3 | 2020-02-12T00:08:06.000Z | 2021-06-10T19:46:55.000Z | docs/conf.py | Pyco7/django-ajax-views | 5361936867b8c9b9b444d7e33bfc6ef1268b4611 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# django-ajax-views documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 9 16:09:03 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import django
from django.conf import settings
# sys.path.insert(0, os.path.abspath(os.path.join('..', 'ajaxviews', 'static', 'require-ajax-views', 'src')))
sys.path.insert(0, os.path.abspath('..'))
import ajaxviews
settings.configure(
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
]
)
django.setup()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
# 'sphinxcontrib.coffeedomain',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'django-ajax-views'
copyright = '2016, Emanuel Hafner'
author = 'Emanuel Hafner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(map(str, ajaxviews.__version_info__))
# The full version, including alpha/beta/rc tags.
release = ajaxviews.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'django-ajax-views v0.0.2'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-ajax-viewsdoc'
# def setup(app):
# app.add_stylesheet('custom.css')
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'django-ajax-views.tex', 'django-ajax-views Documentation',
'Emanuel Hafner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'django-ajax-views', 'django-ajax-views Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'django-ajax-views', 'django-ajax-views Documentation',
author, 'django-ajax-views', 'Django class-based views working together with require-ajax-views.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for CoffeeScript output -------------------------------------------
# coffee_src_dir = os.path.abspath(os.path.join('..', 'ajaxviews', 'static', 'require-ajax-views', 'src'))
#
# coffee_src_parser = 'requirejs'
# primary_domain = 'python'
| 28.965608 | 109 | 0.700521 |
import os
import sys
import django
from django.conf import settings
sys.path.insert(0, os.path.abspath('..'))
import ajaxviews
settings.configure(
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
]
)
django.setup()
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'django-ajax-views'
copyright = '2016, Emanuel Hafner'
author = 'Emanuel Hafner'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(map(str, ajaxviews.__version_info__))
# The full version, including alpha/beta/rc tags.
release = ajaxviews.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'django-ajax-views v0.0.2'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-ajax-viewsdoc'
# def setup(app):
# app.add_stylesheet('custom.css')
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'django-ajax-views.tex', 'django-ajax-views Documentation',
'Emanuel Hafner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'django-ajax-views', 'django-ajax-views Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'django-ajax-views', 'django-ajax-views Documentation',
author, 'django-ajax-views', 'Django class-based views working together with require-ajax-views.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
intersphinx_mapping = {'https://docs.python.org/': None}
| true | true |
f7fffddbc301f3e555f75ad4cb2d067430628925 | 7,189 | py | Python | bluetooth/generate.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | bluetooth/generate.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 7,642 | 2018-05-28T09:38:03.000Z | 2022-03-31T20:55:48.000Z | bluetooth/generate.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# TODO(509038): Delete the file in LayoutTests/bluetooth after all the script
# tests have been migrated to this directory.
"""Generator script for Web Bluetooth LayoutTests.
For each script-tests/X.js creates the following test files depending on the
contents of X.js
- getPrimaryService/X.https.window.js
- getPrimaryServices/X.https.window.js
- getPrimaryServices/X-with-uuid.https.window.js
script-tests/X.js files should contain "CALLS([variation1 | variation2 | ...])"
tokens that indicate what files to generate. Each variation in CALLS([...])
should corresponds to a js function call and its arguments. Additionally a
variation can end in [UUID] to indicate that the generated file's name should
have the -with-uuid suffix.
The PREVIOUS_CALL token will be replaced with the function that replaced CALLS.
The FUNCTION_NAME token will be replaced with the name of the function that
replaced CALLS.
For example, for the following template file:
// script-tests/example.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.CALLS([
getPrimaryService('heart_rate')|
getPrimaryServices('heart_rate')[UUID]]))
.then(device => device.gatt.PREVIOUS_CALL);
}, 'example test for FUNCTION_NAME');
this script will generate:
// getPrimaryService/example.https.window.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.getPrimaryService('heart_rate'))
.then(device => device.gatt.getPrimaryService('heart_rate'));
}, 'example test for getPrimaryService');
// getPrimaryServices/example-with-uuid.https.window.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.getPrimaryServices('heart_rate'))
.then(device => device.gatt.getPrimaryServices('heart_rate'));
}, 'example test for getPrimaryServices');
Run
$ python //third_party/WebKit/LayoutTests/bluetooth/generate.py
and commit the generated files.
"""
import fnmatch
import os
import re
import sys
import logging
TEMPLATES_DIR = 'script-tests'
class GeneratedTest:
def __init__(self, data, path, template):
self.data = data
self.path = path
self.template = template
def GetGeneratedTests():
"""Yields a GeneratedTest for each call in templates in script-tests."""
bluetooth_tests_dir = os.path.dirname(os.path.realpath(__file__))
# Read Base Test Template.
base_template_file_handle = open(
os.path.join(
bluetooth_tests_dir,
TEMPLATES_DIR,
'base_test_js.template'
), 'r')
base_template_file_data = base_template_file_handle.read().decode('utf-8')
base_template_file_handle.close()
# Get Templates.
template_path = os.path.join(bluetooth_tests_dir, TEMPLATES_DIR)
available_templates = []
for root, _, files in os.walk(template_path):
for template in files:
if template.endswith('.js'):
available_templates.append(os.path.join(root, template))
# Generate Test Files
for template in available_templates:
# Read template
template_file_handle = open(template, 'r')
template_file_data = template_file_handle.read().decode('utf-8')
template_file_handle.close()
template_name = os.path.splitext(os.path.basename(template))[0]
# Find function names in multiline pattern: CALLS( [ function_name,function_name2[UUID] ])
result = re.search(
r'CALLS\(' + # CALLS(
r'[^\[]*' + # Any characters not [, allowing for new lines.
r'\[' + # [
r'(.*?)' + # group matching: function_name(), function_name2[UUID]
r'\]\)', # adjacent closing characters: ])
template_file_data, re.MULTILINE | re.DOTALL)
if result is None:
raise Exception('Template must contain \'CALLS\' tokens')
new_test_file_data = base_template_file_data.replace('TEST',
template_file_data)
# Replace CALLS([...]) with CALLS so that we don't have to replace the
# CALLS([...]) for every new test file.
new_test_file_data = new_test_file_data.replace(result.group(), 'CALLS')
# Replace 'PREVIOUS_CALL' with 'CALLS' so that we can replace it while
# replacing CALLS.
new_test_file_data = new_test_file_data.replace('PREVIOUS_CALL', 'CALLS')
for call in result.group(1).split('|'):
# Parse call
call = call.strip()
function_name, args, uuid_suffix = re.search(r'(.*?)\((.*)\)(\[UUID\])?', call).groups()
# Replace template tokens
call_test_file_data = new_test_file_data
call_test_file_data = call_test_file_data.replace('CALLS', '{}({})'.format(function_name, args))
call_test_file_data = call_test_file_data.replace('FUNCTION_NAME', function_name)
# Get test file name
group_dir = os.path.basename(os.path.abspath(os.path.join(template, os.pardir)))
call_test_file_name = 'gen-{}{}.https.window.js'.format(template_name, '-with-uuid' if uuid_suffix else '')
call_test_file_path = os.path.join(bluetooth_tests_dir, group_dir, function_name, call_test_file_name)
yield GeneratedTest(call_test_file_data, call_test_file_path, template)
def main():
logging.basicConfig(level=logging.INFO)
previous_generated_files = set()
current_path = os.path.dirname(os.path.realpath(__file__))
for root, _, filenames in os.walk(current_path):
for filename in fnmatch.filter(filenames, 'gen-*.https.window.js'):
previous_generated_files.add(os.path.join(root, filename))
generated_files = set()
for generated_test in GetGeneratedTests():
prev_len = len(generated_files)
generated_files.add(generated_test.path)
if prev_len == len(generated_files):
logging.info('Generated the same test twice for template:\n%s',
generated_test.template)
# Create or open test file
directory = os.path.dirname(generated_test.path)
if not os.path.exists(directory):
os.makedirs(directory)
test_file_handle = open(generated_test.path, 'wb')
# Write contents
test_file_handle.write(generated_test.data.encode('utf-8'))
test_file_handle.close()
new_generated_files = generated_files - previous_generated_files
if len(new_generated_files) != 0:
logging.info('Newly generated tests:')
for generated_file in new_generated_files:
logging.info(generated_file)
obsolete_files = previous_generated_files - generated_files
if len(obsolete_files) != 0:
logging.warning('The following files might be obsolete:')
for generated_file in obsolete_files:
logging.warning(generated_file)
if __name__ == '__main__':
sys.exit(main())
| 37.836842 | 119 | 0.678815 |
import fnmatch
import os
import re
import sys
import logging
TEMPLATES_DIR = 'script-tests'
class GeneratedTest:
def __init__(self, data, path, template):
self.data = data
self.path = path
self.template = template
def GetGeneratedTests():
bluetooth_tests_dir = os.path.dirname(os.path.realpath(__file__))
base_template_file_handle = open(
os.path.join(
bluetooth_tests_dir,
TEMPLATES_DIR,
'base_test_js.template'
), 'r')
base_template_file_data = base_template_file_handle.read().decode('utf-8')
base_template_file_handle.close()
template_path = os.path.join(bluetooth_tests_dir, TEMPLATES_DIR)
available_templates = []
for root, _, files in os.walk(template_path):
for template in files:
if template.endswith('.js'):
available_templates.append(os.path.join(root, template))
for template in available_templates:
template_file_handle = open(template, 'r')
template_file_data = template_file_handle.read().decode('utf-8')
template_file_handle.close()
template_name = os.path.splitext(os.path.basename(template))[0]
result = re.search(
r'CALLS\(' +
r'[^\[]*' +
r'\[' +
r'(.*?)' +
r'\]\)',
template_file_data, re.MULTILINE | re.DOTALL)
if result is None:
raise Exception('Template must contain \'CALLS\' tokens')
new_test_file_data = base_template_file_data.replace('TEST',
template_file_data)
# CALLS([...]) for every new test file.
new_test_file_data = new_test_file_data.replace(result.group(), 'CALLS')
# Replace 'PREVIOUS_CALL' with 'CALLS' so that we can replace it while
# replacing CALLS.
new_test_file_data = new_test_file_data.replace('PREVIOUS_CALL', 'CALLS')
for call in result.group(1).split('|'):
# Parse call
call = call.strip()
function_name, args, uuid_suffix = re.search(r'(.*?)\((.*)\)(\[UUID\])?', call).groups()
# Replace template tokens
call_test_file_data = new_test_file_data
call_test_file_data = call_test_file_data.replace('CALLS', '{}({})'.format(function_name, args))
call_test_file_data = call_test_file_data.replace('FUNCTION_NAME', function_name)
# Get test file name
group_dir = os.path.basename(os.path.abspath(os.path.join(template, os.pardir)))
call_test_file_name = 'gen-{}{}.https.window.js'.format(template_name, '-with-uuid' if uuid_suffix else '')
call_test_file_path = os.path.join(bluetooth_tests_dir, group_dir, function_name, call_test_file_name)
yield GeneratedTest(call_test_file_data, call_test_file_path, template)
def main():
logging.basicConfig(level=logging.INFO)
previous_generated_files = set()
current_path = os.path.dirname(os.path.realpath(__file__))
for root, _, filenames in os.walk(current_path):
for filename in fnmatch.filter(filenames, 'gen-*.https.window.js'):
previous_generated_files.add(os.path.join(root, filename))
generated_files = set()
for generated_test in GetGeneratedTests():
prev_len = len(generated_files)
generated_files.add(generated_test.path)
if prev_len == len(generated_files):
logging.info('Generated the same test twice for template:\n%s',
generated_test.template)
# Create or open test file
directory = os.path.dirname(generated_test.path)
if not os.path.exists(directory):
os.makedirs(directory)
test_file_handle = open(generated_test.path, 'wb')
# Write contents
test_file_handle.write(generated_test.data.encode('utf-8'))
test_file_handle.close()
new_generated_files = generated_files - previous_generated_files
if len(new_generated_files) != 0:
logging.info('Newly generated tests:')
for generated_file in new_generated_files:
logging.info(generated_file)
obsolete_files = previous_generated_files - generated_files
if len(obsolete_files) != 0:
logging.warning('The following files might be obsolete:')
for generated_file in obsolete_files:
logging.warning(generated_file)
if __name__ == '__main__':
sys.exit(main())
| true | true |
f7ffff2c3473af10e580059f64b75ea3164113cc | 119 | py | Python | examples/echo/submissions/partially_accepted/sol.py | jsannemo/problemtools | 591f24e99c8d66a819fc8dde82c5ec55173b3c83 | [
"MIT"
] | null | null | null | examples/echo/submissions/partially_accepted/sol.py | jsannemo/problemtools | 591f24e99c8d66a819fc8dde82c5ec55173b3c83 | [
"MIT"
] | null | null | null | examples/echo/submissions/partially_accepted/sol.py | jsannemo/problemtools | 591f24e99c8d66a819fc8dde82c5ec55173b3c83 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
input()
A = input()
B = input()
C = input()
D = input()
E = input()
print(A)
print(C)
print(E)
| 9.916667 | 22 | 0.579832 |
input()
A = input()
B = input()
C = input()
D = input()
E = input()
print(A)
print(C)
print(E)
| true | true |
f7ffff8e2c23d4349b4f870e5d0fb30bb2f3b2a3 | 12,427 | py | Python | cleverhans/train.py | iamgroot42/cleverhans | 53da9cd6daf9d7457800831c3eaa75f729a39145 | [
"MIT"
] | 21 | 2019-06-07T17:05:30.000Z | 2022-02-07T03:25:15.000Z | cleverhans/train.py | iamgroot42/cleverhans | 53da9cd6daf9d7457800831c3eaa75f729a39145 | [
"MIT"
] | 7 | 2019-12-16T22:20:01.000Z | 2022-02-10T00:45:21.000Z | cleverhans/train.py | iamgroot42/cleverhans | 53da9cd6daf9d7457800831c3eaa75f729a39145 | [
"MIT"
] | 8 | 2019-06-11T03:06:29.000Z | 2022-01-18T04:18:27.000Z | """
Multi-replica synchronous training
NOTE: This module is much more free to change than many other modules
in CleverHans. CleverHans is very conservative about changes to any
code that affects the output of benchmark tests (attacks, evaluation
methods, etc.). This module provides *model training* functionality
not *benchmarks* and thus is free to change rapidly to provide better
speed, accuracy, etc.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import os
import time
import warnings
import math
import numpy as np
from six.moves import xrange
import tensorflow as tf
from cleverhans import canary
from cleverhans.utils import _ArgsWrapper, create_logger
from cleverhans.utils import safe_zip
from cleverhans.utils_tf import infer_devices
from cleverhans.utils_tf import initialize_uninitialized_global_variables
_logger = create_logger("train")
_logger.setLevel(logging.INFO)
def train(sess, loss, x_train, y_train,
init_all=False, evaluate=None, feed=None, args=None,
rng=None, var_list=None, fprop_args=None, optimizer=None,
devices=None, x_batch_preprocessor=None, use_ema=False,
ema_decay=.998, run_canary=None,
loss_threshold=1e5, dataset_train=None, dataset_size=None):
"""
Run (optionally multi-replica, synchronous) training to minimize `loss`
:param sess: TF session to use when training the graph
:param loss: tensor, the loss to minimize
:param x_train: numpy array with training inputs or tf Dataset
:param y_train: numpy array with training outputs or tf Dataset
:param init_all: (boolean) If set to true, all TF variables in the session
are (re)initialized, otherwise only previously
uninitialized variables are initialized before training.
:param evaluate: function that is run after each training iteration
(typically to display the test/validation accuracy).
:param feed: An optional dictionary that is appended to the feeding
dictionary before the session runs. Can be used to feed
the learning phase of a Keras model for instance.
:param args: dict or argparse `Namespace` object.
Should contain `nb_epochs`, `learning_rate`,
`batch_size`
:param rng: Instance of numpy.random.RandomState
:param var_list: Optional list of parameters to train.
:param fprop_args: dict, extra arguments to pass to fprop (loss and model).
:param optimizer: Optimizer to be used for training
:param devices: list of device names to use for training
If None, defaults to: all GPUs, if GPUs are available
all devices, if no GPUs are available
:param x_batch_preprocessor: callable
Takes a single tensor containing an x_train batch as input
Returns a single tensor containing an x_train batch as output
Called to preprocess the data before passing the data to the Loss
:param use_ema: bool
If true, uses an exponential moving average of the model parameters
:param ema_decay: float or callable
The decay parameter for EMA, if EMA is used
If a callable rather than a float, this is a callable that takes
the epoch and batch as arguments and returns the ema_decay for
the current batch.
:param loss_threshold: float
Raise an exception if the loss exceeds this value.
This is intended to rapidly detect numerical problems.
Sometimes the loss may legitimately be higher than this value. In
such cases, raise the value. If needed it can be np.inf.
:param dataset_train: tf Dataset instance.
Used as a replacement for x_train, y_train for faster performance.
:param dataset_size: integer, the size of the dataset_train.
:return: True if model trained
"""
# Check whether the hardware is working correctly
canary.run_canary()
if run_canary is not None:
warnings.warn("The `run_canary` argument is deprecated. The canary "
"is now much cheaper and thus runs all the time. The "
"canary now uses its own loss function so it is not "
"necessary to turn off the canary when training with "
" a stochastic loss. Simply quit passing `run_canary`."
"Passing `run_canary` may become an error on or after "
"2019-10-16.")
args = _ArgsWrapper(args or {})
fprop_args = fprop_args or {}
# Check that necessary arguments were given (see doc above)
# Be sure to support 0 epochs for debugging purposes
if args.nb_epochs is None:
raise ValueError("`args` must specify number of epochs")
if optimizer is None:
if args.learning_rate is None:
raise ValueError("Learning rate was not given in args dict")
assert args.batch_size, "Batch size was not given in args dict"
if rng is None:
rng = np.random.RandomState()
if optimizer is None:
optimizer = tf.train.AdamOptimizer(learning_rate=args.learning_rate)
else:
if not isinstance(optimizer, tf.train.Optimizer):
raise ValueError("optimizer object must be from a child class of "
"tf.train.Optimizer")
grads = []
xs = []
preprocessed_xs = []
ys = []
if dataset_train is not None:
assert x_train is None and y_train is None and x_batch_preprocessor is None
if dataset_size is None:
raise ValueError("You must provide a dataset size")
data_iterator = dataset_train.make_one_shot_iterator().get_next()
x_train, y_train = sess.run(data_iterator)
devices = infer_devices(devices)
for device in devices:
with tf.device(device):
x = tf.placeholder(x_train.dtype, (None,) + x_train.shape[1:])
y = tf.placeholder(y_train.dtype, (None,) + y_train.shape[1:])
xs.append(x)
ys.append(y)
if x_batch_preprocessor is not None:
x = x_batch_preprocessor(x)
# We need to keep track of these so that the canary can feed
# preprocessed values. If the canary had to feed raw values,
# stochastic preprocessing could make the canary fail.
preprocessed_xs.append(x)
loss_value = loss.fprop(x, y, **fprop_args)
grads.append(optimizer.compute_gradients(
loss_value, var_list=var_list))
num_devices = len(devices)
print("num_devices: ", num_devices)
grad = avg_grads(grads)
# Trigger update operations within the default graph (such as batch_norm).
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
train_step = optimizer.apply_gradients(grad)
epoch_tf = tf.placeholder(tf.int32, [])
batch_tf = tf.placeholder(tf.int32, [])
if use_ema:
if callable(ema_decay):
ema_decay = ema_decay(epoch_tf, batch_tf)
ema = tf.train.ExponentialMovingAverage(decay=ema_decay)
with tf.control_dependencies([train_step]):
train_step = ema.apply(var_list)
# Get pointers to the EMA's running average variables
avg_params = [ema.average(param) for param in var_list]
# Make temporary buffers used for swapping the live and running average
# parameters
tmp_params = [tf.Variable(param, trainable=False)
for param in var_list]
# Define the swapping operation
param_to_tmp = [tf.assign(tmp, param)
for tmp, param in safe_zip(tmp_params, var_list)]
with tf.control_dependencies(param_to_tmp):
avg_to_param = [tf.assign(param, avg)
for param, avg in safe_zip(var_list, avg_params)]
with tf.control_dependencies(avg_to_param):
tmp_to_avg = [tf.assign(avg, tmp)
for avg, tmp in safe_zip(avg_params, tmp_params)]
swap = tmp_to_avg
batch_size = args.batch_size
assert batch_size % num_devices == 0
device_batch_size = batch_size // num_devices
if init_all:
sess.run(tf.global_variables_initializer())
else:
initialize_uninitialized_global_variables(sess)
for epoch in xrange(args.nb_epochs):
if dataset_train is not None:
nb_batches = int(math.ceil(float(dataset_size) / batch_size))
else:
# Indices to shuffle training set
index_shuf = list(range(len(x_train)))
# Randomly repeat a few training examples each epoch to avoid
# having a too-small batch
while len(index_shuf) % batch_size != 0:
index_shuf.append(rng.randint(len(x_train)))
nb_batches = len(index_shuf) // batch_size
rng.shuffle(index_shuf)
# Shuffling here versus inside the loop doesn't seem to affect
# timing very much, but shuffling here makes the code slightly
# easier to read
x_train_shuffled = x_train[index_shuf]
y_train_shuffled = y_train[index_shuf]
prev = time.time()
for batch in range(nb_batches):
if dataset_train is not None:
x_train_shuffled, y_train_shuffled = sess.run(data_iterator)
start, end = 0, batch_size
else:
# Compute batch start and end indices
start = batch * batch_size
end = (batch + 1) * batch_size
# Perform one training step
diff = end - start
assert diff == batch_size
feed_dict = {epoch_tf: epoch, batch_tf: batch}
for dev_idx in xrange(num_devices):
cur_start = start + dev_idx * device_batch_size
cur_end = start + (dev_idx + 1) * device_batch_size
feed_dict[xs[dev_idx]] = x_train_shuffled[cur_start:cur_end]
feed_dict[ys[dev_idx]] = y_train_shuffled[cur_start:cur_end]
if cur_end != end and dataset_train is None:
msg = ("batch_size (%d) must be a multiple of num_devices "
"(%d).\nCUDA_VISIBLE_DEVICES: %s"
"\ndevices: %s")
args = (batch_size, num_devices,
os.environ['CUDA_VISIBLE_DEVICES'],
str(devices))
raise ValueError(msg % args)
if feed is not None:
feed_dict.update(feed)
_, loss_numpy = sess.run(
[train_step, loss_value], feed_dict=feed_dict)
if np.abs(loss_numpy) > loss_threshold:
raise ValueError("Extreme loss during training: ", loss_numpy)
if np.isnan(loss_numpy) or np.isinf(loss_numpy):
raise ValueError("NaN/Inf loss during training")
assert (dataset_train is not None or
end == len(index_shuf)) # Check that all examples were used
cur = time.time()
_logger.info("Epoch " + str(epoch) + " took " +
str(cur - prev) + " seconds")
if evaluate is not None:
if use_ema:
# Before running evaluation, load the running average
# parameters into the live slot, so we can see how well
# the EMA parameters are performing
sess.run(swap)
evaluate()
if use_ema:
# Swap the parameters back, so that we continue training
# on the live parameters
sess.run(swap)
if use_ema:
# When training is done, swap the running average parameters into
# the live slot, so that we use them when we deploy the model
sess.run(swap)
return True
def avg_grads(tower_grads):
"""Calculate the average gradient for each shared variable across all
towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gradients. The inner list is over the gradient
calculation for each tower.
Returns:
List of pairs of (gradient, variable) where the gradient has been
averaged across all towers.
Modified from this tutorial: https://tinyurl.com/n3jr2vm
"""
if len(tower_grads) == 1:
return tower_grads[0]
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = [g for g, _ in grad_and_vars]
# Average over the 'tower' dimension.
grad = tf.add_n(grads) / len(grads)
# Keep in mind that the Variables are redundant because they are shared
# across towers. So .. we will just return the first tower's pointer to
# the Variable.
v = grad_and_vars[0][1]
assert all(v is grad_and_var[1] for grad_and_var in grad_and_vars)
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
| 40.087097 | 79 | 0.692042 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import os
import time
import warnings
import math
import numpy as np
from six.moves import xrange
import tensorflow as tf
from cleverhans import canary
from cleverhans.utils import _ArgsWrapper, create_logger
from cleverhans.utils import safe_zip
from cleverhans.utils_tf import infer_devices
from cleverhans.utils_tf import initialize_uninitialized_global_variables
_logger = create_logger("train")
_logger.setLevel(logging.INFO)
def train(sess, loss, x_train, y_train,
init_all=False, evaluate=None, feed=None, args=None,
rng=None, var_list=None, fprop_args=None, optimizer=None,
devices=None, x_batch_preprocessor=None, use_ema=False,
ema_decay=.998, run_canary=None,
loss_threshold=1e5, dataset_train=None, dataset_size=None):
canary.run_canary()
if run_canary is not None:
warnings.warn("The `run_canary` argument is deprecated. The canary "
"is now much cheaper and thus runs all the time. The "
"canary now uses its own loss function so it is not "
"necessary to turn off the canary when training with "
" a stochastic loss. Simply quit passing `run_canary`."
"Passing `run_canary` may become an error on or after "
"2019-10-16.")
args = _ArgsWrapper(args or {})
fprop_args = fprop_args or {}
if args.nb_epochs is None:
raise ValueError("`args` must specify number of epochs")
if optimizer is None:
if args.learning_rate is None:
raise ValueError("Learning rate was not given in args dict")
assert args.batch_size, "Batch size was not given in args dict"
if rng is None:
rng = np.random.RandomState()
if optimizer is None:
optimizer = tf.train.AdamOptimizer(learning_rate=args.learning_rate)
else:
if not isinstance(optimizer, tf.train.Optimizer):
raise ValueError("optimizer object must be from a child class of "
"tf.train.Optimizer")
grads = []
xs = []
preprocessed_xs = []
ys = []
if dataset_train is not None:
assert x_train is None and y_train is None and x_batch_preprocessor is None
if dataset_size is None:
raise ValueError("You must provide a dataset size")
data_iterator = dataset_train.make_one_shot_iterator().get_next()
x_train, y_train = sess.run(data_iterator)
devices = infer_devices(devices)
for device in devices:
with tf.device(device):
x = tf.placeholder(x_train.dtype, (None,) + x_train.shape[1:])
y = tf.placeholder(y_train.dtype, (None,) + y_train.shape[1:])
xs.append(x)
ys.append(y)
if x_batch_preprocessor is not None:
x = x_batch_preprocessor(x)
preprocessed_xs.append(x)
loss_value = loss.fprop(x, y, **fprop_args)
grads.append(optimizer.compute_gradients(
loss_value, var_list=var_list))
num_devices = len(devices)
print("num_devices: ", num_devices)
grad = avg_grads(grads)
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
train_step = optimizer.apply_gradients(grad)
epoch_tf = tf.placeholder(tf.int32, [])
batch_tf = tf.placeholder(tf.int32, [])
if use_ema:
if callable(ema_decay):
ema_decay = ema_decay(epoch_tf, batch_tf)
ema = tf.train.ExponentialMovingAverage(decay=ema_decay)
with tf.control_dependencies([train_step]):
train_step = ema.apply(var_list)
avg_params = [ema.average(param) for param in var_list]
# Make temporary buffers used for swapping the live and running average
# parameters
tmp_params = [tf.Variable(param, trainable=False)
for param in var_list]
# Define the swapping operation
param_to_tmp = [tf.assign(tmp, param)
for tmp, param in safe_zip(tmp_params, var_list)]
with tf.control_dependencies(param_to_tmp):
avg_to_param = [tf.assign(param, avg)
for param, avg in safe_zip(var_list, avg_params)]
with tf.control_dependencies(avg_to_param):
tmp_to_avg = [tf.assign(avg, tmp)
for avg, tmp in safe_zip(avg_params, tmp_params)]
swap = tmp_to_avg
batch_size = args.batch_size
assert batch_size % num_devices == 0
device_batch_size = batch_size // num_devices
if init_all:
sess.run(tf.global_variables_initializer())
else:
initialize_uninitialized_global_variables(sess)
for epoch in xrange(args.nb_epochs):
if dataset_train is not None:
nb_batches = int(math.ceil(float(dataset_size) / batch_size))
else:
# Indices to shuffle training set
index_shuf = list(range(len(x_train)))
# Randomly repeat a few training examples each epoch to avoid
# having a too-small batch
while len(index_shuf) % batch_size != 0:
index_shuf.append(rng.randint(len(x_train)))
nb_batches = len(index_shuf) // batch_size
rng.shuffle(index_shuf)
# Shuffling here versus inside the loop doesn't seem to affect
x_train_shuffled = x_train[index_shuf]
y_train_shuffled = y_train[index_shuf]
prev = time.time()
for batch in range(nb_batches):
if dataset_train is not None:
x_train_shuffled, y_train_shuffled = sess.run(data_iterator)
start, end = 0, batch_size
else:
start = batch * batch_size
end = (batch + 1) * batch_size
diff = end - start
assert diff == batch_size
feed_dict = {epoch_tf: epoch, batch_tf: batch}
for dev_idx in xrange(num_devices):
cur_start = start + dev_idx * device_batch_size
cur_end = start + (dev_idx + 1) * device_batch_size
feed_dict[xs[dev_idx]] = x_train_shuffled[cur_start:cur_end]
feed_dict[ys[dev_idx]] = y_train_shuffled[cur_start:cur_end]
if cur_end != end and dataset_train is None:
msg = ("batch_size (%d) must be a multiple of num_devices "
"(%d).\nCUDA_VISIBLE_DEVICES: %s"
"\ndevices: %s")
args = (batch_size, num_devices,
os.environ['CUDA_VISIBLE_DEVICES'],
str(devices))
raise ValueError(msg % args)
if feed is not None:
feed_dict.update(feed)
_, loss_numpy = sess.run(
[train_step, loss_value], feed_dict=feed_dict)
if np.abs(loss_numpy) > loss_threshold:
raise ValueError("Extreme loss during training: ", loss_numpy)
if np.isnan(loss_numpy) or np.isinf(loss_numpy):
raise ValueError("NaN/Inf loss during training")
assert (dataset_train is not None or
end == len(index_shuf))
cur = time.time()
_logger.info("Epoch " + str(epoch) + " took " +
str(cur - prev) + " seconds")
if evaluate is not None:
if use_ema:
sess.run(swap)
evaluate()
if use_ema:
sess.run(swap)
if use_ema:
sess.run(swap)
return True
def avg_grads(tower_grads):
if len(tower_grads) == 1:
return tower_grads[0]
average_grads = []
for grad_and_vars in zip(*tower_grads):
grads = [g for g, _ in grad_and_vars]
grad = tf.add_n(grads) / len(grads)
# the Variable.
v = grad_and_vars[0][1]
assert all(v is grad_and_var[1] for grad_and_var in grad_and_vars)
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
| true | true |
790000c95e646d16c85155649652ef660ef0c928 | 64,381 | py | Python | python/ccxt/base/exchange.py | tssujt/ccxt | 95a8befe3540043bac408b36794342b0a9e724cd | [
"MIT"
] | null | null | null | python/ccxt/base/exchange.py | tssujt/ccxt | 95a8befe3540043bac408b36794342b0a9e724cd | [
"MIT"
] | null | null | null | python/ccxt/base/exchange.py | tssujt/ccxt | 95a8befe3540043bac408b36794342b0a9e724cd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Base exchange class"""
# -----------------------------------------------------------------------------
__version__ = '1.17.322'
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import NetworkError
from ccxt.base.errors import NotSupported
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidAddress
# -----------------------------------------------------------------------------
from ccxt.base.decimal_to_precision import decimal_to_precision
from ccxt.base.decimal_to_precision import DECIMAL_PLACES, TRUNCATE, ROUND
# -----------------------------------------------------------------------------
__all__ = [
'Exchange',
]
# -----------------------------------------------------------------------------
# Python 2 & 3
import logging
import base64
import calendar
import collections
import datetime
from email.utils import parsedate
import functools
import gzip
import hashlib
import hmac
import io
import json
import math
from numbers import Number
import re
from requests import Session
from requests.utils import default_user_agent
from requests.exceptions import HTTPError, Timeout, TooManyRedirects, RequestException
# import socket
from ssl import SSLError
# import sys
import time
import uuid
import zlib
from decimal import Decimal
# -----------------------------------------------------------------------------
try:
basestring # basestring was removed in python 3.0
except NameError:
basestring = str
# -----------------------------------------------------------------------------
try:
import urllib.parse as _urlencode # Python 3
except ImportError:
import urllib as _urlencode # Python 2
# -----------------------------------------------------------------------------
# web3/0x imports
try:
# from web3.auto import w3
from web3 import Web3, HTTPProvider
from web3.utils.encoding import hex_encode_abi_type
except ImportError:
Web3 = HTTPProvider = None # web3/0x not supported in Python 2
# -----------------------------------------------------------------------------
class Exchange(object):
"""Base exchange class"""
id = None
version = None
certified = False
# rate limiter settings
enableRateLimit = False
rateLimit = 2000 # milliseconds = seconds * 1000
timeout = 10000 # milliseconds = seconds * 1000
asyncio_loop = None
aiohttp_proxy = None
aiohttp_trust_env = False
session = None # Session () by default
logger = None # logging.getLogger(__name__) by default
userAgent = None
userAgents = {
'chrome': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
'chrome39': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
}
verbose = False
markets = None
symbols = None
fees = {
'trading': {
'fee_loaded': False,
'percentage': True, # subclasses should rarely have to redefine this
},
'funding': {
'fee_loaded': False,
'withdraw': {},
'deposit': {},
},
}
ids = None
tickers = None
api = None
parseJsonResponse = True
proxy = ''
origin = '*' # CORS origin
proxies = None
hostname = None # in case of inaccessibility of the "main" domain
apiKey = ''
secret = ''
password = ''
uid = ''
privateKey = '' # a "0x"-prefixed hexstring private key for a wallet
walletAddress = '' # the wallet address "0x"-prefixed hexstring
twofa = False
marketsById = None
markets_by_id = None
currencies_by_id = None
precision = None
limits = None
exceptions = None
httpExceptions = {
'422': ExchangeError,
'418': DDoSProtection,
'429': DDoSProtection,
'404': ExchangeNotAvailable,
'409': ExchangeNotAvailable,
'500': ExchangeNotAvailable,
'501': ExchangeNotAvailable,
'502': ExchangeNotAvailable,
'520': ExchangeNotAvailable,
'521': ExchangeNotAvailable,
'522': ExchangeNotAvailable,
'525': ExchangeNotAvailable,
'400': ExchangeNotAvailable,
'403': ExchangeNotAvailable,
'405': ExchangeNotAvailable,
'503': ExchangeNotAvailable,
'530': ExchangeNotAvailable,
'408': RequestTimeout,
'504': RequestTimeout,
'401': AuthenticationError,
'511': AuthenticationError,
}
headers = None
balance = None
orderbooks = None
orders = None
trades = None
transactions = None
currencies = None
options = None # Python does not allow to define properties in run-time with setattr
requiredCredentials = {
'apiKey': True,
'secret': True,
'uid': False,
'login': False,
'password': False,
'twofa': False, # 2-factor authentication (one-time password key)
'privateKey': False, # a "0x"-prefixed hexstring private key for a wallet
'walletAddress': False, # the wallet address "0x"-prefixed hexstring
}
# API method metainfo
has = {
'publicAPI': True,
'privateAPI': True,
'CORS': False,
'cancelOrder': True,
'cancelOrders': False,
'createDepositAddress': False,
'createOrder': True,
'createMarketOrder': True,
'createLimitOrder': True,
'deposit': False,
'editOrder': 'emulated',
'fetchBalance': True,
'fetchClosedOrders': False,
'fetchCurrencies': False,
'fetchDepositAddress': False,
'fetchDeposits': False,
'fetchFundingFees': False,
'fetchL2OrderBook': True,
'fetchMarkets': True,
'fetchMyTrades': False,
'fetchOHLCV': 'emulated',
'fetchOpenOrders': False,
'fetchOrder': False,
'fetchOrderBook': True,
'fetchOrderBooks': False,
'fetchOrders': False,
'fetchTicker': True,
'fetchTickers': False,
'fetchTrades': True,
'fetchTradingFees': False,
'fetchTradingLimits': False,
'fetchTransactions': False,
'fetchWithdrawals': False,
'withdraw': False,
}
precisionMode = DECIMAL_PLACES
minFundingAddressLength = 1 # used in check_address
substituteCommonCurrencyCodes = True
lastRestRequestTimestamp = 0
lastRestPollTimestamp = 0
restRequestQueue = None
restPollerLoopIsRunning = False
rateLimitTokens = 16
rateLimitMaxTokens = 16
rateLimitUpdateTime = 0
enableLastHttpResponse = True
enableLastJsonResponse = True
enableLastResponseHeaders = True
last_http_response = None
last_json_response = None
last_response_headers = None
web3 = None
commonCurrencies = {
'XBT': 'BTC',
'BCC': 'BCH',
'DRK': 'DASH',
}
    def __init__(self, config={}):
        """Initialize the exchange instance.

        Merges `config` over the defaults from describe(), builds the REST API
        method table, registers camelCase aliases for snake_case attributes,
        and prepares the HTTP session, logger, token bucket and (optionally)
        a web3 provider.

        NOTE(review): the mutable default `config={}` is shared across calls;
        kept as-is since the signature is part of the public interface.
        """
        # replace the None class-level placeholders with per-instance dicts
        self.precision = dict() if self.precision is None else self.precision
        self.limits = dict() if self.limits is None else self.limits
        self.exceptions = dict() if self.exceptions is None else self.exceptions
        self.headers = dict() if self.headers is None else self.headers
        self.balance = dict() if self.balance is None else self.balance
        self.orderbooks = dict() if self.orderbooks is None else self.orderbooks
        self.orders = dict() if self.orders is None else self.orders
        self.trades = dict() if self.trades is None else self.trades
        self.transactions = dict() if self.transactions is None else self.transactions
        self.currencies = dict() if self.currencies is None else self.currencies
        self.options = dict() if self.options is None else self.options  # Python does not allow to define properties in run-time with setattr
        self.decimalToPrecision = self.decimal_to_precision = decimal_to_precision
        # version = '.'.join(map(str, sys.version_info[:3]))
        # self.userAgent = {
        #     'User-Agent': 'ccxt/' + __version__ + ' (+https://github.com/ccxt/ccxt) Python/' + version
        # }
        self.userAgent = default_user_agent()
        # dict-valued settings are deep-merged, everything else is overwritten
        settings = self.deep_extend(self.describe(), config)
        for key in settings:
            if hasattr(self, key) and isinstance(getattr(self, key), dict):
                setattr(self, key, self.deep_extend(getattr(self, key), settings[key]))
            else:
                setattr(self, key, settings[key])
        if self.api:
            self.define_rest_api(self.api, 'request')
        if self.markets:
            self.set_markets(self.markets)
        # convert all properties from underscore notation foo_bar to camelcase notation fooBar
        for name in dir(self):
            if name[0] != '_'and name[-1] != '_' and '_' in name:
                parts = name.split('_')
                camelcase = parts[0] + ''.join(self.capitalize(i) for i in parts[1:])
                setattr(self, camelcase, getattr(self, name))
        # token-bucket parameters derived from rateLimit, overridable via config
        self.tokenBucket = self.extend({
            'refillRate': 1.0 / self.rateLimit,
            'delay': 1.0,
            'capacity': 1.0,
            'defaultCost': 1.0,
        }, getattr(self, 'tokenBucket') if hasattr(self, 'tokenBucket') else {})
        self.session = self.session if self.session else Session()
        self.logger = self.logger if self.logger else logging.getLogger(__name__)
        if Web3 and not self.web3:
            # self.web3 = w3 if w3 else Web3(HTTPProvider())
            self.web3 = Web3(HTTPProvider())
def __del__(self):
if self.session:
self.session.close()
def describe(self):
return {}
    def define_rest_api(self, api, method_name, options={}):
        """Generate one bound method per API endpoint described in `api`.

        `api` maps api_type -> http_method -> [url, ...]; for every url two
        aliases are attached to the instance, e.g. 'publicGetTickerSymbol'
        (camelCase) and 'public_get_ticker_symbol' (snake_case), both partials
        of `method_name` (normally self.request) with url/type/verb bound.
        """
        # split endpoint paths on any non-alphanumeric char ('/', '{', '}', ...)
        delimiters = re.compile('[^a-zA-Z0-9]')
        for api_type, methods in api.items():
            for http_method, urls in methods.items():
                for url in urls:
                    url = url.strip()
                    split_path = delimiters.split(url)
                    uppercase_method = http_method.upper()
                    lowercase_method = http_method.lower()
                    camelcase_method = lowercase_method.capitalize()
                    camelcase_suffix = ''.join([Exchange.capitalize(x) for x in split_path])
                    lowercase_path = [x.strip().lower() for x in split_path]
                    underscore_suffix = '_'.join([k for k in lowercase_path if len(k)])
                    # the outer capitalize() is a no-op here: camelcase_suffix
                    # already starts with an uppercase letter
                    camelcase = api_type + camelcase_method + Exchange.capitalize(camelcase_suffix)
                    underscore = api_type + '_' + lowercase_method + '_' + underscore_suffix.lower()
                    if 'suffixes' in options:
                        if 'camelcase' in options['suffixes']:
                            camelcase += options['suffixes']['camelcase']
                        if 'underscore' in options['suffixes']:
                            underscore += options['suffixes']['underscore']
                    partial = functools.partial(getattr(self, method_name), url, api_type, uppercase_method)
                    setattr(self, camelcase, partial)
                    setattr(self, underscore, partial)
def raise_error(self, exception_type, url=None, method=None, error=None, details=None):
if error:
error = str(error)
output = ' '.join([self.id] + [var for var in (url, method, error, details) if var is not None])
raise exception_type(output)
    def throttle(self):
        """Sleep just long enough to keep at least self.rateLimit milliseconds
        between consecutive REST requests."""
        now = float(self.milliseconds())
        elapsed = now - self.lastRestRequestTimestamp
        if elapsed < self.rateLimit:
            delay = self.rateLimit - elapsed
            # rateLimit is expressed in ms, time.sleep() takes seconds
            time.sleep(delay / 1000.0)
    def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Throttle, sign and dispatch one API request.

        sign() (implemented per exchange) turns the logical endpoint into a
        concrete url/method/headers/body dict which is passed to fetch().
        """
        if self.enableRateLimit:
            self.throttle()
        self.lastRestRequestTimestamp = self.milliseconds()
        request = self.sign(path, api, method, params, headers, body)
        return self.fetch(request['url'], request['method'], request['headers'], request['body'])
    def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Entry point bound by define_rest_api(); delegates to fetch2()."""
        return self.fetch2(path, api, method, params, headers, body)
@staticmethod
def gzip_deflate(response, text):
encoding = response.info().get('Content-Encoding')
if encoding in ('gzip', 'x-gzip', 'deflate'):
if encoding == 'deflate':
return zlib.decompress(text, -zlib.MAX_WBITS)
else:
return gzip.GzipFile('', 'rb', 9, io.BytesIO(text)).read()
return text
def find_broadly_matched_key(self, broad, string):
"""A helper method for matching error strings exactly vs broadly"""
keys = list(broad.keys())
for i in range(0, len(keys)):
key = keys[i]
if string.find(key) >= 0:
return key
return None
    def handle_errors(self, code, reason, url, method, headers, body):
        """Hook for subclasses to translate exchange-specific error payloads
        into ccxt exceptions; the base implementation does nothing."""
        pass
def prepare_request_headers(self, headers=None):
headers = headers or {}
headers.update(self.headers)
if self.userAgent:
if type(self.userAgent) is str:
headers.update({'User-Agent': self.userAgent})
elif (type(self.userAgent) is dict) and ('User-Agent' in self.userAgent):
headers.update(self.userAgent)
if self.proxy:
headers.update({'Origin': self.origin})
headers.update({'Accept-Encoding': 'gzip, deflate'})
return headers
    def fetch(self, url, method='GET', headers=None, body=None):
        """Perform an HTTP request and return the decoded JSON data.

        Network failures are translated into ccxt exceptions: timeouts ->
        RequestTimeout, redirects/SSL -> ExchangeError, connection resets ->
        NetworkError; HTTP status errors go through handle_errors() /
        handle_rest_errors() first. The raw body and headers of the last
        response are stashed on the instance for debugging when enabled.
        """
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            print("\nRequest:", method, url, request_headers, body)
        self.logger.debug("%s %s, Request: %s %s", method, url, request_headers, body)
        if body:
            body = body.encode()
        # stateless requests: drop any cookies from previous responses
        self.session.cookies.clear()
        response = None
        http_response = None
        try:
            response = self.session.request(
                method,
                url,
                data=body,
                headers=request_headers,
                timeout=int(self.timeout / 1000),  # self.timeout is in ms
                proxies=self.proxies
            )
            http_response = response.text
            if self.enableLastHttpResponse:
                self.last_http_response = http_response
            headers = response.headers
            if self.enableLastResponseHeaders:
                self.last_response_headers = headers
            if self.verbose:
                print("\nResponse:", method, url, str(response.status_code), str(headers), http_response)
            self.logger.debug("%s %s, Response: %s %s %s", method, url, response.status_code, headers, http_response)
            # raises HTTPError for 4xx/5xx, handled below
            response.raise_for_status()
        except Timeout as e:
            self.raise_error(RequestTimeout, method, url, e)
        except TooManyRedirects as e:
            self.raise_error(ExchangeError, url, method, e)
        except SSLError as e:
            self.raise_error(ExchangeError, url, method, e)
        except HTTPError as e:
            # give subclasses a chance to raise something specific, then map
            # the status code, then fall back to a generic ExchangeError
            self.handle_errors(response.status_code, response.reason, url, method, headers, http_response)
            self.handle_rest_errors(e, response.status_code, http_response, url, method)
            self.raise_error(ExchangeError, url, method, e, http_response)
        except RequestException as e:  # base exception class
            error_string = str(e)
            if ('ECONNRESET' in error_string) or ('Connection aborted.' in error_string):
                self.raise_error(NetworkError, url, method, e)
            else:
                self.raise_error(ExchangeError, url, method, e)
        # success path: let subclasses inspect the body, then decode it
        self.handle_errors(response.status_code, response.reason, url, method, None, http_response)
        return self.handle_rest_response(http_response, url, method, headers, body)
def handle_rest_errors(self, exception, http_status_code, response, url, method='GET'):
error = None
string_code = str(http_status_code)
if string_code in self.httpExceptions:
error = self.httpExceptions[string_code]
if error == ExchangeNotAvailable:
if re.search('(cloudflare|incapsula|overload|ddos)', response, flags=re.IGNORECASE):
error = DDoSProtection
if error:
self.raise_error(error, url, method, exception if exception else http_status_code, response)
    def handle_rest_response(self, response, url, method='GET', headers=None, body=None):
        """Decode the raw HTTP body; on JSON failure, scan the text for known
        DDoS-protection / maintenance markers to raise a specific exception."""
        try:
            if self.parseJsonResponse:
                # NOTE(review): bodies of length 0 or 1 yield None instead of
                # being parsed -- a one-character body like "1" is valid JSON
                # but is silently dropped by this length check
                json_response = json.loads(response) if len(response) > 1 else None
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                return json_response
            else:
                return response
        except ValueError as e:  # ValueError == JsonDecodeError
            ddos_protection = re.search('(cloudflare|incapsula|overload|ddos)', response, flags=re.IGNORECASE)
            exchange_not_available = re.search('(offline|busy|retry|wait|unavailable|maintain|maintenance|maintenancing)', response, flags=re.IGNORECASE)
            if ddos_protection:
                self.raise_error(DDoSProtection, method, url, None, response)
            if exchange_not_available:
                message = response + ' exchange downtime, exchange closed for maintenance or offline, DDoS protection or rate-limiting in effect'
                self.raise_error(ExchangeNotAvailable, method, url, None, message)
            self.raise_error(ExchangeError, method, url, e, response)
@staticmethod
def safe_float(dictionary, key, default_value=None):
value = default_value
try:
if isinstance(dictionary, list) and isinstance(key, int) and len(dictionary) > key:
value = float(dictionary[key])
else:
value = float(dictionary[key]) if (key is not None) and (key in dictionary) and (dictionary[key] is not None) else default_value
except ValueError as e:
value = default_value
return value
@staticmethod
def safe_string(dictionary, key, default_value=None):
return str(dictionary[key]) if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
@staticmethod
def safe_integer(dictionary, key, default_value=None):
if key is None or (key not in dictionary):
return default_value
value = dictionary[key]
if isinstance(value, Number) or (isinstance(value, basestring) and value.isnumeric()):
return int(value)
return default_value
@staticmethod
def safe_value(dictionary, key, default_value=None):
return dictionary[key] if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
    # we're not using safe_floats with a list argument as we're trying to save some cycles here
    # we're not using safe_float_3 either because those cases are too rare to deserve their own optimization
    @staticmethod
    def safe_float_2(dictionary, key1, key2, default_value=None):
        """safe_float() trying key1 first, then key2."""
        return Exchange.safe_either(Exchange.safe_float, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_string_2(dictionary, key1, key2, default_value=None):
        """safe_string() trying key1 first, then key2."""
        return Exchange.safe_either(Exchange.safe_string, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_integer_2(dictionary, key1, key2, default_value=None):
        """safe_integer() trying key1 first, then key2."""
        return Exchange.safe_either(Exchange.safe_integer, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_value_2(dictionary, key1, key2, default_value=None):
        """safe_value() trying key1 first, then key2."""
        return Exchange.safe_either(Exchange.safe_value, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_either(method, dictionary, key1, key2, default_value=None):
        """A helper-wrapper for the safe_value_2() family."""
        value = method(dictionary, key1)
        return value if value is not None else method(dictionary, key2, default_value)
    @staticmethod
    def truncate(num, precision=0):
        """Deprecated, use decimal_to_precision instead.

        Truncates (never rounds) `num` to `precision` decimal places; with
        precision 0 the result is an int.
        """
        if precision > 0:
            decimal_precision = math.pow(10, precision)
            return math.trunc(num * decimal_precision) / decimal_precision
        return int(Exchange.truncate_to_string(num, precision))
    @staticmethod
    def truncate_to_string(num, precision=0):
        """Deprecated, todo: remove references from subclasses.

        Like truncate() but returns a string, keeping at least one decimal
        digit (trailing zeros stripped) when precision > 0.
        """
        if precision > 0:
            # go through Decimal to avoid float repr artifacts in the digits
            parts = ('{0:.%df}' % precision).format(Decimal(num)).split('.')
            decimal_digits = parts[1][:precision].rstrip('0')
            decimal_digits = decimal_digits if len(decimal_digits) else '0'
            return parts[0] + '.' + decimal_digits
        return ('%d' % num)
    @staticmethod
    def uuid():
        """Return a random RFC 4122 version-4 UUID as a string."""
        return str(uuid.uuid4())
@staticmethod
def capitalize(string): # first character only, rest characters unchanged
# the native pythonic .capitalize() method lowercases all other characters
# which is an unwanted behaviour, therefore we use this custom implementation
# check it yourself: print('foobar'.capitalize(), 'fooBar'.capitalize())
if len(string) > 1:
return "%s%s" % (string[0].upper(), string[1:])
return string.upper()
@staticmethod
def keysort(dictionary):
return collections.OrderedDict(sorted(dictionary.items(), key=lambda t: t[0]))
@staticmethod
def extend(*args):
if args is not None:
result = None
if type(args[0]) is collections.OrderedDict:
result = collections.OrderedDict()
else:
result = {}
for arg in args:
result.update(arg)
return result
return {}
@staticmethod
def deep_extend(*args):
result = None
for arg in args:
if isinstance(arg, dict):
if not isinstance(result, dict):
result = {}
for key in arg:
result[key] = Exchange.deep_extend(result[key] if key in result else None, arg[key])
else:
result = arg
return result
@staticmethod
def filter_by(array, key, value=None):
if value:
grouped = Exchange.group_by(array, key)
if value in grouped:
return grouped[value]
return []
return array
@staticmethod
def filterBy(self, array, key, value=None):
return Exchange.filter_by(array, key, value)
@staticmethod
def group_by(array, key):
result = {}
array = Exchange.to_array(array)
array = [entry for entry in array if (key in entry) and (entry[key] is not None)]
for entry in array:
if entry[key] not in result:
result[entry[key]] = []
result[entry[key]].append(entry)
return result
    @staticmethod
    def groupBy(array, key):
        """camelCase alias of group_by()."""
        return Exchange.group_by(array, key)
@staticmethod
def index_by(array, key):
result = {}
if type(array) is dict:
array = Exchange.keysort(array).values()
for element in array:
if (key in element) and (element[key] is not None):
k = element[key]
result[k] = element
return result
@staticmethod
def sort_by(array, key, descending=False):
return sorted(array, key=lambda k: k[key] if k[key] is not None else "", reverse=descending)
    @staticmethod
    def array_concat(a, b):
        """Concatenate two lists."""
        return a + b
    @staticmethod
    def in_array(needle, haystack):
        """Membership test, mirrored from the JavaScript implementation."""
        return needle in haystack
    @staticmethod
    def is_empty(object):
        """Truthiness-based emptiness check ([], {}, '', 0, None are empty)."""
        return not object
    @staticmethod
    def extract_params(string):
        """Return the {placeholder} names embedded in an endpoint path."""
        return re.findall(r'{([\w-]+)}', string)
@staticmethod
def implode_params(string, params):
for key in params:
string = string.replace('{' + key + '}', str(params[key]))
return string
    @staticmethod
    def url(path, params={}):
        """Substitute path placeholders from `params` and append the leftover
        params as a query string."""
        result = Exchange.implode_params(path, params)
        query = Exchange.omit(params, Exchange.extract_params(path))
        if query:
            result += '?' + _urlencode.urlencode(query)
        return result
    @staticmethod
    def urlencode(params={}):
        """URL-encode a (possibly ordered) dict; non-dicts pass through as-is."""
        if (type(params) is dict) or isinstance(params, collections.OrderedDict):
            return _urlencode.urlencode(params)
        return params
    @staticmethod
    def rawencode(params={}):
        """urlencode() without percent-escaping (quoting is undone)."""
        return _urlencode.unquote(Exchange.urlencode(params))
    @staticmethod
    def encode_uri_component(uri):
        """Percent-encode `uri` like JavaScript's encodeURIComponent."""
        return _urlencode.quote(uri, safe="~()*!.'")
@staticmethod
def omit(d, *args):
result = d.copy()
for arg in args:
if type(arg) is list:
for key in arg:
if key in result:
del result[key]
else:
if arg in result:
del result[arg]
return result
    @staticmethod
    def unique(array):
        """Deduplicate `array`; note that set() does NOT preserve order."""
        return list(set(array))
    @staticmethod
    def pluck(array, key):
        """Collect entry[key] from each entry that has a non-None value."""
        return [
            element[key]
            for element in array
            if (key in element) and (element[key] is not None)
        ]
    @staticmethod
    def sum(*args):
        """Sum the numeric arguments, silently ignoring non-numeric ones
        (shadows the builtin sum within the class namespace)."""
        return sum([arg for arg in args if isinstance(arg, (float, int))])
    @staticmethod
    def ordered(array):
        """Wrap key/value pairs (or a mapping) in an OrderedDict."""
        return collections.OrderedDict(array)
@staticmethod
def aggregate(bidasks):
ordered = Exchange.ordered({})
for [price, volume] in bidasks:
if volume > 0:
ordered[price] = (ordered[price] if price in ordered else 0) + volume
result = []
items = list(ordered.items())
for price, volume in items:
result.append([price, volume])
return result
    @staticmethod
    def sec():
        """Alias of seconds()."""
        return Exchange.seconds()
    @staticmethod
    def msec():
        """Alias of milliseconds()."""
        return Exchange.milliseconds()
    @staticmethod
    def usec():
        """Alias of microseconds()."""
        return Exchange.microseconds()
    @staticmethod
    def seconds():
        """Current Unix time in whole seconds."""
        return int(time.time())
    @staticmethod
    def milliseconds():
        """Current Unix time in whole milliseconds."""
        return int(time.time() * 1000)
    @staticmethod
    def microseconds():
        """Current Unix time in whole microseconds."""
        return int(time.time() * 1000000)
    @staticmethod
    def iso8601(timestamp=None):
        """Format a millisecond Unix timestamp as 'YYYY-MM-DDTHH:MM:SS.mmmZ'.

        Returns None for non-int or negative input; a None input is passed
        through unchanged.
        """
        if timestamp is None:
            return timestamp
        if not isinstance(timestamp, int):
            return None
        if int(timestamp) < 0:
            return None
        try:
            utc = datetime.datetime.utcfromtimestamp(timestamp // 1000)
            # strftime yields 6-digit microseconds; drop them ([:-6]) and
            # splice in the true millisecond remainder instead
            return utc.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-6] + "{:03d}".format(int(timestamp) % 1000) + 'Z'
        except (TypeError, OverflowError, OSError):
            return None
    @staticmethod
    def dmy(timestamp, infix='-'):
        """Format a millisecond timestamp as 'MM-DD-YYYY' (UTC)."""
        utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
        return utc_datetime.strftime('%m' + infix + '%d' + infix + '%Y')
    @staticmethod
    def ymd(timestamp, infix='-'):
        """Format a millisecond timestamp as 'YYYY-MM-DD' (UTC)."""
        utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
        return utc_datetime.strftime('%Y' + infix + '%m' + infix + '%d')
    @staticmethod
    def ymdhms(timestamp, infix=' '):
        """Format a millisecond timestamp as 'YYYY-MM-DD HH:MM:SS' (UTC)."""
        utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
        return utc_datetime.strftime('%Y-%m-%d' + infix + '%H:%M:%S')
    @staticmethod
    def parse_date(timestamp=None):
        """Parse a datetime string into a millisecond Unix timestamp.

        RFC 2822 strings containing 'GMT' go through email.utils parsing;
        everything else is delegated to parse8601(). Returns None for
        non-string or unparsable input.
        """
        if timestamp is None:
            return timestamp
        if not isinstance(timestamp, str):
            return None
        if 'GMT' in timestamp:
            try:
                string = ''.join([str(value) for value in parsedate(timestamp)[:6]]) + '.000Z'
                dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
                return calendar.timegm(dt.utctimetuple()) * 1000
            except (TypeError, OverflowError, OSError):
                return None
        else:
            return Exchange.parse8601(timestamp)
    @staticmethod
    def parse8601(timestamp=None):
        """Parse an ISO 8601 datetime string into a millisecond Unix timestamp.

        Accepts optional separators, fractional seconds up to 3 digits and a
        'Z' or '+/-HH:MM' timezone offset; returns None on any parse failure.
        """
        if timestamp is None:
            return timestamp
        yyyy = '([0-9]{4})-?'
        mm = '([0-9]{2})-?'
        dd = '([0-9]{2})(?:T|[\\s])?'
        h = '([0-9]{2}):?'
        m = '([0-9]{2}):?'
        s = '([0-9]{2})'
        ms = '(\\.[0-9]{1,3})?'
        tz = '(?:(\\+|\\-)([0-9]{2})\\:?([0-9]{2})|Z)?'
        regex = r'' + yyyy + mm + dd + h + m + s + ms + tz
        try:
            match = re.search(regex, timestamp, re.IGNORECASE)
            if match is None:
                return None
            yyyy, mm, dd, h, m, s, ms, sign, hours, minutes = match.groups()
            ms = ms or '.000'
            msint = int(ms[1:])
            sign = sign or ''
            # '+1' or '-1' multiplier applied to the timezone offset below
            sign = int(sign + '1')
            hours = int(hours or 0) * sign
            minutes = int(minutes or 0) * sign
            offset = datetime.timedelta(hours=hours, minutes=minutes)
            string = yyyy + mm + dd + h + m + s + ms + 'Z'
            dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
            dt = dt + offset
            return calendar.timegm(dt.utctimetuple()) * 1000 + msint
        except (TypeError, OverflowError, OSError, ValueError):
            return None
@staticmethod
def hash(request, algorithm='md5', digest='hex'):
h = hashlib.new(algorithm, request)
if digest == 'hex':
return h.hexdigest()
elif digest == 'base64':
return base64.b64encode(h.digest())
return h.digest()
    @staticmethod
    def hmac(request, secret, algorithm=hashlib.sha256, digest='hex'):
        """HMAC-sign `request` (bytes) with `secret` (bytes).

        :param digest: 'hex' -> hex str, 'base64' -> base64 bytes, else raw bytes
        """
        # `hmac` below resolves to the stdlib module, not this method: class
        # attributes are not in scope inside the function body
        h = hmac.new(secret, request, algorithm)
        if digest == 'hex':
            return h.hexdigest()
        elif digest == 'base64':
            return base64.b64encode(h.digest())
        return h.digest()
    @staticmethod
    def binary_concat(*args):
        """Concatenate any number of bytes objects."""
        result = bytes()
        for arg in args:
            result = result + arg
        return result
    @staticmethod
    def binary_to_string(s):
        """Decode bytes as ASCII text."""
        return s.decode('ascii')
@staticmethod
def base64urlencode(s):
return Exchange.decode(base64.urlsafe_b64encode(s)).replace('=', '')
    @staticmethod
    def jwt(request, secret, algorithm=hashlib.sha256, alg='HS256'):
        """Build a signed JSON Web Token: base64url(header).base64url(payload)
        with an HMAC signature over the two joined segments.

        :param request: the JWT payload (claims) as a dict
        :param secret: the HMAC key as a str
        :param alg: the value advertised in the JWT header (must match `algorithm`)
        """
        header = Exchange.encode(Exchange.json({
            'alg': alg,
            'typ': 'JWT',
        }))
        encodedHeader = Exchange.base64urlencode(header)
        encodedData = Exchange.base64urlencode(Exchange.encode(Exchange.json(request)))
        token = encodedHeader + '.' + encodedData
        hmac = Exchange.hmac(Exchange.encode(token), Exchange.encode(secret), algorithm, 'binary')
        signature = Exchange.base64urlencode(hmac)
        return token + '.' + signature
    @staticmethod
    def unjson(input):
        """Decode a JSON string."""
        return json.loads(input)
    @staticmethod
    def json(data, params=None):
        """Encode `data` as compact JSON (no spaces); `params` is unused."""
        return json.dumps(data, separators=(',', ':'))
    @staticmethod
    def parse_if_json_encoded_object(input):
        """Decode `input` only when it looks like a JSON object/array."""
        return json.loads(input) if Exchange.is_json_encoded_object(input) else input
    @staticmethod
    def is_json_encoded_object(input):
        """Cheap check: a string of length >= 2 starting with '{' or '['."""
        return (isinstance(input, basestring) and
                (len(input) >= 2) and
                ((input[0] == '{') or (input[0] == '[')))
    @staticmethod
    def encode(string):
        """str -> bytes (default UTF-8)."""
        return string.encode()
    @staticmethod
    def decode(string):
        """bytes -> str (default UTF-8)."""
        return string.decode()
    @staticmethod
    def to_array(value):
        """Return a dict's values as a list; other inputs pass through."""
        return list(value.values()) if type(value) is dict else value
    def nonce(self):
        """Default request nonce: Unix time in seconds (subclasses override)."""
        return Exchange.seconds()
def check_required_credentials(self):
keys = list(self.requiredCredentials.keys())
for key in keys:
if self.requiredCredentials[key] and not getattr(self, key):
self.raise_error(AuthenticationError, details='requires `' + key + '`')
    def check_address(self, address):
        """Checks an address is not the same character repeated or an empty sequence.

        Raises InvalidAddress when the address is None, shorter than
        self.minFundingAddressLength, contains a space, or consists of a
        single repeated character; returns the address otherwise.
        """
        if address is None:
            self.raise_error(InvalidAddress, details='address is None')
        # all(...) over an empty string is True, but '' is also caught by the
        # length check, so empty addresses are rejected either way
        if all(letter == address[0] for letter in address) or len(address) < self.minFundingAddressLength or ' ' in address:
            self.raise_error(InvalidAddress, details='address is invalid or has less than ' + str(self.minFundingAddressLength) + ' characters: "' + str(address) + '"')
        return address
def account(self):
return {
'free': 0.0,
'used': 0.0,
'total': 0.0,
}
    def common_currency_code(self, currency):
        """Map an exchange-specific currency code to the unified ccxt code
        (e.g. XBT -> BTC) unless substitution is disabled."""
        if not self.substituteCommonCurrencyCodes:
            return currency
        return self.safe_string(self.commonCurrencies, currency, currency)
    def currency_id(self, commonCode):
        """Inverse of common_currency_code(): resolve a unified code back to
        the exchange-specific id, preferring self.currencies when loaded."""
        if self.currencies:
            if commonCode in self.currencies:
                return self.currencies[commonCode]['id']
        # fall back to reversing the commonCurrencies substitution table
        currencyIds = {v: k for k, v in self.commonCurrencies.items()}
        return self.safe_string(currencyIds, commonCode, commonCode)
    def fromWei(self, amount, unit='ether'):
        """Convert a wei amount to `unit` (default ether) as a float via web3;
        None passes through. Requires the Web3 package."""
        if Web3 is None:
            self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
        if amount is None:
            return amount
        return float(Web3.fromWei(int(amount), unit))
    def toWei(self, amount, unit='ether'):
        """Convert an amount in `unit` to wei as a string via web3; None passes
        through. Requires the Web3 package.

        NOTE(review): int(amount) truncates fractional input before the
        conversion, so e.g. 0.5 ether becomes 0 -- confirm whether callers
        ever pass non-integer amounts before relying on this.
        """
        if Web3 is None:
            self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
        if amount is None:
            return amount
        return str(Web3.toWei(int(amount), unit))
def precision_from_string(self, string):
parts = re.sub(r'0+$', '', string).split('.')
return len(parts[1]) if len(parts) > 1 else 0
    def cost_to_precision(self, symbol, cost):
        """Round `cost` to the market's price precision."""
        return self.decimal_to_precision(cost, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
    def price_to_precision(self, symbol, price):
        """Round `price` to the market's price precision."""
        return self.decimal_to_precision(price, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
    def amount_to_precision(self, symbol, amount):
        """Truncate (never round up) `amount` to the market's amount precision."""
        return self.decimal_to_precision(amount, TRUNCATE, self.markets[symbol]['precision']['amount'], self.precisionMode)
    def fee_to_precision(self, symbol, fee):
        """Round `fee` to the market's price precision."""
        return self.decimal_to_precision(fee, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
    def currency_to_precision(self, currency, fee):
        """Round `fee` to the currency's own precision."""
        return self.decimal_to_precision(fee, ROUND, self.currencies[currency]['precision'], self.precisionMode)
    def set_markets(self, markets, currencies=None):
        """Install `markets` on the instance and derive the lookup tables.

        Each market is overlaid with the exchange's default trading fees,
        precision and limits, then indexed by symbol and by id. When no
        `currencies` dict is supplied, base/quote currencies are derived from
        the markets themselves.

        :returns: self.markets (indexed by symbol)
        """
        values = list(markets.values()) if type(markets) is dict else markets
        for i in range(0, len(values)):
            values[i] = self.extend(
                self.fees['trading'],
                {'precision': self.precision, 'limits': self.limits},
                values[i]
            )
        self.markets = self.index_by(values, 'symbol')
        self.markets_by_id = self.index_by(values, 'id')
        self.marketsById = self.markets_by_id
        self.symbols = sorted(list(self.markets.keys()))
        self.ids = sorted(list(self.markets_by_id.keys()))
        if currencies:
            self.currencies = self.deep_extend(currencies, self.currencies)
        else:
            # derive currency entries from each side of every market; the
            # precision falls back from base/quote to amount/price to 8 digits
            base_currencies = [{
                'id': market['baseId'] if 'baseId' in market else market['base'],
                'numericId': market['baseNumericId'] if 'baseNumericId' in market else None,
                'code': market['base'],
                'precision': (
                    market['precision']['base'] if 'base' in market['precision'] else (
                        market['precision']['amount'] if 'amount' in market['precision'] else None
                    )
                ) if 'precision' in market else 8,
            } for market in values if 'base' in market]
            quote_currencies = [{
                'id': market['quoteId'] if 'quoteId' in market else market['quote'],
                'numericId': market['quoteNumericId'] if 'quoteNumericId' in market else None,
                'code': market['quote'],
                'precision': (
                    market['precision']['quote'] if 'quote' in market['precision'] else (
                        market['precision']['price'] if 'price' in market['precision'] else None
                    )
                ) if 'precision' in market else 8,
            } for market in values if 'quote' in market]
            currencies = self.sort_by(base_currencies + quote_currencies, 'code')
            self.currencies = self.deep_extend(self.index_by(currencies, 'code'), self.currencies)
        self.currencies_by_id = self.index_by(list(self.currencies.values()), 'id')
        return self.markets
    def load_markets(self, reload=False):
        """Return self.markets, fetching and indexing them on first use (or
        when `reload` is True); currencies are fetched too when supported."""
        if not reload:
            if self.markets:
                # markets were set statically but never indexed -> index now
                if not self.markets_by_id:
                    return self.set_markets(self.markets)
                return self.markets
        markets = self.fetch_markets()
        currencies = None
        if self.has['fetchCurrencies']:
            currencies = self.fetch_currencies()
        return self.set_markets(markets, currencies)
    def populate_fees(self):
        """Copy per-currency withdrawal fees from self.currencies into
        self.fees['funding']['withdraw'] and mark them loaded.

        NOTE(review): the guard uses `or`, so it only returns early when BOTH
        attributes are missing; confirm whether `and` was intended before
        changing -- callers may rely on the current behavior.
        """
        if not (hasattr(self, 'markets') or hasattr(self, 'currencies')):
            return
        for currency, data in self.currencies.items():  # try load withdrawal fees from currencies
            if 'fee' in data and data['fee'] is not None:
                self.fees['funding']['withdraw'][currency] = data['fee']
                self.fees['funding']['fee_loaded'] = True
        # find a way to populate trading fees from markets
    def load_fees(self):
        """Load markets, populate static withdrawal fees, then merge whatever
        fees the exchange can report dynamically via fetch_fees()."""
        self.load_markets()
        self.populate_fees()
        if not (self.has['fetchTradingFees'] or self.has['fetchFundingFees']):
            return self.fees
        fetched_fees = self.fetch_fees()
        if fetched_fees['funding']:
            self.fees['funding']['fee_loaded'] = True
        if fetched_fees['trading']:
            self.fees['trading']['fee_loaded'] = True
        self.fees = self.deep_extend(self.fees, fetched_fees)
        return self.fees
    def fetch_markets(self):
        """Return the statically-configured markets as a list; subclasses
        override this to query the exchange."""
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.to_array(self.markets)
    def fetch_currencies(self, params={}):
        """Return the statically-configured currencies dict; subclasses
        override this to query the exchange."""
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.currencies
    def fetch_fees(self):
        """Fetch trading and funding fees, tolerating exchanges that support
        neither: AuthenticationError and a missing method (AttributeError)
        each leave the corresponding dict empty."""
        trading = {}
        funding = {}
        try:
            trading = self.fetch_trading_fees()
        except AuthenticationError:
            pass
        except AttributeError:
            pass
        try:
            funding = self.fetch_funding_fees()
        except AuthenticationError:
            pass
        except AttributeError:
            pass
        return {
            'trading': trading,
            'funding': funding,
        }
    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Abstract: place an order; must be implemented by the subclass."""
        self.raise_error(NotSupported, details='create_order() not implemented yet')
    def cancel_order(self, id, symbol=None, params={}):
        """Abstract: cancel an order; must be implemented by the subclass."""
        self.raise_error(NotSupported, details='cancel_order() not implemented yet')
    def fetch_bids_asks(self, symbols=None, params={}):
        """Abstract: fetch best bid/ask for many symbols in one call."""
        self.raise_error(NotSupported, details='API does not allow to fetch all prices at once with a single call to fetch_bids_asks() for now')
    def fetch_tickers(self, symbols=None, params={}):
        """Abstract: fetch tickers for many symbols in one call."""
        self.raise_error(NotSupported, details='API does not allow to fetch all tickers at once with a single call to fetch_tickers() for now')
    def fetch_order_status(self, id, market=None):
        """Return only the 'status' field of the order fetched by id.

        NOTE(review): `market` is accepted but not forwarded to fetch_order().
        """
        order = self.fetch_order(id)
        return order['status']
def purge_cached_orders(self, before):
orders = self.to_array(self.orders)
orders = [order for order in orders if (order['status'] == 'open') or (order['timestamp'] >= before)]
self.orders = self.index_by(orders, 'id')
return self.orders
    def fetch_order(self, id, symbol=None, params={}):
        """Abstract: fetch one order by id."""
        self.raise_error(NotSupported, details='fetch_order() is not implemented yet')
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch all orders."""
        self.raise_error(NotSupported, details='fetch_orders() is not implemented yet')
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch open orders."""
        self.raise_error(NotSupported, details='fetch_open_orders() is not implemented yet')
    def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch closed orders."""
        self.raise_error(NotSupported, details='fetch_closed_orders() is not implemented yet')
    def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch the account's own trades."""
        self.raise_error(NotSupported, details='fetch_my_trades() is not implemented yet')
    def fetch_order_trades(self, id, symbol=None, params={}):
        """Abstract: fetch the trades belonging to one order."""
        self.raise_error(NotSupported, details='fetch_order_trades() is not implemented yet')
    def fetch_transactions(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch deposits and withdrawals."""
        self.raise_error(NotSupported, details='fetch_transactions() is not implemented yet')
    def fetch_deposits(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch deposits."""
        self.raise_error(NotSupported, details='fetch_deposits() is not implemented yet')
    def fetch_withdrawals(self, symbol=None, since=None, limit=None, params={}):
        """Abstract: fetch withdrawals."""
        self.raise_error(NotSupported, details='fetch_withdrawals() is not implemented yet')
    def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
        """Normalize one raw candle; list inputs are truncated to the first
        six fields [timestamp, open, high, low, close, volume]."""
        return ohlcv[0:6] if isinstance(ohlcv, list) else ohlcv
    def parse_ohlcvs(self, ohlcvs, market=None, timeframe='1m', since=None, limit=None):
        """Parse raw candles, drop those older than `since`, cap the result
        at `limit` entries and return them sorted by timestamp."""
        ohlcvs = self.to_array(ohlcvs)
        num_ohlcvs = len(ohlcvs)
        result = []
        i = 0
        while i < num_ohlcvs:
            if limit and (len(result) >= limit):
                break
            ohlcv = self.parse_ohlcv(ohlcvs[i], market, timeframe, since, limit)
            i = i + 1
            if since and (ohlcv[0] < since):
                continue
            result.append(ohlcv)
        return self.sort_by(result, 0)
def parse_bid_ask(self, bidask, price_key=0, amount_key=0):
return [float(bidask[price_key]), float(bidask[amount_key])]
    def parse_bids_asks(self, bidasks, price_key=0, amount_key=1):
        """Parse a raw list of bid/ask rows into [[price, amount], ...].

        Supports list-shaped and dict-shaped rows (detected from the first
        element); rows with a falsy price or amount are skipped.
        """
        result = []
        if len(bidasks):
            if type(bidasks[0]) is list:
                for bidask in bidasks:
                    if bidask[price_key] and bidask[amount_key]:
                        result.append(self.parse_bid_ask(bidask, price_key, amount_key))
            elif type(bidasks[0]) is dict:
                for bidask in bidasks:
                    if (price_key in bidask) and (amount_key in bidask) and (bidask[price_key] and bidask[amount_key]):
                        result.append(self.parse_bid_ask(bidask, price_key, amount_key))
            else:
                self.raise_error(ExchangeError, details='unrecognized bidask format: ' + str(bidasks[0]))
        return result
    def fetch_l2_order_book(self, symbol, limit=None, params={}):
        """Fetch the order book and aggregate volumes per price level
        (level 2), bids descending and asks ascending by price."""
        orderbook = self.fetch_order_book(symbol, limit, params)
        return self.extend(orderbook, {
            'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
            'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
        })
    def parse_order_book(self, orderbook, timestamp=None, bids_key='bids', asks_key='asks', price_key=0, amount_key=1):
        """Build the unified order-book structure from a raw payload; missing
        or non-list sides become empty lists."""
        return {
            'bids': self.sort_by(self.parse_bids_asks(orderbook[bids_key], price_key, amount_key) if (bids_key in orderbook) and isinstance(orderbook[bids_key], list) else [], 0, True),
            'asks': self.sort_by(self.parse_bids_asks(orderbook[asks_key], price_key, amount_key) if (asks_key in orderbook) and isinstance(orderbook[asks_key], list) else [], 0),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp) if timestamp is not None else None,
            'nonce': None,
        }
    def parse_balance(self, balance):
        """Derive the top-level 'free'/'used'/'total' views from the
        per-currency account dicts; mutates and returns `balance`."""
        # every key except 'info' is assumed to be a currency account dict
        currencies = self.omit(balance, 'info').keys()
        for account in ['free', 'used', 'total']:
            balance[account] = {}
            for currency in currencies:
                balance[account][currency] = balance[currency][account]
        return balance
    def fetch_partial_balance(self, part, params={}):
        """Fetch the full balance and return a single slice of it."""
        balance = self.fetch_balance(params)
        return balance[part]
    def fetch_free_balance(self, params={}):
        """Return the 'free' slice of the balance."""
        return self.fetch_partial_balance('free', params)
    def fetch_used_balance(self, params={}):
        """Return the 'used' slice of the balance."""
        return self.fetch_partial_balance('used', params)
    def fetch_total_balance(self, params={}):
        """Return the 'total' slice of the balance."""
        return self.fetch_partial_balance('total', params)
    def load_trading_limits(self, symbols=None, reload=False, params={}):
        """Merge per-symbol trading limits into self.markets, fetching at most
        once unless `reload` is set (tracked via options['limitsLoaded'])."""
        if self.has['fetchTradingLimits']:
            if reload or not('limitsLoaded' in list(self.options.keys())):
                response = self.fetch_trading_limits(symbols)
                for i in range(0, len(symbols)):
                    symbol = symbols[i]
                    self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
                self.options['limitsLoaded'] = self.milliseconds()
        return self.markets
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Emulate OHLCV candles by aggregating public trades (build_ohlcv);
        raises NotSupported when the exchange cannot even fetch trades."""
        if not self.has['fetchTrades']:
            self.raise_error(NotSupported, details='fetch_ohlcv() not implemented yet')
        self.load_markets()
        trades = self.fetch_trades(symbol, since, limit, params)
        return self.build_ohlcv(trades, timeframe, since, limit)
    def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """camelCase alias of fetch_ohlcv()."""
        return self.fetch_ohlcv(symbol, timeframe, since, limit, params)
def convert_trading_view_to_ohlcv(self, ohlcvs):
result = []
for i in range(0, len(ohlcvs['t'])):
result.append([
ohlcvs['t'][i] * 1000,
ohlcvs['o'][i],
ohlcvs['h'][i],
ohlcvs['l'][i],
ohlcvs['c'][i],
ohlcvs['v'][i],
])
return result
def convert_ohlcv_to_trading_view(self, ohlcvs):
result = {
't': [],
'o': [],
'h': [],
'l': [],
'c': [],
'v': [],
}
for i in range(0, len(ohlcvs)):
result['t'].append(int(ohlcvs[i][0] / 1000))
result['o'].append(ohlcvs[i][1])
result['h'].append(ohlcvs[i][2])
result['l'].append(ohlcvs[i][3])
result['c'].append(ohlcvs[i][4])
result['v'].append(ohlcvs[i][5])
return result
    def build_ohlcv(self, trades, timeframe='1m', since=None, limit=None):
        """Aggregate unified trades (sorted by timestamp) into OHLCV candles.

        Candle rows are [opening-ms-timestamp, open, high, low, close, volume],
        bucketed on timeframe boundaries; trades older than `since` are skipped.
        NOTE(review): the loop runs to num_trades - 1, so the most recent trade
        is never folded into a candle - confirm whether that is intentional.
        """
        ms = self.parse_timeframe(timeframe) * 1000
        ohlcvs = []
        (high, low, close, volume) = (2, 3, 4, 5)  # column indices within a candle row
        num_trades = len(trades)
        oldest = (num_trades - 1) if limit is None else min(num_trades - 1, limit)
        for i in range(0, oldest):
            trade = trades[i]
            if (since is not None) and (trade['timestamp'] < since):
                continue
            opening_time = int(math.floor(trade['timestamp'] / ms) * ms)  # Shift the edge of the m/h/d (but not M)
            j = len(ohlcvs)
            if (j == 0) or opening_time >= ohlcvs[j - 1][0] + ms:
                # moved to a new timeframe -> create a new candle from opening trade
                ohlcvs.append([
                    opening_time,
                    trade['price'],
                    trade['price'],
                    trade['price'],
                    trade['price'],
                    trade['amount'],
                ])
            else:
                # still processing the same timeframe -> update opening trade
                ohlcvs[j - 1][high] = max(ohlcvs[j - 1][high], trade['price'])
                ohlcvs[j - 1][low] = min(ohlcvs[j - 1][low], trade['price'])
                ohlcvs[j - 1][close] = trade['price']
                ohlcvs[j - 1][volume] += trade['amount']
        return ohlcvs
def parse_timeframe(self, timeframe):
amount = int(timeframe[0:-1])
unit = timeframe[-1]
if 'y' in unit:
scale = 60 * 60 * 24 * 365
elif 'M' in unit:
scale = 60 * 60 * 24 * 30
elif 'w' in unit:
scale = 60 * 60 * 24 * 7
elif 'd' in unit:
scale = 60 * 60 * 24
elif 'h' in unit:
scale = 60 * 60
else:
scale = 60 # 1m by default
return amount * scale
    def parse_trades(self, trades, market=None, since=None, limit=None):
        """Parse raw trades into unified trades, sorted by timestamp and
        filtered by symbol/since/limit."""
        array = self.to_array(trades)
        array = [self.parse_trade(trade, market) for trade in array]
        array = self.sort_by(array, 'timestamp')
        symbol = market['symbol'] if market else None
        return self.filter_by_symbol_since_limit(array, symbol, since, limit)
    def parse_transactions(self, transactions, currency=None, since=None, limit=None):
        """Parse raw deposits/withdrawals into unified transactions, sorted by
        timestamp and filtered by currency/since/limit."""
        array = self.to_array(transactions)
        array = [self.parse_transaction(transaction, currency) for transaction in array]
        array = self.sort_by(array, 'timestamp')
        code = currency['code'] if currency else None
        return self.filter_by_currency_since_limit(array, code, since, limit)
    def parse_orders(self, orders, market=None, since=None, limit=None):
        """Parse raw orders into unified orders, sorted by timestamp and
        filtered by symbol/since/limit."""
        array = self.to_array(orders)
        array = [self.parse_order(order, market) for order in array]
        array = self.sort_by(array, 'timestamp')
        symbol = market['symbol'] if market else None
        return self.filter_by_symbol_since_limit(array, symbol, since, limit)
def filter_by_value_since_limit(self, array, field, value=None, since=None, limit=None):
array = self.to_array(array)
if value:
array = [entry for entry in array if entry[field] == value]
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
    def filter_by_symbol_since_limit(self, array, symbol=None, since=None, limit=None):
        """filter_by_value_since_limit on the 'symbol' field."""
        return self.filter_by_value_since_limit(array, 'symbol', symbol, since, limit)
    def filter_by_currency_since_limit(self, array, code=None, since=None, limit=None):
        """filter_by_value_since_limit on the 'currency' field."""
        return self.filter_by_value_since_limit(array, 'currency', code, since, limit)
def filter_by_since_limit(self, array, since=None, limit=None):
array = self.to_array(array)
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
def filter_by_symbol(self, array, symbol=None):
array = self.to_array(array)
if symbol:
return [entry for entry in array if entry['symbol'] == symbol]
return array
def filter_by_array(self, objects, key, values=None, indexed=True):
objects = self.to_array(objects)
# return all of them if no values were passed in
if values is None:
return self.index_by(objects, key) if indexed else objects
result = []
for i in range(0, len(objects)):
value = objects[i][key] if key in objects[i] else None
if value in values:
result.append(objects[i])
return self.index_by(result, key) if indexed else result
    def currency(self, code):
        """Return the unified currency structure for `code`; raises
        ExchangeError when currencies are not loaded or the code is unknown."""
        if not self.currencies:
            self.raise_error(ExchangeError, details='Currencies not loaded')
        if isinstance(code, basestring) and (code in self.currencies):
            return self.currencies[code]
        self.raise_error(ExchangeError, details='Does not have currency code ' + str(code))
    def find_market(self, string):
        """Look up a market by exchange-specific id first, then by unified
        symbol; returns the input unchanged when nothing matches or the input
        is not a string."""
        if not self.markets:
            self.raise_error(ExchangeError, details='Markets not loaded')
        if isinstance(string, basestring):
            if string in self.markets_by_id:
                return self.markets_by_id[string]
            if string in self.markets:
                return self.markets[string]
        return string
    def find_symbol(self, string, market=None):
        """Resolve a market id/symbol (or an explicit market dict) to the
        unified symbol; falls back to returning `string` unchanged."""
        if market is None:
            market = self.find_market(string)
        if isinstance(market, dict):
            return market['symbol']
        return string
    def market(self, symbol):
        """Return the unified market structure for `symbol`; raises
        ExchangeError when markets are not loaded or the symbol is unknown."""
        if not self.markets:
            self.raise_error(ExchangeError, details='Markets not loaded')
        if isinstance(symbol, basestring) and (symbol in self.markets):
            return self.markets[symbol]
        self.raise_error(ExchangeError, details='No market symbol ' + str(symbol))
    def market_ids(self, symbols):
        """Translate a list of unified symbols into exchange-specific market ids."""
        return [self.market_id(symbol) for symbol in symbols]
    def market_id(self, symbol):
        """Exchange-specific id for a unified symbol; passes the input through
        when market() does not yield a market dict."""
        market = self.market(symbol)
        return market['id'] if type(market) is dict else symbol
    def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
        """Compute the trading fee for an order.

        Returns {'rate', 'type', 'currency', 'cost'} where 'cost' is the fee
        amount expressed in the quote currency. The rate comes from the
        market's taker/maker setting; both the order cost and the fee are
        passed through the market's precision helpers.
        """
        market = self.markets[symbol]
        rate = market[takerOrMaker]
        # order cost (amount * price) in quote currency, at cost precision
        cost = float(self.cost_to_precision(symbol, amount * price))
        return {
            'rate': rate,
            'type': takerOrMaker,
            'currency': market['quote'],
            'cost': float(self.fee_to_precision(symbol, rate * cost)),
        }
    def edit_limit_buy_order(self, id, symbol, *args):
        """Replace an existing order with a new limit buy order."""
        return self.edit_limit_order(id, symbol, 'buy', *args)
    def edit_limit_sell_order(self, id, symbol, *args):
        """Replace an existing order with a new limit sell order."""
        return self.edit_limit_order(id, symbol, 'sell', *args)
    def edit_limit_order(self, id, symbol, *args):
        """Replace an existing order with a new limit order."""
        return self.edit_order(id, symbol, 'limit', *args)
    def edit_order(self, id, symbol, *args):
        """Emulated order edit: cancel the old order, then create a new one.
        Not atomic - requires enableRateLimit so the two calls get throttled."""
        if not self.enableRateLimit:
            self.raise_error(ExchangeError, details='edit_order() requires enableRateLimit = true')
        self.cancel_order(id, symbol)
        return self.create_order(symbol, *args)
    def create_limit_order(self, symbol, *args):
        """Shorthand for create_order(symbol, 'limit', ...)."""
        return self.create_order(symbol, 'limit', *args)
    def create_market_order(self, symbol, *args):
        """Shorthand for create_order(symbol, 'market', ...)."""
        return self.create_order(symbol, 'market', *args)
    def create_limit_buy_order(self, symbol, *args):
        """Shorthand for create_order(symbol, 'limit', 'buy', ...)."""
        return self.create_order(symbol, 'limit', 'buy', *args)
    def create_limit_sell_order(self, symbol, *args):
        """Shorthand for create_order(symbol, 'limit', 'sell', ...)."""
        return self.create_order(symbol, 'limit', 'sell', *args)
    def create_market_buy_order(self, symbol, amount, params={}):
        """Market buy; price is not applicable and is passed as None."""
        return self.create_order(symbol, 'market', 'buy', amount, None, params)
    def create_market_sell_order(self, symbol, amount, params={}):
        """Market sell; price is not applicable and is passed as None."""
        return self.create_order(symbol, 'market', 'sell', amount, None, params)
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the signed request dict ('url'/'method'/'headers'/'body') for
        an endpoint. Abstract: every exchange subclass must override this."""
        raise NotSupported(self.id + ' sign() pure method must be redefined in derived classes')
# -------------------------------------------------------------------------
# web3 / 0x methods
    def decryptAccountFromJSON(self, value, password):
        """Decrypt an Ethereum keystore given as a JSON string or as a dict."""
        return self.decryptAccount(json.loads(value) if isinstance(value, basestring) else value, password)
    def decryptAccount(self, key, password):
        """Decrypt an Ethereum keystore dict with the given password (via web3)."""
        return self.web3.eth.accounts.decrypt(key, password)
    def decryptAccountFromPrivateKey(self, privateKey):
        """Construct a web3 account object from a raw private key."""
        return self.web3.eth.accounts.privateKeyToAccount(privateKey)
    def soliditySha3(self, array):
        """Keccak-256 over the tightly packed values, each typed as
        'address' or 'uint256' (see solidityTypes); returns a hex string."""
        values = self.solidityValues(array)
        types = self.solidityTypes(values)
        return self.web3.soliditySha3(types, values).hex()
    def soliditySha256(self, values):
        """SHA-256 over the same tight ABI packing used by soliditySha3;
        returns a '0x'-prefixed hex string."""
        types = self.solidityTypes(values)
        solidity_values = self.solidityValues(values)
        # hex_encode_abi_type returns '0x...' per value; strip prefixes and concat
        encoded_values = [hex_encode_abi_type(abi_type, value)[2:] for abi_type, value in zip(types, solidity_values)]
        hex_string = '0x' + ''.join(encoded_values)
        return '0x' + self.hash(self.encode(self.web3.toText(hex_string)), 'sha256')
    def solidityTypes(self, array):
        """Map each value to 'address' (when web3 recognizes it) or 'uint256'."""
        return ['address' if self.web3.isAddress(value) else 'uint256' for value in array]
    def solidityValues(self, array):
        """Normalize values: checksum-cased addresses, ints for everything else."""
        return [self.web3.toChecksumAddress(value) if self.web3.isAddress(value) else int(value) for value in array]
def getZeroExOrderHash2(self, order):
return self.soliditySha3([
order['exchangeContractAddress'], # address
order['maker'], # address
order['taker'], # address
order['makerTokenAddress'], # address
order['takerTokenAddress'], # address
order['feeRecipient'], # address
order['makerTokenAmount'], # uint256
order['takerTokenAmount'], # uint256
order['makerFee'], # uint256
order['takerFee'], # uint256
order['expirationUnixTimestampSec'], # uint256
order['salt'], # uint256
])
def getZeroExOrderHash(self, order):
unpacked = [
self.web3.toChecksumAddress(order['exchangeContractAddress']), # { value: order.exchangeContractAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['maker']), # { value: order.maker, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['taker']), # { value: order.taker, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['makerTokenAddress']), # { value: order.makerTokenAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['takerTokenAddress']), # { value: order.takerTokenAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['feeRecipient']), # { value: order.feeRecipient, type: types_1.SolidityTypes.Address },
int(order['makerTokenAmount']), # { value: bigNumberToBN(order.makerTokenAmount), type: types_1.SolidityTypes.Uint256, },
int(order['takerTokenAmount']), # { value: bigNumberToBN(order.takerTokenAmount), type: types_1.SolidityTypes.Uint256, },
int(order['makerFee']), # { value: bigNumberToBN(order.makerFee), type: types_1.SolidityTypes.Uint256, },
int(order['takerFee']), # { value: bigNumberToBN(order.takerFee), type: types_1.SolidityTypes.Uint256, },
int(order['expirationUnixTimestampSec']), # { value: bigNumberToBN(order.expirationUnixTimestampSec), type: types_1.SolidityTypes.Uint256, },
int(order['salt']), # { value: bigNumberToBN(order.salt), type: types_1.SolidityTypes.Uint256 },
]
types = [
'address', # { value: order.exchangeContractAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.maker, type: types_1.SolidityTypes.Address },
'address', # { value: order.taker, type: types_1.SolidityTypes.Address },
'address', # { value: order.makerTokenAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.takerTokenAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.feeRecipient, type: types_1.SolidityTypes.Address },
'uint256', # { value: bigNumberToBN(order.makerTokenAmount), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.takerTokenAmount), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.makerFee), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.takerFee), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.expirationUnixTimestampSec), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.salt), type: types_1.SolidityTypes.Uint256 },
]
return self.web3.soliditySha3(types, unpacked).hex()
    def signZeroExOrder(self, order):
        """Sign a 0x order with self.privateKey and return the order extended
        with 'orderHash' and 'ecSignature'."""
        orderHash = self.getZeroExOrderHash(order)
        signature = self.signMessage(orderHash[-64:], self.privateKey)
        return self.extend(order, {
            'orderHash': orderHash,
            'ecSignature': signature,  # todo fix v if needed
        })
    def hashMessage(self, message):
        """Keccak hash of a hex-encoded message wrapped with the standard
        Ethereum signed-message prefix (\\x19Ethereum Signed Message:\\n<len>);
        returns the '0x'-prefixed hex digest."""
        message_bytes = bytes.fromhex(message)
        return self.web3.sha3(b"\x19Ethereum Signed Message:\n" + str(len(message_bytes)).encode() + message_bytes).hex()
    def signHash(self, hash, privateKey):
        """ECDSA-sign a 32-byte hash (hex, last 64 chars used) and return the
        signature split into its v/r/s components."""
        signature = self.web3.eth.account.signHash(hash[-64:], private_key=privateKey[-64:])
        return {
            'v': signature.v,  # integer
            'r': self.web3.toHex(signature.r),  # '0x'-prefixed hex string
            's': self.web3.toHex(signature.s),  # '0x'-prefixed hex string
        }
def signMessage(self, message, privateKey):
#
# The following comment is related to MetaMask, we use the upper type of signature prefix:
#
# z.ecSignOrderHashAsync ('0xcfdb0a485324ff37699b4c8557f6858f25916fc6fce5993b32fe018aea510b9f',
# '0x731fc101bbe102221c91c31ed0489f1ddfc439a3', {
# prefixType: 'ETH_SIGN',
# shouldAddPrefixBeforeCallingEthSign: true
# }).then ((e, r) => console.log (e,r))
#
# { ↓
# v: 28,
# r: "0xea7a68268b47c48d5d7a4c900e6f9af0015bf70951b3db2f1d835c5d544aaec2",
# s: "0x5d1db2a060c955c1fde4c967237b995c2361097405407b33c6046c8aeb3ccbdf"
# }
#
# --------------------------------------------------------------------
#
# z.ecSignOrderHashAsync ('0xcfdb0a485324ff37699b4c8557f6858f25916fc6fce5993b32fe018aea510b9f',
# '0x731fc101bbe102221c91c31ed0489f1ddfc439a3', {
# prefixType: 'NONE',
# shouldAddPrefixBeforeCallingEthSign: true
# }).then ((e, r) => console.log (e,r))
#
# { ↓
# v: 27,
# r: "0xc8c710022c57de4f529d448e9b40517dd9bfb49ff1eb245f5856664b865d14a6",
# s: "0x0740bb21f4f094fbbdbafa903bb8f057f82e0c6e4fe65d19a1daed4ed97cd394"
# }
#
message_hash = self.hashMessage(message)
signature = self.signHash(message_hash[-64:], privateKey[-64:])
return signature
| 40.16282 | 185 | 0.600177 |
__version__ = '1.17.322'
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import NetworkError
from ccxt.base.errors import NotSupported
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidAddress
from ccxt.base.decimal_to_precision import decimal_to_precision
from ccxt.base.decimal_to_precision import DECIMAL_PLACES, TRUNCATE, ROUND
__all__ = [
'Exchange',
]
import logging
import base64
import calendar
import collections
import datetime
from email.utils import parsedate
import functools
import gzip
import hashlib
import hmac
import io
import json
import math
from numbers import Number
import re
from requests import Session
from requests.utils import default_user_agent
from requests.exceptions import HTTPError, Timeout, TooManyRedirects, RequestException
from ssl import SSLError
import time
import uuid
import zlib
from decimal import Decimal
try:
basestring
except NameError:
basestring = str
try:
import urllib.parse as _urlencode
except ImportError:
import urllib as _urlencode
try:
from web3 import Web3, HTTPProvider
from web3.utils.encoding import hex_encode_abi_type
except ImportError:
Web3 = HTTPProvider = None
class Exchange(object):
id = None
version = None
certified = False
enableRateLimit = False
rateLimit = 2000
timeout = 10000
asyncio_loop = None
aiohttp_proxy = None
aiohttp_trust_env = False
session = None
logger = None
userAgent = None
userAgents = {
'chrome': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
'chrome39': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
}
verbose = False
markets = None
symbols = None
fees = {
'trading': {
'fee_loaded': False,
'percentage': True,
},
'funding': {
'fee_loaded': False,
'withdraw': {},
'deposit': {},
},
}
ids = None
tickers = None
api = None
parseJsonResponse = True
proxy = ''
origin = '*'
proxies = None
hostname = None
apiKey = ''
secret = ''
password = ''
uid = ''
privateKey = ''
walletAddress = ''
twofa = False
marketsById = None
markets_by_id = None
currencies_by_id = None
precision = None
limits = None
exceptions = None
httpExceptions = {
'422': ExchangeError,
'418': DDoSProtection,
'429': DDoSProtection,
'404': ExchangeNotAvailable,
'409': ExchangeNotAvailable,
'500': ExchangeNotAvailable,
'501': ExchangeNotAvailable,
'502': ExchangeNotAvailable,
'520': ExchangeNotAvailable,
'521': ExchangeNotAvailable,
'522': ExchangeNotAvailable,
'525': ExchangeNotAvailable,
'400': ExchangeNotAvailable,
'403': ExchangeNotAvailable,
'405': ExchangeNotAvailable,
'503': ExchangeNotAvailable,
'530': ExchangeNotAvailable,
'408': RequestTimeout,
'504': RequestTimeout,
'401': AuthenticationError,
'511': AuthenticationError,
}
headers = None
balance = None
orderbooks = None
orders = None
trades = None
transactions = None
currencies = None
options = None
requiredCredentials = {
'apiKey': True,
'secret': True,
'uid': False,
'login': False,
'password': False,
'twofa': False,
'privateKey': False,
'walletAddress': False,
}
has = {
'publicAPI': True,
'privateAPI': True,
'CORS': False,
'cancelOrder': True,
'cancelOrders': False,
'createDepositAddress': False,
'createOrder': True,
'createMarketOrder': True,
'createLimitOrder': True,
'deposit': False,
'editOrder': 'emulated',
'fetchBalance': True,
'fetchClosedOrders': False,
'fetchCurrencies': False,
'fetchDepositAddress': False,
'fetchDeposits': False,
'fetchFundingFees': False,
'fetchL2OrderBook': True,
'fetchMarkets': True,
'fetchMyTrades': False,
'fetchOHLCV': 'emulated',
'fetchOpenOrders': False,
'fetchOrder': False,
'fetchOrderBook': True,
'fetchOrderBooks': False,
'fetchOrders': False,
'fetchTicker': True,
'fetchTickers': False,
'fetchTrades': True,
'fetchTradingFees': False,
'fetchTradingLimits': False,
'fetchTransactions': False,
'fetchWithdrawals': False,
'withdraw': False,
}
precisionMode = DECIMAL_PLACES
minFundingAddressLength = 1
substituteCommonCurrencyCodes = True
lastRestRequestTimestamp = 0
lastRestPollTimestamp = 0
restRequestQueue = None
restPollerLoopIsRunning = False
rateLimitTokens = 16
rateLimitMaxTokens = 16
rateLimitUpdateTime = 0
enableLastHttpResponse = True
enableLastJsonResponse = True
enableLastResponseHeaders = True
last_http_response = None
last_json_response = None
last_response_headers = None
web3 = None
commonCurrencies = {
'XBT': 'BTC',
'BCC': 'BCH',
'DRK': 'DASH',
}
    def __init__(self, config={}):
        """Instantiate the exchange, merging `config` over describe() defaults.

        Initializes per-instance containers, dynamically generates the REST
        endpoint methods (define_rest_api), installs camelCase aliases for all
        snake_case attributes, and prepares the HTTP session, logger and
        token-bucket settings.
        """
        # class-level defaults are None so mutable state is never shared
        # between instances; materialize fresh dicts here
        self.precision = dict() if self.precision is None else self.precision
        self.limits = dict() if self.limits is None else self.limits
        self.exceptions = dict() if self.exceptions is None else self.exceptions
        self.headers = dict() if self.headers is None else self.headers
        self.balance = dict() if self.balance is None else self.balance
        self.orderbooks = dict() if self.orderbooks is None else self.orderbooks
        self.orders = dict() if self.orders is None else self.orders
        self.trades = dict() if self.trades is None else self.trades
        self.transactions = dict() if self.transactions is None else self.transactions
        self.currencies = dict() if self.currencies is None else self.currencies
        self.options = dict() if self.options is None else self.options
        self.decimalToPrecision = self.decimal_to_precision = decimal_to_precision
        self.userAgent = default_user_agent()
        settings = self.deep_extend(self.describe(), config)
        for key in settings:
            if hasattr(self, key) and isinstance(getattr(self, key), dict):
                # dict-valued settings merge into the defaults instead of replacing them
                setattr(self, key, self.deep_extend(getattr(self, key), settings[key]))
            else:
                setattr(self, key, settings[key])
        if self.api:
            self.define_rest_api(self.api, 'request')
        if self.markets:
            self.set_markets(self.markets)
        # install camelCase aliases: some_method -> someMethod
        for name in dir(self):
            if name[0] != '_'and name[-1] != '_' and '_' in name:
                parts = name.split('_')
                camelcase = parts[0] + ''.join(self.capitalize(i) for i in parts[1:])
                setattr(self, camelcase, getattr(self, name))
        self.tokenBucket = self.extend({
            'refillRate': 1.0 / self.rateLimit,
            'delay': 1.0,
            'capacity': 1.0,
            'defaultCost': 1.0,
        }, getattr(self, 'tokenBucket') if hasattr(self, 'tokenBucket') else {})
        self.session = self.session if self.session else Session()
        self.logger = self.logger if self.logger else logging.getLogger(__name__)
        if Web3 and not self.web3:
            self.web3 = Web3(HTTPProvider())
    def __del__(self):
        # best-effort cleanup of the underlying HTTP session
        if self.session:
            self.session.close()
    def describe(self):
        """Exchange metadata and defaults; subclasses override and deep-extend."""
        return {}
    def define_rest_api(self, api, method_name, options={}):
        """Generate one bound method per REST endpoint described in `api`.

        For each (api_type, http_method, url) a functools.partial of
        `method_name` (i.e. self.request) is attached to the instance under
        both a camelCase name (e.g. publicGetOrderBook) and a snake_case name
        (e.g. public_get_order_book). Optional suffixes from `options` are
        appended to the generated names.
        """
        delimiters = re.compile('[^a-zA-Z0-9]')
        for api_type, methods in api.items():
            for http_method, urls in methods.items():
                for url in urls:
                    url = url.strip()
                    split_path = delimiters.split(url)
                    uppercase_method = http_method.upper()
                    lowercase_method = http_method.lower()
                    camelcase_method = lowercase_method.capitalize()
                    camelcase_suffix = ''.join([Exchange.capitalize(x) for x in split_path])
                    lowercase_path = [x.strip().lower() for x in split_path]
                    underscore_suffix = '_'.join([k for k in lowercase_path if len(k)])
                    camelcase = api_type + camelcase_method + Exchange.capitalize(camelcase_suffix)
                    underscore = api_type + '_' + lowercase_method + '_' + underscore_suffix.lower()
                    if 'suffixes' in options:
                        if 'camelcase' in options['suffixes']:
                            camelcase += options['suffixes']['camelcase']
                        if 'underscore' in options['suffixes']:
                            underscore += options['suffixes']['underscore']
                    partial = functools.partial(getattr(self, method_name), url, api_type, uppercase_method)
                    setattr(self, camelcase, partial)
                    setattr(self, underscore, partial)
def raise_error(self, exception_type, url=None, method=None, error=None, details=None):
if error:
error = str(error)
output = ' '.join([self.id] + [var for var in (url, method, error, details) if var is not None])
raise exception_type(output)
    def throttle(self):
        """Sleep just long enough to keep at least rateLimit milliseconds
        between consecutive REST requests."""
        now = float(self.milliseconds())
        elapsed = now - self.lastRestRequestTimestamp
        if elapsed < self.rateLimit:
            delay = self.rateLimit - elapsed
            time.sleep(delay / 1000.0)
    def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Rate-limit (when enabled), sign and perform a single API request."""
        if self.enableRateLimit:
            self.throttle()
        self.lastRestRequestTimestamp = self.milliseconds()
        request = self.sign(path, api, method, params, headers, body)
        return self.fetch(request['url'], request['method'], request['headers'], request['body'])
    def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Entry point bound to the generated endpoint methods (see define_rest_api)."""
        return self.fetch2(path, api, method, params, headers, body)
    @staticmethod
    def gzip_deflate(response, text):
        """Decompress a raw HTTP body according to its Content-Encoding header;
        returns `text` unchanged when it is not gzip/deflate-encoded."""
        encoding = response.info().get('Content-Encoding')
        if encoding in ('gzip', 'x-gzip', 'deflate'):
            if encoding == 'deflate':
                # raw deflate stream, no zlib header
                return zlib.decompress(text, -zlib.MAX_WBITS)
            else:
                return gzip.GzipFile('', 'rb', 9, io.BytesIO(text)).read()
        return text
def find_broadly_matched_key(self, broad, string):
keys = list(broad.keys())
for i in range(0, len(keys)):
key = keys[i]
if string.find(key) >= 0:
return key
return None
    def handle_errors(self, code, reason, url, method, headers, body):
        """Hook for subclasses to map exchange-specific error payloads to
        unified exceptions; the base implementation does nothing."""
        pass
    def prepare_request_headers(self, headers=None):
        """Merge per-call headers with the instance defaults, the User-Agent,
        a proxy Origin header (when proxying), and gzip/deflate acceptance."""
        headers = headers or {}
        headers.update(self.headers)
        if self.userAgent:
            if type(self.userAgent) is str:
                headers.update({'User-Agent': self.userAgent})
            elif (type(self.userAgent) is dict) and ('User-Agent' in self.userAgent):
                headers.update(self.userAgent)
        if self.proxy:
            headers.update({'Origin': self.origin})
        headers.update({'Accept-Encoding': 'gzip, deflate'})
        return headers
    def fetch(self, url, method='GET', headers=None, body=None):
        """Perform one HTTP request and return the parsed response.

        Applies the proxy prefix, default headers and timeout, records the
        last raw response/headers (when enabled), and maps transport errors
        onto the unified exception hierarchy (handle_rest_errors /
        handle_errors) before parsing via handle_rest_response.
        """
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            print("\nRequest:", method, url, request_headers, body)
        self.logger.debug("%s %s, Request: %s %s", method, url, request_headers, body)
        if body:
            body = body.encode()
        self.session.cookies.clear()
        response = None
        http_response = None
        try:
            response = self.session.request(
                method,
                url,
                data=body,
                headers=request_headers,
                timeout=int(self.timeout / 1000),
                proxies=self.proxies
            )
            http_response = response.text
            if self.enableLastHttpResponse:
                self.last_http_response = http_response
            headers = response.headers
            if self.enableLastResponseHeaders:
                self.last_response_headers = headers
            if self.verbose:
                print("\nResponse:", method, url, str(response.status_code), str(headers), http_response)
            self.logger.debug("%s %s, Response: %s %s %s", method, url, response.status_code, headers, http_response)
            response.raise_for_status()
        except Timeout as e:
            # NOTE(review): argument order (method, url) differs from the other
            # branches below - harmless for the message, but inconsistent
            self.raise_error(RequestTimeout, method, url, e)
        except TooManyRedirects as e:
            self.raise_error(ExchangeError, url, method, e)
        except SSLError as e:
            self.raise_error(ExchangeError, url, method, e)
        except HTTPError as e:
            # give subclass-specific error mapping the first chance, then fall
            # back to the status-code table, then to a generic ExchangeError
            self.handle_errors(response.status_code, response.reason, url, method, headers, http_response)
            self.handle_rest_errors(e, response.status_code, http_response, url, method)
            self.raise_error(ExchangeError, url, method, e, http_response)
        except RequestException as e:  # base class of all requests errors
            error_string = str(e)
            if ('ECONNRESET' in error_string) or ('Connection aborted.' in error_string):
                self.raise_error(NetworkError, url, method, e)
            else:
                self.raise_error(ExchangeError, url, method, e)
        self.handle_errors(response.status_code, response.reason, url, method, None, http_response)
        return self.handle_rest_response(http_response, url, method, headers, body)
    def handle_rest_errors(self, exception, http_status_code, response, url, method='GET'):
        """Map an HTTP status code to a unified exception via httpExceptions
        and raise it; ExchangeNotAvailable responses whose body mentions a
        DDoS shield are upgraded to DDoSProtection. No-op for unmapped codes."""
        error = None
        string_code = str(http_status_code)
        if string_code in self.httpExceptions:
            error = self.httpExceptions[string_code]
            if error == ExchangeNotAvailable:
                if re.search('(cloudflare|incapsula|overload|ddos)', response, flags=re.IGNORECASE):
                    error = DDoSProtection
        if error:
            self.raise_error(error, url, method, exception if exception else http_status_code, response)
    def handle_rest_response(self, response, url, method='GET', headers=None, body=None):
        """Parse the raw HTTP body as JSON (when parseJsonResponse is enabled).

        On a JSON decode failure the body is sniffed for DDoS-shield or
        maintenance keywords so a more specific unified exception can be
        raised; otherwise a generic ExchangeError carries the raw body.
        """
        try:
            if self.parseJsonResponse:
                # an empty or single-character body is treated as no response
                json_response = json.loads(response) if len(response) > 1 else None
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                return json_response
            else:
                return response
        except ValueError as e:  # json.loads raises ValueError (JSONDecodeError)
            ddos_protection = re.search('(cloudflare|incapsula|overload|ddos)', response, flags=re.IGNORECASE)
            exchange_not_available = re.search('(offline|busy|retry|wait|unavailable|maintain|maintenance|maintenancing)', response, flags=re.IGNORECASE)
            if ddos_protection:
                self.raise_error(DDoSProtection, method, url, None, response)
            if exchange_not_available:
                message = response + ' exchange downtime, exchange closed for maintenance or offline, DDoS protection or rate-limiting in effect'
                self.raise_error(ExchangeNotAvailable, method, url, None, message)
            self.raise_error(ExchangeError, method, url, e, response)
@staticmethod
def safe_float(dictionary, key, default_value=None):
value = default_value
try:
if isinstance(dictionary, list) and isinstance(key, int) and len(dictionary) > key:
value = float(dictionary[key])
else:
value = float(dictionary[key]) if (key is not None) and (key in dictionary) and (dictionary[key] is not None) else default_value
except ValueError as e:
value = default_value
return value
    @staticmethod
    def safe_string(dictionary, key, default_value=None):
        """str() of dictionary[key], or `default_value` when missing/None."""
        return str(dictionary[key]) if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
    @staticmethod
    def safe_integer(dictionary, key, default_value=None):
        """int() of dictionary[key] for numbers and numeric strings.
        NOTE(review): str.isnumeric() rejects signs and decimal points, so
        string values like '-5' or '1.0' fall back to `default_value`."""
        if key is None or (key not in dictionary):
            return default_value
        value = dictionary[key]
        if isinstance(value, Number) or (isinstance(value, basestring) and value.isnumeric()):
            return int(value)
        return default_value
    @staticmethod
    def safe_value(dictionary, key, default_value=None):
        """dictionary[key] as-is, or `default_value` when missing/None."""
        return dictionary[key] if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
    @staticmethod
    def safe_float_2(dictionary, key1, key2, default_value=None):
        """safe_float of key1, falling back to key2, then to the default."""
        return Exchange.safe_either(Exchange.safe_float, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_string_2(dictionary, key1, key2, default_value=None):
        """safe_string of key1, falling back to key2, then to the default."""
        return Exchange.safe_either(Exchange.safe_string, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_integer_2(dictionary, key1, key2, default_value=None):
        """safe_integer of key1, falling back to key2, then to the default."""
        return Exchange.safe_either(Exchange.safe_integer, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_value_2(dictionary, key1, key2, default_value=None):
        """safe_value of key1, falling back to key2, then to the default."""
        return Exchange.safe_either(Exchange.safe_value, dictionary, key1, key2, default_value)
    @staticmethod
    def safe_either(method, dictionary, key1, key2, default_value=None):
        """Apply `method` to key1; when that yields None, try key2 with the default."""
        value = method(dictionary, key1)
        return value if value is not None else method(dictionary, key2, default_value)
    @staticmethod
    def truncate(num, precision=0):
        """Truncate (round toward zero) `num` to `precision` decimal places;
        with precision 0 the result is an int."""
        if precision > 0:
            decimal_precision = math.pow(10, precision)
            return math.trunc(num * decimal_precision) / decimal_precision
        return int(Exchange.truncate_to_string(num, precision))
    @staticmethod
    def truncate_to_string(num, precision=0):
        """Truncate `num` to a decimal string with at most `precision`
        fractional digits; trailing zeros are stripped but at least one digit
        is kept after the point."""
        if precision > 0:
            # format through Decimal to avoid float repr artifacts
            parts = ('{0:.%df}' % precision).format(Decimal(num)).split('.')
            decimal_digits = parts[1][:precision].rstrip('0')
            decimal_digits = decimal_digits if len(decimal_digits) else '0'
            return parts[0] + '.' + decimal_digits
        return ('%d' % num)
    @staticmethod
    def uuid():
        """Random UUID4 as a string (used for client order ids and the like)."""
        return str(uuid.uuid4())
    @staticmethod
    def capitalize(string):  # first character only, rest characters unchanged
        # the native pythonic .capitalize() method lowercases all other characters
        # which is an unwanted behaviour, therefore we use this custom implementation
        # check it yourself: print('foobar'.capitalize(), 'fooBar'.capitalize())
        if len(string) > 1:
            return "%s%s" % (string[0].upper(), string[1:])
        return string.upper()
    @staticmethod
    def keysort(dictionary):
        """Return an OrderedDict of `dictionary` sorted by key."""
        return collections.OrderedDict(sorted(dictionary.items(), key=lambda t: t[0]))
@staticmethod
def extend(*args):
if args is not None:
result = None
if type(args[0]) is collections.OrderedDict:
result = collections.OrderedDict()
else:
result = {}
for arg in args:
result.update(arg)
return result
return {}
    @staticmethod
    def deep_extend(*args):
        """Recursively merge mappings left-to-right into a fresh dict; any
        non-dict argument (or nested value) simply replaces what came before."""
        result = None
        for arg in args:
            if isinstance(arg, dict):
                if not isinstance(result, dict):
                    result = {}
                for key in arg:
                    result[key] = Exchange.deep_extend(result[key] if key in result else None, arg[key])
            else:
                result = arg
        return result
@staticmethod
def filter_by(array, key, value=None):
if value:
grouped = Exchange.group_by(array, key)
if value in grouped:
return grouped[value]
return []
return array
@staticmethod
def filterBy(self, array, key, value=None):
return Exchange.filter_by(array, key, value)
    @staticmethod
    def group_by(array, key):
        """Group entries by their `key` value into {value: [entries...]};
        entries missing the key (or holding None) are dropped."""
        result = {}
        array = Exchange.to_array(array)
        array = [entry for entry in array if (key in entry) and (entry[key] is not None)]
        for entry in array:
            if entry[key] not in result:
                result[entry[key]] = []
            result[entry[key]].append(entry)
        return result
    @staticmethod
    def groupBy(array, key):
        # camelCase alias
        return Exchange.group_by(array, key)
    @staticmethod
    def index_by(array, key):
        """Index elements by their `key` value (later elements overwrite
        earlier ones); a dict input is key-sorted first and its values used."""
        result = {}
        if type(array) is dict:
            array = Exchange.keysort(array).values()
        for element in array:
            if (key in element) and (element[key] is not None):
                k = element[key]
                result[k] = element
        return result
    @staticmethod
    def sort_by(array, key, descending=False):
        """Sort dict-entries by `key`; a None value sorts as the empty string."""
        return sorted(array, key=lambda k: k[key] if k[key] is not None else "", reverse=descending)
    @staticmethod
    def array_concat(a, b):
        """Concatenate two lists (transpiler-friendly helper)."""
        return a + b
    @staticmethod
    def in_array(needle, haystack):
        """Membership test (transpiler-friendly helper)."""
        return needle in haystack
    @staticmethod
    def is_empty(object):
        """Truthiness-based emptiness test (transpiler-friendly helper)."""
        return not object
    @staticmethod
    def extract_params(string):
        """List the {placeholder} names found in an endpoint path template."""
        return re.findall(r'{([\w-]+)}', string)
    @staticmethod
    def implode_params(string, params):
        """Substitute every {key} placeholder in `string` with str(params[key])."""
        for key in params:
            string = string.replace('{' + key + '}', str(params[key]))
        return string
    @staticmethod
    def url(path, params={}):
        """Interpolate path placeholders from `params` and append the leftover
        params as a query string."""
        result = Exchange.implode_params(path, params)
        query = Exchange.omit(params, Exchange.extract_params(path))
        if query:
            result += '?' + _urlencode.urlencode(query)
        return result
    @staticmethod
    def urlencode(params={}):
        """urlencode a dict; anything else is passed through unchanged."""
        if (type(params) is dict) or isinstance(params, collections.OrderedDict):
            return _urlencode.urlencode(params)
        return params
    @staticmethod
    def rawencode(params={}):
        """urlencode without percent-escaping (some APIs sign the raw form)."""
        return _urlencode.unquote(Exchange.urlencode(params))
    @staticmethod
    def encode_uri_component(uri):
        """JavaScript-style encodeURIComponent (keeps ~()*!.' unescaped)."""
        return _urlencode.quote(uri, safe="~()*!.'")
@staticmethod
def omit(d, *args):
result = d.copy()
for arg in args:
if type(arg) is list:
for key in arg:
if key in result:
del result[key]
else:
if arg in result:
del result[arg]
return result
@staticmethod
def unique(array):
return list(set(array))
@staticmethod
def pluck(array, key):
return [
element[key]
for element in array
if (key in element) and (element[key] is not None)
]
@staticmethod
def sum(*args):
return sum([arg for arg in args if isinstance(arg, (float, int))])
@staticmethod
def ordered(array):
return collections.OrderedDict(array)
@staticmethod
def aggregate(bidasks):
ordered = Exchange.ordered({})
for [price, volume] in bidasks:
if volume > 0:
ordered[price] = (ordered[price] if price in ordered else 0) + volume
result = []
items = list(ordered.items())
for price, volume in items:
result.append([price, volume])
return result
@staticmethod
def sec():
return Exchange.seconds()
@staticmethod
def msec():
return Exchange.milliseconds()
@staticmethod
def usec():
return Exchange.microseconds()
@staticmethod
def seconds():
return int(time.time())
@staticmethod
def milliseconds():
return int(time.time() * 1000)
@staticmethod
def microseconds():
return int(time.time() * 1000000)
@staticmethod
def iso8601(timestamp=None):
if timestamp is None:
return timestamp
if not isinstance(timestamp, int):
return None
if int(timestamp) < 0:
return None
try:
utc = datetime.datetime.utcfromtimestamp(timestamp // 1000)
return utc.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-6] + "{:03d}".format(int(timestamp) % 1000) + 'Z'
except (TypeError, OverflowError, OSError):
return None
@staticmethod
def dmy(timestamp, infix='-'):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%m' + infix + '%d' + infix + '%Y')
@staticmethod
def ymd(timestamp, infix='-'):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%Y' + infix + '%m' + infix + '%d')
@staticmethod
def ymdhms(timestamp, infix=' '):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%Y-%m-%d' + infix + '%H:%M:%S')
@staticmethod
def parse_date(timestamp=None):
if timestamp is None:
return timestamp
if not isinstance(timestamp, str):
return None
if 'GMT' in timestamp:
try:
string = ''.join([str(value) for value in parsedate(timestamp)[:6]]) + '.000Z'
dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
return calendar.timegm(dt.utctimetuple()) * 1000
except (TypeError, OverflowError, OSError):
return None
else:
return Exchange.parse8601(timestamp)
    @staticmethod
    def parse8601(timestamp=None):
        """Parse an ISO8601-like datetime string into a millisecond Unix
        timestamp; returns None for unparseable input, passes None through."""
        if timestamp is None:
            return timestamp
        # Regex fragments; the separators ('-', ':', 'T'/whitespace) are
        # all optional, so compact forms like '20180101120000' also match.
        yyyy = '([0-9]{4})-?'
        mm = '([0-9]{2})-?'
        dd = '([0-9]{2})(?:T|[\\s])?'
        h = '([0-9]{2}):?'
        m = '([0-9]{2}):?'
        s = '([0-9]{2})'
        ms = '(\\.[0-9]{1,3})?'
        tz = '(?:(\\+|\\-)([0-9]{2})\\:?([0-9]{2})|Z)?'
        regex = r'' + yyyy + mm + dd + h + m + s + ms + tz
        try:
            match = re.search(regex, timestamp, re.IGNORECASE)
            if match is None:
                return None
            yyyy, mm, dd, h, m, s, ms, sign, hours, minutes = match.groups()
            ms = ms or '.000'
            msint = int(ms[1:])
            sign = sign or ''
            sign = int(sign + '1')
            hours = int(hours or 0) * sign
            minutes = int(minutes or 0) * sign
            # NOTE(review): the zone offset is *added* to the parsed
            # wall-clock time ('+01:00' yields epoch + 1h); converting a
            # local time to UTC would normally subtract it — confirm the
            # intended semantics before relying on offset inputs.
            offset = datetime.timedelta(hours=hours, minutes=minutes)
            string = yyyy + mm + dd + h + m + s + ms + 'Z'
            dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
            dt = dt + offset
            return calendar.timegm(dt.utctimetuple()) * 1000 + msint
        except (TypeError, OverflowError, OSError, ValueError):
            return None
@staticmethod
def hash(request, algorithm='md5', digest='hex'):
h = hashlib.new(algorithm, request)
if digest == 'hex':
return h.hexdigest()
elif digest == 'base64':
return base64.b64encode(h.digest())
return h.digest()
@staticmethod
def hmac(request, secret, algorithm=hashlib.sha256, digest='hex'):
h = hmac.new(secret, request, algorithm)
if digest == 'hex':
return h.hexdigest()
elif digest == 'base64':
return base64.b64encode(h.digest())
return h.digest()
@staticmethod
def binary_concat(*args):
result = bytes()
for arg in args:
result = result + arg
return result
@staticmethod
def binary_to_string(s):
return s.decode('ascii')
@staticmethod
def base64urlencode(s):
return Exchange.decode(base64.urlsafe_b64encode(s)).replace('=', '')
@staticmethod
def jwt(request, secret, algorithm=hashlib.sha256, alg='HS256'):
header = Exchange.encode(Exchange.json({
'alg': alg,
'typ': 'JWT',
}))
encodedHeader = Exchange.base64urlencode(header)
encodedData = Exchange.base64urlencode(Exchange.encode(Exchange.json(request)))
token = encodedHeader + '.' + encodedData
hmac = Exchange.hmac(Exchange.encode(token), Exchange.encode(secret), algorithm, 'binary')
signature = Exchange.base64urlencode(hmac)
return token + '.' + signature
@staticmethod
def unjson(input):
return json.loads(input)
@staticmethod
def json(data, params=None):
return json.dumps(data, separators=(',', ':'))
@staticmethod
def parse_if_json_encoded_object(input):
return json.loads(input) if Exchange.is_json_encoded_object(input) else input
@staticmethod
def is_json_encoded_object(input):
return (isinstance(input, basestring) and
(len(input) >= 2) and
((input[0] == '{') or (input[0] == '[')))
@staticmethod
def encode(string):
return string.encode()
@staticmethod
def decode(string):
return string.decode()
@staticmethod
def to_array(value):
return list(value.values()) if type(value) is dict else value
def nonce(self):
return Exchange.seconds()
def check_required_credentials(self):
keys = list(self.requiredCredentials.keys())
for key in keys:
if self.requiredCredentials[key] and not getattr(self, key):
self.raise_error(AuthenticationError, details='requires `' + key + '`')
def check_address(self, address):
if address is None:
self.raise_error(InvalidAddress, details='address is None')
if all(letter == address[0] for letter in address) or len(address) < self.minFundingAddressLength or ' ' in address:
self.raise_error(InvalidAddress, details='address is invalid or has less than ' + str(self.minFundingAddressLength) + ' characters: "' + str(address) + '"')
return address
def account(self):
return {
'free': 0.0,
'used': 0.0,
'total': 0.0,
}
def common_currency_code(self, currency):
if not self.substituteCommonCurrencyCodes:
return currency
return self.safe_string(self.commonCurrencies, currency, currency)
def currency_id(self, commonCode):
if self.currencies:
if commonCode in self.currencies:
return self.currencies[commonCode]['id']
currencyIds = {v: k for k, v in self.commonCurrencies.items()}
return self.safe_string(currencyIds, commonCode, commonCode)
def fromWei(self, amount, unit='ether'):
if Web3 is None:
self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
if amount is None:
return amount
return float(Web3.fromWei(int(amount), unit))
    def toWei(self, amount, unit='ether'):
        """Convert ``amount`` (in ``unit``) to a wei string via web3;
        returns ``amount`` unchanged when it is None."""
        if Web3 is None:
            self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
        if amount is None:
            return amount
        # NOTE(review): int(amount) truncates fractional amounts (e.g.
        # 1.5 ether -> 1 ether) before conversion — confirm this is the
        # intended behavior rather than a lost fraction.
        return str(Web3.toWei(int(amount), unit))
def precision_from_string(self, string):
parts = re.sub(r'0+$', '', string).split('.')
return len(parts[1]) if len(parts) > 1 else 0
def cost_to_precision(self, symbol, cost):
return self.decimal_to_precision(cost, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
def price_to_precision(self, symbol, price):
return self.decimal_to_precision(price, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
def amount_to_precision(self, symbol, amount):
return self.decimal_to_precision(amount, TRUNCATE, self.markets[symbol]['precision']['amount'], self.precisionMode)
def fee_to_precision(self, symbol, fee):
return self.decimal_to_precision(fee, ROUND, self.markets[symbol]['precision']['price'], self.precisionMode)
def currency_to_precision(self, currency, fee):
return self.decimal_to_precision(fee, ROUND, self.currencies[currency]['precision'], self.precisionMode)
    def set_markets(self, markets, currencies=None):
        """Index ``markets`` into the exchange's lookup tables and derive
        the currency tables when ``currencies`` is not supplied.

        Populates self.markets / markets_by_id / symbols / ids, layering
        per-market defaults (trading fees, precision, limits) under each
        market dict. Returns self.markets.
        """
        values = list(markets.values()) if type(markets) is dict else markets
        for i in range(0, len(values)):
            # Defaults first, so market-specific values win on conflict.
            values[i] = self.extend(
                self.fees['trading'],
                {'precision': self.precision, 'limits': self.limits},
                values[i]
            )
        self.markets = self.index_by(values, 'symbol')
        self.markets_by_id = self.index_by(values, 'id')
        self.marketsById = self.markets_by_id
        self.symbols = sorted(list(self.markets.keys()))
        self.ids = sorted(list(self.markets_by_id.keys()))
        if currencies:
            self.currencies = self.deep_extend(currencies, self.currencies)
        else:
            # No explicit currency list: reconstruct base/quote currencies
            # from the markets themselves (fallback precision: 8).
            base_currencies = [{
                'id': market['baseId'] if 'baseId' in market else market['base'],
                'numericId': market['baseNumericId'] if 'baseNumericId' in market else None,
                'code': market['base'],
                'precision': (
                    market['precision']['base'] if 'base' in market['precision'] else (
                        market['precision']['amount'] if 'amount' in market['precision'] else None
                    )
                ) if 'precision' in market else 8,
            } for market in values if 'base' in market]
            quote_currencies = [{
                'id': market['quoteId'] if 'quoteId' in market else market['quote'],
                'numericId': market['quoteNumericId'] if 'quoteNumericId' in market else None,
                'code': market['quote'],
                'precision': (
                    market['precision']['quote'] if 'quote' in market['precision'] else (
                        market['precision']['price'] if 'price' in market['precision'] else None
                    )
                ) if 'precision' in market else 8,
            } for market in values if 'quote' in market]
            currencies = self.sort_by(base_currencies + quote_currencies, 'code')
            self.currencies = self.deep_extend(self.index_by(currencies, 'code'), self.currencies)
        self.currencies_by_id = self.index_by(list(self.currencies.values()), 'id')
        return self.markets
def load_markets(self, reload=False):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
markets = self.fetch_markets()
currencies = None
if self.has['fetchCurrencies']:
currencies = self.fetch_currencies()
return self.set_markets(markets, currencies)
def populate_fees(self):
if not (hasattr(self, 'markets') or hasattr(self, 'currencies')):
return
for currency, data in self.currencies.items():
if 'fee' in data and data['fee'] is not None:
self.fees['funding']['withdraw'][currency] = data['fee']
self.fees['funding']['fee_loaded'] = True
def load_fees(self):
self.load_markets()
self.populate_fees()
if not (self.has['fetchTradingFees'] or self.has['fetchFundingFees']):
return self.fees
fetched_fees = self.fetch_fees()
if fetched_fees['funding']:
self.fees['funding']['fee_loaded'] = True
if fetched_fees['trading']:
self.fees['trading']['fee_loaded'] = True
self.fees = self.deep_extend(self.fees, fetched_fees)
return self.fees
def fetch_markets(self):
return self.to_array(self.markets)
def fetch_currencies(self, params={}):
return self.currencies
def fetch_fees(self):
trading = {}
funding = {}
try:
trading = self.fetch_trading_fees()
except AuthenticationError:
pass
except AttributeError:
pass
try:
funding = self.fetch_funding_fees()
except AuthenticationError:
pass
except AttributeError:
pass
return {
'trading': trading,
'funding': funding,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.raise_error(NotSupported, details='create_order() not implemented yet')
def cancel_order(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='cancel_order() not implemented yet')
def fetch_bids_asks(self, symbols=None, params={}):
self.raise_error(NotSupported, details='API does not allow to fetch all prices at once with a single call to fetch_bids_asks() for now')
def fetch_tickers(self, symbols=None, params={}):
self.raise_error(NotSupported, details='API does not allow to fetch all tickers at once with a single call to fetch_tickers() for now')
def fetch_order_status(self, id, market=None):
order = self.fetch_order(id)
return order['status']
def purge_cached_orders(self, before):
orders = self.to_array(self.orders)
orders = [order for order in orders if (order['status'] == 'open') or (order['timestamp'] >= before)]
self.orders = self.index_by(orders, 'id')
return self.orders
def fetch_order(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='fetch_order() is not implemented yet')
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_orders() is not implemented yet')
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_open_orders() is not implemented yet')
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_closed_orders() is not implemented yet')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_my_trades() is not implemented yet')
def fetch_order_trades(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='fetch_order_trades() is not implemented yet')
def fetch_transactions(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_transactions() is not implemented yet')
def fetch_deposits(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_deposits() is not implemented yet')
def fetch_withdrawals(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_withdrawals() is not implemented yet')
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return ohlcv[0:6] if isinstance(ohlcv, list) else ohlcv
def parse_ohlcvs(self, ohlcvs, market=None, timeframe='1m', since=None, limit=None):
ohlcvs = self.to_array(ohlcvs)
num_ohlcvs = len(ohlcvs)
result = []
i = 0
while i < num_ohlcvs:
if limit and (len(result) >= limit):
break
ohlcv = self.parse_ohlcv(ohlcvs[i], market, timeframe, since, limit)
i = i + 1
if since and (ohlcv[0] < since):
continue
result.append(ohlcv)
return self.sort_by(result, 0)
def parse_bid_ask(self, bidask, price_key=0, amount_key=0):
return [float(bidask[price_key]), float(bidask[amount_key])]
def parse_bids_asks(self, bidasks, price_key=0, amount_key=1):
result = []
if len(bidasks):
if type(bidasks[0]) is list:
for bidask in bidasks:
if bidask[price_key] and bidask[amount_key]:
result.append(self.parse_bid_ask(bidask, price_key, amount_key))
elif type(bidasks[0]) is dict:
for bidask in bidasks:
if (price_key in bidask) and (amount_key in bidask) and (bidask[price_key] and bidask[amount_key]):
result.append(self.parse_bid_ask(bidask, price_key, amount_key))
else:
self.raise_error(ExchangeError, details='unrecognized bidask format: ' + str(bidasks[0]))
return result
def fetch_l2_order_book(self, symbol, limit=None, params={}):
orderbook = self.fetch_order_book(symbol, limit, params)
return self.extend(orderbook, {
'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
})
    def parse_order_book(self, orderbook, timestamp=None, bids_key='bids', asks_key='asks', price_key=0, amount_key=1):
        """Normalize a raw order book into the unified structure: bids
        sorted by price descending, asks ascending, plus timestamp info.
        A missing or non-list side becomes an empty list."""
        return {
            'bids': self.sort_by(self.parse_bids_asks(orderbook[bids_key], price_key, amount_key) if (bids_key in orderbook) and isinstance(orderbook[bids_key], list) else [], 0, True),
            'asks': self.sort_by(self.parse_bids_asks(orderbook[asks_key], price_key, amount_key) if (asks_key in orderbook) and isinstance(orderbook[asks_key], list) else [], 0),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp) if timestamp is not None else None,
            'nonce': None,
        }
def parse_balance(self, balance):
currencies = self.omit(balance, 'info').keys()
for account in ['free', 'used', 'total']:
balance[account] = {}
for currency in currencies:
balance[account][currency] = balance[currency][account]
return balance
def fetch_partial_balance(self, part, params={}):
balance = self.fetch_balance(params)
return balance[part]
def fetch_free_balance(self, params={}):
return self.fetch_partial_balance('free', params)
def fetch_used_balance(self, params={}):
return self.fetch_partial_balance('used', params)
def fetch_total_balance(self, params={}):
return self.fetch_partial_balance('total', params)
def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = self.fetch_trading_limits(symbols)
for i in range(0, len(symbols)):
symbol = symbols[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if not self.has['fetchTrades']:
self.raise_error(NotSupported, details='fetch_ohlcv() not implemented yet')
self.load_markets()
trades = self.fetch_trades(symbol, since, limit, params)
return self.build_ohlcv(trades, timeframe, since, limit)
def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
return self.fetch_ohlcv(symbol, timeframe, since, limit, params)
def convert_trading_view_to_ohlcv(self, ohlcvs):
result = []
for i in range(0, len(ohlcvs['t'])):
result.append([
ohlcvs['t'][i] * 1000,
ohlcvs['o'][i],
ohlcvs['h'][i],
ohlcvs['l'][i],
ohlcvs['c'][i],
ohlcvs['v'][i],
])
return result
def convert_ohlcv_to_trading_view(self, ohlcvs):
result = {
't': [],
'o': [],
'h': [],
'l': [],
'c': [],
'v': [],
}
for i in range(0, len(ohlcvs)):
result['t'].append(int(ohlcvs[i][0] / 1000))
result['o'].append(ohlcvs[i][1])
result['h'].append(ohlcvs[i][2])
result['l'].append(ohlcvs[i][3])
result['c'].append(ohlcvs[i][4])
result['v'].append(ohlcvs[i][5])
return result
    def build_ohlcv(self, trades, timeframe='1m', since=None, limit=None):
        """Aggregate a list of trades into OHLCV candles of ``timeframe``.

        Trades before ``since`` are skipped; each candle is
        [opening_time_ms, open, high, low, close, volume].
        """
        ms = self.parse_timeframe(timeframe) * 1000
        ohlcvs = []
        (high, low, close, volume) = (2, 3, 4, 5)
        num_trades = len(trades)
        # NOTE(review): range(0, oldest) never visits the final trade, and
        # 'limit' caps the number of trades scanned rather than candles
        # produced — this looks off by one; confirm intent before changing.
        oldest = (num_trades - 1) if limit is None else min(num_trades - 1, limit)
        for i in range(0, oldest):
            trade = trades[i]
            if (since is not None) and (trade['timestamp'] < since):
                continue
            # Candle start: the trade timestamp floored to the timeframe.
            opening_time = int(math.floor(trade['timestamp'] / ms) * ms)
            j = len(ohlcvs)
            if (j == 0) or opening_time >= ohlcvs[j - 1][0] + ms:
                # First trade of a new candle seeds O/H/L/C with its price.
                ohlcvs.append([
                    opening_time,
                    trade['price'],
                    trade['price'],
                    trade['price'],
                    trade['price'],
                    trade['amount'],
                ])
            else:
                ohlcvs[j - 1][high] = max(ohlcvs[j - 1][high], trade['price'])
                ohlcvs[j - 1][low] = min(ohlcvs[j - 1][low], trade['price'])
                ohlcvs[j - 1][close] = trade['price']
                ohlcvs[j - 1][volume] += trade['amount']
        return ohlcvs
def parse_timeframe(self, timeframe):
amount = int(timeframe[0:-1])
unit = timeframe[-1]
if 'y' in unit:
scale = 60 * 60 * 24 * 365
elif 'M' in unit:
scale = 60 * 60 * 24 * 30
elif 'w' in unit:
scale = 60 * 60 * 24 * 7
elif 'd' in unit:
scale = 60 * 60 * 24
elif 'h' in unit:
scale = 60 * 60
else:
scale = 60
return amount * scale
def parse_trades(self, trades, market=None, since=None, limit=None):
array = self.to_array(trades)
array = [self.parse_trade(trade, market) for trade in array]
array = self.sort_by(array, 'timestamp')
symbol = market['symbol'] if market else None
return self.filter_by_symbol_since_limit(array, symbol, since, limit)
def parse_transactions(self, transactions, currency=None, since=None, limit=None):
array = self.to_array(transactions)
array = [self.parse_transaction(transaction, currency) for transaction in array]
array = self.sort_by(array, 'timestamp')
code = currency['code'] if currency else None
return self.filter_by_currency_since_limit(array, code, since, limit)
def parse_orders(self, orders, market=None, since=None, limit=None):
array = self.to_array(orders)
array = [self.parse_order(order, market) for order in array]
array = self.sort_by(array, 'timestamp')
symbol = market['symbol'] if market else None
return self.filter_by_symbol_since_limit(array, symbol, since, limit)
def filter_by_value_since_limit(self, array, field, value=None, since=None, limit=None):
array = self.to_array(array)
if value:
array = [entry for entry in array if entry[field] == value]
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
def filter_by_symbol_since_limit(self, array, symbol=None, since=None, limit=None):
return self.filter_by_value_since_limit(array, 'symbol', symbol, since, limit)
def filter_by_currency_since_limit(self, array, code=None, since=None, limit=None):
return self.filter_by_value_since_limit(array, 'currency', code, since, limit)
def filter_by_since_limit(self, array, since=None, limit=None):
array = self.to_array(array)
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
def filter_by_symbol(self, array, symbol=None):
array = self.to_array(array)
if symbol:
return [entry for entry in array if entry['symbol'] == symbol]
return array
def filter_by_array(self, objects, key, values=None, indexed=True):
objects = self.to_array(objects)
if values is None:
return self.index_by(objects, key) if indexed else objects
result = []
for i in range(0, len(objects)):
value = objects[i][key] if key in objects[i] else None
if value in values:
result.append(objects[i])
return self.index_by(result, key) if indexed else result
def currency(self, code):
if not self.currencies:
self.raise_error(ExchangeError, details='Currencies not loaded')
if isinstance(code, basestring) and (code in self.currencies):
return self.currencies[code]
self.raise_error(ExchangeError, details='Does not have currency code ' + str(code))
def find_market(self, string):
if not self.markets:
self.raise_error(ExchangeError, details='Markets not loaded')
if isinstance(string, basestring):
if string in self.markets_by_id:
return self.markets_by_id[string]
if string in self.markets:
return self.markets[string]
return string
def find_symbol(self, string, market=None):
if market is None:
market = self.find_market(string)
if isinstance(market, dict):
return market['symbol']
return string
def market(self, symbol):
if not self.markets:
self.raise_error(ExchangeError, details='Markets not loaded')
if isinstance(symbol, basestring) and (symbol in self.markets):
return self.markets[symbol]
self.raise_error(ExchangeError, details='No market symbol ' + str(symbol))
def market_ids(self, symbols):
return [self.market_id(symbol) for symbol in symbols]
def market_id(self, symbol):
market = self.market(symbol)
return market['id'] if type(market) is dict else symbol
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * price))
return {
'rate': rate,
'type': takerOrMaker,
'currency': market['quote'],
'cost': float(self.fee_to_precision(symbol, rate * cost)),
}
def edit_limit_buy_order(self, id, symbol, *args):
return self.edit_limit_order(id, symbol, 'buy', *args)
def edit_limit_sell_order(self, id, symbol, *args):
return self.edit_limit_order(id, symbol, 'sell', *args)
def edit_limit_order(self, id, symbol, *args):
return self.edit_order(id, symbol, 'limit', *args)
def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
self.raise_error(ExchangeError, details='edit_order() requires enableRateLimit = true')
self.cancel_order(id, symbol)
return self.create_order(symbol, *args)
def create_limit_order(self, symbol, *args):
return self.create_order(symbol, 'limit', *args)
def create_market_order(self, symbol, *args):
return self.create_order(symbol, 'market', *args)
def create_limit_buy_order(self, symbol, *args):
return self.create_order(symbol, 'limit', 'buy', *args)
def create_limit_sell_order(self, symbol, *args):
return self.create_order(symbol, 'limit', 'sell', *args)
def create_market_buy_order(self, symbol, amount, params={}):
return self.create_order(symbol, 'market', 'buy', amount, None, params)
def create_market_sell_order(self, symbol, amount, params={}):
return self.create_order(symbol, 'market', 'sell', amount, None, params)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
raise NotSupported(self.id + ' sign() pure method must be redefined in derived classes')
def decryptAccountFromJSON(self, value, password):
return self.decryptAccount(json.loads(value) if isinstance(value, basestring) else value, password)
def decryptAccount(self, key, password):
return self.web3.eth.accounts.decrypt(key, password)
def decryptAccountFromPrivateKey(self, privateKey):
return self.web3.eth.accounts.privateKeyToAccount(privateKey)
def soliditySha3(self, array):
values = self.solidityValues(array)
types = self.solidityTypes(values)
return self.web3.soliditySha3(types, values).hex()
def soliditySha256(self, values):
types = self.solidityTypes(values)
solidity_values = self.solidityValues(values)
encoded_values = [hex_encode_abi_type(abi_type, value)[2:] for abi_type, value in zip(types, solidity_values)]
hex_string = '0x' + ''.join(encoded_values)
return '0x' + self.hash(self.encode(self.web3.toText(hex_string)), 'sha256')
def solidityTypes(self, array):
return ['address' if self.web3.isAddress(value) else 'uint256' for value in array]
def solidityValues(self, array):
return [self.web3.toChecksumAddress(value) if self.web3.isAddress(value) else int(value) for value in array]
def getZeroExOrderHash2(self, order):
return self.soliditySha3([
order['exchangeContractAddress'],
order['maker'],
order['taker'],
order['makerTokenAddress'],
order['takerTokenAddress'],
order['feeRecipient'],
order['makerTokenAmount'],
order['takerTokenAmount'],
order['makerFee'],
order['takerFee'],
order['expirationUnixTimestampSec'],
order['salt'],
])
def getZeroExOrderHash(self, order):
unpacked = [
self.web3.toChecksumAddress(order['exchangeContractAddress']),
self.web3.toChecksumAddress(order['maker']),
self.web3.toChecksumAddress(order['taker']),
self.web3.toChecksumAddress(order['makerTokenAddress']),
self.web3.toChecksumAddress(order['takerTokenAddress']),
self.web3.toChecksumAddress(order['feeRecipient']),
int(order['makerTokenAmount']),
int(order['takerTokenAmount']),
int(order['makerFee']),
int(order['takerFee']),
int(order['expirationUnixTimestampSec']),
int(order['salt']),
]
types = [
'address',
'address',
'address',
'address',
'address',
'address',
'uint256',
'uint256',
'uint256',
'uint256',
'uint256',
'uint256',
]
return self.web3.soliditySha3(types, unpacked).hex()
def signZeroExOrder(self, order):
orderHash = self.getZeroExOrderHash(order)
signature = self.signMessage(orderHash[-64:], self.privateKey)
return self.extend(order, {
'orderHash': orderHash,
'ecSignature': signature,
})
def hashMessage(self, message):
message_bytes = bytes.fromhex(message)
return self.web3.sha3(b"\x19Ethereum Signed Message:\n" + str(len(message_bytes)).encode() + message_bytes).hex()
def signHash(self, hash, privateKey):
signature = self.web3.eth.account.signHash(hash[-64:], private_key=privateKey[-64:])
return {
'v': signature.v,
'r': self.web3.toHex(signature.r),
's': self.web3.toHex(signature.s),
}
def signMessage(self, message, privateKey):
message_hash = self.hashMessage(message)
signature = self.signHash(message_hash[-64:], privateKey[-64:])
return signature
| true | true |
790000ca60eca1b680d51304f349756cd85eaf86 | 1,321 | py | Python | src_old/tests/scripts/lambda/pdes/2d/ex10.py | toddrme2178/pyccel | deec37503ab0c5d0bcca1a035f7909f7ce8ef653 | [
"MIT"
] | null | null | null | src_old/tests/scripts/lambda/pdes/2d/ex10.py | toddrme2178/pyccel | deec37503ab0c5d0bcca1a035f7909f7ce8ef653 | [
"MIT"
] | null | null | null | src_old/tests/scripts/lambda/pdes/2d/ex10.py | toddrme2178/pyccel | deec37503ab0c5d0bcca1a035f7909f7ce8ef653 | [
"MIT"
] | null | null | null | # coding: utf-8
# Import the symbolic tools from pyccel's gelato interface.
# load() presumably takes (module, symbol, is_function, arity) —
# TODO confirm against the pyccel documentation.
weak_formulation = load('pyccel.symbolic.gelato', 'weak_formulation', True, 2)
glt_function = load('pyccel.symbolic.gelato', 'glt_function', True, 3)
Grad = load('pyccel.symbolic.gelato', 'Grad', False, 1)
Curl = load('pyccel.symbolic.gelato', 'Curl', False, 1)
Div = load('pyccel.symbolic.gelato', 'Div', False, 1)
Rot = load('pyccel.symbolic.gelato', 'Rot', False, 1)
Cross = load('pyccel.symbolic.gelato', 'Cross', False, 2)
Dot = load('pyccel.symbolic.gelato', 'Dot', False, 2)
# ...
# a1: Laplace bilinear form; GLT symbol computed on a 4x4 grid with
# spline degrees (2, 2), and its 2D weak formulation.
a1 = lambda x,y,v,u: Dot(Grad(u), Grad(v))
ga1 = glt_function(a1, [4, 4], [2, 2])
wa1 = weak_formulation(a1, 2)
print(' a1 := ', a1)
print(' glt symbol a1 := ', ga1)
print('wa1 := ', wa1)
print('')
# ...
# a2: rot-div form with a 0.2 * Dot(u, v) mass term.
a2 = lambda x,y,v,u: Rot(u) * Rot(v) + Div(u) * Div(v) + 0.2 * Dot(u, v)
ga2 = glt_function(a2, [4, 4], [2, 2])
wa2 = weak_formulation(a2, 2)
print(' a2 := ', a2)
print(' glt symbol a2 := ', ga2)
print('wa2 := ', wa2)
print('')
# ...
# a3: curl-curl form with a 0.2 * u * v mass term.
a3 = lambda x,y,v,u: Cross(Curl(u), Curl(v)) + 0.2 * u * v
ga3 = glt_function(a3, [4, 4], [2, 2])
wa3 = weak_formulation(a3, 2)
print(' a3 := ', a3)
print(' glt symbol a3 := ', ga3)
print('wa3 := ', wa3)
print('')
| 25.901961 | 78 | 0.549584 |
weak_formulation = load('pyccel.symbolic.gelato', 'weak_formulation', True, 2)
glt_function = load('pyccel.symbolic.gelato', 'glt_function', True, 3)
Grad = load('pyccel.symbolic.gelato', 'Grad', False, 1)
Curl = load('pyccel.symbolic.gelato', 'Curl', False, 1)
Div = load('pyccel.symbolic.gelato', 'Div', False, 1)
Rot = load('pyccel.symbolic.gelato', 'Rot', False, 1)
Cross = load('pyccel.symbolic.gelato', 'Cross', False, 2)
Dot = load('pyccel.symbolic.gelato', 'Dot', False, 2)
a1 = lambda x,y,v,u: Dot(Grad(u), Grad(v))
ga1 = glt_function(a1, [4, 4], [2, 2])
wa1 = weak_formulation(a1, 2)
print(' a1 := ', a1)
print(' glt symbol a1 := ', ga1)
print('wa1 := ', wa1)
print('')
a2 = lambda x,y,v,u: Rot(u) * Rot(v) + Div(u) * Div(v) + 0.2 * Dot(u, v)
ga2 = glt_function(a2, [4, 4], [2, 2])
wa2 = weak_formulation(a2, 2)
print(' a2 := ', a2)
print(' glt symbol a2 := ', ga2)
print('wa2 := ', wa2)
print('')
a3 = lambda x,y,v,u: Cross(Curl(u), Curl(v)) + 0.2 * u * v
ga3 = glt_function(a3, [4, 4], [2, 2])
wa3 = weak_formulation(a3, 2)
print(' a3 := ', a3)
print(' glt symbol a3 := ', ga3)
print('wa3 := ', wa3)
print('')
| true | true |
790000df29a505a4935451d347ad88914c76144d | 5,295 | py | Python | tableauserverclient/models/flow_item.py | essentia-team/server-client-python | c9375204c581c5288fe4b6abc3c84fea41a887b5 | [
"CC0-1.0",
"MIT"
] | 1 | 2019-10-24T02:00:52.000Z | 2019-10-24T02:00:52.000Z | tableauserverclient/models/flow_item.py | essentia-team/server-client-python | c9375204c581c5288fe4b6abc3c84fea41a887b5 | [
"CC0-1.0",
"MIT"
] | null | null | null | tableauserverclient/models/flow_item.py | essentia-team/server-client-python | c9375204c581c5288fe4b6abc3c84fea41a887b5 | [
"CC0-1.0",
"MIT"
] | 1 | 2022-03-29T23:19:43.000Z | 2022-03-29T23:19:43.000Z | import xml.etree.ElementTree as ET
from .exceptions import UnpopulatedPropertyError
from .property_decorators import property_not_nullable, property_is_boolean
from .tag_item import TagItem
from ..datetime_helpers import parse_datetime
import copy
class FlowItem(object):
    def __init__(self, project_id, name=None):
        """Create a flow item belonging to ``project_id``.

        Underscore-prefixed attributes are server-populated and remain
        None (or empty) until the item is fetched/refreshed from the API.
        """
        self._webpage_url = None
        self._created_at = None
        self._id = None
        self._initial_tags = set()
        self._project_name = None
        self._updated_at = None
        self.name = name
        self.owner_id = None
        self.project_id = project_id
        self.tags = set()
        self.description = None
        # Set lazily by the endpoint's populate_* calls (see the
        # connections / permissions properties).
        self._connections = None
        self._permissions = None
@property
def connections(self):
if self._connections is None:
error = 'Flow item must be populated with connections first.'
raise UnpopulatedPropertyError(error)
return self._connections()
@property
def permissions(self):
if self._permissions is None:
error = "Project item must be populated with permissions first."
raise UnpopulatedPropertyError(error)
return self._permissions()
@property
def webpage_url(self):
return self._webpage_url
@property
def created_at(self):
return self._created_at
@property
def id(self):
return self._id
@property
def project_id(self):
return self._project_id
@project_id.setter
@property_not_nullable
def project_id(self, value):
self._project_id = value
@property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@property
def project_name(self):
return self._project_name
@property
def flow_type(self):
return self._flow_type
@property
def updated_at(self):
return self._updated_at
def _set_connections(self, connections):
self._connections = connections
def _set_permissions(self, permissions):
self._permissions = permissions
def _parse_common_elements(self, flow_xml, ns):
if not isinstance(flow_xml, ET.Element):
flow_xml = ET.fromstring(flow_xml).find('.//t:flow', namespaces=ns)
if flow_xml is not None:
(_, _, _, _, _, updated_at, _, project_id, project_name, owner_id) = self._parse_element(flow_xml, ns)
self._set_values(None, None, None, None, None, updated_at, None, project_id,
project_name, owner_id)
return self
def _set_values(self, id, name, description, webpage_url, created_at,
updated_at, tags, project_id, project_name, owner_id):
if id is not None:
self._id = id
if name:
self.name = name
if description:
self.description = description
if webpage_url:
self._webpage_url = webpage_url
if created_at:
self._created_at = created_at
if updated_at:
self._updated_at = updated_at
if tags:
self.tags = tags
self._initial_tags = copy.copy(tags)
if project_id:
self.project_id = project_id
if project_name:
self._project_name = project_name
if owner_id:
self.owner_id = owner_id
@classmethod
def from_response(cls, resp, ns):
all_flow_items = list()
parsed_response = ET.fromstring(resp)
all_flow_xml = parsed_response.findall('.//t:flow', namespaces=ns)
for flow_xml in all_flow_xml:
(id_, name, description, webpage_url, created_at, updated_at,
tags, project_id, project_name, owner_id) = cls._parse_element(flow_xml, ns)
flow_item = cls(project_id)
flow_item._set_values(id_, name, description, webpage_url, created_at, updated_at,
tags, None, project_name, owner_id)
all_flow_items.append(flow_item)
return all_flow_items
@staticmethod
def _parse_element(flow_xml, ns):
id_ = flow_xml.get('id', None)
name = flow_xml.get('name', None)
description = flow_xml.get('description', None)
webpage_url = flow_xml.get('webpageUrl', None)
created_at = parse_datetime(flow_xml.get('createdAt', None))
updated_at = parse_datetime(flow_xml.get('updatedAt', None))
tags = None
tags_elem = flow_xml.find('.//t:tags', namespaces=ns)
if tags_elem is not None:
tags = TagItem.from_xml_element(tags_elem, ns)
project_id = None
project_name = None
project_elem = flow_xml.find('.//t:project', namespaces=ns)
if project_elem is not None:
project_id = project_elem.get('id', None)
project_name = project_elem.get('name', None)
owner_id = None
owner_elem = flow_xml.find('.//t:owner', namespaces=ns)
if owner_elem is not None:
owner_id = owner_elem.get('id', None)
return (id_, name, description, webpage_url, created_at, updated_at, tags, project_id,
project_name, owner_id)
| 32.484663 | 114 | 0.628706 | import xml.etree.ElementTree as ET
from .exceptions import UnpopulatedPropertyError
from .property_decorators import property_not_nullable, property_is_boolean
from .tag_item import TagItem
from ..datetime_helpers import parse_datetime
import copy
class FlowItem(object):
def __init__(self, project_id, name=None):
self._webpage_url = None
self._created_at = None
self._id = None
self._initial_tags = set()
self._project_name = None
self._updated_at = None
self.name = name
self.owner_id = None
self.project_id = project_id
self.tags = set()
self.description = None
self._connections = None
self._permissions = None
@property
def connections(self):
if self._connections is None:
error = 'Flow item must be populated with connections first.'
raise UnpopulatedPropertyError(error)
return self._connections()
@property
def permissions(self):
if self._permissions is None:
error = "Project item must be populated with permissions first."
raise UnpopulatedPropertyError(error)
return self._permissions()
@property
def webpage_url(self):
return self._webpage_url
@property
def created_at(self):
return self._created_at
@property
def id(self):
return self._id
@property
def project_id(self):
return self._project_id
@project_id.setter
@property_not_nullable
def project_id(self, value):
self._project_id = value
@property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@property
def project_name(self):
return self._project_name
@property
def flow_type(self):
return self._flow_type
@property
def updated_at(self):
return self._updated_at
def _set_connections(self, connections):
self._connections = connections
def _set_permissions(self, permissions):
self._permissions = permissions
def _parse_common_elements(self, flow_xml, ns):
if not isinstance(flow_xml, ET.Element):
flow_xml = ET.fromstring(flow_xml).find('.//t:flow', namespaces=ns)
if flow_xml is not None:
(_, _, _, _, _, updated_at, _, project_id, project_name, owner_id) = self._parse_element(flow_xml, ns)
self._set_values(None, None, None, None, None, updated_at, None, project_id,
project_name, owner_id)
return self
def _set_values(self, id, name, description, webpage_url, created_at,
updated_at, tags, project_id, project_name, owner_id):
if id is not None:
self._id = id
if name:
self.name = name
if description:
self.description = description
if webpage_url:
self._webpage_url = webpage_url
if created_at:
self._created_at = created_at
if updated_at:
self._updated_at = updated_at
if tags:
self.tags = tags
self._initial_tags = copy.copy(tags)
if project_id:
self.project_id = project_id
if project_name:
self._project_name = project_name
if owner_id:
self.owner_id = owner_id
@classmethod
def from_response(cls, resp, ns):
all_flow_items = list()
parsed_response = ET.fromstring(resp)
all_flow_xml = parsed_response.findall('.//t:flow', namespaces=ns)
for flow_xml in all_flow_xml:
(id_, name, description, webpage_url, created_at, updated_at,
tags, project_id, project_name, owner_id) = cls._parse_element(flow_xml, ns)
flow_item = cls(project_id)
flow_item._set_values(id_, name, description, webpage_url, created_at, updated_at,
tags, None, project_name, owner_id)
all_flow_items.append(flow_item)
return all_flow_items
@staticmethod
def _parse_element(flow_xml, ns):
id_ = flow_xml.get('id', None)
name = flow_xml.get('name', None)
description = flow_xml.get('description', None)
webpage_url = flow_xml.get('webpageUrl', None)
created_at = parse_datetime(flow_xml.get('createdAt', None))
updated_at = parse_datetime(flow_xml.get('updatedAt', None))
tags = None
tags_elem = flow_xml.find('.//t:tags', namespaces=ns)
if tags_elem is not None:
tags = TagItem.from_xml_element(tags_elem, ns)
project_id = None
project_name = None
project_elem = flow_xml.find('.//t:project', namespaces=ns)
if project_elem is not None:
project_id = project_elem.get('id', None)
project_name = project_elem.get('name', None)
owner_id = None
owner_elem = flow_xml.find('.//t:owner', namespaces=ns)
if owner_elem is not None:
owner_id = owner_elem.get('id', None)
return (id_, name, description, webpage_url, created_at, updated_at, tags, project_id,
project_name, owner_id)
| true | true |
790004af7c8a455df790e5cce7f3ac3cfb392fef | 117 | py | Python | apps/common/templatetags/smhtags.py | TransparentHealth/smh-organization | ca32d1cbb7600e8b22e43e06edab83c323a2404d | [
"Apache-2.0"
] | 3 | 2019-03-03T22:41:00.000Z | 2020-04-16T04:07:07.000Z | apps/common/templatetags/smhtags.py | TransparentHealth/smh_app | ca32d1cbb7600e8b22e43e06edab83c323a2404d | [
"Apache-2.0"
] | 112 | 2019-03-04T14:20:50.000Z | 2020-04-29T21:32:07.000Z | apps/common/templatetags/smhtags.py | TransparentHealth/smh-organization | ca32d1cbb7600e8b22e43e06edab83c323a2404d | [
"Apache-2.0"
] | 2 | 2019-08-01T13:08:28.000Z | 2019-12-06T15:53:25.000Z | from django.template.defaulttags import register
@register.filter
def get(data, key):
return data.get(key, '')
| 16.714286 | 48 | 0.735043 | from django.template.defaulttags import register
@register.filter
def get(data, key):
return data.get(key, '')
| true | true |
790005d23316bbe70e998e38bb1dde32ad9767bf | 1,119 | py | Python | setup.py | nikitanovosibirsk/vedro-allure-reporter | da6bb3e89a13a1e67324f0e791458c65e162f18c | [
"Apache-2.0"
] | 1 | 2021-08-25T12:24:46.000Z | 2021-08-25T12:24:46.000Z | setup.py | nikitanovosibirsk/vedro-allure-reporter | da6bb3e89a13a1e67324f0e791458c65e162f18c | [
"Apache-2.0"
] | 1 | 2022-03-22T14:16:22.000Z | 2022-03-23T09:05:46.000Z | setup.py | nikitanovosibirsk/vedro-allure-reporter | da6bb3e89a13a1e67324f0e791458c65e162f18c | [
"Apache-2.0"
] | null | null | null | from setuptools import find_packages, setup
def find_required():
with open("requirements.txt") as f:
return f.read().splitlines()
def find_dev_required():
with open("requirements-dev.txt") as f:
return f.read().splitlines()
setup(
name="vedro-allure-reporter",
version="0.2.4",
description="Allure reporter for Vedro framework",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Nikita Tsvetkov",
author_email="nikitanovosibirsk@yandex.com",
python_requires=">=3.7",
url="https://github.com/nikitanovosibirsk/vedro-allure-reporter",
license="Apache-2.0",
packages=find_packages(exclude=("tests",)),
package_data={"vedro_allure_reporter": ["py.typed"]},
install_requires=find_required(),
tests_require=find_dev_required(),
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Typing :: Typed",
],
)
| 30.243243 | 69 | 0.662198 | from setuptools import find_packages, setup
def find_required():
with open("requirements.txt") as f:
return f.read().splitlines()
def find_dev_required():
with open("requirements-dev.txt") as f:
return f.read().splitlines()
setup(
name="vedro-allure-reporter",
version="0.2.4",
description="Allure reporter for Vedro framework",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Nikita Tsvetkov",
author_email="nikitanovosibirsk@yandex.com",
python_requires=">=3.7",
url="https://github.com/nikitanovosibirsk/vedro-allure-reporter",
license="Apache-2.0",
packages=find_packages(exclude=("tests",)),
package_data={"vedro_allure_reporter": ["py.typed"]},
install_requires=find_required(),
tests_require=find_dev_required(),
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Typing :: Typed",
],
)
| true | true |
7900061228d2b80ebd5bff942f29abd5c9db4230 | 12,351 | py | Python | utils/inverter.py | Twizwei/idinvert_pytorch | 11f1126aab517fbe32b488d92f6fdea339463d04 | [
"MIT"
] | null | null | null | utils/inverter.py | Twizwei/idinvert_pytorch | 11f1126aab517fbe32b488d92f6fdea339463d04 | [
"MIT"
] | null | null | null | utils/inverter.py | Twizwei/idinvert_pytorch | 11f1126aab517fbe32b488d92f6fdea339463d04 | [
"MIT"
] | null | null | null | # python 3.7
"""Utility functions to invert a given image back to a latent code."""
from tqdm import tqdm
import cv2
import numpy as np
import torch
from models.stylegan_generator import StyleGANGenerator
from models.stylegan_encoder import StyleGANEncoder
from models.perceptual_model import PerceptualModel
__all__ = ['StyleGANInverter']
def _softplus(x):
"""Implements the softplus function."""
return torch.nn.functional.softplus(x, beta=1, threshold=10000)
def _get_tensor_value(tensor):
"""Gets the value of a torch Tensor."""
return tensor.cpu().detach().numpy()
class StyleGANInverter(object):
"""Defines the class for StyleGAN inversion.
Even having the encoder, the output latent code is not good enough to recover
the target image satisfyingly. To this end, this class optimize the latent
code based on gradient descent algorithm. In the optimization process,
following loss functions will be considered:
(1) Pixel-wise reconstruction loss. (required)
(2) Perceptual loss. (optional, but recommended)
(3) Regularization loss from encoder. (optional, but recommended for in-domain
inversion)
NOTE: The encoder can be missing for inversion, in which case the latent code
will be randomly initialized and the regularization loss will be ignored.
"""
def __init__(self,
model_name,
learning_rate=1e-2,
iteration=100,
reconstruction_loss_weight=1.0,
perceptual_loss_weight=5e-5,
regularization_loss_weight=2.0,
logger=None):
"""Initializes the inverter.
NOTE: Only Adam optimizer is supported in the optimization process.
Args:
model_name: Name of the model on which the inverted is based. The model
should be first registered in `models/model_settings.py`.
logger: Logger to record the log message.
learning_rate: Learning rate for optimization. (default: 1e-2)
iteration: Number of iterations for optimization. (default: 100)
reconstruction_loss_weight: Weight for reconstruction loss. Should always
be a positive number. (default: 1.0)
perceptual_loss_weight: Weight for perceptual loss. 0 disables perceptual
loss. (default: 5e-5)
regularization_loss_weight: Weight for regularization loss from encoder.
This is essential for in-domain inversion. However, this loss will
automatically ignored if the generative model does not include a valid
encoder. 0 disables regularization loss. (default: 2.0)
"""
self.logger = logger
self.model_name = model_name
self.gan_type = 'stylegan'
self.G = StyleGANGenerator(self.model_name, self.logger)
self.E = StyleGANEncoder(self.model_name, self.logger)
self.F = PerceptualModel(min_val=self.G.min_val, max_val=self.G.max_val)
self.encode_dim = [self.G.num_layers, self.G.w_space_dim]
self.run_device = self.G.run_device
assert list(self.encode_dim) == list(self.E.encode_dim)
assert self.G.gan_type == self.gan_type
assert self.E.gan_type == self.gan_type
self.learning_rate = learning_rate
self.iteration = iteration
self.loss_pix_weight = reconstruction_loss_weight
self.loss_feat_weight = perceptual_loss_weight
self.loss_reg_weight = regularization_loss_weight
assert self.loss_pix_weight > 0
def preprocess(self, image):
"""Preprocesses a single image.
This function assumes the input numpy array is with shape [height, width,
channel], channel order `RGB`, and pixel range [0, 255].
The returned image is with shape [channel, new_height, new_width], where
`new_height` and `new_width` are specified by the given generative model.
The channel order of returned image is also specified by the generative
model. The pixel range is shifted to [min_val, max_val], where `min_val` and
`max_val` are also specified by the generative model.
"""
if not isinstance(image, np.ndarray):
raise ValueError(f'Input image should be with type `numpy.ndarray`!')
if image.dtype != np.uint8:
raise ValueError(f'Input image should be with dtype `numpy.uint8`!')
if image.ndim != 3 or image.shape[2] not in [1, 3]:
raise ValueError(f'Input should be with shape [height, width, channel], '
f'where channel equals to 1 or 3!\n'
f'But {image.shape} is received!')
if image.shape[2] == 1 and self.G.image_channels == 3:
image = np.tile(image, (1, 1, 3))
if image.shape[2] != self.G.image_channels:
raise ValueError(f'Number of channels of input image, which is '
f'{image.shape[2]}, is not supported by the current '
f'inverter, which requires {self.G.image_channels} '
f'channels!')
if self.G.image_channels == 3 and self.G.channel_order == 'BGR':
image = image[:, :, ::-1]
if image.shape[1:3] != [self.G.resolution, self.G.resolution]:
image = cv2.resize(image, (self.G.resolution, self.G.resolution))
image = image.astype(np.float32)
image = image / 255.0 * (self.G.max_val - self.G.min_val) + self.G.min_val
image = image.astype(np.float32).transpose(2, 0, 1)
return image
def get_init_code(self, image):
"""Gets initial latent codes as the start point for optimization.
The input image is assumed to have already been preprocessed, meaning to
have shape [self.G.image_channels, self.G.resolution, self.G.resolution],
channel order `self.G.channel_order`, and pixel range [self.G.min_val,
self.G.max_val].
"""
x = image[np.newaxis]
x = self.G.to_tensor(x.astype(np.float32))
z = _get_tensor_value(self.E.net(x).view(1, *self.encode_dim))
return z.astype(np.float32)
def invert(self, image, num_viz=0):
"""Inverts the given image to a latent code.
Basically, this function is based on gradient descent algorithm.
Args:
image: Target image to invert, which is assumed to have already been
preprocessed.
num_viz: Number of intermediate outputs to visualize. (default: 0)
Returns:
A two-element tuple. First one is the inverted code. Second one is a list
of intermediate results, where first image is the input image, second
one is the reconstructed result from the initial latent code, remainings
are from the optimization process every `self.iteration // num_viz`
steps.
"""
x = image[np.newaxis]
x = self.G.to_tensor(x.astype(np.float32))
x.requires_grad = False
init_z = self.get_init_code(image)
z = torch.Tensor(init_z).to(self.run_device)
z.requires_grad = True
optimizer = torch.optim.Adam([z], lr=self.learning_rate)
viz_results = []
viz_results.append(self.G.postprocess(_get_tensor_value(x))[0])
x_init_inv = self.G.net.synthesis(z)
viz_results.append(self.G.postprocess(_get_tensor_value(x_init_inv))[0])
pbar = tqdm(range(1, self.iteration + 1), leave=True)
for step in pbar:
loss = 0.0
# Reconstruction loss.
x_rec = self.G.net.synthesis(z)
loss_pix = torch.mean((x - x_rec) ** 2)
loss = loss + loss_pix * self.loss_pix_weight
log_message = f'loss_pix: {_get_tensor_value(loss_pix):.3f}'
# Perceptual loss.
if self.loss_feat_weight:
x_feat = self.F.net(x)
x_rec_feat = self.F.net(x_rec)
loss_feat = torch.mean((x_feat - x_rec_feat) ** 2)
loss = loss + loss_feat * self.loss_feat_weight
log_message += f', loss_feat: {_get_tensor_value(loss_feat):.3f}'
# Regularization loss.
if self.loss_reg_weight:
z_rec = self.E.net(x_rec).view(1, *self.encode_dim)
loss_reg = torch.mean((z - z_rec) ** 2)
loss = loss + loss_reg * self.loss_reg_weight
log_message += f', loss_reg: {_get_tensor_value(loss_reg):.3f}'
log_message += f', loss: {_get_tensor_value(loss):.3f}'
pbar.set_description_str(log_message)
if self.logger:
self.logger.debug(f'Step: {step:05d}, '
f'lr: {self.learning_rate:.2e}, '
f'{log_message}')
# Do optimization.
optimizer.zero_grad()
loss.backward()
optimizer.step()
if num_viz > 0 and step % (self.iteration // num_viz) == 0:
viz_results.append(self.G.postprocess(_get_tensor_value(x_rec))[0])
return _get_tensor_value(z), viz_results
def easy_invert(self, image, num_viz=0):
"""Wraps functions `preprocess()` and `invert()` together."""
return self.invert(self.preprocess(image), num_viz)
def diffuse(self,
target,
context,
center_x,
center_y,
crop_x,
crop_y,
num_viz=0):
"""Diffuses the target image to a context image.
Basically, this function is a motified version of `self.invert()`. More
concretely, the encoder regularizer is removed from the objectives and the
reconstruction loss is computed from the masked region.
Args:
target: Target image (foreground).
context: Context image (background).
center_x: The x-coordinate of the crop center.
center_y: The y-coordinate of the crop center.
crop_x: The crop size along the x-axis.
crop_y: The crop size along the y-axis.
num_viz: Number of intermediate outputs to visualize. (default: 0)
Returns:
A two-element tuple. First one is the inverted code. Second one is a list
of intermediate results, where first image is the direct copy-paste
image, second one is the reconstructed result from the initial latent
code, remainings are from the optimization process every
`self.iteration // num_viz` steps.
"""
image_shape = (self.G.image_channels, self.G.resolution, self.G.resolution)
mask = np.zeros((1, *image_shape), dtype=np.float32)
xx = center_x - crop_x // 2
yy = center_y - crop_y // 2
mask[:, :, yy:yy + crop_y, xx:xx + crop_x] = 1.0
target = target[np.newaxis]
context = context[np.newaxis]
x = target * mask + context * (1 - mask)
x = self.G.to_tensor(x.astype(np.float32))
x.requires_grad = False
mask = self.G.to_tensor(mask.astype(np.float32))
mask.requires_grad = False
init_z = _get_tensor_value(self.E.net(x).view(1, *self.encode_dim))
init_z = init_z.astype(np.float32)
z = torch.Tensor(init_z).to(self.run_device)
z.requires_grad = True
optimizer = torch.optim.Adam([z], lr=self.learning_rate)
viz_results = []
viz_results.append(self.G.postprocess(_get_tensor_value(x))[0])
x_init_inv = self.G.net.synthesis(z)
viz_results.append(self.G.postprocess(_get_tensor_value(x_init_inv))[0])
pbar = tqdm(range(1, self.iteration + 1), leave=True)
for step in pbar:
loss = 0.0
# Reconstruction loss.
x_rec = self.G.net.synthesis(z)
loss_pix = torch.mean(((x - x_rec) * mask) ** 2)
loss = loss + loss_pix * self.loss_pix_weight
log_message = f'loss_pix: {_get_tensor_value(loss_pix):.3f}'
# Perceptual loss.
if self.loss_feat_weight:
x_feat = self.F.net(x * mask)
x_rec_feat = self.F.net(x_rec * mask)
loss_feat = torch.mean((x_feat - x_rec_feat) ** 2)
loss = loss + loss_feat * self.loss_feat_weight
log_message += f', loss_feat: {_get_tensor_value(loss_feat):.3f}'
log_message += f', loss: {_get_tensor_value(loss):.3f}'
pbar.set_description_str(log_message)
if self.logger:
self.logger.debug(f'Step: {step:05d}, '
f'lr: {self.learning_rate:.2e}, '
f'{log_message}')
# Do optimization.
optimizer.zero_grad()
loss.backward()
optimizer.step()
if num_viz > 0 and step % (self.iteration // num_viz) == 0:
viz_results.append(self.G.postprocess(_get_tensor_value(x_rec))[0])
return _get_tensor_value(z), viz_results
def easy_diffuse(self, target, context, *args, **kwargs):
"""Wraps functions `preprocess()` and `diffuse()` together."""
return self.diffuse(self.preprocess(target),
self.preprocess(context),
*args, **kwargs)
| 39.209524 | 80 | 0.6644 |
from tqdm import tqdm
import cv2
import numpy as np
import torch
from models.stylegan_generator import StyleGANGenerator
from models.stylegan_encoder import StyleGANEncoder
from models.perceptual_model import PerceptualModel
__all__ = ['StyleGANInverter']
def _softplus(x):
return torch.nn.functional.softplus(x, beta=1, threshold=10000)
def _get_tensor_value(tensor):
return tensor.cpu().detach().numpy()
class StyleGANInverter(object):
def __init__(self,
model_name,
learning_rate=1e-2,
iteration=100,
reconstruction_loss_weight=1.0,
perceptual_loss_weight=5e-5,
regularization_loss_weight=2.0,
logger=None):
self.logger = logger
self.model_name = model_name
self.gan_type = 'stylegan'
self.G = StyleGANGenerator(self.model_name, self.logger)
self.E = StyleGANEncoder(self.model_name, self.logger)
self.F = PerceptualModel(min_val=self.G.min_val, max_val=self.G.max_val)
self.encode_dim = [self.G.num_layers, self.G.w_space_dim]
self.run_device = self.G.run_device
assert list(self.encode_dim) == list(self.E.encode_dim)
assert self.G.gan_type == self.gan_type
assert self.E.gan_type == self.gan_type
self.learning_rate = learning_rate
self.iteration = iteration
self.loss_pix_weight = reconstruction_loss_weight
self.loss_feat_weight = perceptual_loss_weight
self.loss_reg_weight = regularization_loss_weight
assert self.loss_pix_weight > 0
def preprocess(self, image):
if not isinstance(image, np.ndarray):
raise ValueError(f'Input image should be with type `numpy.ndarray`!')
if image.dtype != np.uint8:
raise ValueError(f'Input image should be with dtype `numpy.uint8`!')
if image.ndim != 3 or image.shape[2] not in [1, 3]:
raise ValueError(f'Input should be with shape [height, width, channel], '
f'where channel equals to 1 or 3!\n'
f'But {image.shape} is received!')
if image.shape[2] == 1 and self.G.image_channels == 3:
image = np.tile(image, (1, 1, 3))
if image.shape[2] != self.G.image_channels:
raise ValueError(f'Number of channels of input image, which is '
f'{image.shape[2]}, is not supported by the current '
f'inverter, which requires {self.G.image_channels} '
f'channels!')
if self.G.image_channels == 3 and self.G.channel_order == 'BGR':
image = image[:, :, ::-1]
if image.shape[1:3] != [self.G.resolution, self.G.resolution]:
image = cv2.resize(image, (self.G.resolution, self.G.resolution))
image = image.astype(np.float32)
image = image / 255.0 * (self.G.max_val - self.G.min_val) + self.G.min_val
image = image.astype(np.float32).transpose(2, 0, 1)
return image
def get_init_code(self, image):
x = image[np.newaxis]
x = self.G.to_tensor(x.astype(np.float32))
z = _get_tensor_value(self.E.net(x).view(1, *self.encode_dim))
return z.astype(np.float32)
def invert(self, image, num_viz=0):
x = image[np.newaxis]
x = self.G.to_tensor(x.astype(np.float32))
x.requires_grad = False
init_z = self.get_init_code(image)
z = torch.Tensor(init_z).to(self.run_device)
z.requires_grad = True
optimizer = torch.optim.Adam([z], lr=self.learning_rate)
viz_results = []
viz_results.append(self.G.postprocess(_get_tensor_value(x))[0])
x_init_inv = self.G.net.synthesis(z)
viz_results.append(self.G.postprocess(_get_tensor_value(x_init_inv))[0])
pbar = tqdm(range(1, self.iteration + 1), leave=True)
for step in pbar:
loss = 0.0
x_rec = self.G.net.synthesis(z)
loss_pix = torch.mean((x - x_rec) ** 2)
loss = loss + loss_pix * self.loss_pix_weight
log_message = f'loss_pix: {_get_tensor_value(loss_pix):.3f}'
if self.loss_feat_weight:
x_feat = self.F.net(x)
x_rec_feat = self.F.net(x_rec)
loss_feat = torch.mean((x_feat - x_rec_feat) ** 2)
loss = loss + loss_feat * self.loss_feat_weight
log_message += f', loss_feat: {_get_tensor_value(loss_feat):.3f}'
if self.loss_reg_weight:
z_rec = self.E.net(x_rec).view(1, *self.encode_dim)
loss_reg = torch.mean((z - z_rec) ** 2)
loss = loss + loss_reg * self.loss_reg_weight
log_message += f', loss_reg: {_get_tensor_value(loss_reg):.3f}'
log_message += f', loss: {_get_tensor_value(loss):.3f}'
pbar.set_description_str(log_message)
if self.logger:
self.logger.debug(f'Step: {step:05d}, '
f'lr: {self.learning_rate:.2e}, '
f'{log_message}')
optimizer.zero_grad()
loss.backward()
optimizer.step()
if num_viz > 0 and step % (self.iteration // num_viz) == 0:
viz_results.append(self.G.postprocess(_get_tensor_value(x_rec))[0])
return _get_tensor_value(z), viz_results
def easy_invert(self, image, num_viz=0):
return self.invert(self.preprocess(image), num_viz)
def diffuse(self,
target,
context,
center_x,
center_y,
crop_x,
crop_y,
num_viz=0):
image_shape = (self.G.image_channels, self.G.resolution, self.G.resolution)
mask = np.zeros((1, *image_shape), dtype=np.float32)
xx = center_x - crop_x // 2
yy = center_y - crop_y // 2
mask[:, :, yy:yy + crop_y, xx:xx + crop_x] = 1.0
target = target[np.newaxis]
context = context[np.newaxis]
x = target * mask + context * (1 - mask)
x = self.G.to_tensor(x.astype(np.float32))
x.requires_grad = False
mask = self.G.to_tensor(mask.astype(np.float32))
mask.requires_grad = False
init_z = _get_tensor_value(self.E.net(x).view(1, *self.encode_dim))
init_z = init_z.astype(np.float32)
z = torch.Tensor(init_z).to(self.run_device)
z.requires_grad = True
optimizer = torch.optim.Adam([z], lr=self.learning_rate)
viz_results = []
viz_results.append(self.G.postprocess(_get_tensor_value(x))[0])
x_init_inv = self.G.net.synthesis(z)
viz_results.append(self.G.postprocess(_get_tensor_value(x_init_inv))[0])
pbar = tqdm(range(1, self.iteration + 1), leave=True)
for step in pbar:
loss = 0.0
x_rec = self.G.net.synthesis(z)
loss_pix = torch.mean(((x - x_rec) * mask) ** 2)
loss = loss + loss_pix * self.loss_pix_weight
log_message = f'loss_pix: {_get_tensor_value(loss_pix):.3f}'
if self.loss_feat_weight:
x_feat = self.F.net(x * mask)
x_rec_feat = self.F.net(x_rec * mask)
loss_feat = torch.mean((x_feat - x_rec_feat) ** 2)
loss = loss + loss_feat * self.loss_feat_weight
log_message += f', loss_feat: {_get_tensor_value(loss_feat):.3f}'
log_message += f', loss: {_get_tensor_value(loss):.3f}'
pbar.set_description_str(log_message)
if self.logger:
self.logger.debug(f'Step: {step:05d}, '
f'lr: {self.learning_rate:.2e}, '
f'{log_message}')
optimizer.zero_grad()
loss.backward()
optimizer.step()
if num_viz > 0 and step % (self.iteration // num_viz) == 0:
viz_results.append(self.G.postprocess(_get_tensor_value(x_rec))[0])
return _get_tensor_value(z), viz_results
def easy_diffuse(self, target, context, *args, **kwargs):
return self.diffuse(self.preprocess(target),
self.preprocess(context),
*args, **kwargs)
| true | true |
790006f1c3c27e3c020824f39a42af98da3f28b2 | 4,520 | py | Python | analysis/gen_db/mf_grc/gen_mf_locs_210518.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null | analysis/gen_db/mf_grc/gen_mf_locs_210518.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null | analysis/gen_db/mf_grc/gen_mf_locs_210518.py | htem/cb2_project_analysis | a677cbadc7e3bf0074975a94ed1d06b4801899c0 | [
"MIT"
] | null | null | null | import collections
from collections import defaultdict
import sys
import json
import random
from jsmin import jsmin
from io import StringIO
import numpy as np
import copy
import os
script_n = os.path.basename(__file__).split('.')[0]
script_n = script_n.split('_', 1)[1]
def to_ng(loc):
return (int(loc[0]/4), int(loc[1]/4), int(loc[2]/40))
'''Load data'''
import compress_pickle
fname = 'gen_210518_setup01_v2_syndb_threshold_20_coalesced.gz'
grc_mfs_locs = compress_pickle.load(fname)
mfs_locs = defaultdict(list)
for grc in grc_mfs_locs:
for mf in grc_mfs_locs[grc]:
for syn in grc_mfs_locs[grc][mf]:
mfs_locs[mf].append(syn['syn_loc0'])
# print(mfs_locs[mf]); asdf
asdff = (172644, 113468, 89)
asdfff = (137580, 101824, 369)
# white list for big boutons
whitelist = set([
(172644, 113468, 89),
(163520, 98364, 83),
(113008, 109372, 1154),
(70424, 116512, 71),
(186536, 100020, 130),
(86780, 110184, 81),
(177992, 108528, 1164),
(127368, 101716, 1143),
(155036, 103252, 71),
(97884, 104152, 1160),
(109476, 104808, 76),
(82936, 122484, 76),
(113532, 104660, 1150),
(78904, 115540, 1158),
(190684, 91276, 1015),
(160500, 99828, 1165),
(109020, 115476, 74),
(93516, 101476, 858),
(126728, 104988, 86),
(173456, 106376, 71),
(197436, 95688, 898),
(122752, 110608, 85),
(122192, 119344, 70),
(122396, 118840, 83),
(204868, 103452, 145),
(94212, 107860, 1137),
(92360, 105844, 1162),
(84704, 115452, 119),
(54036, 105484, 394),
(110624, 105800, 70),
(170512, 99132, 107),
(71200, 114308, 1123),
(106588, 98692, 1160),
(70164, 107908, 1015),
(144772, 106812, 105),
(asdff),
(asdff),
(asdff),
])
blacklist = set([
(137580, 101824, 369),
(127384, 115252, 746),
(155268, 99276, 918),
(182000, 91966, 716),
(119828, 107400, 312),
(171384, 94244, 573),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
])
# ---------------------------------------------------------------------------
# Cluster each mossy fiber's (MF) synapse coordinates into candidate bouton
# locations with DBSCAN, print a bouton-count histogram, and pickle the
# result.  Relies on `mfs_locs`, `whitelist`, `to_ng` and `script_n` defined
# earlier in this script.
# ---------------------------------------------------------------------------
'''Cluster and extract locations of MF boutons'''
from sklearn.cluster import DBSCAN
# mf id -> list of [x, y, z] bouton centroids (filled by the loop below)
mfs_bouton_locs = {}
'''if a bouton location has less than this many synapses then it won't be considered in order to reduce false positives'''
# bouton_synapse_threshold = 6 # safe for determining big bouton locations
# NOTE(review): only the last assignment takes effect -- the effective
# threshold is 5; the earlier values record what was tried previously.
bouton_synapse_threshold = 2
bouton_synapse_threshold = 3
bouton_synapse_threshold = 4 # 4 is a bit iffy, since it has some semi big boutons
bouton_synapse_threshold = 5
# bouton_synapse_threshold = 6 # this threshold has quite a bit of FPs
for mf in mfs_locs:
    # eps is in the same units as the stored coordinates (nm, presumably,
    # given the "8um" note) -- TODO confirm against the data source.
    dbscan = DBSCAN(eps=8000, min_samples=2) # max dist set to 8um
    # dbscan = DBSCAN(eps=10000, min_samples=2) # max dist set to 8um
    dbscan.fit(mfs_locs[mf])
    # Group this MF's synapse locations by their DBSCAN cluster label.
    loc_by_label = defaultdict(list)
    for loc, label in zip(mfs_locs[mf], dbscan.labels_):
        loc_by_label[label].append(loc)
    mf_bouton_locs = []
    for label in loc_by_label:
        if len(loc_by_label[label]) <= bouton_synapse_threshold:
            # Small cluster: keep it only when one of its synapses was
            # manually whitelisted; otherwise drop it as a likely FP.
            whitelisted = False
            for loc in loc_by_label[label]:
                if to_ng(loc) in whitelist:
                    whitelisted = True
            if not whitelisted:
                if len(loc_by_label[label]) >= 2:
                    print(f'Ignoring {mf} due to insufficient synapses')
                    for loc in loc_by_label[label]:
                        print(to_ng(loc))
                continue
        # Centroid of the cluster.  NOTE(review): `sum` shadows the builtin
        # (kept as-is; this is a documentation-only pass).
        sum = [0, 0, 0]
        for loc in loc_by_label[label]:
            sum = [sum[0]+loc[0], sum[1]+loc[1], sum[2]+loc[2]]
        center = [
            int(sum[0]/len(loc_by_label[label])),
            int(sum[1]/len(loc_by_label[label])),
            int(sum[2]/len(loc_by_label[label])),
            ]
        mf_bouton_locs.append(center)
    mfs_bouton_locs[mf] = mf_bouton_locs
    # print(mf_bouton_locs)
    # for loc in mf_bouton_locs:
    #     print([int(loc[0]/4), int(loc[1]/4), int(loc[2]/40)])
# Histogram: bouton count -> list of MF ids with that count.
mfs_bouton_count = defaultdict(list)
for mf in mfs_bouton_locs:
    mfs_bouton_count[len(mfs_bouton_locs[mf])].append(mf)
for count in sorted(mfs_bouton_count.keys()):
    print(f'{count}: {mfs_bouton_count[count]}')
'''save mfs_bouton_locs'''
import compress_pickle
compress_pickle.dump((
    mfs_bouton_locs
    ), f"{script_n}.gz")
# Deliberate NameError: halts the script here so the ad-hoc debug loops
# below only run when pasted into an interactive session.
asdf
for loc in mfs_bouton_locs['mf_431']:
    print([int(loc[0]/4), int(loc[1]/4), int(loc[2]/40)])
for loc in mfs_locs['mf_41']:
    print([int(loc[0]/4), int(loc[1]/4), int(loc[2]/40)])
| 28.427673 | 122 | 0.623009 | import collections
from collections import defaultdict
import sys
import json
import random
from jsmin import jsmin
from io import StringIO
import numpy as np
import copy
import os
script_n = os.path.basename(__file__).split('.')[0]
script_n = script_n.split('_', 1)[1]
def to_ng(loc):
return (int(loc[0]/4), int(loc[1]/4), int(loc[2]/40))
import compress_pickle
fname = 'gen_210518_setup01_v2_syndb_threshold_20_coalesced.gz'
grc_mfs_locs = compress_pickle.load(fname)
mfs_locs = defaultdict(list)
for grc in grc_mfs_locs:
for mf in grc_mfs_locs[grc]:
for syn in grc_mfs_locs[grc][mf]:
mfs_locs[mf].append(syn['syn_loc0'])
asdff = (172644, 113468, 89)
asdfff = (137580, 101824, 369)
whitelist = set([
(172644, 113468, 89),
(163520, 98364, 83),
(113008, 109372, 1154),
(70424, 116512, 71),
(186536, 100020, 130),
(86780, 110184, 81),
(177992, 108528, 1164),
(127368, 101716, 1143),
(155036, 103252, 71),
(97884, 104152, 1160),
(109476, 104808, 76),
(82936, 122484, 76),
(113532, 104660, 1150),
(78904, 115540, 1158),
(190684, 91276, 1015),
(160500, 99828, 1165),
(109020, 115476, 74),
(93516, 101476, 858),
(126728, 104988, 86),
(173456, 106376, 71),
(197436, 95688, 898),
(122752, 110608, 85),
(122192, 119344, 70),
(122396, 118840, 83),
(204868, 103452, 145),
(94212, 107860, 1137),
(92360, 105844, 1162),
(84704, 115452, 119),
(54036, 105484, 394),
(110624, 105800, 70),
(170512, 99132, 107),
(71200, 114308, 1123),
(106588, 98692, 1160),
(70164, 107908, 1015),
(144772, 106812, 105),
(asdff),
(asdff),
(asdff),
])
blacklist = set([
(137580, 101824, 369),
(127384, 115252, 746),
(155268, 99276, 918),
(182000, 91966, 716),
(119828, 107400, 312),
(171384, 94244, 573),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
(asdfff),
])
from sklearn.cluster import DBSCAN
mfs_bouton_locs = {}
e_threshold = 3
bouton_synapse_threshold = 4
bouton_synapse_threshold = 5
AN(eps=8000, min_samples=2)
ocs[mf])
loc_by_label = defaultdict(list)
for loc, label in zip(mfs_locs[mf], dbscan.labels_):
loc_by_label[label].append(loc)
mf_bouton_locs = []
for label in loc_by_label:
if len(loc_by_label[label]) <= bouton_synapse_threshold:
whitelisted = False
for loc in loc_by_label[label]:
if to_ng(loc) in whitelist:
whitelisted = True
if not whitelisted:
if len(loc_by_label[label]) >= 2:
print(f'Ignoring {mf} due to insufficient synapses')
for loc in loc_by_label[label]:
print(to_ng(loc))
continue
sum = [0, 0, 0]
for loc in loc_by_label[label]:
sum = [sum[0]+loc[0], sum[1]+loc[1], sum[2]+loc[2]]
center = [
int(sum[0]/len(loc_by_label[label])),
int(sum[1]/len(loc_by_label[label])),
int(sum[2]/len(loc_by_label[label])),
]
mf_bouton_locs.append(center)
mfs_bouton_locs[mf] = mf_bouton_locs
mfs_bouton_count = defaultdict(list)
for mf in mfs_bouton_locs:
mfs_bouton_count[len(mfs_bouton_locs[mf])].append(mf)
for count in sorted(mfs_bouton_count.keys()):
print(f'{count}: {mfs_bouton_count[count]}')
import compress_pickle
compress_pickle.dump((
mfs_bouton_locs
), f"{script_n}.gz")
asdf
for loc in mfs_bouton_locs['mf_431']:
print([int(loc[0]/4), int(loc[1]/4), int(loc[2]/40)])
for loc in mfs_locs['mf_41']:
print([int(loc[0]/4), int(loc[1]/4), int(loc[2]/40)])
| true | true |
7900079cdf32747ee591880bae0317ca36a8f03e | 4,367 | py | Python | common/security-features/subresource/font.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | common/security-features/subresource/font.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 7,642 | 2018-05-28T09:38:03.000Z | 2022-03-31T20:55:48.000Z | common/security-features/subresource/font.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | import os, sys
from base64 import decodebytes
from wptserve.utils import isomorphic_decode
import importlib
subresource = importlib.import_module("common.security-features.subresource.subresource")
def generate_payload(request, server_data):
    """Serve the test font, optionally stashing the request headers.

    Builds a JSON string describing the request headers from
    ``server_data`` and, when the request carries an ``id`` query
    parameter, stores it in the server stash under that id so a later
    ``report-headers`` request can retrieve it.  Always returns the raw
    bytes of a minimal embedded TrueType font.
    """
    data = (u'{"headers": %(headers)s}') % server_data
    if b"id" in request.GET:
        request.server.stash.put(request.GET[b"id"], data)
    # Simple base64 encoded .ttf font (decoded to raw bytes on return).
    return decodebytes(b"AAEAAAANAIAAAwBQRkZUTU6u6MkAAAXcAAAAHE9TLzJWYW"
                       b"QKAAABWAAAAFZjbWFwAA8D7wAAAcAAAAFCY3Z0IAAhAnkA"
                       b"AAMEAAAABGdhc3D//wADAAAF1AAAAAhnbHlmCC6aTwAAAx"
                       b"QAAACMaGVhZO8ooBcAAADcAAAANmhoZWEIkAV9AAABFAAA"
                       b"ACRobXR4EZQAhQAAAbAAAAAQbG9jYQBwAFQAAAMIAAAACm"
                       b"1heHAASQA9AAABOAAAACBuYW1lehAVOgAAA6AAAAIHcG9z"
                       b"dP+uADUAAAWoAAAAKgABAAAAAQAAMhPyuV8PPPUACwPoAA"
                       b"AAAMU4Lm0AAAAAxTgubQAh/5wFeAK8AAAACAACAAAAAAAA"
                       b"AAEAAAK8/5wAWgXcAAAAAAV4AAEAAAAAAAAAAAAAAAAAAA"
                       b"AEAAEAAAAEAAwAAwAAAAAAAgAAAAEAAQAAAEAALgAAAAAA"
                       b"AQXcAfQABQAAAooCvAAAAIwCigK8AAAB4AAxAQIAAAIABg"
                       b"kAAAAAAAAAAAABAAAAAAAAAAAAAAAAUGZFZABAAEEAQQMg"
                       b"/zgAWgK8AGQAAAABAAAAAAAABdwAIQAAAAAF3AAABdwAZA"
                       b"AAAAMAAAADAAAAHAABAAAAAAA8AAMAAQAAABwABAAgAAAA"
                       b"BAAEAAEAAABB//8AAABB////wgABAAAAAAAAAQYAAAEAAA"
                       b"AAAAAAAQIAAAACAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
                       b"AAAAAAAAAAAAAAAAAAAhAnkAAAAqACoAKgBGAAAAAgAhAA"
                       b"ABKgKaAAMABwAusQEALzyyBwQA7TKxBgXcPLIDAgDtMgCx"
                       b"AwAvPLIFBADtMrIHBgH8PLIBAgDtMjMRIREnMxEjIQEJ6M"
                       b"fHApr9ZiECWAAAAwBk/5wFeAK8AAMABwALAAABNSEVATUh"
                       b"FQE1IRUB9AH0/UQDhPu0BRQB9MjI/tTIyP7UyMgAAAAAAA"
                       b"4ArgABAAAAAAAAACYATgABAAAAAAABAAUAgQABAAAAAAAC"
                       b"AAYAlQABAAAAAAADACEA4AABAAAAAAAEAAUBDgABAAAAAA"
                       b"AFABABNgABAAAAAAAGAAUBUwADAAEECQAAAEwAAAADAAEE"
                       b"CQABAAoAdQADAAEECQACAAwAhwADAAEECQADAEIAnAADAA"
                       b"EECQAEAAoBAgADAAEECQAFACABFAADAAEECQAGAAoBRwBD"
                       b"AG8AcAB5AHIAaQBnAGgAdAAgACgAYwApACAAMgAwADAAOA"
                       b"AgAE0AbwB6AGkAbABsAGEAIABDAG8AcgBwAG8AcgBhAHQA"
                       b"aQBvAG4AAENvcHlyaWdodCAoYykgMjAwOCBNb3ppbGxhIE"
                       b"NvcnBvcmF0aW9uAABNAGEAcgBrAEEAAE1hcmtBAABNAGUA"
                       b"ZABpAHUAbQAATWVkaXVtAABGAG8AbgB0AEYAbwByAGcAZQ"
                       b"AgADIALgAwACAAOgAgAE0AYQByAGsAQQAgADoAIAA1AC0A"
                       b"MQAxAC0AMgAwADAAOAAARm9udEZvcmdlIDIuMCA6IE1hcm"
                       b"tBIDogNS0xMS0yMDA4AABNAGEAcgBrAEEAAE1hcmtBAABW"
                       b"AGUAcgBzAGkAbwBuACAAMAAwADEALgAwADAAMAAgAABWZX"
                       b"JzaW9uIDAwMS4wMDAgAABNAGEAcgBrAEEAAE1hcmtBAAAA"
                       b"AgAAAAAAAP+DADIAAAABAAAAAAAAAAAAAAAAAAAAAAAEAA"
                       b"AAAQACACQAAAAAAAH//wACAAAAAQAAAADEPovuAAAAAMU4"
                       b"Lm0AAAAAxTgubQ==")
def generate_report_headers_payload(request, server_data):
    """Return the headers JSON previously stashed under this request's id.

    ``take()`` also removes the entry from the stash, so each stashed
    payload can be reported exactly once.
    """
    return request.server.stash.take(request.GET[b"id"])
def main(request, response):
    """WPT handler entry point.

    Serves the embedded TrueType font by default; when the request carries
    a ``report-headers`` query parameter it instead returns the request
    headers stashed by an earlier font fetch, as JSON.
    """
    if b"report-headers" in request.GET:
        # Report mode: hand back what generate_payload() recorded.
        payload_generator = lambda data: generate_report_headers_payload(request, data)
        response_mime = b'application/json'
    else:
        # Default mode: serve the font itself.
        payload_generator = lambda data: generate_payload(request, data)
        response_mime = b'application/x-font-truetype'
    subresource.respond(request,
                        response,
                        payload_generator=payload_generator,
                        content_type=response_mime,
                        access_control_allow_origin=b"*")
| 56.714286 | 89 | 0.651477 | import os, sys
from base64 import decodebytes
from wptserve.utils import isomorphic_decode
import importlib
subresource = importlib.import_module("common.security-features.subresource.subresource")
def generate_payload(request, server_data):
data = (u'{"headers": %(headers)s}') % server_data
if b"id" in request.GET:
request.server.stash.put(request.GET[b"id"], data)
return decodebytes(b"AAEAAAANAIAAAwBQRkZUTU6u6MkAAAXcAAAAHE9TLzJWYW"
b"QKAAABWAAAAFZjbWFwAA8D7wAAAcAAAAFCY3Z0IAAhAnkA"
b"AAMEAAAABGdhc3D//wADAAAF1AAAAAhnbHlmCC6aTwAAAx"
b"QAAACMaGVhZO8ooBcAAADcAAAANmhoZWEIkAV9AAABFAAA"
b"ACRobXR4EZQAhQAAAbAAAAAQbG9jYQBwAFQAAAMIAAAACm"
b"1heHAASQA9AAABOAAAACBuYW1lehAVOgAAA6AAAAIHcG9z"
b"dP+uADUAAAWoAAAAKgABAAAAAQAAMhPyuV8PPPUACwPoAA"
b"AAAMU4Lm0AAAAAxTgubQAh/5wFeAK8AAAACAACAAAAAAAA"
b"AAEAAAK8/5wAWgXcAAAAAAV4AAEAAAAAAAAAAAAAAAAAAA"
b"AEAAEAAAAEAAwAAwAAAAAAAgAAAAEAAQAAAEAALgAAAAAA"
b"AQXcAfQABQAAAooCvAAAAIwCigK8AAAB4AAxAQIAAAIABg"
b"kAAAAAAAAAAAABAAAAAAAAAAAAAAAAUGZFZABAAEEAQQMg"
b"/zgAWgK8AGQAAAABAAAAAAAABdwAIQAAAAAF3AAABdwAZA"
b"AAAAMAAAADAAAAHAABAAAAAAA8AAMAAQAAABwABAAgAAAA"
b"BAAEAAEAAABB//8AAABB////wgABAAAAAAAAAQYAAAEAAA"
b"AAAAAAAQIAAAACAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
b"AAAAAAAAAAAAAAAAAAAhAnkAAAAqACoAKgBGAAAAAgAhAA"
b"ABKgKaAAMABwAusQEALzyyBwQA7TKxBgXcPLIDAgDtMgCx"
b"AwAvPLIFBADtMrIHBgH8PLIBAgDtMjMRIREnMxEjIQEJ6M"
b"fHApr9ZiECWAAAAwBk/5wFeAK8AAMABwALAAABNSEVATUh"
b"FQE1IRUB9AH0/UQDhPu0BRQB9MjI/tTIyP7UyMgAAAAAAA"
b"4ArgABAAAAAAAAACYATgABAAAAAAABAAUAgQABAAAAAAAC"
b"AAYAlQABAAAAAAADACEA4AABAAAAAAAEAAUBDgABAAAAAA"
b"AFABABNgABAAAAAAAGAAUBUwADAAEECQAAAEwAAAADAAEE"
b"CQABAAoAdQADAAEECQACAAwAhwADAAEECQADAEIAnAADAA"
b"EECQAEAAoBAgADAAEECQAFACABFAADAAEECQAGAAoBRwBD"
b"AG8AcAB5AHIAaQBnAGgAdAAgACgAYwApACAAMgAwADAAOA"
b"AgAE0AbwB6AGkAbABsAGEAIABDAG8AcgBwAG8AcgBhAHQA"
b"aQBvAG4AAENvcHlyaWdodCAoYykgMjAwOCBNb3ppbGxhIE"
b"NvcnBvcmF0aW9uAABNAGEAcgBrAEEAAE1hcmtBAABNAGUA"
b"ZABpAHUAbQAATWVkaXVtAABGAG8AbgB0AEYAbwByAGcAZQ"
b"AgADIALgAwACAAOgAgAE0AYQByAGsAQQAgADoAIAA1AC0A"
b"MQAxAC0AMgAwADAAOAAARm9udEZvcmdlIDIuMCA6IE1hcm"
b"tBIDogNS0xMS0yMDA4AABNAGEAcgBrAEEAAE1hcmtBAABW"
b"AGUAcgBzAGkAbwBuACAAMAAwADEALgAwADAAMAAgAABWZX"
b"JzaW9uIDAwMS4wMDAgAABNAGEAcgBrAEEAAE1hcmtBAAAA"
b"AgAAAAAAAP+DADIAAAABAAAAAAAAAAAAAAAAAAAAAAAEAA"
b"AAAQACACQAAAAAAAH//wACAAAAAQAAAADEPovuAAAAAMU4"
b"Lm0AAAAAxTgubQ==")
def generate_report_headers_payload(request, server_data):
stashed_data = request.server.stash.take(request.GET[b"id"])
return stashed_data
def main(request, response):
handler = lambda data: generate_payload(request, data)
content_type = b'application/x-font-truetype'
if b"report-headers" in request.GET:
handler = lambda data: generate_report_headers_payload(request, data)
content_type = b'application/json'
subresource.respond(request,
response,
payload_generator = handler,
content_type = content_type,
access_control_allow_origin = b"*")
| true | true |
790007dc0e8352ed637dd24396f6a69566b63345 | 10,359 | py | Python | homeassistant/components/mqtt/discovery.py | tylerball/home-assistant | 5b35317e1e9f3fe28298cd862525c6a386f267df | [
"Apache-2.0"
] | null | null | null | homeassistant/components/mqtt/discovery.py | tylerball/home-assistant | 5b35317e1e9f3fe28298cd862525c6a386f267df | [
"Apache-2.0"
] | null | null | null | homeassistant/components/mqtt/discovery.py | tylerball/home-assistant | 5b35317e1e9f3fe28298cd862525c6a386f267df | [
"Apache-2.0"
] | 1 | 2019-09-06T01:00:46.000Z | 2019-09-06T01:00:46.000Z | """
Support for MQTT discovery.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mqtt/#discovery
"""
import asyncio
import json
import logging
import re
from homeassistant.components import mqtt
from homeassistant.components.mqtt import CONF_STATE_TOPIC, ATTR_DISCOVERY_HASH
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
# Parses "<prefix>/<component>/[<node_id>/]<object_id>/config" topics into
# named groups; node_id is optional.
TOPIC_MATCHER = re.compile(
    r'(?P<prefix_topic>\w+)/(?P<component>\w+)/'
    r'(?:(?P<node_id>[a-zA-Z0-9_-]+)/)?(?P<object_id>[a-zA-Z0-9_-]+)/config')
# Components that may be configured via MQTT discovery at all.
SUPPORTED_COMPONENTS = [
    'binary_sensor', 'camera', 'cover', 'fan',
    'light', 'sensor', 'switch', 'lock', 'climate',
    'alarm_control_panel', 'vacuum']
# Subset of SUPPORTED_COMPONENTS that is set up through a config entry
# (forwarded via async_forward_entry_setup) instead of async_load_platform.
CONFIG_ENTRY_COMPONENTS = [
    'binary_sensor',
    'camera',
    'cover',
    'light',
    'lock',
    'sensor',
    'switch',
    'climate',
    'alarm_control_panel',
    'fan',
    'vacuum',
]
# Maps a legacy "platform" value in a discovery payload to the "schema"
# value that replaces it (see the deprecation warning in async_start).
DEPRECATED_PLATFORM_TO_SCHEMA = {
    'light': {
        'mqtt_json': 'json',
        'mqtt_template': 'template',
    }
}
# Keys into hass.data used by async_start / the message handler below.
ALREADY_DISCOVERED = 'mqtt_discovered_components'
DATA_CONFIG_ENTRY_LOCK = 'mqtt_config_entry_lock'
CONFIG_ENTRY_IS_SETUP = 'mqtt_config_entry_is_setup'
# Dispatcher signal name templates.
MQTT_DISCOVERY_UPDATED = 'mqtt_discovery_updated_{}'
MQTT_DISCOVERY_NEW = 'mqtt_discovery_new_{}_{}'
# Payload key whose value is substituted for a leading/trailing '~' in
# *_topic values.
TOPIC_BASE = '~'
# Short payload key -> canonical config key.  Discovery payloads may use
# these abbreviations to keep retained MQTT messages small; the handler in
# async_start expands them before processing.
ABBREVIATIONS = {
    'aux_cmd_t': 'aux_command_topic',
    'aux_stat_tpl': 'aux_state_template',
    'aux_stat_t': 'aux_state_topic',
    'avty_t': 'availability_topic',
    'away_mode_cmd_t': 'away_mode_command_topic',
    'away_mode_stat_tpl': 'away_mode_state_template',
    'away_mode_stat_t': 'away_mode_state_topic',
    'bri_cmd_t': 'brightness_command_topic',
    'bri_scl': 'brightness_scale',
    'bri_stat_t': 'brightness_state_topic',
    'bri_val_tpl': 'brightness_value_template',
    'clr_temp_cmd_tpl': 'color_temp_command_template',
    'bat_lev_t': 'battery_level_topic',
    'bat_lev_tpl': 'battery_level_template',
    'chrg_t': 'charging_topic',
    'chrg_tpl': 'charging_template',
    'clr_temp_cmd_t': 'color_temp_command_topic',
    'clr_temp_stat_t': 'color_temp_state_topic',
    'clr_temp_val_tpl': 'color_temp_value_template',
    'cln_t': 'cleaning_topic',
    'cln_tpl': 'cleaning_template',
    'cmd_t': 'command_topic',
    'curr_temp_t': 'current_temperature_topic',
    'dev_cla': 'device_class',
    'dock_t': 'docked_topic',
    'dock_tpl': 'docked_template',
    'err_t': 'error_topic',
    'err_tpl': 'error_template',
    'fanspd_t': 'fan_speed_topic',
    'fanspd_tpl': 'fan_speed_template',
    'fanspd_lst': 'fan_speed_list',
    'fx_cmd_t': 'effect_command_topic',
    'fx_list': 'effect_list',
    'fx_stat_t': 'effect_state_topic',
    'fx_val_tpl': 'effect_value_template',
    'exp_aft': 'expire_after',
    'fan_mode_cmd_t': 'fan_mode_command_topic',
    'fan_mode_stat_tpl': 'fan_mode_state_template',
    'fan_mode_stat_t': 'fan_mode_state_topic',
    'frc_upd': 'force_update',
    'hold_cmd_t': 'hold_command_topic',
    'hold_stat_tpl': 'hold_state_template',
    'hold_stat_t': 'hold_state_topic',
    'ic': 'icon',
    'init': 'initial',
    'json_attr': 'json_attributes',
    'max_temp': 'max_temp',
    'min_temp': 'min_temp',
    'mode_cmd_t': 'mode_command_topic',
    'mode_stat_tpl': 'mode_state_template',
    'mode_stat_t': 'mode_state_topic',
    'name': 'name',
    'on_cmd_type': 'on_command_type',
    'opt': 'optimistic',
    'osc_cmd_t': 'oscillation_command_topic',
    'osc_stat_t': 'oscillation_state_topic',
    'osc_val_tpl': 'oscillation_value_template',
    'pl_arm_away': 'payload_arm_away',
    'pl_arm_home': 'payload_arm_home',
    'pl_avail': 'payload_available',
    'pl_cls': 'payload_close',
    'pl_disarm': 'payload_disarm',
    'pl_hi_spd': 'payload_high_speed',
    'pl_lock': 'payload_lock',
    'pl_lo_spd': 'payload_low_speed',
    'pl_med_spd': 'payload_medium_speed',
    'pl_not_avail': 'payload_not_available',
    'pl_off': 'payload_off',
    'pl_on': 'payload_on',
    'pl_open': 'payload_open',
    'pl_osc_off': 'payload_oscillation_off',
    'pl_osc_on': 'payload_oscillation_on',
    'pl_stop': 'payload_stop',
    'pl_unlk': 'payload_unlock',
    'pow_cmd_t': 'power_command_topic',
    'ret': 'retain',
    'rgb_cmd_tpl': 'rgb_command_template',
    'rgb_cmd_t': 'rgb_command_topic',
    'rgb_stat_t': 'rgb_state_topic',
    'rgb_val_tpl': 'rgb_value_template',
    'send_cmd_t': 'send_command_topic',
    'send_if_off': 'send_if_off',
    'set_pos_tpl': 'set_position_template',
    'set_pos_t': 'set_position_topic',
    'spd_cmd_t': 'speed_command_topic',
    'spd_stat_t': 'speed_state_topic',
    'spd_val_tpl': 'speed_value_template',
    'spds': 'speeds',
    'stat_clsd': 'state_closed',
    'stat_off': 'state_off',
    'stat_on': 'state_on',
    'stat_open': 'state_open',
    'stat_t': 'state_topic',
    'stat_val_tpl': 'state_value_template',
    'sup_feat': 'supported_features',
    'swing_mode_cmd_t': 'swing_mode_command_topic',
    'swing_mode_stat_tpl': 'swing_mode_state_template',
    'swing_mode_stat_t': 'swing_mode_state_topic',
    'temp_cmd_t': 'temperature_command_topic',
    'temp_stat_tpl': 'temperature_state_template',
    'temp_stat_t': 'temperature_state_topic',
    'tilt_clsd_val': 'tilt_closed_value',
    'tilt_cmd_t': 'tilt_command_topic',
    'tilt_inv_stat': 'tilt_invert_state',
    'tilt_max': 'tilt_max',
    'tilt_min': 'tilt_min',
    'tilt_opnd_val': 'tilt_opened_value',
    'tilt_status_opt': 'tilt_status_optimistic',
    'tilt_status_t': 'tilt_status_topic',
    't': 'topic',
    'uniq_id': 'unique_id',
    'unit_of_meas': 'unit_of_measurement',
    'val_tpl': 'value_template',
    'whit_val_cmd_t': 'white_value_command_topic',
    'whit_val_stat_t': 'white_value_state_topic',
    'whit_val_tpl': 'white_value_template',
    'xy_cmd_t': 'xy_command_topic',
    'xy_stat_t': 'xy_state_topic',
    'xy_val_tpl': 'xy_value_template',
}
async def async_start(hass: HomeAssistantType, discovery_topic, hass_config,
                      config_entry=None) -> bool:
    """Initialize of MQTT Discovery.

    Subscribes to ``<discovery_topic>/#`` and processes incoming
    ``.../config`` messages: expands abbreviated keys, resolves the ``~``
    topic base, and either sets up a newly discovered component or
    dispatches an update signal for an already known one.
    """
    async def async_device_message_received(topic, payload, qos):
        """Process the received message."""
        match = TOPIC_MATCHER.match(topic)
        if not match:
            # Not a .../config topic -- ignore.
            return
        _prefix_topic, component, node_id, object_id = match.groups()
        if component not in SUPPORTED_COMPONENTS:
            _LOGGER.warning("Component %s is not supported", component)
            return
        if payload:
            try:
                payload = json.loads(payload)
            except ValueError:
                _LOGGER.warning("Unable to parse JSON %s: '%s'",
                                object_id, payload)
                return
        # An empty payload becomes an empty dict here; downstream it means
        # "remove/ignore" (the `if payload:` checks below are falsy).
        payload = dict(payload)
        # Expand abbreviated keys to their canonical names.
        for key in list(payload.keys()):
            abbreviated_key = key
            key = ABBREVIATIONS.get(key, key)
            payload[key] = payload.pop(abbreviated_key)
        # Substitute the '~' topic base at the start or end of *_topic values.
        if TOPIC_BASE in payload:
            base = payload[TOPIC_BASE]
            for key, value in payload.items():
                if isinstance(value, str) and value:
                    if value[0] == TOPIC_BASE and key.endswith('_topic'):
                        payload[key] = "{}{}".format(base, value[1:])
                    if value[-1] == TOPIC_BASE and key.endswith('_topic'):
                        payload[key] = "{}{}".format(value[:-1], base)
        # If present, the node_id will be included in the discovered object id
        discovery_id = ' '.join((node_id, object_id)) if node_id else object_id
        discovery_hash = (component, discovery_id)
        if payload:
            # Translate the deprecated "platform" key into "schema".
            if CONF_PLATFORM in payload and 'schema' not in payload:
                platform = payload[CONF_PLATFORM]
                if (component in DEPRECATED_PLATFORM_TO_SCHEMA and
                        platform in DEPRECATED_PLATFORM_TO_SCHEMA[component]):
                    schema = DEPRECATED_PLATFORM_TO_SCHEMA[component][platform]
                    payload['schema'] = schema
                    _LOGGER.warning('"platform": "%s" is deprecated, '
                                    'replace with "schema":"%s"',
                                    platform, schema)
            payload[CONF_PLATFORM] = 'mqtt'
            # Default the state topic to the conventional location next to
            # the config topic.
            if CONF_STATE_TOPIC not in payload:
                payload[CONF_STATE_TOPIC] = '{}/{}/{}{}/state'.format(
                    discovery_topic, component,
                    '%s/' % node_id if node_id else '', object_id)
            payload[ATTR_DISCOVERY_HASH] = discovery_hash
        if ALREADY_DISCOVERED not in hass.data:
            hass.data[ALREADY_DISCOVERED] = {}
        if discovery_hash in hass.data[ALREADY_DISCOVERED]:
            # Dispatch update
            _LOGGER.info(
                "Component has already been discovered: %s %s, sending update",
                component, discovery_id)
            async_dispatcher_send(
                hass, MQTT_DISCOVERY_UPDATED.format(discovery_hash), payload)
        elif payload:
            # Add component
            _LOGGER.info("Found new component: %s %s", component, discovery_id)
            hass.data[ALREADY_DISCOVERED][discovery_hash] = None
            if component not in CONFIG_ENTRY_COMPONENTS:
                # Legacy platform setup path.
                await async_load_platform(
                    hass, component, 'mqtt', payload, hass_config)
                return
            config_entries_key = '{}.{}'.format(component, 'mqtt')
            # Lock so the config entry is forwarded at most once per component.
            async with hass.data[DATA_CONFIG_ENTRY_LOCK]:
                if config_entries_key not in hass.data[CONFIG_ENTRY_IS_SETUP]:
                    await hass.config_entries.async_forward_entry_setup(
                        config_entry, component)
                    hass.data[CONFIG_ENTRY_IS_SETUP].add(config_entries_key)
            async_dispatcher_send(hass, MQTT_DISCOVERY_NEW.format(
                component, 'mqtt'), payload)
    hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock()
    hass.data[CONFIG_ENTRY_IS_SETUP] = set()
    await mqtt.async_subscribe(
        hass, discovery_topic + '/#', async_device_message_received, 0)
    return True
| 36.734043 | 79 | 0.646491 | import asyncio
import json
import logging
import re
from homeassistant.components import mqtt
from homeassistant.components.mqtt import CONF_STATE_TOPIC, ATTR_DISCOVERY_HASH
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
TOPIC_MATCHER = re.compile(
r'(?P<prefix_topic>\w+)/(?P<component>\w+)/'
r'(?:(?P<node_id>[a-zA-Z0-9_-]+)/)?(?P<object_id>[a-zA-Z0-9_-]+)/config')
SUPPORTED_COMPONENTS = [
'binary_sensor', 'camera', 'cover', 'fan',
'light', 'sensor', 'switch', 'lock', 'climate',
'alarm_control_panel', 'vacuum']
CONFIG_ENTRY_COMPONENTS = [
'binary_sensor',
'camera',
'cover',
'light',
'lock',
'sensor',
'switch',
'climate',
'alarm_control_panel',
'fan',
'vacuum',
]
DEPRECATED_PLATFORM_TO_SCHEMA = {
'light': {
'mqtt_json': 'json',
'mqtt_template': 'template',
}
}
ALREADY_DISCOVERED = 'mqtt_discovered_components'
DATA_CONFIG_ENTRY_LOCK = 'mqtt_config_entry_lock'
CONFIG_ENTRY_IS_SETUP = 'mqtt_config_entry_is_setup'
MQTT_DISCOVERY_UPDATED = 'mqtt_discovery_updated_{}'
MQTT_DISCOVERY_NEW = 'mqtt_discovery_new_{}_{}'
TOPIC_BASE = '~'
ABBREVIATIONS = {
'aux_cmd_t': 'aux_command_topic',
'aux_stat_tpl': 'aux_state_template',
'aux_stat_t': 'aux_state_topic',
'avty_t': 'availability_topic',
'away_mode_cmd_t': 'away_mode_command_topic',
'away_mode_stat_tpl': 'away_mode_state_template',
'away_mode_stat_t': 'away_mode_state_topic',
'bri_cmd_t': 'brightness_command_topic',
'bri_scl': 'brightness_scale',
'bri_stat_t': 'brightness_state_topic',
'bri_val_tpl': 'brightness_value_template',
'clr_temp_cmd_tpl': 'color_temp_command_template',
'bat_lev_t': 'battery_level_topic',
'bat_lev_tpl': 'battery_level_template',
'chrg_t': 'charging_topic',
'chrg_tpl': 'charging_template',
'clr_temp_cmd_t': 'color_temp_command_topic',
'clr_temp_stat_t': 'color_temp_state_topic',
'clr_temp_val_tpl': 'color_temp_value_template',
'cln_t': 'cleaning_topic',
'cln_tpl': 'cleaning_template',
'cmd_t': 'command_topic',
'curr_temp_t': 'current_temperature_topic',
'dev_cla': 'device_class',
'dock_t': 'docked_topic',
'dock_tpl': 'docked_template',
'err_t': 'error_topic',
'err_tpl': 'error_template',
'fanspd_t': 'fan_speed_topic',
'fanspd_tpl': 'fan_speed_template',
'fanspd_lst': 'fan_speed_list',
'fx_cmd_t': 'effect_command_topic',
'fx_list': 'effect_list',
'fx_stat_t': 'effect_state_topic',
'fx_val_tpl': 'effect_value_template',
'exp_aft': 'expire_after',
'fan_mode_cmd_t': 'fan_mode_command_topic',
'fan_mode_stat_tpl': 'fan_mode_state_template',
'fan_mode_stat_t': 'fan_mode_state_topic',
'frc_upd': 'force_update',
'hold_cmd_t': 'hold_command_topic',
'hold_stat_tpl': 'hold_state_template',
'hold_stat_t': 'hold_state_topic',
'ic': 'icon',
'init': 'initial',
'json_attr': 'json_attributes',
'max_temp': 'max_temp',
'min_temp': 'min_temp',
'mode_cmd_t': 'mode_command_topic',
'mode_stat_tpl': 'mode_state_template',
'mode_stat_t': 'mode_state_topic',
'name': 'name',
'on_cmd_type': 'on_command_type',
'opt': 'optimistic',
'osc_cmd_t': 'oscillation_command_topic',
'osc_stat_t': 'oscillation_state_topic',
'osc_val_tpl': 'oscillation_value_template',
'pl_arm_away': 'payload_arm_away',
'pl_arm_home': 'payload_arm_home',
'pl_avail': 'payload_available',
'pl_cls': 'payload_close',
'pl_disarm': 'payload_disarm',
'pl_hi_spd': 'payload_high_speed',
'pl_lock': 'payload_lock',
'pl_lo_spd': 'payload_low_speed',
'pl_med_spd': 'payload_medium_speed',
'pl_not_avail': 'payload_not_available',
'pl_off': 'payload_off',
'pl_on': 'payload_on',
'pl_open': 'payload_open',
'pl_osc_off': 'payload_oscillation_off',
'pl_osc_on': 'payload_oscillation_on',
'pl_stop': 'payload_stop',
'pl_unlk': 'payload_unlock',
'pow_cmd_t': 'power_command_topic',
'ret': 'retain',
'rgb_cmd_tpl': 'rgb_command_template',
'rgb_cmd_t': 'rgb_command_topic',
'rgb_stat_t': 'rgb_state_topic',
'rgb_val_tpl': 'rgb_value_template',
'send_cmd_t': 'send_command_topic',
'send_if_off': 'send_if_off',
'set_pos_tpl': 'set_position_template',
'set_pos_t': 'set_position_topic',
'spd_cmd_t': 'speed_command_topic',
'spd_stat_t': 'speed_state_topic',
'spd_val_tpl': 'speed_value_template',
'spds': 'speeds',
'stat_clsd': 'state_closed',
'stat_off': 'state_off',
'stat_on': 'state_on',
'stat_open': 'state_open',
'stat_t': 'state_topic',
'stat_val_tpl': 'state_value_template',
'sup_feat': 'supported_features',
'swing_mode_cmd_t': 'swing_mode_command_topic',
'swing_mode_stat_tpl': 'swing_mode_state_template',
'swing_mode_stat_t': 'swing_mode_state_topic',
'temp_cmd_t': 'temperature_command_topic',
'temp_stat_tpl': 'temperature_state_template',
'temp_stat_t': 'temperature_state_topic',
'tilt_clsd_val': 'tilt_closed_value',
'tilt_cmd_t': 'tilt_command_topic',
'tilt_inv_stat': 'tilt_invert_state',
'tilt_max': 'tilt_max',
'tilt_min': 'tilt_min',
'tilt_opnd_val': 'tilt_opened_value',
'tilt_status_opt': 'tilt_status_optimistic',
'tilt_status_t': 'tilt_status_topic',
't': 'topic',
'uniq_id': 'unique_id',
'unit_of_meas': 'unit_of_measurement',
'val_tpl': 'value_template',
'whit_val_cmd_t': 'white_value_command_topic',
'whit_val_stat_t': 'white_value_state_topic',
'whit_val_tpl': 'white_value_template',
'xy_cmd_t': 'xy_command_topic',
'xy_stat_t': 'xy_state_topic',
'xy_val_tpl': 'xy_value_template',
}
async def async_start(hass: HomeAssistantType, discovery_topic, hass_config,
config_entry=None) -> bool:
async def async_device_message_received(topic, payload, qos):
match = TOPIC_MATCHER.match(topic)
if not match:
return
_prefix_topic, component, node_id, object_id = match.groups()
if component not in SUPPORTED_COMPONENTS:
_LOGGER.warning("Component %s is not supported", component)
return
if payload:
try:
payload = json.loads(payload)
except ValueError:
_LOGGER.warning("Unable to parse JSON %s: '%s'",
object_id, payload)
return
payload = dict(payload)
for key in list(payload.keys()):
abbreviated_key = key
key = ABBREVIATIONS.get(key, key)
payload[key] = payload.pop(abbreviated_key)
if TOPIC_BASE in payload:
base = payload[TOPIC_BASE]
for key, value in payload.items():
if isinstance(value, str) and value:
if value[0] == TOPIC_BASE and key.endswith('_topic'):
payload[key] = "{}{}".format(base, value[1:])
if value[-1] == TOPIC_BASE and key.endswith('_topic'):
payload[key] = "{}{}".format(value[:-1], base)
discovery_id = ' '.join((node_id, object_id)) if node_id else object_id
discovery_hash = (component, discovery_id)
if payload:
if CONF_PLATFORM in payload and 'schema' not in payload:
platform = payload[CONF_PLATFORM]
if (component in DEPRECATED_PLATFORM_TO_SCHEMA and
platform in DEPRECATED_PLATFORM_TO_SCHEMA[component]):
schema = DEPRECATED_PLATFORM_TO_SCHEMA[component][platform]
payload['schema'] = schema
_LOGGER.warning('"platform": "%s" is deprecated, '
'replace with "schema":"%s"',
platform, schema)
payload[CONF_PLATFORM] = 'mqtt'
if CONF_STATE_TOPIC not in payload:
payload[CONF_STATE_TOPIC] = '{}/{}/{}{}/state'.format(
discovery_topic, component,
'%s/' % node_id if node_id else '', object_id)
payload[ATTR_DISCOVERY_HASH] = discovery_hash
if ALREADY_DISCOVERED not in hass.data:
hass.data[ALREADY_DISCOVERED] = {}
if discovery_hash in hass.data[ALREADY_DISCOVERED]:
_LOGGER.info(
"Component has already been discovered: %s %s, sending update",
component, discovery_id)
async_dispatcher_send(
hass, MQTT_DISCOVERY_UPDATED.format(discovery_hash), payload)
elif payload:
_LOGGER.info("Found new component: %s %s", component, discovery_id)
hass.data[ALREADY_DISCOVERED][discovery_hash] = None
if component not in CONFIG_ENTRY_COMPONENTS:
await async_load_platform(
hass, component, 'mqtt', payload, hass_config)
return
config_entries_key = '{}.{}'.format(component, 'mqtt')
async with hass.data[DATA_CONFIG_ENTRY_LOCK]:
if config_entries_key not in hass.data[CONFIG_ENTRY_IS_SETUP]:
await hass.config_entries.async_forward_entry_setup(
config_entry, component)
hass.data[CONFIG_ENTRY_IS_SETUP].add(config_entries_key)
async_dispatcher_send(hass, MQTT_DISCOVERY_NEW.format(
component, 'mqtt'), payload)
hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock()
hass.data[CONFIG_ENTRY_IS_SETUP] = set()
await mqtt.async_subscribe(
hass, discovery_topic + '/#', async_device_message_received, 0)
return True
| true | true |
790008bef397388425c61a748250859aaf874286 | 2,175 | py | Python | venv/lib/python3.7/site-packages/pyecharts/charts/kline.py | Amoswish/graduaction_design_pubgprediction | 3752943bc6bab813ec3a0ad60a5441df6f1e7baf | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.7/site-packages/pyecharts/charts/kline.py | Amoswish/graduaction_design_pubgprediction | 3752943bc6bab813ec3a0ad60a5441df6f1e7baf | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.7/site-packages/pyecharts/charts/kline.py | Amoswish/graduaction_design_pubgprediction | 3752943bc6bab813ec3a0ad60a5441df6f1e7baf | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
from pyecharts.chart import Chart
def kline_tooltip_formatter(params):
    """Default tooltip formatter for candlestick (Kline) series.

    NOTE(review): pyecharts serializes this function into a JavaScript
    callback for ECharts; `params` follows the ECharts tooltip formatter
    contract, where ``params[0].data`` is
    ``[x, open, close, lowest, highest]`` -- presumably not meant to be
    called from Python directly.
    """
    text = (
        params[0].seriesName
        + "<br/>"
        + "- open:"
        + params[0].data[1]
        + "<br/>"
        + "- close:"
        + params[0].data[2]
        + "<br/>"
        + "- lowest:"
        + params[0].data[3]
        + "<br/>"
        + "- highest:"
        + params[0].data[4]
    )
    return text
class Kline(Chart):
    """
    <<< Kline / Candlestick chart >>>

    Red marks rising prices, blue marks falling prices.
    """

    def __init__(self, title="", subtitle="", **kwargs):
        super(Kline, self).__init__(title, subtitle, **kwargs)
    def add(self, *args, **kwargs):
        # Delegates to the name-mangled __add; returns self to allow
        # fluent chaining (chart.add(...).add(...)).
        self.__add(*args, **kwargs)
        return self
    def __add(self, name, x_axis, y_axis, **kwargs):
        """
        :param name:
            Series name, used in the tooltip display and for legend
            filtering.
        :param x_axis:
            x-axis data.
        :param y_axis:
            y-axis data. Each row is one data item, each column one
            dimension. A data item is [open, close, lowest, highest].
        :param kwargs:
            Extra chart options forwarded to the option builder.
        """
        kwargs.update(type="candlestick", x_axis=x_axis)
        # Install the default candlestick tooltip unless the caller
        # supplied their own formatter/trigger.
        if "tooltip_formatter" not in kwargs:
            kwargs["tooltip_formatter"] = kline_tooltip_formatter
        if "tooltip_trigger" not in kwargs:
            kwargs["tooltip_trigger"] = "axis"
        chart = self._get_all_options(**kwargs)
        xaxis, yaxis = chart["xy_axis"]
        self._option.update(xAxis=xaxis, yAxis=yaxis)
        # Scale both axes so the candles are not forced to start at zero,
        # and shade alternating bands on the y-axis for readability.
        self._option.get("xAxis")[0]["scale"] = True
        self._option.get("yAxis")[0]["scale"] = True
        self._option.get("yAxis")[0]["splitArea"] = {"show": True}
        self._option.get("legend")[0].get("data").append(name)
        self._option.get("series").append(
            {
                "type": "candlestick",
                "name": name,
                "data": y_axis,
                "markPoint": chart["mark_point"],
                "markLine": chart["mark_line"],
                "seriesId": self._option.get("series_id"),
            }
        )
        self._config_components(**kwargs)
| 27.884615 | 67 | 0.486437 |
from pyecharts.chart import Chart
def kline_tooltip_formatter(params):
text = (
params[0].seriesName
+ "<br/>"
+ "- open:"
+ params[0].data[1]
+ "<br/>"
+ "- close:"
+ params[0].data[2]
+ "<br/>"
+ "- lowest:"
+ params[0].data[3]
+ "<br/>"
+ "- highest:"
+ params[0].data[4]
)
return text
class Kline(Chart):
def __init__(self, title="", subtitle="", **kwargs):
super(Kline, self).__init__(title, subtitle, **kwargs)
def add(self, *args, **kwargs):
self.__add(*args, **kwargs)
return self
def __add(self, name, x_axis, y_axis, **kwargs):
kwargs.update(type="candlestick", x_axis=x_axis)
if "tooltip_formatter" not in kwargs:
kwargs["tooltip_formatter"] = kline_tooltip_formatter
if "tooltip_trigger" not in kwargs:
kwargs["tooltip_trigger"] = "axis"
chart = self._get_all_options(**kwargs)
xaxis, yaxis = chart["xy_axis"]
self._option.update(xAxis=xaxis, yAxis=yaxis)
self._option.get("xAxis")[0]["scale"] = True
self._option.get("yAxis")[0]["scale"] = True
self._option.get("yAxis")[0]["splitArea"] = {"show": True}
self._option.get("legend")[0].get("data").append(name)
self._option.get("series").append(
{
"type": "candlestick",
"name": name,
"data": y_axis,
"markPoint": chart["mark_point"],
"markLine": chart["mark_line"],
"seriesId": self._option.get("series_id"),
}
)
self._config_components(**kwargs)
| true | true |
790008f7d70f039760e35974e70eeda32a2c44ed | 8,724 | py | Python | validator/models/validation_run.py | wpreimes/qa4sm | 1e9c1c18d07d713a048ad13bac617b147240991c | [
"MIT"
] | 10 | 2019-02-27T15:05:15.000Z | 2022-03-10T21:13:40.000Z | validator/models/validation_run.py | awst-austria/qa4sm | e67c9c0ec75fedabbfc389e4400e1203aa2d8a57 | [
"MIT"
] | 69 | 2019-07-04T23:20:17.000Z | 2022-03-29T06:34:06.000Z | validator/models/validation_run.py | wpreimes/qa4sm | 1e9c1c18d07d713a048ad13bac617b147240991c | [
"MIT"
] | 10 | 2019-03-14T11:46:58.000Z | 2022-03-25T13:06:16.000Z | from datetime import timedelta
from os import path
from re import sub as regex_sub
from shutil import rmtree
import uuid
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch.dispatcher import receiver
from django.utils import timezone
from validator.models import DatasetConfiguration, User, CopiedValidations
from django.db.models import Q, ExpressionWrapper, F, BooleanField
class ValidationRun(models.Model):
# scaling methods
MIN_MAX = 'min_max'
LINREG = 'linreg'
MEAN_STD = 'mean_std'
NO_SCALING = 'none'
BETA_SCALING = 'cdf_beta_match'
SCALING_METHODS = (
(NO_SCALING, 'No scaling'),
(MIN_MAX, 'Min/Max'),
(LINREG, 'Linear regression'),
(MEAN_STD, 'Mean/standard deviation'),
(BETA_SCALING, 'CDF matching with beta distribution fitting'),
)
# scale to
SCALE_TO_REF = 'ref'
SCALE_TO_DATA = 'data'
SCALE_TO_OPTIONS = (
(SCALE_TO_REF, 'Scale to reference'),
(SCALE_TO_DATA, 'Scale to data')
)
# anomalies
MOVING_AVG_35_D = "moving_avg_35_d"
CLIMATOLOGY = "climatology"
NO_ANOM = "none"
ANOMALIES_METHODS = (
(NO_ANOM, 'Do not calculate'),
(MOVING_AVG_35_D, '35 day moving average'),
(CLIMATOLOGY, 'Climatology'),
)
# upscaling options
NO_UPSCALE = "none"
AVERAGE = "average"
UPSCALING_METHODS = (
(NO_UPSCALE, 'Do not upscale point measurements'),
(AVERAGE, 'Average point measurements'),
)
# fields
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name_tag = models.CharField(max_length=80, blank=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True)
start_time = models.DateTimeField('started')
end_time = models.DateTimeField('finished', null=True)
total_points = models.IntegerField(default=0)
error_points = models.IntegerField(default=0)
ok_points = models.IntegerField(default=0)
progress = models.IntegerField(default=0)
reference_configuration = models.ForeignKey(to=DatasetConfiguration, on_delete=models.SET_NULL,
related_name='ref_validation_run', null=True)
scaling_ref = models.ForeignKey(to=DatasetConfiguration, on_delete=models.SET_NULL,
related_name='scaling_ref_validation_run', null=True)
scaling_method = models.CharField(max_length=20, choices=SCALING_METHODS, default=MEAN_STD)
interval_from = models.DateTimeField(null=True)
interval_to = models.DateTimeField(null=True)
anomalies = models.CharField(max_length=20, choices=ANOMALIES_METHODS, default=NO_ANOM)
min_lat = models.FloatField(null=True, blank=True, validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)])
min_lon = models.FloatField(null=True, blank=True)
max_lat = models.FloatField(null=True, blank=True, validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)])
max_lon = models.FloatField(null=True, blank=True)
# only applicable if anomalies with climatology is selected
anomalies_from = models.DateTimeField(null=True, blank=True)
anomalies_to = models.DateTimeField(null=True, blank=True)
# upscaling of ISMN point measurements
upscaling_method = models.CharField(max_length=50, choices=UPSCALING_METHODS, default=NO_UPSCALE, blank=True)
temporal_stability = models.BooleanField(default=False)
output_file = models.FileField(null=True, max_length=250, blank=True)
is_archived = models.BooleanField(default=False)
last_extended = models.DateTimeField(null=True, blank=True)
expiry_notified = models.BooleanField(default=False)
doi = models.CharField(max_length=255, blank=True)
publishing_in_progress = models.BooleanField(default=False)
tcol = models.BooleanField(default=False)
bootstrap_tcol_cis = models.BooleanField(default=False)
used_by = models.ManyToManyField(User, through=CopiedValidations, through_fields=('original_run', 'used_by_user'),
related_name='copied_runs')
# many-to-one relationships coming from other models:
# dataset_configurations from DatasetConfiguration
# celery_tasks from CeleryTask
@property
def expiry_date(self):
if (self.is_archived or (self.end_time is None)) and (self.progress != -1):
return None
if self.progress == -1:
initial_date = self.start_time
else:
initial_date = self.last_extended if self.last_extended else self.end_time
return initial_date + timedelta(days=settings.VALIDATION_EXPIRY_DAYS)
@property
def is_expired(self):
e = self.expiry_date
return (e is not None) and (timezone.now() > e)
@property
def is_near_expiry(self):
e = self.expiry_date
return (e is not None) and (timezone.now() > e - timedelta(days=settings.VALIDATION_EXPIRY_WARNING_DAYS))
@property
def is_unpublished(self):
return not self.doi
def archive(self, unarchive=False, commit=True):
if unarchive:
self.extend_lifespan(commit=False)
self.is_archived = False
else:
self.is_archived = True
if commit:
self.save()
def extend_lifespan(self, commit=True):
self.last_extended = timezone.now()
self.expiry_notified = False
if commit:
self.save()
def clean(self):
super(ValidationRun, self).clean()
if self.interval_from is None and self.interval_to is not None:
raise ValidationError({'interval_from': 'What has an end must have a beginning.', })
if self.interval_from is not None and self.interval_to is None:
raise ValidationError({'interval_to': 'What has a beginning must have an end.', })
if self.interval_from is not None and self.interval_to is not None and self.interval_from > self.interval_to:
raise ValidationError({'interval_from': 'From must be before To',
'interval_to': 'From must be before To', })
if self.anomalies == self.CLIMATOLOGY:
if self.anomalies_from is None or self.anomalies_to is None:
raise ValidationError({'anomalies': 'Need valid time period to calculate climatology from.', })
if self.anomalies_from > self.anomalies_to:
raise ValidationError({'anomalies_from': 'Start of climatology period must be before end.',
'anomalies_to': 'Start of climatology period must be before end.', })
else:
if self.anomalies_from is not None or self.anomalies_to is not None:
raise ValidationError(
{'anomalies': 'Time period makes no sense for anomalies calculation without climatology.', })
box = {'min_lat': self.min_lat, 'min_lon': self.min_lon, 'max_lat': self.max_lat, 'max_lon': self.max_lon}
if any(x is None for x in box.values()) and any(x is not None for x in box.values()):
affected_fields = {}
for key, value in box.items():
if value is None:
affected_fields[key] = 'For spatial subsetting, please set all bounding box coordinates.'
raise ValidationError(affected_fields)
def __str__(self):
return "id: {}, user: {}, start: {} )".format(self.id, self.user, self.start_time)
@property
def output_dir_url(self):
if bool(self.output_file) is False:
return None
url = regex_sub('[^/]+$', '', self.output_file.url)
return url
@property
def output_file_name(self):
if bool(self.output_file) is False:
return None
name = self.output_file.name.split('/')[1]
return name
@property
def is_a_copy(self):
copied_runs = CopiedValidations.objects.filter(copied_run_id=self.id)\
.annotate(is_copied=ExpressionWrapper(~Q(copied_run=F('original_run')), output_field=BooleanField())) \
.filter(is_copied=True)
return len(copied_runs) != 0
# delete model output directory on disk when model is deleted
@receiver(post_delete, sender=ValidationRun)
def auto_delete_file_on_delete(sender, instance, **kwargs):
if instance.output_file:
rundir = path.dirname(instance.output_file.path)
if path.isdir(rundir):
rmtree(rundir)
| 40.018349 | 118 | 0.673315 | from datetime import timedelta
from os import path
from re import sub as regex_sub
from shutil import rmtree
import uuid
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch.dispatcher import receiver
from django.utils import timezone
from validator.models import DatasetConfiguration, User, CopiedValidations
from django.db.models import Q, ExpressionWrapper, F, BooleanField
class ValidationRun(models.Model):
MIN_MAX = 'min_max'
LINREG = 'linreg'
MEAN_STD = 'mean_std'
NO_SCALING = 'none'
BETA_SCALING = 'cdf_beta_match'
SCALING_METHODS = (
(NO_SCALING, 'No scaling'),
(MIN_MAX, 'Min/Max'),
(LINREG, 'Linear regression'),
(MEAN_STD, 'Mean/standard deviation'),
(BETA_SCALING, 'CDF matching with beta distribution fitting'),
)
SCALE_TO_REF = 'ref'
SCALE_TO_DATA = 'data'
SCALE_TO_OPTIONS = (
(SCALE_TO_REF, 'Scale to reference'),
(SCALE_TO_DATA, 'Scale to data')
)
MOVING_AVG_35_D = "moving_avg_35_d"
CLIMATOLOGY = "climatology"
NO_ANOM = "none"
ANOMALIES_METHODS = (
(NO_ANOM, 'Do not calculate'),
(MOVING_AVG_35_D, '35 day moving average'),
(CLIMATOLOGY, 'Climatology'),
)
NO_UPSCALE = "none"
AVERAGE = "average"
UPSCALING_METHODS = (
(NO_UPSCALE, 'Do not upscale point measurements'),
(AVERAGE, 'Average point measurements'),
)
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name_tag = models.CharField(max_length=80, blank=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True)
start_time = models.DateTimeField('started')
end_time = models.DateTimeField('finished', null=True)
total_points = models.IntegerField(default=0)
error_points = models.IntegerField(default=0)
ok_points = models.IntegerField(default=0)
progress = models.IntegerField(default=0)
reference_configuration = models.ForeignKey(to=DatasetConfiguration, on_delete=models.SET_NULL,
related_name='ref_validation_run', null=True)
scaling_ref = models.ForeignKey(to=DatasetConfiguration, on_delete=models.SET_NULL,
related_name='scaling_ref_validation_run', null=True)
scaling_method = models.CharField(max_length=20, choices=SCALING_METHODS, default=MEAN_STD)
interval_from = models.DateTimeField(null=True)
interval_to = models.DateTimeField(null=True)
anomalies = models.CharField(max_length=20, choices=ANOMALIES_METHODS, default=NO_ANOM)
min_lat = models.FloatField(null=True, blank=True, validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)])
min_lon = models.FloatField(null=True, blank=True)
max_lat = models.FloatField(null=True, blank=True, validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)])
max_lon = models.FloatField(null=True, blank=True)
anomalies_from = models.DateTimeField(null=True, blank=True)
anomalies_to = models.DateTimeField(null=True, blank=True)
upscaling_method = models.CharField(max_length=50, choices=UPSCALING_METHODS, default=NO_UPSCALE, blank=True)
temporal_stability = models.BooleanField(default=False)
output_file = models.FileField(null=True, max_length=250, blank=True)
is_archived = models.BooleanField(default=False)
last_extended = models.DateTimeField(null=True, blank=True)
expiry_notified = models.BooleanField(default=False)
doi = models.CharField(max_length=255, blank=True)
publishing_in_progress = models.BooleanField(default=False)
tcol = models.BooleanField(default=False)
bootstrap_tcol_cis = models.BooleanField(default=False)
used_by = models.ManyToManyField(User, through=CopiedValidations, through_fields=('original_run', 'used_by_user'),
related_name='copied_runs')
@property
def expiry_date(self):
if (self.is_archived or (self.end_time is None)) and (self.progress != -1):
return None
if self.progress == -1:
initial_date = self.start_time
else:
initial_date = self.last_extended if self.last_extended else self.end_time
return initial_date + timedelta(days=settings.VALIDATION_EXPIRY_DAYS)
@property
def is_expired(self):
e = self.expiry_date
return (e is not None) and (timezone.now() > e)
@property
def is_near_expiry(self):
e = self.expiry_date
return (e is not None) and (timezone.now() > e - timedelta(days=settings.VALIDATION_EXPIRY_WARNING_DAYS))
@property
def is_unpublished(self):
return not self.doi
def archive(self, unarchive=False, commit=True):
if unarchive:
self.extend_lifespan(commit=False)
self.is_archived = False
else:
self.is_archived = True
if commit:
self.save()
def extend_lifespan(self, commit=True):
self.last_extended = timezone.now()
self.expiry_notified = False
if commit:
self.save()
def clean(self):
super(ValidationRun, self).clean()
if self.interval_from is None and self.interval_to is not None:
raise ValidationError({'interval_from': 'What has an end must have a beginning.', })
if self.interval_from is not None and self.interval_to is None:
raise ValidationError({'interval_to': 'What has a beginning must have an end.', })
if self.interval_from is not None and self.interval_to is not None and self.interval_from > self.interval_to:
raise ValidationError({'interval_from': 'From must be before To',
'interval_to': 'From must be before To', })
if self.anomalies == self.CLIMATOLOGY:
if self.anomalies_from is None or self.anomalies_to is None:
raise ValidationError({'anomalies': 'Need valid time period to calculate climatology from.', })
if self.anomalies_from > self.anomalies_to:
raise ValidationError({'anomalies_from': 'Start of climatology period must be before end.',
'anomalies_to': 'Start of climatology period must be before end.', })
else:
if self.anomalies_from is not None or self.anomalies_to is not None:
raise ValidationError(
{'anomalies': 'Time period makes no sense for anomalies calculation without climatology.', })
box = {'min_lat': self.min_lat, 'min_lon': self.min_lon, 'max_lat': self.max_lat, 'max_lon': self.max_lon}
if any(x is None for x in box.values()) and any(x is not None for x in box.values()):
affected_fields = {}
for key, value in box.items():
if value is None:
affected_fields[key] = 'For spatial subsetting, please set all bounding box coordinates.'
raise ValidationError(affected_fields)
def __str__(self):
return "id: {}, user: {}, start: {} )".format(self.id, self.user, self.start_time)
@property
def output_dir_url(self):
if bool(self.output_file) is False:
return None
url = regex_sub('[^/]+$', '', self.output_file.url)
return url
@property
def output_file_name(self):
if bool(self.output_file) is False:
return None
name = self.output_file.name.split('/')[1]
return name
@property
def is_a_copy(self):
copied_runs = CopiedValidations.objects.filter(copied_run_id=self.id)\
.annotate(is_copied=ExpressionWrapper(~Q(copied_run=F('original_run')), output_field=BooleanField())) \
.filter(is_copied=True)
return len(copied_runs) != 0
@receiver(post_delete, sender=ValidationRun)
def auto_delete_file_on_delete(sender, instance, **kwargs):
if instance.output_file:
rundir = path.dirname(instance.output_file.path)
if path.isdir(rundir):
rmtree(rundir)
| true | true |
79000906dcb627c39707b46c52140b350e7043eb | 1,573 | py | Python | backend/project404_t8/API/serializers.py | peterweckend/group-project-cmput404 | d59912dbe0252868452a2e142e4c20f953792740 | [
"MIT"
] | 5 | 2019-02-22T21:15:48.000Z | 2019-03-16T22:59:17.000Z | backend/project404_t8/API/serializers.py | cjlee1/group-project-cmput404 | 791ac00494b1005d5b3792492060806bcddc5cf6 | [
"MIT"
] | 66 | 2019-03-13T07:03:42.000Z | 2022-03-11T23:41:00.000Z | backend/project404_t8/API/serializers.py | cjlee1/group-project-cmput404 | 791ac00494b1005d5b3792492060806bcddc5cf6 | [
"MIT"
] | 7 | 2019-01-25T21:09:23.000Z | 2019-07-20T16:11:33.000Z | from rest_framework import serializers
from .models import Post, Comment, Friendship, Follow, Server, PostCategory, PostAuthorizedAuthor
# REST API Serializer JSON https://www.youtube.com/watch?v=V4NjlXiu5WI
from users.models import CustomUser
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = '__all__' #['id', 'username', 'password', 'last_login']
# "id", "last_login", "is_superuser", "first_name", "last_name",
# "email", "is_staff", "is_active", "date_joined", "username",
# "password", "admin", "timestamp", "groups", "user_permissions"
class PostSerializer(serializers.ModelSerializer):
class Meta:
model = Post
fields = '__all__'
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = '__all__'
class PostCategorySerializer(serializers.ModelSerializer):
class Meta:
model = PostCategory
fields = '__all__'
class PostAuthorizedAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = PostAuthorizedAuthor
fields = '__all__'
class FriendshipSerializer(serializers.ModelSerializer):
class Meta:
model = Friendship
fields = '__all__'
class FollowSerializer(serializers.ModelSerializer):
class Meta:
model = Follow
fields = '__all__'
class ServerSerializer(serializers.ModelSerializer):
# id = serializers.ReadOnlyField()
class Meta:
model = Server
fields = '__all__' | 30.843137 | 97 | 0.682136 | from rest_framework import serializers
from .models import Post, Comment, Friendship, Follow, Server, PostCategory, PostAuthorizedAuthor
from users.models import CustomUser
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = '__all__'
class PostSerializer(serializers.ModelSerializer):
class Meta:
model = Post
fields = '__all__'
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = '__all__'
class PostCategorySerializer(serializers.ModelSerializer):
class Meta:
model = PostCategory
fields = '__all__'
class PostAuthorizedAuthorSerializer(serializers.ModelSerializer):
class Meta:
model = PostAuthorizedAuthor
fields = '__all__'
class FriendshipSerializer(serializers.ModelSerializer):
class Meta:
model = Friendship
fields = '__all__'
class FollowSerializer(serializers.ModelSerializer):
class Meta:
model = Follow
fields = '__all__'
class ServerSerializer(serializers.ModelSerializer):
class Meta:
model = Server
fields = '__all__' | true | true |
79000943f3bf01c0665850ee6e73413e916581d8 | 522 | py | Python | stubs/micropython-v1_16-esp32/uheapq.py | mattytrentini/micropython-stubs | 4d596273823b69e9e5bcf5fa67f249c374ee0bbc | [
"MIT"
] | null | null | null | stubs/micropython-v1_16-esp32/uheapq.py | mattytrentini/micropython-stubs | 4d596273823b69e9e5bcf5fa67f249c374ee0bbc | [
"MIT"
] | null | null | null | stubs/micropython-v1_16-esp32/uheapq.py | mattytrentini/micropython-stubs | 4d596273823b69e9e5bcf5fa67f249c374ee0bbc | [
"MIT"
] | null | null | null | """
Module: 'uheapq' on micropython-v1.16-esp32
"""
# MCU: {'ver': 'v1.16', 'port': 'esp32', 'arch': 'xtensawin', 'sysname': 'esp32', 'release': '1.16.0', 'name': 'micropython', 'mpy': 10757, 'version': '1.16.0', 'machine': 'ESP32 module (spiram) with ESP32', 'build': '', 'nodename': 'esp32', 'platform': 'esp32', 'family': 'micropython'}
# Stubber: 1.5.4
from typing import Any
def heapify(*args, **kwargs) -> Any:
...
def heappop(*args, **kwargs) -> Any:
...
def heappush(*args, **kwargs) -> Any:
...
| 27.473684 | 287 | 0.58046 |
from typing import Any
def heapify(*args, **kwargs) -> Any:
...
def heappop(*args, **kwargs) -> Any:
...
def heappush(*args, **kwargs) -> Any:
...
| true | true |
79000958ca956d1f4ddd649980e8d598a155c6b0 | 251 | py | Python | bdd_example/login/urls.py | omiguelperez/DjangoLoginBDD | 9714a1cf31d95a0df2c3a82ccde5b8c00c92268d | [
"MIT"
] | null | null | null | bdd_example/login/urls.py | omiguelperez/DjangoLoginBDD | 9714a1cf31d95a0df2c3a82ccde5b8c00c92268d | [
"MIT"
] | null | null | null | bdd_example/login/urls.py | omiguelperez/DjangoLoginBDD | 9714a1cf31d95a0df2c3a82ccde5b8c00c92268d | [
"MIT"
] | null | null | null | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.login_root, name='login_root'),
url(r'^success/$', views.login_success, name='login_success'),
url(r'^fail/$', views.login_fail, name='login_fail'),
]
| 25.1 | 66 | 0.673307 | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.login_root, name='login_root'),
url(r'^success/$', views.login_success, name='login_success'),
url(r'^fail/$', views.login_fail, name='login_fail'),
]
| true | true |
790009f836b12214d122c317d4c425d2cac87663 | 1,413 | py | Python | gold nanocluster synthesis/own_package/others.py | acceleratedmaterials/NUS_workshop | 8937111a4f4d252ed76e33897fd4be7d9582a491 | [
"MIT"
] | 3 | 2018-11-14T13:13:48.000Z | 2022-01-13T16:49:10.000Z | gold nanocluster synthesis/own_package/others.py | acceleratedmaterials/NUS_workshop | 8937111a4f4d252ed76e33897fd4be7d9582a491 | [
"MIT"
] | 17 | 2020-01-28T22:32:32.000Z | 2022-03-11T23:33:24.000Z | gold nanocluster synthesis/own_package/others.py | acceleratedmaterials/NUS_workshop | 8937111a4f4d252ed76e33897fd4be7d9582a491 | [
"MIT"
] | 10 | 2018-11-14T13:15:33.000Z | 2022-01-13T16:48:57.000Z | import numpy as np
import pandas as pd
from openpyxl import load_workbook
import sys
def print_array_to_excel(array, first_cell, ws, axis=2):
'''
Print an np array to excel using openpyxl
:param array: np array
:param first_cell: first cell to start dumping values in
:param ws: worksheet reference. From openpyxl, ws=wb[sheetname]
:param axis: to determine if the array is a col vector (0), row vector (1), or 2d matrix (2)
'''
if isinstance(array, (list,)):
array = np.array(array)
shape = array.shape
if axis == 0:
# Treat array as col vector and print along the rows
array.flatten() # Flatten in case the input array is a nx1 ndarry which acts weird
for i in range(shape[0]):
j = 0
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[i]
elif axis == 1:
# Treat array as row vector and print along the columns
array.flatten() # Flatten in case the input array is a 1xn ndarry which acts weird
for j in range(shape[0]):
i = 0
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[j]
elif axis == 2:
# If axis==2, means it is a 2d array
for i in range(shape[0]):
for j in range(shape[1]):
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[i, j]
if __name__ == '__main__':
print('hi') | 38.189189 | 96 | 0.610757 | import numpy as np
import pandas as pd
from openpyxl import load_workbook
import sys
def print_array_to_excel(array, first_cell, ws, axis=2):
if isinstance(array, (list,)):
array = np.array(array)
shape = array.shape
if axis == 0:
array.flatten()
for i in range(shape[0]):
j = 0
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[i]
elif axis == 1:
array.flatten()
for j in range(shape[0]):
i = 0
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[j]
elif axis == 2:
for i in range(shape[0]):
for j in range(shape[1]):
ws.cell(i + first_cell[0], j + first_cell[1]).value = array[i, j]
if __name__ == '__main__':
print('hi') | true | true |
79000aa5b4520916e6ece843d77d27a1c53957d8 | 1,394 | py | Python | audio_lab_pynq/AxisSwitch.py | cramsay/Audio-Lab-PYNQ | 8f4d1f767b2e697253c7c6dea6445d880401869f | [
"WTFPL"
] | 3 | 2020-04-20T21:48:01.000Z | 2022-01-31T11:24:01.000Z | audio_lab_pynq/AxisSwitch.py | cramsay/Audio-Lab-PYNQ | 8f4d1f767b2e697253c7c6dea6445d880401869f | [
"WTFPL"
] | null | null | null | audio_lab_pynq/AxisSwitch.py | cramsay/Audio-Lab-PYNQ | 8f4d1f767b2e697253c7c6dea6445d880401869f | [
"WTFPL"
] | 1 | 2022-01-31T11:24:05.000Z | 2022-01-31T11:24:05.000Z | from pynq import DefaultIP
# Things to fix up about this driver:
# * Add safety checks [a la C driver](https://github.com/Xilinx/embeddedsw/blob/master/XilinxProcessorIPLib/drivers/axis_switch/src/xaxis_switch_hw.h)
# * Think about better interface / language to control the routing
class AxisSwitch(DefaultIP):
CTRL_OFFSET = 0x00
CFG_OFFSET = 0x40
CFG_GATE_MASK = 0x02
CFG_DISABLE_MASK = 0x80000000
def __init__(self, description):
super().__init__(description=description)
self.num_mi = int(description['parameters']['NUM_MI'])
self.num_si = int(description['parameters']['NUM_SI'])
self.disable_all()
bindto = ['xilinx.com:ip:axis_switch:1.1']
def start_cfg(self):
self.write(AxisSwitch.CTRL_OFFSET,
self.read(AxisSwitch.CTRL_OFFSET) &
(~AxisSwitch.CFG_GATE_MASK)
)
def stop_cfg(self):
self.write(AxisSwitch.CTRL_OFFSET,
self.read(AxisSwitch.CTRL_OFFSET) |
AxisSwitch.CFG_GATE_MASK
)
def route_pair(self, master, slave):
self.write(AxisSwitch.CFG_OFFSET+4*master, slave)
def disable_all(self):
for i in range(self.num_mi):
self.disable_master(i)
def disable_master(self, master):
self.write(AxisSwitch.CFG_OFFSET+4*master, AxisSwitch.CFG_DISABLE_MASK)
| 30.977778 | 151 | 0.658537 | from pynq import DefaultIP
class AxisSwitch(DefaultIP):
CTRL_OFFSET = 0x00
CFG_OFFSET = 0x40
CFG_GATE_MASK = 0x02
CFG_DISABLE_MASK = 0x80000000
def __init__(self, description):
super().__init__(description=description)
self.num_mi = int(description['parameters']['NUM_MI'])
self.num_si = int(description['parameters']['NUM_SI'])
self.disable_all()
bindto = ['xilinx.com:ip:axis_switch:1.1']
def start_cfg(self):
self.write(AxisSwitch.CTRL_OFFSET,
self.read(AxisSwitch.CTRL_OFFSET) &
(~AxisSwitch.CFG_GATE_MASK)
)
def stop_cfg(self):
self.write(AxisSwitch.CTRL_OFFSET,
self.read(AxisSwitch.CTRL_OFFSET) |
AxisSwitch.CFG_GATE_MASK
)
def route_pair(self, master, slave):
self.write(AxisSwitch.CFG_OFFSET+4*master, slave)
def disable_all(self):
for i in range(self.num_mi):
self.disable_master(i)
def disable_master(self, master):
self.write(AxisSwitch.CFG_OFFSET+4*master, AxisSwitch.CFG_DISABLE_MASK)
| true | true |
79000b75fa842f24fb3c23d1a26f99744a99976d | 16,241 | py | Python | test/functional/mempool_accept.py | Mantle-One/mantlecoin | 61342ad672819d412b17ec110aca89a9d0205dcc | [
"MIT"
] | null | null | null | test/functional/mempool_accept.py | Mantle-One/mantlecoin | 61342ad672819d412b17ec110aca89a9d0205dcc | [
"MIT"
] | null | null | null | test/functional/mempool_accept.py | Mantle-One/mantlecoin | 61342ad672819d412b17ec110aca89a9d0205dcc | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2017-2020 The Mantle Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
from decimal import Decimal
from io import BytesIO
import math
from test_framework.test_framework import MantleTestFramework
from test_framework.key import ECKey
from test_framework.messages import (
BIP125_SEQUENCE_NUMBER,
COIN,
COutPoint,
CTransaction,
CTxOut,
MAX_BLOCK_BASE_SIZE,
MAX_MONEY,
)
from test_framework.script import (
hash160,
CScript,
OP_0,
OP_2,
OP_3,
OP_CHECKMULTISIG,
OP_EQUAL,
OP_HASH160,
OP_RETURN,
)
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
hex_str_to_bytes,
)
class MempoolAcceptanceTest(MantleTestFramework):
    """Drives the `testmempoolaccept` RPC through its rejection paths.

    Each scenario crafts or mutates a raw transaction to trigger exactly one
    reject-reason, then asserts the RPC's verdict via check_mempool_result(),
    which also verifies that probing acceptance never mutates the mempool.
    The scenarios are order-dependent: later ones reuse txids and coins
    created by earlier ones.
    """

    def set_test_params(self):
        # Single node. -txindex lets getrawtransaction look up arbitrary
        # txids; -permitbaremultisig=0 makes bare multisig nonstandard so the
        # 'bare-multisig' rejection below can fire.
        self.num_nodes = 1
        self.extra_args = [[
            '-txindex','-permitbaremultisig=0',
        ]] * self.num_nodes
        self.supports_cli = False

    def skip_test_if_missing_module(self):
        # Transactions are signed with the wallet, so it must be compiled in.
        self.skip_if_no_wallet()

    def check_mempool_result(self, result_expected, *args, **kwargs):
        """Wrapper to check result of testmempoolaccept on node_0's mempool"""
        result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
        assert_equal(result_expected, result_test)
        assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size) # Must not change mempool state

    def run_test(self):
        """Runs all acceptance scenarios against node 0 in sequence."""
        node = self.nodes[0]

        self.log.info('Start with empty mempool, and 200 blocks')
        # self.mempool_size tracks the expected mempool entry count; it is
        # cross-checked after every testmempoolaccept call.
        self.mempool_size = 0
        assert_equal(node.getblockcount(), 200)
        assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
        coins = node.listunspent()

        self.log.info('Should not accept garbage to testmempoolaccept')
        assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
        assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
        assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))

        self.log.info('A transaction already in the blockchain')
        coin = coins.pop() # Pick a random coin(base) to spend
        raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
            outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
        ))['hex']
        txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        self.check_mempool_result(
            result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
            rawtxs=[raw_tx_in_block],
        )

        self.log.info('A transaction not in the mempool')
        fee = Decimal('0.000007')
        raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later
            outputs=[{node.getnewaddress(): Decimal('0.3') - fee}],
        ))['hex']
        tx = CTransaction()
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A final transaction not in the mempool')
        coin = coins.pop() # Pick a random coin(base) to spend
        output_amount = Decimal('0.025')
        raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL
            outputs=[{node.getnewaddress(): output_amount}],
            locktime=node.getblockcount() + 2000, # Can be anything
        ))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final)))
        fee_expected = coin['amount'] - output_amount
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
        node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
        self.mempool_size += 1

        self.log.info('A transaction in the mempool')
        node.sendrawtransaction(hexstring=raw_tx_0)
        self.mempool_size += 1
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that replaces a mempool transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(fee * COIN) # Double the fee
        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1 # Now, opt out of RBF
        raw_tx_0 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        txid_0 = tx.rehash()
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}],
            rawtxs=[raw_tx_0],
        )

        self.log.info('A transaction that conflicts with an unconfirmed tx')
        # Send the transaction that replaces the mempool transaction and opts out of replaceability
        node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
        # take original raw_tx_0
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with missing inputs, that never existed')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
        # skip re-signing the tx
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with missing inputs, that existed once in the past')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
        tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
        raw_tx_1 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
        txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
        # Now spend both to "clearly hide" the outputs, ie. remove the coins from the utxo set by spending them
        raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[
                {'txid': txid_0, 'vout': 0},
                {'txid': txid_1, 'vout': 0},
            ],
            outputs=[{node.getnewaddress(): 0.1}]
        ))['hex']
        txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0)
        node.generate(1)
        self.mempool_size = 0
        # Now see if we can add the coins back to the utxo set by sending the exact txs again
        self.check_mempool_result(
            result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_0],
        )
        self.check_mempool_result(
            result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
            rawtxs=[raw_tx_1],
        )

        self.log.info('Create a signed "reference" tx for later use')
        # All subsequent scenarios mutate copies of this valid transaction.
        raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
            inputs=[{'txid': txid_spend_both, 'vout': 0}],
            outputs=[{node.getnewaddress(): 0.05}],
        ))['hex']
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        # Reference tx should be valid on itself
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )

        self.log.info('A transaction with no outputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = []
        # Skip re-signing the transaction for context independent checks from now on
        # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(tx.serialize().hex())['hex'])))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A really large transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with negative output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue *= -1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}],
            rawtxs=[tx.serialize().hex()],
        )

        # The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
        self.log.info('A transaction with too large output value')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].nValue = MAX_MONEY + 1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with too large sum of output values')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout = [tx.vout[0]] * 2
        tx.vout[0].nValue = MAX_MONEY
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction with duplicate inputs')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin = [tx.vin[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A coinbase transaction')
        # Pick the input of the first tx we signed, so it has to be a coinbase tx
        raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('Some nonstandard transactions')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.nVersion = 3 # A version currently non-standard
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        key = ECKey()
        key.generate()
        pubkey = key.get_pubkey().get_bytes()
        tx.vout[0].scriptPubKey = CScript([OP_2, pubkey, pubkey, pubkey, OP_3, OP_CHECKMULTISIG]) # Some bare multisig script (2-of-3)
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].scriptSig = CScript([b'a' * 1648]) # Some too large scriptSig (>1650 bytes)
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
        num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy
        tx.vout = [output_p2sh_burn] * num_scripts
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0] = output_p2sh_burn
        tx.vout[0].nValue -= 1 # Make output smaller, such that it is dust for our policy
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
            rawtxs=[tx.serialize().hex()],
        )
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
        tx.vout = [tx.vout[0]] * 2
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A timelocked transaction')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored
        tx.nLockTime = node.getblockcount() + 1
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}],
            rawtxs=[tx.serialize().hex()],
        )

        self.log.info('A transaction that is locked by BIP68 sequence logic')
        tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
        tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one
        # Can skip re-signing the tx because of early rejection
        self.check_mempool_result(
            result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}],
            rawtxs=[tx.serialize().hex()],
            maxfeerate=0,
        )
if __name__ == '__main__':
    # Entry point when executed directly by the functional test runner.
    MempoolAcceptanceTest().main()
| 48.050296 | 154 | 0.634382 |
from decimal import Decimal
from io import BytesIO
import math
from test_framework.test_framework import MantleTestFramework
from test_framework.key import ECKey
from test_framework.messages import (
BIP125_SEQUENCE_NUMBER,
COIN,
COutPoint,
CTransaction,
CTxOut,
MAX_BLOCK_BASE_SIZE,
MAX_MONEY,
)
from test_framework.script import (
hash160,
CScript,
OP_0,
OP_2,
OP_3,
OP_CHECKMULTISIG,
OP_EQUAL,
OP_HASH160,
OP_RETURN,
)
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
hex_str_to_bytes,
)
class MempoolAcceptanceTest(MantleTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[
'-txindex','-permitbaremultisig=0',
]] * self.num_nodes
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def check_mempool_result(self, result_expected, *args, **kwargs):
result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
assert_equal(result_expected, result_test)
assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size)
def run_test(self):
node = self.nodes[0]
self.log.info('Start with empty mempool, and 200 blocks')
self.mempool_size = 0
assert_equal(node.getblockcount(), 200)
assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
coins = node.listunspent()
self.log.info('Should not accept garbage to testmempoolaccept')
assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))
self.log.info('A transaction already in the blockchain')
coin = coins.pop()
raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
))['hex']
txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, maxfeerate=0)
node.generate(1)
self.mempool_size = 0
self.check_mempool_result(
result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
rawtxs=[raw_tx_in_block],
)
self.log.info('A transaction not in the mempool')
fee = Decimal('0.000007')
raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}],
outputs=[{node.getnewaddress(): Decimal('0.3') - fee}],
))['hex']
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}],
rawtxs=[raw_tx_0],
)
self.log.info('A final transaction not in the mempool')
coin = coins.pop()
output_amount = Decimal('0.025')
raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}],
outputs=[{node.getnewaddress(): output_amount}],
locktime=node.getblockcount() + 2000,
))['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final)))
fee_expected = coin['amount'] - output_amount
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
self.mempool_size += 1
self.log.info('A transaction in the mempool')
node.sendrawtransaction(hexstring=raw_tx_0)
self.mempool_size += 1
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that replaces a mempool transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vout[0].nValue -= int(fee * COIN)
tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1
raw_tx_0 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that conflicts with an unconfirmed tx')
node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vout[0].nValue -= int(4 * fee * COIN)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
self.log.info('A transaction with missing inputs, that never existed')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with missing inputs, that existed once in the past')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vin[0].prevout.n = 1
raw_tx_1 = node.signrawtransactionwithwallet(tx.serialize().hex())['hex']
txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[
{'txid': txid_0, 'vout': 0},
{'txid': txid_1, 'vout': 0},
],
outputs=[{node.getnewaddress(): 0.1}]
))['hex']
txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, maxfeerate=0)
node.generate(1)
self.mempool_size = 0
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_0],
)
self.check_mempool_result(
result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_1],
)
self.log.info('Create a signed "reference" tx for later use')
raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': txid_spend_both, 'vout': 0}],
outputs=[{node.getnewaddress(): 0.05}],
))['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
self.log.info('A transaction with no outputs')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout = []
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A really large transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with negative output value')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].nValue *= -1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with too large output value')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].nValue = MAX_MONEY + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with too large sum of output values')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout = [tx.vout[0]] * 2
tx.vout[0].nValue = MAX_MONEY
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with duplicate inputs')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin = [tx.vin[0]] * 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A coinbase transaction')
raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('Some nonstandard transactions')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.nVersion = 3
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].scriptPubKey = CScript([OP_0])
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
key = ECKey()
key.generate()
pubkey = key.get_pubkey().get_bytes()
tx.vout[0].scriptPubKey = CScript([OP_2, pubkey, pubkey, pubkey, OP_3, OP_CHECKMULTISIG])
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].scriptSig = CScript([OP_HASH160])
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].scriptSig = CScript([b'a' * 1648])
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
num_scripts = 100000 // len(output_p2sh_burn.serialize())
tx.vout = [output_p2sh_burn] * num_scripts
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0] = output_p2sh_burn
tx.vout[0].nValue -= 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
rawtxs=[tx.serialize().hex()],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
tx.vout = [tx.vout[0]] * 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A timelocked transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].nSequence -= 1
tx.nLockTime = node.getblockcount() + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction that is locked by BIP68 sequence logic')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].nSequence = 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
if __name__ == '__main__':
MempoolAcceptanceTest().main()
| true | true |
79000c0436d1ec7a629af2fb10decb61e631f8e2 | 18,203 | py | Python | tools/web_dev_style/css_checker_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | tools/web_dev_style/css_checker_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 86 | 2015-10-21T13:02:42.000Z | 2022-03-14T07:50:50.000Z | tools/web_dev_style/css_checker_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | #!/usr/bin/env python3
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from . import css_checker
from os import path as os_path
import re
from sys import path as sys_path
import unittest
_HERE = os_path.dirname(os_path.abspath(__file__))
sys_path.append(os_path.join(_HERE, '..', '..'))
from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi, MockFile
class CssCheckerTest(unittest.TestCase):
def setUp(self):
    """Builds a fresh mock input API and CSS checker for each test case."""
    super().setUp()
    mock_input = MockInputApi()
    self.input_api = mock_input
    self.checker = css_checker.CSSChecker(mock_input, MockOutputApi())
def _create_file(self, contents, filename):
    """Registers |contents| as a changed file named |filename|."""
    lines = contents.splitlines()
    self.input_api.files.append(MockFile(filename, lines))
def VerifyContentIsValid(self, contents, filename='fake.css'):
    """Asserts that the checker finds no problems in |contents|."""
    self._create_file(contents, filename)
    self.assertEqual(len(self.checker.RunChecks()), 0)
def VerifyContentsProducesOutput(self, contents, output, filename='fake.css'):
    """Asserts the checker emits exactly one finding matching |output|."""
    self._create_file(contents, filename)
    results = self.checker.RunChecks()
    self.assertEqual(1, len(results))
    expected_message = filename + ':\n' + output.strip()
    self.assertEqual(results[0].message, expected_message)
def testCssAlphaWithAtBlock(self):
    # Alphabetization violations are reported even inside @media blocks and
    # grit <if expr>/<include> markup, while comments are ignored.
    self.VerifyContentsProducesOutput("""
<include src="../shared/css/cr/ui/overlay.css">
<include src="chrome://resources/totally-cool.css" />
/* A hopefully safely ignored comment and @media statement. /**/
@media print {
div {
display: block;
color: red;
}
}
.rule {
z-index: 5;
<if expr="not is macosx">
background-image: url(chrome://resources/BLAH); /* TODO(dbeam): Fix this. */
background-color: rgb(235, 239, 249);
</if>
<if expr="is_macosx">
background-color: white;
background-image: url(chrome://resources/BLAH2);
</if>
color: black;
}
<if expr="is_macosx">
.language-options-right {
visibility: hidden;
opacity: 1; /* TODO(dbeam): Fix this. */
}
</if>
@media (prefers-color-scheme: dark) {
a[href] {
z-index: 3;
color: blue;
}
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
display: block;
color: red;
z-index: 5;
color: black;
z-index: 3;
color: blue;""")
def testCssStringWithAt(self):
    # '@' inside url() values (e.g. @2x assets) must not be mistaken for an
    # at-rule; logical properties and empty rules are also fine.
    self.VerifyContentIsValid("""
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
background: none;
}
div {
margin-inline-start: 5px;
}
.stuff1 {
}
.stuff2 {
}
""")
def testCssAlphaWithNonStandard(self):
    # Vendor-prefixed (-webkit) properties must sort before standard ones.
    self.VerifyContentsProducesOutput("""
div {
/* A hopefully safely ignored comment and @media statement. /**/
color: red;
-webkit-margin-before-collapse: discard;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
color: red;
-webkit-margin-before-collapse: discard;""")
def testCssAlphaWithLongerDashedProps(self):
    # Dashed long-form properties (border-inline-start) must still be
    # alphabetized against their shorthand (border).
    self.VerifyContentsProducesOutput("""
div {
border-inline-start: 5px; /* A hopefully removed comment. */
border: 5px solid red;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
border-inline-start: 5px;
border: 5px solid red;""")
def testCssAlphaWithVariables(self):
    # Custom properties (--names) are exempt from alphabetization checks.
    self.VerifyContentIsValid("""
#id {
--zzyxx-xylophone: 3px;
--aardvark-animal: var(--zzyxz-xylophone);
}
""")
def testCssBracesHaveSpaceBeforeAndNothingAfter(self):
    # Opening braces need a preceding space and must end the selector line.
    self.VerifyContentsProducesOutput("""
/* Hello! */div/* Comment here*/{
display: block;
}
blah /* hey! */
{
rule: value;
}
.mixed-in {
display: none;
}
.this.is { /* allowed */
rule: value;
}""", """
- Start braces ({) end a selector, have a space before them and no rules after.
div{
{""")
def testMixins(self):
    # Polymer-style CSS mixin definitions (--name: { ... }) are disallowed.
    self.VerifyContentsProducesOutput(
        """
.mixed-in {
--css-mixin: {
color: red;
}
}""", """
- Avoid using CSS mixins. Use CSS shadow parts, CSS variables, or common CSS \
classes instead.
--css-mixin: {""")
def testCssClassesUseDashes(self):
    # Class selectors must be dash-form; camelCase and snake_case are flagged.
    self.VerifyContentsProducesOutput("""
.className,
.ClassName,
.class-name /* We should not catch this. */,
.class_name,
[i18n-values*='.innerHTML:'] {
display: block;
}""", """
- Classes use .dash-form.
.className,
.ClassName,
.class_name,""")
def testCssCloseBraceOnNewLine(self):
    # Closing braces must be on their own line; single-line keyframe steps
    # and $i18n{...} placeholders are exempt.
    self.VerifyContentsProducesOutput("""
@-webkit-keyframe blah {
from { height: rotate(-10turn); }
100% { height: 500px; }
}
#id { /* $i18n{*} and $i18nRaw{*} should be ignored. */
rule: $i18n{someValue};
rule2: $i18nRaw{someValue};
}
#rule {
rule: value; }""", """
- Always put a rule closing brace (}) on a new line.
rule: value; }""")
def testCssColonsHaveSpaceAfter(self):
    # Property colons need a trailing space; pseudo-selectors and url() data
    # are not false positives (though data URIs are separately flagged).
    self.VerifyContentsProducesOutput("""
div:not(.class):not([attr=5]), /* We should not catch this. */
div:not(.class):not([attr]) /* Nor this. */ {
background: url(data:image/jpeg,asdfasdfsadf); /* Ignore this. */
background: -webkit-linear-gradient(left, red,
80% blah blee blar);
color: red;
display:block;
}""", """
- Colons (:) should have a space after them.
display:block;
- Don't use data URIs in source files. Use grit instead.
background: url(data:image/jpeg,asdfasdfsadf);""")
def testCssFavorSingleQuotes(self):
    # Double-quoted strings are flagged; grit <if expr="..."> attributes are
    # exempt.
    self.VerifyContentsProducesOutput("""
html[dir="rtl"] body,
html[dir=ltr] body /* TODO(dbeam): Require '' around rtl in future? */ {
font-family: "Open Sans";
<if expr="is_macosx">
blah: blee;
</if>
}""", """
- Use single quotes (') instead of double quotes (") in strings.
html[dir="rtl"] body,
font-family: "Open Sans";""")
def testCssHexCouldBeShorter(self):
    # Gray #rrggbb colors should be abbreviated to #rgb; non-gray hex should
    # use rgb(); selector names containing hex-like text are not flagged.
    self.VerifyContentsProducesOutput("""
#abc,
#abc-,
#abc-ghij,
#abcdef-,
#abcdef-ghij,
#aaaaaa,
#bbaacc {
background-color: #336699; /* Ignore short hex rule if not gray. */
color: #999999;
color: #666;
}""", """
- Use abbreviated hex (#rgb) when in form #rrggbb.
color: #999999; (replace with #999)
- Use rgb() over #hex when not a shade of gray (like #333).
background-color: #336699; (replace with rgb(51, 102, 153))""")
def testCssUseMillisecondsForSmallTimes(self):
    # Sub-second durations must be expressed in ms, not fractional seconds.
    self.VerifyContentsProducesOutput("""
.transition-0s /* This is gross but may happen. */ {
transform: one 0.2s;
transform: two .1s;
transform: tree 1s;
transform: four 300ms;
}""", """
- Use milliseconds for time measurements under 1 second.
transform: one 0.2s; (replace with 200ms)
transform: two .1s; (replace with 100ms)""")
def testCssNoDataUrisInSourceFiles(self):
    # data: URIs in url() are flagged, including with surrounding whitespace.
    self.VerifyContentsProducesOutput("""
img {
background: url( data:image/jpeg,4\/\/350|\/|3|2 );
}""", """
- Don't use data URIs in source files. Use grit instead.
background: url( data:image/jpeg,4\/\/350|\/|3|2 );""")
def testCssNoMixinShims(self):
    # Properties matching Polymer's mixin-shim naming (--x_-_y) are flagged;
    # ordinary custom properties are fine.
    self.VerifyContentsProducesOutput("""
:host {
--good-property: red;
--not-okay-mixin_-_not-okay-property: green;
}""", """
- Don't override custom properties created by Polymer's mixin shim. Set \
mixins or documented custom properties directly.
--not-okay-mixin_-_not-okay-property: green;""")
def testCssNoQuotesInUrl(self):
    # url() arguments must be unquoted; double quotes additionally trip the
    # single-quote rule.
    self.VerifyContentsProducesOutput("""
img {
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");
}""", """
- Use single quotes (') instead of double quotes (") in strings.
background: url("../../folder/hello.png");
- Don't use quotes in url().
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");""")
def testCssOneRulePerLine(self):
    # Two declarations on one line are flagged; a declaration followed by a
    # comment that merely looks like a declaration is not.
    self.VerifyContentsProducesOutput("""
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type,
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type ~
input[type='checkbox']:not([hidden]),
div {
background: url(chrome://resources/BLAH);
rule: value; /* rule: value; */
rule: value; rule: value;
}
""", """
- One rule per line (what not to do: color: red; margin: 0;).
rule: value; rule: value;""")
def testCssOneSelectorPerLine(self):
    # Comma-separated selectors sharing a line are flagged; commas inside
    # functional pseudo-classes like :any()/:is() are not selector splits.
    self.VerifyContentsProducesOutput(
        """
a,
div,a,
div,/* Hello! */ span,
#id.class([dir=rtl]):not(.class):any(a, b, d),
div :is(:not(a), #b, .c) {
rule: value;
}
a,
div,a {
some-other: rule here;
}""", """
- One selector per line (what not to do: a, b {}).
div,a,
div, span,
div,a {""")
def testCssPseudoElementDoubleColon(self):
    # Single-colon pseudo-elements (:after/:before/scrollbar parts) are
    # flagged; pseudo-classes such as :hover/:focus are left alone.
    self.VerifyContentsProducesOutput("""
a:href,
br::after,
::-webkit-scrollbar-thumb,
a:not([empty]):hover:focus:active, /* shouldn't catch here and above */
abbr:after,
.tree-label:empty:after,
b:before,
:-WebKit-ScrollBar {
rule: value;
}""", """
- Pseudo-elements should use double colon (i.e. ::after).
:after (should be ::after)
:after (should be ::after)
:before (should be ::before)
:-WebKit-ScrollBar (should be ::-WebKit-ScrollBar)
""")
def testCssRgbIfNotGray(self):
    # Non-gray hex colors should be written with rgb(); grays (#aaa) pass.
    # The expected-output literal below uses implicit string concatenation.
    self.VerifyContentsProducesOutput(
        """
#abc,
#aaa,
#aabbcc {
background: -webkit-linear-gradient(left, from(#abc), to(#def));
color: #bad;
color: #bada55;
}""", """
- Use rgb() over #hex when not a shade of gray (like #333).
background: -webkit-linear-gradient(left, from(#abc), to(#def)); """
        """(replace with rgb(170, 187, 204), rgb(221, 238, 255))
color: #bad; (replace with rgb(187, 170, 221))
color: #bada55; (replace with rgb(186, 218, 85))""")
def testPrefixedLogicalAxis(self):
    # -webkit- prefixed logical *axis* properties should be replaced by the
    # standard block-size/inline-size family.
    self.VerifyContentsProducesOutput("""
.test {
-webkit-logical-height: 50%;
-webkit-logical-width: 50%;
-webkit-max-logical-height: 200px;
-webkit-max-logical-width: 200px;
-webkit-min-logical-height: 100px;
-webkit-min-logical-width: 100px;
}
""", """
- Unprefix logical axis property.
-webkit-logical-height: 50%; (replace with block-size)
-webkit-logical-width: 50%; (replace with inline-size)
-webkit-max-logical-height: 200px; (replace with max-block-size)
-webkit-max-logical-width: 200px; (replace with max-inline-size)
-webkit-min-logical-height: 100px; (replace with min-block-size)
-webkit-min-logical-width: 100px; (replace with min-inline-size)""")
def testPrefixedLogicalSide(self):
    # -webkit- prefixed logical *side* properties should be replaced by the
    # standard border/margin/padding-{block,inline}-{start,end} family.
    # Note: the -collapse variants below have no standard replacement and
    # are not listed in the expected output.
    self.VerifyContentsProducesOutput("""
.test {
-webkit-border-after: 1px solid blue;
-webkit-border-after-color: green;
-webkit-border-after-style: dotted;
-webkit-border-after-width: 10px;
-webkit-border-before: 2px solid blue;
-webkit-border-before-color: green;
-webkit-border-before-style: dotted;
-webkit-border-before-width: 20px;
-webkit-border-end: 3px solid blue;
-webkit-border-end-color: green;
-webkit-border-end-style: dotted;
-webkit-border-end-width: 30px;
-webkit-border-start: 4px solid blue;
-webkit-border-start-color: green;
-webkit-border-start-style: dotted;
-webkit-border-start-width: 40px;
-webkit-margin-after: 1px;
-webkit-margin-after-collapse: discard;
-webkit-margin-before: 2px;
-webkit-margin-before-collapse: discard;
-webkit-margin-end: 3px;
-webkit-margin-end-collapse: discard;
-webkit-margin-start: 4px;
-webkit-margin-start-collapse: discard;
-webkit-padding-after: 1px;
-webkit-padding-before: 2px;
-webkit-padding-end: 3px;
-webkit-padding-start: 4px;
}
""", """
- Unprefix logical side property.
-webkit-border-after: 1px solid blue; (replace with border-block-end)
-webkit-border-after-color: green; (replace with border-block-end-color)
-webkit-border-after-style: dotted; (replace with border-block-end-style)
-webkit-border-after-width: 10px; (replace with border-block-end-width)
-webkit-border-before: 2px solid blue; (replace with border-block-start)
-webkit-border-before-color: green; (replace with border-block-start-color)
-webkit-border-before-style: dotted; (replace with border-block-start-style)
-webkit-border-before-width: 20px; (replace with border-block-start-width)
-webkit-border-end: 3px solid blue; (replace with border-inline-end)
-webkit-border-end-color: green; (replace with border-inline-end-color)
-webkit-border-end-style: dotted; (replace with border-inline-end-style)
-webkit-border-end-width: 30px; (replace with border-inline-end-width)
-webkit-border-start: 4px solid blue; (replace with border-inline-start)
-webkit-border-start-color: green; (replace with border-inline-start-color)
-webkit-border-start-style: dotted; (replace with border-inline-start-style)
-webkit-border-start-width: 40px; (replace with border-inline-start-width)
-webkit-margin-after: 1px; (replace with margin-block-end)
-webkit-margin-before: 2px; (replace with margin-block-start)
-webkit-margin-end: 3px; (replace with margin-inline-end)
-webkit-margin-start: 4px; (replace with margin-inline-start)
-webkit-padding-after: 1px; (replace with padding-block-end)
-webkit-padding-before: 2px; (replace with padding-block-start)
-webkit-padding-end: 3px; (replace with padding-inline-end)
-webkit-padding-start: 4px; (replace with padding-inline-start)""")
def testStartEndInsteadOfLeftRight(self):
    # Physical -left/-right properties should use logical -start/-end;
    # custom properties and csschecker-disable-line suppressions are skipped.
    self.VerifyContentsProducesOutput("""
.inline-node {
--var-is-ignored-left: 10px;
--var-is-ignored-right: 10px;
border-left-color: black;
border-right: 1px solid blue; /* csschecker-disable-line left-right */
margin-right: 5px;
padding-left: 10px; /* csschecker-disable-line some-other-thing */
text-align: right;
}""", """
- Use -start/end instead of -left/right (https://goo.gl/gQYY7z, add /* csschecker-disable-line left-right */ to suppress)
border-left-color: black; (replace with border-inline-start-color)
margin-right: 5px; (replace with margin-inline-end)
padding-left: 10px; (replace with padding-inline-start)
text-align: right; (replace with text-align: end)
""")
def testCssZeroWidthLengths(self):
    # Zero lengths must drop their unit (0px -> 0). Keyframe percentage
    # selectors, @2x image names and hsl() percentages must not be flagged.
    self.VerifyContentsProducesOutput("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
}
/* http://crbug.com/359682 */
#spinner-container #spinner {
-webkit-animation-duration: 1.0s;
background-image: url(images/google_logo0.svg);
}
.media-button.play > .state0.active,
.media-button[state='0'] > .state0.normal /* blah */, /* blee */
.media-button[state='0']:not(.disabled):hover > .state0.hover {
-webkit-animation: anim 0s;
-webkit-animation-duration: anim 0ms;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
color: hsl(0, 0%, 85%); /* Shouldn't trigger error. */
opacity: .0;
opacity: 0.0;
opacity: 0.;
}
@page {
border-width: 0mm;
height: 0cm;
width: 0in;
}""", """
- Use "0" for zero-width lengths (i.e. 0px -> 0)
width: 0px;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
opacity: .0;
opacity: 0.0;
opacity: 0.;
border-width: 0mm;
height: 0cm;
width: 0in;
""")
def testInlineStyleInHtml(self):
    # Only the contents of <style> tags in an .html file are checked;
    # CSS-looking text inside HTML comments is ignored.
    self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<head>
<!-- Don't warn about problems outside of style tags
html,
body {
margin: 0;
height: 100%;
}
-->
<style>
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithIncludes(self):
    # <style include="..."> (Polymer shared styles) must still be parsed.
    self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style include="fake-shared-css other-shared-css">
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithTagsInComments(self):
    # A "<tag>" inside a CSS comment must not end the <style> extraction.
    self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style>
body {
/* You better ignore the <tag> in this comment! */
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testRemoveAtBlocks(self):
    # RemoveAtBlocks should strip @media/@keyframes wrappers while keeping
    # the inner rules intact — including nested mixin-definition blocks,
    # keyframe selectors, multiple sibling rules, and @-blocks nested
    # inside other @-blocks.
    self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
color: #000;
}
}"""), """
.magic {
color: #000;
}""")
    self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
--mixin-definition: {
color: red;
};
}
}"""), """
.magic {
--mixin-definition: {
color: red;
};
}""")
    self.assertEqual(self.checker.RemoveAtBlocks("""
@keyframes jiggle {
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }
}"""), """
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }""")
    self.assertEqual(self.checker.RemoveAtBlocks("""
@media print {
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}
}"""), """
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}""")
    self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
@keyframe dark-fade {
0% { background: black; }
100% { background: darkgray; }
}
}"""), """
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
0% { background: black; }
100% { background: darkgray; }""")
    self.assertEqual(self.checker.RemoveAtBlocks("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}"""), """
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}""")
# Allow running this test module directly (e.g. `python css_checker_test.py`).
if __name__ == '__main__':
    unittest.main()
| 27.007418 | 121 | 0.658683 |
from . import css_checker
from os import path as os_path
import re
from sys import path as sys_path
import unittest
_HERE = os_path.dirname(os_path.abspath(__file__))
sys_path.append(os_path.join(_HERE, '..', '..'))
from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi, MockFile
class CssCheckerTest(unittest.TestCase):
def setUp(self):
super(CssCheckerTest, self).setUp()
self.input_api = MockInputApi()
self.checker = css_checker.CSSChecker(self.input_api, MockOutputApi())
def _create_file(self, contents, filename):
self.input_api.files.append(MockFile(filename, contents.splitlines()))
def VerifyContentIsValid(self, contents, filename='fake.css'):
self._create_file(contents, filename)
results = self.checker.RunChecks()
self.assertEqual(len(results), 0)
def VerifyContentsProducesOutput(self, contents, output, filename='fake.css'):
self._create_file(contents, filename)
results = self.checker.RunChecks()
self.assertEqual(len(results), 1)
self.assertEqual(results[0].message, filename + ':\n' + output.strip())
def testCssAlphaWithAtBlock(self):
self.VerifyContentsProducesOutput("""
<include src="../shared/css/cr/ui/overlay.css">
<include src="chrome://resources/totally-cool.css" />
/* A hopefully safely ignored comment and @media statement. /**/
@media print {
div {
display: block;
color: red;
}
}
.rule {
z-index: 5;
<if expr="not is macosx">
background-image: url(chrome://resources/BLAH); /* TODO(dbeam): Fix this. */
background-color: rgb(235, 239, 249);
</if>
<if expr="is_macosx">
background-color: white;
background-image: url(chrome://resources/BLAH2);
</if>
color: black;
}
<if expr="is_macosx">
.language-options-right {
visibility: hidden;
opacity: 1; /* TODO(dbeam): Fix this. */
}
</if>
@media (prefers-color-scheme: dark) {
a[href] {
z-index: 3;
color: blue;
}
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
display: block;
color: red;
z-index: 5;
color: black;
z-index: 3;
color: blue;""")
def testCssStringWithAt(self):
self.VerifyContentIsValid("""
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
background: none;
}
div {
margin-inline-start: 5px;
}
.stuff1 {
}
.stuff2 {
}
""")
def testCssAlphaWithNonStandard(self):
self.VerifyContentsProducesOutput("""
div {
/* A hopefully safely ignored comment and @media statement. /**/
color: red;
-webkit-margin-before-collapse: discard;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
color: red;
-webkit-margin-before-collapse: discard;""")
def testCssAlphaWithLongerDashedProps(self):
self.VerifyContentsProducesOutput("""
div {
border-inline-start: 5px; /* A hopefully removed comment. */
border: 5px solid red;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
border-inline-start: 5px;
border: 5px solid red;""")
def testCssAlphaWithVariables(self):
self.VerifyContentIsValid("""
#id {
--zzyxx-xylophone: 3px;
--aardvark-animal: var(--zzyxz-xylophone);
}
""")
def testCssBracesHaveSpaceBeforeAndNothingAfter(self):
self.VerifyContentsProducesOutput("""
/* Hello! */div/* Comment here*/{
display: block;
}
blah /* hey! */
{
rule: value;
}
.mixed-in {
display: none;
}
.this.is { /* allowed */
rule: value;
}""", """
- Start braces ({) end a selector, have a space before them and no rules after.
div{
{""")
def testMixins(self):
self.VerifyContentsProducesOutput(
"""
.mixed-in {
--css-mixin: {
color: red;
}
}""", """
- Avoid using CSS mixins. Use CSS shadow parts, CSS variables, or common CSS \
classes instead.
--css-mixin: {""")
def testCssClassesUseDashes(self):
self.VerifyContentsProducesOutput("""
.className,
.ClassName,
.class-name /* We should not catch this. */,
.class_name,
[i18n-values*='.innerHTML:'] {
display: block;
}""", """
- Classes use .dash-form.
.className,
.ClassName,
.class_name,""")
def testCssCloseBraceOnNewLine(self):
self.VerifyContentsProducesOutput("""
@-webkit-keyframe blah {
from { height: rotate(-10turn); }
100% { height: 500px; }
}
#id { /* $i18n{*} and $i18nRaw{*} should be ignored. */
rule: $i18n{someValue};
rule2: $i18nRaw{someValue};
}
#rule {
rule: value; }""", """
- Always put a rule closing brace (}) on a new line.
rule: value; }""")
def testCssColonsHaveSpaceAfter(self):
self.VerifyContentsProducesOutput("""
div:not(.class):not([attr=5]), /* We should not catch this. */
div:not(.class):not([attr]) /* Nor this. */ {
background: url(data:image/jpeg,asdfasdfsadf); /* Ignore this. */
background: -webkit-linear-gradient(left, red,
80% blah blee blar);
color: red;
display:block;
}""", """
- Colons (:) should have a space after them.
display:block;
- Don't use data URIs in source files. Use grit instead.
background: url(data:image/jpeg,asdfasdfsadf);""")
def testCssFavorSingleQuotes(self):
self.VerifyContentsProducesOutput("""
html[dir="rtl"] body,
html[dir=ltr] body /* TODO(dbeam): Require '' around rtl in future? */ {
font-family: "Open Sans";
<if expr="is_macosx">
blah: blee;
</if>
}""", """
- Use single quotes (') instead of double quotes (") in strings.
html[dir="rtl"] body,
font-family: "Open Sans";""")
def testCssHexCouldBeShorter(self):
self.VerifyContentsProducesOutput("""
#abc,
#abc-,
#abc-ghij,
#abcdef-,
#abcdef-ghij,
#aaaaaa,
#bbaacc {
background-color: #336699; /* Ignore short hex rule if not gray. */
color: #999999;
color: #666;
}""", """
- Use abbreviated hex (#rgb) when in form #rrggbb.
color: #999999; (replace with #999)
- Use rgb() over #hex when not a shade of gray (like #333).
background-color: #336699; (replace with rgb(51, 102, 153))""")
def testCssUseMillisecondsForSmallTimes(self):
self.VerifyContentsProducesOutput("""
.transition-0s /* This is gross but may happen. */ {
transform: one 0.2s;
transform: two .1s;
transform: tree 1s;
transform: four 300ms;
}""", """
- Use milliseconds for time measurements under 1 second.
transform: one 0.2s; (replace with 200ms)
transform: two .1s; (replace with 100ms)""")
def testCssNoDataUrisInSourceFiles(self):
self.VerifyContentsProducesOutput("""
img {
background: url( data:image/jpeg,4\/\/350|\/|3|2 );
}""", """
- Don't use data URIs in source files. Use grit instead.
background: url( data:image/jpeg,4\/\/350|\/|3|2 );""")
def testCssNoMixinShims(self):
self.VerifyContentsProducesOutput("""
:host {
--good-property: red;
--not-okay-mixin_-_not-okay-property: green;
}""", """
- Don't override custom properties created by Polymer's mixin shim. Set \
mixins or documented custom properties directly.
--not-okay-mixin_-_not-okay-property: green;""")
def testCssNoQuotesInUrl(self):
self.VerifyContentsProducesOutput("""
img {
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");
}""", """
- Use single quotes (') instead of double quotes (") in strings.
background: url("../../folder/hello.png");
- Don't use quotes in url().
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");""")
def testCssOneRulePerLine(self):
self.VerifyContentsProducesOutput("""
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type,
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type ~
input[type='checkbox']:not([hidden]),
div {
background: url(chrome://resources/BLAH);
rule: value; /* rule: value; */
rule: value; rule: value;
}
""", """
- One rule per line (what not to do: color: red; margin: 0;).
rule: value; rule: value;""")
def testCssOneSelectorPerLine(self):
self.VerifyContentsProducesOutput(
"""
a,
div,a,
div,/* Hello! */ span,
#id.class([dir=rtl]):not(.class):any(a, b, d),
div :is(:not(a), #b, .c) {
rule: value;
}
a,
div,a {
some-other: rule here;
}""", """
- One selector per line (what not to do: a, b {}).
div,a,
div, span,
div,a {""")
def testCssPseudoElementDoubleColon(self):
self.VerifyContentsProducesOutput("""
a:href,
br::after,
::-webkit-scrollbar-thumb,
a:not([empty]):hover:focus:active, /* shouldn't catch here and above */
abbr:after,
.tree-label:empty:after,
b:before,
:-WebKit-ScrollBar {
rule: value;
}""", """
- Pseudo-elements should use double colon (i.e. ::after).
:after (should be ::after)
:after (should be ::after)
:before (should be ::before)
:-WebKit-ScrollBar (should be ::-WebKit-ScrollBar)
""")
def testCssRgbIfNotGray(self):
self.VerifyContentsProducesOutput(
"""
#abc,
#aaa,
#aabbcc {
background: -webkit-linear-gradient(left, from(#abc), to(#def));
color: #bad;
color: #bada55;
}""", """
- Use rgb() over #hex when not a shade of gray (like #333).
background: -webkit-linear-gradient(left, from(#abc), to(#def)); """
"""(replace with rgb(170, 187, 204), rgb(221, 238, 255))
color: #bad; (replace with rgb(187, 170, 221))
color: #bada55; (replace with rgb(186, 218, 85))""")
def testPrefixedLogicalAxis(self):
self.VerifyContentsProducesOutput("""
.test {
-webkit-logical-height: 50%;
-webkit-logical-width: 50%;
-webkit-max-logical-height: 200px;
-webkit-max-logical-width: 200px;
-webkit-min-logical-height: 100px;
-webkit-min-logical-width: 100px;
}
""", """
- Unprefix logical axis property.
-webkit-logical-height: 50%; (replace with block-size)
-webkit-logical-width: 50%; (replace with inline-size)
-webkit-max-logical-height: 200px; (replace with max-block-size)
-webkit-max-logical-width: 200px; (replace with max-inline-size)
-webkit-min-logical-height: 100px; (replace with min-block-size)
-webkit-min-logical-width: 100px; (replace with min-inline-size)""")
def testPrefixedLogicalSide(self):
self.VerifyContentsProducesOutput("""
.test {
-webkit-border-after: 1px solid blue;
-webkit-border-after-color: green;
-webkit-border-after-style: dotted;
-webkit-border-after-width: 10px;
-webkit-border-before: 2px solid blue;
-webkit-border-before-color: green;
-webkit-border-before-style: dotted;
-webkit-border-before-width: 20px;
-webkit-border-end: 3px solid blue;
-webkit-border-end-color: green;
-webkit-border-end-style: dotted;
-webkit-border-end-width: 30px;
-webkit-border-start: 4px solid blue;
-webkit-border-start-color: green;
-webkit-border-start-style: dotted;
-webkit-border-start-width: 40px;
-webkit-margin-after: 1px;
-webkit-margin-after-collapse: discard;
-webkit-margin-before: 2px;
-webkit-margin-before-collapse: discard;
-webkit-margin-end: 3px;
-webkit-margin-end-collapse: discard;
-webkit-margin-start: 4px;
-webkit-margin-start-collapse: discard;
-webkit-padding-after: 1px;
-webkit-padding-before: 2px;
-webkit-padding-end: 3px;
-webkit-padding-start: 4px;
}
""", """
- Unprefix logical side property.
-webkit-border-after: 1px solid blue; (replace with border-block-end)
-webkit-border-after-color: green; (replace with border-block-end-color)
-webkit-border-after-style: dotted; (replace with border-block-end-style)
-webkit-border-after-width: 10px; (replace with border-block-end-width)
-webkit-border-before: 2px solid blue; (replace with border-block-start)
-webkit-border-before-color: green; (replace with border-block-start-color)
-webkit-border-before-style: dotted; (replace with border-block-start-style)
-webkit-border-before-width: 20px; (replace with border-block-start-width)
-webkit-border-end: 3px solid blue; (replace with border-inline-end)
-webkit-border-end-color: green; (replace with border-inline-end-color)
-webkit-border-end-style: dotted; (replace with border-inline-end-style)
-webkit-border-end-width: 30px; (replace with border-inline-end-width)
-webkit-border-start: 4px solid blue; (replace with border-inline-start)
-webkit-border-start-color: green; (replace with border-inline-start-color)
-webkit-border-start-style: dotted; (replace with border-inline-start-style)
-webkit-border-start-width: 40px; (replace with border-inline-start-width)
-webkit-margin-after: 1px; (replace with margin-block-end)
-webkit-margin-before: 2px; (replace with margin-block-start)
-webkit-margin-end: 3px; (replace with margin-inline-end)
-webkit-margin-start: 4px; (replace with margin-inline-start)
-webkit-padding-after: 1px; (replace with padding-block-end)
-webkit-padding-before: 2px; (replace with padding-block-start)
-webkit-padding-end: 3px; (replace with padding-inline-end)
-webkit-padding-start: 4px; (replace with padding-inline-start)""")
def testStartEndInsteadOfLeftRight(self):
self.VerifyContentsProducesOutput("""
.inline-node {
--var-is-ignored-left: 10px;
--var-is-ignored-right: 10px;
border-left-color: black;
border-right: 1px solid blue; /* csschecker-disable-line left-right */
margin-right: 5px;
padding-left: 10px; /* csschecker-disable-line some-other-thing */
text-align: right;
}""", """
- Use -start/end instead of -left/right (https://goo.gl/gQYY7z, add /* csschecker-disable-line left-right */ to suppress)
border-left-color: black; (replace with border-inline-start-color)
margin-right: 5px; (replace with margin-inline-end)
padding-left: 10px; (replace with padding-inline-start)
text-align: right; (replace with text-align: end)
""")
def testCssZeroWidthLengths(self):
self.VerifyContentsProducesOutput("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
}
/* http://crbug.com/359682 */
#spinner-container #spinner {
-webkit-animation-duration: 1.0s;
background-image: url(images/google_logo0.svg);
}
.media-button.play > .state0.active,
.media-button[state='0'] > .state0.normal /* blah */, /* blee */
.media-button[state='0']:not(.disabled):hover > .state0.hover {
-webkit-animation: anim 0s;
-webkit-animation-duration: anim 0ms;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
color: hsl(0, 0%, 85%); /* Shouldn't trigger error. */
opacity: .0;
opacity: 0.0;
opacity: 0.;
}
@page {
border-width: 0mm;
height: 0cm;
width: 0in;
}""", """
- Use "0" for zero-width lengths (i.e. 0px -> 0)
width: 0px;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
opacity: .0;
opacity: 0.0;
opacity: 0.;
border-width: 0mm;
height: 0cm;
width: 0in;
""")
def testInlineStyleInHtml(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<head>
<!-- Don't warn about problems outside of style tags
html,
body {
margin: 0;
height: 100%;
}
-->
<style>
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithIncludes(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style include="fake-shared-css other-shared-css">
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithTagsInComments(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style>
body {
/* You better ignore the <tag> in this comment! */
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testRemoveAtBlocks(self):
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
color: #000;
}
}"""), """
.magic {
color: #000;
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
--mixin-definition: {
color: red;
};
}
}"""), """
.magic {
--mixin-definition: {
color: red;
};
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@keyframes jiggle {
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }
}"""), """
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media print {
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}
}"""), """
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
@keyframe dark-fade {
0% { background: black; }
100% { background: darkgray; }
}
}"""), """
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
0% { background: black; }
100% { background: darkgray; }""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}"""), """
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}""")
if __name__ == '__main__':
unittest.main()
| true | true |
79000c7280a32674d40fe7e6ce459bdae278b0b6 | 172,325 | py | Python | ma.py | azmi155/mu | d6863d432e047b4e5c4a925172757b0febb81633 | [
"MIT"
] | null | null | null | ma.py | azmi155/mu | d6863d432e047b4e5c4a925172757b0febb81633 | [
"MIT"
] | null | null | null | ma.py | azmi155/mu | d6863d432e047b4e5c4a925172757b0febb81633 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#Chucky_Bot
# LINE self-bot startup (Python 2: `print` statements, reload(sys)).
# Logs three bot accounts into LINE via the LINETCR client library.
import LINETCR
from LINETCR.lib.curve.ttypes import *
from datetime import datetime
from bs4 import BeautifulSoup
from threading import Thread
from googletrans import Translator
from gtts import gTTS
import time,random,sys,json,codecs,threading,glob,urllib,urllib2,urllib3,re,ast,os,subprocess,requests,tempfile
# NOTE(review): the auth tokens below are hard-coded credentials committed to
# source control. They should be revoked/rotated and loaded from an
# environment variable or config file instead of being embedded here.
cl = LINETCR.LINE()
#cl.login(qr=True)
cl.login(token='EsOvPPzeFykCVG8OoGf0.hE4TS1Hheb46PcdMzZKaaa.rzBOrFqSAApZownyv2qBJWU3PWWbf9/oE6G+sSVzUTo=')
cl.loginResult()
print "Azmi 1-Login Success\n"
ki = LINETCR.LINE()
#ki.login(qr=True)
ki.login(token='EsTdk3fyUSbT7LJVwoEd.rLylacrPH39WJb0UIwB8Nq.GYYzsgzj7aHd7mzCSluc3162Uqrry6Jjwf/bFuq9Etw=')
ki.loginResult()
print "Ki-Login Success\n"
kk = LINETCR.LINE()
#kk.login(qr=True)
kk.login(token='EsNKJDaP0J7Pt7syTOW9.GgPTp3/FisKkVX1rJHeroq.hUG0VDbWHz8R7o80xI0Pvme8dBb3dSsmCnat0PRX+JM=')
kk.loginResult()
print "Kk-Login Success\n"
# Additional bot accounts (kc/kr/km) are disabled; placeholders only.
#kc = LINETCR.LINE()
#kc.login(qr=True)
#kc.login(token='TOKEN_KAMU_DISINI_BEIB')
#kc.loginResult()
#print "Kc-Login Success\n"
#kr = LINETCR.LINE()
#kr.login(qr=True)
#kr.login(token='TOKEN_KAMU_DISINI_BEIB')
#kr.loginResult()
#print "Kr-Login Success\n"
#km = LINETCR.LINE()
#km.login(qr=True)
#km.login(token='TOKEN_KAMU_DISINI_BEIB')
#km.loginResult()
print "Km-Login Success\n\n=====[Sukses All Login]====="
# Force UTF-8 as the default string encoding (Python 2 idiom).
reload(sys)
sys.setdefaultencoding('utf-8')
selfMessage ="""
╔═════════════════════════
║ ☆☞ S E L F ☜☆
╠═════════════════════════
╠➩〘Hi〙
╠➩〘Me〙
╠➩〘Mymid〙
╠➩〘Mid @〙
╠➩〘SearchID: (ID LINE)〙
╠➩〘Checkdate (DD/MM/YY)〙
╠➩〘Kalender〙
╠➩〘Steal contact〙
╠➩〘Pp @〙
╠➩〘Cover @〙
╠➩〘Auto like〙
╠➩〘Scbc Text〙
╠➩〘Cbc Text〙
╠➩〘Gbc Text〙
╠➩〘Getbio @〙
╠➩〘Getinfo @〙
╠➩〘Getname @〙
╠➩〘Getprofile @〙
╠➩〘Getcontact @〙
╠➩〘Getvid @〙
╠➩〘Friendlist〙
╠═════════════════════════
║ ☆☞ S E L F ☜☆
╚═════════════════════════
"""
botMessage ="""
╔═════════════════════════
║ ☆☞ B O T ☜☆
╠═════════════════════════
╠➩〘Absen〙
╠➩〘Respon〙
╠➩〘Runtime〙
╠➩〘Kapten copy @〙
╠➩〘TC1 copy @〙
╠➩〘TC2 copy @〙
╠➩〘TC3 copy @〙
╠➩〘TC4 copy @〙
╠➩〘Backup all〙
╠➩〘/bio Text〙
╠➩〘@bye (Usir Kapten)〙
╠➩〘Bye all (Usir Semua)〙
╠═════════════════════════
║ ☆☞ B O T ☜☆
╚═════════════════════════
"""
mediaMessage ="""
╔═════════════════════════
║ ☆☞ M E D I A ☜☆
╠═════════════════════════
╠➩〘Gift〙
╠➩〘Gift1 @ s/d Gift10 @〙
╠➩〘Giftbycontact〙
╠➩〘All gift〙
╠➩〘Gif gore〙
╠➩〘Google: (Text)〙
╠➩〘Playstore NamaApp〙
╠➩〘Fancytext: Text〙
╠➩〘/musik Judul-Penyanyi〙
╠➩〘/lirik Judul-Penyanyi〙
╠➩〘/musrik Judul-Penyanyi〙
╠➩〘/ig UrsnameInstagram〙
╠➩〘Checkig UrsnameInstagram〙
╠➩〘/apakah Text (Kerang Ajaib)〙
╠➩〘/kapan Text (Kerang Ajaib)〙
╠➩〘/hari Text (Kerang Ajaib)〙
╠➩〘/berapa Text (Kerang Ajaib)〙
╠➩〘/berapakah Text〙
╠➩〘Youtubelink: Judul Video〙
╠➩〘Youtubevideo: Judul Video〙
╠➩〘Youtubesearch: Judul Video〙
╠➩〘Image NamaGambar〙
╠➩〘Say-id Text〙
╠➩〘Say-en Text〙
╠➩〘Say-jp Text〙
╠➩〘Image NamaGambar〙
╠➩〘Tr-id Text (Translate En Ke ID〙
╠➩〘Tr-en Text (Translate ID Ke En〙
╠➩〘Tr-th Text (Translate ID Ke Th〙
╠➩〘Id@en Text (Translate ID Ke En〙
╠➩〘Id@th Text (Translate ID Ke TH〙
╠➩〘En@id Text (Translate En Ke ID〙
╠═════════════════════════
║ ☆☞ M E D I A ☜☆
╚═════════════════════════
"""
groupMessage ="""
╔═════════════════════════
║ ☆☞ G R O U P ☜☆
╠═════════════════════════
╠➩〘Welcome〙
╠➩〘Say welcome〙
╠➩〘Invite creator〙
╠➩〘Setview〙
╠➩〘Viewseen〙
╠➩〘Gn: (NamaGroup)〙
╠➩〘Tag all〙
╠➩〘Recover〙
╠➩〘Cancel〙
╠➩〘Cancelall〙
╠➩〘Gcreator〙
╠➩〘Ginfo〙
╠➩〘Gurl〙
╠➩〘List group〙
╠➩〘Pict group: (NamaGroup)〙
╠➩〘Spam: (Text)〙
╠➩〘Spam〙
╠➩〘Add all〙
╠➩〘Kick: (Mid)〙
╠➩〘Invite: (Mid)〙
╠➩〘Invite〙
╠➩〘Memlist〙
╠➩〘Getgroup image〙
╠➩〘Urlgroup Image〙
╠═════════════════════════
║ ☆☞ G R O U P ☜☆
╚═════════════════════════
"""
tjia="u71b6799e1c37868a871d442e67633182"
setMessage ="""
╔═════════════════════════
║ ☆☞ S E T ☜☆
╠═════════════════════════
╠➩〘Sambutan on/off〙
╠➩〘Url on/off〙
╠➩〘Alwaysread on/off〙
╠➩〘Sider on/off〙
╠➩〘Contact on/off〙
╠➩〘Simisimi on/off〙
╠═════════════════════════
║ ☆☞ S E T ☜☆
╚═════════════════════════
"""
creatorMessage ="""
╔═════════════════════════
║ ☆☞ C R E A T O R ☜☆
╠═════════════════════════
╠➩〘Admin add @〙
╠➩〘Admin remove @〙
╠➩〘/cnkapten〙
╠➩〘/cntc1〙
╠➩〘/cntc2〙
╠➩〘/cntc3〙
╠➩〘/cntc4〙
╠➩〘Crash〙
╠➩〘Kickall〙
╠➩〘Bc: (Text)〙
╠➩〘Nk: @〙
╠➩〘Ulti @〙
╠➩〘Join group: (NamaGroup〙
╠➩〘Leave group: (NamaGroup〙
╠➩〘Leave all group〙
╠➩〘Bot restart〙
╠➩〘Turn off〙
╠═════════════════════════
║ ☆☞ C R E A T O R ☜☆
╚═════════════════════════
"""
adminMessage ="""
╔═════════════════════════
║ ☆☞ A D M I N ☜☆
╠═════════════════════════
╠➩〘Admin list〙
╠➩〘Ban〙
╠➩〘Unban〙
╠➩〘Ban @〙
╠➩〘Unban @〙
╠➩〘Ban list〙
╠➩〘Clear ban〙
╠➩〘Kill〙
╠➩〘Kick @〙
╠➩〘Set member: (Jumblah)〙
╠➩〘Ban group: (NamaGroup〙
╠➩〘Del ban: (NamaGroup〙
╠➩〘List ban〙
╠➩〘Kill ban〙
╠➩〘Glist〙
╠➩〘Glistmid〙
╠➩〘Details group: (Gid)〙
╠➩〘Cancel invite: (Gid)〙
╠➩〘Invitemeto: (Gid)〙
╠➩〘Kapten acc invite〙
╠➩〘TC1 acc invite〙
╠➩〘TC2 acc invite〙
╠➩〘TC3 acc invite〙
╠➩〘TC4 acc invite〙
╠➩〘Removechat〙
╠➩〘Join on/off〙
╠➩〘Joincancel on/off〙
╠➩〘Respon on/off〙
╠➩〘Responkick on/off〙
╠➩〘Leave on/off〙
╠➩〘All join / (TC1/2/3/4 Join)〙
╠═════════════════════════
║ ☆☞ A D M I N ☜☆
╚═════════════════════════
"""
helpMessage ="""
╔═════════════════════════
║ ☆☞ H E L P ☜☆
╠═════════════════════════
╠➩〘Help protect〙
╠➩〘Help self〙
╠➩〘Help bot〙
╠➩〘Help group〙
╠➩〘Help set〙
╠➩〘Help media〙
╠➩〘Help admin〙
╠➩〘Help creator〙
╠➩〘Owner〙
╠➩〘Pap owner〙
╠➩〘Admin〙
╠➩〘Speed〙
╠➩〘Speed test〙
╠➩〘Status〙
╠═════════════════════════
║ ☆☞ H E L P ☜☆
╚═════════════════════════
"""
protectMessage ="""
╔═════════════════════════
║ ☆☞ P R O T E C T ☜☆
╠═════════════════════════
╠➩〘Allprotect on/off〙
╠➩〘Autocancel on/off〙
╠➩〘Qr on/off〙
╠➩〘Autokick on/off〙
╠➩〘Ghost on/off〙
╠➩〘Invitepro on/off〙
╠═════════════════════════
║ ☆☞ P R O T E C T ☜☆
╚═════════════════════════
"""
# Handles for the three logged-in clients plus identity bookkeeping.
KAC=[cl,ki,kk]
mid = cl.getProfile().mid
Amid = ki.getProfile().mid
Bmid = kk.getProfile().mid
Bots=[mid,Amid,Bmid]
# LINE mid of the bot owner; used for permission checks on commands.
Creator=["u71b6799e1c37868a871d442e67633182"]
admin=["u71b6799e1c37868a871d442e67633182"]
# Snapshot each account's profile so it can be restored later
# (e.g. after a "copy @user" command changes name/status/picture).
contact = cl.getProfile()
backup1 = cl.getProfile()
backup1.displayName = contact.displayName
backup1.statusMessage = contact.statusMessage
backup1.pictureStatus = contact.pictureStatus
contact = ki.getProfile()
backup2 = ki.getProfile()
backup2.displayName = contact.displayName
backup2.statusMessage = contact.statusMessage
backup2.pictureStatus = contact.pictureStatus
contact = kk.getProfile()
backup3 = kk.getProfile()
backup3.displayName = contact.displayName
backup3.statusMessage = contact.statusMessage
backup3.pictureStatus = contact.pictureStatus
responsename = cl.getProfile().displayName
responsename2 = ki.getProfile().displayName
responsename3 = kk.getProfile().displayName
# Global mutable bot state: feature toggles, protection flags, per-group
# dicts and canned reply strings. Mutated throughout the command handlers.
wait = {
"LeaveRoom":True,
"AutoJoin":False,
"AutoJoinCancel":True,
"memberscancel":0,
"Members":1,
"AutoCancel":{},
"AutoCancelon":False,
"joinkick":False,
"AutoKick":{},
"AutoKickon":False,
'pap':{},
'invite':{},
'steal':{},
'gift':{},
'likeOn':{},
'Leave':{},
'detectMention':True,
'kickMention':False,
'timeline':True,
"Timeline":True,
"comment1":"Kenapa Kak?",
"comment2":"Wkwkwk \(○^ω^○)/",
"comment3":"Lucu Banget!!! ヘ(^_^)ヘ",
"comment4":"Nice Kak (^_^)",
"comment5":"Bot Auto Like ©By : Azmi\nContact Me : 👉 line.me/ti/p/~a_ulul15",
"commentOn":True,
"commentBlack":{},
"message":"Thx For Add Me (^_^)\nInvite Me To Your Group ヘ(^_^)ヘ",
"blacklist":{},
"wblacklist":False,
"dblacklist":False,
"Qr":{},
"Qron":False,
"Contact":False,
"Sambutan":True,
"Ghost":False,
"inviteprotect":False,
"alwaysRead":False,
"Sider":{},
"Simi":{},
"lang":"JP",
"BlGroup":{}
}
# Per-group simi-simi (auto-chat) toggle.
settings = {
"simiSimi":{}
}
# "Sider" (silent reader) detection bookkeeping, keyed by group id.
cctv = {
"cyduk":{},
"point":{},
"sidermem":{}
}
# Read-point tracking used by the seen/read commands.
wait2 = {
"readPoint":{},
"readMember":{},
"setTime":{},
"ROM":{}
}
setTime = {}
setTime = wait2['setTime']
# Bot start timestamp, used to report uptime ("runtime" command).
mulai = time.time()
def download_page(url):
    """Fetch *url* and return the response body as a string.

    Sends a browser-like User-Agent so sites that block default Python
    agents still respond.  Uses ``urllib.request`` on Python 3 and
    ``urllib2`` on Python 2.

    Returns the page content; on failure returns ``None`` (Py3 branch
    prints the error) or the string ``"Page Not found"`` (Py2 branch),
    mirroring the original error behaviour.
    """
    version = (3, 0)
    cur_version = sys.version_info
    if cur_version >= version:
        # BUGFIX: the original had ``import urllib,request`` and
        # ``urllib,request.Request(...)`` — a bogus two-module import and a
        # tuple expression.  The correct Python 3 module is urllib.request.
        import urllib.request
        try:
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
            req = urllib.request.Request(url, headers=headers)
            resp = urllib.request.urlopen(req)
            respData = str(resp.read())
            return respData
        except Exception as e:
            print(str(e))
    else:
        import urllib2
        try:
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
            req = urllib2.Request(url, headers=headers)
            response = urllib2.urlopen(req)
            page = response.read()
            return page
        except:
            return "Page Not found"
def _images_get_next_item(s):
start_line = s.find('rg_di')
if start_line == -1:
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('"class="rg_meta"')
start_content = s.find('"ou"',start_line+90)
end_content = s.find(',"ow"',start_content-90)
content_raw = str(s[start_content+6:end_content-1])
return content_raw, end_content
def _images_get_all_items(page):
    """Collect every image URL found in *page* (a Google Images HTML dump)."""
    collected = []
    while True:
        url, offset = _images_get_next_item(page)
        if url == "no_links":
            # Sentinel from the scanner: no further results on this page.
            return collected
        collected.append(url)
        time.sleep(0.1)  # brief pause between scans, as in the original
        page = page[offset:]  # continue scanning past the consumed result
def waktu(secs):
    """Format a duration in seconds as '%02d Jam %02d Menit %02d Detik'."""
    hours, remainder = divmod(secs, 3600)
    minutes, seconds = divmod(remainder, 60)
    return '%02d Jam %02d Menit %02d Detik' % (hours, minutes, seconds)
def cms(string, commands):# /XXX, >XXX, ;XXX, ^XXX, %XXX, $XXX...
    """Return True when *string* exactly matches one of *commands*.

    The caller supplies the fully-prefixed command variants in *commands*.

    BUGFIX: the original wrapped this scan in an outer loop over a 13-entry
    prefix list whose loop variable (``texX``) was never used, repeating
    the identical membership check 13 times; a single scan is equivalent.
    """
    for command in commands:
        if string == command:
            return True
    return False
def upload_tempimage(client):
    """Upload the module-level ``image_path`` through *client*.

    Relies on the globals ``album`` and ``image_path`` being set elsewhere
    in the file; returns the uploaded image object from the client library.
    """
    upload_config = {
        'album': album,
        'name': 'bot auto upload',
        'title': 'bot auto upload',
        'description': 'bot auto upload'
    }
    print("Uploading image... ")
    uploaded = client.upload_from_path(image_path, config=upload_config, anon=False)
    print("Done")
    print()
    return uploaded
def sendAudio(self, to_, path):
    """Send an empty audio-type (contentType 3) message to *to_*.

    BUGFIX: the original also opened *path* into a ``files`` dict that was
    never used or closed, leaking the file handle without uploading
    anything; that dead code is removed.  A complete implementation that
    actually uploads the file is defined later in this file and shadows
    this one.
    """
    M = Message()
    M.text = None
    M.to = to_
    M.contentMetadata = None
    M.contentPreview = None
    M.contentType = 3
    # Dispatch the placeholder message; the returned id was never used.
    self._client.sendMessage(0, M)
def sendMessage(to, text, contentMetadata={}, contentType=0):
    """Build a Message addressed to *to* and bump its per-target counter.

    NOTE(review): the constructed ``mes`` is never dispatched here — only
    the global ``messageReq`` counter is updated; confirm whether a send
    call was dropped from this helper.
    """
    mes = Message()
    mes.to = to
    mes.from_ = profile.mid
    mes.text = text
    mes.contentType = contentType
    mes.contentMetadata = contentMetadata
    # First message to a target starts the counter at 0 (-1 + 1).
    messageReq[to] = messageReq.get(to, -1) + 1
def sendImage(self, to_, path):
    """Send the image file at *path* to *to_*.

    Sends an empty contentType-1 (image) message first to obtain a message
    id, then POSTs the file to LINE's object store (obs-sg) under that id.

    Raises:
        Exception: if the upload endpoint does not answer HTTP 201.
    """
    M = Message(to=to_, text=None, contentType = 1)
    M.contentMetadata = None
    M.contentPreview = None
    M2 = self._client.sendMessage(0,M)
    M_id = M2.id
    # NOTE(review): the file is opened twice (once for upload, once just to
    # measure its size) and neither handle is explicitly closed.
    files = {
        'file': open(path, 'rb'),
    }
    params = {
        'name': 'media',
        'oid': M_id,
        'size': len(open(path, 'rb').read()),
        'type': 'image',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    r = self.post_content('https://obs-sg.line-apps.com/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload image failure.')
    return True
def sendImageWithURL(self, to_, url):
    """Download the image at *url* to a temp file and send it via sendImage.

    BUGFIX: the temp file must be opened in binary mode ('wb'); the
    original used text mode 'w', which raises TypeError on Python 3 when
    raw bytes are written and corrupts binary data on Windows Python 2.

    Raises:
        Exception: if the HTTP download does not return 200, or if both
            send attempts fail (the send is retried once, as before).
    """
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), randint(0, 9))
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        # Stream the body straight to disk without buffering it in memory.
        with open(path, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    else:
        raise Exception('Download image failure.')
    try:
        self.sendImage(to_, path)
    except:
        try:
            self.sendImage(to_, path)
        except Exception as e:
            raise e
def sendAudio(self, to_, path):
    """Send the audio file at *path* to *to_*.

    Sends an empty contentType-3 (audio) message first to obtain a message
    id, then POSTs the file to LINE's object store under that id.  This
    later definition shadows the incomplete sendAudio defined earlier in
    the file.

    Raises:
        Exception: if the upload endpoint does not answer HTTP 201.
    """
    M = Message()
    M.text = None
    M.to = to_
    M.contentMetadata = None
    M.contentPreview = None
    M.contentType = 3
    M_id = self._client.sendMessage(0,M).id
    # NOTE(review): the file is opened twice (once for upload, once just to
    # measure its size) and neither handle is explicitly closed.
    files = {
        'file': open(path, 'rb'),
    }
    params = {
        'name': 'media',
        'oid': M_id,
        'size': len(open(path, 'rb').read()),
        'type': 'audio',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    r = self.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload audio failure.')
    return True
def sendAudioWithURL(self, to_, url):
    """Download the audio at *url* and send it with sendAudio.

    Any failure during the send is re-wrapped in a plain Exception,
    matching the original behaviour.
    """
    local_path = self.downloadFileWithURL(url)
    try:
        self.sendAudio(to_, local_path)
    except Exception as e:
        raise Exception(e)
def sendAudioWithUrl(self, to_, url):
    """Stream the audio at *url* to a temp file and send it with sendAudio.

    BUGFIX (two defects in the original):
      * the temp-file name used the malformed format spec '%1.data', which
        the %-operator parses as the int conversion '%1.d', producing names
        like 'pythonLine-5ata'; '%i.data' restores the intended suffix.
      * the file was opened in text mode 'w'; binary audio requires 'wb'.

    Raises:
        Exception: if the HTTP download does not return 200, or the send
            fails.
    """
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), randint(0, 9))
    r = requests.get(url, stream=True, verify=False)
    if r.status_code == 200:
        with open(path, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    else:
        raise Exception('Download audio failure.')
    try:
        self.sendAudio(to_, path)
    except Exception as e:
        raise e
def downloadFileWithURL(self, fileUrl):
    """Fetch *fileUrl* via get_content and save the body to a temp file.

    Returns the path of the saved file; raises on a non-200 response.
    """
    target = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), randint(0, 9))
    r = self.get_content(fileUrl)
    if r.status_code != 200:
        raise Exception('Download file failure.')
    with open(target, 'wb') as out:
        shutil.copyfileobj(r.raw, out)
    return target
def restart_program():
    """Replace the current process with a fresh run of this script.

    os.execl never returns: the running interpreter image is swapped for a
    new one launched with the same argv.
    """
    interpreter = sys.executable
    os.execl(interpreter, interpreter, *sys.argv)
def bot(op):
try:
if op.type == 0:
return
if op.type == 5:
if wait["autoAdd"] == True:
cl.findAndAddContactsByMid(op.param1)
if(wait["message"]in[""," ","\n",None]):
pass
else:
cl.sendText(op.param1,str(wait["message"]))
if op.type == 55:
try:
group_id = op.param1
user_id=op.param2
subprocess.Popen('echo "'+ user_id+'|'+str(op.createdTime)+'" >> dataSeen/%s.txt' % group_id, shell=True, stdout=subprocess.PIPE, )
except Exception as e:
print e
if op.type == 55:
try:
if cctv['cyduk'][op.param1]==True:
if op.param1 in cctv['point']:
Name = cl.getContact(op.param2).displayName
Name = ki.getContact(op.param2).displayName
Name = kk.getContact(op.param2).displayName
Name = kc.getContact(op.param2).displayName
Name = kr.getContact(op.param2).displayName
if Name in cctv['sidermem'][op.param1]:
pass
else:
cctv['sidermem'][op.param1] += "\n• " + Name
if " " in Name:
nick = Name.split(' ')
if len(nick) == 2:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + nick[0] + " ☜" + "\nNgintip Aja Niih. . .\nChat Kek Idiih (-__-) ")
else:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + nick[1] + " ☜" + "\nBetah Banget Jadi Penonton. . .\nChat Napa (-__-) ")
else:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + Name + " ☜" + "\nNgapain Kak Ngintip Aja???\nSini Gabung Chat... ")
else:
pass
else:
pass
except:
pass
else:
pass
if op.type == 22:
cl.leaveRoom(op.param1)
if op.type == 21:
cl.leaveRoom(op.param1)
if op.type == 13:
print op.param3
if op.param3 in mid:
if op.param2 in Creator:
cl.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Creator:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Creator:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Creator:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Creator:
kr.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Amid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Bmid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Cmid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in mid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Bmid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Cmid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in mid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Amid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Cmid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in mid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Amid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Cmid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in mid:
kr.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Amid:
kr.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Bmid:
kr.acceptGroupInvitation(op.param1)
if mid in op.param3:
if wait["AutoJoinCancel"] == True:
G = cl.getGroup(op.param1)
if len(G.members) <= wait["memberscancel"]:
cl.acceptGroupInvitation(op.param1)
cl.sendText(op.param1,"Maaf " + cl.getContact(op.param2).displayName + "\nMember Kurang Dari 30 Orang\nUntuk Info, Silahkan Chat Owner Kami!")
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':tjia}
cl.sendMessage(c)
cl.leaveGroup(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ti = cl.reissueGroupTicket(op.param1)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.sendText(op.param1,"☆Ketik ☞Help☜ Untuk Bantuan☆\n☆Harap Gunakan Dengan Bijak ^_^ ☆")
if mid in op.param3:
if wait["AutoJoin"] == True:
G = cl.getGroup(op.param1)
if len(G.members) <= wait["Members"]:
cl.rejectGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ti = cl.reissueGroupTicket(op.param1)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.sendText(op.param1,"☆Ketik ☞Help☜ Untuk Bantuan☆\n☆Harap Gunakan Dengan Bijak ^_^ ☆")
else:
if wait["AutoCancel"][op.param1] == True:
if op.param3 in admin:
pass
else:
cl.cancelGroupInvitation(op.param1, [op.param3])
else:
if op.param3 in wait["blacklist"]:
cl.cancelGroupInvitation(op.param1, [op.param3])
cl.sendText(op.param1, "Blacklist Detected")
else:
pass
if op.type == 19:
if wait["AutoKick"][op.param1] == True:
try:
if op.param3 in Creator:
if op.param3 in admin:
if op.param3 in Bots:
pass
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
try:
if op.param2 not in Creator:
if op.param2 not in admin:
if op.param2 not in Bots:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
else:
pass
if mid in op.param3:
if op.param2 in Creator:
if op.param2 in Bots:
pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ti = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
X = cl.getGroup(op.param1)
X.preventJoinByTicket = True
cl.updateGroup(X)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Amid in op.param3:
if op.param2 in Bots:
pass
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kc.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = kk.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = kk.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = ki.getGroup(op.param1)
G.preventJoinByTicket = True
ki.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Bmid in op.param3:
if op.param2 in Bots:
pass
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = kc.getGroup(op.param1)
X.preventJoinByTicket = False
kc.updateGroup(X)
Ti = kc.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kk.getGroup(op.param1)
G.preventJoinByTicket = True
kk.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Cmid in op.param3:
if op.param2 in Bots:
pass
try:
cl.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = cl.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kc.getGroup(op.param1)
G.preventJoinByTicket = True
kc.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Dmid in op.param3:
if op.param2 in Bots:
pass
try:
cl.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = cl.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kc.getGroup(op.param1)
G.preventJoinByTicket = True
kc.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Creator in op.param3:
if admin in op.param3:
if op.param2 in Bots:
pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param2 not in Bots:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if op.type == 11:
if wait["Qr"][op.param1] == True:
if op.param2 not in Bots:
if op.param2 not in admin:
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).updateGroup(G)
if op.type == 17:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
ginfo = cl.getGroup(op.param1)
contact = cl.getContact(op.param2)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
cl.sendMessage(c)
cl.sendText(op.param1,"Hallo " + cl.getContact(op.param2).displayName + "\nWelcome To ☞ " + str(ginfo.name) + " ☜" + "\nBudayakan Cek Note\nDan Semoga Betah Disini ^_^")
cl.sendImageWithURL(op.param1,image)
print "MEMBER JOIN TO GROUP"
if op.type == 17:
if wait["joinkick"] == True:
if op.param2 in admin:
if op.param2 in Bots:
return
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
print "MEMBER JOIN KICK TO GROUP"
if op.type == 15:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
cl.sendText(op.param1,"Good Bye " + cl.getContact(op.param2).displayName + "\nSee You Next Time . . . (p′︵‵。) 🤗")
random.choice(KAC).inviteIntoGroup(op.param1,[op.param2])
print "MEMBER HAS LEFT THE GROUP"
if op.type == 13:
if op.param2 not in Creator:
if op.param2 not in admin:
if op.param2 not in Bots:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.type == 19:
if wait["Ghost"] == True:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
try:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
km.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.01)
km.kickoutFromGroup(op.param1,[op.param2])
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
km.sendMessage(c)
km.leaveGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
except:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
km.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.01)
km.kickoutFromGroup(op.param1,[op.param2])
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
km.sendMessage(c)
km.leaveGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
if op.type == 26:
msg = op.message
if wait["alwaysRead"] == True:
if msg.toType == 0:
cl.sendChatChecked(msg.from_,msg.id)
else:
cl.sendChatChecked(msg.to,msg.id)
if msg.contentType == 16:
if wait['likeOn'] == True:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1005)
ki.like(url[25:58], url[66:], likeType=1002)
kk.like(url[25:58], url[66:], likeType=1004)
kc.like(url[25:58], url[66:], likeType=1003)
kr.like(url[25:58], url[66:], likeType=1001)
cl.comment(url[25:58], url[66:], wait["comment1"])
ki.comment(url[25:58], url[66:], wait["comment2"])
kk.comment(url[25:58], url[66:], wait["comment3"])
kc.comment(url[25:58], url[66:], wait["comment4"])
kr.comment(url[25:58], url[66:], wait["comment5"])
cl.sendText(msg.to,"Like Success")
wait['likeOn'] = False
if op.type == 26:
msg = op.message
if msg.to in settings["simiSimi"]:
if settings["simiSimi"][msg.to] == True:
if msg.text is not None:
text = msg.text
r = requests.get("http://api.ntcorp.us/chatbot/v1/?text=" + text.replace(" ","+") + "&key=beta1.nt")
data = r.text
data = json.loads(data)
if data['status'] == 200:
if data['result']['result'] == 100:
cl.sendText(msg.to,data['result']['response'].encode('utf-8'))
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["kickMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Aku Bilang Jangan Ngetag Lagi " + cName + "\nAku Kick Kamu! Sorry, Byee!!!"]
ret_ = random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in admin:
cl.sendText(msg.to,ret_)
random.choice(KAC).kickoutFromGroup(msg.to,[msg.from_])
break
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
random.choice(KAC).kickoutFromGroup(msg.to,[msg.from_])
break
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["detectMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Sekali lagi nge tag gw sumpahin jomblo seumur hidup!","Dont Tag!! Lagi Sibuk",cName + " Ngapain Ngetag?",cName + " Nggak Usah Tag-Tag! Kalo Penting Langsung Pc Aja","Tag Mulu Lo Anjirr!","Dia Lagi Off", cName + " Kenapa Tag? Kangen?","Dia Lagi Tidur\nJangan Di Tag " + cName, "Jangan Suka Tag Gua " + cName, "Kamu Siapa " + cName + "?", "Ada Perlu Apa " + cName + "?","Woii " + cName + " Jangan Ngetag, Riibut!"]
ret_ = random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in admin:
cl.sendText(msg.to,ret_)
break
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
break
if msg.contentType == 13:
if wait["wblacklist"] == True:
if msg.contentMetadata["mid"] not in admin:
if msg.contentMetadata["mid"] in wait["blacklist"]:
random.choice(KAC).sendText(msg.to,"Sudah")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
random.choice(KAC).sendText(msg.to,"Ditambahkan")
else:
cl.sendText(msg.to,"Admin Detected~")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
random.choice(KAC).sendText(msg.to,"Terhapus")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
random.choice(KAC).sendText(msg.to,"Tidak Ada Black List")
elif wait["Contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"Nama:\n" + msg.contentMetadata["displayName"] + "\n\nMid:\n" + msg.contentMetadata["mid"] + "\n\nStatus:\n" + contact.statusMessage + "\n\nPhoto Profile:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nPhoto Cover:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"Nama:\n" + msg.contentMetadata["displayName"] + "\n\nMid:\n" + msg.contentMetadata["mid"] + "\n\nStatus:\n" + contact.statusMessage + "\n\nPhoto Profile:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nPhoto Cover:\n" + str(cu))
elif msg.text == "Ginfo":
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
gCreator = ginfo.creator.displayName
except:
gCreator = "Error"
if wait["lang"] == "JP":
if ginfo.invitee is None:
sinvitee = "0"
else:
sinvitee = str(len(ginfo.invitee))
if ginfo.preventJoinByTicket == True:
u = "close"
else:
u = "open"
cl.sendText(msg.to,"[Group name]\n" + str(ginfo.name) + "\n\n[Gid]\n" + msg.to + "\n\n[Group creator]\n" + gCreator + "\n\n[Profile status]\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus + "\n\nMembers:" + str(len(ginfo.members)) + "members\nPending:" + sinvitee + "people\nURL:" + u + "it is inside")
else:
cl.sendText(msg.to,"[group name]\n" + str(ginfo.name) + "\n[gid]\n" + msg.to + "\n[group creator]\n" + gCreator + "\n[profile status]\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text is None:
return
elif msg.text in ["Creator","Owner"]:
msg.contentType = 13
msg.contentMetadata = {'mid': tjia}
cl.sendMessage(msg)
cl.sendText(msg.to,"Itu tukang tikungnya(^_^)")
elif msg.text in ["Admin","admin"]:
msg.contentType = 13
admin1 = "u71b6799e1c37868a871d442e67633182"
admin2 = "u46560b002469877f708c1d2e8966fc9d"
admin3 = "u1dee2db35847101e3aa420e667390000"
msg.contentMetadata = {'mid': tjia}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin1}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin2}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin3}
random.choice(KAC).sendMessage(msg)
random.choice(KAC).sendText(msg.to,"Itu Admin Kami (^_^)")
elif "Admin add @" in msg.text:
if msg.from_ in Creator:
print "[Command]Admin add executing"
_name = msg.text.replace("Admin add @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
random.choice(KAC).sendText(msg.to,"Contact Tidak Di Temukan")
else:
for target in targets:
try:
admin.append(target)
cl.sendText(msg.to,"Admin Chucky Ditambahkan")
except:
pass
print "[Command]Admin add executed"
else:
cl.sendText(msg.to,"Command Denied.")
cl.sendText(msg.to,"Creator Permission Required.")
elif "Admin remove @" in msg.text:
if msg.from_ in Creator:
print "[Command]Admin Remove Executing"
_name = msg.text.replace("Admin remove @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
random.choice(KAC).sendText(msg.to,"Contact Tidak Di Temukan")
else:
for target in targets:
try:
admin.remove(target)
cl.sendText(msg.to,"Admin Chucky Dihapus")
except:
pass
print "[Command]Admin remove executed"
else:
cl.sendText(msg.to,"Command Denied.")
cl.sendText(msg.to,"Creator Permission Required.")
elif msg.text in ["Admin list","admin list","List admin"]:
if admin == []:
cl.sendText(msg.to,"The Admin List Is Empty")
else:
cl.sendText(msg.to,"Tunggu...")
mc = "╔═════════════════════════\n║ ☆☞ ADMIN CHUCKY ☜☆\n╠═════════════════════════\n"
for mi_d in admin:
mc += "╠••> " +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc + "╚═════════════════════════")
print "[Command]Admin List executed"
elif msg.text in ["Group creator","Gcreator","gcreator"]:
ginfo = cl.getGroup(msg.to)
gCreator = ginfo.creator.mid
msg.contentType = 13
msg.contentMetadata = {'mid': gCreator}
cl.sendMessage(msg)
cl.sendText(msg.to,"Itu Yang Buat Grup Ini")
elif msg.contentType == 16:
if wait["Timeline"] == True:
msg.contentType = 0
msg.text = "post URL\n" + msg.contentMetadata["postEndUrl"]
random.choice(KAC).sendText(msg.to,msg.text)
if msg.contentType == 13:
if wait["steal"] == True:
_name = msg.contentMetadata["displayName"]
copy = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
print "[Target] Stealed"
break
else:
targets.append(copy)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + msg.contentMetadata["mid"] + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
wait["steal"] = False
break
except:
pass
if msg.contentType == 13:
if wait["gift"] == True:
_name = msg.contentMetadata["displayName"]
copy = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
print "[Target] Gift"
break
else:
targets.append(copy)
if targets == []:
pass
else:
for target in targets:
try:
cl.sendText(msg.to,"Gift Sudah Terkirim!")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1296261'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
wait['gift'] = False
break
except:
msg.contentMetadata = {'mid': target}
wait["gift"] = False
break
if msg.contentType == 13:
if wait['invite'] == True:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
groups = ki.getGroup(msg.to)
groups = kk.getGroup(msg.to)
groups = kc.getGroup(msg.to)
groups = kr.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
random.choice(KAC).sendText(msg.to, _name + " Berada DiGrup Ini")
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
ki.findAndAddContactsByMid(target)
kk.findAndAddContactsByMid(target)
kc.findAndAddContactsByMid(target)
kr.findAndAddContactsByMid(target)
random.choice(KAC).inviteIntoGroup(msg.to,[target])
random.choice(KAC).sendText(msg.to,"Invite " + _name)
wait['invite'] = False
break
except:
random.choice(KAC).sendText(msg.to,"Limit Invite")
wait['invite'] = False
break
elif msg.text in ["Key creator","help creator","Help creator"]:
cl.sendText(msg.to,creatorMessage)
elif msg.text in ["Key group","help group","Help group"]:
cl.sendText(msg.to,groupMessage)
elif msg.text in ["Key","help","Help"]:
cl.sendText(msg.to,helpMessage)
elif msg.text in ["Key self","help self","Help self"]:
cl.sendText(msg.to,selfMessage)
elif msg.text in ["Key bot","help bot","Help bot"]:
cl.sendText(msg.to,botMessage)
elif msg.text in ["Key set","help set","Help set"]:
cl.sendText(msg.to,setMessage)
elif msg.text in ["Key media","help media","Help media"]:
cl.sendText(msg.to,mediaMessage)
elif msg.text in ["Key admin","help admin","Help admin"]:
cl.sendText(msg.to,adminMessage)
elif msg.text in ["Key protect","help protect","Help protect"]:
cl.sendText(msg.to,protectMessage)
elif msg.text in ["List group"]:
gid = cl.getGroupIdsJoined()
h = ""
jml = 0
for i in gid:
gn = cl.getGroup(i).name
h += "♦【%s】\n" % (gn)
jml += 1
cl.sendText(msg.to,"=======[List Group]=======\n"+ h +"\nTotal Group: "+str(jml))
elif "Ban group: " in msg.text:
grp = msg.text.replace("Ban group: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in admin:
for i in gid:
h = cl.getGroup(i).name
if h == grp:
wait["BlGroup"][i]=True
cl.sendText(msg.to, "Success Ban Group : "+grp)
else:
pass
else:
cl.sendText(msg.to, "Only Admin")
elif msg.text in ["List ban","List ban group"]:
if msg.from_ in admin:
if wait["BlGroup"] == {}:
random.choice(KAC).sendText(msg.to,"Tidak Ada")
else:
mc = ""
for gid in wait["BlGroup"]:
mc += "-> " +cl.getGroup(gid).name + "\n"
random.choice(KAC).sendText(msg.to,"===[Ban Group]===\n"+mc)
else:
cl.sendText(msg.to, "Khusus Admin")
elif msg.text in ["Del ban: "]:
if msg.from_ in admin:
ng = msg.text.replace("Del ban: ","")
for gid in wait["BlGroup"]:
if cl.getGroup(gid).name == ng:
del wait["BlGroup"][gid]
cl.sendText(msg.to, "Success del ban "+ng)
else:
pass
else:
cl.sendText(msg.to, "Only Admin")
elif "Join group: " in msg.text:
ng = msg.text.replace("Join group: ","")
gid = cl.getGroupIdsJoined()
gid = ki.getGroupIdsJoined()
gid = kk.getGroupIdsJoined()
gid = kc.getGroupIdsJoined()
gid = kr.getGroupIdsJoined()
try:
if msg.from_ in Creator:
for i in gid:
h = cl.getGroup(i).name
h = ki.getGroup(i).name
h = kk.getGroup(i).name
h = kc.getGroup(i).name
h = kr.getGroup(i).name
if h == ng:
random.choice(KAC).inviteIntoGroup(i,[Creator])
cl.sendText(msg.to,"Success Join To ["+ h +"] Group")
else:
pass
else:
cl.sendText(msg.to,"Only Admin")
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Leave group: " in msg.text:
ng = msg.text.replace("Leave group: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
h = cl.getGroup(i).name
if h == ng:
cl.sendText(i,"Bot Di Paksa Keluar Oleh Owner!")
cl.leaveGroup(i)
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
kr.leaveGroup(i)
cl.sendText(msg.to,"Success Left ["+ h +"] group")
else:
pass
else:
cl.sendText(msg.to,"Only Admin")
elif "Leave all group" == msg.text:
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
cl.sendText(i,"Bot Di Paksa Keluar Oleh Owner!")
cl.leaveGroup(i)
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
kr.leaveGroup(i)
cl.sendText(msg.to,"Success Leave All Group")
else:
cl.sendText(msg.to,"Only Admin")
elif "Pict group: " in msg.text:
saya = msg.text.replace('Pict group: ','')
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
gna = cl.getGroup(i)
if h == saya:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif msg.text in ["cancelall","Cancelall"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
if X.invitee is not None:
gInviMids = [contact.mid for contact in X.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
cl.sendText(msg.to,"Tidak Ada Yang Pending")
else:
cl.sendText(msg.to,"Tidak Bisa Digunakan Diluar Group")
elif msg.text in ["Ourl","Url on"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
cl.sendText(msg.to,"Url Sudah Aktif")
else:
cl.sendText(msg.to,"Can not be used outside the group")
elif msg.text in ["Curl","Url off"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = True
cl.updateGroup(X)
cl.sendText(msg.to,"Url Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Can not be used outside the group")
elif msg.text in ["Join on","Autojoin on"]:
if msg.from_ in admin:
wait["AutoJoin"] = True
wait["AutoJoinCancel"] = False
cl.sendText(msg.to,"Auto Join Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Join off","Autojoin off"]:
if msg.from_ in admin:
wait["AutoJoin"] = False
cl.sendText(msg.to,"Auto Join Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joincancel on","Autojoincancel on"]:
if msg.from_ in admin:
wait["AutoJoinCancel"] = True
wait["AutoJoin"] = False
cl.sendText(msg.to,"Auto Join Cancel Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joincancel off","Autojoincancel off"]:
if msg.from_ in admin:
wait["AutoJoinCancel"] = False
cl.sendText(msg.to,"Auto Join Cancel Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Respon on"]:
if msg.from_ in admin:
wait["detectMention"] = True
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Respon Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Respon off"]:
if msg.from_ in admin:
wait["detectMention"] = False
cl.sendText(msg.to,"Auto Respon Sudah Off")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Responkick on"]:
if msg.from_ in admin:
wait["kickMention"] = True
wait["detectMention"] = False
cl.sendText(msg.to,"Auto Respon Kick Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Responkick off"]:
if msg.from_ in admin:
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Respon Kick Sudah Off")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Leave on"]:
if msg.from_ in admin:
wait["Leave"] = True
cl.sendText(msg.to,"Leave Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autocancel on"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = True
wait["AutoCancelon"] = True
cl.sendText(msg.to,"Auto Cancel Sudah Aktif")
print wait["AutoCancel"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autocancel off"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = False
wait["AutoCancelon"] = False
cl.sendText(msg.to,"Auto Cancel Sudah Di Nonaktifkan")
print wait["AutoCancel"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joinkick on"]:
if msg.from_ in admin:
wait["joinkick"] = True
wait["Sambutan"] = False
cl.sendText(msg.to,"Join Kick Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joinkick off"]:
if msg.from_ in admin:
wait["joinkick"] = False
cl.sendText(msg.to,"Join Kick Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Invitepro on","Inviteprotect on"]:
if msg.from_ in admin:
wait["inviteprotect"] = True
cl.sendText(msg.to,"Invite Protect Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Invitepro off","Inviteprotect off"]:
if msg.from_ in admin:
wait["inviteprotect"] = False
cl.sendText(msg.to,"Invite Protect Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif "Qr on" in msg.text:
if msg.from_ in admin:
wait["Qr"][msg.to] = True
wait["Qron"] = True
cl.sendText(msg.to,"QR Protect Sudah Aktif")
print wait["Qr"]
else:
cl.sendText(msg.to,"Only Admin")
elif "Qr off" in msg.text:
if msg.from_ in admin:
wait["Qr"][msg.to] = False
wait["Qron"] = False
cl.sendText(msg.to,"Qr Protect Sudah Di Nonaktifkan")
print wait["Qr"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autokick on"]:
if msg.from_ in admin:
wait["AutoKick"][msg.to] = True
wait["AutoKickon"] = True
cl.sendText(msg.to,"Auto Kick Sudah Aktif")
print wait["AutoKick"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autokick off"]:
if msg.from_ in admin:
wait["AutoKick"][msg.to] = False
wait["AutoKickon"] = False
cl.sendText(msg.to,"Auto Kick Sudah Di Nonaktifkan")
print wait["AutoKick"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Ghost on"]:
if msg.from_ in admin:
wait["Ghost"] = True
cl.sendText(msg.to,"Ghost Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Ghost off"]:
if msg.from_ in admin:
wait["Ghost"] = False
cl.sendText(msg.to,"Ghost Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Allprotect on"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = True
wait["AutoCancelon"] = True
wait["inviteprotect"] = True
wait["joinkick"] = True
wait["AutoKick"][msg.to] = True
wait["AutoKickon"] = True
wait["Qr"][msg.to] = True
wait["Qron"] = True
wait["Ghost"] = True
cl.sendText(msg.to,"All Protect Sudah Aktif Semua")
print wait["AutoCancel"]
print wait["AutoKick"]
print wait["Qr"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Allprotect off"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = False
wait["AutoCancelon"] = False
wait["inviteprotect"] = False
wait["joinkick"] = False
wait["AutoKick"][msg.to] = False
wait["AutoKickon"] = False
wait["Qr"][msg.to] = False
wait["Qron"] = False
wait["Ghost"] = False
cl.sendText(msg.to,"All Protect Sudah Di Nonaktifkan Semua")
print wait["AutoCancel"]
print wait["AutoKick"]
print wait["Qr"]
else:
#else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["K on","Contact on"]:
wait["Contact"] = True
cl.sendText(msg.to,"Contact Sudah Aktif")
elif msg.text in ["K off","Contact off"]:
wait["Contact"] = False
cl.sendText(msg.to,"Contact Sudah Di Nonaktifkan")
elif msg.text in ["Alwaysread on"]:
wait["alwaysRead"] = True
cl.sendText(msg.to,"Always Read Sudah Aktif")
elif msg.text in ["Alwaysread off"]:
wait["alwaysRead"] = False
cl.sendText(msg.to,"Always Read Sudah Di Nonaktifkan")
elif msg.text in ["Sambutan on"]:
if wait["Sambutan"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sambutan Di Aktifkanヾ(*´∀`*)ノ")
else:
wait["Sambutan"] = True
wait["joinkick"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah Onヽ(´▽`)/")
elif msg.text in ["Sambutan off"]:
if wait["Sambutan"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sambutan Di Nonaktifkan( ^∇^)")
else:
wait["Sambutan"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah Off(p′︵‵。)")
elif "Sider on" in msg.text:
try:
del cctv['point'][msg.to]
del cctv['sidermem'][msg.to]
del cctv['cyduk'][msg.to]
except:
pass
cctv['point'][msg.to] = msg.id
cctv['sidermem'][msg.to] = ""
cctv['cyduk'][msg.to]=True
wait["Sider"] = True
cl.sendText(msg.to,"Siap On Cek Sider")
elif "Sider off" in msg.text:
if msg.to in cctv['point']:
cctv['cyduk'][msg.to]=False
wait["Sider"] = False
cl.sendText(msg.to, "Cek Sider Off")
else:
cl.sendText(msg.to, "Heh Belom Di Set")
elif msg.text in ["Status"]:
md = ""
if wait["Sambutan"] == True: md+="╠➩✔️ Sambutan : On\n"
else:md+="╠➩❌ Sambutan : Off\n"
if wait["joinkick"] == True: md+="╠➩✔️ Join Kick : On\n"
else:md+="╠➩❌ Join Kick : Off\n"
if wait["AutoJoin"] == True: md+="╠➩✔️ Auto Join : On\n"
else: md +="╠➩❌ Auto Join : Off\n"
if wait["AutoJoinCancel"] == True: md+="╠➩✔️ Auto Join Cancel : On\n"
else: md +="╠➩❌ Auto Join Cancel : Off\n"
if wait["Leave"] == True: md+="╠➩✔️ Leave : On\n"
else: md +="╠➩❌ Leave : Off\n"
if wait["Contact"] == True: md+="╠➩✔️ Info Contact : On\n"
else: md+="╠➩❌ Info Contact : Off\n"
if wait["AutoCancelon"] == True:md+="╠➩✔️ Auto Cancel : On\n"
else: md+= "╠➩❌ Auto Cancel : Off\n"
if wait["inviteprotect"] == True:md+="╠➩✔️ Invite Protect : On\n"
else: md+= "╠➩❌ Invite Protect : Off\n"
if wait["Qron"] == True: md+="╠➩✔️ Qr Protect : On\n"
else:md+="╠➩❌ Qr Protect : Off\n"
if wait["AutoKickon"] == True: md+="╠➩✔️ Auto Kick : On\n"
else:md+="╠➩❌ Auto Kick : Off\n"
if wait["Ghost"] == True: md+="╠➩✔️ Ghost : On\n"
else:md+="╠➩❌ Ghost : Off\n"
if wait["alwaysRead"] == True: md+="╠➩✔️ Always Read : On\n"
else:md+="╠➩❌ Always Read: Off\n"
if wait["detectMention"] == True: md+="╠➩✔️ Auto Respon : On\n"
else:md+="╠➩❌ Auto Respon : Off\n"
if wait["kickMention"] == True: md+="╠➩✔️ Auto Respon Kick : On\n"
else:md+="╠➩❌ Auto Respon Kick : Off\n"
if wait["Sider"] == True: md+="╠➩✔️ Auto Sider : On\n"
else:md+="╠➩❌ Auto Sider: Off\n"
if wait["Simi"] == True: md+="╠➩✔️ Simisimi : On\n"
else:md+="╠➩❌ Simisimi: Off\n"
cl.sendText(msg.to,"╔═════════════════════════\n""║ ☆☞ S T A T U S ☜☆\n""╠═════════════════════════\n"+md+"╚═════════════════════════")
elif msg.text in ["Gift","gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["All gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
ki.sendMessage(msg)
kk.sendMessage(msg)
kc.sendMessage(msg)
elif msg.text in ["TC1 Gift","TC1 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '696d7046-843b-4ed0-8aac-3113ed6c0733',
'PRDTYPE': 'THEME',
'MSGTPL': '6'}
msg.text = None
ki.sendMessage(msg)
elif msg.text in ["TC2 Gift","TC2 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '8fe8cdab-96f3-4f84-95f1-6d731f0e273e',
'PRDTYPE': 'THEME',
'MSGTPL': '7'}
msg.text = None
kk.sendMessage(msg)
elif msg.text in ["TC3 Gift","TC3 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'ae3d9165-fab2-4e70-859b-c14a9d4137c4',
'PRDTYPE': 'THEME',
'MSGTPL': '8'}
msg.text = None
kc.sendMessage(msg)
elif "Gift1 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift1 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1380280'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift2 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift2 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '2',
'STKPKGID': '1360738'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift3 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift3 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '3',
'STKPKGID': '1395389'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift4 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift4 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '4',
'STKPKGID': '1329191'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift5 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift5 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '9057'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift6 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift6 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '2',
'STKPKGID': '9167'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift7 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift7 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '3',
'STKPKGID': '7334'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift8 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift8 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1380280'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift9 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift9 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '4',
'STKPKGID': '1405277'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift10 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift10 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1296261'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif msg.text.lower() in ["wkwkwk","wkwk","hahaha","haha"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '100',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["hehehe","hehe"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '10',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["galau"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '9',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["you","kau","kamu"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '7',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["marah","hadeuh","hadeh"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '6',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["please","pliss","mohon","tolong"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '4',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["haa","haaa","kaget"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '3',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["lucu","ngakak","lol"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '110',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["hmm","hmmm"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '101',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["tidur"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '1',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["gemes"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '2',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["cantik","imut"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '5',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["nyanyi","lalala"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '11',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["gugup"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '8',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["ok","oke","okay","oce","okee","sip","siph"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '13',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["mantab","mantap","nice","keren"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '14',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["ngejek"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '15',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["nangis","sedih"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '16',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["woi","kampret"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '102',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["huft"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '104',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["Tagall","Tag all"]:
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
cb = ""
cb2 = ""
strt = int(0)
akh = int(0)
for md in nama:
akh = akh + int(6)
cb += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(md)+"},"""
strt = strt + int(7)
akh = akh + 1
cb2 += "@nrik \n"
cb = (cb[:int(len(cb)-1)])
msg.contentType = 0
msg.text = cb2
msg.contentMetadata ={'MENTION':'{"MENTIONEES":['+cb+']}','EMTVER':'4'}
try:
cl.sendMessage(msg)
except Exception as error:
print error
elif msg.text in ["Setview","Setpoint","Cctv"]:
subprocess.Popen("echo '' > dataSeen/"+msg.to+".txt", shell=True, stdout=subprocess.PIPE)
cl.sendText(msg.to, "☆Checkpoint Checked☆")
print "Setview"
elif msg.text in ["Viewseen","Check","Ciduk","Cyduk"]:
lurkGroup = ""
dataResult, timeSeen, contacts, userList, timelist, recheckData = [], [], [], [], [], []
with open('dataSeen/'+msg.to+'.txt','r') as rr:
contactArr = rr.readlines()
for v in xrange(len(contactArr) -1,0,-1):
num = re.sub(r'\n', "", contactArr[v])
contacts.append(num)
pass
contacts = list(set(contacts))
for z in range(len(contacts)):
arg = contacts[z].split('|')
userList.append(arg[0])
timelist.append(arg[1])
uL = list(set(userList))
for ll in range(len(uL)):
try:
getIndexUser = userList.index(uL[ll])
timeSeen.append(time.strftime("%H:%M:%S", time.localtime(int(timelist[getIndexUser]) / 1000)))
recheckData.append(userList[getIndexUser])
except IndexError:
conName.append('nones')
pass
contactId = cl.getContacts(recheckData)
for v in range(len(recheckData)):
dataResult.append(contactId[v].displayName + ' ('+timeSeen[v]+')')
pass
if len(dataResult) > 0:
tukang = "╔═════════════════════════\n║ ☆☞ LIST VIEWERS ☜☆\n╠═════════════════════════\n╠➩"
grp = '\n╠➩ '.join(str(f) for f in dataResult)
total = '\n╠═════════════════════════\n╠➩ Total %i Viewers (%s)' % (len(dataResult), datetime.now().strftime('%H:%M:%S')) + "\n╚═════════════════════════"
cl.sendText(msg.to, "%s %s %s" % (tukang, grp, total))
subprocess.Popen("echo '' > dataSeen/"+msg.to+".txt", shell=True, stdout=subprocess.PIPE)
cl.sendText(msg.to, "☆Auto Checkpoint☆")
else:
cl.sendText(msg.to, "☆Belum Ada Viewers☆")
print "Viewseen"
elif "Kick " in msg.text:
if msg.from_ in admin:
if 'MENTION' in msg.contentMetadata.keys()!= None:
names = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
print mentionees
for mention in mentionees:
ki.kickoutFromGroup(msg.to,[mention['M']])
elif "Set member: " in msg.text:
if msg.from_ in admin:
jml = msg.text.replace("Set member: ","")
wait["memberscancel"] = int(jml)
cl.sendText(msg.to, "Jumlah minimal member telah di set : "+jml)
elif "Add all" in msg.text:
thisgroup = cl.getGroups([msg.to])
Mids = [contact.mid for contact in thisgroup[0].members]
mi_d = Mids[:33]
cl.findAndAddContactsByMids(mi_d)
cl.sendText(msg.to,"Success Add all")
elif msg.text in ["Invite"]:
wait["invite"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Auto like"]:
wait["likeOn"] = True
cl.sendText(msg.to,"Shere Post Kamu Yang Mau Di Like!")
elif msg.text in ["Steal contact"]:
wait["steal"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Giftbycontact"]:
wait["gift"] = True
cl.sendText(msg.to,"Send Contact")
elif "Recover" in msg.text:
thisgroup = cl.getGroups([msg.to])
Mids = [contact.mid for contact in thisgroup[0].members]
mi_d = Mids[:33]
cl.createGroup("Recover", mi_d)
cl.sendText(msg.to,"Success recover")
elif ("Gn: " in msg.text):
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.name = msg.text.replace("Gn: ","")
cl.updateGroup(X)
else:
cl.sendText(msg.to,"It can't be used besides the group.")
elif "Kick: " in msg.text:
midd = msg.text.replace("Kick: ","")
kicker = [ki,kk,kc]
if midd not in admin:
random.choice(kicker).kickoutFromGroup(msg.to,[midd])
else:
cl.sendText(msg.to,"Admin Detected")
elif "Invite: " in msg.text:
midd = msg.text.replace("Invite: ","")
cl.findAndAddContactsByMid(midd)
ki.findAndAddContactsByMid(midd)
kk.findAndAddContactsByMid(midd)
kc.findAndAddContactsByMid(midd)
kr.findAndAddContactsByMid(midd)
random.choice(KAC).inviteIntoGroup(msg.to,[midd])
elif "Invite creator" in msg.text:
midd = "u71b6799e1c37868a871d442e67633182"
random.choice(KAC).inviteIntoGroup(msg.to,[midd])
elif msg.text in ["Welcome","welcome","Welkam","welkam","Wc","wc"]:
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"Selamat Datang Di "+ gs.name)
msg.contentType = 7
msg.contentMetadata={'STKID': '247',
'STKPKGID': '3',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif "Bc: " in msg.text:
bc = msg.text.replace("Bc: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
cl.sendText(i,"=======[BROADCAST]=======\n\n"+bc+"\n\nContact Me : line.me/ti/p/~a_ulul15")
cl.sendText(msg.to,"Success BC BosQ")
else:
cl.sendText(msg.to,"Khusus Admin")
elif msg.text in ["Cancel"]:
gid = cl.getGroupIdsInvited()
for i in gid:
cl.rejectGroupInvitation(i)
cl.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC1 Cancel"]:
gid = ki.getGroupIdsInvited()
for i in gid:
ki.rejectGroupInvitation(i)
ki.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC2 Cancel"]:
gid = kk.getGroupIdsInvited()
for i in gid:
kk.rejectGroupInvitation(i)
kk.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC3 Cancel"]:
gid = kc.getGroupIdsInvited()
for i in gid:
kc.rejectGroupInvitation(i)
kc.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["Gurl"]:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can't be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["All join","Join all"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kk.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kr.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
G.preventJoinByTicket(G)
ki.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC1 join"]:
if msg.from_ in admin:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ti)
G = kk.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC2 join"]:
if msg.from_ in admin:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
kk.acceptGroupInvitationByTicket(msg.to,Ti)
G = ki.getGroup(msg.to)
G.preventJoinByTicket = True
kk.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC3 join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
kc.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC4 join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kr.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
kr.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["Ghost join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
km.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
km.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["timeline"]:
try:
url = cl.activity(limit=5)
cl.sendText(msg.to,url['result']['posts'][0]['postInfo']['postId'])
except Exception as E:
print E
elif msg.text in ["Bye all"]:
if wait["Leave"] == True:
ki.leaveGroup(msg.to)
kk.leaveGroup(msg.to)
kc.leaveGroup(msg.to)
kr.leaveGroup(msg.to)
else:
cl.sendText(msg.to,"Leavenya Belum On")
elif msg.text in ["@bye","@Bye"]:
if wait["Leave"] == True:
cl.leaveGroup(msg.to)
wait["Leave"] = False
else:
cl.sendText(msg.to,"Bilang Dulu Sama Admin Ku")
elif msg.text in ["Absen"]:
cl.sendText(msg.to,"Pasukan Absen!!")
ki.sendText(msg.to,"TC1 Hadiir \(ˆ▿ˆ)/")
kk.sendText(msg.to,"TC2 Hadiir \(ˆ▿ˆ)/")
kc.sendText(msg.to,"TC3 Hadiir \(ˆ▿ˆ)/")
kr.sendText(msg.to,"Hadiir Semua Kapten \(ˆ▿ˆ)/")
elif msg.text.lower() in ["respon"]:
cl.sendText(msg.to,responsename)
ki.sendText(msg.to,responsename2)
kk.sendText(msg.to,responsename3)
kc.sendText(msg.to,responsename4)
kr.sendText(msg.to,responsename5)
elif msg.text in ["Sp","Speed","speed"]:
start = time.time()
print("Speed")
elapsed_time = time.time() - start
cl.sendText(msg.to, "Tunggu Bentaar BOS....")
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
elif msg.text in ["Speed test"]:
start = time.time()
cl.sendText(msg.to, "Tunggu Bentaar BOS......")
elapsed_time = time.time() - start
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
elif "Nk: " in msg.text:
if msg.from_ in Creator:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
kr.acceptGroupInvitationByTicket(msg.to,Ti)
G = kk.getGroup(msg.to)
G.preventJoinByTicket = True
kk.updateGroup(G)
nk0 = msg.text.replace("Nk: ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
targets = []
for s in X.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
if target not in admin:
kr.kickoutFromGroup(msg.to,[target])
kr.leaveGroup(msg.to)
ki.sendText(msg.to,"Succes BosQ")
kk.sendText(msg.to,"Pakyu~")
else:
cl.sendText(msg.to,"Admin Detected")
else:
cl.sendText(msg.to,"Lu sape!")
elif msg.text in ["Ban"]:
if msg.from_ in admin:
wait["wblacklist"] = True
ki.sendText(msg.to,"send contact")
elif msg.text in ["Unban"]:
if msg.from_ in admin:
wait["dblacklist"] = True
ki.sendText(msg.to,"send contact")
elif "Ban @" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "@Ban by mention"
_name = msg.text.replace("Ban @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kc.sendText(msg.to,"Not found")
else:
for target in targets:
if target not in admin:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
ki.sendText(msg.to,"Succes BosQ")
except:
ki.sendText(msg.to,"Error")
else:
cl.sendText(msg.to,"Admin Detected~")
elif msg.text in ["Banlist","Ban list"]:
if msg.from_ in admin:
if wait["blacklist"] == {}:
random.choice(KAC).sendText(msg.to,"Tidak Ada")
else:
mc = ""
for mi_d in wait["blacklist"]:
mc += "->" +cl.getContact(mi_d).displayName + "\n"
random.choice(KAC).sendText(msg.to,"===[Blacklist User]===\n"+mc)
elif "Unban @" in msg.text:
if msg.toType == 2:
print "@Unban by mention"
if msg.from_ in admin:
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kk.sendText(msg.to,"Not found")
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
ki.sendText(msg.to,"Succes BosQ")
except:
ki.sendText(msg.to,"Succes BosQ")
elif msg.text.lower() == 'clear ban':
if msg.from_ in admin:
wait["blacklist"] = {}
cl.sendText(msg.to,"ヽ( ^ω^)ノ└ ❉Unbanned All Success❉ ┐")
elif msg.text.lower() in ["sayang","chucky"]:
ki.sendText(msg.to,"Apa Sayang :*")
elif msg.text in ["Kill ban"]:
if msg.from_ in admin:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
ki.sendText(msg.to,"There was no blacklist user")
return
for jj in matched_list:
random.choice(KAC).kickoutFromGroup(msg.to,[jj])
ki.sendText(msg.to,"Blacklist emang pantas tuk di usir")
else:
cl.sendText(msg.to, "Khusus creator")
elif msg.text in ["Kill"]:
if msg.toType == 2:
if msg.from_ in admin:
group = ki.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
kk.sendText(msg.to,"Fuck You")
kc.sendText(msg.to,"Fuck You")
return
for jj in matched_list:
try:
klist=[ki,kk,kc]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
elif "Kickall" == msg.text:
if msg.from_ in Creator:
if msg.toType == 2:
print "Kick all member"
_name = msg.text.replace("Kickall","")
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
ki.sendText(msg.to,"Sampai jumpaa~")
kc.sendText(msg.to,"Dadaaah~")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
ki.sendText(msg.to,"Not found.")
else:
for target in targets:
if target not in admin:
try:
klist=[ki,kk,kc]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except Exception as e:
cl.sendText(msg.to,str(e))
cl.inviteIntoGroup(msg.to, targets)
elif msg.text in ["Bot restart","Reboot"]:
if msg.from_ in Creator:
cl.sendText(msg.to, "Bot Has Been Restarted...")
restart_program()
print "@Restart"
else:
cl.sendText(msg.to, "No Access")
elif msg.text in ["Turn off"]:
if msg.from_ in Creator:
try:
import sys
sys.exit()
except:
pass
elif 'Crash' in msg.text:
if msg.from_ in Creator:
msg.contentType = 13
msg.contentMetadata = {'mid': "NADYA,'"}
cl.sendMessage(msg)
elif "Kapten copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("Kapten copy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
cl.CloneContactProfile(target)
cl.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC1 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC1 copy @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
ki.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
ki.CloneContactProfile(target)
ki.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC2 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC2 copy @","")
_nametarget = _name.rstrip(' ')
gs = kk.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kk.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kk.CloneContactProfile(target)
kk.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC3 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC3 copy @","")
_nametarget = _name.rstrip(' ')
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kc.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kc.CloneContactProfile(target)
kc.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC4 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC4 copy @","")
_nametarget = _name.rstrip(' ')
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kr.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kr.CloneContactProfile(target)
kr.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif msg.text in ["Backup all"]:
try:
ki.updateDisplayPicture(backup2.pictureStatus)
ki.updateProfile(backup2)
kk.updateDisplayPicture(backup3.pictureStatus)
kk.updateProfile(backup3)
kc.updateDisplayPicture(backup4.pictureStatus)
kc.updateProfile(backup4)
kr.updateDisplayPicture(backup5.pictureStatus)
kr.updateProfile(backup5)
cl.updateDisplayPicture(backup1.pictureStatus)
cl.updateProfile(backup1)
cl.sendText(msg.to, "All Done (^_^)")
except Exception as e:
cl.sendText(msg.to, str(e))
elif "/musik " in msg.text:
songname = msg.text.replace("/musik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4])
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Selamat Mendengarkan Lagu " + song[0])
elif '/lirik ' in msg.text.lower():
try:
songname = msg.text.lower().replace('/lirik ','')
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "/musrik " in msg.text:
songname = msg.text.replace("/musrik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4] +"\n\n" + hasil)
cl.sendText(msg.to, "Selamat Mendengarkan Lagu " + song[0])
elif "Fancytext: " in msg.text:
txt = msg.text.replace("Fancytext: ", "")
cl.kedapkedip(msg.to,txt)
print "[Command] Kedapkedip"
elif "cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif msg.text.lower() in ["van","yog","wan","gong","tep","pap creator"]:
link = ["http://dl.profile.line-cdn.net/0hbPvoismJPX9LFhHY8ztCKHdTMxI8OCw3JXclGzwRYBpgci99fyV2GzwUY01icXl5J3EnHjxBakxj"]
pilih = random.choice(link)
ki.sendImageWithURL(msg.to,pilih)
elif msg.text.lower() in ["van","yog","wan","gong","tep","pap owner","pap creator"]:
link = ["http://dl.profile.line-cdn.net/0hbPvoismJPX9LFhHY8ztCKHdTMxI8OCw3JXclGzwRYBpgci99fyV2GzwUY01icXl5J3EnHjxBakxj"]
pilih = random.choice(link)
ki.sendImageWithURL(msg.to,pilih)
elif "Spam: " in msg.text:
bctxt = msg.text.replace("Spam: ", "")
t = 10
while(t):
random.choice(KAC).sendText(msg.to, (bctxt))
t-=1
elif "Scbc " in msg.text:
bctxt = msg.text.replace("Scbc ", "")
orang = cl.getAllContactIds()
t = 20
for manusia in orang:
while(t):
cl.sendText(manusia, (bctxt))
t-=1
elif "Cbc " in msg.text:
broadcasttxt = msg.text.replace("Cbc ", "")
orang = cl.getAllContactIds()
for manusia in orang:
cl.sendText(manusia, (broadcasttxt))
elif '/ig ' in msg.text.lower():
try:
instagram = msg.text.lower().replace("/ig ","")
html = requests.get('https://www.instagram.com/' + instagram + '/?')
soup = BeautifulSoup(html.text, 'html.parser')
data = soup.find_all('meta', attrs={'property':'og:description'})
text = data[0].get('content').split()
data1 = soup.find_all('meta', attrs={'property':'og:image'})
text1 = data1[0].get('content').split()
nadya = text1[0].replace("s150x150/","")
user = "Name: " + text[-2] + "\n"
user1 = "Username: " + text[-1] + "\n"
followers = "Followers: " + text[0] + "\n"
following = "Following: " + text[2] + "\n"
post = "Post: " + text[4] + "\n"
link = "Link: " + "https://www.instagram.com/" + instagram
detail = "========INSTAGRAM INFO ========\n"
details = "\n========INSTAGRAM INFO ========"
cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
cl.sendImageWithURL(msg.to, nadya)
except Exception as njer:
cl.sendText(msg.to, str(njer))
elif "Checkig " in msg.text:
separate = msg.text.split(" ")
user = msg.text.replace(separate[0] + " ","")
if user.startswith("@"):
user = user.replace("@","")
profile = "https://www.instagram.com/" + user
with requests.session() as x:
x.headers['user-agent'] = 'Mozilla/5.0'
end_cursor = ''
for count in range(1, 999):
print('PAGE: ', count)
r = x.get(profile, params={'max_id': end_cursor})
data = re.search(r'window._sharedData = (\{.+?});</script>', r.text).group(1)
j = json.loads(data)
for node in j['entry_data']['ProfilePage'][0]['user']['media']['nodes']:
if node['is_video']:
page = 'https://www.instagram.com/p/' + node['code']
r = x.get(page)
url = re.search(r'"video_url": "([^"]+)"', r.text).group(1)
print(url)
cl.sendVideoWithURL(msg.to,url)
else:
print (node['display_src'])
cl.sendImageWithURL(msg.to,node['display_src'])
end_cursor = re.search(r'"end_cursor": "([^"]+)"', r.text).group(1)
elif 'Youtubelink: ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtube ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to,"Could not find it")
elif 'Youtubevideo: ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtubevideo: ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class': 'yt-uix-tile-link'})
cl.sendVideoWithURL(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to, "Could not find it")
elif "Say-id " in msg.text:
say = msg.text.replace("Say-id ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-en " in msg.text:
say = msg.text.replace("Say-en ","")
lang = 'en'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-jp " in msg.text:
say = msg.text.replace("Say-jp ","")
lang = 'ja'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say welcome" in msg.text:
gs = cl.getGroup(msg.to)
say = msg.text.replace("Say welcome","Selamat Datang Di "+ gs.name)
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif msg.text.lower() in ["hi","hai","halo","hallo"]:
beb = "Hi Sayang 😘 " +cl.getContact(msg.from_).displayName + " starry heart"
kr.sendText(msg.to,beb)
elif "playstore " in msg.text.lower():
tob = msg.text.lower().replace("playstore ","")
cl.sendText(msg.to,"Sedang Mencari...")
cl.sendText(msg.to,"Title : "+tob+"\nSource : Google Play\nLink : https://play.google.com/store/search?q=" + tob)
cl.sendText(msg.to,"Tuh Linknya Kak (^_^)")
elif "Mid @" in msg.text:
_name = msg.text.replace("Mid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
random.choice(KAC).sendText(msg.to, g.mid)
else:
pass
elif "/bio " in msg.text:
string = msg.text.replace("/bio ","")
if len(string.decode('utf-8')) <= 500:
profile = cl.getProfile()
profile.statusMessage = string
cl.updateProfile(profile)
ki.updateProfile(profile)
kk.updateProfile(profile)
kc.updateProfile(profile)
kr.updateProfile(profile)
cl.sendText(msg.to,"All Done")
elif "/cnkapten" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cnkapten","Mi Kapten")
if len(string.decode('utf-8')) <= 5000:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Done")
elif "/cntc1" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc1","Mi TC1")
if len(string.decode('utf-8')) <= 5000:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
ki.sendText(msg.to,"Done")
elif "/cntc2" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc2","Mi TC2")
if len(string.decode('utf-8')) <= 5000:
profile = kk.getProfile()
profile.displayName = string
kk.updateProfile(profile)
kk.sendText(msg.to,"Done")
elif "/cntc3" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc3","Mi TC3")
if len(string.decode('utf-8')) <= 5000:
profile = kc.getProfile()
profile.displayName = string
kc.updateProfile(profile)
kc.sendText(msg.to,"Done")
elif "/cntc4" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc4","Mi TC4")
if len(string.decode('utf-8')) <= 5000:
profile = cl.getProfile()
profile.displayName = string
kr.updateProfile(profile)
kr.sendText(msg.to,"Done")
elif "Ulti " in msg.text:
if msg.from_ in Creator:
ulti0 = msg.text.replace("Ulti ","")
ulti1 = ulti0.rstrip()
ulti2 = ulti1.replace("@","")
ulti3 = ulti2.rstrip()
_name = ulti3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
km.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets ==[]:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
km.kickoutFromGroup(msg.to,[target])
km.leaveGroup(msg.to)
print (msg.to,[g.mid])
except:
km.sendText(msg.t,"Ter ELIMINASI....")
km.sendText(msg.to,"WOLES brooo....!!!")
km.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
elif msg.text.lower() in ["mymid","myid"]:
middd = "Name : " +cl.getContact(msg.from_).displayName + "\nMid : " +msg.from_
kr.sendText(msg.to,middd)
elif msg.text.lower() in ["me"]:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.from_}
cl.sendMessage(msg)
elif "/apakah " in msg.text:
apk = msg.text.replace("/apakah ","")
rnd = ["Ya","Tidak","Bisa Jadi","Mungkin"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/hari " in msg.text:
apk = msg.text.replace("/hari ","")
rnd = ["Senin","Selasa","Rabu","Kamis","Jumat","Sabtu","Minggu"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/berapa " in msg.text:
apk = msg.text.replace("/berapa ","")
rnd = ['10%','20%','30%','40%','50%','60%','70%','80%','90%','100%','0%']
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/berapakah " in msg.text:
apk = msg.text.replace("/berapakah ","")
rnd = ['1','2','3','4','5','6','7','8','9','10','Tidak Ada']
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/kapan " in msg.text:
apk = msg.text.replace("/kapan ","")
rnd = ["kapan kapan","besok","satu abad lagi","Hari ini","Tahun depan","Minggu depan","Bulan depan","Sebentar lagi","Tidak Akan Pernah"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif msg.text in ["Simisimi on","Simisimi:on"]:
settings["simiSimi"][msg.to] = True
wait["Simi"] = True
cl.sendText(msg.to," Simisimi Di Aktifkan")
elif msg.text in ["Simisimi off","Simisimi:off"]:
settings["simiSimi"][msg.to] = False
wait["Simi"] = False
cl.sendText(msg.to,"Simisimi Di Nonaktifkan")
elif "Image " in msg.text:
search = msg.text.replace("Image ","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithURL(msg.to,path)
except:
pass
elif "Youtubesearch: " in msg.text:
query = msg.text.replace("Youtube ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html.parser')
hasil = ""
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
hasil += ''.join((a['title'],'\nUrl : http://www.youtube.com' + a['href'],'\n\n'))
cl.sendText(msg.to,hasil)
print '[Command] Youtube Search'
elif "Tr-id " in msg.text:
isi = msg.text.replace("Tr-id ","")
translator = Translator()
hasil = translator.translate(isi, dest='id')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-en " in msg.text:
isi = msg.text.replace("Tr-en ","")
translator = Translator()
hasil = translator.translate(isi, dest='en')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-th " in msg.text:
isi = msg.text.replace("Tr-th ","")
translator = Translator()
hasil = translator.translate(isi, dest='th')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Id@en" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'en'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Indonesia----\n" + "" + kata + "\n\n----Ke Inggris----\n" + "" + result)
elif "En@id" in msg.text:
bahasa_awal = 'en'
bahasa_tujuan = 'id'
kata = msg.text.replace("En@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Inggris----\n" + "" + kata + "\n\n----Ke Indonesia----\n" + "" + result)
elif "Id@th" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'th'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Indonesia----\n" + "" + kata + "\n\n----Ke Thailand----\n" + "" + result)
elif "Th@id" in msg.text:
bahasa_awal = 'th'
bahasa_tujuan = 'id'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Thailand----\n" + "" + kata + "\n\n----Ke Indonesia----\n" + "" + result)
elif msg.text in ["Friendlist"]:
contactlist = cl.getAllContactIds()
kontak = cl.getContacts(contactlist)
num=1
msgs="═════════List Friend═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Friend═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Memlist"]:
kontak = cl.getGroup(msg.to)
group = kontak.members
num=1
msgs="═════════List Member═�����═══════-"
for ids in group:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Member═════════\n\nTotal Members : %i" % len(group)
cl.sendText(msg.to, msgs)
elif msg.text in ["Spam"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Aku belum mandi")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"Tapi masih cantik juga")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"apalagi kalau sudah mandi")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Pasti cantik sekali")
cl.sendText(msg.to,"yiha")
ki.sendText(msg.to,"Kalau orang lain melihatku")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"Badak aku taba bana")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tuntuang")
cl.sendText(msg.to,"Tapi kalau langsuang diidu")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Atagfirullah baunya")
cl.sendText(msg.to,"Males lanjutin ah")
ki.sendText(msg.to,"Sepi bat")
kk.sendText(msg.to,"Iya sepi udah udah")
cl.sendText(msg.to,"Gaada yang denger juga kita nyanyi")
ki.sendText(msg.to,"Nah")
kk.sendText(msg.to,"Mending gua makan dulu")
cl.sendText(msg.to,"Siyap")
ki.sendText(msg.to,"Okeh")
kk.sendText(msg.to,"Katanya owner kita Jomblo ya")
cl.sendText(msg.to,"Iya emang")
ki.sendText(msg.to,"Denger denger si lagi nyari pacar doi")
kk.sendText(msg.to,"Udah ah gosip mulu doain aja biar dapet")
elif "Getvid @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Getvid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendVideoWithURL(msg.to, path)
except Exception as e:
raise e
print "[Command]dp executed"
elif "Getgroup image" in msg.text:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendImageWithURL(msg.to,path)
elif "Urlgroup image" in msg.text:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendText(msg.to,path)
elif "Getname" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
except:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
elif "Getprofile" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
except:
pass
elif "Getcontact" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mmid = cl.getContact(key1)
msg.contentType = 13
msg.contentMetadata = {"mid": key1}
cl.sendMessage(msg)
elif "Getinfo" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nHeader :\n" + str(cu))
except:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\n" + str(cu))
elif "Getbio" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
except:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
elif msg.text.lower() == 'runtime':
eltime = time.time() - mulai
van = "Bot Sudah Berjalan Selama :\n"+waktu(eltime)
cl.sendText(msg.to,van)
elif "Checkdate " in msg.text:
tanggal = msg.text.replace("Checkdate ","")
r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
data=r.text
data=json.loads(data)
lahir = data["data"]["lahir"]
usia = data["data"]["usia"]
ultah = data["data"]["ultah"]
zodiak = data["data"]["zodiak"]
cl.sendText(msg.to,"========== I N F O R M A S I ==========\n"+"Date Of Birth : "+lahir+"\nAge : "+usia+"\nUltah : "+ultah+"\nZodiak : "+zodiak+"\n========== I N F O R M A S I ==========")
elif msg.text in ["Kalender","Time","Waktu"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
for k in range(0, len(bulan)):
if bln == str(k): bln = bulan[k-1]
rst = hasil + ", " + inihari.strftime('%d') + " - " + bln + " - " + inihari.strftime('%Y') + "\nJam : [ " + inihari.strftime('%H:%M:%S') + " ]"
cl.sendText(msg.to, rst)
elif "SearchID: " in msg.text:
userid = msg.text.replace("SearchID: ","")
contact = cl.findContactsByUserid(userid)
msg.contentType = 13
msg.contentMetadata = {'mid': contact.mid}
cl.sendMessage(msg)
elif "Searchid: " in msg.text:
userid = msg.text.replace("Searchid: ","")
contact = cl.findContactsByUserid(userid)
msg.contentType = 13
msg.contentMetadata = {'mid': contact.mid}
cl.sendMessage(msg)
elif "removechat" in msg.text.lower():
if msg.from_ in admin:
try:
cl.removeAllMessages(op.param2)
ki.removeAllMessages(op.param2)
kk.removeAllMessages(op.param2)
kc.removeAllMessages(op.param2)
kr.removeAllMessages(op.param2)
print "[Command] Remove Chat"
cl.sendText(msg.to,"Done")
except Exception as error:
print error
cl.sendText(msg.to,"Error")
elif "Invitemeto: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("Invitemeto: ","")
if gid == "":
cl.sendText(msg.to,"Invalid group id")
else:
try:
cl.findAndAddContactsByMid(msg.from_)
ki.findAndAddContactsByMid(msg.from_)
kk.findAndAddContactsByMid(msg.from_)
kc.findAndAddContactsByMid(msg.from_)
kr.findAndAddContactsByMid(msg.from_)
random.choice(KAC).inviteIntoGroup(gid,[msg.from_])
except:
cl.sendText(msg.to,"Mungkin Saya Tidak Di Dalaam Grup Itu")
elif msg.text in ["Glist"]:
cl.sendText(msg.to, "Tunggu Sebentar. . .")
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "╠➩" + "%s\n" % (cl.getGroup(i).name +" ~> ["+str(len(cl.getGroup(i).members))+"]")
cl.sendText(msg.to,"╔═════════════════════════\n║ ☆☞ LIST GROUPS☜☆\n╠═════════════════════════\n" + h + "╠═════════════════════════" + "\n║ Total Groups =" +" ["+str(len(gid))+"]\n╚═════════════════════════")
elif msg.text in ["Glistmid"]:
gruplist = kr.getGroupIdsJoined()
kontak = kr.getGroups(gruplist)
num=1
msgs="═════════List GrupMid═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.id)
num=(num+1)
msgs+="\n═════════List GrupMid═════════\n\nTotal Grup : %i" % len(kontak)
kr.sendText(msg.to, msgs)
elif "Google: " in msg.text:
a = msg.text.replace("Google: ","")
b = urllib.quote(a)
cl.sendText(msg.to,"Sedang Mencari...")
cl.sendText(msg.to, "https://www.google.com/" + b)
cl.sendText(msg.to,"Itu Dia Linknya. . .")
elif "Details group: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("Details group: ","")
if gid in [""," "]:
cl.sendText(msg.to,"Grup id tidak valid")
else:
try:
groups = cl.getGroup(gid)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
h = "[" + groups.name + "]\n -+GroupID : " + gid + "\n -+Members : " + members + "\n -+MembersPending : " + pendings + "\n -+Creator : " + groups.creator.displayName + "\n -+GroupPicture : http://dl.profile.line.naver.jp/" + groups.pictureStatus
cl.sendText(msg.to,h)
except Exception as error:
cl.sendText(msg.to,(error))
elif "Cancel invite: " in msg.text:
if msg.from_ in admin:
gids = msg.text.replace("Cancel invite: ","")
gid = cl.getGroup(gids)
for i in gid:
if i is not None:
try:
cl.rejectGroupInvitation(i)
except:
cl.sendText(msg.to,"Error!")
break
else:
break
if gid is not None:
cl.sendText(msg.to,"Berhasil tolak undangan dari grup " + gid.name)
else:
cl.sendText(msg.to,"Grup tidak ditemukan")
elif msg.text in ["Kapten acc invite"]:
if msg.from_ in admin:
gid = cl.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = cl.getGroup(i)
_list += gids.name
cl.acceptGroupInvitation(i)
else:
break
if gid is not None:
cl.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
cl.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC1 acc invite"]:
if msg.from_ in admin:
gid = ki.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = ki.getGroup(i)
_list += gids.name
ki.acceptGroupInvitation(i)
else:
break
if gid is not None:
ki.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
ki.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC2 acc invite"]:
if msg.from_ in admin:
gid = kk.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kk.getGroup(i)
_list += gids.name
kk.acceptGroupInvitation(i)
else:
break
if gid is not None:
kk.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kk.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC3 acc invite"]:
if msg.from_ in admin:
gid = kc.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kc.getGroup(i)
_list += gids.name
kc.acceptGroupInvitation(i)
else:
break
if gid is not None:
kc.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kc.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC4 acc invite"]:
if msg.from_ in admin:
gid = kr.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kr.getGroup(i)
_list += gids.name
kr.acceptGroupInvitation(i)
else:
break
if gid is not None:
kr.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kr.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif "Gif gore" in msg.text:
gif = ("https://media.giphy.com/media/l2JHVsQiOZrNMGzYs/giphy.gif","https://media.giphy.com/media/OgltQ2hbilzJS/200w.gif")
gore = random.choice(gif)
cl.sendGifWithURL(msg.to,gore)
if op.type == 59:
print op
except Exception as error:
print error
# Main polling loop: repeatedly pull up to 5 pending operations from the
# LINE long-poll endpoint and hand each one to the bot() dispatcher.
while True:
    try:
        pending = cl.fetchOps(cl.Poll.rev, 5)
    except EOFError:
        # An EOFError from fetchOps usually means the stored poll revision
        # is stale or invalid; abort with a descriptive message.
        raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))
    for operation in pending:
        if operation.type == OpType.END_OF_OPERATION:
            continue
        # Advance the poll cursor so this operation is not fetched again.
        cl.Poll.rev = max(cl.Poll.rev, operation.revision)
        bot(operation)
| 40.951759 | 443 | 0.429363 |
import ast
import codecs
import glob
import json
import os
import random
import re
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import urllib
import urllib2
from datetime import datetime
from threading import Thread

import requests
import urllib3
from bs4 import BeautifulSoup
from googletrans import Translator
from gtts import gTTS

import LINETCR
from LINETCR.lib.curve.ttypes import *
# Log in the three bot accounts. `cl` is the primary client; `ki` and `kk`
# are secondary accounts used for kick/invite fan-out (see KAC below).
# NOTE(security): auth tokens are hardcoded in the source — anyone with
# this file can hijack these accounts; they should be rotated and loaded
# from the environment instead.
cl = LINETCR.LINE()
cl.login(token='EsOvPPzeFykCVG8OoGf0.hE4TS1Hheb46PcdMzZKaaa.rzBOrFqSAApZownyv2qBJWU3PWWbf9/oE6G+sSVzUTo=')
cl.loginResult()
print "Azmi 1-Login Success\n"
ki = LINETCR.LINE()
ki.login(token='EsTdk3fyUSbT7LJVwoEd.rLylacrPH39WJb0UIwB8Nq.GYYzsgzj7aHd7mzCSluc3162Uqrry6Jjwf/bFuq9Etw=')
ki.loginResult()
print "Ki-Login Success\n"
kk = LINETCR.LINE()
kk.login(token='EsNKJDaP0J7Pt7syTOW9.GgPTp3/FisKkVX1rJHeroq.hUG0VDbWHz8R7o80xI0Pvme8dBb3dSsmCnat0PRX+JM=')
kk.loginResult()
print "Kk-Login Success\n"
print "Km-Login Success\n\n=====[Sukses All Login]====="
# Python 2 idiom: force the default string encoding to UTF-8 so the
# box-drawing menu strings below can be sent without UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('utf-8')
# ---------------------------------------------------------------------------
# Pre-rendered help-menu texts. Each *Message constant below is sent verbatim
# in chat by the command handlers in bot(); the box-drawing layout is part of
# the visible output, so these literals must not be reflowed or translated.
# ---------------------------------------------------------------------------

# Menu of self-service commands (profile lookups, broadcast, etc.).
selfMessage ="""
╔═════════════════════════
║ ☆☞ S E L F ☜☆
╠═════════════════════════
╠➩〘Hi〙
╠➩〘Me〙
╠➩〘Mymid〙
╠➩〘Mid @〙
╠➩〘SearchID: (ID LINE)〙
╠➩〘Checkdate (DD/MM/YY)〙
╠➩〘Kalender〙
╠➩〘Steal contact〙
╠➩〘Pp @〙
╠➩〘Cover @〙
╠➩〘Auto like〙
╠➩〘Scbc Text〙
╠➩〘Cbc Text〙
╠➩〘Gbc Text〙
╠➩〘Getbio @〙
╠➩〘Getinfo @〙
╠➩〘Getname @〙
╠➩〘Getprofile @〙
╠➩〘Getcontact @〙
╠➩〘Getvid @〙
╠➩〘Friendlist〙
╠═════════════════════════
║ ☆☞ S E L F ☜☆
╚═════════════════════════
"""
# Menu of bot-management commands (copy profiles, backup, leave groups).
botMessage ="""
╔═════════════════════════
║ ☆☞ B O T ☜☆
╠═════════════════════════
╠➩〘Absen〙
╠➩〘Respon〙
╠➩〘Runtime〙
╠➩〘Kapten copy @〙
╠➩〘TC1 copy @〙
╠➩〘TC2 copy @〙
╠➩〘TC3 copy @〙
╠➩〘TC4 copy @〙
╠➩〘Backup all〙
╠➩〘/bio Text〙
╠➩〘@bye (Usir Kapten)〙
╠➩〘Bye all (Usir Semua)〙
╠═════════════════════════
║ ☆☞ B O T ☜☆
╚═════════════════════════
"""
# Menu of media/lookup commands (search, music, translation, TTS).
mediaMessage ="""
╔═════════════════════════
║ ☆☞ M E D I A ☜☆
╠═════════════════════════
╠➩〘Gift〙
╠➩〘Gift1 @ s/d Gift10 @〙
╠➩〘Giftbycontact〙
╠➩〘All gift〙
╠➩〘Gif gore〙
╠➩〘Google: (Text)〙
╠➩〘Playstore NamaApp〙
╠➩〘Fancytext: Text〙
╠➩〘/musik Judul-Penyanyi〙
╠➩〘/lirik Judul-Penyanyi〙
╠➩〘/musrik Judul-Penyanyi〙
╠➩〘/ig UrsnameInstagram〙
╠➩〘Checkig UrsnameInstagram〙
╠➩〘/apakah Text (Kerang Ajaib)〙
╠➩〘/kapan Text (Kerang Ajaib)〙
╠➩〘/hari Text (Kerang Ajaib)〙
╠➩〘/berapa Text (Kerang Ajaib)〙
╠➩〘/berapakah Text〙
╠➩〘Youtubelink: Judul Video〙
╠➩〘Youtubevideo: Judul Video〙
╠➩〘Youtubesearch: Judul Video〙
╠➩〘Image NamaGambar〙
╠➩〘Say-id Text〙
╠➩〘Say-en Text〙
╠➩〘Say-jp Text〙
╠➩〘Image NamaGambar〙
╠➩〘Tr-id Text (Translate En Ke ID〙
╠➩〘Tr-en Text (Translate ID Ke En〙
╠➩〘Tr-th Text (Translate ID Ke Th〙
╠➩〘Id@en Text (Translate ID Ke En〙
╠➩〘Id@th Text (Translate ID Ke TH〙
╠➩〘En@id Text (Translate En Ke ID〙
╠═════════════════════════
║ ☆☞ M E D I A ☜☆
╚═════════════════════════
"""
# Menu of group-administration commands.
groupMessage ="""
╔═════════════════════════
║ ☆☞ G R O U P ☜☆
╠═════════════════════════
╠➩〘Welcome〙
╠➩〘Say welcome〙
╠➩〘Invite creator〙
╠➩〘Setview〙
╠➩〘Viewseen〙
╠➩〘Gn: (NamaGroup)〙
╠➩〘Tag all〙
╠➩〘Recover〙
╠➩〘Cancel〙
╠➩〘Cancelall〙
╠➩〘Gcreator〙
╠➩〘Ginfo〙
╠➩〘Gurl〙
╠➩〘List group〙
╠➩〘Pict group: (NamaGroup)〙
╠➩〘Spam: (Text)〙
╠➩〘Spam〙
╠➩〘Add all〙
╠➩〘Kick: (Mid)〙
╠➩〘Invite: (Mid)〙
╠➩〘Invite〙
╠➩〘Memlist〙
╠➩〘Getgroup image〙
╠➩〘Urlgroup Image〙
╠═════════════════════════
║ ☆☞ G R O U P ☜☆
╚═════════════════════════
"""
# LINE mid of the bot creator; reused below as Creator/admin and as the
# contact card sent by the "Creator"/"Owner" commands.
tjia="u71b6799e1c37868a871d442e67633182"
# Menu of on/off feature toggles.
setMessage ="""
╔═════════════════════════
║ ☆☞ S E T ☜☆
╠═════════════════════════
╠➩〘Sambutan on/off〙
╠➩〘Url on/off〙
╠➩〘Alwaysread on/off〙
╠➩〘Sider on/off〙
╠➩〘Contact on/off〙
╠➩〘Simisimi on/off〙
╠═════════════════════════
║ ☆☞ S E T ☜☆
╚═════════════════════════
"""
# Menu of creator-only commands.
creatorMessage ="""
╔═════════════════════════
║ ☆☞ C R E A T O R ☜☆
╠═════════════════════════
╠➩〘Admin add @〙
╠➩〘Admin remove @〙
╠➩〘/cnkapten〙
╠➩〘/cntc1〙
╠➩〘/cntc2〙
╠➩〘/cntc3〙
╠➩〘/cntc4〙
╠➩〘Crash〙
╠➩〘Kickall〙
╠➩〘Bc: (Text)〙
╠➩〘Nk: @〙
╠➩〘Ulti @〙
╠➩〘Join group: (NamaGroup〙
╠➩〘Leave group: (NamaGroup〙
╠➩〘Leave all group〙
╠➩〘Bot restart〙
╠➩〘Turn off〙
╠═════════════════════════
║ ☆☞ C R E A T O R ☜☆
╚═════════════════════════
"""
# Menu of admin commands (ban management, group lists, join toggles).
adminMessage ="""
╔═════════════════════════
║ ☆☞ A D M I N ☜☆
╠═════════════════════════
╠➩〘Admin list〙
╠➩〘Ban〙
╠➩〘Unban〙
╠➩〘Ban @〙
╠➩〘Unban @〙
╠➩〘Ban list〙
╠➩〘Clear ban〙
╠➩〘Kill〙
╠➩〘Kick @〙
╠➩〘Set member: (Jumblah)〙
╠➩〘Ban group: (NamaGroup〙
╠➩〘Del ban: (NamaGroup〙
╠➩〘List ban〙
╠➩〘Kill ban〙
╠➩〘Glist〙
╠➩〘Glistmid〙
╠➩〘Details group: (Gid)〙
╠➩〘Cancel invite: (Gid)〙
╠➩〘Invitemeto: (Gid)〙
╠➩〘Kapten acc invite〙
╠➩〘TC1 acc invite〙
╠➩〘TC2 acc invite〙
╠➩〘TC3 acc invite〙
╠➩〘TC4 acc invite〙
╠➩〘Removechat〙
╠➩〘Join on/off〙
╠➩〘Joincancel on/off〙
╠➩〘Respon on/off〙
╠➩〘Responkick on/off〙
╠➩〘Leave on/off〙
╠➩〘All join / (TC1/2/3/4 Join)〙
╠═════════════════════════
║ ☆☞ A D M I N ☜☆
╚═════════════════════════
"""
# Top-level help menu pointing at the other menus.
helpMessage ="""
╔═════════════════════════
║ ☆☞ H E L P ☜☆
╠═════════════════════════
╠➩〘Help protect〙
╠➩〘Help self〙
╠➩〘Help bot〙
╠➩〘Help group〙
╠➩〘Help set〙
╠➩〘Help media〙
╠➩〘Help admin〙
╠➩〘Help creator〙
╠➩〘Owner〙
╠➩〘Pap owner〙
╠➩〘Admin〙
╠➩〘Speed〙
╠➩〘Speed test〙
╠➩〘Status〙
╠═════════════════════════
║ ☆☞ H E L P ☜☆
╚═════════════════════════
"""
# Menu of group-protection toggles.
protectMessage ="""
╔═════════════════════════
║ ☆☞ P R O T E C T ☜☆
╠═════════════════════════
╠➩〘Allprotect on/off〙
╠➩〘Autocancel on/off〙
╠➩〘Qr on/off〙
╠➩〘Autokick on/off〙
╠➩〘Ghost on/off〙
╠➩〘Invitepro on/off〙
╠═════════════════════════
║ ☆☞ P R O T E C T ☜☆
╚═════════════════════════
"""
# Pool of logged-in clients; handlers pick one at random to spread actions
# (kick/invite/sendText) across accounts.
KAC=[cl,ki,kk]
# Cache each account's own mid so handlers can recognise the bots.
mid = cl.getProfile().mid
Amid = ki.getProfile().mid
Bmid = kk.getProfile().mid
Bots=[mid,Amid,Bmid]
# Creator/admin whitelists (LINE mids); same single mid for both initially.
Creator=["u71b6799e1c37868a871d442e67633182"]
admin=["u71b6799e1c37868a871d442e67633182"]
# Snapshot each account's original profile so "Backup" commands can restore
# displayName / statusMessage / pictureStatus after a "copy @" command.
contact = cl.getProfile()
backup1 = cl.getProfile()
backup1.displayName = contact.displayName
backup1.statusMessage = contact.statusMessage
backup1.pictureStatus = contact.pictureStatus
contact = ki.getProfile()
backup2 = ki.getProfile()
backup2.displayName = contact.displayName
backup2.statusMessage = contact.statusMessage
backup2.pictureStatus = contact.pictureStatus
contact = kk.getProfile()
backup3 = kk.getProfile()
backup3.displayName = contact.displayName
backup3.statusMessage = contact.statusMessage
backup3.pictureStatus = contact.pictureStatus
# Display names used by the "Respon"/"Absen" commands.
responsename = cl.getProfile().displayName
responsename2 = ki.getProfile().displayName
responsename3 = kk.getProfile().displayName
# Global mutable feature-flag / state table consulted throughout bot().
wait = {
    "LeaveRoom":True,
    "AutoJoin":False,
    "AutoJoinCancel":True,
    "memberscancel":0,
    "Members":1,
    "AutoCancel":{},
    "AutoCancelon":False,
    "joinkick":False,
    "AutoKick":{},
    "AutoKickon":False,
    'pap':{},
    'invite':{},
    'steal':{},
    'gift':{},
    'likeOn':{},
    'Leave':{},
    'detectMention':True,
    'kickMention':False,
    'timeline':True,
    "Timeline":True,
    "comment1":"Kenapa Kak?",
    "comment2":"Wkwkwk \(○^ω^○)/",
    "comment3":"Lucu Banget!!! ヘ(^_^)ヘ",
    "comment4":"Nice Kak (^_^)",
    "comment5":"Bot Auto Like ©By : Azmi\nContact Me : 👉 line.me/ti/p/~a_ulul15",
    "commentOn":True,
    "commentBlack":{},
    "message":"Thx For Add Me (^_^)\nInvite Me To Your Group ヘ(^_^)ヘ",
    "blacklist":{},
    "wblacklist":False,
    "dblacklist":False,
    "Qr":{},
    "Qron":False,
    "Contact":False,
    "Sambutan":True,
    "Ghost":False,
    "inviteprotect":False,
    "alwaysRead":False,
    "Sider":{},
    "Simi":{},
    "lang":"JP",
    "BlGroup":{}
}
# Per-chat toggle for the simi-simi chatbot relay.
settings = {
    "simiSimi":{}
}
# State for the "sider" (lurker detector) feature, keyed by group id.
cctv = {
    "cyduk":{},
    "point":{},
    "sidermem":{}
}
# Read-receipt tracking used by Setview/Viewseen.
wait2 = {
    "readPoint":{},
    "readMember":{},
    "setTime":{},
    "ROM":{}
}
setTime = {}
setTime = wait2['setTime']
# Process start time; used by the "Runtime" command via waktu().
mulai = time.time()
def download_page(url):
    """Fetch *url* and return the response body as a string.

    Sends a browser-like User-Agent so image-search endpoints do not
    reject the request. Uses urllib.request on Python 3 and urllib2 on
    Python 2 (matching the rest of this Python 2 script).

    Returns:
        The page body, "Page Not found" on a Python 2 fetch error, or
        None if the Python 3 branch fails (its error is printed).
    """
    version = (3,0)
    cur_version = sys.version_info
    if cur_version >= version:
        # BUG FIX: the original read `import urllib,request` and
        # `urllib,request.Request(...)`, which imports a nonexistent
        # top-level module `request` and then evaluates a tuple whose
        # second element crashes. The correct module is urllib.request.
        import urllib.request
        try:
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
            req = urllib.request.Request(url, headers=headers)
            resp = urllib.request.urlopen(req)
            respData = str(resp.read())
            return respData
        except Exception as e:
            print(str(e))
    else:
        import urllib2
        try:
            headers = {}
            headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
            req = urllib2.Request(url, headers=headers)
            response = urllib2.urlopen(req)
            page = response.read()
            return page
        except:
            return "Page Not found"
def _images_get_next_item(s):
start_line = s.find('rg_di')
if start_line == -1:
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('"class="rg_meta"')
start_content = s.find('"ou"',start_line+90)
end_content = s.find(',"ow"',start_content-90)
content_raw = str(s[start_content+6:end_content-1])
return content_raw, end_content
def _images_get_all_items(page):
    """Collect every image link in *page* by repeatedly consuming the next
    result chunk until the scanner reports no more links."""
    links = []
    while True:
        link, consumed = _images_get_next_item(page)
        if link == "no_links":
            break
        links.append(link)
        # Small pause between chunks, as in the original scraper.
        time.sleep(0.1)
        # Drop the consumed prefix and continue with the remainder.
        page = page[consumed:]
    return links
def waktu(secs):
    """Format a duration given in seconds as 'HH Jam MM Menit SS Detik'."""
    total_minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(total_minutes, 60)
    return '%02d Jam %02d Menit %02d Detik' % (hours, minutes, seconds)
def cms(string, commands):
    """Return True when *string* exactly equals one of *commands*.

    The original looped over a 13-element prefix list (`tex`) whose loop
    variable was never used, re-running the identical comparison 13 times
    per command; a single scan is behaviourally equivalent.
    """
    for command in commands:
        if string == command:
            return True
    return False
def upload_tempimage(client):
    '''
    Upload a picture of a kitten. We don't ship one, so get creative!
    '''
    # NOTE(review): `album` and `image_path` are module-level names not
    # defined in this file chunk — confirm they are set before calling.
    meta = {
        'album': album,
        'name': 'bot auto upload',
        'title': 'bot auto upload',
        'description': 'bot auto upload'
    }
    print("Uploading image... ")
    uploaded = client.upload_from_path(image_path, config=meta, anon=False)
    print("Done")
    print()
    return uploaded
def sendAudio(self, to_, path):
    """Register an empty audio-type message for chat *to_* and open the
    backing file.

    NOTE(review): this early variant stops after opening the file — the
    fuller sendAudio defined further down actually performs the upload.
    """
    audio_msg = Message()
    audio_msg.text = None
    audio_msg.to = to_
    audio_msg.contentMetadata = None
    audio_msg.contentPreview = None
    audio_msg.contentType = 3
    M_id = self._client.sendMessage(0, audio_msg).id
    files = {
        'file': open(path, 'rb'),
    }
def sendMessage(to, text, contentMetadata={}, contentType=0):
    """Build a Message addressed to *to* and bump the per-conversation
    request counter.

    NOTE(review): the constructed message is never dispatched here —
    confirm whether a caller elsewhere consumes `messageReq`.
    """
    msg = Message()
    msg.to = to
    msg.from_ = profile.mid
    msg.text = text
    msg.contentType = contentType
    msg.contentMetadata = contentMetadata
    # Counter starts at 0 for a new conversation (-1 then incremented).
    messageReq[to] = messageReq.get(to, -1) + 1
def sendImage(self, to_, path):
    """Send the local image file *path* to chat *to_*.

    Registers an image-type message with the client, then uploads the
    file bytes to the LINE object store under the new message id.

    Raises:
        Exception: when the upload endpoint does not answer 201 Created.
    """
    M = Message(to=to_, text=None, contentType = 1)
    M.contentMetadata = None
    M.contentPreview = None
    M2 = self._client.sendMessage(0,M)
    M_id = M2.id
    params = {
        'name': 'media',
        'oid': M_id,
        # os.path.getsize replaces the original len(open(path,'rb').read()),
        # which re-read the whole file just for its size and leaked the
        # second file handle.
        'size': os.path.getsize(path),
        'type': 'image',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    # Context manager guarantees the upload handle is closed (the original
    # left it open).
    with open(path, 'rb') as upload:
        files = {
            'file': upload,
        }
        r = self.post_content('https://obs-sg.line-apps.com/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload image failure.')
    return True
def sendImageWithURL(self, to_, url):
    """Download an image from *url* into a temp file and send it with
    sendImage (one retry on failure, as before).

    Raises:
        Exception: on a non-200 download, or when both send attempts fail.
    """
    # Fixes vs. the original:
    #  * bare `randint` was a NameError — only `import random` is in scope;
    #  * the image must be written in binary mode ('wb'), not text mode.
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), random.randint(0, 9))
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        with open(path, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    else:
        raise Exception('Download image failure.')
    try:
        self.sendImage(to_, path)
    except:
        # Best-effort single retry, mirroring the original behaviour.
        try:
            self.sendImage(to_, path)
        except Exception as e:
            raise e
def sendAudio(self, to_, path):
    """Send the local audio file *path* to chat *to_*.

    Registers an audio-type message with the client, then uploads the
    file bytes to the LINE object store under the new message id.

    Raises:
        Exception: when the upload endpoint does not answer 201 Created.
    """
    M = Message()
    M.text = None
    M.to = to_
    M.contentMetadata = None
    M.contentPreview = None
    M.contentType = 3
    M_id = self._client.sendMessage(0,M).id
    params = {
        'name': 'media',
        'oid': M_id,
        # os.path.getsize replaces len(open(path,'rb').read()), which read
        # the whole file just for its size and leaked the handle.
        'size': os.path.getsize(path),
        'type': 'audio',
        'ver': '1.0',
    }
    data = {
        'params': json.dumps(params)
    }
    # Close the upload handle deterministically (the original never did).
    with open(path, 'rb') as upload:
        files = {
            'file': upload,
        }
        r = self.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
    if r.status_code != 201:
        raise Exception('Upload audio failure.')
    return True
def sendAudioWithURL(self, to_, url):
    """Fetch *url* via downloadFileWithURL and forward it to sendAudio,
    wrapping any failure in a plain Exception."""
    local_path = self.downloadFileWithURL(url)
    try:
        self.sendAudio(to_, local_path)
    except Exception as e:
        raise Exception(e)
def sendAudioWithUrl(self, to_, url):
    """Download an audio file from *url* (TLS verification disabled, as
    in the original) and send it with sendAudio.

    Raises:
        Exception: on a non-200 download or a failed send.
    """
    # Fixes vs. the original:
    #  * '%1.data' is an invalid printf conversion and raised ValueError
    #    at runtime — the intended specifier is %i;
    #  * bare `randint` was a NameError (only `import random` in scope);
    #  * the audio payload must be written in binary mode ('wb').
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), random.randint(0, 9))
    r = requests.get(url, stream=True, verify=False)
    if r.status_code == 200:
        with open(path, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    else:
        raise Exception('Download audio failure.')
    try:
        self.sendAudio(to_, path)
    except Exception as e:
        raise e
def downloadFileWithURL(self, fileUrl):
    """Download *fileUrl* through the client session into a temp file and
    return the local path.

    Raises:
        Exception: when the server does not answer 200 OK.
    """
    # bare `randint` was a NameError — qualify it via the random module.
    saveAs = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), random.randint(0, 9))
    r = self.get_content(fileUrl)
    if r.status_code == 200:
        with open(saveAs, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
        return saveAs
    else:
        raise Exception('Download file failure.')
def restart_program():
    """Replace the current process with a fresh run of this script,
    preserving the original command-line arguments."""
    os.execl(sys.executable, sys.executable, *sys.argv)
def bot(op):
try:
if op.type == 0:
return
if op.type == 5:
if wait["autoAdd"] == True:
cl.findAndAddContactsByMid(op.param1)
if(wait["message"]in[""," ","\n",None]):
pass
else:
cl.sendText(op.param1,str(wait["message"]))
if op.type == 55:
try:
group_id = op.param1
user_id=op.param2
subprocess.Popen('echo "'+ user_id+'|'+str(op.createdTime)+'" >> dataSeen/%s.txt' % group_id, shell=True, stdout=subprocess.PIPE, )
except Exception as e:
print e
if op.type == 55:
try:
if cctv['cyduk'][op.param1]==True:
if op.param1 in cctv['point']:
Name = cl.getContact(op.param2).displayName
Name = ki.getContact(op.param2).displayName
Name = kk.getContact(op.param2).displayName
Name = kc.getContact(op.param2).displayName
Name = kr.getContact(op.param2).displayName
if Name in cctv['sidermem'][op.param1]:
pass
else:
cctv['sidermem'][op.param1] += "\n• " + Name
if " " in Name:
nick = Name.split(' ')
if len(nick) == 2:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + nick[0] + " ☜" + "\nNgintip Aja Niih. . .\nChat Kek Idiih (-__-) ")
else:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + nick[1] + " ☜" + "\nBetah Banget Jadi Penonton. . .\nChat Napa (-__-) ")
else:
random.choice(KAC).sendText(op.param1, "Haii " + "☞ " + Name + " ☜" + "\nNgapain Kak Ngintip Aja???\nSini Gabung Chat... ")
else:
pass
else:
pass
except:
pass
else:
pass
if op.type == 22:
cl.leaveRoom(op.param1)
if op.type == 21:
cl.leaveRoom(op.param1)
if op.type == 13:
print op.param3
if op.param3 in mid:
if op.param2 in Creator:
cl.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Creator:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Creator:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Creator:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Creator:
kr.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Amid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Bmid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in mid:
if op.param2 in Cmid:
cl.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in mid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Bmid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Amid:
if op.param2 in Cmid:
ki.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in mid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Amid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Bmid:
if op.param2 in Cmid:
kk.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in mid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Amid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Cmid:
if op.param2 in Cmid:
kc.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in mid:
kr.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Amid:
kr.acceptGroupInvitation(op.param1)
if op.param3 in Dmid:
if op.param2 in Bmid:
kr.acceptGroupInvitation(op.param1)
if mid in op.param3:
if wait["AutoJoinCancel"] == True:
G = cl.getGroup(op.param1)
if len(G.members) <= wait["memberscancel"]:
cl.acceptGroupInvitation(op.param1)
cl.sendText(op.param1,"Maaf " + cl.getContact(op.param2).displayName + "\nMember Kurang Dari 30 Orang\nUntuk Info, Silahkan Chat Owner Kami!")
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':tjia}
cl.sendMessage(c)
cl.leaveGroup(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ti = cl.reissueGroupTicket(op.param1)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.sendText(op.param1,"☆Ketik ☞Help☜ Untuk Bantuan☆\n☆Harap Gunakan Dengan Bijak ^_^ ☆")
if mid in op.param3:
if wait["AutoJoin"] == True:
G = cl.getGroup(op.param1)
if len(G.members) <= wait["Members"]:
cl.rejectGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ti = cl.reissueGroupTicket(op.param1)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.sendText(op.param1,"☆Ketik ☞Help☜ Untuk Bantuan☆\n☆Harap Gunakan Dengan Bijak ^_^ ☆")
else:
if wait["AutoCancel"][op.param1] == True:
if op.param3 in admin:
pass
else:
cl.cancelGroupInvitation(op.param1, [op.param3])
else:
if op.param3 in wait["blacklist"]:
cl.cancelGroupInvitation(op.param1, [op.param3])
cl.sendText(op.param1, "Blacklist Detected")
else:
pass
if op.type == 19:
if wait["AutoKick"][op.param1] == True:
try:
if op.param3 in Creator:
if op.param3 in admin:
if op.param3 in Bots:
pass
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
try:
if op.param2 not in Creator:
if op.param2 not in admin:
if op.param2 not in Bots:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
else:
pass
if mid in op.param3:
if op.param2 in Creator:
if op.param2 in Bots:
pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ti = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
X = cl.getGroup(op.param1)
X.preventJoinByTicket = True
cl.updateGroup(X)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Amid in op.param3:
if op.param2 in Bots:
pass
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kc.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = kk.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = kk.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = ki.getGroup(op.param1)
G.preventJoinByTicket = True
ki.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Bmid in op.param3:
if op.param2 in Bots:
pass
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = kc.getGroup(op.param1)
X.preventJoinByTicket = False
kc.updateGroup(X)
Ti = kc.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kk.getGroup(op.param1)
G.preventJoinByTicket = True
kk.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Cmid in op.param3:
if op.param2 in Bots:
pass
try:
cl.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = cl.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kc.getGroup(op.param1)
G.preventJoinByTicket = True
kc.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Dmid in op.param3:
if op.param2 in Bots:
pass
try:
cl.kickoutFromGroup(op.param1,[op.param2])
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
X = cl.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
kk.acceptGroupInvitationByTicket(op.param1,Ti)
kc.acceptGroupInvitationByTicket(op.param1,Ti)
kr.acceptGroupInvitationByTicket(op.param1,Ti)
G = kc.getGroup(op.param1)
G.preventJoinByTicket = True
kc.updateGroup(G)
if op.param2 in wait["blacklist"]:
pass
else:
if op.param2 in Bots:
pass
else:
wait["blacklist"][op.param2] = True
if Creator in op.param3:
if admin in op.param3:
if op.param2 in Bots:
pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param2 not in Bots:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.param2 in wait["blacklist"]:
pass
else:
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
except:
print ("client Kick regulation or Because it does not exist in the group\ngid=["+op.param1+"]\nmid=["+op.param2+"]")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
random.choice(KAC).inviteIntoGroup(op.param1,[op.param3])
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if op.type == 11:
if wait["Qr"][op.param1] == True:
if op.param2 not in Bots:
if op.param2 not in admin:
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
random.choice(KAC).updateGroup(G)
if op.type == 17:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
ginfo = cl.getGroup(op.param1)
contact = cl.getContact(op.param2)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
cl.sendMessage(c)
cl.sendText(op.param1,"Hallo " + cl.getContact(op.param2).displayName + "\nWelcome To ☞ " + str(ginfo.name) + " ☜" + "\nBudayakan Cek Note\nDan Semoga Betah Disini ^_^")
cl.sendImageWithURL(op.param1,image)
print "MEMBER JOIN TO GROUP"
if op.type == 17:
if wait["joinkick"] == True:
if op.param2 in admin:
if op.param2 in Bots:
return
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
print "MEMBER JOIN KICK TO GROUP"
if op.type == 15:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
cl.sendText(op.param1,"Good Bye " + cl.getContact(op.param2).displayName + "\nSee You Next Time . . . (p′︵‵。) 🤗")
random.choice(KAC).inviteIntoGroup(op.param1,[op.param2])
print "MEMBER HAS LEFT THE GROUP"
if op.type == 13:
if op.param2 not in Creator:
if op.param2 not in admin:
if op.param2 not in Bots:
if op.param2 in Creator:
if op.param2 in admin:
if op.param2 in Bots:
pass
elif wait["inviteprotect"] == True:
wait ["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
if op.type == 19:
if wait["Ghost"] == True:
if op.param2 in admin:
if op.param2 in Bots:
pass
else:
try:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
km.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.01)
km.kickoutFromGroup(op.param1,[op.param2])
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
km.sendMessage(c)
km.leaveGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
except:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
km.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.01)
km.kickoutFromGroup(op.param1,[op.param2])
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
km.sendMessage(c)
km.leaveGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
wait["blacklist"][op.param2] = True
if op.type == 26:
msg = op.message
if wait["alwaysRead"] == True:
if msg.toType == 0:
cl.sendChatChecked(msg.from_,msg.id)
else:
cl.sendChatChecked(msg.to,msg.id)
if msg.contentType == 16:
if wait['likeOn'] == True:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1005)
ki.like(url[25:58], url[66:], likeType=1002)
kk.like(url[25:58], url[66:], likeType=1004)
kc.like(url[25:58], url[66:], likeType=1003)
kr.like(url[25:58], url[66:], likeType=1001)
cl.comment(url[25:58], url[66:], wait["comment1"])
ki.comment(url[25:58], url[66:], wait["comment2"])
kk.comment(url[25:58], url[66:], wait["comment3"])
kc.comment(url[25:58], url[66:], wait["comment4"])
kr.comment(url[25:58], url[66:], wait["comment5"])
cl.sendText(msg.to,"Like Success")
wait['likeOn'] = False
if op.type == 26:
msg = op.message
if msg.to in settings["simiSimi"]:
if settings["simiSimi"][msg.to] == True:
if msg.text is not None:
text = msg.text
r = requests.get("http://api.ntcorp.us/chatbot/v1/?text=" + text.replace(" ","+") + "&key=beta1.nt")
data = r.text
data = json.loads(data)
if data['status'] == 200:
if data['result']['result'] == 100:
cl.sendText(msg.to,data['result']['response'].encode('utf-8'))
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["kickMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Aku Bilang Jangan Ngetag Lagi " + cName + "\nAku Kick Kamu! Sorry, Byee!!!"]
ret_ = random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in admin:
cl.sendText(msg.to,ret_)
random.choice(KAC).kickoutFromGroup(msg.to,[msg.from_])
break
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
random.choice(KAC).kickoutFromGroup(msg.to,[msg.from_])
break
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["detectMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Sekali lagi nge tag gw sumpahin jomblo seumur hidup!","Dont Tag!! Lagi Sibuk",cName + " Ngapain Ngetag?",cName + " Nggak Usah Tag-Tag! Kalo Penting Langsung Pc Aja","Tag Mulu Lo Anjirr!","Dia Lagi Off", cName + " Kenapa Tag? Kangen?","Dia Lagi Tidur\nJangan Di Tag " + cName, "Jangan Suka Tag Gua " + cName, "Kamu Siapa " + cName + "?", "Ada Perlu Apa " + cName + "?","Woii " + cName + " Jangan Ngetag, Riibut!"]
ret_ = random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in admin:
cl.sendText(msg.to,ret_)
break
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
break
if msg.contentType == 13:
if wait["wblacklist"] == True:
if msg.contentMetadata["mid"] not in admin:
if msg.contentMetadata["mid"] in wait["blacklist"]:
random.choice(KAC).sendText(msg.to,"Sudah")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
random.choice(KAC).sendText(msg.to,"Ditambahkan")
else:
cl.sendText(msg.to,"Admin Detected~")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
random.choice(KAC).sendText(msg.to,"Terhapus")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
random.choice(KAC).sendText(msg.to,"Tidak Ada Black List")
elif wait["Contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"Nama:\n" + msg.contentMetadata["displayName"] + "\n\nMid:\n" + msg.contentMetadata["mid"] + "\n\nStatus:\n" + contact.statusMessage + "\n\nPhoto Profile:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nPhoto Cover:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"Nama:\n" + msg.contentMetadata["displayName"] + "\n\nMid:\n" + msg.contentMetadata["mid"] + "\n\nStatus:\n" + contact.statusMessage + "\n\nPhoto Profile:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nPhoto Cover:\n" + str(cu))
elif msg.text == "Ginfo":
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
gCreator = ginfo.creator.displayName
except:
gCreator = "Error"
if wait["lang"] == "JP":
if ginfo.invitee is None:
sinvitee = "0"
else:
sinvitee = str(len(ginfo.invitee))
if ginfo.preventJoinByTicket == True:
u = "close"
else:
u = "open"
cl.sendText(msg.to,"[Group name]\n" + str(ginfo.name) + "\n\n[Gid]\n" + msg.to + "\n\n[Group creator]\n" + gCreator + "\n\n[Profile status]\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus + "\n\nMembers:" + str(len(ginfo.members)) + "members\nPending:" + sinvitee + "people\nURL:" + u + "it is inside")
else:
cl.sendText(msg.to,"[group name]\n" + str(ginfo.name) + "\n[gid]\n" + msg.to + "\n[group creator]\n" + gCreator + "\n[profile status]\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text is None:
return
elif msg.text in ["Creator","Owner"]:
msg.contentType = 13
msg.contentMetadata = {'mid': tjia}
cl.sendMessage(msg)
cl.sendText(msg.to,"Itu tukang tikungnya(^_^)")
elif msg.text in ["Admin","admin"]:
msg.contentType = 13
admin1 = "u71b6799e1c37868a871d442e67633182"
admin2 = "u46560b002469877f708c1d2e8966fc9d"
admin3 = "u1dee2db35847101e3aa420e667390000"
msg.contentMetadata = {'mid': tjia}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin1}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin2}
random.choice(KAC).sendMessage(msg)
msg.contentMetadata = {'mid': admin3}
random.choice(KAC).sendMessage(msg)
random.choice(KAC).sendText(msg.to,"Itu Admin Kami (^_^)")
elif "Admin add @" in msg.text:
if msg.from_ in Creator:
print "[Command]Admin add executing"
_name = msg.text.replace("Admin add @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
random.choice(KAC).sendText(msg.to,"Contact Tidak Di Temukan")
else:
for target in targets:
try:
admin.append(target)
cl.sendText(msg.to,"Admin Chucky Ditambahkan")
except:
pass
print "[Command]Admin add executed"
else:
cl.sendText(msg.to,"Command Denied.")
cl.sendText(msg.to,"Creator Permission Required.")
elif "Admin remove @" in msg.text:
if msg.from_ in Creator:
print "[Command]Admin Remove Executing"
_name = msg.text.replace("Admin remove @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
random.choice(KAC).sendText(msg.to,"Contact Tidak Di Temukan")
else:
for target in targets:
try:
admin.remove(target)
cl.sendText(msg.to,"Admin Chucky Dihapus")
except:
pass
print "[Command]Admin remove executed"
else:
cl.sendText(msg.to,"Command Denied.")
cl.sendText(msg.to,"Creator Permission Required.")
elif msg.text in ["Admin list","admin list","List admin"]:
if admin == []:
cl.sendText(msg.to,"The Admin List Is Empty")
else:
cl.sendText(msg.to,"Tunggu...")
mc = "╔═════════════════════════\n║ ☆☞ ADMIN CHUCKY ☜☆\n╠═════════════════════════\n"
for mi_d in admin:
mc += "╠••> " +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc + "╚═════════════════════════")
print "[Command]Admin List executed"
elif msg.text in ["Group creator","Gcreator","gcreator"]:
ginfo = cl.getGroup(msg.to)
gCreator = ginfo.creator.mid
msg.contentType = 13
msg.contentMetadata = {'mid': gCreator}
cl.sendMessage(msg)
cl.sendText(msg.to,"Itu Yang Buat Grup Ini")
elif msg.contentType == 16:
if wait["Timeline"] == True:
msg.contentType = 0
msg.text = "post URL\n" + msg.contentMetadata["postEndUrl"]
random.choice(KAC).sendText(msg.to,msg.text)
if msg.contentType == 13:
if wait["steal"] == True:
_name = msg.contentMetadata["displayName"]
copy = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
print "[Target] Stealed"
break
else:
targets.append(copy)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + msg.contentMetadata["mid"] + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
wait["steal"] = False
break
except:
pass
if msg.contentType == 13:
if wait["gift"] == True:
_name = msg.contentMetadata["displayName"]
copy = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
print "[Target] Gift"
break
else:
targets.append(copy)
if targets == []:
pass
else:
for target in targets:
try:
cl.sendText(msg.to,"Gift Sudah Terkirim!")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1296261'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
wait['gift'] = False
break
except:
msg.contentMetadata = {'mid': target}
wait["gift"] = False
break
if msg.contentType == 13:
if wait['invite'] == True:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
groups = ki.getGroup(msg.to)
groups = kk.getGroup(msg.to)
groups = kc.getGroup(msg.to)
groups = kr.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
random.choice(KAC).sendText(msg.to, _name + " Berada DiGrup Ini")
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
ki.findAndAddContactsByMid(target)
kk.findAndAddContactsByMid(target)
kc.findAndAddContactsByMid(target)
kr.findAndAddContactsByMid(target)
random.choice(KAC).inviteIntoGroup(msg.to,[target])
random.choice(KAC).sendText(msg.to,"Invite " + _name)
wait['invite'] = False
break
except:
random.choice(KAC).sendText(msg.to,"Limit Invite")
wait['invite'] = False
break
elif msg.text in ["Key creator","help creator","Help creator"]:
cl.sendText(msg.to,creatorMessage)
elif msg.text in ["Key group","help group","Help group"]:
cl.sendText(msg.to,groupMessage)
elif msg.text in ["Key","help","Help"]:
cl.sendText(msg.to,helpMessage)
elif msg.text in ["Key self","help self","Help self"]:
cl.sendText(msg.to,selfMessage)
elif msg.text in ["Key bot","help bot","Help bot"]:
cl.sendText(msg.to,botMessage)
elif msg.text in ["Key set","help set","Help set"]:
cl.sendText(msg.to,setMessage)
elif msg.text in ["Key media","help media","Help media"]:
cl.sendText(msg.to,mediaMessage)
elif msg.text in ["Key admin","help admin","Help admin"]:
cl.sendText(msg.to,adminMessage)
elif msg.text in ["Key protect","help protect","Help protect"]:
cl.sendText(msg.to,protectMessage)
elif msg.text in ["List group"]:
gid = cl.getGroupIdsJoined()
h = ""
jml = 0
for i in gid:
gn = cl.getGroup(i).name
h += "♦【%s】\n" % (gn)
jml += 1
cl.sendText(msg.to,"=======[List Group]=======\n"+ h +"\nTotal Group: "+str(jml))
elif "Ban group: " in msg.text:
grp = msg.text.replace("Ban group: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in admin:
for i in gid:
h = cl.getGroup(i).name
if h == grp:
wait["BlGroup"][i]=True
cl.sendText(msg.to, "Success Ban Group : "+grp)
else:
pass
else:
cl.sendText(msg.to, "Only Admin")
elif msg.text in ["List ban","List ban group"]:
if msg.from_ in admin:
if wait["BlGroup"] == {}:
random.choice(KAC).sendText(msg.to,"Tidak Ada")
else:
mc = ""
for gid in wait["BlGroup"]:
mc += "-> " +cl.getGroup(gid).name + "\n"
random.choice(KAC).sendText(msg.to,"===[Ban Group]===\n"+mc)
else:
cl.sendText(msg.to, "Khusus Admin")
elif msg.text in ["Del ban: "]:
if msg.from_ in admin:
ng = msg.text.replace("Del ban: ","")
for gid in wait["BlGroup"]:
if cl.getGroup(gid).name == ng:
del wait["BlGroup"][gid]
cl.sendText(msg.to, "Success del ban "+ng)
else:
pass
else:
cl.sendText(msg.to, "Only Admin")
elif "Join group: " in msg.text:
ng = msg.text.replace("Join group: ","")
gid = cl.getGroupIdsJoined()
gid = ki.getGroupIdsJoined()
gid = kk.getGroupIdsJoined()
gid = kc.getGroupIdsJoined()
gid = kr.getGroupIdsJoined()
try:
if msg.from_ in Creator:
for i in gid:
h = cl.getGroup(i).name
h = ki.getGroup(i).name
h = kk.getGroup(i).name
h = kc.getGroup(i).name
h = kr.getGroup(i).name
if h == ng:
random.choice(KAC).inviteIntoGroup(i,[Creator])
cl.sendText(msg.to,"Success Join To ["+ h +"] Group")
else:
pass
else:
cl.sendText(msg.to,"Only Admin")
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Leave group: " in msg.text:
ng = msg.text.replace("Leave group: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
h = cl.getGroup(i).name
if h == ng:
cl.sendText(i,"Bot Di Paksa Keluar Oleh Owner!")
cl.leaveGroup(i)
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
kr.leaveGroup(i)
cl.sendText(msg.to,"Success Left ["+ h +"] group")
else:
pass
else:
cl.sendText(msg.to,"Only Admin")
elif "Leave all group" == msg.text:
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
cl.sendText(i,"Bot Di Paksa Keluar Oleh Owner!")
cl.leaveGroup(i)
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
kr.leaveGroup(i)
cl.sendText(msg.to,"Success Leave All Group")
else:
cl.sendText(msg.to,"Only Admin")
elif "Pict group: " in msg.text:
saya = msg.text.replace('Pict group: ','')
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
gna = cl.getGroup(i)
if h == saya:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif msg.text in ["cancelall","Cancelall"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
if X.invitee is not None:
gInviMids = [contact.mid for contact in X.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
cl.sendText(msg.to,"Tidak Ada Yang Pending")
else:
cl.sendText(msg.to,"Tidak Bisa Digunakan Diluar Group")
elif msg.text in ["Ourl","Url on"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
cl.sendText(msg.to,"Url Sudah Aktif")
else:
cl.sendText(msg.to,"Can not be used outside the group")
elif msg.text in ["Curl","Url off"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = True
cl.updateGroup(X)
cl.sendText(msg.to,"Url Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Can not be used outside the group")
elif msg.text in ["Join on","Autojoin on"]:
if msg.from_ in admin:
wait["AutoJoin"] = True
wait["AutoJoinCancel"] = False
cl.sendText(msg.to,"Auto Join Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Join off","Autojoin off"]:
if msg.from_ in admin:
wait["AutoJoin"] = False
cl.sendText(msg.to,"Auto Join Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joincancel on","Autojoincancel on"]:
if msg.from_ in admin:
wait["AutoJoinCancel"] = True
wait["AutoJoin"] = False
cl.sendText(msg.to,"Auto Join Cancel Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joincancel off","Autojoincancel off"]:
if msg.from_ in admin:
wait["AutoJoinCancel"] = False
cl.sendText(msg.to,"Auto Join Cancel Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Respon on"]:
if msg.from_ in admin:
wait["detectMention"] = True
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Respon Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Respon off"]:
if msg.from_ in admin:
wait["detectMention"] = False
cl.sendText(msg.to,"Auto Respon Sudah Off")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Responkick on"]:
if msg.from_ in admin:
wait["kickMention"] = True
wait["detectMention"] = False
cl.sendText(msg.to,"Auto Respon Kick Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Responkick off"]:
if msg.from_ in admin:
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Respon Kick Sudah Off")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Leave on"]:
if msg.from_ in admin:
wait["Leave"] = True
cl.sendText(msg.to,"Leave Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autocancel on"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = True
wait["AutoCancelon"] = True
cl.sendText(msg.to,"Auto Cancel Sudah Aktif")
print wait["AutoCancel"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autocancel off"]:
if msg.from_ in admin:
wait["AutoCancel"][msg.to] = False
wait["AutoCancelon"] = False
cl.sendText(msg.to,"Auto Cancel Sudah Di Nonaktifkan")
print wait["AutoCancel"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joinkick on"]:
if msg.from_ in admin:
wait["joinkick"] = True
wait["Sambutan"] = False
cl.sendText(msg.to,"Join Kick Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Joinkick off"]:
if msg.from_ in admin:
wait["joinkick"] = False
cl.sendText(msg.to,"Join Kick Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Invitepro on","Inviteprotect on"]:
if msg.from_ in admin:
wait["inviteprotect"] = True
cl.sendText(msg.to,"Invite Protect Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Invitepro off","Inviteprotect off"]:
if msg.from_ in admin:
wait["inviteprotect"] = False
cl.sendText(msg.to,"Invite Protect Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
elif "Qr on" in msg.text:
if msg.from_ in admin:
wait["Qr"][msg.to] = True
wait["Qron"] = True
cl.sendText(msg.to,"QR Protect Sudah Aktif")
print wait["Qr"]
else:
cl.sendText(msg.to,"Only Admin")
elif "Qr off" in msg.text:
if msg.from_ in admin:
wait["Qr"][msg.to] = False
wait["Qron"] = False
cl.sendText(msg.to,"Qr Protect Sudah Di Nonaktifkan")
print wait["Qr"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autokick on"]:
if msg.from_ in admin:
wait["AutoKick"][msg.to] = True
wait["AutoKickon"] = True
cl.sendText(msg.to,"Auto Kick Sudah Aktif")
print wait["AutoKick"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Autokick off"]:
if msg.from_ in admin:
wait["AutoKick"][msg.to] = False
wait["AutoKickon"] = False
cl.sendText(msg.to,"Auto Kick Sudah Di Nonaktifkan")
print wait["AutoKick"]
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Ghost on"]:
if msg.from_ in admin:
wait["Ghost"] = True
cl.sendText(msg.to,"Ghost Sudah Aktif")
else:
cl.sendText(msg.to,"Only Admin")
elif msg.text in ["Ghost off"]:
if msg.from_ in admin:
wait["Ghost"] = False
cl.sendText(msg.to,"Ghost Sudah Di Nonaktifkan")
else:
cl.sendText(msg.to,"Only Admin")
        elif msg.text in ["Allprotect on"]:
            # Master switch: enable every group-protection feature at once
            # (admin only).  Per-chat flags (AutoCancel/AutoKick/Qr) are keyed
            # by msg.to; the rest are global toggles.
            if msg.from_ in admin:
                wait["AutoCancel"][msg.to] = True
                wait["AutoCancelon"] = True
                wait["inviteprotect"] = True
                wait["joinkick"] = True
                wait["AutoKick"][msg.to] = True
                wait["AutoKickon"] = True
                wait["Qr"][msg.to] = True
                wait["Qron"] = True
                wait["Ghost"] = True
                cl.sendText(msg.to,"All Protect Sudah Aktif Semua")
                # Debug dump of the per-chat protection maps.
                print wait["AutoCancel"]
                print wait["AutoKick"]
                print wait["Qr"]
            else:
                cl.sendText(msg.to,"Only Admin")
        elif msg.text in ["Allprotect off"]:
            # Master switch: disable every group-protection feature at once
            # (admin only).  Mirror image of "Allprotect on".
            if msg.from_ in admin:
                wait["AutoCancel"][msg.to] = False
                wait["AutoCancelon"] = False
                wait["inviteprotect"] = False
                wait["joinkick"] = False
                wait["AutoKick"][msg.to] = False
                wait["AutoKickon"] = False
                wait["Qr"][msg.to] = False
                wait["Qron"] = False
                wait["Ghost"] = False
                cl.sendText(msg.to,"All Protect Sudah Di Nonaktifkan Semua")
                # Debug dump of the per-chat protection maps.
                print wait["AutoCancel"]
                print wait["AutoKick"]
                print wait["Qr"]
            else:
                cl.sendText(msg.to,"Only Admin")
elif msg.text in ["K on","Contact on"]:
wait["Contact"] = True
cl.sendText(msg.to,"Contact Sudah Aktif")
elif msg.text in ["K off","Contact off"]:
wait["Contact"] = False
cl.sendText(msg.to,"Contact Sudah Di Nonaktifkan")
elif msg.text in ["Alwaysread on"]:
wait["alwaysRead"] = True
cl.sendText(msg.to,"Always Read Sudah Aktif")
elif msg.text in ["Alwaysread off"]:
wait["alwaysRead"] = False
cl.sendText(msg.to,"Always Read Sudah Di Nonaktifkan")
elif msg.text in ["Sambutan on"]:
if wait["Sambutan"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sambutan Di Aktifkanヾ(*´∀`*)ノ")
else:
wait["Sambutan"] = True
wait["joinkick"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah Onヽ(´▽`)/")
elif msg.text in ["Sambutan off"]:
if wait["Sambutan"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sambutan Di Nonaktifkan( ^∇^)")
else:
wait["Sambutan"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah Off(p′︵‵。)")
        elif "Sider on" in msg.text:
            # Start "sider" (silent reader) detection in this chat: clear any
            # previous tracking state for the chat, then record the current
            # message id as the read-checkpoint that later views are compared
            # against.
            try:
                del cctv['point'][msg.to]
                del cctv['sidermem'][msg.to]
                del cctv['cyduk'][msg.to]
            except:
                # No previous state for this chat — nothing to clear.
                pass
            cctv['point'][msg.to] = msg.id
            cctv['sidermem'][msg.to] = ""
            cctv['cyduk'][msg.to]=True
            wait["Sider"] = True
            cl.sendText(msg.to,"Siap On Cek Sider")
        elif "Sider off" in msg.text:
            # Stop sider detection; only valid if a checkpoint was set first
            # ("Heh Belom Di Set" = "it hasn't been set yet").
            if msg.to in cctv['point']:
                cctv['cyduk'][msg.to]=False
                wait["Sider"] = False
                cl.sendText(msg.to, "Cek Sider Off")
            else:
                cl.sendText(msg.to, "Heh Belom Di Set")
elif msg.text in ["Status"]:
md = ""
if wait["Sambutan"] == True: md+="╠➩✔️ Sambutan : On\n"
else:md+="╠➩❌ Sambutan : Off\n"
if wait["joinkick"] == True: md+="╠➩✔️ Join Kick : On\n"
else:md+="╠➩❌ Join Kick : Off\n"
if wait["AutoJoin"] == True: md+="╠➩✔️ Auto Join : On\n"
else: md +="╠➩❌ Auto Join : Off\n"
if wait["AutoJoinCancel"] == True: md+="╠➩✔️ Auto Join Cancel : On\n"
else: md +="╠➩❌ Auto Join Cancel : Off\n"
if wait["Leave"] == True: md+="╠➩✔️ Leave : On\n"
else: md +="╠➩❌ Leave : Off\n"
if wait["Contact"] == True: md+="╠➩✔️ Info Contact : On\n"
else: md+="╠➩❌ Info Contact : Off\n"
if wait["AutoCancelon"] == True:md+="╠➩✔️ Auto Cancel : On\n"
else: md+= "╠➩❌ Auto Cancel : Off\n"
if wait["inviteprotect"] == True:md+="╠➩✔️ Invite Protect : On\n"
else: md+= "╠➩❌ Invite Protect : Off\n"
if wait["Qron"] == True: md+="╠➩✔️ Qr Protect : On\n"
else:md+="╠➩❌ Qr Protect : Off\n"
if wait["AutoKickon"] == True: md+="╠➩✔️ Auto Kick : On\n"
else:md+="╠➩❌ Auto Kick : Off\n"
if wait["Ghost"] == True: md+="╠➩✔️ Ghost : On\n"
else:md+="╠➩❌ Ghost : Off\n"
if wait["alwaysRead"] == True: md+="╠➩✔️ Always Read : On\n"
else:md+="╠➩❌ Always Read: Off\n"
if wait["detectMention"] == True: md+="╠➩✔️ Auto Respon : On\n"
else:md+="╠➩❌ Auto Respon : Off\n"
if wait["kickMention"] == True: md+="╠➩✔️ Auto Respon Kick : On\n"
else:md+="╠➩❌ Auto Respon Kick : Off\n"
if wait["Sider"] == True: md+="╠➩✔️ Auto Sider : On\n"
else:md+="╠➩❌ Auto Sider: Off\n"
if wait["Simi"] == True: md+="╠➩✔️ Simisimi : On\n"
else:md+="╠➩❌ Simisimi: Off\n"
cl.sendText(msg.to,"╔═════════════════════════\n""║ ☆☞ S T A T U S ☜☆\n""╠═════════════════════════\n"+md+"╚═════════════════════════")
elif msg.text in ["Gift","gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["All gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
ki.sendMessage(msg)
kk.sendMessage(msg)
kc.sendMessage(msg)
elif msg.text in ["TC1 Gift","TC1 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '696d7046-843b-4ed0-8aac-3113ed6c0733',
'PRDTYPE': 'THEME',
'MSGTPL': '6'}
msg.text = None
ki.sendMessage(msg)
elif msg.text in ["TC2 Gift","TC2 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '8fe8cdab-96f3-4f84-95f1-6d731f0e273e',
'PRDTYPE': 'THEME',
'MSGTPL': '7'}
msg.text = None
kk.sendMessage(msg)
elif msg.text in ["TC3 Gift","TC3 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'ae3d9165-fab2-4e70-859b-c14a9d4137c4',
'PRDTYPE': 'THEME',
'MSGTPL': '8'}
msg.text = None
kc.sendMessage(msg)
elif "Gift1 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift1 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1380280'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift2 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift2 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '2',
'STKPKGID': '1360738'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift3 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift3 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '3',
'STKPKGID': '1395389'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift4 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift4 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '4',
'STKPKGID': '1329191'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift5 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift5 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '9057'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift6 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift6 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '2',
'STKPKGID': '9167'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift7 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift7 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '3',
'STKPKGID': '7334'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift8 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift8 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1380280'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift9 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift9 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '4',
'STKPKGID': '1405277'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif "Gift10 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift10 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1296261'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
elif msg.text.lower() in ["wkwkwk","wkwk","hahaha","haha"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '100',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["hehehe","hehe"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '10',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["galau"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '9',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["you","kau","kamu"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '7',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["marah","hadeuh","hadeh"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '6',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["please","pliss","mohon","tolong"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '4',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["haa","haaa","kaget"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '3',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["lucu","ngakak","lol"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '110',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["hmm","hmmm"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '101',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["tidur"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '1',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["gemes"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '2',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["cantik","imut"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '5',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["nyanyi","lalala"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '11',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["gugup"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '8',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["ok","oke","okay","oce","okee","sip","siph"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '13',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["mantab","mantap","nice","keren"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '14',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["ngejek"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '15',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["nangis","sedih"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '16',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["woi","kampret"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '102',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text.lower() in ["huft"]:
msg.contentType = 7
msg.contentMetadata={'STKID': '104',
'STKPKGID': '1',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["Tagall","Tag all"]:
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
cb = ""
cb2 = ""
strt = int(0)
akh = int(0)
for md in nama:
akh = akh + int(6)
cb += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(md)+"},"""
strt = strt + int(7)
akh = akh + 1
cb2 += "@nrik \n"
cb = (cb[:int(len(cb)-1)])
msg.contentType = 0
msg.text = cb2
msg.contentMetadata ={'MENTION':'{"MENTIONEES":['+cb+']}','EMTVER':'4'}
try:
cl.sendMessage(msg)
except Exception as error:
print error
elif msg.text in ["Setview","Setpoint","Cctv"]:
subprocess.Popen("echo '' > dataSeen/"+msg.to+".txt", shell=True, stdout=subprocess.PIPE)
cl.sendText(msg.to, "☆Checkpoint Checked☆")
print "Setview"
elif msg.text in ["Viewseen","Check","Ciduk","Cyduk"]:
lurkGroup = ""
dataResult, timeSeen, contacts, userList, timelist, recheckData = [], [], [], [], [], []
with open('dataSeen/'+msg.to+'.txt','r') as rr:
contactArr = rr.readlines()
for v in xrange(len(contactArr) -1,0,-1):
num = re.sub(r'\n', "", contactArr[v])
contacts.append(num)
pass
contacts = list(set(contacts))
for z in range(len(contacts)):
arg = contacts[z].split('|')
userList.append(arg[0])
timelist.append(arg[1])
uL = list(set(userList))
for ll in range(len(uL)):
try:
getIndexUser = userList.index(uL[ll])
timeSeen.append(time.strftime("%H:%M:%S", time.localtime(int(timelist[getIndexUser]) / 1000)))
recheckData.append(userList[getIndexUser])
except IndexError:
conName.append('nones')
pass
contactId = cl.getContacts(recheckData)
for v in range(len(recheckData)):
dataResult.append(contactId[v].displayName + ' ('+timeSeen[v]+')')
pass
if len(dataResult) > 0:
tukang = "╔═════════════════════════\n║ ☆☞ LIST VIEWERS ☜☆\n╠═════════════════════════\n╠➩"
grp = '\n╠➩ '.join(str(f) for f in dataResult)
total = '\n╠═════════════════════════\n╠➩ Total %i Viewers (%s)' % (len(dataResult), datetime.now().strftime('%H:%M:%S')) + "\n╚═════════════════════════"
cl.sendText(msg.to, "%s %s %s" % (tukang, grp, total))
subprocess.Popen("echo '' > dataSeen/"+msg.to+".txt", shell=True, stdout=subprocess.PIPE)
cl.sendText(msg.to, "☆Auto Checkpoint☆")
else:
cl.sendText(msg.to, "☆Belum Ada Viewers☆")
print "Viewseen"
elif "Kick " in msg.text:
if msg.from_ in admin:
if 'MENTION' in msg.contentMetadata.keys()!= None:
names = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
print mentionees
for mention in mentionees:
ki.kickoutFromGroup(msg.to,[mention['M']])
elif "Set member: " in msg.text:
if msg.from_ in admin:
jml = msg.text.replace("Set member: ","")
wait["memberscancel"] = int(jml)
cl.sendText(msg.to, "Jumlah minimal member telah di set : "+jml)
elif "Add all" in msg.text:
thisgroup = cl.getGroups([msg.to])
Mids = [contact.mid for contact in thisgroup[0].members]
mi_d = Mids[:33]
cl.findAndAddContactsByMids(mi_d)
cl.sendText(msg.to,"Success Add all")
elif msg.text in ["Invite"]:
wait["invite"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Auto like"]:
wait["likeOn"] = True
cl.sendText(msg.to,"Shere Post Kamu Yang Mau Di Like!")
elif msg.text in ["Steal contact"]:
wait["steal"] = True
cl.sendText(msg.to,"Send Contact")
elif msg.text in ["Giftbycontact"]:
wait["gift"] = True
cl.sendText(msg.to,"Send Contact")
elif "Recover" in msg.text:
thisgroup = cl.getGroups([msg.to])
Mids = [contact.mid for contact in thisgroup[0].members]
mi_d = Mids[:33]
cl.createGroup("Recover", mi_d)
cl.sendText(msg.to,"Success recover")
elif ("Gn: " in msg.text):
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.name = msg.text.replace("Gn: ","")
cl.updateGroup(X)
else:
cl.sendText(msg.to,"It can't be used besides the group.")
elif "Kick: " in msg.text:
midd = msg.text.replace("Kick: ","")
kicker = [ki,kk,kc]
if midd not in admin:
random.choice(kicker).kickoutFromGroup(msg.to,[midd])
else:
cl.sendText(msg.to,"Admin Detected")
elif "Invite: " in msg.text:
midd = msg.text.replace("Invite: ","")
cl.findAndAddContactsByMid(midd)
ki.findAndAddContactsByMid(midd)
kk.findAndAddContactsByMid(midd)
kc.findAndAddContactsByMid(midd)
kr.findAndAddContactsByMid(midd)
random.choice(KAC).inviteIntoGroup(msg.to,[midd])
elif "Invite creator" in msg.text:
midd = "u71b6799e1c37868a871d442e67633182"
random.choice(KAC).inviteIntoGroup(msg.to,[midd])
elif msg.text in ["Welcome","welcome","Welkam","welkam","Wc","wc"]:
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"Selamat Datang Di "+ gs.name)
msg.contentType = 7
msg.contentMetadata={'STKID': '247',
'STKPKGID': '3',
'STKVER': '100'}
msg.text = None
cl.sendMessage(msg)
elif "Bc: " in msg.text:
bc = msg.text.replace("Bc: ","")
gid = cl.getGroupIdsJoined()
if msg.from_ in Creator:
for i in gid:
cl.sendText(i,"=======[BROADCAST]=======\n\n"+bc+"\n\nContact Me : line.me/ti/p/~a_ulul15")
cl.sendText(msg.to,"Success BC BosQ")
else:
cl.sendText(msg.to,"Khusus Admin")
elif msg.text in ["Cancel"]:
gid = cl.getGroupIdsInvited()
for i in gid:
cl.rejectGroupInvitation(i)
cl.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC1 Cancel"]:
gid = ki.getGroupIdsInvited()
for i in gid:
ki.rejectGroupInvitation(i)
ki.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC2 Cancel"]:
gid = kk.getGroupIdsInvited()
for i in gid:
kk.rejectGroupInvitation(i)
kk.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["TC3 Cancel"]:
gid = kc.getGroupIdsInvited()
for i in gid:
kc.rejectGroupInvitation(i)
kc.sendText(msg.to,"All invitations have been refused")
elif msg.text in ["Gurl"]:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can't be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["All join","Join all"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kk.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
kr.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
G.preventJoinByTicket(G)
ki.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC1 join"]:
if msg.from_ in admin:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ti)
G = kk.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC2 join"]:
if msg.from_ in admin:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
kk.acceptGroupInvitationByTicket(msg.to,Ti)
G = ki.getGroup(msg.to)
G.preventJoinByTicket = True
kk.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC3 join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
kc.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["TC4 join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kr.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
kr.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["Ghost join"]:
if msg.from_ in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
km.acceptGroupInvitationByTicket(msg.to,Ticket)
G.preventJoinByTicket = True
km.updateGroup(G)
else:
cl.sendText(msg.to,"Sape lu!")
elif msg.text in ["timeline"]:
try:
url = cl.activity(limit=5)
cl.sendText(msg.to,url['result']['posts'][0]['postInfo']['postId'])
except Exception as E:
print E
elif msg.text in ["Bye all"]:
if wait["Leave"] == True:
ki.leaveGroup(msg.to)
kk.leaveGroup(msg.to)
kc.leaveGroup(msg.to)
kr.leaveGroup(msg.to)
else:
cl.sendText(msg.to,"Leavenya Belum On")
elif msg.text in ["@bye","@Bye"]:
if wait["Leave"] == True:
cl.leaveGroup(msg.to)
wait["Leave"] = False
else:
cl.sendText(msg.to,"Bilang Dulu Sama Admin Ku")
elif msg.text in ["Absen"]:
cl.sendText(msg.to,"Pasukan Absen!!")
ki.sendText(msg.to,"TC1 Hadiir \(ˆ▿ˆ)/")
kk.sendText(msg.to,"TC2 Hadiir \(ˆ▿ˆ)/")
kc.sendText(msg.to,"TC3 Hadiir \(ˆ▿ˆ)/")
kr.sendText(msg.to,"Hadiir Semua Kapten \(ˆ▿ˆ)/")
elif msg.text.lower() in ["respon"]:
cl.sendText(msg.to,responsename)
ki.sendText(msg.to,responsename2)
kk.sendText(msg.to,responsename3)
kc.sendText(msg.to,responsename4)
kr.sendText(msg.to,responsename5)
elif msg.text in ["Sp","Speed","speed"]:
start = time.time()
print("Speed")
elapsed_time = time.time() - start
cl.sendText(msg.to, "Tunggu Bentaar BOS....")
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
elif msg.text in ["Speed test"]:
start = time.time()
cl.sendText(msg.to, "Tunggu Bentaar BOS......")
elapsed_time = time.time() - start
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
elif "Nk: " in msg.text:
if msg.from_ in Creator:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
invsend = 0
Ti = cl.reissueGroupTicket(msg.to)
kr.acceptGroupInvitationByTicket(msg.to,Ti)
G = kk.getGroup(msg.to)
G.preventJoinByTicket = True
kk.updateGroup(G)
nk0 = msg.text.replace("Nk: ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
targets = []
for s in X.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
if target not in admin:
kr.kickoutFromGroup(msg.to,[target])
kr.leaveGroup(msg.to)
ki.sendText(msg.to,"Succes BosQ")
kk.sendText(msg.to,"Pakyu~")
else:
cl.sendText(msg.to,"Admin Detected")
else:
cl.sendText(msg.to,"Lu sape!")
elif msg.text in ["Ban"]:
if msg.from_ in admin:
wait["wblacklist"] = True
ki.sendText(msg.to,"send contact")
elif msg.text in ["Unban"]:
if msg.from_ in admin:
wait["dblacklist"] = True
ki.sendText(msg.to,"send contact")
elif "Ban @" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "@Ban by mention"
_name = msg.text.replace("Ban @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kc.sendText(msg.to,"Not found")
else:
for target in targets:
if target not in admin:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
ki.sendText(msg.to,"Succes BosQ")
except:
ki.sendText(msg.to,"Error")
else:
cl.sendText(msg.to,"Admin Detected~")
elif msg.text in ["Banlist","Ban list"]:
if msg.from_ in admin:
if wait["blacklist"] == {}:
random.choice(KAC).sendText(msg.to,"Tidak Ada")
else:
mc = ""
for mi_d in wait["blacklist"]:
mc += "->" +cl.getContact(mi_d).displayName + "\n"
random.choice(KAC).sendText(msg.to,"===[Blacklist User]===\n"+mc)
elif "Unban @" in msg.text:
if msg.toType == 2:
print "@Unban by mention"
if msg.from_ in admin:
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kk.sendText(msg.to,"Not found")
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
ki.sendText(msg.to,"Succes BosQ")
except:
ki.sendText(msg.to,"Succes BosQ")
elif msg.text.lower() == 'clear ban':
if msg.from_ in admin:
wait["blacklist"] = {}
cl.sendText(msg.to,"ヽ( ^ω^)ノ└ ❉Unbanned All Success❉ ┐")
elif msg.text.lower() in ["sayang","chucky"]:
ki.sendText(msg.to,"Apa Sayang :*")
elif msg.text in ["Kill ban"]:
if msg.from_ in admin:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
ki.sendText(msg.to,"There was no blacklist user")
return
for jj in matched_list:
random.choice(KAC).kickoutFromGroup(msg.to,[jj])
ki.sendText(msg.to,"Blacklist emang pantas tuk di usir")
else:
cl.sendText(msg.to, "Khusus creator")
elif msg.text in ["Kill"]:
if msg.toType == 2:
if msg.from_ in admin:
group = ki.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
kk.sendText(msg.to,"Fuck You")
kc.sendText(msg.to,"Fuck You")
return
for jj in matched_list:
try:
klist=[ki,kk,kc]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
elif "Kickall" == msg.text:
if msg.from_ in Creator:
if msg.toType == 2:
print "Kick all member"
_name = msg.text.replace("Kickall","")
gs = ki.getGroup(msg.to)
gs = kk.getGroup(msg.to)
gs = kc.getGroup(msg.to)
ki.sendText(msg.to,"Sampai jumpaa~")
kc.sendText(msg.to,"Dadaaah~")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
ki.sendText(msg.to,"Not found.")
else:
for target in targets:
if target not in admin:
try:
klist=[ki,kk,kc]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except Exception as e:
cl.sendText(msg.to,str(e))
cl.inviteIntoGroup(msg.to, targets)
elif msg.text in ["Bot restart","Reboot"]:
if msg.from_ in Creator:
cl.sendText(msg.to, "Bot Has Been Restarted...")
restart_program()
print "@Restart"
else:
cl.sendText(msg.to, "No Access")
elif msg.text in ["Turn off"]:
if msg.from_ in Creator:
try:
import sys
sys.exit()
except:
pass
elif 'Crash' in msg.text:
if msg.from_ in Creator:
msg.contentType = 13
msg.contentMetadata = {'mid': "NADYA,'"}
cl.sendMessage(msg)
elif "Kapten copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("Kapten copy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
cl.CloneContactProfile(target)
cl.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC1 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC1 copy @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
ki.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
ki.CloneContactProfile(target)
ki.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC2 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC2 copy @","")
_nametarget = _name.rstrip(' ')
gs = kk.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kk.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kk.CloneContactProfile(target)
kk.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC3 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC3 copy @","")
_nametarget = _name.rstrip(' ')
gs = kc.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kc.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kc.CloneContactProfile(target)
kc.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif "TC4 copy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("TC4 copy @","")
_nametarget = _name.rstrip(' ')
gs = kr.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
kr.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
kr.CloneContactProfile(target)
kr.sendText(msg.to, "Copied (^_^)")
except Exception as e:
print e
elif msg.text in ["Backup all"]:
try:
ki.updateDisplayPicture(backup2.pictureStatus)
ki.updateProfile(backup2)
kk.updateDisplayPicture(backup3.pictureStatus)
kk.updateProfile(backup3)
kc.updateDisplayPicture(backup4.pictureStatus)
kc.updateProfile(backup4)
kr.updateDisplayPicture(backup5.pictureStatus)
kr.updateProfile(backup5)
cl.updateDisplayPicture(backup1.pictureStatus)
cl.updateProfile(backup1)
cl.sendText(msg.to, "All Done (^_^)")
except Exception as e:
cl.sendText(msg.to, str(e))
elif "/musik " in msg.text:
songname = msg.text.replace("/musik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4])
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Selamat Mendengarkan Lagu " + song[0])
elif '/lirik ' in msg.text.lower():
try:
songname = msg.text.lower().replace('/lirik ','')
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "/musrik " in msg.text:
songname = msg.text.replace("/musrik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4] +"\n\n" + hasil)
cl.sendText(msg.to, "Selamat Mendengarkan Lagu " + song[0])
elif "Fancytext: " in msg.text:
txt = msg.text.replace("Fancytext: ", "")
cl.kedapkedip(msg.to,txt)
print "[Command] Kedapkedip"
elif "cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif msg.text.lower() in ["van","yog","wan","gong","tep","pap creator"]:
link = ["http://dl.profile.line-cdn.net/0hbPvoismJPX9LFhHY8ztCKHdTMxI8OCw3JXclGzwRYBpgci99fyV2GzwUY01icXl5J3EnHjxBakxj"]
pilih = random.choice(link)
ki.sendImageWithURL(msg.to,pilih)
elif msg.text.lower() in ["van","yog","wan","gong","tep","pap owner","pap creator"]:
link = ["http://dl.profile.line-cdn.net/0hbPvoismJPX9LFhHY8ztCKHdTMxI8OCw3JXclGzwRYBpgci99fyV2GzwUY01icXl5J3EnHjxBakxj"]
pilih = random.choice(link)
ki.sendImageWithURL(msg.to,pilih)
elif "Spam: " in msg.text:
bctxt = msg.text.replace("Spam: ", "")
t = 10
while(t):
random.choice(KAC).sendText(msg.to, (bctxt))
t-=1
elif "Scbc " in msg.text:
bctxt = msg.text.replace("Scbc ", "")
orang = cl.getAllContactIds()
t = 20
for manusia in orang:
while(t):
cl.sendText(manusia, (bctxt))
t-=1
elif "Cbc " in msg.text:
broadcasttxt = msg.text.replace("Cbc ", "")
orang = cl.getAllContactIds()
for manusia in orang:
cl.sendText(manusia, (broadcasttxt))
elif '/ig ' in msg.text.lower():
try:
instagram = msg.text.lower().replace("/ig ","")
html = requests.get('https://www.instagram.com/' + instagram + '/?')
soup = BeautifulSoup(html.text, 'html.parser')
data = soup.find_all('meta', attrs={'property':'og:description'})
text = data[0].get('content').split()
data1 = soup.find_all('meta', attrs={'property':'og:image'})
text1 = data1[0].get('content').split()
nadya = text1[0].replace("s150x150/","")
user = "Name: " + text[-2] + "\n"
user1 = "Username: " + text[-1] + "\n"
followers = "Followers: " + text[0] + "\n"
following = "Following: " + text[2] + "\n"
post = "Post: " + text[4] + "\n"
link = "Link: " + "https://www.instagram.com/" + instagram
detail = "========INSTAGRAM INFO ========\n"
details = "\n========INSTAGRAM INFO ========"
cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
cl.sendImageWithURL(msg.to, nadya)
except Exception as njer:
cl.sendText(msg.to, str(njer))
elif "Checkig " in msg.text:
separate = msg.text.split(" ")
user = msg.text.replace(separate[0] + " ","")
if user.startswith("@"):
user = user.replace("@","")
profile = "https://www.instagram.com/" + user
with requests.session() as x:
x.headers['user-agent'] = 'Mozilla/5.0'
end_cursor = ''
for count in range(1, 999):
print('PAGE: ', count)
r = x.get(profile, params={'max_id': end_cursor})
data = re.search(r'window._sharedData = (\{.+?});</script>', r.text).group(1)
j = json.loads(data)
for node in j['entry_data']['ProfilePage'][0]['user']['media']['nodes']:
if node['is_video']:
page = 'https://www.instagram.com/p/' + node['code']
r = x.get(page)
url = re.search(r'"video_url": "([^"]+)"', r.text).group(1)
print(url)
cl.sendVideoWithURL(msg.to,url)
else:
print (node['display_src'])
cl.sendImageWithURL(msg.to,node['display_src'])
end_cursor = re.search(r'"end_cursor": "([^"]+)"', r.text).group(1)
elif 'Youtubelink: ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtube ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to,"Could not find it")
elif 'Youtubevideo: ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtubevideo: ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class': 'yt-uix-tile-link'})
cl.sendVideoWithURL(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to, "Could not find it")
elif "Say-id " in msg.text:
say = msg.text.replace("Say-id ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-en " in msg.text:
say = msg.text.replace("Say-en ","")
lang = 'en'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-jp " in msg.text:
say = msg.text.replace("Say-jp ","")
lang = 'ja'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say welcome" in msg.text:
gs = cl.getGroup(msg.to)
say = msg.text.replace("Say welcome","Selamat Datang Di "+ gs.name)
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif msg.text.lower() in ["hi","hai","halo","hallo"]:
beb = "Hi Sayang 😘 " +cl.getContact(msg.from_).displayName + " starry heart"
kr.sendText(msg.to,beb)
elif "playstore " in msg.text.lower():
tob = msg.text.lower().replace("playstore ","")
cl.sendText(msg.to,"Sedang Mencari...")
cl.sendText(msg.to,"Title : "+tob+"\nSource : Google Play\nLink : https://play.google.com/store/search?q=" + tob)
cl.sendText(msg.to,"Tuh Linknya Kak (^_^)")
elif "Mid @" in msg.text:
_name = msg.text.replace("Mid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
random.choice(KAC).sendText(msg.to, g.mid)
else:
pass
elif "/bio " in msg.text:
string = msg.text.replace("/bio ","")
if len(string.decode('utf-8')) <= 500:
profile = cl.getProfile()
profile.statusMessage = string
cl.updateProfile(profile)
ki.updateProfile(profile)
kk.updateProfile(profile)
kc.updateProfile(profile)
kr.updateProfile(profile)
cl.sendText(msg.to,"All Done")
elif "/cnkapten" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cnkapten","Mi Kapten")
if len(string.decode('utf-8')) <= 5000:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Done")
elif "/cntc1" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc1","Mi TC1")
if len(string.decode('utf-8')) <= 5000:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
ki.sendText(msg.to,"Done")
elif "/cntc2" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc2","Mi TC2")
if len(string.decode('utf-8')) <= 5000:
profile = kk.getProfile()
profile.displayName = string
kk.updateProfile(profile)
kk.sendText(msg.to,"Done")
elif "/cntc3" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc3","Mi TC3")
if len(string.decode('utf-8')) <= 5000:
profile = kc.getProfile()
profile.displayName = string
kc.updateProfile(profile)
kc.sendText(msg.to,"Done")
elif "/cntc4" in msg.text:
if msg.from_ in Creator:
string = msg.text.replace("/cntc4","Mi TC4")
if len(string.decode('utf-8')) <= 5000:
profile = cl.getProfile()
profile.displayName = string
kr.updateProfile(profile)
kr.sendText(msg.to,"Done")
elif "Ulti " in msg.text:
if msg.from_ in Creator:
ulti0 = msg.text.replace("Ulti ","")
ulti1 = ulti0.rstrip()
ulti2 = ulti1.replace("@","")
ulti3 = ulti2.rstrip()
_name = ulti3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
km.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets ==[]:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
km.kickoutFromGroup(msg.to,[target])
km.leaveGroup(msg.to)
print (msg.to,[g.mid])
except:
km.sendText(msg.t,"Ter ELIMINASI....")
km.sendText(msg.to,"WOLES brooo....!!!")
km.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
gs.preventJoinByTicket(gs)
cl.updateGroup(gs)
elif msg.text.lower() in ["mymid","myid"]:
middd = "Name : " +cl.getContact(msg.from_).displayName + "\nMid : " +msg.from_
kr.sendText(msg.to,middd)
elif msg.text.lower() in ["me"]:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.from_}
cl.sendMessage(msg)
elif "/apakah " in msg.text:
apk = msg.text.replace("/apakah ","")
rnd = ["Ya","Tidak","Bisa Jadi","Mungkin"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/hari " in msg.text:
apk = msg.text.replace("/hari ","")
rnd = ["Senin","Selasa","Rabu","Kamis","Jumat","Sabtu","Minggu"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/berapa " in msg.text:
apk = msg.text.replace("/berapa ","")
rnd = ['10%','20%','30%','40%','50%','60%','70%','80%','90%','100%','0%']
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/berapakah " in msg.text:
apk = msg.text.replace("/berapakah ","")
rnd = ['1','2','3','4','5','6','7','8','9','10','Tidak Ada']
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "/kapan " in msg.text:
apk = msg.text.replace("/kapan ","")
rnd = ["kapan kapan","besok","satu abad lagi","Hari ini","Tahun depan","Minggu depan","Bulan depan","Sebentar lagi","Tidak Akan Pernah"]
p = random.choice(rnd)
lang = 'id'
tts = gTTS(text=p, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif msg.text in ["Simisimi on","Simisimi:on"]:
settings["simiSimi"][msg.to] = True
wait["Simi"] = True
cl.sendText(msg.to," Simisimi Di Aktifkan")
elif msg.text in ["Simisimi off","Simisimi:off"]:
settings["simiSimi"][msg.to] = False
wait["Simi"] = False
cl.sendText(msg.to,"Simisimi Di Nonaktifkan")
elif "Image " in msg.text:
search = msg.text.replace("Image ","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithURL(msg.to,path)
except:
pass
elif "Youtubesearch: " in msg.text:
query = msg.text.replace("Youtube ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html.parser')
hasil = ""
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
hasil += ''.join((a['title'],'\nUrl : http://www.youtube.com' + a['href'],'\n\n'))
cl.sendText(msg.to,hasil)
print '[Command] Youtube Search'
elif "Tr-id " in msg.text:
isi = msg.text.replace("Tr-id ","")
translator = Translator()
hasil = translator.translate(isi, dest='id')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-en " in msg.text:
isi = msg.text.replace("Tr-en ","")
translator = Translator()
hasil = translator.translate(isi, dest='en')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-th " in msg.text:
isi = msg.text.replace("Tr-th ","")
translator = Translator()
hasil = translator.translate(isi, dest='th')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Id@en" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'en'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Indonesia----\n" + "" + kata + "\n\n----Ke Inggris----\n" + "" + result)
elif "En@id" in msg.text:
bahasa_awal = 'en'
bahasa_tujuan = 'id'
kata = msg.text.replace("En@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Inggris----\n" + "" + kata + "\n\n----Ke Indonesia----\n" + "" + result)
elif "Id@th" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'th'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Indonesia----\n" + "" + kata + "\n\n----Ke Thailand----\n" + "" + result)
elif "Th@id" in msg.text:
bahasa_awal = 'th'
bahasa_tujuan = 'id'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----Dari Thailand----\n" + "" + kata + "\n\n----Ke Indonesia----\n" + "" + result)
elif msg.text in ["Friendlist"]:
contactlist = cl.getAllContactIds()
kontak = cl.getContacts(contactlist)
num=1
msgs="═════════List Friend═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Friend═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Memlist"]:
kontak = cl.getGroup(msg.to)
group = kontak.members
num=1
msgs="═════════List Member═�����═══════-"
for ids in group:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Member═════════\n\nTotal Members : %i" % len(group)
cl.sendText(msg.to, msgs)
elif msg.text in ["Spam"]:
if msg.from_ in admin:
cl.sendText(msg.to,"Aku belum mandi")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"Tapi masih cantik juga")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"apalagi kalau sudah mandi")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Pasti cantik sekali")
cl.sendText(msg.to,"yiha")
ki.sendText(msg.to,"Kalau orang lain melihatku")
kk.sendText(msg.to,"Tak tun tuang")
cl.sendText(msg.to,"Badak aku taba bana")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Tak tuntuang")
cl.sendText(msg.to,"Tapi kalau langsuang diidu")
ki.sendText(msg.to,"Tak tun tuang")
kk.sendText(msg.to,"Atagfirullah baunya")
cl.sendText(msg.to,"Males lanjutin ah")
ki.sendText(msg.to,"Sepi bat")
kk.sendText(msg.to,"Iya sepi udah udah")
cl.sendText(msg.to,"Gaada yang denger juga kita nyanyi")
ki.sendText(msg.to,"Nah")
kk.sendText(msg.to,"Mending gua makan dulu")
cl.sendText(msg.to,"Siyap")
ki.sendText(msg.to,"Okeh")
kk.sendText(msg.to,"Katanya owner kita Jomblo ya")
cl.sendText(msg.to,"Iya emang")
ki.sendText(msg.to,"Denger denger si lagi nyari pacar doi")
kk.sendText(msg.to,"Udah ah gosip mulu doain aja biar dapet")
elif "Getvid @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Getvid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendVideoWithURL(msg.to, path)
except Exception as e:
raise e
print "[Command]dp executed"
elif "Getgroup image" in msg.text:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendImageWithURL(msg.to,path)
elif "Urlgroup image" in msg.text:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendText(msg.to,path)
elif "Getname" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
except:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
elif "Getprofile" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
except:
pass
elif "Getcontact" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mmid = cl.getContact(key1)
msg.contentType = 13
msg.contentMetadata = {"mid": key1}
cl.sendMessage(msg)
elif "Getinfo" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nHeader :\n" + str(cu))
except:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\n" + str(cu))
elif "Getbio" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
except:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
elif msg.text.lower() == 'runtime':
eltime = time.time() - mulai
van = "Bot Sudah Berjalan Selama :\n"+waktu(eltime)
cl.sendText(msg.to,van)
elif "Checkdate " in msg.text:
tanggal = msg.text.replace("Checkdate ","")
r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
data=r.text
data=json.loads(data)
lahir = data["data"]["lahir"]
usia = data["data"]["usia"]
ultah = data["data"]["ultah"]
zodiak = data["data"]["zodiak"]
cl.sendText(msg.to,"========== I N F O R M A S I ==========\n"+"Date Of Birth : "+lahir+"\nAge : "+usia+"\nUltah : "+ultah+"\nZodiak : "+zodiak+"\n========== I N F O R M A S I ==========")
elif msg.text in ["Kalender","Time","Waktu"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
for k in range(0, len(bulan)):
if bln == str(k): bln = bulan[k-1]
rst = hasil + ", " + inihari.strftime('%d') + " - " + bln + " - " + inihari.strftime('%Y') + "\nJam : [ " + inihari.strftime('%H:%M:%S') + " ]"
cl.sendText(msg.to, rst)
elif "SearchID: " in msg.text:
userid = msg.text.replace("SearchID: ","")
contact = cl.findContactsByUserid(userid)
msg.contentType = 13
msg.contentMetadata = {'mid': contact.mid}
cl.sendMessage(msg)
elif "Searchid: " in msg.text:
userid = msg.text.replace("Searchid: ","")
contact = cl.findContactsByUserid(userid)
msg.contentType = 13
msg.contentMetadata = {'mid': contact.mid}
cl.sendMessage(msg)
elif "removechat" in msg.text.lower():
if msg.from_ in admin:
try:
cl.removeAllMessages(op.param2)
ki.removeAllMessages(op.param2)
kk.removeAllMessages(op.param2)
kc.removeAllMessages(op.param2)
kr.removeAllMessages(op.param2)
print "[Command] Remove Chat"
cl.sendText(msg.to,"Done")
except Exception as error:
print error
cl.sendText(msg.to,"Error")
elif "Invitemeto: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("Invitemeto: ","")
if gid == "":
cl.sendText(msg.to,"Invalid group id")
else:
try:
cl.findAndAddContactsByMid(msg.from_)
ki.findAndAddContactsByMid(msg.from_)
kk.findAndAddContactsByMid(msg.from_)
kc.findAndAddContactsByMid(msg.from_)
kr.findAndAddContactsByMid(msg.from_)
random.choice(KAC).inviteIntoGroup(gid,[msg.from_])
except:
cl.sendText(msg.to,"Mungkin Saya Tidak Di Dalaam Grup Itu")
elif msg.text in ["Glist"]:
cl.sendText(msg.to, "Tunggu Sebentar. . .")
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "╠➩" + "%s\n" % (cl.getGroup(i).name +" ~> ["+str(len(cl.getGroup(i).members))+"]")
cl.sendText(msg.to,"╔═════════════════════════\n║ ☆☞ LIST GROUPS☜☆\n╠═════════════════════════\n" + h + "╠═════════════════════════" + "\n║ Total Groups =" +" ["+str(len(gid))+"]\n╚═════════════════════════")
elif msg.text in ["Glistmid"]:
gruplist = kr.getGroupIdsJoined()
kontak = kr.getGroups(gruplist)
num=1
msgs="═════════List GrupMid═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.id)
num=(num+1)
msgs+="\n═════════List GrupMid═════════\n\nTotal Grup : %i" % len(kontak)
kr.sendText(msg.to, msgs)
elif "Google: " in msg.text:
a = msg.text.replace("Google: ","")
b = urllib.quote(a)
cl.sendText(msg.to,"Sedang Mencari...")
cl.sendText(msg.to, "https://www.google.com/" + b)
cl.sendText(msg.to,"Itu Dia Linknya. . .")
elif "Details group: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("Details group: ","")
if gid in [""," "]:
cl.sendText(msg.to,"Grup id tidak valid")
else:
try:
groups = cl.getGroup(gid)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
h = "[" + groups.name + "]\n -+GroupID : " + gid + "\n -+Members : " + members + "\n -+MembersPending : " + pendings + "\n -+Creator : " + groups.creator.displayName + "\n -+GroupPicture : http://dl.profile.line.naver.jp/" + groups.pictureStatus
cl.sendText(msg.to,h)
except Exception as error:
cl.sendText(msg.to,(error))
elif "Cancel invite: " in msg.text:
if msg.from_ in admin:
gids = msg.text.replace("Cancel invite: ","")
gid = cl.getGroup(gids)
for i in gid:
if i is not None:
try:
cl.rejectGroupInvitation(i)
except:
cl.sendText(msg.to,"Error!")
break
else:
break
if gid is not None:
cl.sendText(msg.to,"Berhasil tolak undangan dari grup " + gid.name)
else:
cl.sendText(msg.to,"Grup tidak ditemukan")
elif msg.text in ["Kapten acc invite"]:
if msg.from_ in admin:
gid = cl.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = cl.getGroup(i)
_list += gids.name
cl.acceptGroupInvitation(i)
else:
break
if gid is not None:
cl.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
cl.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC1 acc invite"]:
if msg.from_ in admin:
gid = ki.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = ki.getGroup(i)
_list += gids.name
ki.acceptGroupInvitation(i)
else:
break
if gid is not None:
ki.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
ki.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC2 acc invite"]:
if msg.from_ in admin:
gid = kk.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kk.getGroup(i)
_list += gids.name
kk.acceptGroupInvitation(i)
else:
break
if gid is not None:
kk.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kk.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC3 acc invite"]:
if msg.from_ in admin:
gid = kc.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kc.getGroup(i)
_list += gids.name
kc.acceptGroupInvitation(i)
else:
break
if gid is not None:
kc.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kc.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif msg.text in ["TC4 acc invite"]:
if msg.from_ in admin:
gid = kr.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = kr.getGroup(i)
_list += gids.name
kr.acceptGroupInvitation(i)
else:
break
if gid is not None:
kr.sendText(msg.to,"Berhasil terima semua undangan dari grup :\n" + _list)
else:
kr.sendText(msg.to,"Tidak ada grup yang tertunda saat ini")
elif "Gif gore" in msg.text:
gif = ("https://media.giphy.com/media/l2JHVsQiOZrNMGzYs/giphy.gif","https://media.giphy.com/media/OgltQ2hbilzJS/200w.gif")
gore = random.choice(gif)
cl.sendGifWithURL(msg.to,gore)
if op.type == 59:
print op
except Exception as error:
print error
# Main long-poll loop: fetch up to 5 pending LINE operations per round and
# dispatch each to bot(); the highest seen revision is recorded so the next
# fetchOps call resumes where the previous one left off.
while True:
    try:
        Ops = cl.fetchOps(cl.Poll.rev, 5)
    except EOFError:
        # fetchOps raised EOFError -- presumably a stale/invalid stored
        # revision; surface it with the revision for debugging.
        raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))

    for Op in Ops:
        if (Op.type != OpType.END_OF_OPERATION):
            # Advance the poll cursor before handling the operation.
            cl.Poll.rev = max(cl.Poll.rev, Op.revision)
            bot(Op)
| false | true |
79000d6311ac146b82125d572258b1a5fc5a4487 | 4,283 | py | Python | selenium_test.py | sr-comp/Divar | dbbb6aa6b5cbbca25e17b3b16c2b3d57d2e455f0 | [
"Apache-2.0"
] | null | null | null | selenium_test.py | sr-comp/Divar | dbbb6aa6b5cbbca25e17b3b16c2b3d57d2e455f0 | [
"Apache-2.0"
] | null | null | null | selenium_test.py | sr-comp/Divar | dbbb6aa6b5cbbca25e17b3b16c2b3d57d2e455f0 | [
"Apache-2.0"
] | null | null | null | from openpyxl.workbook import Workbook
from scrapy import Selector
from selenium import webdriver
from selenium.webdriver.common.by import By
from webdriver_manager.chrome import ChromeDriverManager
import time
from csv import writer
driver_path = 'D:\\Application\\installers\\ChromeDriver\\chromedriver.exe'
def append_list_as_row(file_name, list_of_elem):
    """Append *list_of_elem* to *file_name* as one CSV row.

    The file is opened in append mode with UTF-8 encoding, so repeated
    calls accumulate rows without clobbering earlier ones.
    """
    with open(file_name, 'a+', newline='', encoding="utf-8") as out_file:
        writer(out_file).writerow(list_of_elem)
def parse_house(link):
    """Scrape one Divar listing page and append its fields to the CSV.

    Opens a dedicated Chrome driver for *link*, extracts the listing's
    title, address, specs and price fields from the page markup, and
    writes them as one row to 'Tehran House Data.csv'.

    Fix vs. original: the driver is now closed in a ``finally`` block, so
    a failing CSS selector no longer leaks a Chrome process.
    """
    driver2 = webdriver.Chrome(executable_path=driver_path)
    try:
        driver2.maximize_window()
        driver2.get(link)
        response2 = Selector(text=driver2.page_source)
        # Positional indexing assumes Divar renders these value rows in a
        # fixed order (area/year/rooms, price/price-per-meter,
        # elevator/parking/warehouse) -- TODO confirm against the live page.
        house_info = [
            response2.css('.kt-page-title h1::text').get(),
            response2.css('.kt-page-title__subtitle.kt-page-title__subtitle--responsive-sized::text').get(),
            response2.css('.kt-group-row-item__value::text').get(),
            response2.css('.kt-group-row-item__value::text')[1].get(),
            response2.css('.kt-group-row-item__value::text')[2].get(),
            response2.css('.kt-unexpandable-row__value::text').get(),
            response2.css('.kt-unexpandable-row__value::text')[1].get(),
            response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[0].get(),
            response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[1].get(),
            response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[2].get(),
            response2.css('.time::text').get(),
        ]
        append_list_as_row('Tehran House Data.csv', house_info)
    finally:
        driver2.quit()
def parse_neighborhood(link):
    """Scroll a Divar neighborhood listing page and scrape every card.

    Scrolls to the bottom eight times to trigger lazy loading, then calls
    :func:`parse_house` for every listing card found on the page.

    Fixes vs. original: the driver is closed in a ``finally`` block (no
    Chrome leak when a card fails), and the inner loop no longer shadows
    the *link* parameter.
    """
    driver1 = webdriver.Chrome(executable_path=driver_path)
    try:
        driver1.maximize_window()
        driver1.get(link)
        # Trigger the page's infinite scroll so more cards are loaded.
        for _ in range(8):
            driver1.execute_script("window.scrollTo(0, document.body.scrollHeight)")
            time.sleep(1)
        response1 = Selector(text=driver1.page_source)
        for card in response1.css('div.post-card-item.kt-col-6.kt-col-xxl-4'):
            href = card.css('a').attrib['href']
            parse_house("https://divar.ir" + href)
            time.sleep(1)  # be polite: throttle per-listing requests
    finally:
        driver1.quit()
def parse():
    """Collect district links from Divar's Tehran filter dialog, then
    scrape each district's listings.

    Opens the buy-apartment search page, opens the district-filter dialog,
    scrolls its internal list in fixed steps to harvest every district
    link, then calls :func:`parse_neighborhood` for each unique link.

    Fix vs. original: the driver is now closed in a ``finally`` block
    before the per-neighborhood scraping starts (the original never quit
    this driver at all).
    """
    driver = webdriver.Chrome(executable_path=driver_path)
    try:
        driver.maximize_window()
        driver.get("https://divar.ir/s/tehran/buy-apartment")
        driver.implicitly_wait(5)
        # Open the district-selection dialog via its (brittle) XPaths.
        driver.find_element(By.XPATH, "/html/body/div[1]/div[2]/aside/div/div[1]/div[2]/div[1]").click()
        driver.find_element(By.XPATH, "/html/body/div[1]/div[2]/aside/div/div[1]/div[2]/div[2]/div/button").click()
        component = driver.find_element(By.XPATH, "/html/body/div[2]/div/article/div/div/div/div")

        neighborhoods = []
        subtitles = []
        links = []
        # Scroll the dialog's inner list in 650px steps to load all rows.
        for offset in range(0, 29280, 650):
            driver.execute_script(f"arguments[0].scrollTop = {offset}", component)
            response = Selector(text=driver.page_source)
            for row in response.css('div.kt-control-row.kt-control-row--large.kt-control-row--clickable'):
                neighborhoods.append(row.css('.kt-control-row__title::text').get())
                subtitles.append(row.css('.kt-base-row__description.kt-body--sm::text').get())
                links.append(row.css('.kt-control-row__title').attrib['href'])
        print(type(links))
    finally:
        driver.quit()

    # NOTE(review): counter is incremented *before* the check, so the first
    # four unique links are skipped; kept as-is to preserve behavior.
    counter = 1
    for element in set(links):
        counter += 1
        if counter <= 5:
            continue
        parse_neighborhood("https://divar.ir" + element)
parse()
| 35.691667 | 111 | 0.676162 | from openpyxl.workbook import Workbook
from scrapy import Selector
from selenium import webdriver
from selenium.webdriver.common.by import By
from webdriver_manager.chrome import ChromeDriverManager
import time
from csv import writer
driver_path = 'D:\\Application\\installers\\ChromeDriver\\chromedriver.exe'
def append_list_as_row(file_name, list_of_elem):
with open(file_name, 'a+', newline='', encoding="utf-8") as write_obj:
csv_writer = writer(write_obj)
csv_writer.writerow(list_of_elem)
def parse_house(link):
driver2 = webdriver.Chrome(executable_path=driver_path)
driver2.maximize_window()
driver2.get(link)
house_info = []
page_source = driver2.page_source
response2 = Selector(text=page_source)
title = response2.css('.kt-page-title h1::text').get()
address = response2.css('.kt-page-title__subtitle.kt-page-title__subtitle--responsive-sized::text').get()
area = response2.css('.kt-group-row-item__value::text').get()
year = response2.css('.kt-group-row-item__value::text')[1].get()
rooms = response2.css('.kt-group-row-item__value::text')[2].get()
price = response2.css('.kt-unexpandable-row__value::text').get()
price_per_meter = response2.css('.kt-unexpandable-row__value::text')[1].get()
elevator = response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[0].get()
parking = response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[1].get()
warehouse = response2.css('span.kt-group-row-item__value.kt-body.kt-body--stable::text')[2].get()
date = response2.css('.time::text').get()
house_info.append(title)
house_info.append(address)
house_info.append(area)
house_info.append(year)
house_info.append(rooms)
house_info.append(price)
house_info.append(price_per_meter)
house_info.append(elevator)
house_info.append(parking)
house_info.append(warehouse)
house_info.append(date)
append_list_as_row('Tehran House Data.csv', house_info)
driver2.quit()
def parse_neighborhood(link):
driver1 = webdriver.Chrome(executable_path=driver_path)
driver1.maximize_window()
driver1.get(link)
for i in range(8):
driver1.execute_script("window.scrollTo(0, document.body.scrollHeight)")
time.sleep(1)
sel = driver1.page_source
response1 = Selector(text=sel)
for cards in response1.css('div.post-card-item.kt-col-6.kt-col-xxl-4'):
link = cards.css('a').attrib['href']
house_link = "https://divar.ir" + link
parse_house(house_link)
time.sleep(1)
driver1.quit()
def parse():
driver = webdriver.Chrome(executable_path=driver_path)
driver.maximize_window()
driver.get("https://divar.ir/s/tehran/buy-apartment")
driver.implicitly_wait(5)
driver.find_element(By.XPATH, "/html/body/div[1]/div[2]/aside/div/div[1]/div[2]/div[1]").click()
driver.find_element(By.XPATH, "/html/body/div[1]/div[2]/aside/div/div[1]/div[2]/div[2]/div/button").click()
component = driver.find_element(By.XPATH, "/html/body/div[2]/div/article/div/div/div/div")
neighborhoods = []
subtitles = []
links = []
for number in range(0, 29280, 650):
driver.execute_script(f"arguments[0].scrollTop = {number}", component)
sel = driver.page_source
response = Selector(text=sel)
for part in response.css('div.kt-control-row.kt-control-row--large.kt-control-row--clickable'):
neighborhood = part.css('.kt-control-row__title::text').get()
neighborhoods.append(neighborhood)
subtitle = part.css('.kt-base-row__description.kt-body--sm::text').get()
subtitles.append(subtitle)
link = part.css('.kt-control-row__title').attrib['href']
links.append(link)
print(type(links))
counter = 1
set_links = set(links)
for element in set_links:
counter += 1
if counter <= 5:
continue
neighborhood_link = "https://divar.ir" + element
parse_neighborhood(neighborhood_link)
parse()
| true | true |
79000d9eca5ada02eb7a74c14851876d049c94a2 | 26,196 | py | Python | debian/python-nova/usr/lib/python2.7/dist-packages/nova/tests/api/openstack/compute/test_extensions.py | bopopescu/stacklab-nova | 4ab1698659b663ef222255610d1a5c042706dd65 | [
"Apache-2.0"
] | null | null | null | debian/python-nova/usr/lib/python2.7/dist-packages/nova/tests/api/openstack/compute/test_extensions.py | bopopescu/stacklab-nova | 4ab1698659b663ef222255610d1a5c042706dd65 | [
"Apache-2.0"
] | null | null | null | debian/python-nova/usr/lib/python2.7/dist-packages/nova/tests/api/openstack/compute/test_extensions.py | bopopescu/stacklab-nova | 4ab1698659b663ef222255610d1a5c042706dd65 | [
"Apache-2.0"
] | 1 | 2020-07-24T08:31:57.000Z | 2020-07-24T08:31:57.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import iso8601
from lxml import etree
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import extensions as compute_extensions
from nova.api.openstack import extensions as base_extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import flags
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
FLAGS = flags.FLAGS
NS = "{http://docs.openstack.org/common/api/v1.0}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
response_body = "Try to say this Mr. Knox, sir..."
extension_body = "I am not a fox!"
class StubController(object):
    """Minimal controller stub: index() echoes a canned body, create()
    always rejects the request, show() always reports not-found."""

    def __init__(self, body):
        self.body = body

    def index(self, req):
        # The request is ignored; only the canned payload matters.
        return self.body

    def create(self, req):
        raise webob.exc.HTTPBadRequest(
            explanation='All aboard the fail train!')

    def show(self, req, id):
        raise webob.exc.HTTPNotFound()
class StubActionController(wsgi.Controller):
    """Controller stub exposing one server action ('fooAction') that
    replies with a canned body."""

    def __init__(self, body):
        self.body = body

    @wsgi.action('fooAction')
    def _action_foo(self, req, id, body):
        # Handle the 'fooAction' server action by returning the canned body.
        return self.body
class StubControllerExtension(base_extensions.ExtensionDescriptor):
    """Minimal extension descriptor used when attaching controller
    extensions in these tests."""
    name = 'twaadle'

    def __init__(self):
        # Deliberately skips ExtensionDescriptor.__init__ -- presumably to
        # avoid its registration side effects; confirm against the base class.
        pass
class StubEarlyExtensionController(wsgi.Controller):
    """'Early' extension controller: its handlers are generators (they
    ``yield`` the body) -- contrast with StubLateExtensionController,
    whose handlers receive the already-built response object."""

    def __init__(self, body):
        self.body = body

    @wsgi.extends
    def index(self, req):
        # Generator form: yields the canned body for the extended index().
        yield self.body

    @wsgi.extends(action='fooAction')
    def _action_foo(self, req, id, body):
        yield self.body
class StubLateExtensionController(wsgi.Controller):
    """'Late' extension controller: its handlers take the core handler's
    ``resp_obj`` as a parameter and return the canned body."""

    def __init__(self, body):
        self.body = body

    @wsgi.extends
    def index(self, req, resp_obj):
        # Receives the response produced by the core index() handler.
        return self.body

    @wsgi.extends(action='fooAction')
    def _action_foo(self, req, resp_obj, id, body):
        return self.body
class StubExtensionManager(object):
    """Provides access to Tweedle Beetles.

    Fake extension manager holding at most one extension of each kind
    (resource, action, request, controller) plus an optional extra
    resource extension, each returned as a list by the matching getter.
    """
    name = "Tweedle Beetle Extension"
    alias = "TWDLBETL"

    def __init__(self, resource_ext=None, action_ext=None, request_ext=None,
                 controller_ext=None):
        self.resource_ext = resource_ext
        self.action_ext = action_ext
        self.request_ext = request_ext
        self.controller_ext = controller_ext
        self.extra_resource_ext = None

    def get_resources(self):
        # Primary first, then the optional extra resource extension.
        return [ext for ext in (self.resource_ext, self.extra_resource_ext)
                if ext]

    def get_actions(self):
        return [self.action_ext] if self.action_ext else []

    def get_request_extensions(self):
        return [self.request_ext] if self.request_ext else []

    def get_controller_extensions(self):
        return [self.controller_ext] if self.controller_ext else []
class ExtensionTestCase(test.TestCase):
    """Base test case ensuring the Fox In Socks sample extension is
    enabled via the osapi_compute_extension flag."""

    def setUp(self):
        super(ExtensionTestCase, self).setUp()
        # Copy the configured list so the flag default is not mutated.
        ext_list = FLAGS.osapi_compute_extension[:]
        fox = ('nova.tests.api.openstack.compute.extensions.'
               'foxinsocks.Foxinsocks')
        if fox not in ext_list:
            ext_list.append(fox)
        self.flags(osapi_compute_extension=ext_list)
class ExtensionControllerTest(ExtensionTestCase):
    """Tests listing and fetching /extensions as both JSON and XML."""

    def setUp(self):
        super(ExtensionControllerTest, self).setUp()
        # Names that must be present in the listing; additional loaded
        # extensions are tolerated by the assertions below.
        self.ext_list = [
            "AdminActions",
            "Aggregates",
            "AvailabilityZone",
            "Certificates",
            "Cloudpipe",
            "ConsoleOutput",
            "Consoles",
            "Createserverext",
            "DeferredDelete",
            "DiskConfig",
            "ExtendedStatus",
            "ExtendedServerAttributes",
            "FlavorAccess",
            "FlavorDisabled",
            "FlavorExtraSpecs",
            "FlavorExtraData",
            "FlavorManage",
            "FlavorRxtx",
            "FlavorSwap",
            "FloatingIps",
            "FloatingIpDns",
            "FloatingIpPools",
            "Fox In Socks",
            "Hosts",
            "Keypairs",
            "Multinic",
            "MultipleCreate",
            "Networks",
            "QuotaClasses",
            "Quotas",
            "Rescue",
            "SchedulerHints",
            "SecurityGroups",
            "ServerDiagnostics",
            "ServerStartStop",
            "SimpleTenantUsage",
            "UsedLimits",
            "UserData",
            "VirtualInterfaces",
            "Volumes",
            "VolumeTypes",
            ]
        self.ext_list.sort()

    def test_list_extensions_json(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)

        # Make sure we have all the extensions, extra extensions being OK.
        data = jsonutils.loads(response.body)
        names = [str(x['name']) for x in data['extensions']
                 if str(x['name']) in self.ext_list]
        names.sort()
        self.assertEqual(names, self.ext_list)

        # Ensure all the timestamps are valid according to iso8601
        for ext in data['extensions']:
            iso8601.parse_date(ext['updated'])

        # Make sure that at least Fox in Sox is correct.
        (fox_ext, ) = [
            x for x in data['extensions'] if x['alias'] == 'FOXNSOX']
        self.assertEqual(fox_ext, {
                'namespace': 'http://www.fox.in.socks/api/ext/pie/v1.0',
                'name': 'Fox In Socks',
                'updated': '2011-01-22T13:25:27-06:00',
                'description': 'The Fox In Socks Extension',
                'alias': 'FOXNSOX',
                'links': []
            },
        )

        # Each listed alias must be retrievable individually.
        for ext in data['extensions']:
            url = '/fake/extensions/%s' % ext['alias']
            request = webob.Request.blank(url)
            response = request.get_response(app)
            output = jsonutils.loads(response.body)
            self.assertEqual(output['extension']['alias'], ext['alias'])

    def test_get_extension_json(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/FOXNSOX")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)

        data = jsonutils.loads(response.body)
        self.assertEqual(data['extension'], {
                "namespace": "http://www.fox.in.socks/api/ext/pie/v1.0",
                "name": "Fox In Socks",
                "updated": "2011-01-22T13:25:27-06:00",
                "description": "The Fox In Socks Extension",
                "alias": "FOXNSOX",
                "links": []})

    def test_get_non_existing_extension_json(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/4")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)

    def test_list_extensions_xml(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions")
        request.accept = "application/xml"
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)

        root = etree.XML(response.body)
        self.assertEqual(root.tag.split('extensions')[0], NS)

        # Make sure we have all the extensions, extras extensions being OK.
        exts = root.findall('{0}extension'.format(NS))
        self.assert_(len(exts) >= len(self.ext_list))

        # Make sure that at least Fox in Sox is correct.
        (fox_ext, ) = [x for x in exts if x.get('alias') == 'FOXNSOX']
        self.assertEqual(fox_ext.get('name'), 'Fox In Socks')
        self.assertEqual(fox_ext.get('namespace'),
            'http://www.fox.in.socks/api/ext/pie/v1.0')
        self.assertEqual(fox_ext.get('updated'), '2011-01-22T13:25:27-06:00')
        self.assertEqual(fox_ext.findtext('{0}description'.format(NS)),
            'The Fox In Socks Extension')

        xmlutil.validate_schema(root, 'extensions')

    def test_get_extension_xml(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/FOXNSOX")
        request.accept = "application/xml"
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        xml = response.body

        root = etree.XML(xml)
        self.assertEqual(root.tag.split('extension')[0], NS)
        self.assertEqual(root.get('alias'), 'FOXNSOX')
        self.assertEqual(root.get('name'), 'Fox In Socks')
        self.assertEqual(root.get('namespace'),
            'http://www.fox.in.socks/api/ext/pie/v1.0')
        self.assertEqual(root.get('updated'), '2011-01-22T13:25:27-06:00')
        self.assertEqual(root.findtext('{0}description'.format(NS)),
            'The Fox In Socks Extension')

        xmlutil.validate_schema(root, 'extension')
class ResourceExtensionTest(ExtensionTestCase):
    """Tests routing to resources contributed by extensions."""

    def test_no_extension_present(self):
        manager = StubExtensionManager(None)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/blah")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)

    def test_get_resources(self):
        res_ext = base_extensions.ResourceExtension('tweedles',
                                            StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(response_body, response.body)

    def test_get_resources_with_controller(self):
        res_ext = base_extensions.ResourceExtension('tweedles',
                                            StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(response_body, response.body)

    def test_bad_request(self):
        # StubController.create raises HTTPBadRequest; expect a faulted 400.
        res_ext = base_extensions.ResourceExtension('tweedles',
                                            StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        request.method = "POST"
        response = request.get_response(app)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "All aboard the fail train!",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)

    def test_non_exist_resource(self):
        # StubController.show raises HTTPNotFound; expect a faulted 404.
        res_ext = base_extensions.ResourceExtension('tweedles',
                                            StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/1")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "itemNotFound": {
                "message": "The resource could not be found.",
                "code": 404
            }
        }
        self.assertDictMatch(expected, body)
class InvalidExtension(object):
    """Extension stub carrying only an alias; used to verify that the
    extension manager refuses to load it (see test_invalid_extensions)."""

    alias = "THIRD"
class ExtensionManagerTest(ExtensionTestCase):
    """Covers loading valid extensions and rejecting invalid ones."""

    response_body = "Try to say this Mr. Knox, sir..."

    def test_get_resources(self):
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/foxnsocks")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(response_body, response.body)

    def test_invalid_extensions(self):
        # Don't need the serialization middleware here because we're
        # not testing any serialization
        app = compute.APIRouter()
        ext_mgr = compute_extensions.ExtensionManager()
        ext_mgr.register(InvalidExtension())
        # The valid sample extension loads; the invalid stub must not.
        self.assertTrue(ext_mgr.is_loaded('FOXNSOX'))
        self.assertFalse(ext_mgr.is_loaded('THIRD'))
class ActionExtensionTest(ExtensionTestCase):
    """Tests server actions contributed by extensions."""

    def _send_server_action_request(self, url, body):
        # Helper: POST *body* as a JSON action document and return the
        # raw WSGI response.
        app = compute.APIRouter()
        request = webob.Request.blank(url)
        request.method = 'POST'
        request.content_type = 'application/json'
        request.body = jsonutils.dumps(body)
        response = request.get_response(app)
        return response

    def test_extended_action(self):
        body = dict(add_tweedle=dict(name="test"))
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(200, response.status_int)
        self.assertEqual("Tweedle Beetle Added.", response.body)

        body = dict(delete_tweedle=dict(name="test"))
        response = self._send_server_action_request(url, body)
        self.assertEqual(200, response.status_int)
        self.assertEqual("Tweedle Beetle Deleted.", response.body)

    def test_invalid_action(self):
        body = dict(blah=dict(name="test"))  # Doesn't exist
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "There is no such action: blah",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)

    def test_non_exist_action(self):
        body = dict(blah=dict(name="test"))
        url = "/fake/fdsa/1/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(404, response.status_int)

    def test_failed_action(self):
        body = dict(fail=dict(name="test"))
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "Tweedle fail",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)
class RequestExtensionTest(ExtensionTestCase):
    """Tests extensions that decorate responses of core resources."""

    def test_get_resources_with_stub_mgr(self):

        class GooGoose(wsgi.Controller):
            @wsgi.extends
            def show(self, req, resp_obj, id):
                # only handle JSON responses
                resp_obj.obj['flavor']['googoose'] = req.GET.get('chewing')

        req_ext = base_extensions.ControllerExtension(
            StubControllerExtension(), 'flavors', GooGoose())
        manager = StubExtensionManager(None, None, None, req_ext)
        app = fakes.wsgi_app(ext_mgr=manager)
        request = webob.Request.blank("/v2/fake/flavors/1?chewing=bluegoo")
        request.environ['api.version'] = '2'
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        response_data = jsonutils.loads(response.body)
        self.assertEqual('bluegoo', response_data['flavor']['googoose'])

    def test_get_resources_with_mgr(self):
        # Same check, but through the real extension manager.
        app = fakes.wsgi_app()
        request = webob.Request.blank("/v2/fake/flavors/1?chewing=newblue")
        request.environ['api.version'] = '2'
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        response_data = jsonutils.loads(response.body)
        self.assertEqual('newblue', response_data['flavor']['googoose'])
        self.assertEqual("Pig Bands!", response_data['big_bands'])
class ControllerExtensionTest(ExtensionTestCase):
    """Tests that controller extensions can wrap core and action handlers."""
    def test_controller_extension_early(self):
        """An early extension's yielded value becomes the index() response."""
        controller = StubController(response_body)
        res_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubEarlyExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_extension_late(self):
        """A late extension's return value replaces the index() response."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubController(dict(foo=response_body))
        res_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_extension_late_inherited_resource(self):
        """Extensions on a parent resource also apply to inheriting ones."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubController(dict(foo=response_body))
        parent_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=parent_ext,
                                       controller_ext=cont_ext)
        child_ext = base_extensions.ResourceExtension('beetles', controller,
                                                      inherits='tweedles')
        manager.extra_resource_ext = child_ext
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/beetles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_action_extension_early(self):
        """An early extension pre-empts a member action handler."""
        controller = StubActionController(response_body)
        actions = dict(action='POST')
        res_ext = base_extensions.ResourceExtension('tweedles', controller,
                                                    member_actions=actions)
        ext_controller = StubEarlyExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/foo/action")
        request.method = 'POST'
        request.headers['Content-Type'] = 'application/json'
        request.body = jsonutils.dumps(dict(fooAction=True))
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_action_extension_late(self):
        """A late extension rewrites a member action response."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubActionController(dict(foo=response_body))
        actions = dict(action='POST')
        res_ext = base_extensions.ResourceExtension('tweedles', controller,
                                                    member_actions=actions)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/foo/action")
        request.method = 'POST'
        request.headers['Content-Type'] = 'application/json'
        request.body = jsonutils.dumps(dict(fooAction=True))
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
class ExtensionsXMLSerializerTest(test.TestCase):
    """Tests for the XML templates that serialize extension metadata."""
    def test_serialize_extension(self):
        """A single extension serializes with all attributes and atom links."""
        serializer = base_extensions.ExtensionTemplate()
        data = {'extension': {
            'name': 'ext1',
            'namespace': 'http://docs.rack.com/servers/api/ext/pie/v1.0',
            'alias': 'RS-PIE',
            'updated': '2011-01-22T13:25:27-06:00',
            'description': 'Adds the capability to share an image.',
            'links': [{'rel': 'describedby',
                       'type': 'application/pdf',
                       'href': 'http://docs.rack.com/servers/api/ext/cs.pdf'},
                      {'rel': 'describedby',
                       'type': 'application/vnd.sun.wadl+xml',
                       'href': 'http://docs.rack.com/servers/api/ext/cs.wadl'}]}}
        xml = serializer.serialize(data)
        root = etree.XML(xml)
        ext_dict = data['extension']
        self.assertEqual(root.findtext('{0}description'.format(NS)),
                         ext_dict['description'])
        for key in ['name', 'namespace', 'alias', 'updated']:
            self.assertEqual(root.get(key), ext_dict[key])
        link_nodes = root.findall('{0}link'.format(ATOMNS))
        self.assertEqual(len(link_nodes), 2)
        for i, link in enumerate(ext_dict['links']):
            for key, value in link.items():
                self.assertEqual(link_nodes[i].get(key), value)
        xmlutil.validate_schema(root, 'extension')
    def test_serialize_extensions(self):
        """A list of extensions serializes each entry with its links."""
        serializer = base_extensions.ExtensionsTemplate()
        # FIX: the first link of the first extension previously declared
        # "type" twice ('application/pdf' immediately overwritten by the
        # wadl type); the dead duplicate key is removed. The effective
        # dict value -- and hence the expected output -- is unchanged.
        data = {"extensions": [
            {"name": "Public Image Extension",
             "namespace": "http://foo.com/api/ext/pie/v1.0",
             "alias": "RS-PIE",
             "updated": "2011-01-22T13:25:27-06:00",
             "description": "Adds the capability to share an image.",
             "links": [{"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-pie.pdf"},
                       {"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-pie.wadl"}]},
            {"name": "Cloud Block Storage",
             "namespace": "http://foo.com/api/ext/cbs/v1.0",
             "alias": "RS-CBS",
             "updated": "2011-01-12T11:22:33-06:00",
             "description": "Allows mounting cloud block storage.",
             "links": [{"rel": "describedby",
                        "type": "application/pdf",
                        "href": "http://foo.com/api/ext/cs-cbs.pdf"},
                       {"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-cbs.wadl"}]}]}
        xml = serializer.serialize(data)
        root = etree.XML(xml)
        ext_elems = root.findall('{0}extension'.format(NS))
        self.assertEqual(len(ext_elems), 2)
        for i, ext_elem in enumerate(ext_elems):
            ext_dict = data['extensions'][i]
            self.assertEqual(ext_elem.findtext('{0}description'.format(NS)),
                             ext_dict['description'])
            for key in ['name', 'namespace', 'alias', 'updated']:
                self.assertEqual(ext_elem.get(key), ext_dict[key])
            link_nodes = ext_elem.findall('{0}link'.format(ATOMNS))
            self.assertEqual(len(link_nodes), 2)
            # 'j' instead of 'i' so the outer loop index is not shadowed.
            for j, link in enumerate(ext_dict['links']):
                for key, value in link.items():
                    self.assertEqual(link_nodes[j].get(key), value)
        xmlutil.validate_schema(root, 'extensions')
| 39.098507 | 79 | 0.612193 |
import iso8601
from lxml import etree
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import extensions as compute_extensions
from nova.api.openstack import extensions as base_extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import flags
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
FLAGS = flags.FLAGS
NS = "{http://docs.openstack.org/common/api/v1.0}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
response_body = "Try to say this Mr. Knox, sir..."
extension_body = "I am not a fox!"
class StubController(object):
    """Minimal resource controller used as a test double.

    ``index`` echoes a canned body, ``create`` always rejects the request
    with HTTP 400 and ``show`` always reports HTTP 404.
    """

    def __init__(self, body):
        # Canned payload handed back verbatim by index().
        self.body = body

    def index(self, req):
        """Return the canned response body."""
        return self.body

    def create(self, req):
        """Always fail with a 400 carrying a fixed explanation."""
        raise webob.exc.HTTPBadRequest(
            explanation='All aboard the fail train!')

    def show(self, req, id):
        """Always report the requested item as missing (404)."""
        raise webob.exc.HTTPNotFound()
class StubActionController(wsgi.Controller):
    """Controller exposing a single server action ('fooAction') for tests."""
    def __init__(self, body):
        # Canned payload returned by the fooAction handler.
        self.body = body
    @wsgi.action('fooAction')
    def _action_foo(self, req, id, body):
        """Handle the 'fooAction' server action by echoing the canned body."""
        return self.body
class StubControllerExtension(base_extensions.ExtensionDescriptor):
    """Bare extension descriptor used when registering test extensions."""
    name = 'twaadle'
    def __init__(self):
        # Deliberately skip ExtensionDescriptor.__init__: no manager needed.
        pass
class StubEarlyExtensionController(wsgi.Controller):
    """Extension controller whose hooks run around the core method.

    The @wsgi.extends generators yield a canned body; per the tests in
    this file that yielded value ends up as the response body.
    """
    def __init__(self, body):
        # Canned payload yielded by the extension hooks.
        self.body = body
    @wsgi.extends
    def index(self, req):
        """Yield the canned body for the core index()."""
        yield self.body
    @wsgi.extends(action='fooAction')
    def _action_foo(self, req, id, body):
        """Yield the canned body for the core 'fooAction' handler."""
        yield self.body
class StubLateExtensionController(wsgi.Controller):
    """Extension controller whose hooks receive the built response.

    Late hooks get the already-built ``resp_obj``; per the tests in this
    file their return value ends up as the response body.
    """
    def __init__(self, body):
        # Canned payload returned by the extension hooks.
        self.body = body
    @wsgi.extends
    def index(self, req, resp_obj):
        """Replace the core index() response with the canned body."""
        return self.body
    @wsgi.extends(action='fooAction')
    def _action_foo(self, req, resp_obj, id, body):
        """Replace the core 'fooAction' response with the canned body."""
        return self.body
class StubExtensionManager(object):
    """Hand-rolled extension manager double returning fixed extension lists."""

    name = "Tweedle Beetle Extension"
    alias = "TWDLBETL"

    def __init__(self, resource_ext=None, action_ext=None, request_ext=None,
                 controller_ext=None):
        self.resource_ext = resource_ext
        self.action_ext = action_ext
        self.request_ext = request_ext
        self.controller_ext = controller_ext
        # Optional second resource extension, assigned directly by tests.
        self.extra_resource_ext = None

    def get_resources(self):
        """Return the configured resource extensions (possibly empty)."""
        return [ext for ext in (self.resource_ext, self.extra_resource_ext)
                if ext]

    def get_actions(self):
        """Return the configured action extensions (possibly empty)."""
        return [self.action_ext] if self.action_ext else []

    def get_request_extensions(self):
        """Return the configured request extensions (possibly empty)."""
        return [self.request_ext] if self.request_ext else []

    def get_controller_extensions(self):
        """Return the configured controller extensions (possibly empty)."""
        return [self.controller_ext] if self.controller_ext else []
class ExtensionTestCase(test.TestCase):
    """Base test case that ensures the Fox In Socks extension is enabled."""
    def setUp(self):
        super(ExtensionTestCase, self).setUp()
        # Copy the flag value so the global default list is not mutated.
        ext_list = FLAGS.osapi_compute_extension[:]
        fox = ('nova.tests.api.openstack.compute.extensions.'
               'foxinsocks.Foxinsocks')
        if fox not in ext_list:
            ext_list.append(fox)
        self.flags(osapi_compute_extension=ext_list)
class ExtensionControllerTest(ExtensionTestCase):
    """End-to-end tests for the /extensions resource (JSON and XML)."""
    def setUp(self):
        super(ExtensionControllerTest, self).setUp()
        # Extension names expected to be present; kept sorted for comparison.
        self.ext_list = [
            "AdminActions",
            "Aggregates",
            "AvailabilityZone",
            "Certificates",
            "Cloudpipe",
            "ConsoleOutput",
            "Consoles",
            "Createserverext",
            "DeferredDelete",
            "DiskConfig",
            "ExtendedStatus",
            "ExtendedServerAttributes",
            "FlavorAccess",
            "FlavorDisabled",
            "FlavorExtraSpecs",
            "FlavorExtraData",
            "FlavorManage",
            "FlavorRxtx",
            "FlavorSwap",
            "FloatingIps",
            "FloatingIpDns",
            "FloatingIpPools",
            "Fox In Socks",
            "Hosts",
            "Keypairs",
            "Multinic",
            "MultipleCreate",
            "Networks",
            "QuotaClasses",
            "Quotas",
            "Rescue",
            "SchedulerHints",
            "SecurityGroups",
            "ServerDiagnostics",
            "ServerStartStop",
            "SimpleTenantUsage",
            "UsedLimits",
            "UserData",
            "VirtualInterfaces",
            "Volumes",
            "VolumeTypes",
            ]
        self.ext_list.sort()
    def test_list_extensions_json(self):
        """GET /extensions lists every known extension with valid metadata."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        # Only compare the names we expect; extra extensions are tolerated.
        data = jsonutils.loads(response.body)
        names = [str(x['name']) for x in data['extensions']
                 if str(x['name']) in self.ext_list]
        names.sort()
        self.assertEqual(names, self.ext_list)
        # Every 'updated' stamp must be valid ISO 8601.
        for ext in data['extensions']:
            iso8601.parse_date(ext['updated'])
        # Spot-check the Fox In Socks entry in full.
        (fox_ext, ) = [
            x for x in data['extensions'] if x['alias'] == 'FOXNSOX']
        self.assertEqual(fox_ext, {
                'namespace': 'http://www.fox.in.socks/api/ext/pie/v1.0',
                'name': 'Fox In Socks',
                'updated': '2011-01-22T13:25:27-06:00',
                'description': 'The Fox In Socks Extension',
                'alias': 'FOXNSOX',
                'links': []
            },
        )
        # Each listed alias must be individually retrievable.
        for ext in data['extensions']:
            url = '/fake/extensions/%s' % ext['alias']
            request = webob.Request.blank(url)
            response = request.get_response(app)
            output = jsonutils.loads(response.body)
            self.assertEqual(output['extension']['alias'], ext['alias'])
    def test_get_extension_json(self):
        """GET /extensions/<alias> returns that extension's metadata."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/FOXNSOX")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        data = jsonutils.loads(response.body)
        self.assertEqual(data['extension'], {
                "namespace": "http://www.fox.in.socks/api/ext/pie/v1.0",
                "name": "Fox In Socks",
                "updated": "2011-01-22T13:25:27-06:00",
                "description": "The Fox In Socks Extension",
                "alias": "FOXNSOX",
                "links": []})
    def test_get_non_existing_extension_json(self):
        """Unknown aliases are reported as 404."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/4")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)
    def test_list_extensions_xml(self):
        """GET /extensions as XML validates against the extensions schema."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions")
        request.accept = "application/xml"
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        root = etree.XML(response.body)
        self.assertEqual(root.tag.split('extensions')[0], NS)
        # At least the expected extensions must be present.
        exts = root.findall('{0}extension'.format(NS))
        self.assert_(len(exts) >= len(self.ext_list))
        # Spot-check the Fox In Socks element.
        (fox_ext, ) = [x for x in exts if x.get('alias') == 'FOXNSOX']
        self.assertEqual(fox_ext.get('name'), 'Fox In Socks')
        self.assertEqual(fox_ext.get('namespace'),
            'http://www.fox.in.socks/api/ext/pie/v1.0')
        self.assertEqual(fox_ext.get('updated'), '2011-01-22T13:25:27-06:00')
        self.assertEqual(fox_ext.findtext('{0}description'.format(NS)),
            'The Fox In Socks Extension')
        xmlutil.validate_schema(root, 'extensions')
    def test_get_extension_xml(self):
        """GET /extensions/<alias> as XML validates against the schema."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/extensions/FOXNSOX")
        request.accept = "application/xml"
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        xml = response.body
        root = etree.XML(xml)
        self.assertEqual(root.tag.split('extension')[0], NS)
        self.assertEqual(root.get('alias'), 'FOXNSOX')
        self.assertEqual(root.get('name'), 'Fox In Socks')
        self.assertEqual(root.get('namespace'),
            'http://www.fox.in.socks/api/ext/pie/v1.0')
        self.assertEqual(root.get('updated'), '2011-01-22T13:25:27-06:00')
        self.assertEqual(root.findtext('{0}description'.format(NS)),
            'The Fox In Socks Extension')
        xmlutil.validate_schema(root, 'extension')
class ResourceExtensionTest(ExtensionTestCase):
    """Tests that resource extensions are routed and errors are mapped."""
    def test_no_extension_present(self):
        """With no resource extensions registered, unknown paths 404."""
        manager = StubExtensionManager(None)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/blah")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)
    def test_get_resources(self):
        """A registered resource extension serves its controller's index."""
        res_ext = base_extensions.ResourceExtension('tweedles',
                                                    StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(response_body, response.body)
    def test_get_resources_with_controller(self):
        """A resource extension built with an explicit controller responds."""
        res_ext = base_extensions.ResourceExtension('tweedles',
                                                    StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(response_body, response.body)
    def test_bad_request(self):
        """HTTPBadRequest from a controller becomes a JSON badRequest."""
        res_ext = base_extensions.ResourceExtension('tweedles',
                                                    StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        request.method = "POST"
        response = request.get_response(app)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "All aboard the fail train!",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)
    def test_non_exist_resource(self):
        """HTTPNotFound from a controller becomes a JSON itemNotFound."""
        res_ext = base_extensions.ResourceExtension('tweedles',
                                                    StubController(response_body))
        manager = StubExtensionManager(res_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/1")
        response = request.get_response(app)
        self.assertEqual(404, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "itemNotFound": {
                "message": "The resource could not be found.",
                "code": 404
            }
        }
        self.assertDictMatch(expected, body)
class InvalidExtension(object):
    """Object with only an alias, lacking the extension descriptor
    interface; ExtensionManagerTest verifies it does not get loaded."""
    alias = "THIRD"
class ExtensionManagerTest(ExtensionTestCase):
    """Tests for loading and registering extensions via ExtensionManager."""
    # Class attribute mirrors the module-level constant of the same name.
    response_body = "Try to say this Mr. Knox, sir..."
    def test_get_resources(self):
        """The foxnsocks resource registered by the extension is routable."""
        app = compute.APIRouter()
        request = webob.Request.blank("/fake/foxnsocks")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        # NOTE: the bare name here resolves to the module-level constant,
        # not the class attribute above.
        self.assertEqual(response_body, response.body)
    def test_invalid_extensions(self):
        """Registering an object without the descriptor interface is a no-op."""
        # NOTE(review): 'app' looks unused, but constructing APIRouter may
        # load the configured extensions as a side effect -- confirm before
        # removing.
        app = compute.APIRouter()
        ext_mgr = compute_extensions.ExtensionManager()
        ext_mgr.register(InvalidExtension())
        self.assertTrue(ext_mgr.is_loaded('FOXNSOX'))
        self.assertFalse(ext_mgr.is_loaded('THIRD'))
class ActionExtensionTest(ExtensionTestCase):
    """Tests for server actions contributed by the foxinsocks extension."""
    def _send_server_action_request(self, url, body):
        """POST ``body`` as JSON to ``url`` and return the WSGI response."""
        app = compute.APIRouter()
        request = webob.Request.blank(url)
        request.method = 'POST'
        request.content_type = 'application/json'
        request.body = jsonutils.dumps(body)
        response = request.get_response(app)
        return response
    def test_extended_action(self):
        """Extension-provided actions (add/delete_tweedle) are dispatched."""
        body = dict(add_tweedle=dict(name="test"))
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(200, response.status_int)
        self.assertEqual("Tweedle Beetle Added.", response.body)
        body = dict(delete_tweedle=dict(name="test"))
        response = self._send_server_action_request(url, body)
        self.assertEqual(200, response.status_int)
        self.assertEqual("Tweedle Beetle Deleted.", response.body)
    def test_invalid_action(self):
        """An unknown action name yields a JSON badRequest (400)."""
        body = dict(blah=dict(name="test"))  # action does not exist
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "There is no such action: blah",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)
    def test_non_exist_action(self):
        """Posting an action to an unknown resource is a plain 404."""
        body = dict(blah=dict(name="test"))
        url = "/fake/fdsa/1/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(404, response.status_int)
    def test_failed_action(self):
        """The extension's 'fail' action is mapped to a JSON badRequest."""
        body = dict(fail=dict(name="test"))
        url = "/fake/servers/abcd/action"
        response = self._send_server_action_request(url, body)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        body = jsonutils.loads(response.body)
        expected = {
            "badRequest": {
                "message": "Tweedle fail",
                "code": 400
            }
        }
        self.assertDictMatch(expected, body)
class RequestExtensionTest(ExtensionTestCase):
    """Tests that request extensions can decorate core responses."""
    def test_get_resources_with_stub_mgr(self):
        """A stub-managed extension adds a field to the flavor response."""
        class GooGoose(wsgi.Controller):
            @wsgi.extends
            def show(self, req, resp_obj, id):
                # only handle JSON responses
                resp_obj.obj['flavor']['googoose'] = req.GET.get('chewing')
        req_ext = base_extensions.ControllerExtension(
            StubControllerExtension(), 'flavors', GooGoose())
        manager = StubExtensionManager(None, None, None, req_ext)
        app = fakes.wsgi_app(ext_mgr=manager)
        request = webob.Request.blank("/v2/fake/flavors/1?chewing=bluegoo")
        request.environ['api.version'] = '2'
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        response_data = jsonutils.loads(response.body)
        self.assertEqual('bluegoo', response_data['flavor']['googoose'])
    def test_get_resources_with_mgr(self):
        """The real extension manager applies the foxinsocks decorations."""
        app = fakes.wsgi_app()
        request = webob.Request.blank("/v2/fake/flavors/1?chewing=newblue")
        request.environ['api.version'] = '2'
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        response_data = jsonutils.loads(response.body)
        self.assertEqual('newblue', response_data['flavor']['googoose'])
        self.assertEqual("Pig Bands!", response_data['big_bands'])
class ControllerExtensionTest(ExtensionTestCase):
    """Tests that controller extensions can wrap core and action handlers."""
    def test_controller_extension_early(self):
        """An early extension's yielded value becomes the index() response."""
        controller = StubController(response_body)
        res_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubEarlyExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_extension_late(self):
        """A late extension's return value replaces the index() response."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubController(dict(foo=response_body))
        res_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_extension_late_inherited_resource(self):
        """Extensions on a parent resource also apply to inheriting ones."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubController(dict(foo=response_body))
        parent_ext = base_extensions.ResourceExtension('tweedles', controller)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=parent_ext,
                                       controller_ext=cont_ext)
        child_ext = base_extensions.ResourceExtension('beetles', controller,
                                                      inherits='tweedles')
        manager.extra_resource_ext = child_ext
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/beetles")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_action_extension_early(self):
        """An early extension pre-empts a member action handler."""
        controller = StubActionController(response_body)
        actions = dict(action='POST')
        res_ext = base_extensions.ResourceExtension('tweedles', controller,
                                                    member_actions=actions)
        ext_controller = StubEarlyExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/foo/action")
        request.method = 'POST'
        request.headers['Content-Type'] = 'application/json'
        request.body = jsonutils.dumps(dict(fooAction=True))
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
    def test_controller_action_extension_late(self):
        """A late extension rewrites a member action response."""
        # Need a dict for the body to convert to a ResponseObject
        controller = StubActionController(dict(foo=response_body))
        actions = dict(action='POST')
        res_ext = base_extensions.ResourceExtension('tweedles', controller,
                                                    member_actions=actions)
        ext_controller = StubLateExtensionController(extension_body)
        extension = StubControllerExtension()
        cont_ext = base_extensions.ControllerExtension(extension, 'tweedles',
                                                       ext_controller)
        manager = StubExtensionManager(resource_ext=res_ext,
                                       controller_ext=cont_ext)
        app = compute.APIRouter(manager)
        request = webob.Request.blank("/fake/tweedles/foo/action")
        request.method = 'POST'
        request.headers['Content-Type'] = 'application/json'
        request.body = jsonutils.dumps(dict(fooAction=True))
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)
        self.assertEqual(extension_body, response.body)
class ExtensionsXMLSerializerTest(test.TestCase):
    """Tests for the XML templates that serialize extension metadata."""
    def test_serialize_extension(self):
        """A single extension serializes with all attributes and atom links."""
        serializer = base_extensions.ExtensionTemplate()
        data = {'extension': {
            'name': 'ext1',
            'namespace': 'http://docs.rack.com/servers/api/ext/pie/v1.0',
            'alias': 'RS-PIE',
            'updated': '2011-01-22T13:25:27-06:00',
            'description': 'Adds the capability to share an image.',
            'links': [{'rel': 'describedby',
                       'type': 'application/pdf',
                       'href': 'http://docs.rack.com/servers/api/ext/cs.pdf'},
                      {'rel': 'describedby',
                       'type': 'application/vnd.sun.wadl+xml',
                       'href': 'http://docs.rack.com/servers/api/ext/cs.wadl'}]}}
        xml = serializer.serialize(data)
        root = etree.XML(xml)
        ext_dict = data['extension']
        self.assertEqual(root.findtext('{0}description'.format(NS)),
                         ext_dict['description'])
        for key in ['name', 'namespace', 'alias', 'updated']:
            self.assertEqual(root.get(key), ext_dict[key])
        link_nodes = root.findall('{0}link'.format(ATOMNS))
        self.assertEqual(len(link_nodes), 2)
        for i, link in enumerate(ext_dict['links']):
            for key, value in link.items():
                self.assertEqual(link_nodes[i].get(key), value)
        xmlutil.validate_schema(root, 'extension')
    def test_serialize_extensions(self):
        """A list of extensions serializes each entry with its links."""
        serializer = base_extensions.ExtensionsTemplate()
        # FIX: the first link of the first extension previously declared
        # "type" twice ('application/pdf' immediately overwritten by the
        # wadl type); the dead duplicate key is removed. The effective
        # dict value -- and hence the expected output -- is unchanged.
        data = {"extensions": [
            {"name": "Public Image Extension",
             "namespace": "http://foo.com/api/ext/pie/v1.0",
             "alias": "RS-PIE",
             "updated": "2011-01-22T13:25:27-06:00",
             "description": "Adds the capability to share an image.",
             "links": [{"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-pie.pdf"},
                       {"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-pie.wadl"}]},
            {"name": "Cloud Block Storage",
             "namespace": "http://foo.com/api/ext/cbs/v1.0",
             "alias": "RS-CBS",
             "updated": "2011-01-12T11:22:33-06:00",
             "description": "Allows mounting cloud block storage.",
             "links": [{"rel": "describedby",
                        "type": "application/pdf",
                        "href": "http://foo.com/api/ext/cs-cbs.pdf"},
                       {"rel": "describedby",
                        "type": "application/vnd.sun.wadl+xml",
                        "href": "http://foo.com/api/ext/cs-cbs.wadl"}]}]}
        xml = serializer.serialize(data)
        root = etree.XML(xml)
        ext_elems = root.findall('{0}extension'.format(NS))
        self.assertEqual(len(ext_elems), 2)
        for i, ext_elem in enumerate(ext_elems):
            ext_dict = data['extensions'][i]
            self.assertEqual(ext_elem.findtext('{0}description'.format(NS)),
                             ext_dict['description'])
            for key in ['name', 'namespace', 'alias', 'updated']:
                self.assertEqual(ext_elem.get(key), ext_dict[key])
            link_nodes = ext_elem.findall('{0}link'.format(ATOMNS))
            self.assertEqual(len(link_nodes), 2)
            # 'j' instead of 'i' so the outer loop index is not shadowed.
            for j, link in enumerate(ext_dict['links']):
                for key, value in link.items():
                    self.assertEqual(link_nodes[j].get(key), value)
        xmlutil.validate_schema(root, 'extensions')
| true | true |
79000e7cbee758209869966d2a67b7b04c04c1a5 | 12,428 | py | Python | analyze_dataset.py | PDillis/coiltraine | a682aa62af5f6ecb95a837d33b70d893d3d261f6 | [
"MIT"
] | 1 | 2021-03-01T19:43:12.000Z | 2021-03-01T19:43:12.000Z | analyze_dataset.py | PDillis/coiltraine | a682aa62af5f6ecb95a837d33b70d893d3d261f6 | [
"MIT"
] | null | null | null | analyze_dataset.py | PDillis/coiltraine | a682aa62af5f6ecb95a837d33b70d893d3d261f6 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import pandas as pd
import json
import glob
import os
import argparse
from typing import Tuple, Union, List
from collections import Counter
from tqdm import tqdm
from multiprocessing import Pool
pd.options.mode.chained_assignment = None # default='warn'
# ====================================================================
def get_data(img_pth: Union[str, os.PathLike]) -> dict:
    """Read one can_bus*.json measurement file and return it as a dict."""
    with open(img_pth, 'r') as json_file:
        return json.load(json_file)
def get_original_df(
        path: Union[str, os.PathLike],
        filename: str,
        processes_per_cpu: int = 2) -> Tuple[pd.DataFrame, bool]:
    """Get a DataFrame from all the can_bus*.json files in the dataset.

    Loads a cached ``.npy`` dump of the parsed JSON records when available,
    otherwise parses every can_bus*.json under ``path`` in parallel and
    caches the result under ./data_analysis.

    :param path: Root directory of the dataset.
    :param filename: Name of the ``.npy`` cache file inside ./data_analysis.
    :param processes_per_cpu: Worker processes to spawn per CPU core.
    :return: ``(df, False)`` -- one row per frame; the boolean flags that
        this data is *not* augmented (cf. get_augmented_df()).
    """
    save_dir = os.path.join(os.getcwd(), 'data_analysis')
    # FIX: np.save (and the cache lookup) failed if ./data_analysis did
    # not exist yet; create it up front.
    os.makedirs(save_dir, exist_ok=True)
    save_path = os.path.join(save_dir, filename)
    if os.path.isfile(save_path):
        print('.npy file exists, loading it...')
        data = list(np.load(save_path, allow_pickle=True))
    else:
        # Construct the dataset by parsing every JSON file in parallel
        print('.npy file not found, constructing it...')
        all_data_paths = sorted(glob.glob(os.path.join(path, '**/can_bus*.json'), recursive=True))
        with Pool(os.cpu_count() * processes_per_cpu) as p:
            data = list(tqdm(p.imap(get_data, all_data_paths), total=len(all_data_paths)))
        np.save(save_path, data)
    # Create dataframe with the data (one row per frame)
    df = pd.DataFrame(data)
    print(df.describe())
    return df, False
# ====================================================================
def get_augmented_df(preloads_name: str) -> Tuple[pd.DataFrame, bool]:
    """Load the (already augmented) dataset from a _preloads ``.npy`` file.

    :param preloads_name: File name inside ./_preloads; must end in '.npy'.
    :return: ``(df, True)`` -- the boolean flags that this data is augmented.
    """
    assert preloads_name.endswith('.npy')
    preload_path = os.path.join(os.getcwd(), '_preloads', preloads_name)
    # Only index 1 of the stored pair is used here: the per-frame records.
    payload = np.load(preload_path, allow_pickle=True)
    df = pd.DataFrame(payload[1])
    print(df.describe())
    return df, True
# ====================================================================
def violin_plot(df: pd.DataFrame, save_name: str, augmented: bool) -> None:
    """Save violin plot for the interesting parameters using df.

    Writes one figure per high-level command (directions 2.0-5.0) to
    ./data_analysis/<save_name>/violin_plots/, each showing the distribution
    of steer, throttle, brake and speed for the frames under that command.

    :param df: Frame-level dataset with 'directions', 'steer', 'throttle',
        'brake' and 'speed' columns.
    :param save_name: Dataset name used for the output directory and files.
    :param augmented: Whether df holds augmented data (affects labels only).
    """
    directions_dict = {'No Action': 2.0, 'Turn Left': 3.0, 'Turn Right': 4.0, 'Continue Straight': 5.0}
    # Auxiliary function for setting the quartile lines
    def set_lines(ax):
        # Dashed thin white lines for all quartile marks...
        for l in ax.lines:
            l.set_linestyle('--')
            l.set_linewidth(0.6)
            l.set_color('white')
            l.set_alpha(0.7)
        # ...then restyle every third line starting at index 1 (presumably
        # the median of each quartile triplet -- TODO confirm with seaborn).
        for l in ax.lines[1::3]:
            l.set_linestyle('-')
            l.set_linewidth(1.3)
            l.set_color('black')
            l.set_alpha(0.8)
    for key in directions_dict:
        # Get respective subset of the dataframe
        data = df[df['directions'] == directions_dict[key]]
        # 1x4 grid: one violin per control signal.
        fig = plt.figure(figsize=(8, 6))
        gs = fig.add_gridspec(1, 4)
        fig.add_subplot(gs[0, 0])
        ax = sns.violinplot(y='steer', data=data, color='r', inner='quartile')
        set_lines(ax)
        fig.add_subplot(gs[0, 1])
        ax = sns.violinplot(y='throttle', data=data, color='g', inner='quartile')
        set_lines(ax)
        fig.add_subplot(gs[0, 2])
        ax = sns.violinplot(y='brake', data=data, color='b', inner='quartile')
        set_lines(ax)
        fig.add_subplot(gs[0, 3])
        ax = sns.violinplot(y='speed', data=data, color='m', inner='quartile')
        set_lines(ax)
        # When using tight layout, we need the title to be spaced accordingly
        fig.tight_layout()
        fig.subplots_adjust(top=0.88)
        # Title reports the subset size and its share of the whole dataset.
        stitle = f'Direction: {key} - $N={len(data)}$ - ${100 * len(data)/len(df):6.3f}$% of total'
        stitle = f'{stitle} - Augmented' if augmented else stitle
        fig.suptitle(stitle, fontsize=16)
        fname = f'{save_name}-{key.replace(" ", "")}'
        fname = f'{fname}-aug' if augmented else fname
        fig_name = os.path.join(os.getcwd(), 'data_analysis', save_name, 'violin_plots', f'{fname}.png')
        os.makedirs(os.path.join(os.getcwd(), 'data_analysis', save_name, 'violin_plots'), exist_ok=True)
        plt.savefig(fig_name)
        plt.close()
# ====================================================================
def plot_clients(path: Union[str, os.PathLike], df: pd.DataFrame, augmented: bool, speed_factor: float) -> None:
"""Plot the steer, throttle, brake, and speed of a client during its data collection"""
# Some sanity check
if path.endswith(os.sep):
path = path[:-1]
# Get dataset name and make the necessary directories
dataset_name = os.path.basename(path)
s_path = os.path.join(os.getcwd(), 'data_analysis', dataset_name, 'clients')
os.makedirs(s_path, exist_ok=True)
# Get the number of clients/cars that collected the data
clients = glob.glob(os.path.join(path, '**/*'))
clients = [cl for cl in clients if os.path.isdir(cl)] # Remove path of metadata.json
num_clients = len(clients)
# Total number of frames and for a single client
num_frames = len(df)
num_frames_per_client = num_frames // num_clients
# Aux function
def get_change_locs(df: pd.DataFrame, cli: int) -> Tuple[List[int], List[float]]:
"""Get the index and directions from the df of the actions taken by the client"""
df['directions_str'] = df['directions'].astype(str) # In order to compare, turn directions into a string
# Shift directions column by 1 (filling the top with the head), and compare to the original
df['change'] = df['directions_str'].shift(1, fill_value=df['directions_str'].head(1)) != df['directions_str']
# Get the rows where there's a change
index_change = list(df.loc[df['change'] == True].index.values)
# Add the first frame
index_change = [(cli - 1) * len(df)] + index_change
# For these indexes, get the value of the direction
dirs = list(df['directions'][index_change].values)
# Add the last frame
index_change = index_change + [cli * len(df) - 1]
return index_change, dirs
# Dictionaries containing the name and color for plotting the direction given to the car
my_labels = {2.0: 'No Action', 3.0: 'Turn Left', 4.0: 'Turn Right', 5.0: 'Continue Straight'}
colors = {2.0: 'gold', 3.0: 'gray', 4.0: 'cyan', 5.0: 'magenta'}
# Initialize the total counts per action
total_action_counts = Counter({2.0: 0, 3.0: 0, 4.0: 0, 5.0: 0})
max_speed_clients = {}
idx_change_clients = {}
dirs_clients = {}
# Make a plot for each client
for client in tqdm(range(1, num_clients + 1), total=num_clients, unit='clients'):
if augmented:
# Dataframe will have augmented data, which uses center, left, right, center, ... data
df_client = df[(client - 1) * num_frames_per_client: client * num_frames_per_client: 3]
else:
df_client = df[(client - 1) * num_frames_per_client: client * num_frames_per_client]
# Augmented data will have been normalized already
df_client['speed'] = df_client['speed'].div(speed_factor) # normalize to range [0, 1]
# The actual max speed (see if it differs from collected data)
actual_max_speed = df_client['speed'].max()
max_speed_clients[client] = actual_max_speed
# Build the plot
fig, ax = plt.subplots(figsize=(48, 16))
fig.tight_layout(rect=[0, 0.03, 1, 0.95])
df_client.plot(y=['steer', 'throttle', 'brake', 'speed'], ax=ax)
# Set the area colors for when an direction is taken
idx_change, dirs = get_change_locs(df_client, client)
for idx, dir in enumerate(dirs):
ax.axvspan(idx_change[idx], idx_change[idx + 1], facecolor=colors[dir], alpha=0.5, label=my_labels[dir])
# Save these index and directions for each client
idx_change_clients[f'client_{client:02d}'] = [int(idx) for idx in idx_change]
dirs_clients[f'client_{client:02d}'] = [float(d) for d in dirs]
# Count the directions taken by the client
dirs_count = Counter(dirs)
# Add this to the total for the whole dataset
total_action_counts += dirs_count
# Add the counts to the title
total_actions = ''
for key in my_labels:
total_actions += f' - {my_labels[key]}: {dirs_count[key]}'
# Set title and x and y axes labels
suptitle = f'Client {client} - Actual max speed: {actual_max_speed:.4f}'
suptitle = f'{suptitle} - Augmented' if augmented else suptitle
suptitle = f'{suptitle}{total_actions}'
plt.suptitle(suptitle, fontsize=30)
plt.xlabel('Frame idx', fontsize=22)
plt.ylabel('Normed value', fontsize=22)
plt.xticks(list(range((client - 1) * num_frames_per_client,
client * num_frames_per_client + 1, len(df_client) // 20))) # ticks in 5% increments
# Fix the legend / remove duplicated areas and labels
hand, labl = ax.get_legend_handles_labels()
handout = []
lablout = []
for h, l in zip(hand, labl):
if l not in lablout:
lablout.append(l)
handout.append(h)
ax.legend(handout, lablout, fontsize='x-large')
sname = os.path.join(s_path, f'{dataset_name}_Client{client:02d}')
sname = f'{sname}-aug' if augmented else sname
plt.savefig(f'{sname}.png', dpi=300)
plt.close()
# Add summary and save it as a JSON file
actions_summary = {
'avg_no_action': total_action_counts[2.0] / num_clients,
'avg_turn_left': total_action_counts[3.0] / num_clients,
'avg_turn_right': total_action_counts[4.0] / num_clients,
'avg_continue_straight': total_action_counts[5.0] / num_clients
}
summary = {
'num_clients': num_clients,
'num_frames_per_client': num_frames_per_client,
'hours_per_client': num_frames_per_client / (20 * 60 * 60),
'total_action_counts': total_action_counts,
'actions_summary': actions_summary,
'max_speed_clients': max_speed_clients,
'idx_change_clients': idx_change_clients,
'dirs_clients': dirs_clients
}
with open(os.path.join(s_path, f'{dataset_name}-summary.json'), 'w') as f:
json.dump(summary, f, indent=4)
# ====================================================================
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--path', type=str, help='Path to the head of the dataset', required=True)
parser.add_argument('--filename', type=str, help='Name of file to save', default=None)
parser.add_argument('--preloads-name', type=str, help='Name of preload file', default=None)
parser.add_argument('--processes-per-cpu', '-proc', type=int, help='Processes per cpu (default: %(default)s)', default=2)
parser.add_argument('--speed-factor', '-sf', type=float, help='Speed factor to normalize data (default: %(default)s)', default=14.0)
parser.add_argument('--plot-clients', action='store_true', help='Add flag to plot the actions and speed of a client')
args = parser.parse_args()
# Create dir if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'data_analysis')):
os.mkdir(os.path.join(os.getcwd(), 'data_analysis'))
print('Getting the dataframe...')
if args.preloads_name is not None:
# Preloaded data is augmented
df, augmented = get_augmented_df(preloads_name=args.preloads_name)
save_name = os.path.basename(args.preloads_name).split('.')[0]
else:
assert args.filename is not None
assert args.filename.endswith('.npy')
df, augmented = get_original_df(args.path, args.filename, args.processes_per_cpu)
save_name = os.path.basename(args.filename).split('.')[0]
# Create and save the violin plots
print('Plotting data...')
violin_plot(df, save_name, augmented)
if args.plot_clients:
print(f'Plotting actions taken by all clients in {args.path}...')
plot_clients(path=args.path, df=df, augmented=augmented, speed_factor=args.speed_factor)
print('Done!')
# ====================================================================
if __name__ == '__main__':
main()
# ====================================================================
| 39.833333 | 136 | 0.61466 | import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import pandas as pd
import json
import glob
import os
import argparse
from typing import Tuple, Union, List
from collections import Counter
from tqdm import tqdm
from multiprocessing import Pool
pd.options.mode.chained_assignment = None
def get_data(img_pth: Union[str, os.PathLike]) -> dict:
with open(img_pth, 'r') as f:
data = json.load(f)
return data
def get_original_df(
path: Union[str, os.PathLike],
filename: str,
processes_per_cpu: int = 2) -> Tuple[pd.DataFrame, bool]:
save_path = os.path.join(os.getcwd(), 'data_analysis', filename)
if os.path.isfile(save_path):
print('.npy file exists, loading it...')
data = list(np.load(save_path, allow_pickle=True))
else:
print('.npy file not found, constructing it...')
all_data_paths = sorted(glob.glob(os.path.join(path, '**/can_bus*.json'), recursive=True))
with Pool(os.cpu_count() * processes_per_cpu) as p:
data = list(tqdm(p.imap(get_data, all_data_paths), total=len(all_data_paths)))
np.save(save_path, data)
df = pd.DataFrame(data)
print(df.describe())
return df, False
def get_augmented_df(preloads_name: str) -> Tuple[pd.DataFrame, bool]:
assert preloads_name.endswith('.npy')
data = np.load(os.path.join(os.getcwd(), '_preloads', preloads_name), allow_pickle=True)[1]
df = pd.DataFrame(data)
print(df.describe())
return df, True
def violin_plot(df: pd.DataFrame, save_name: str, augmented: bool) -> None:
directions_dict = {'No Action': 2.0, 'Turn Left': 3.0, 'Turn Right': 4.0, 'Continue Straight': 5.0}
def set_lines(ax):
for l in ax.lines:
l.set_linestyle('--')
l.set_linewidth(0.6)
l.set_color('white')
l.set_alpha(0.7)
for l in ax.lines[1::3]:
l.set_linestyle('-')
l.set_linewidth(1.3)
l.set_color('black')
l.set_alpha(0.8)
for key in directions_dict:
data = df[df['directions'] == directions_dict[key]]
fig = plt.figure(figsize=(8, 6))
gs = fig.add_gridspec(1, 4)
fig.add_subplot(gs[0, 0])
ax = sns.violinplot(y='steer', data=data, color='r', inner='quartile')
set_lines(ax)
fig.add_subplot(gs[0, 1])
ax = sns.violinplot(y='throttle', data=data, color='g', inner='quartile')
set_lines(ax)
fig.add_subplot(gs[0, 2])
ax = sns.violinplot(y='brake', data=data, color='b', inner='quartile')
set_lines(ax)
fig.add_subplot(gs[0, 3])
ax = sns.violinplot(y='speed', data=data, color='m', inner='quartile')
set_lines(ax)
fig.tight_layout()
fig.subplots_adjust(top=0.88)
stitle = f'Direction: {key} - $N={len(data)}$ - ${100 * len(data)/len(df):6.3f}$% of total'
stitle = f'{stitle} - Augmented' if augmented else stitle
fig.suptitle(stitle, fontsize=16)
fname = f'{save_name}-{key.replace(" ", "")}'
fname = f'{fname}-aug' if augmented else fname
fig_name = os.path.join(os.getcwd(), 'data_analysis', save_name, 'violin_plots', f'{fname}.png')
os.makedirs(os.path.join(os.getcwd(), 'data_analysis', save_name, 'violin_plots'), exist_ok=True)
plt.savefig(fig_name)
plt.close()
def plot_clients(path: Union[str, os.PathLike], df: pd.DataFrame, augmented: bool, speed_factor: float) -> None:
if path.endswith(os.sep):
path = path[:-1]
dataset_name = os.path.basename(path)
s_path = os.path.join(os.getcwd(), 'data_analysis', dataset_name, 'clients')
os.makedirs(s_path, exist_ok=True)
clients = glob.glob(os.path.join(path, '**/*'))
clients = [cl for cl in clients if os.path.isdir(cl)]
num_clients = len(clients)
num_frames = len(df)
num_frames_per_client = num_frames // num_clients
def get_change_locs(df: pd.DataFrame, cli: int) -> Tuple[List[int], List[float]]:
df['directions_str'] = df['directions'].astype(str)
df['change'] = df['directions_str'].shift(1, fill_value=df['directions_str'].head(1)) != df['directions_str']
index_change = list(df.loc[df['change'] == True].index.values)
# Add the first frame
index_change = [(cli - 1) * len(df)] + index_change
# For these indexes, get the value of the direction
dirs = list(df['directions'][index_change].values)
# Add the last frame
index_change = index_change + [cli * len(df) - 1]
return index_change, dirs
# Dictionaries containing the name and color for plotting the direction given to the car
my_labels = {2.0: 'No Action', 3.0: 'Turn Left', 4.0: 'Turn Right', 5.0: 'Continue Straight'}
colors = {2.0: 'gold', 3.0: 'gray', 4.0: 'cyan', 5.0: 'magenta'}
# Initialize the total counts per action
total_action_counts = Counter({2.0: 0, 3.0: 0, 4.0: 0, 5.0: 0})
max_speed_clients = {}
idx_change_clients = {}
dirs_clients = {}
# Make a plot for each client
for client in tqdm(range(1, num_clients + 1), total=num_clients, unit='clients'):
if augmented:
# Dataframe will have augmented data, which uses center, left, right, center, ... data
df_client = df[(client - 1) * num_frames_per_client: client * num_frames_per_client: 3]
else:
df_client = df[(client - 1) * num_frames_per_client: client * num_frames_per_client]
# Augmented data will have been normalized already
df_client['speed'] = df_client['speed'].div(speed_factor) # normalize to range [0, 1]
# The actual max speed (see if it differs from collected data)
actual_max_speed = df_client['speed'].max()
max_speed_clients[client] = actual_max_speed
# Build the plot
fig, ax = plt.subplots(figsize=(48, 16))
fig.tight_layout(rect=[0, 0.03, 1, 0.95])
df_client.plot(y=['steer', 'throttle', 'brake', 'speed'], ax=ax)
# Set the area colors for when an direction is taken
idx_change, dirs = get_change_locs(df_client, client)
for idx, dir in enumerate(dirs):
ax.axvspan(idx_change[idx], idx_change[idx + 1], facecolor=colors[dir], alpha=0.5, label=my_labels[dir])
# Save these index and directions for each client
idx_change_clients[f'client_{client:02d}'] = [int(idx) for idx in idx_change]
dirs_clients[f'client_{client:02d}'] = [float(d) for d in dirs]
# Count the directions taken by the client
dirs_count = Counter(dirs)
# Add this to the total for the whole dataset
total_action_counts += dirs_count
# Add the counts to the title
total_actions = ''
for key in my_labels:
total_actions += f' - {my_labels[key]}: {dirs_count[key]}'
# Set title and x and y axes labels
suptitle = f'Client {client} - Actual max speed: {actual_max_speed:.4f}'
suptitle = f'{suptitle} - Augmented' if augmented else suptitle
suptitle = f'{suptitle}{total_actions}'
plt.suptitle(suptitle, fontsize=30)
plt.xlabel('Frame idx', fontsize=22)
plt.ylabel('Normed value', fontsize=22)
plt.xticks(list(range((client - 1) * num_frames_per_client,
client * num_frames_per_client + 1, len(df_client) // 20))) # ticks in 5% increments
# Fix the legend / remove duplicated areas and labels
hand, labl = ax.get_legend_handles_labels()
handout = []
lablout = []
for h, l in zip(hand, labl):
if l not in lablout:
lablout.append(l)
handout.append(h)
ax.legend(handout, lablout, fontsize='x-large')
sname = os.path.join(s_path, f'{dataset_name}_Client{client:02d}')
sname = f'{sname}-aug' if augmented else sname
plt.savefig(f'{sname}.png', dpi=300)
plt.close()
# Add summary and save it as a JSON file
actions_summary = {
'avg_no_action': total_action_counts[2.0] / num_clients,
'avg_turn_left': total_action_counts[3.0] / num_clients,
'avg_turn_right': total_action_counts[4.0] / num_clients,
'avg_continue_straight': total_action_counts[5.0] / num_clients
}
summary = {
'num_clients': num_clients,
'num_frames_per_client': num_frames_per_client,
'hours_per_client': num_frames_per_client / (20 * 60 * 60),
'total_action_counts': total_action_counts,
'actions_summary': actions_summary,
'max_speed_clients': max_speed_clients,
'idx_change_clients': idx_change_clients,
'dirs_clients': dirs_clients
}
with open(os.path.join(s_path, f'{dataset_name}-summary.json'), 'w') as f:
json.dump(summary, f, indent=4)
# ====================================================================
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--path', type=str, help='Path to the head of the dataset', required=True)
parser.add_argument('--filename', type=str, help='Name of file to save', default=None)
parser.add_argument('--preloads-name', type=str, help='Name of preload file', default=None)
parser.add_argument('--processes-per-cpu', '-proc', type=int, help='Processes per cpu (default: %(default)s)', default=2)
parser.add_argument('--speed-factor', '-sf', type=float, help='Speed factor to normalize data (default: %(default)s)', default=14.0)
parser.add_argument('--plot-clients', action='store_true', help='Add flag to plot the actions and speed of a client')
args = parser.parse_args()
# Create dir if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'data_analysis')):
os.mkdir(os.path.join(os.getcwd(), 'data_analysis'))
print('Getting the dataframe...')
if args.preloads_name is not None:
df, augmented = get_augmented_df(preloads_name=args.preloads_name)
save_name = os.path.basename(args.preloads_name).split('.')[0]
else:
assert args.filename is not None
assert args.filename.endswith('.npy')
df, augmented = get_original_df(args.path, args.filename, args.processes_per_cpu)
save_name = os.path.basename(args.filename).split('.')[0]
print('Plotting data...')
violin_plot(df, save_name, augmented)
if args.plot_clients:
print(f'Plotting actions taken by all clients in {args.path}...')
plot_clients(path=args.path, df=df, augmented=augmented, speed_factor=args.speed_factor)
print('Done!')
if __name__ == '__main__':
main()
| true | true |
79000fff9dd5b908998f890701e0187b67494624 | 145 | py | Python | yc208/799.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | yc208/799.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | yc208/799.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | A, B, C, D = map(int, input().split())
s1 = set(range(A, B + 1))
s2 = set(range(C, D + 1))
print(len(s1) * len(s2) - len(s1.intersection(s2)))
| 20.714286 | 51 | 0.551724 | A, B, C, D = map(int, input().split())
s1 = set(range(A, B + 1))
s2 = set(range(C, D + 1))
print(len(s1) * len(s2) - len(s1.intersection(s2)))
| true | true |
790010409f400cb3d51aa781eb051990e11bd6ee | 3,594 | py | Python | fork/consensus/default_constants.py | Fork-Network/fork-blockchain | 4e7c55b5787376dabacc8049eac49c0bb0bfd855 | [
"Apache-2.0"
] | 7 | 2021-07-23T22:06:56.000Z | 2022-02-09T04:30:23.000Z | fork/consensus/default_constants.py | Fork-Network/fork-blockchain | 4e7c55b5787376dabacc8049eac49c0bb0bfd855 | [
"Apache-2.0"
] | null | null | null | fork/consensus/default_constants.py | Fork-Network/fork-blockchain | 4e7c55b5787376dabacc8049eac49c0bb0bfd855 | [
"Apache-2.0"
] | 2 | 2021-07-29T10:11:56.000Z | 2021-08-01T19:37:18.000Z | from fork.util.ints import uint64
from .constants import ConsensusConstants
testnet_kwargs = {
"SLOT_BLOCKS_TARGET": 32,
"MIN_BLOCKS_PER_CHALLENGE_BLOCK": 16, # Must be less than half of SLOT_BLOCKS_TARGET
"MAX_SUB_SLOT_BLOCKS": 128, # Must be less than half of SUB_EPOCH_BLOCKS
"NUM_SPS_SUB_SLOT": 64, # Must be a power of 2
"SUB_SLOT_ITERS_STARTING": 2 ** 27,
# DIFFICULTY_STARTING is the starting difficulty for the first epoch, which is then further
# multiplied by another factor of DIFFICULTY_CONSTANT_FACTOR, to be used in the VDF iter calculation formula.
"DIFFICULTY_CONSTANT_FACTOR": 2 ** 64,
"DIFFICULTY_STARTING": 13,
"DIFFICULTY_CHANGE_MAX_FACTOR": 3, # The next difficulty is truncated to range [prev / FACTOR, prev * FACTOR]
# These 3 constants must be changed at the same time
"SUB_EPOCH_BLOCKS": 384, # The number of blocks per sub-epoch, mainnet 384
"EPOCH_BLOCKS": 4608, # The number of blocks per epoch, mainnet 4608. Must be multiple of SUB_EPOCH_SB
"SIGNIFICANT_BITS": 8, # The number of bits to look at in difficulty and min iters. The rest are zeroed
"DISCRIMINANT_SIZE_BITS": 1024, # Max is 1024 (based on ClassGroupElement int size)
"NUMBER_ZERO_BITS_PLOT_FILTER": 9, # H(plot signature of the challenge) must start with these many zeroes
"MIN_PLOT_SIZE": 32, # 32 for mainnet
"MAX_PLOT_SIZE": 50,
"SUB_SLOT_TIME_TARGET": 600, # The target number of seconds per slot, mainnet 600
"NUM_SP_INTERVALS_EXTRA": 3, # The number of sp intervals to add to the signage point
"MAX_FUTURE_TIME": 5 * 60, # The next block can have a timestamp of at most these many seconds in the future
"NUMBER_OF_TIMESTAMPS": 11, # Than the average of the last NUMBER_OF_TIMESTAMPS blocks
# Used as the initial cc rc challenges, as well as first block back pointers, and first SES back pointer
# We override this value based on the chain being run (testnet0, testnet1, mainnet, etc)
# Default used for tests is std_hash(b'')
"GENESIS_CHALLENGE": bytes.fromhex("be6bbdf83a789fd2b7e5ac8e2954f510e92115bb9e1c84591f6adb4055a3b845"),
# Forks of fork should change this value to provide replay attack protection. This is set to mainnet genesis chall
"AGG_SIG_ME_ADDITIONAL_DATA": bytes.fromhex("3800a9169891c0554775b12cbf5d79f6eb50ccb5f95630536a4cecd7a18aa34b"),
"GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex(
"75e5849b1a27d71e74de1390a4fc81c38b4ed8ce24d4efb2c9a5807d0e82106c"
),
"GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex(
"75e5849b1a27d71e74de1390a4fc81c38b4ed8ce24d4efb2c9a5807d0e82106c"
),
"MAX_VDF_WITNESS_SIZE": 64,
# Size of mempool = 50x the size of block
"MEMPOOL_BLOCK_BUFFER": 50,
# Max coin amount, fits into 64 bits
"MAX_COIN_AMOUNT": uint64((1 << 64) - 1),
# Max block cost in clvm cost units
"MAX_BLOCK_COST_CLVM": 11000000000,
# The cost per byte of generator program
"COST_PER_BYTE": 12000,
"WEIGHT_PROOF_THRESHOLD": 2,
"BLOCKS_CACHE_SIZE": 4608 + (128 * 4),
"WEIGHT_PROOF_RECENT_BLOCKS": 1000,
"MAX_BLOCK_COUNT_PER_REQUESTS": 32, # Allow up to 32 blocks per request
"INITIAL_FREEZE_END_TIMESTAMP": 1620061200, # Mon May 03 2021 17:00:00 GMT+0000
"NETWORK_TYPE": 0,
"MAX_GENERATOR_SIZE": 1000000,
"MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list
"POOL_SUB_SLOT_ITERS": 37600000000, # iters limit * NUM_SPS
}
DEFAULT_CONSTANTS = ConsensusConstants(**testnet_kwargs) # type: ignore
| 57.967742 | 118 | 0.741514 | from fork.util.ints import uint64
from .constants import ConsensusConstants
testnet_kwargs = {
"SLOT_BLOCKS_TARGET": 32,
"MIN_BLOCKS_PER_CHALLENGE_BLOCK": 16,
"MAX_SUB_SLOT_BLOCKS": 128,
"NUM_SPS_SUB_SLOT": 64,
"SUB_SLOT_ITERS_STARTING": 2 ** 27,
"DIFFICULTY_CONSTANT_FACTOR": 2 ** 64,
"DIFFICULTY_STARTING": 13,
"DIFFICULTY_CHANGE_MAX_FACTOR": 3,
"SUB_EPOCH_BLOCKS": 384,
"EPOCH_BLOCKS": 4608,
"SIGNIFICANT_BITS": 8,
"DISCRIMINANT_SIZE_BITS": 1024,
"NUMBER_ZERO_BITS_PLOT_FILTER": 9,
"MIN_PLOT_SIZE": 32,
"MAX_PLOT_SIZE": 50,
"SUB_SLOT_TIME_TARGET": 600,
"NUM_SP_INTERVALS_EXTRA": 3,
"MAX_FUTURE_TIME": 5 * 60,
"NUMBER_OF_TIMESTAMPS": 11,
"GENESIS_CHALLENGE": bytes.fromhex("be6bbdf83a789fd2b7e5ac8e2954f510e92115bb9e1c84591f6adb4055a3b845"),
"AGG_SIG_ME_ADDITIONAL_DATA": bytes.fromhex("3800a9169891c0554775b12cbf5d79f6eb50ccb5f95630536a4cecd7a18aa34b"),
"GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex(
"75e5849b1a27d71e74de1390a4fc81c38b4ed8ce24d4efb2c9a5807d0e82106c"
),
"GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex(
"75e5849b1a27d71e74de1390a4fc81c38b4ed8ce24d4efb2c9a5807d0e82106c"
),
"MAX_VDF_WITNESS_SIZE": 64,
"MEMPOOL_BLOCK_BUFFER": 50,
"MAX_COIN_AMOUNT": uint64((1 << 64) - 1),
"MAX_BLOCK_COST_CLVM": 11000000000,
"COST_PER_BYTE": 12000,
"WEIGHT_PROOF_THRESHOLD": 2,
"BLOCKS_CACHE_SIZE": 4608 + (128 * 4),
"WEIGHT_PROOF_RECENT_BLOCKS": 1000,
"MAX_BLOCK_COUNT_PER_REQUESTS": 32,
"INITIAL_FREEZE_END_TIMESTAMP": 1620061200,
"NETWORK_TYPE": 0,
"MAX_GENERATOR_SIZE": 1000000,
"MAX_GENERATOR_REF_LIST_SIZE": 512,
"POOL_SUB_SLOT_ITERS": 37600000000,
}
DEFAULT_CONSTANTS = ConsensusConstants(**testnet_kwargs)
| true | true |
7900107e67259c846828bf5bc847ad5047773441 | 4,197 | py | Python | prototype/api/FlaskApp/FlaskApp/azure_components/azure/mgmt/common/filters.py | Grey-Peters/IanPeters | 3f3bb124ff9f340fad1057d96305594070ce8be9 | [
"Apache-2.0"
] | null | null | null | prototype/api/FlaskApp/FlaskApp/azure_components/azure/mgmt/common/filters.py | Grey-Peters/IanPeters | 3f3bb124ff9f340fad1057d96305594070ce8be9 | [
"Apache-2.0"
] | null | null | null | prototype/api/FlaskApp/FlaskApp/azure_components/azure/mgmt/common/filters.py | Grey-Peters/IanPeters | 3f3bb124ff9f340fad1057d96305594070ce8be9 | [
"Apache-2.0"
] | null | null | null | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import logging
import json
DEFAULT_LOG_NAME = 'azure.mgmt.common.filters'
DEFAULT_LOG_LEVEL = logging.DEBUG
DEFAULT_USER_AGENT = ''
class RequestFilter(object):
'''
Send the request.
'''
def __init__(self, session):
if session is None:
raise ValueError('session cannot be None.')
self._session = session
def send(self, prepared_request):
return self._session.send(prepared_request)
class SigningFilter(object):
'''
Sign the request.
'''
def __init__(self, creds):
if creds is None:
raise ValueError('creds cannot be None.')
self._creds = creds
def send(self, prepared_request):
self._creds.sign_request(prepared_request)
return self.next.send(prepared_request)
class UserAgentFilter(object):
'''
Add a user-agent header to the request.
'''
def __init__(self, user_agent):
if user_agent is None:
raise ValueError('user_agent cannot be None.')
self._user_agent = user_agent
def send(self, prepared_request):
prepared_request.headers['user-agent'] = self._user_agent
return self.next.send(prepared_request)
class LogFilter(object):
'''
Log the request to a standard python logger.
Example of enabling logging to the console:
import logging
logger = logging.getLogger('azure.mgmt.common.filters')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
'''
def __init__(self, name=DEFAULT_LOG_NAME, level=DEFAULT_LOG_LEVEL):
if name is None:
raise ValueError('name cannot be None.')
if level is None:
raise ValueError('level cannot be None.')
self.level = level
self.logger = logging.getLogger(name)
def send(self, prepared_request):
self._log_request(prepared_request)
response = self.next.send(prepared_request)
self._log_response(response)
return response
@staticmethod
def _headers_to_string(headers):
mask_headers = ['authorization']
headers_raw = []
for header, value in headers.items():
if header.lower() in mask_headers:
value = '*****'
headers_raw.append('%s: %s' % (header, value))
return '\n'.join(headers_raw)
@staticmethod
def _pretty_print(content):
try:
return json.dumps(
json.loads(content),
sort_keys=True,
indent=4,
separators=(',', ': '),
)
except Exception:
pass
return content
def _log_request(self, request):
if self.logger.isEnabledFor(self.level):
headers = self._headers_to_string(request.headers)
msg = ['Request: %s %s\n%s\n' % (request.method, request.url, headers)]
if request.body:
msg.append(self._pretty_print(request.body))
self.logger.log(self.level, '\n'.join(msg))
def _log_response(self, response):
if self.logger.isEnabledFor(self.level):
headers = self._headers_to_string(response.headers)
msg = ['Response: %s %s\n%s\n' % (response.status_code, response.reason, headers)]
if response.text:
msg.append(self._pretty_print(response.text))
self.logger.log(self.level, '\n'.join(msg))
| 31.088889 | 94 | 0.610674 |
import logging
import json
DEFAULT_LOG_NAME = 'azure.mgmt.common.filters'
DEFAULT_LOG_LEVEL = logging.DEBUG
DEFAULT_USER_AGENT = ''
class RequestFilter(object):
'''
Send the request.
'''
def __init__(self, session):
if session is None:
raise ValueError('session cannot be None.')
self._session = session
def send(self, prepared_request):
return self._session.send(prepared_request)
class SigningFilter(object):
'''
Sign the request.
'''
def __init__(self, creds):
if creds is None:
raise ValueError('creds cannot be None.')
self._creds = creds
def send(self, prepared_request):
self._creds.sign_request(prepared_request)
return self.next.send(prepared_request)
class UserAgentFilter(object):
'''
Add a user-agent header to the request.
'''
def __init__(self, user_agent):
if user_agent is None:
raise ValueError('user_agent cannot be None.')
self._user_agent = user_agent
def send(self, prepared_request):
prepared_request.headers['user-agent'] = self._user_agent
return self.next.send(prepared_request)
class LogFilter(object):
'''
Log the request to a standard python logger.
Example of enabling logging to the console:
import logging
logger = logging.getLogger('azure.mgmt.common.filters')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
'''
def __init__(self, name=DEFAULT_LOG_NAME, level=DEFAULT_LOG_LEVEL):
if name is None:
raise ValueError('name cannot be None.')
if level is None:
raise ValueError('level cannot be None.')
self.level = level
self.logger = logging.getLogger(name)
def send(self, prepared_request):
self._log_request(prepared_request)
response = self.next.send(prepared_request)
self._log_response(response)
return response
@staticmethod
def _headers_to_string(headers):
mask_headers = ['authorization']
headers_raw = []
for header, value in headers.items():
if header.lower() in mask_headers:
value = '*****'
headers_raw.append('%s: %s' % (header, value))
return '\n'.join(headers_raw)
@staticmethod
def _pretty_print(content):
try:
return json.dumps(
json.loads(content),
sort_keys=True,
indent=4,
separators=(',', ': '),
)
except Exception:
pass
return content
def _log_request(self, request):
if self.logger.isEnabledFor(self.level):
headers = self._headers_to_string(request.headers)
msg = ['Request: %s %s\n%s\n' % (request.method, request.url, headers)]
if request.body:
msg.append(self._pretty_print(request.body))
self.logger.log(self.level, '\n'.join(msg))
def _log_response(self, response):
if self.logger.isEnabledFor(self.level):
headers = self._headers_to_string(response.headers)
msg = ['Response: %s %s\n%s\n' % (response.status_code, response.reason, headers)]
if response.text:
msg.append(self._pretty_print(response.text))
self.logger.log(self.level, '\n'.join(msg))
| false | true |
790010e66a70ddcb69293dc906896900f0e63fe9 | 791 | py | Python | condition_variables/stingy_spendy_cond_variable.py | zahedul/multithreadinginpython | 52492b888191fd1acde8d43e7e5aa40cec445718 | [
"MIT"
] | 21 | 2020-09-24T00:13:15.000Z | 2022-03-09T03:31:11.000Z | condition_variables/stingy_spendy_cond_variable.py | zahedul/multithreadinginpython | 52492b888191fd1acde8d43e7e5aa40cec445718 | [
"MIT"
] | null | null | null | condition_variables/stingy_spendy_cond_variable.py | zahedul/multithreadinginpython | 52492b888191fd1acde8d43e7e5aa40cec445718 | [
"MIT"
] | 22 | 2020-10-23T00:02:33.000Z | 2022-03-28T13:29:44.000Z | import time
from threading import Thread, Condition
class StingySpendy:
money = 100
cv = Condition()
def stingy(self):
for i in range(1000000):
self.cv.acquire()
self.money += 10
self.cv.notify()
self.cv.release()
print("Stingy Done")
def spendy(self):
for i in range(500000):
self.cv.acquire()
while self.money < 20:
self.cv.wait()
self.money -= 20
if self.money < 0:
print("Money in bank", self.money)
self.cv.release()
print("Spendy Done")
ss = StingySpendy()
Thread(target=ss.stingy, args=()).start()
Thread(target=ss.spendy, args=()).start()
time.sleep(5)
print("Money in the end", ss.money)
| 23.264706 | 50 | 0.539823 | import time
from threading import Thread, Condition
class StingySpendy:
money = 100
cv = Condition()
def stingy(self):
for i in range(1000000):
self.cv.acquire()
self.money += 10
self.cv.notify()
self.cv.release()
print("Stingy Done")
def spendy(self):
for i in range(500000):
self.cv.acquire()
while self.money < 20:
self.cv.wait()
self.money -= 20
if self.money < 0:
print("Money in bank", self.money)
self.cv.release()
print("Spendy Done")
ss = StingySpendy()
Thread(target=ss.stingy, args=()).start()
Thread(target=ss.spendy, args=()).start()
time.sleep(5)
print("Money in the end", ss.money)
| true | true |
7900115e74f1993428ff8a05fc2b038f6f1693da | 9,058 | py | Python | test/units/modules/packaging/os/test_rhn_register.py | Container-Projects/ansible-provider-docs | 100b695b0b0c4d8d08af362069557ffc735d0d7e | [
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 37 | 2017-08-15T15:02:43.000Z | 2021-07-23T03:44:31.000Z | test/units/modules/packaging/os/test_rhn_register.py | Container-Projects/ansible-provider-docs | 100b695b0b0c4d8d08af362069557ffc735d0d7e | [
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 12 | 2018-01-10T05:25:25.000Z | 2021-11-28T06:55:48.000Z | test/units/modules/packaging/os/test_rhn_register.py | Container-Projects/ansible-provider-docs | 100b695b0b0c4d8d08af362069557ffc735d0d7e | [
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 49 | 2017-08-15T09:52:13.000Z | 2022-03-21T17:11:54.000Z | import contextlib
import json
import os
from ansible.compat.tests.mock import mock_open
from ansible.module_utils import basic
from ansible.module_utils._text import to_native
import ansible.module_utils.six
from ansible.module_utils.six.moves import xmlrpc_client
from ansible.modules.packaging.os import rhn_register
import pytest
SYSTEMID = """<?xml version="1.0"?>
<params>
<param>
<value><struct>
<member>
<name>system_id</name>
<value><string>ID-123456789</string></value>
</member>
</struct></value>
</param>
</params>
"""
def skipWhenAllModulesMissing(modules):
    """Skip the decorated test unless one of modules is available."""
    def _importable(module_name):
        # Attempt the real import (not just a path probe) so the check mirrors
        # exactly what the code under test will do at runtime.
        try:
            __import__(module_name)
        except ImportError:
            return False
        return True
    # Skip (True) only when *none* of the candidate modules can be imported.
    return not any(_importable(module_name) for module_name in modules)
orig_import = __import__
@pytest.fixture
def import_libxml(mocker):
    """Make ``import libxml2`` / ``import libxml`` fail for one test.

    Replaces the builtin ``__import__`` with a wrapper that raises
    ImportError for the two libxml names and delegates everything else to
    the saved original (``orig_import``).
    """
    def mock_import(name, *args, **kwargs):
        if name in ['libxml2', 'libxml']:
            raise ImportError()
        else:
            return orig_import(name, *args, **kwargs)
    # The builtin module holding __import__ is named differently on py2 vs py3.
    if ansible.module_utils.six.PY3:
        mocker.patch('builtins.__import__', side_effect=mock_import)
    else:
        mocker.patch('__builtin__.__import__', side_effect=mock_import)
@pytest.fixture
def patch_rhn(mocker):
    """Stub Rhn environment access so tests never touch the real system.

    ``load_config`` returns a fixed server URL / systemid path, and the
    up2date client library is reported as present.
    """
    load_config_return = {
        'serverURL': 'https://xmlrpc.rhn.redhat.com/XMLRPC',
        'systemIdPath': '/etc/sysconfig/rhn/systemid'
    }
    mocker.patch.object(rhn_register.Rhn, 'load_config', return_value=load_config_return)
    mocker.patch.object(rhn_register, 'HAS_UP2DATE_CLIENT', mocker.PropertyMock(return_value=True))
@pytest.mark.skipif(skipWhenAllModulesMissing(['libxml2', 'libxml']), reason='none are available: libxml2, libxml')
def test_systemid_with_requirements(capfd, mocker, patch_rhn):
    """Check 'msg' and 'changed' results"""
    # Rhn reads the systemid file; fake its presence and serve the canned
    # SYSTEMID payload instead of the real /etc/sysconfig/rhn/systemid.
    mocker.patch.object(rhn_register.Rhn, 'enable')
    mock_isfile = mocker.patch('os.path.isfile', return_value=True)  # NOTE(review): handle unused, patch is what matters
    mocker.patch('ansible.modules.packaging.os.rhn_register.open', mock_open(read_data=SYSTEMID), create=True)
    rhn = rhn_register.Rhn()
    # Only the digits after the "ID-" prefix are expected.
    assert '123456789' == to_native(rhn.systemid)
@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_systemid_requirements_missing(capfd, mocker, patch_rhn, import_libxml):
    """Check that missing dependencies are detected"""
    # import_libxml makes libxml2/libxml unimportable, so main() must exit
    # with a failure JSON instead of proceeding.
    mocker.patch('os.path.isfile', return_value=True)
    mocker.patch('ansible.modules.packaging.os.rhn_register.open', mock_open(read_data=SYSTEMID), create=True)
    with pytest.raises(SystemExit):
        rhn_register.main()
    out, err = capfd.readouterr()
    results = json.loads(out)
    assert results['failed']
    assert 'Missing arguments' in results['msg']
@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_without_required_parameters(capfd, patch_rhn):
    """Failure must occurs when all parameters are missing"""
    # No module arguments at all: the module should fail fast and report the
    # standard missing-arguments message as JSON on stdout.
    with pytest.raises(SystemExit):
        rhn_register.main()
    out, err = capfd.readouterr()
    results = json.loads(out)
    assert results['failed']
    assert 'Missing arguments' in results['msg']
TESTED_MODULE = rhn_register.__name__
# Each TEST_CASES entry is a [module_args, expectations] pair:
#   module_args  -> fed to the module via the patch_ansible_module fixture
#   expectations -> 'calls': XML-RPC (method, response) pairs the mocked
#                   transport serves (an exception value makes the call raise),
#                   plus expected mock call counts and the module's resulting
#                   changed/failed/msg.
TEST_CASES = [
    [
        # Registering an unregistered host
        {
            'activationkey': 'key',
            'username': 'user',
            'password': 'pass',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)', 'channel_label': 'rhel-x86_64-server-6'}]]),
                ('channel.software.setSystemChannels', [1]),
                ('auth.logout', [1]),
            ],
            'is_registered': False,
            'is_registered.call_count': 1,
            'enable.call_count': 1,
            'systemid.call_count': 2,
            'changed': True,
            'msg': "System successfully registered to 'rhn.redhat.com'.",
            'run_command.call_count': 1,
            'run_command.call_args': '/usr/sbin/rhnreg_ks',
            'request_called': True,
            'unlink.call_count': 0,
        }
    ],
    [
        # Register an host already registered, check that result is unchanged
        {
            'activationkey': 'key',
            'username': 'user',
            'password': 'pass',
        },
        {
            'calls': [
            ],
            'is_registered': True,
            'is_registered.call_count': 1,
            'enable.call_count': 0,
            'systemid.call_count': 0,
            'changed': False,
            'msg': 'System already registered.',
            'run_command.call_count': 0,
            'request_called': False,
            'unlink.call_count': 0,
        },
    ],
    [
        # Unregister an host, check that result is changed
        {
            'activationkey': 'key',
            'username': 'user',
            'password': 'pass',
            'state': 'absent',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.deleteSystems', [1]),
                ('auth.logout', [1]),
            ],
            'is_registered': True,
            'is_registered.call_count': 1,
            'enable.call_count': 0,
            'systemid.call_count': 1,
            'changed': True,
            'msg': 'System successfully unregistered from rhn.redhat.com.',
            'run_command.call_count': 0,
            'request_called': True,
            'unlink.call_count': 1,
        }
    ],
    [
        # Unregister a unregistered host (systemid missing) locally, check that result is unchanged
        {
            'activationkey': 'key',
            'username': 'user',
            'password': 'pass',
            'state': 'absent',
        },
        {
            'calls': [],
            'is_registered': False,
            'is_registered.call_count': 1,
            'enable.call_count': 0,
            'systemid.call_count': 0,
            'changed': False,
            'msg': 'System already unregistered.',
            'run_command.call_count': 0,
            'request_called': False,
            'unlink.call_count': 0,
        }
    ],
    [
        # Unregister an unknown host (an host with a systemid available locally, check that result contains failed
        {
            'activationkey': 'key',
            'username': 'user',
            'password': 'pass',
            'state': 'absent',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.deleteSystems', xmlrpc_client.Fault(1003, 'The following systems were NOT deleted: 123456789')),
                ('auth.logout', [1]),
            ],
            'is_registered': True,
            'is_registered.call_count': 1,
            'enable.call_count': 0,
            'systemid.call_count': 1,
            'failed': True,
            'msg': "Failed to unregister: <Fault 1003: 'The following systems were NOT deleted: 123456789'>",
            'run_command.call_count': 0,
            'request_called': True,
            'unlink.call_count': 0,
        }
    ],
]
@pytest.mark.parametrize('patch_ansible_module, testcase', TEST_CASES, indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_register_parameters(mocker, capfd, mock_request, patch_rhn, testcase):
    """Run rhn_register.main() against one TEST_CASES entry and verify it.

    ``mock_request`` is a fixture defined elsewhere; presumably it routes the
    XML-RPC transport to the testcase's 'calls' list — TODO confirm.
    """
    # successful execution, no output
    mocker.patch.object(basic.AnsibleModule, 'run_command', return_value=(0, '', ''))
    mock_is_registered = mocker.patch.object(rhn_register.Rhn, 'is_registered', mocker.PropertyMock(return_value=testcase['is_registered']))
    mocker.patch.object(rhn_register.Rhn, 'enable')
    mock_systemid = mocker.patch.object(rhn_register.Rhn, 'systemid', mocker.PropertyMock(return_value=12345))
    mocker.patch('os.unlink', return_value=True)
    # The module always terminates via exit_json/fail_json -> SystemExit.
    with pytest.raises(SystemExit):
        rhn_register.main()
    assert basic.AnsibleModule.run_command.call_count == testcase['run_command.call_count']
    if basic.AnsibleModule.run_command.call_count:
        assert basic.AnsibleModule.run_command.call_args[0][0][0] == testcase['run_command.call_args']
    assert mock_is_registered.call_count == testcase['is_registered.call_count']
    assert rhn_register.Rhn.enable.call_count == testcase['enable.call_count']
    assert mock_systemid.call_count == testcase['systemid.call_count']
    assert xmlrpc_client.Transport.request.called == testcase['request_called']
    assert os.unlink.call_count == testcase['unlink.call_count']
    # The module reports its result as JSON on stdout.
    out, err = capfd.readouterr()
    results = json.loads(out)
    assert results.get('changed') == testcase.get('changed')
    assert results.get('failed') == testcase.get('failed')
    assert results['msg'] == testcase['msg']
    assert not testcase['calls']  # all calls should have been consumed
| 34.310606 | 143 | 0.611945 | import contextlib
import json
import os
from ansible.compat.tests.mock import mock_open
from ansible.module_utils import basic
from ansible.module_utils._text import to_native
import ansible.module_utils.six
from ansible.module_utils.six.moves import xmlrpc_client
from ansible.modules.packaging.os import rhn_register
import pytest
SYSTEMID = """<?xml version="1.0"?>
<params>
<param>
<value><struct>
<member>
<name>system_id</name>
<value><string>ID-123456789</string></value>
</member>
</struct></value>
</param>
</params>
"""
def skipWhenAllModulesMissing(modules):
for module in modules:
try:
__import__(module)
return False
except ImportError:
continue
return True
orig_import = __import__
@pytest.fixture
def import_libxml(mocker):
def mock_import(name, *args, **kwargs):
if name in ['libxml2', 'libxml']:
raise ImportError()
else:
return orig_import(name, *args, **kwargs)
if ansible.module_utils.six.PY3:
mocker.patch('builtins.__import__', side_effect=mock_import)
else:
mocker.patch('__builtin__.__import__', side_effect=mock_import)
@pytest.fixture
def patch_rhn(mocker):
load_config_return = {
'serverURL': 'https://xmlrpc.rhn.redhat.com/XMLRPC',
'systemIdPath': '/etc/sysconfig/rhn/systemid'
}
mocker.patch.object(rhn_register.Rhn, 'load_config', return_value=load_config_return)
mocker.patch.object(rhn_register, 'HAS_UP2DATE_CLIENT', mocker.PropertyMock(return_value=True))
@pytest.mark.skipif(skipWhenAllModulesMissing(['libxml2', 'libxml']), reason='none are available: libxml2, libxml')
def test_systemid_with_requirements(capfd, mocker, patch_rhn):
mocker.patch.object(rhn_register.Rhn, 'enable')
mock_isfile = mocker.patch('os.path.isfile', return_value=True)
mocker.patch('ansible.modules.packaging.os.rhn_register.open', mock_open(read_data=SYSTEMID), create=True)
rhn = rhn_register.Rhn()
assert '123456789' == to_native(rhn.systemid)
@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_systemid_requirements_missing(capfd, mocker, patch_rhn, import_libxml):
mocker.patch('os.path.isfile', return_value=True)
mocker.patch('ansible.modules.packaging.os.rhn_register.open', mock_open(read_data=SYSTEMID), create=True)
with pytest.raises(SystemExit):
rhn_register.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert results['failed']
assert 'Missing arguments' in results['msg']
@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_without_required_parameters(capfd, patch_rhn):
with pytest.raises(SystemExit):
rhn_register.main()
out, err = capfd.readouterr()
results = json.loads(out)
assert results['failed']
assert 'Missing arguments' in results['msg']
TESTED_MODULE = rhn_register.__name__
TEST_CASES = [
[
{
'activationkey': 'key',
'username': 'user',
'password': 'pass',
},
{
'calls': [
('auth.login', ['X' * 43]),
('channel.software.listSystemChannels',
[[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)', 'channel_label': 'rhel-x86_64-server-6'}]]),
('channel.software.setSystemChannels', [1]),
('auth.logout', [1]),
],
'is_registered': False,
'is_registered.call_count': 1,
'enable.call_count': 1,
'systemid.call_count': 2,
'changed': True,
'msg': "System successfully registered to 'rhn.redhat.com'.",
'run_command.call_count': 1,
'run_command.call_args': '/usr/sbin/rhnreg_ks',
'request_called': True,
'unlink.call_count': 0,
}
],
[
{
'activationkey': 'key',
'username': 'user',
'password': 'pass',
},
{
'calls': [
],
'is_registered': True,
'is_registered.call_count': 1,
'enable.call_count': 0,
'systemid.call_count': 0,
'changed': False,
'msg': 'System already registered.',
'run_command.call_count': 0,
'request_called': False,
'unlink.call_count': 0,
},
],
[
{
'activationkey': 'key',
'username': 'user',
'password': 'pass',
'state': 'absent',
},
{
'calls': [
('auth.login', ['X' * 43]),
('system.deleteSystems', [1]),
('auth.logout', [1]),
],
'is_registered': True,
'is_registered.call_count': 1,
'enable.call_count': 0,
'systemid.call_count': 1,
'changed': True,
'msg': 'System successfully unregistered from rhn.redhat.com.',
'run_command.call_count': 0,
'request_called': True,
'unlink.call_count': 1,
}
],
[
{
'activationkey': 'key',
'username': 'user',
'password': 'pass',
'state': 'absent',
},
{
'calls': [],
'is_registered': False,
'is_registered.call_count': 1,
'enable.call_count': 0,
'systemid.call_count': 0,
'changed': False,
'msg': 'System already unregistered.',
'run_command.call_count': 0,
'request_called': False,
'unlink.call_count': 0,
}
],
[
{
'activationkey': 'key',
'username': 'user',
'password': 'pass',
'state': 'absent',
},
{
'calls': [
('auth.login', ['X' * 43]),
('system.deleteSystems', xmlrpc_client.Fault(1003, 'The following systems were NOT deleted: 123456789')),
('auth.logout', [1]),
],
'is_registered': True,
'is_registered.call_count': 1,
'enable.call_count': 0,
'systemid.call_count': 1,
'failed': True,
'msg': "Failed to unregister: <Fault 1003: 'The following systems were NOT deleted: 123456789'>",
'run_command.call_count': 0,
'request_called': True,
'unlink.call_count': 0,
}
],
]
@pytest.mark.parametrize('patch_ansible_module, testcase', TEST_CASES, indirect=['patch_ansible_module'])
@pytest.mark.usefixtures('patch_ansible_module')
def test_register_parameters(mocker, capfd, mock_request, patch_rhn, testcase):
mocker.patch.object(basic.AnsibleModule, 'run_command', return_value=(0, '', ''))
mock_is_registered = mocker.patch.object(rhn_register.Rhn, 'is_registered', mocker.PropertyMock(return_value=testcase['is_registered']))
mocker.patch.object(rhn_register.Rhn, 'enable')
mock_systemid = mocker.patch.object(rhn_register.Rhn, 'systemid', mocker.PropertyMock(return_value=12345))
mocker.patch('os.unlink', return_value=True)
with pytest.raises(SystemExit):
rhn_register.main()
assert basic.AnsibleModule.run_command.call_count == testcase['run_command.call_count']
if basic.AnsibleModule.run_command.call_count:
assert basic.AnsibleModule.run_command.call_args[0][0][0] == testcase['run_command.call_args']
assert mock_is_registered.call_count == testcase['is_registered.call_count']
assert rhn_register.Rhn.enable.call_count == testcase['enable.call_count']
assert mock_systemid.call_count == testcase['systemid.call_count']
assert xmlrpc_client.Transport.request.called == testcase['request_called']
assert os.unlink.call_count == testcase['unlink.call_count']
out, err = capfd.readouterr()
results = json.loads(out)
assert results.get('changed') == testcase.get('changed')
assert results.get('failed') == testcase.get('failed')
assert results['msg'] == testcase['msg']
assert not testcase['calls']
| true | true |
790012983e9e4e3686a41f81bc9c31837bf74a23 | 5,958 | py | Python | http/__init__.py | radon-h2020/radon-ctt-agent-plugins | 280769d2d4c68e719fa4475ed6f89c44da295ad0 | [
"Apache-2.0"
] | null | null | null | http/__init__.py | radon-h2020/radon-ctt-agent-plugins | 280769d2d4c68e719fa4475ed6f89c44da295ad0 | [
"Apache-2.0"
] | 1 | 2021-03-30T12:14:59.000Z | 2021-03-30T12:16:57.000Z | http/__init__.py | radon-h2020/radon-ctt-agent-plugins | 280769d2d4c68e719fa4475ed6f89c44da295ad0 | [
"Apache-2.0"
] | 3 | 2020-06-02T15:07:27.000Z | 2021-03-02T13:41:38.000Z | import json
import os
import pickle
import requests
import shutil
import tempfile
import uuid
from flask import Blueprint, current_app, jsonify, request, send_file
# Plugin identity: blueprint name and the URL prefix it is mounted under.
name = 'HTTP'
prefix = 'http'
storage_enabled = True  # signals to the agent core that this plugin needs a storage dir
global storage_path  # NOTE(review): no-op at module level; storage_path is actually bound in register()
plugin = Blueprint(name, __name__)
def register(app, plugin_storage_path=None):
    """Mount the HTTP plugin blueprint on *app* and record its storage directory."""
    global storage_path
    storage_path = plugin_storage_path
    app.register_blueprint(plugin, url_prefix=f'/{prefix}')
    app.logger.info(f'{name} plugin registered.')
# In-memory store (lost on restart): configurations and executions keyed by uuid.
persistence = {
    "configuration": {},
    "execution": {},
}
# Name of the zipped result bundle written inside each execution's directory.
result_zip_file_name = 'results.zip'
@plugin.route('/')
def index():
    """Identity endpoint for the plugin root."""
    message = f'This is the Radon CTT Agent HTTP Plugin.'
    return message, 200
@plugin.route('/configuration/', methods=['POST'])
def configuration_create():
    """Create a test configuration from form fields and store it in memory.

    Each known parameter is validated against ``params``: required ones fall
    back to their default when omitted; a required parameter with no default
    (hostname) yields a 400.  On success the stored configuration, including
    its freshly generated uuid, is returned with HTTP 201.
    """
    config_instance = {}
    configuration_uuid = str(uuid.uuid4())
    config_instance['uuid'] = configuration_uuid
    # Parameter schema: whether each field is required and its default value.
    params = {
        'use_https': {
            'required': True,
            'default': False,
        },
        'method': {
            'required': True,
            'default': 'GET',
        },
        'hostname': {
            'required': True,
            'default': None,
        },
        'port': {
            'required': True,
            'default': 80,
        },
        'path': {
            'required': True,
            'default': "/",
        },
        'test_body': {
            'required': False,
            'default': None,
        },
        'test_header': {
            'required': False,
            'default': None,
        },
    }
    for param in params:
        is_required = params[param]['required']
        default_value = params[param]['default']
        if param in request.form:
            # NOTE(review): form-supplied values are stored as *strings*, while
            # defaults keep native types (bool/int) — consumers must coerce.
            value = request.form.get(param, type=str)
            current_app.logger.info(f'\'{param}\' set to: \'{value}\'.')
            config_instance[param] = value
        else:
            # Apply the default only if one exists; hostname has none, so
            # omitting it falls through to the 400 below.
            if is_required and default_value is not None:
                value = default_value
                current_app.logger.info(f'\'{param}\' set to default value: \'{value}\'.')
                config_instance[param] = value
        if is_required and param not in config_instance:
            current_app.logger.error(f"Required parameter {param} not provided.")
            return f'Required parameter {param} not provided.', 400
    persistence['configuration'][configuration_uuid] = config_instance
    current_app.logger.info(f"Config: {config_instance}")
    return jsonify(config_instance), 201
def _parse_bool(value):
    """Coerce a configuration value to bool.

    Values created from ``request.form`` are strings, so the previous
    ``bool(config_entry['use_https'])`` was True even for the string
    ``'False'``.  Accept real booleans and the usual textual spellings;
    anything else falls back to plain Python truthiness.
    """
    if isinstance(value, bool):
        return value
    if isinstance(value, str):
        return value.strip().lower() in ('1', 'true', 'yes', 'on')
    return bool(value)


@plugin.route('/execution/', methods=['POST'])
def execution():
    """Execute the HTTP request described by a previously stored configuration.

    Expects a ``config_uuid`` form field referencing an entry created via
    ``/configuration/``.  Sends the configured request, persists the execution
    metadata (execution.json) and the pickled response (response.bin) under
    ``storage_path/<execution uuid>/`` and zips that directory so it can be
    fetched later via ``/execution/<uuid>/``.

    Returns 200 with the execution record, 400 when the referenced
    configuration is incomplete, 404 when the configuration is unknown.
    """
    config_uuid = request.form.get('config_uuid')
    if config_uuid is None or config_uuid not in persistence['configuration']:
        # Previously an unknown uuid raised KeyError (HTTP 500) and a missing
        # field returned an invalid (str, Response, int) 3-tuple.
        return "No configuration with that ID found.", 404

    config_entry = persistence['configuration'][config_uuid]
    execution_instance = {'config': config_entry}

    # Values may be stored as strings (they originate from request.form), so
    # each one is coerced to its expected type here.
    use_https = _parse_bool(config_entry['use_https']) if 'use_https' in config_entry else None
    method = str(config_entry['method']).upper() if 'method' in config_entry else None
    hostname = str(config_entry['hostname']) if 'hostname' in config_entry else None
    port = int(config_entry['port']) if 'port' in config_entry else None
    path = str(config_entry['path']) if 'path' in config_entry else None
    test_body = config_entry.get('test_body')
    test_header = config_entry.get('test_header')

    # Check that all required parameters are present.
    if use_https is None or not (method and hostname and port and path):
        # Flask cannot serialize the original (str, Response, int) 3-tuple;
        # return a plain message with the 400 status instead.
        return "Required configuration parameters are missing.", 400

    protocol = 'https' if use_https else 'http'
    target_url = f'{protocol}://{hostname}:{port}{path}'

    # Send the request with the configured parameters.
    response = requests.request(method, target_url, headers=test_header, json=test_body)

    execution_uuid = str(uuid.uuid4())
    execution_instance['uuid'] = execution_uuid
    execution_instance['target_url'] = target_url
    execution_instance['status'] = str(response.status_code)
    persistence['execution'][execution_uuid] = execution_instance

    # Persist the execution record and the raw response, then zip the folder
    # so the results endpoint can serve a single artifact.
    execution_results_dir = os.path.join(storage_path, execution_uuid)
    os.makedirs(execution_results_dir)
    with open(os.path.join(execution_results_dir, 'execution.json'), 'w') as exec_json:
        exec_json.write(json.dumps(execution_instance))
    with open(os.path.join(execution_results_dir, 'response.bin'), 'wb') as response_bin:
        response_bin.write(pickle.dumps(response))
    with tempfile.NamedTemporaryFile() as tf:
        tmp_zip_file = shutil.make_archive(tf.name, 'zip', execution_results_dir)
        shutil.copy2(tmp_zip_file, os.path.join(execution_results_dir, result_zip_file_name))

    # Test was executed; the status of the probe itself is in the payload.
    return jsonify(execution_instance), 200
# Retrieve the zipped result bundle of a previous execution.
@plugin.route('/execution/<string:exec_uuid>/', methods=['GET'])
def execution_results(exec_uuid):
    """Serve ``results.zip`` for the execution identified by *exec_uuid*."""
    execution_entry = persistence['execution'].get(exec_uuid)
    if execution_entry is None:
        return "No execution found with that ID.", 404
    execution_uuid = execution_entry.get('uuid')
    results_zip_path = os.path.join(storage_path, execution_uuid, result_zip_file_name)
    if not os.path.isfile(results_zip_path):
        return "No results available (yet).", 404
    return send_file(results_zip_path)
| 33.661017 | 101 | 0.630916 | import json
import os
import pickle
import requests
import shutil
import tempfile
import uuid
from flask import Blueprint, current_app, jsonify, request, send_file
name = 'HTTP'
prefix = 'http'
storage_enabled = True
global storage_path
plugin = Blueprint(name, __name__)
def register(app, plugin_storage_path=None):
app.register_blueprint(plugin, url_prefix=f'/{prefix}')
app.logger.info(f'{name} plugin registered.')
global storage_path
storage_path = plugin_storage_path
persistence = {
"configuration": {},
"execution": {},
}
result_zip_file_name = 'results.zip'
@plugin.route('/')
def index():
return f'This is the Radon CTT Agent HTTP Plugin.', 200
@plugin.route('/configuration/', methods=['POST'])
def configuration_create():
config_instance = {}
configuration_uuid = str(uuid.uuid4())
config_instance['uuid'] = configuration_uuid
params = {
'use_https': {
'required': True,
'default': False,
},
'method': {
'required': True,
'default': 'GET',
},
'hostname': {
'required': True,
'default': None,
},
'port': {
'required': True,
'default': 80,
},
'path': {
'required': True,
'default': "/",
},
'test_body': {
'required': False,
'default': None,
},
'test_header': {
'required': False,
'default': None,
},
}
for param in params:
is_required = params[param]['required']
default_value = params[param]['default']
if param in request.form:
value = request.form.get(param, type=str)
current_app.logger.info(f'\'{param}\' set to: \'{value}\'.')
config_instance[param] = value
else:
if is_required and default_value is not None:
value = default_value
current_app.logger.info(f'\'{param}\' set to default value: \'{value}\'.')
config_instance[param] = value
if is_required and param not in config_instance:
current_app.logger.error(f"Required parameter {param} not provided.")
return f'Required parameter {param} not provided.', 400
persistence['configuration'][configuration_uuid] = config_instance
current_app.logger.info(f"Config: {config_instance}")
return jsonify(config_instance), 201
@plugin.route('/execution/', methods=['POST'])
def execution():
execution_instance = {}
if 'config_uuid' in request.form:
config_uuid = request.form['config_uuid']
config_entry = persistence['configuration'][config_uuid]
execution_instance['config'] = config_entry
use_https = bool(config_entry['use_https']) if 'use_https' in config_entry else None
method = str(config_entry['method']).upper() if 'method' in config_entry else None
hostname = str(config_entry['hostname']) if 'hostname' in config_entry else None
port = int(config_entry['port']) if 'port' in config_entry else None
path = str(config_entry['path']) if 'path' in config_entry else None
test_body = config_entry['test_body'] if 'test_body' in config_entry else None
test_header = config_entry['test_header'] if 'test_header' in config_entry else None
if use_https is not None and method and hostname and port and path:
protocol = 'http'
if use_https:
protocol += 's'
target_url = f'{protocol}://{hostname}:{port}{path}'
response = requests.request(method, target_url, headers=test_header, json=test_body)
response_status = response.status_code
execution_uuid = str(uuid.uuid4())
execution_instance['uuid'] = execution_uuid
execution_instance['target_url'] = target_url
execution_instance['status'] = str(response_status)
persistence['execution'][execution_uuid] = execution_instance
execution_results_dir = os.path.join(storage_path, execution_uuid)
os.makedirs(execution_results_dir)
execution_json = os.path.join(execution_results_dir, 'execution.json')
received_response = os.path.join(execution_results_dir, 'response.bin')
with open(execution_json, 'w') as exec_json:
exec_json.write(json.dumps(execution_instance))
with open(received_response, 'wb') as response_bin:
response_bin.write(pickle.dumps(response))
with tempfile.NamedTemporaryFile() as tf:
tmp_zip_file = shutil.make_archive(tf.name, 'zip', execution_results_dir)
shutil.copy2(tmp_zip_file, os.path.join(execution_results_dir, result_zip_file_name))
return jsonify(execution_instance), 200
else:
return "Required configuration parameters are missing.", jsonify(config_entry), 400
else:
return "No configuration with that ID found.", jsonify(persistence), 404
@plugin.route('/execution/<string:exec_uuid>/', methods=['GET'])
def execution_results(exec_uuid):
try:
execution_uuid = persistence.get('execution').get(exec_uuid).get('uuid')
except AttributeError:
return "No execution found with that ID.", 404
results_zip_path = os.path.join(storage_path, execution_uuid, result_zip_file_name)
if os.path.isfile(results_zip_path):
return send_file(results_zip_path)
else:
return "No results available (yet).", 404
| true | true |
7900154d2f66b972db2a166fdf4148c6452eff91 | 5,359 | py | Python | WindowsTelemetryViewer/PyQtJsonModel.py | KOLANICH/WindowsTelemetryViewer.py | bbdfef55760ab0be63de72ff7dde27046f1c7960 | [
"Unlicense"
] | 2 | 2021-11-30T15:40:57.000Z | 2021-11-30T15:41:00.000Z | WindowsTelemetryViewer/PyQtJsonModel.py | KOLANICH-tools/WindowsTelemetryViewer.py | a276fe62e309475261901a11bf61ae055fd01d02 | [
"Unlicense"
] | 1 | 2020-06-30T08:33:52.000Z | 2020-06-30T08:33:52.000Z | WindowsTelemetryViewer/PyQtJsonModel.py | KOLANICH/WindowsTelemetryViewer.py | bbdfef55760ab0be63de72ff7dde27046f1c7960 | [
"Unlicense"
] | null | null | null | __license__ = "MIT"
__copyright__ = r"""
MIT License
Copyright (c) 2017 Gregor Engberding
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
import json
from PySide2.QtCore import QAbstractItemModel, QAbstractListModel, QByteArray, QDataStream, QJsonParseError, QJsonValue, QMimeData, QModelIndex, Qt
from PySide2.QtWidgets import QApplication, QFileDialog
class QJsonTreeItem(object):
    """One node of a JSON tree: a (key, value, type) triple plus children."""

    def __init__(self, parent=None):
        self.mParent = parent
        self.mChilds = []
        # Initialize the key too, so key() cannot raise AttributeError on a
        # node that was never assigned one (previously only type/value were set).
        self.mKey = None
        self.mType = None
        self.mValue = None

    def appendChild(self, item):
        self.mChilds.append(item)

    def child(self, row: int):
        """Return the child at position *row* (IndexError if out of range)."""
        return self.mChilds[row]

    def parent(self):
        return self.mParent

    def childCount(self):
        return len(self.mChilds)

    def row(self):
        """Return this node's index among its parent's children (0 for the root)."""
        if self.mParent is not None:
            return self.mParent.mChilds.index(self)
        return 0

    def setKey(self, key: str):
        self.mKey = key

    def setValue(self, value):
        self.mValue = value

    def setType(self, type: "QJsonValue.Type"):
        # Parameter name kept for API compatibility though it shadows the builtin.
        self.mType = type

    def key(self):
        return self.mKey

    def value(self):
        return self.mValue

    def type(self):
        return self.mType

    def load(self, value, parent=None):
        """Recursively build a QJsonTreeItem tree from parsed JSON data.

        *value* is a dict, list, or scalar; the returned node is keyed "root"
        until a parent loop renames it.  Scalar types are stored by class
        *name* so the model's "type" column shows e.g. ``int``/``str``.
        """
        rootItem = QJsonTreeItem(parent)
        rootItem.setKey("root")
        jsonType = value.__class__.__name__
        if isinstance(value, dict):
            # Process the key/value pairs.
            for key in value:
                v = value[key]
                child = self.load(v, rootItem)
                child.setKey(key)
                child.setType(v.__class__.__name__)
                rootItem.appendChild(child)
        elif isinstance(value, list):
            # Process the values in the list; use the class *name* here too —
            # the original stored the class object for list elements, making
            # the "type" column inconsistent with dict children.
            for i, v in enumerate(value):
                child = self.load(v, rootItem)
                child.setKey(str(i))
                child.setType(v.__class__.__name__)
                rootItem.appendChild(child)
        else:
            # Scalar leaf: store the value; prefer the object's own type()
            # accessor (Qt JSON values), otherwise fall back to the class name.
            rootItem.setValue(value)
            try:
                rootItem.setType(value.type())
            except AttributeError:
                rootItem.setType(jsonType)
        return rootItem
class QJsonModel(QAbstractItemModel):
    """Read-only Qt tree model over a parsed JSON document.

    Columns are key / value / type; rows mirror the nesting of the JSON data.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.mRootItem = QJsonTreeItem()
        self.mHeaders = ["key", "value", "type"]

    def load(self, fileName):
        """Load JSON from *fileName* into the model.

        Returns False for an empty/None/False file name (QFileDialog returns
        False on cancel), otherwise the result of loadJson.  The file is now
        closed deterministically (the original leaked the handle).
        """
        if not fileName:
            return False
        with open(fileName, "rb") as file:
            return self.loadJson(file.read())

    def loadJson(self, json):
        """Parse a JSON document (bytes or str) and load it into the model.

        The parameter name is kept for backward compatibility even though it
        shadows the stdlib module, hence the local aliased import.  The
        original called QJsonDocument/QJsonParseError, which are not imported
        in this module, so it always raised NameError.
        """
        import json as _json
        try:
            document = _json.loads(json)
        except ValueError:
            # Invalid JSON: leave the model untouched and signal failure.
            return False
        return self.loadDict(document)

    def loadDict(self, dic):
        """Populate the model from an already-parsed dict or list."""
        self.mDocument = dic
        if self.mDocument is not None:
            self.beginResetModel()
            # The original discarded load()'s return value for lists, which
            # left the model permanently empty for top-level JSON arrays.
            self.mRootItem = self.mRootItem.load(self.mDocument)
            self.endResetModel()
            return True
        return False

    def data(self, index: "QModelIndex", role: int = ...):
        """Return display text for the key/value/type columns."""
        if not index.isValid():
            return None
        item = index.internalPointer()
        col = index.column()
        if role == Qt.DisplayRole:
            if col == 0:
                return str(item.key())
            elif col == 1:
                return str(item.value())
            elif col == 2:
                return str(item.type())
        return None

    def headerData(self, section: int, orientation: "Qt.Orientation", role: int = ...):
        """Return the horizontal header labels; no vertical headers."""
        if role != Qt.DisplayRole:
            return None
        if orientation == Qt.Horizontal:
            return self.mHeaders[section]
        # The original returned QVariant() here without importing it
        # (NameError); in PySide2 returning None is the "no data" answer.
        return None

    def index(self, row: int, column: int, parent: "QModelIndex" = ...):
        """Create an index for (row, column) under *parent*."""
        if not self.hasIndex(row, column, parent):
            return QModelIndex()
        if not parent.isValid():
            parentItem = self.mRootItem
        else:
            parentItem = parent.internalPointer()
        try:
            childItem = parentItem.child(row)
            return self.createIndex(row, column, childItem)
        except IndexError:
            return QModelIndex()

    def parent(self, index: "QModelIndex"):
        """Return the parent index of *index* (invalid index for top-level rows)."""
        if not index.isValid():
            return QModelIndex()
        childItem = index.internalPointer()
        parentItem = childItem.parent()
        if parentItem == self.mRootItem:
            return QModelIndex()
        return self.createIndex(parentItem.row(), 0, parentItem)

    def rowCount(self, parent: "QModelIndex" = ...):
        """Return the number of children under *parent*."""
        if parent.column() > 0:
            return 0
        if not parent.isValid():
            parentItem = self.mRootItem
        else:
            parentItem = parent.internalPointer()
        return parentItem.childCount()

    def columnCount(self, parent: "QModelIndex" = ...):
        # Fixed layout: key, value, type.
        return 3
| 24.810185 | 147 | 0.717485 | __license__ = "MIT"
__copyright__ = r"""
MIT License
Copyright (c) 2017 Gregor Engberding
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
import json
from PySide2.QtCore import QAbstractItemModel, QAbstractListModel, QByteArray, QDataStream, QJsonParseError, QJsonValue, QMimeData, QModelIndex, Qt
from PySide2.QtWidgets import QApplication, QFileDialog
class QJsonTreeItem(object):
def __init__(self, parent=None):
self.mParent = parent
self.mChilds = []
self.mType = None
self.mValue = None
def appendChild(self, item):
self.mChilds.append(item)
def child(self, row: int):
return self.mChilds[row]
def parent(self):
return self.mParent
def childCount(self):
return len(self.mChilds)
def row(self):
if self.mParent is not None:
return self.mParent.mChilds.index(self)
return 0
def setKey(self, key: str):
self.mKey = key
def setValue(self, value: str):
self.mValue = value
def setType(self, type: QJsonValue.Type):
self.mType = type
def key(self):
return self.mKey
def value(self):
return self.mValue
def type(self):
return self.mType
def load(self, value, parent=None):
rootItem = QJsonTreeItem(parent)
rootItem.setKey("root")
jsonType = None
jsonType = value.__class__.__name__
if isinstance(value, dict):
for key in value:
v = value[key]
child = self.load(v, rootItem)
child.setKey(key)
child.setType(v.__class__.__name__)
rootItem.appendChild(child)
elif isinstance(value, list):
for i, v in enumerate(value):
child = self.load(v, rootItem)
child.setKey(str(i))
child.setType(v.__class__)
rootItem.appendChild(child)
else:
rootItem.setValue(value)
try:
rootItem.setType(value.type())
except AttributeError:
if jsonType is not None:
rootItem.setType(jsonType)
else:
rootItem.setType(value.__class__)
return rootItem
class QJsonModel(QAbstractItemModel):
    """Read-only Qt tree model exposing a JSON document in three columns.

    Columns are key, value and type.  Populate the model via load()
    (file path), loadJson() (raw JSON text/bytes) or loadDict()
    (an already-parsed dict or list).
    """
    def __init__(self, parent=None):
        super().__init__(parent)
        self.mRootItem = QJsonTreeItem()
        self.mHeaders = ["key", "value", "type"]
    def load(self, fileName):
        """Load a JSON document from *fileName*.

        Returns False for a falsy path, otherwise the result of
        loadJson() (the original dropped that return value and
        always returned None on success).
        """
        if fileName is None or fileName is False:
            return False
        with open(fileName, "rb") as file:
            return self.loadJson(file.read())
    def loadJson(self, json):
        """Parse raw JSON text/bytes and load it into the model.

        The original called QJsonDocument.fromJson(), which was never
        imported and raised NameError; the stdlib parser produces the
        dict/list structure loadDict() expects.
        """
        import json as _json  # local alias: the parameter shadows the module
        return self.loadDict(_json.loads(json))
    def loadDict(self, dic):
        """Load an already-parsed JSON object (dict or list) into the model."""
        self.mDocument = dic
        if self.mDocument is not None:
            self.beginResetModel()
            # Always keep the returned tree: the original discarded the
            # result for list documents, leaving the model empty.
            self.mRootItem = self.mRootItem.load(self.mDocument)
            self.endResetModel()
            return True
        return False
    def data(self, index, role=...):
        """Return display text for the key/value/type columns."""
        if not index.isValid():
            return None
        item = index.internalPointer()
        col = index.column()
        if role == Qt.DisplayRole:
            if col == 0:
                return str(item.key())
            elif col == 1:
                return str(item.value())
            elif col == 2:
                return str(item.type())
        return None
    def headerData(self, section, orientation, role=...):
        """Return the column title for horizontal headers."""
        if role != Qt.DisplayRole:
            return None
        if orientation == Qt.Horizontal:
            return self.mHeaders[section]
        # QVariant() was never imported; returning None is the PySide2
        # equivalent of an invalid QVariant for unhandled orientations.
        return None
    def index(self, row, column, parent=...):
        """Create a model index for the child at (row, column) of *parent*."""
        if not self.hasIndex(row, column, parent):
            return QModelIndex()
        if not parent.isValid():
            parentItem = self.mRootItem
        else:
            parentItem = parent.internalPointer()
        try:
            childItem = parentItem.child(row)
            return self.createIndex(row, column, childItem)
        except IndexError:
            return QModelIndex()
    def parent(self, index):
        """Return the parent index of *index* (invalid index for top level)."""
        if not index.isValid():
            return QModelIndex()
        childItem = index.internalPointer()
        parentItem = childItem.parent()
        if parentItem == self.mRootItem:
            return QModelIndex()
        return self.createIndex(parentItem.row(), 0, parentItem)
    def rowCount(self, parent=...):
        """Return the number of children under *parent*."""
        if parent.column() > 0:
            return 0
        if not parent.isValid():
            parentItem = self.mRootItem
        else:
            parentItem = parent.internalPointer()
        return parentItem.childCount()
    def columnCount(self, parent=...):
        """The model always exposes the key, value and type columns."""
        return 3
| true | true |
790015693b2d378a786146cdce2ad3b92a3dac4d | 2,250 | py | Python | ewah/hooks/aircall.py | Gemma-Analytics/ewah | 2971d9ba3135bfbcc0f101e1cb0e9822d22e8752 | [
"MIT"
] | 14 | 2020-05-05T08:29:23.000Z | 2022-01-11T11:23:36.000Z | ewah/hooks/aircall.py | Gemma-Analytics/ewah | 2971d9ba3135bfbcc0f101e1cb0e9822d22e8752 | [
"MIT"
] | 13 | 2020-06-15T13:53:27.000Z | 2021-11-24T22:15:29.000Z | ewah/hooks/aircall.py | Gemma-Analytics/ewah | 2971d9ba3135bfbcc0f101e1cb0e9822d22e8752 | [
"MIT"
] | 2 | 2020-10-15T12:39:27.000Z | 2021-01-24T03:49:09.000Z | from ewah.hooks.base import EWAHBaseHook
import requests
import time
class EWAHAircallHook(EWAHBaseHook):
    """EWAH hook for the Aircall REST API (basic auth, paginated)."""

    # Map hook attribute names to Airflow connection fields.
    _ATTR_RELABEL = {
        "api_id": "login",
        "api_token": "password",
    }
    conn_name_attr = "ewah_aircall_conn_id"
    default_conn_name = "ewah_aircall_default"
    conn_type = "ewah_aircall"
    hook_name = "EWAH Aircall Connection"
    # Supported API resources; "incremental" marks the ones that accept
    # from/to timestamp filters in get_data_in_batches().
    _RESOURCES = {
        "users": {"incremental": True},
        "teams": {},
        "calls": {"incremental": True},
        "numbers": {"incremental": True},
        "contacts": {"incremental": True},
        "tags": {},
    }
    _BASE_URL = "https://api.aircall.io/v1/{0}"
    @staticmethod
    def get_ui_field_behaviour():
        """Airflow connection-form configuration for this hook type."""
        return {
            "hidden_fields": ["port", "schema", "extra", "host"],
            "relabeling": {
                "login": "Basic Auth API ID",
                # Fixed typo: was "Baisc Auth API Token".
                "password": "Basic Auth API Token",
            },
        }
    def get_data_in_batches(
        self,
        resource,
        data_from=None,
        data_until=None,
        batch_size=10000,
        batch_call_pause_seconds=1,
    ):
        """Yield lists of records for *resource*, following pagination.

        Args:
            resource: API resource name (see _RESOURCES).
            data_from: optional datetime lower bound ("from" filter).
            data_until: optional datetime upper bound ("to" filter).
            batch_size: maximum number of records per yielded batch
                (positive integer, at most 10000).
            batch_call_pause_seconds: sleep between API calls (simple
                rate limiting).

        Yields:
            Lists of record dicts, flushed whenever pagination ends or
            the next page could overflow *batch_size*.
        """
        _msg = "batch_size param must be a positive integer <= 10k "
        assert isinstance(batch_size, int), _msg
        assert batch_size > 0, _msg
        assert batch_size <= 10000, _msg
        page_size = 50  # Aircall's maximum page size
        auth = requests.auth.HTTPBasicAuth(
            self.conn.api_id,
            self.conn.api_token,
        )
        url = self._BASE_URL.format(resource)
        params = {
            "per_page": page_size,
        }
        # The API expects Unix timestamps for the from/to filters.
        if data_from:
            params["from"] = int(time.mktime(data_from.timetuple()))
        if data_until:
            params["to"] = int(time.mktime((data_until).timetuple()))
        data = []
        while url:
            time.sleep(batch_call_pause_seconds)
            request = requests.get(url, params=params, auth=auth)
            assert request.status_code == 200, request.text
            response = request.json()
            # next_page_link is None on the last page, ending the loop.
            url = response.get("meta", {}).get("next_page_link")
            data += response.get(resource, [])
            if (not url) or (len(data) + page_size > batch_size):
                yield data
                data = []
| 28.846154 | 69 | 0.549778 | from ewah.hooks.base import EWAHBaseHook
import requests
import time
class EWAHAircallHook(EWAHBaseHook):
_ATTR_RELABEL = {
"api_id": "login",
"api_token": "password",
}
conn_name_attr = "ewah_aircall_conn_id"
default_conn_name = "ewah_aircall_default"
conn_type = "ewah_aircall"
hook_name = "EWAH Aircall Connection"
_RESOURCES = {
"users": {"incremental": True},
"teams": {},
"calls": {"incremental": True},
"numbers": {"incremental": True},
"contacts": {"incremental": True},
"tags": {},
}
_BASE_URL = "https://api.aircall.io/v1/{0}"
@staticmethod
def get_ui_field_behaviour():
return {
"hidden_fields": ["port", "schema", "extra", "host"],
"relabeling": {
"login": "Basic Auth API ID",
"password": "Baisc Auth API Token",
},
}
def get_data_in_batches(
self,
resource,
data_from=None,
data_until=None,
batch_size=10000,
batch_call_pause_seconds=1,
):
_msg = "batch_size param must be a positive integer <= 10k "
assert isinstance(batch_size, int), _msg
assert batch_size > 0, _msg
assert batch_size <= 10000, _msg
page_size = 50
auth = requests.auth.HTTPBasicAuth(
self.conn.api_id,
self.conn.api_token,
)
url = self._BASE_URL.format(resource)
params = {
"per_page": page_size,
}
if data_from:
params["from"] = int(time.mktime(data_from.timetuple()))
if data_until:
params["to"] = int(time.mktime((data_until).timetuple()))
data = []
while url:
time.sleep(batch_call_pause_seconds)
request = requests.get(url, params=params, auth=auth)
assert request.status_code == 200, request.text
response = request.json()
url = response.get("meta", {}).get("next_page_link")
data += response.get(resource, [])
if (not url) or (len(data) + page_size > batch_size):
yield data
data = []
| true | true |
790015e43ecc627fad78e98ab220e3f1543e6df8 | 4,855 | py | Python | tensorflow_privacy/privacy/dp_query/gaussian_query.py | Juspem1980/privacy | d122e2d1c7182ba7195ecbcb1cb8da29b2a14d6f | [
"Apache-2.0"
] | null | null | null | tensorflow_privacy/privacy/dp_query/gaussian_query.py | Juspem1980/privacy | d122e2d1c7182ba7195ecbcb1cb8da29b2a14d6f | [
"Apache-2.0"
] | null | null | null | tensorflow_privacy/privacy/dp_query/gaussian_query.py | Juspem1980/privacy | d122e2d1c7182ba7195ecbcb1cb8da29b2a14d6f | [
"Apache-2.0"
] | 1 | 2021-01-13T06:34:19.000Z | 2021-01-13T06:34:19.000Z | # Copyright 2018, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements DPQuery interface for Gaussian average queries.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from distutils.version import LooseVersion
import tensorflow.compat.v1 as tf
from tensorflow_privacy.privacy.dp_query import dp_query
from tensorflow_privacy.privacy.dp_query import normalized_query
class GaussianSumQuery(dp_query.SumAggregationDPQuery):
  """Implements DPQuery interface for Gaussian sum queries.
  Accumulates clipped vectors, then adds Gaussian noise to the sum.
  """
  # pylint: disable=invalid-name
  # Immutable query parameters: the clipping norm and the noise stddev.
  _GlobalState = collections.namedtuple(
      '_GlobalState', ['l2_norm_clip', 'stddev'])
  def __init__(self, l2_norm_clip, stddev):
    """Initializes the GaussianSumQuery.
    Args:
      l2_norm_clip: The clipping norm to apply to the global norm of each
        record.
      stddev: The stddev of the noise added to the sum.
    """
    self._l2_norm_clip = l2_norm_clip
    self._stddev = stddev
    self._ledger = None  # optional privacy ledger; set via set_ledger()
  def set_ledger(self, ledger):
    """Sets the ledger that records each noised sum query."""
    self._ledger = ledger
  def make_global_state(self, l2_norm_clip, stddev):
    """Creates a global state from the given parameters."""
    return self._GlobalState(tf.cast(l2_norm_clip, tf.float32),
                             tf.cast(stddev, tf.float32))
  def initial_global_state(self):
    """Returns the initial global state built from the constructor args."""
    return self.make_global_state(self._l2_norm_clip, self._stddev)
  def derive_sample_params(self, global_state):
    """Returns the per-sample parameters (here: the clipping norm)."""
    return global_state.l2_norm_clip
  def initial_sample_state(self, template):
    """Returns a zeroed accumulator matching the structure of *template*."""
    return tf.nest.map_structure(
        dp_query.zeros_like, template)
  def preprocess_record_impl(self, params, record):
    """Clips the l2 norm, returning the clipped record and the l2 norm.
    Args:
      params: The parameters for the sample.
      record: The record to be processed.
    Returns:
      A tuple (preprocessed_records, l2_norm) where `preprocessed_records` is
      the structure of preprocessed tensors, and l2_norm is the total l2 norm
      before clipping.
    """
    l2_norm_clip = params
    # Flatten so the norm is computed jointly over the whole record.
    record_as_list = tf.nest.flatten(record)
    clipped_as_list, norm = tf.clip_by_global_norm(record_as_list, l2_norm_clip)
    return tf.nest.pack_sequence_as(record, clipped_as_list), norm
  def preprocess_record(self, params, record):
    """Clips the record, discarding the pre-clipping norm."""
    preprocessed_record, _ = self.preprocess_record_impl(params, record)
    return preprocessed_record
  def get_noised_result(self, sample_state, global_state):
    """See base class."""
    # TF1 and TF2 expose random normal sampling differently; pick the
    # implementation matching the installed TensorFlow version.
    if LooseVersion(tf.__version__) < LooseVersion('2.0.0'):
      def add_noise(v):
        return v + tf.random.normal(
            tf.shape(input=v), stddev=global_state.stddev)
    else:
      random_normal = tf.random_normal_initializer(
          stddev=global_state.stddev)
      def add_noise(v):
        return v + random_normal(tf.shape(input=v))
    if self._ledger:
      # Record this query in the ledger before releasing the result.
      dependencies = [
          self._ledger.record_sum_query(
              global_state.l2_norm_clip, global_state.stddev)
      ]
    else:
      dependencies = []
    # control_dependencies guarantees (in graph mode) that the ledger
    # write happens before the noised sum is produced.
    with tf.control_dependencies(dependencies):
      return tf.nest.map_structure(add_noise, sample_state), global_state
class GaussianAverageQuery(normalized_query.NormalizedQuery):
  """DPQuery for Gaussian average queries.
  Clips each record, sums with Gaussian noise, then divides by a fixed
  denominator.  The denominator is a constant supplied up front (the
  expected number of records per sample) rather than estimated from the
  data: accumulating it separately would be possible, but would produce
  a higher-variance estimator.
  """
  def __init__(self,
               l2_norm_clip,
               sum_stddev,
               denominator):
    """Builds the query from a clipping norm, noise stddev and denominator.
    Args:
      l2_norm_clip: Clipping norm applied to the global norm of each
        record.
      sum_stddev: Stddev of the Gaussian noise added to the clipped sum
        (before dividing by the denominator).
      denominator: Fixed normalization constant applied after the noise.
    """
    numerator = GaussianSumQuery(l2_norm_clip, sum_stddev)
    super(GaussianAverageQuery, self).__init__(
        numerator_query=numerator,
        denominator=denominator)
| 33.715278 | 80 | 0.72379 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from distutils.version import LooseVersion
import tensorflow.compat.v1 as tf
from tensorflow_privacy.privacy.dp_query import dp_query
from tensorflow_privacy.privacy.dp_query import normalized_query
class GaussianSumQuery(dp_query.SumAggregationDPQuery):
_GlobalState = collections.namedtuple(
'_GlobalState', ['l2_norm_clip', 'stddev'])
def __init__(self, l2_norm_clip, stddev):
self._l2_norm_clip = l2_norm_clip
self._stddev = stddev
self._ledger = None
def set_ledger(self, ledger):
self._ledger = ledger
def make_global_state(self, l2_norm_clip, stddev):
return self._GlobalState(tf.cast(l2_norm_clip, tf.float32),
tf.cast(stddev, tf.float32))
def initial_global_state(self):
return self.make_global_state(self._l2_norm_clip, self._stddev)
def derive_sample_params(self, global_state):
return global_state.l2_norm_clip
def initial_sample_state(self, template):
return tf.nest.map_structure(
dp_query.zeros_like, template)
def preprocess_record_impl(self, params, record):
l2_norm_clip = params
record_as_list = tf.nest.flatten(record)
clipped_as_list, norm = tf.clip_by_global_norm(record_as_list, l2_norm_clip)
return tf.nest.pack_sequence_as(record, clipped_as_list), norm
def preprocess_record(self, params, record):
preprocessed_record, _ = self.preprocess_record_impl(params, record)
return preprocessed_record
def get_noised_result(self, sample_state, global_state):
if LooseVersion(tf.__version__) < LooseVersion('2.0.0'):
def add_noise(v):
return v + tf.random.normal(
tf.shape(input=v), stddev=global_state.stddev)
else:
random_normal = tf.random_normal_initializer(
stddev=global_state.stddev)
def add_noise(v):
return v + random_normal(tf.shape(input=v))
if self._ledger:
dependencies = [
self._ledger.record_sum_query(
global_state.l2_norm_clip, global_state.stddev)
]
else:
dependencies = []
with tf.control_dependencies(dependencies):
return tf.nest.map_structure(add_noise, sample_state), global_state
class GaussianAverageQuery(normalized_query.NormalizedQuery):
def __init__(self,
l2_norm_clip,
sum_stddev,
denominator):
super(GaussianAverageQuery, self).__init__(
numerator_query=GaussianSumQuery(l2_norm_clip, sum_stddev),
denominator=denominator)
| true | true |
7900167ffa942e90d5bde8a9c494f63fb8cc5394 | 3,285 | py | Python | flye/assembly/repeat_graph.py | sebschmi/Flye | 7413f5c39b6c8e9ab9caa564e15e7edd4e727cfd | [
"BSD-3-Clause"
] | 1 | 2021-04-08T15:14:06.000Z | 2021-04-08T15:14:06.000Z | flye/assembly/repeat_graph.py | mingjuhao/Flye | 7041792f92add6f31401c12c39cb205578141417 | [
"BSD-3-Clause"
] | null | null | null | flye/assembly/repeat_graph.py | mingjuhao/Flye | 7041792f92add6f31401c12c39cb205578141417 | [
"BSD-3-Clause"
] | null | null | null | #(c) 2016 by Authors
#This file is a part of ABruijn program.
#Released under the BSD license (see LICENSE file)
"""
Runs repeat/contigger binary
"""
from __future__ import absolute_import
import subprocess
import logging
import os
from flye.utils.utils import which
# Both stages are subcommands of the same multi-call binary.
REPEAT_BIN = "flye-modules"
CONTIGGER_BIN = "flye-modules"
logger = logging.getLogger()  # root logger (no name argument)
class RepeatException(Exception):
    """Raised when a repeat/contigger binary is missing or exits with an error."""
    pass
def check_binaries():
    """Verify that the repeat/contigger binaries exist and respond to -h.

    Raises:
        RepeatException: if a binary is missing from PATH or fails to run.
    """
    if not which(REPEAT_BIN) or not which(CONTIGGER_BIN):
        raise RepeatException("Repeat/contigger binaries were not found. "
                              "Did you run 'make'?")
    try:
        # subprocess.DEVNULL avoids the file-handle leak of the original
        # open(os.devnull, "w") call, which was never closed.
        subprocess.check_call([REPEAT_BIN, "repeat", "-h"],
                              stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        raise RepeatException(str(e))
    except OSError as e:
        raise RepeatException(str(e))
def analyse_repeats(args, run_params, input_assembly, out_folder,
                    log_file, config_file):
    """Run the "flye-modules repeat" stage to build the repeat graph.

    Args:
        args: command-line namespace; reads, threads, debug, meta,
            keep_haplotypes and hifi_error are consumed here.
        run_params: dict with at least "min_overlap".
        input_assembly: path to the disjointigs file.
        out_folder: output directory for this stage.
        log_file: path of the stage log file.
        config_file: path of the assembly config file.

    Raises:
        RepeatException: if the binary fails or cannot be executed.
    """
    logger.debug("-----Begin repeat analyser log------")
    cmdline = [REPEAT_BIN, "repeat", "--disjointigs", input_assembly,
               "--reads", ",".join(args.reads), "--out-dir", out_folder,
               "--config", config_file, "--log", log_file,
               "--threads", str(args.threads)]
    if args.debug:
        cmdline.append("--debug")
    if args.meta:
        cmdline.append("--meta")
    if args.keep_haplotypes:
        cmdline.append("--keep-haplotypes")
    cmdline.extend(["--min-ovlp", str(run_params["min_overlap"])])
    if args.hifi_error:
        cmdline.extend(["--extra-params",
                        "repeat_graph_ovlp_divergence={}".format(args.hifi_error)])

    try:
        logger.debug("Running: " + " ".join(cmdline))
        subprocess.check_call(cmdline)
    except subprocess.CalledProcessError as e:
        if e.returncode == -9:
            # SIGKILL is the usual symptom of the OOM killer.
            logger.error("Looks like the system ran out of memory")
        raise RepeatException(str(e))
    except OSError as e:
        raise RepeatException(str(e))
def generate_contigs(args, run_params, graph_edges, out_folder,
                     log_file, config_file, repeat_graph, reads_alignment):
    """Run the "flye-modules contigger" stage to produce contigs.

    Args:
        args: command-line namespace; reads, threads, debug and
            keep_haplotypes are consumed here.
        run_params: dict with at least "min_overlap".
        graph_edges: path to the repeat-graph edges file.
        out_folder: output directory for this stage.
        log_file: path of the stage log file.
        config_file: path of the assembly config file.
        repeat_graph: path to the repeat graph file.
        reads_alignment: path to the reads-to-graph alignment file.

    Raises:
        RepeatException: if the binary fails or cannot be executed.
    """
    logger.debug("-----Begin contigger analyser log------")
    cmdline = [CONTIGGER_BIN, "contigger", "--graph-edges", graph_edges,
               "--reads", ",".join(args.reads), "--out-dir", out_folder,
               "--config", config_file, "--repeat-graph", repeat_graph,
               "--graph-aln", reads_alignment, "--log", log_file,
               "--threads", str(args.threads)]
    if args.debug:
        cmdline.append("--debug")
    if args.keep_haplotypes:
        cmdline.append("--no-scaffold")
    cmdline.extend(["--min-ovlp", str(run_params["min_overlap"])])

    try:
        logger.debug("Running: " + " ".join(cmdline))
        subprocess.check_call(cmdline)
    except subprocess.CalledProcessError as e:
        if e.returncode == -9:
            # SIGKILL is the usual symptom of the OOM killer.
            logger.error("Looks like the system ran out of memory")
        raise RepeatException(str(e))
    except OSError as e:
        raise RepeatException(str(e))
| 33.865979 | 83 | 0.624353 |
from __future__ import absolute_import
import subprocess
import logging
import os
from flye.utils.utils import which
REPEAT_BIN = "flye-modules"
CONTIGGER_BIN = "flye-modules"
logger = logging.getLogger()
class RepeatException(Exception):
pass
def check_binaries():
if not which(REPEAT_BIN) or not which(CONTIGGER_BIN):
raise RepeatException("Repeat/contigger binaries were not found. "
"Did you run 'make'?")
try:
devnull = open(os.devnull, "w")
subprocess.check_call([REPEAT_BIN, "repeat", "-h"], stderr=devnull)
except subprocess.CalledProcessError as e:
raise RepeatException(str(e))
except OSError as e:
raise RepeatException(str(e))
def analyse_repeats(args, run_params, input_assembly, out_folder,
log_file, config_file):
logger.debug("-----Begin repeat analyser log------")
cmdline = [REPEAT_BIN, "repeat", "--disjointigs", input_assembly,
"--reads", ",".join(args.reads), "--out-dir", out_folder,
"--config", config_file, "--log", log_file,
"--threads", str(args.threads)]
if args.debug:
cmdline.append("--debug")
if args.meta:
cmdline.append("--meta")
if args.keep_haplotypes:
cmdline.append("--keep-haplotypes")
cmdline.extend(["--min-ovlp", str(run_params["min_overlap"])])
if args.hifi_error:
cmdline.extend(["--extra-params",
"repeat_graph_ovlp_divergence={}".format(args.hifi_error)])
try:
logger.debug("Running: " + " ".join(cmdline))
subprocess.check_call(cmdline)
except subprocess.CalledProcessError as e:
if e.returncode == -9:
logger.error("Looks like the system ran out of memory")
raise RepeatException(str(e))
except OSError as e:
raise RepeatException(str(e))
def generate_contigs(args, run_params, graph_edges, out_folder,
log_file, config_file, repeat_graph, reads_alignment):
logger.debug("-----Begin contigger analyser log------")
cmdline = [CONTIGGER_BIN, "contigger", "--graph-edges", graph_edges,
"--reads", ",".join(args.reads), "--out-dir", out_folder,
"--config", config_file, "--repeat-graph", repeat_graph,
"--graph-aln", reads_alignment, "--log", log_file,
"--threads", str(args.threads)]
if args.debug:
cmdline.append("--debug")
if args.keep_haplotypes:
cmdline.append("--no-scaffold")
cmdline.extend(["--min-ovlp", str(run_params["min_overlap"])])
try:
logger.debug("Running: " + " ".join(cmdline))
subprocess.check_call(cmdline)
except subprocess.CalledProcessError as e:
if e.returncode == -9:
logger.error("Looks like the system ran out of memory")
raise RepeatException(str(e))
except OSError as e:
raise RepeatException(str(e))
| true | true |
79001836abada63b22392713229f0a70ea8aa85c | 7,213 | py | Python | tools/config_style_checker.py | jdoxley/Swamp-Aux-Test | 0e3db46103d3acde94833e6916a68c44c98e2d46 | [
"MIT"
] | 1 | 2020-04-02T22:28:44.000Z | 2020-04-02T22:28:44.000Z | tools/config_style_checker.py | jdoxley/Swamp-Aux-Test | 0e3db46103d3acde94833e6916a68c44c98e2d46 | [
"MIT"
] | 1 | 2020-02-17T07:04:56.000Z | 2020-02-17T22:09:45.000Z | tools/config_style_checker.py | jdoxley/Swamp-Aux | 6a6d81fe221313468fa0343d4943737d1b862968 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import fnmatch
import os
import re
import ntpath
import sys
import argparse
# File paths to skip during validation; compared against the joined
# os.walk paths built in main().
excluded_files = []
def check_config_style(filepath):
    """Scan one config file for unbalanced (), [] and {} brackets.

    A character-level state machine skips string literals ('...' and
    "..."), line comments (//) and block comments (/* ... */), records
    every bracket seen in the remaining text, and reports suspected
    mismatches with their line numbers.  Escaped quotes inside strings
    are not handled; a literal \" will end the string early.

    Args:
        filepath: path of the .cpp/.hpp config file to scan.

    Returns:
        The number of suspected bracket errors found in the file.
    """
    bad_count_file = 0
    with open(filepath, 'r', encoding='utf-8', errors='ignore') as file:
        content = file.read()

    brackets_list = []                  # every bracket seen, in order
    isInCommentBlock = False            # inside /* ... */
    checkIfInComment = False            # previous char was '/'
    ignoreTillEndOfLine = False         # inside a // comment
    checkIfNextIsClosingBlock = False   # saw '*' inside a block comment
    isInString = False                  # inside a quoted string
    inStringType = ''                   # quote char that opened the string
    lineNumber = 1                      # for error messages

    for c in content:
        if c == '\n':
            lineNumber += 1
        if isInString:
            # Inside a string everything is ignored until the matching quote.
            if c == inStringType:
                isInString = False
        elif not isInCommentBlock:
            if checkIfInComment:
                checkIfInComment = False
                if c == '*':            # "/*" starts a block comment
                    isInCommentBlock = True
                elif c == '/':          # "//" starts a line comment
                    ignoreTillEndOfLine = True
            if not isInCommentBlock:
                if ignoreTillEndOfLine:
                    if c == '\n':
                        ignoreTillEndOfLine = False
                else:
                    if c == '"' or c == "'":
                        isInString = True
                        inStringType = c
                    elif c == '/':
                        checkIfInComment = True
                    elif c == '(':
                        brackets_list.append('(')
                    elif c == ')':
                        if brackets_list and brackets_list[-1] in ['{', '[']:
                            print("ERROR: Possible missing round bracket ')' detected at {0} Line number: {1}".format(filepath, lineNumber))
                            bad_count_file += 1
                        brackets_list.append(')')
                    elif c == '[':
                        brackets_list.append('[')
                    elif c == ']':
                        if brackets_list and brackets_list[-1] in ['{', '(']:
                            print("ERROR: Possible missing square bracket ']' detected at {0} Line number: {1}".format(filepath, lineNumber))
                            bad_count_file += 1
                        brackets_list.append(']')
                    elif c == '{':
                        brackets_list.append('{')
                    elif c == '}':
                        if brackets_list and brackets_list[-1] in ['(', '[']:
                            print("ERROR: Possible missing curly brace '}}' detected at {0} Line number: {1}".format(filepath, lineNumber))
                            bad_count_file += 1
                        brackets_list.append('}')
        else:
            # Inside a /* */ block: watch for the closing "*/".
            if c == '*':
                checkIfNextIsClosingBlock = True
            elif checkIfNextIsClosingBlock:
                if c == '/':
                    isInCommentBlock = False
                    # Reset the flag so it cannot leak into the next
                    # comment block (the original left it True, making
                    # "/*/" close a fresh block immediately).
                    checkIfNextIsClosingBlock = False
                elif c != '*':
                    checkIfNextIsClosingBlock = False

    if brackets_list.count('[') != brackets_list.count(']'):
        print("ERROR: A possible missing square bracket [ or ] in file {0} [ = {1} ] = {2}".format(filepath, brackets_list.count('['), brackets_list.count(']')))
        bad_count_file += 1
    if brackets_list.count('(') != brackets_list.count(')'):
        print("ERROR: A possible missing round bracket ( or ) in file {0} ( = {1} ) = {2}".format(filepath, brackets_list.count('('), brackets_list.count(')')))
        bad_count_file += 1
    if brackets_list.count('{') != brackets_list.count('}'):
        print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath, brackets_list.count('{'), brackets_list.count('}')))
        bad_count_file += 1
    return bad_count_file
def main():
    """Entry point: walk the addon tree and validate every config file.

    Returns the total number of suspected errors (used as exit status).
    """
    print("Validating Config Style")
    for excluded in excluded_files:
        print("Excluded File: ", excluded)

    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--module',
                        help='only search specified module addon folder',
                        required=False, default="")
    args = parser.parse_args()

    # Allow running from root directory as well as from inside the tools directory
    rootDir = "Swamp Aux/"

    config_files = []
    for root, dirnames, filenames in os.walk(rootDir + '/' + args.module):
        for pattern in ('*.cpp', '*.hpp'):
            for filename in fnmatch.filter(filenames, pattern):
                config_files.append(os.path.join(root, filename))

    bad_count = 0
    for filename in config_files:
        if filename not in excluded_files:
            bad_count += check_config_style(filename)

    print("------\nChecked {0} files\nErrors detected: {1}".format(
        len(config_files), bad_count))
    if bad_count == 0:
        print("Config validation PASSED")
    else:
        print("Config validation FAILED")
    return bad_count
if __name__ == "__main__":
    # Use the error count as the process exit status (0 = success).
    sys.exit(main())
import fnmatch
import os
import re
import ntpath
import sys
import argparse
excluded_files = []
def check_config_style(filepath):
bad_count_file = 0
def pushClosing(t):
closingStack.append(closing.expr)
closing << Literal( closingFor[t[0]] )
def popClosing():
closing << closingStack.pop()
with open(filepath, 'r', encoding='utf-8', errors='ignore') as file:
content = file.read()
brackets_list = []
isInCommentBlock = False
checkIfInComment = False
ignoreTillEndOfLine = False
checkIfNextIsClosingBlock = False
isInString = False
inStringType = '';
lastIsCurlyBrace = False
checkForSemiColumn = False
lineNumber = 1
indexOfCharacter = 0
for c in content:
if (lastIsCurlyBrace):
lastIsCurlyBrace = False
if c == '\n':
lineNumber += 1
if (isInString):
if (c == inStringType):
isInString = False
elif (isInCommentBlock == False):
if (checkIfInComment):
checkIfInComment = False
if c == '*':
isInCommentBlock = True
elif (c == '/'):
ignoreTillEndOfLine = True
if (isInCommentBlock == False):
if (ignoreTillEndOfLine): # we are in a line comment, just continue going through the characters until we find an end of line
if (c == '\n'):
ignoreTillEndOfLine = False
else: # validate brackets
if (c == '"' or c == "'"):
isInString = True
inStringType = c
elif (c == '/'):
checkIfInComment = True
elif (c == '('):
brackets_list.append('(')
elif (c == ')'):
if (len(brackets_list) > 0 and brackets_list[-1] in ['{', '[']):
print("ERROR: Possible missing round bracket ')' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(')')
elif (c == '['):
brackets_list.append('[')
elif (c == ']'):
if (len(brackets_list) > 0 and brackets_list[-1] in ['{', '(']):
print("ERROR: Possible missing square bracket ']' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(']')
elif (c == '{'):
brackets_list.append('{')
elif (c == '}'):
lastIsCurlyBrace = True
if (len(brackets_list) > 0 and brackets_list[-1] in ['(', '[']):
print("ERROR: Possible missing curly brace '}}' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append('}')
else: # Look for the end of our comment block
if (c == '*'):
checkIfNextIsClosingBlock = True;
elif (checkIfNextIsClosingBlock):
if (c == '/'):
isInCommentBlock = False
elif (c != '*'):
checkIfNextIsClosingBlock = False
indexOfCharacter += 1
if brackets_list.count('[') != brackets_list.count(']'):
print("ERROR: A possible missing square bracket [ or ] in file {0} [ = {1} ] = {2}".format(filepath,brackets_list.count('['),brackets_list.count(']')))
bad_count_file += 1
if brackets_list.count('(') != brackets_list.count(')'):
print("ERROR: A possible missing round bracket ( or ) in file {0} ( = {1} ) = {2}".format(filepath,brackets_list.count('('),brackets_list.count(')')))
bad_count_file += 1
if brackets_list.count('{') != brackets_list.count('}'):
print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath,brackets_list.count('{'),brackets_list.count('}')))
bad_count_file += 1
return bad_count_file
def main():
print("Validating Config Style")
for test in excluded_files:
print("Excluded File: ",test)
sqf_list = []
bad_count = 0
parser = argparse.ArgumentParser()
parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="")
args = parser.parse_args()
# Allow running from root directory as well as from inside the tools directory
rootDir = "Swamp Aux/"
for root, dirnames, filenames in os.walk(rootDir + '/' + args.module):
for filename in fnmatch.filter(filenames, '*.cpp'):
sqf_list.append(os.path.join(root, filename))
for filename in fnmatch.filter(filenames, '*.hpp'):
sqf_list.append(os.path.join(root, filename))
for filename in sqf_list:
if (filename not in excluded_files):
bad_count = bad_count + check_config_style(filename)
print("------\nChecked {0} files\nErrors detected: {1}".format(len(sqf_list), bad_count))
if (bad_count == 0):
print("Config validation PASSED")
else:
print("Config validation FAILED")
return bad_count
if __name__ == "__main__":
sys.exit(main()) | true | true |
7900188b2ad7be7f80e06943a10f23aa571e38e0 | 957 | py | Python | GemmyTheNerd/studentclass.py | GemmyTheGeek/GemmyTheNerd | 8484887cdea6fa2d5e08c0c949221178e9ac9132 | [
"MIT"
] | null | null | null | GemmyTheNerd/studentclass.py | GemmyTheGeek/GemmyTheNerd | 8484887cdea6fa2d5e08c0c949221178e9ac9132 | [
"MIT"
] | null | null | null | GemmyTheNerd/studentclass.py | GemmyTheGeek/GemmyTheNerd | 8484887cdea6fa2d5e08c0c949221178e9ac9132 | [
"MIT"
] | null | null | null | class Student:
def __init__(self,name):
self.name = name
self.exp = 0
self.lesson = 0
self.AddEXP(10)
def Hello(self):
print('Hello World! My name is {}!'.format(self.name))
def Coding(self):
print('{}: Currently coding...'.format(self.name))
self.exp += 5
self.lesson += 1
def ShowEXP(self):
print('- {} has {} EXP'.format(self.name,self.exp))
print('- Learned {} times'.format(self.lesson))
def AddEXP(self, score):
self.exp += score
class SpecialStudent(Student):
    """A privileged student: triple EXP gains, plus a bonus for famous fathers."""

    def __init__(self, name, father):
        super().__init__(name)
        self.father = father
        mafia = ['Bill Gates', 'Thomas Edison']
        if father in mafia:
            self.exp += 100  # connections pay off

    def AddEXP(self, score):
        """Add triple EXP and count the session as a lesson."""
        self.exp += (score * 3)
        # Bug fix: the original incremented the misspelled attribute
        # "lessson", which raised AttributeError on every call --
        # including the AddEXP(10) made by Student.__init__, so the
        # class could never be instantiated.
        self.lesson += 1

    def AskEXP(self, score=10):
        """Demand EXP (default 10), gained at the tripled rate."""
        print('*Holding Gun* Gimme some EXP!')
        self.AddEXP(score)
print(__name__)
# NOTE(review): this guard compares against '__studentclass__', which
# __name__ never equals (it is '__main__' when run directly, or the
# module name such as 'studentclass' when imported), so the branch
# below is unreachable as written -- confirm the intended module name.
if __name__ == '__studentclass__':
    print('===== 1 Jan =====')
| 20.804348 | 58 | 0.61233 | class Student:
def __init__(self,name):
self.name = name
self.exp = 0
self.lesson = 0
self.AddEXP(10)
def Hello(self):
print('Hello World! My name is {}!'.format(self.name))
def Coding(self):
print('{}: Currently coding...'.format(self.name))
self.exp += 5
self.lesson += 1
def ShowEXP(self):
print('- {} has {} EXP'.format(self.name,self.exp))
print('- Learned {} times'.format(self.lesson))
def AddEXP(self, score):
self.exp += score
class SpecialStudent(Student):
def __init__(self,name,father):
super().__init__(name)
self.father = father
mafia = ['Bill Gates', 'Thomas Edison']
if father in mafia:
self.exp += 100
def AddEXP(self,score):
self.exp += (score * 3)
self.lessson += 1
def AskEXP(self,score=10):
print('*Holding Gun* Gimme some EXP!')
self.AddEXP(score)
print(__name__)
if __name__ == '__studentclass__':
print('===== 1 Jan =====')
| true | true |
790018d88b2e26eefe31f61c3e46c9a2e169c4c7 | 56 | py | Python | CodeWars/7 Kyu/Number Of Occurrences.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Number Of Occurrences.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Number Of Occurrences.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | def number_of_occurrences(s, xs):
return xs.count(s) | 28 | 33 | 0.732143 | def number_of_occurrences(s, xs):
return xs.count(s) | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.