function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def update(
self,
update_object,
processor="sparql",
initNs=None,
initBindings=None,
use_store_provided=True,
**kwargs, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def n3(self):
    """Return the graph's identifier rendered as an n3 blank-node-style term."""
    return "[{}]".format(self.identifier.n3())
1851,
516,
1851,
221,
1328248153
] |
def isomorphic(self, other):
    """Approximate equality check between two graphs.

    Compares sizes and requires every ground (BNode-free) triple of each
    graph to be present in the other.  Triples involving blank nodes are
    skipped entirely, so graphs differing only in bnode structure may
    still be reported as equal.  See ``rdflib.compare`` for a correct
    implementation of isomorphism checks.

    :param other: the graph to compare against
    :return: True if the graphs appear to be the same, False otherwise
    """
    # TODO: this is only an approximation.
    if len(self) != len(other):
        return False
    # Every ground triple of self must appear in other...
    for s, p, o in self:
        if not isinstance(s, BNode) and not isinstance(o, BNode):
            # Idiom fix: `x not in y` instead of `not x in y`.
            if (s, p, o) not in other:
                return False
    # ...and vice versa.
    for s, p, o in other:
        if not isinstance(s, BNode) and not isinstance(o, BNode):
            if (s, p, o) not in self:
                return False
    # TODO: very well could be a false positive at this point yet.
    return True
1851,
516,
1851,
221,
1328248153
] |
def all_nodes(self):
    """Return the set of every node used as a subject or object in the graph."""
    nodes = set(self.subjects())
    nodes.update(self.objects())
    return nodes
1851,
516,
1851,
221,
1328248153
] |
def resource(self, identifier):
    """Create a new ``Resource`` instance bound to this graph.

    Parameters:

    - ``identifier``: a URIRef or BNode instance; any other value is
      coerced to a URIRef first.

    Example::

        >>> graph = Graph()
        >>> uri = URIRef("http://example.org/resource")
        >>> resource = graph.resource(uri)
        >>> assert isinstance(resource, Resource)
        >>> assert resource.identifier is uri
        >>> assert resource.graph is graph
    """
    if isinstance(identifier, Node):
        return Resource(self, identifier)
    return Resource(self, URIRef(identifier))
1851,
516,
1851,
221,
1328248153
] |
def skolemize(self, new_graph=None, bnode=None, authority=None, basepath=None):
    """Return a graph in which blank nodes are replaced by skolem IRIs.

    If *bnode* is given (and is a BNode), only that blank node is
    skolemized; otherwise every BNode subject/object is.  The result is
    written into *new_graph* (a fresh Graph when None) and returned.
    """

    def _skolemize_one(target, triple):
        # Replace subject/object only where it equals the requested bnode.
        (s, p, o) = triple
        if s == target:
            s = s.skolemize(authority=authority, basepath=basepath)
        if o == target:
            o = o.skolemize(authority=authority, basepath=basepath)
        return s, p, o

    def _skolemize_all(triple):
        # Replace every BNode subject/object.
        (s, p, o) = triple
        if isinstance(s, BNode):
            s = s.skolemize(authority=authority, basepath=basepath)
        if isinstance(o, BNode):
            o = o.skolemize(authority=authority, basepath=basepath)
        return s, p, o

    retval = new_graph if new_graph is not None else Graph()
    if bnode is None:
        self._process_skolem_tuples(retval, _skolemize_all)
    elif isinstance(bnode, BNode):
        self._process_skolem_tuples(retval, lambda t: _skolemize_one(bnode, t))
    return retval
1851,
516,
1851,
221,
1328248153
] |
def do_de_skolemize(uriref, t):
    """Return triple *t* with any term equal to *uriref* de-skolemized."""
    s, p, o = t
    new_s = s.de_skolemize() if s == uriref else s
    new_o = o.de_skolemize() if o == uriref else o
    return new_s, p, new_o
1851,
516,
1851,
221,
1328248153
] |
def cbd(self, resource):
    """Retrieves the Concise Bounded Description of a Resource from a Graph

    Concise Bounded Description (CBD) is defined in [1] as:

    Given a particular node (the starting node) in a particular RDF graph
    (the source graph), a subgraph of that particular graph, taken to
    comprise a concise bounded description of the resource denoted by the
    starting node, can be identified as follows:

    1. Include in the subgraph all statements in the source graph where
       the subject of the statement is the starting node;
    2. Recursively, for all statements identified in the subgraph thus far
       having a blank node object, include in the subgraph all statements
       in the source graph where the subject of the statement is the blank
       node in question and which are not already included in the subgraph.
    3. Recursively, for all statements included in the subgraph thus far,
       for all reifications of each statement in the source graph, include
       the concise bounded description beginning from the rdf:Statement
       node of each reification.

    This results in a subgraph where the object nodes are either URI
    references, literals, or blank nodes not serving as the subject of any
    statement in the graph.

    [1] https://www.w3.org/Submission/CBD/

    :param resource: a URIRef object, of the Resource for queried for
    :return: a Graph, subgraph of self
    """
    subgraph = Graph()

    def add_to_cbd(uri):
        for s, p, o in self.triples((uri, None, None)):
            subgraph.add((s, p, o))
            # recurse 'down' through all Blank Nodes
            # Fix: use isinstance() rather than `type(o) == BNode`, so
            # BNode subclasses are followed too.
            if isinstance(o, BNode) and (o, None, None) not in subgraph:
                add_to_cbd(o)
        # for Rule 3 (reification)
        # for any rdf:Statement in the graph with the given URI as the object of rdf:subject,
        # get all triples with that rdf:Statement instance as subject
        # find any subject s where the predicate is rdf:subject and this uri is the object
        # (these subjects are of type rdf:Statement, given the domain of rdf:subject)
        for s, p, o in self.triples((None, RDF.subject, uri)):
            # find all triples with s as the subject and add these to the subgraph
            for s2, p2, o2 in self.triples((s, None, None)):
                subgraph.add((s2, p2, o2))

    add_to_cbd(resource)
    return subgraph
1851,
516,
1851,
221,
1328248153
] |
def __init__(
self,
store: Union[Store, str] = "default",
identifier: Optional[Union[IdentifiedNode, str]] = None,
default_graph_base: Optional[str] = None, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def __str__(self):
    """N3-ish description of this conjunctive graph and its backing store."""
    storage_label = self.store.__class__.__name__
    return (
        "[a rdflib:ConjunctiveGraph;rdflib:storage "
        "[a rdflib:Store;rdfs:label '%s']]" % storage_label
    )
1851,
516,
1851,
221,
1328248153
] |
def _spoc(
self,
triple_or_quad: Union[
Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]
],
default: bool = False, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def _spoc(
self,
triple_or_quad: None,
default: bool = False, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def _spoc(
self,
triple_or_quad: Optional[
Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]]
],
default: bool = False, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def __contains__(self, triple_or_quad):
    """Support for 'triple/quad in graph' syntax"""
    s, p, o, c = self._spoc(triple_or_quad)
    # True as soon as any matching triple is produced.
    return any(True for _ in self.triples((s, p, o), context=c))
1851,
516,
1851,
221,
1328248153
] |
def _graph(self, c: Union[Graph, Node, str]) -> Graph:
    """Overload stub: a non-None context value resolves to a Graph."""
    ...
1851,
516,
1851,
221,
1328248153
] |
def _graph(self, c: None) -> None:
    """Overload stub: a None context stays None."""
    ...
1851,
516,
1851,
221,
1328248153
] |
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
    """Add a sequence of triples with context"""
    # Lazily resolve each context to a Graph and filter out malformed
    # nodes before handing the stream to the store.
    resolved = (
        (s, p, o, self._graph(c))
        for s, p, o, c in quads
        if _assertnode(s, p, o)
    )
    self.store.addN(resolved)
    return self
1851,
516,
1851,
221,
1328248153
] |
def triples(self, triple_or_quad, context=None):
    """
    Iterate over all the triples in the entire conjunctive graph
    For legacy reasons, this can take the context to query either
    as a fourth element of the quad, or as the explicit context
    keyword parameter. The kw param takes precedence.
    """
    # Split the (possibly 4-element) pattern into its components.
    s, p, o, c = self._spoc(triple_or_quad)
    # The explicit keyword context wins over the quad's 4th element.
    context = self._graph(context or c)
    if self.default_union:
        # Union semantics: the default context means "query everything",
        # which the store expresses as context=None.
        if context == self.default_context:
            context = None
    else:
        # Non-union semantics: no context means the default graph only.
        if context is None:
            context = self.default_context
    if isinstance(p, Path):
        # Property paths evaluate themselves against a graph; fall back
        # to self when no single context was selected.
        if context is None:
            context = self
        for s, o in p.eval(context, s, o):
            yield s, p, o
    else:
        # Plain triple pattern: delegate to the store, dropping the
        # context graph it returns alongside each triple.
        for (s, p, o), cg in self.store.triples((s, p, o), context=context):
            yield s, p, o
1851,
516,
1851,
221,
1328248153
] |
def triples_choices(self, triple, context=None):
    """Iterate over all the triples in the entire conjunctive graph"""
    s, p, o = triple
    if context is None:
        # With union semantics the store queries all contexts (None);
        # otherwise restrict to the default graph.
        context = None if self.default_union else self.default_context
    else:
        context = self._graph(context)
    for (s1, p1, o1), _cg in self.store.triples_choices((s, p, o), context=context):
        yield s1, p1, o1
1851,
516,
1851,
221,
1328248153
] |
def contexts(self, triple=None):
    """Iterate over all contexts in the graph

    If triple is specified, iterate over all contexts the triple is in.
    """
    for ctx in self.store.contexts(triple):
        # TODO: One of these should never happen and probably
        # should raise an exception rather than smoothing over
        # the weirdness - see #225
        if isinstance(ctx, Graph):
            yield ctx
        else:
            yield self.get_context(ctx)
1851,
516,
1851,
221,
1328248153
] |
def remove_context(self, context):
    """Remove every triple stored under the given context."""
    wildcard = (None, None, None)
    self.store.remove(wildcard, context)
1851,
516,
1851,
221,
1328248153
] |
def parse(
self,
source: Optional[
Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath]
] = None,
publicID: Optional[str] = None,
format: Optional[str] = None,
location: Optional[str] = None,
file: Optional[Union[BinaryIO, TextIO]] = None,
data: Optional[Union[str, bytes]] = None,
**args, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def __reduce__(self):
    # Pickle support: rebuild as a ConjunctiveGraph over the same store
    # and identifier (the triples themselves live in the store).
    return ConjunctiveGraph, (self.store, self.identifier)
1851,
516,
1851,
221,
1328248153
] |
def __init__(self, store="default", default_union=False, default_graph_base=None):
    """Initialize a Dataset.

    :param store: store implementation or plugin name; must be graph-aware
    :param default_union: when True, context-less queries cover the union
        of all graphs instead of only the default graph
    :param default_graph_base: optional base URI for the default graph
    :raises Exception: if the store is not graph-aware
    """
    # identifier=None lets the superclass choose its own default.
    super(Dataset, self).__init__(store=store, identifier=None)
    if not self.store.graph_aware:
        # NOTE(review): a bare Exception is hard for callers to catch
        # precisely — a dedicated exception type would be clearer.
        raise Exception("DataSet must be backed by a graph-aware store!")
    # The dataset's default graph is a plain Graph sharing the same store.
    self.default_context = Graph(
        store=self.store,
        identifier=DATASET_DEFAULT_GRAPH_ID,
        base=default_graph_base,
    )
    self.default_union = default_union
1851,
516,
1851,
221,
1328248153
] |
def __reduce__(self):
    # Pickle support: using type(self) keeps subclasses reconstructible.
    return (type(self), (self.store, self.default_union))
1851,
516,
1851,
221,
1328248153
] |
def __setstate__(self, state):
    # Unpickle: *state* is the 4-tuple (store, identifier,
    # default_context, default_union) produced by __getstate__.
    self.store, self.identifier, self.default_context, self.default_union = state
1851,
516,
1851,
221,
1328248153
] |
def parse(
self,
source=None,
publicID=None,
format=None,
location=None,
file=None,
data=None,
**args, | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def add_graph(self, g):
    """Alias of :meth:`graph`, kept for API consistency."""
    result = self.graph(g)
    return result
1851,
516,
1851,
221,
1328248153
] |
def contexts(self, triple=None):
    """Yield all contexts, guaranteeing the default graph always appears."""
    saw_default = False
    for ctx in super(Dataset, self).contexts(triple):
        if ctx.identifier == DATASET_DEFAULT_GRAPH_ID:
            saw_default = True
        yield ctx
    # The default graph may be empty and therefore absent from the store's
    # context list; synthesize it in that case.
    if not saw_default:
        yield self.graph(DATASET_DEFAULT_GRAPH_ID)
1851,
516,
1851,
221,
1328248153
] |
def quads(self, quad):
    """Yield matching quads, reporting the default graph's context as None.

    Wraps the superclass iterator and rewrites each context: quads from
    the dataset's default graph carry ``None`` as their fourth element,
    all other quads carry the graph's identifier.
    """
    # Bug fix: compare identifier to identifier.  The original compared
    # ``c.identifier == self.default_context`` (a Node against a Graph),
    # which is never equal, so default-graph quads were never normalized
    # to None.
    default_id = self.default_context.identifier
    for s, p, o, c in super(Dataset, self).quads(quad):
        if c.identifier == default_id:
            yield s, p, o, None
        else:
            yield s, p, o, c.identifier
1851,
516,
1851,
221,
1328248153
] |
def __init__(self, store, identifier):
    # A QuotedGraph initializes exactly like a plain Graph; the quoting
    # (formula) semantics are implemented in the other methods.
    super(QuotedGraph, self).__init__(store, identifier)
1851,
516,
1851,
221,
1328248153
] |
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]) -> "QuotedGraph":  # type: ignore[override]
    """Add a sequence of triples with context.

    Only quads whose context is this very quoted graph (same identifier
    object) and whose terms are valid nodes are forwarded to the store.

    Annotation fixed: *quads* is an iterable of 4-tuples, not a single
    tuple — now consistent with ``ConjunctiveGraph.addN``.
    """
    self.store.addN(
        (s, p, o, c)
        for s, p, o, c in quads
        if isinstance(c, QuotedGraph)
        and c.identifier is self.identifier
        and _assertnode(s, p, o)
    )
    return self
1851,
516,
1851,
221,
1328248153
] |
def __str__(self):
    """N3-ish description of this quoted graph and its backing store."""
    pattern = (
        "{this rdflib.identifier %s;rdflib:storage "
        "[a rdflib:Store;rdfs:label '%s']}"
    )
    return pattern % (self.identifier.n3(), self.store.__class__.__name__)
1851,
516,
1851,
221,
1328248153
] |
def __init__(self, graph, subject):
    """Parameters:

    - graph:
        the graph containing the Seq
    - subject:
        the subject of a Seq. Note that the init does not
        check whether this is a Seq, this is done in whoever
        creates this instance!
    """
    LI_INDEX = URIRef(str(RDF) + "_")
    items = list()
    for (pred, obj) in graph.predicate_objects(subject):
        if pred.startswith(LI_INDEX):  # != RDF.Seq: #
            ordinal = int(pred.replace(LI_INDEX, ""))
            items.append((ordinal, obj))
    # The membership predicates are rdf:_1, rdf:_2, rdf:_3, ... — sorting
    # the (integer, object) pairs recovers the sequence order.
    items.sort()
    self._list = items
1851,
516,
1851,
221,
1328248153
] |
def __iter__(self):
    """Generator over the items in the Seq"""
    return (item for _, item in self._list)
1851,
516,
1851,
221,
1328248153
] |
def __getitem__(self, index):
    """Item given by index from the Seq"""
    # _list holds (ordinal, item) pairs; discard the ordinal.
    _, item = self._list[index]
    return item
1851,
516,
1851,
221,
1328248153
] |
def __init__(self):
    # Intentionally a no-op: this class carries no per-instance state.
    pass
1851,
516,
1851,
221,
1328248153
] |
def __init__(self):
    # Intentionally a no-op: this class carries no per-instance state.
    pass
1851,
516,
1851,
221,
1328248153
] |
def __init__(self, graphs, store="default"):
    """Aggregate *graphs* into a single read-only view.

    :param graphs: non-empty list of Graph instances to aggregate
    :param store: store passed through to the Graph initializers
    :raises AssertionError: if *graphs* is not a non-empty list of Graphs
    """
    if store is not None:
        super(ReadOnlyGraphAggregate, self).__init__(store)
        Graph.__init__(self, store)
        self.__namespace_manager = None
    # Bug fix: the original used a list comprehension that only required
    # *some* element to be a Graph; require that every element is one, as
    # the error message states.
    assert (
        isinstance(graphs, list)
        and graphs
        and all(isinstance(g, Graph) for g in graphs)
    ), "graphs argument must be a list of Graphs!!"
    self.graphs = graphs
1851,
516,
1851,
221,
1328248153
] |
def destroy(self, configuration):
    # Aggregates are read-only views; destroying the store is forbidden.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def commit(self):
    # Aggregates are read-only views; there is never anything to commit.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def open(self, configuration, create=False):
    """Open every aggregated graph with the given store configuration.

    :param configuration: configuration string handed to each graph's store
    :param create: whether each underlying store may be created
    """
    # TODO: is there a use case for this method?
    for graph in self.graphs:
        # Bug fix: the original called graph.open(self, configuration,
        # create), passing the aggregate itself as a spurious leading
        # argument — every call raised TypeError.
        graph.open(configuration, create)
1851,
516,
1851,
221,
1328248153
] |
def add(self, triple):
    # Aggregates are read-only views; mutation is forbidden.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def remove(self, triple):
    # Aggregates are read-only views; mutation is forbidden.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def __contains__(self, triple_or_quad):
    """Membership test across all aggregated graphs (triple or quad)."""
    context = triple_or_quad[3] if len(triple_or_quad) == 4 else None
    triple = triple_or_quad[:3]
    for member in self.graphs:
        # With a quad, only consult the graph matching the context.
        if context is not None and member.identifier != context.identifier:
            continue
        if triple in member:
            return True
    return False
1851,
516,
1851,
221,
1328248153
] |
def __len__(self):
    """Total triple count over all aggregated graphs."""
    return sum(map(len, self.graphs))
1851,
516,
1851,
221,
1328248153
] |
def __cmp__(self, other):
    """Three-way comparison.

    Two aggregates compare by their ``graphs`` lists; anything else
    (including None and plain Graphs) sorts before this aggregate.
    """
    if other is None:
        return -1
    elif isinstance(other, ReadOnlyGraphAggregate):
        # Bug fix: this branch was unreachable in the original because
        # ReadOnlyGraphAggregate is itself a Graph and the Graph check
        # ran first, always returning -1.
        return (self.graphs > other.graphs) - (self.graphs < other.graphs)
    elif isinstance(other, Graph):
        return -1
    else:
        return -1
1851,
516,
1851,
221,
1328248153
] |
def __isub__(self, other):
    # Aggregates are read-only views; in-place removal is forbidden.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def triples_choices(self, triple, context=None):
    """Chain triples_choices results from every aggregated graph."""
    s, p, o = triple
    for member in self.graphs:
        for s1, p1, o1 in member.triples_choices((s, p, o)):
            yield s1, p1, o1
1851,
516,
1851,
221,
1328248153
] |
def compute_qname(self, uri, generate=True):
    """Delegate qname computation to the namespace manager, if any."""
    nm = getattr(self, "namespace_manager", None)
    if nm:
        return nm.compute_qname(uri, generate)
    raise UnSupportedAggregateOperation()
1851,
516,
1851,
221,
1328248153
] |
def namespaces(self):
    """Yield (prefix, namespace) pairs.

    Uses the aggregate's own namespace manager when present; otherwise
    chains the namespaces of every member graph.
    """
    if hasattr(self, "namespace_manager"):
        yield from self.namespace_manager.namespaces()
    else:
        for member in self.graphs:
            yield from member.namespaces()
1851,
516,
1851,
221,
1328248153
] |
def parse(self, source, publicID=None, format=None, **args):
    # Parsing would add triples; the aggregate is read-only.
    raise ModificationException()
1851,
516,
1851,
221,
1328248153
] |
def __reduce__(self):
    # Aggregates are transient views over live graphs; pickling is
    # unsupported.
    raise UnSupportedAggregateOperation()
1851,
516,
1851,
221,
1328248153
] |
def __init__(self, graph: Graph, batch_size: int = 1000, batch_addn: bool = False):
    """Buffer additions to *graph*, flushing every *batch_size* items.

    :param graph: the graph additions are flushed into
    :param batch_size: number of buffered items per flush; must be >= 2
    :param batch_addn: if True, route addN() quads through the batch too
    :raises ValueError: if batch_size is falsy or less than 2
    """
    if not batch_size or batch_size < 2:
        # Message fixed to match the actual check: 1 is rejected as well.
        raise ValueError("batch_size must be a positive number greater than 1")
    self.graph = graph
    self.__graph_tuple = (graph,)
    self.__batch_size = batch_size
    self.__batch_addn = batch_addn
    self.reset()
1851,
516,
1851,
221,
1328248153
] |
def add(
self,
triple_or_quad: Union[Tuple[Node, Node, Node], Tuple[Node, Node, Node, Any]], | RDFLib/rdflib | [
1851,
516,
1851,
221,
1328248153
] |
def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]):
    """Add quads, batching them one-by-one when batch_addn is enabled."""
    if not self.__batch_addn:
        # Fast path: hand the whole stream straight to the graph.
        self.graph.addN(quads)
    else:
        for quad in quads:
            self.add(quad)
    return self
1851,
516,
1851,
221,
1328248153
] |
def __exit__(self, *exc):
    # Flush the remaining buffered batch only on clean exit; when an
    # exception is propagating, the incomplete batch is discarded.
    if exc[0] is None:
        self.graph.addN(self.batch)
1851,
516,
1851,
221,
1328248153
] |
def activate_user(self, activation_key):
    """Validate an activation key and activate the corresponding
    ``User`` if valid.

    If the key is valid and has not expired, return the ``User``
    after activating.

    If the key is not valid or has expired, return ``False``.

    If the key is valid but the ``User`` is already active,
    return ``False``.
    """
    # Make sure the key we're trying conforms to the pattern of a
    # SHA1 hash; if it doesn't, no point trying to look it up in
    # the database.
    if SHA1_RE.search(activation_key):
        try:
            profile = self.get(activation_key=activation_key)
        except self.model.DoesNotExist:
            # Fix: return False as documented (was an implicit None).
            return False
        return profile.activate()
    # Fix: malformed keys also return False, matching the docstring.
    return False
11,
7,
11,
1,
1306440552
] |
def test_exception_for_no_callbacks(self) -> None:
    """js_on_change with no callbacks must raise ValueError."""
    model = SomeModel()
    with pytest.raises(ValueError):
        model.js_on_change('foo')
17326,
4066,
17326,
698,
1332776401
] |
def test_with_propname(self) -> None:
    """Each real property name registers a 'change:<name>' callback."""
    callback = CustomJS(code="")
    template = SomeModel()
    for prop in template.properties():
        model = SomeModel()
        model.js_on_change(prop, callback)
        assert model.js_property_callbacks == {"change:%s" % prop: [callback]}
17326,
4066,
17326,
698,
1332776401
] |
def test_with_multple_callbacks(self) -> None:
    """Multiple callbacks accumulate under the same event key, in order."""
    first = CustomJS(code="")
    second = CustomJS(code="")
    model = SomeModel()
    model.js_on_change('foo', first, second)
    assert model.js_property_callbacks == {"foo": [first, second]}
17326,
4066,
17326,
698,
1332776401
] |
def test_ignores_dupe_callbacks(self) -> None:
    """Passing the same callback twice registers it only once."""
    callback = CustomJS(code="")
    model = SomeModel()
    model.js_on_change('foo', callback, callback)
    assert model.js_property_callbacks == {"foo": [callback]}
17326,
4066,
17326,
698,
1332776401
] |
def test_with_multple_callbacks(self) -> None:
    """Multiple event callbacks accumulate under the event name, in order."""
    first = CustomJS(code="foo")
    second = CustomJS(code="bar")
    model = SomeModel()
    model.js_on_event("some", first, second)
    assert model.js_event_callbacks == {"some": [first, second]}
17326,
4066,
17326,
698,
1332776401
] |
def test_ignores_dupe_callbacks(self) -> None:
    """Passing the same event callback twice registers it only once."""
    callback = CustomJS(code="foo")
    model = SomeModel()
    model.js_on_event("some", callback, callback)
    assert model.js_event_callbacks == {"some": [callback]}
17326,
4066,
17326,
698,
1332776401
] |
def test_value_error_on_bad_attr(self) -> None:
    """Linking from a non-existent source property raises ValueError."""
    source = SomeModel()
    target = SomeModel()
    with pytest.raises(ValueError) as excinfo:
        source.js_link('junk', target, 'b')
    expected = "%r is not a property of self (%r)" % ("junk", source)
    assert str(excinfo.value).endswith(expected)
17326,
4066,
17326,
698,
1332776401
] |
def test_value_error_on_bad_other_attr(self) -> None:
    """Linking to a non-existent target property raises ValueError."""
    source = SomeModel()
    target = SomeModel()
    with pytest.raises(ValueError) as excinfo:
        source.js_link('a', target, 'junk')
    expected = "%r is not a property of other (%r)" % ("junk", target)
    assert str(excinfo.value).endswith(expected)
17326,
4066,
17326,
698,
1332776401
] |
def test_attr_selector_creates_customjs_int(self) -> None:
    """An integer attr_selector produces bracket-indexed CustomJS code."""
    source = SomeModel()
    target = SomeModel()
    assert len(source.js_property_callbacks) == 0
    source.js_link('a', target, 'b', 1)
    registered = source.js_property_callbacks
    assert len(registered) == 1
    assert "change:a" in registered
    callbacks = registered["change:a"]
    assert len(callbacks) == 1
    callback = callbacks[0]
    assert isinstance(callback, CustomJS)
    assert callback.args == dict(other=target)
    assert callback.code == "other.b = this.a[1]"
17326,
4066,
17326,
698,
1332776401
] |
def test_attr_selector_creates_customjs_str(self) -> None:
    """A string attr_selector produces quoted-key-indexed CustomJS code."""
    source = SomeModel()
    target = SomeModel()
    assert len(source.js_property_callbacks) == 0
    source.js_link('a', target, 'b', "test")
    registered = source.js_property_callbacks
    assert len(registered) == 1
    assert "change:a" in registered
    callbacks = registered["change:a"]
    assert len(callbacks) == 1
    callback = callbacks[0]
    assert isinstance(callback, CustomJS)
    assert callback.args == dict(other=target)
    assert callback.code == "other.b = this.a['test']"
17326,
4066,
17326,
698,
1332776401
] |
def test_select() -> None:
    """Exercise Model.select/select_one/set_select wrappers around query."""
    # we aren't trying to replace test_query here, only test
    # our wrappers around it, so no need to try every kind of
    # query
    d = document.Document()
    root1 = SomeModel(a=42, name='a')
    root2 = SomeModel(a=43, name='c')
    root3 = SomeModel(a=44, name='d')
    root4 = SomeModel(a=45, name='d')
    d.add_root(root1)
    d.add_root(root2)
    d.add_root(root3)
    d.add_root(root4)
    # select()
    assert {root1} == set(root1.select(dict(a=42)))
    assert {root1} == set(root1.select(dict(name="a")))
    assert {root2} == set(root2.select(dict(name="c")))
    assert set() == set(root1.select(dict(name="nope")))
    # select() on object
    assert set() == set(root3.select(dict(name='a')))
    assert {root3} == set(root3.select(dict(a=44)))
    # select_one()
    assert root3 == root3.select_one(dict(name='d'))
    assert None == root1.select_one(dict(name='nope'))
    # two roots share name='d', so a document-wide select_one is ambiguous
    with pytest.raises(ValueError) as e:
        d.select_one(dict(name='d'))
    assert 'Found more than one' in repr(e)
    # select_one() on object
    assert None == root3.select_one(dict(name='a'))
    assert None == root3.select_one(dict(name='c'))
    # set_select()
    root1.set_select(dict(a=42), dict(name="c", a=44))
    assert {root1} == set(root1.select(dict(name="c")))
    assert {root1} == set(root1.select(dict(a=44)))
    # set_select() on object
    root3.set_select(dict(name='d'), dict(a=57))
    assert {root3} == set(root3.select(dict(a=57)))
    # set_select() on class
    root2.set_select(SomeModel, dict(name='new_name'))
    assert {root2} == set(root2.select(dict(name="new_name")))
17326,
4066,
17326,
698,
1332776401
] |
def authenticateWebAppUser(self, username, password):
    """Authenticate a supplied kit ID / password pair.

    Looks the user up by supplied kit ID via the ag_login/ag_kit tables.
    Returns a dict of user information on success; returns False when the
    kit ID is unknown or the password does not match.
    """
    with TRN:
        sql = """SELECT cast(ag_login_id as varchar(100)) as ag_login_id,
                        email, name, address, city,
                        state, zip, country,kit_password
                 FROM ag_login
                 INNER JOIN ag_kit USING (ag_login_id)
                 WHERE supplied_kit_id = %s"""
        TRN.add(sql, [username])
        row = TRN.execute_fetchindex()
        if not row:
            # Unknown kit ID.
            return False
        results = dict(row[0])
        # bcrypt.checkpw needs bytes; the stored hash is passed through
        # as-is — presumably already bytes. TODO(review): confirm the
        # kit_password column type matches what bcrypt expects.
        password = password.encode('utf-8')
        if not bcrypt.checkpw(password, results['kit_password']):
            return False
        # Normalize the UUID to a plain string for callers.
        results['ag_login_id'] = str(results['ag_login_id'])
        return results
5,
24,
5,
54,
1407188842
] |
def addAGLogin(self, email, name, address, city, state, zip_, country):
    """Adds a new login or returns the login_id if email already exists

    Parameters
    ----------
    email : str
        Email to register for user
    name : str
        Name to register for user
    address : str
        Street address to register for user
    city : str
        City to register for user
    state : str
        State to register for user
    zip_ : str
        Postal code to register for user
    country : str
        Country to register for user

    Returns
    -------
    ag_login_id : str
        UUID for new user, or existing user if email already in system
    """
    with TRN:
        # Normalize before both the existence check and the insert.
        clean_email = email.strip().lower()
        # Bug fix: check for the *normalized* email — the original passed
        # the raw input, so e.g. "User@X " would miss an existing
        # "user@x" row and insert a duplicate login.
        ag_login_id = self.check_login_exists(clean_email)
        if not ag_login_id:
            # create the login
            sql = """INSERT INTO ag_login
                     (email, name, address, city, state, zip, country)
                     VALUES (%s, %s, %s, %s, %s, %s, %s)
                     RETURNING ag_login_id"""
            TRN.add(sql, [clean_email, name, address, city, state, zip_,
                          country])
            ag_login_id = TRN.execute_fetchlast()
        return ag_login_id
5,
24,
5,
54,
1407188842
] |
def getAGSurveyDetails(self, survey_id, language):
    """Returns survey information of a specific survey_id and language

    Parameters
    ----------
    survey_id : str
        the id of the survey group
    language : str
        the language the survey is intended for

    Returns
    -------
    DataFrame
        pandas DataFrame of sorted survey details

    Raises
    ------
    ValueError
        survey_id not found in database
    ValueError
        language not found in database
    """
    # Validate both values against known lists *before* interpolating
    # them into the SQL below — this whitelist is what keeps the
    # %-formatting from being an injection vector.
    if survey_id not in self.getKnownSurveyIds():
        raise ValueError('Invalid survey_id')
    if language not in self.getKnownLanguages():
        raise ValueError('Invalid language')
    # language is a *column name* and cannot be a bound parameter, so
    # both values are interpolated here and TRN.add() gets no arguments.
    sql = """SELECT survey_question_id,
                    survey_group,
                    %s,
                    question_shortname,
                    response,
                    ag.survey_question_response.display_index
                        AS response_index
             FROM ag.survey_question
             LEFT JOIN ag.survey_question_response
                 USING (survey_question_id)
             LEFT JOIN ag.group_questions USING (survey_question_id)
             LEFT JOIN ag.surveys USING (survey_group)
             WHERE survey_id = %s""" % (language, survey_id)
    with TRN:
        TRN.add(sql)
        survey_details = TRN.execute_fetchindex()
        df = pd.DataFrame([dict(r) for r in survey_details],
                          columns=['survey_question_id',
                                   'survey_group',
                                   language,
                                   'question_shortname',
                                   'response',
                                   'response_index'])
        # sorts so that questions emulate survey order
        df = df.sort_values(by=['survey_group',
                                'survey_question_id',
                                'response_index']).drop(columns='survey_group')
        # converts response_index from float to int (NaN -> None)
        df['response_index'] = df['response_index'].apply(
            lambda x: None if np.isnan(x) else int(x), convert_dtype=False)
    return df
5,
24,
5,
54,
1407188842
] |
def getKnownLanguages(self):
    """Returns all known language locales

    Returns
    -------
    set of str
        Language locales used for surveys (the column names of the
        survey_response table).  Docstring fixed: the original claimed a
        list but has always returned a set.

    Raises
    ------
    ValueError
        Languages were not able to be found
    """
    sql = """SELECT column_name FROM information_schema.columns
             WHERE table_name = 'survey_response'"""
    with TRN:
        TRN.add(sql)
        languages = TRN.execute_fetchindex()
        if not languages:
            raise ValueError('Languages were not able to be found')
        # Flatten the single-column rows and deduplicate in one pass
        # (replaces the original's explicit loop-into-a-set).
        return {row[0] for row in languages}
5,
24,
5,
54,
1407188842
] |
def registerHandoutKit(self, ag_login_id, supplied_kit_id):
    """Registeres a handout kit to a user

    Parameters
    ----------
    ag_login_id : str
        UUID4 formatted string of login ID to associate with kit
    supplied_kit_id : str
        kit ID for the handout kit

    Returns
    -------
    bool
        True: success
        False: insert failed due to IntegrityError

    Raises
    ------
    ValueError
        Non-UUID4 value sent as ag_login_id
    """
    with TRN:
        # make sure properly formatted UUID passed in
        # (UUID() raises ValueError otherwise — this validation is also
        # what makes the str.format() interpolation below safe)
        UUID(ag_login_id, version=4)
        printresults = self.checkPrintResults(supplied_kit_id)
        # make sure login_id and skid exists
        sql = """SELECT EXISTS(SELECT *
                               FROM ag.ag_login
                               WHERE ag_login_id = %s)"""
        TRN.add(sql, [ag_login_id])
        exists = TRN.execute_fetchlast()
        if not exists:
            return False
        sql = """SELECT EXISTS(SELECT *
                               FROM ag.ag_handout_kits
                               WHERE kit_id = %s)"""
        TRN.add(sql, [supplied_kit_id])
        if not TRN.execute_fetchlast():
            return False
        # Move the handout kit (and its barcodes) into the user's
        # account in a single server-side DO block, then delete the
        # handout rows.  ag_login_id and printresults are interpolated
        # via str.format (validated above); the four %s placeholders are
        # all the supplied kit id.
        sql = """
            DO $do$
            DECLARE
                k_id uuid;
                bc varchar;
            BEGIN
                INSERT INTO ag_kit
                (ag_login_id, supplied_kit_id, kit_password, swabs_per_kit,
                 kit_verification_code, print_results)
                SELECT '{0}', kit_id, password, swabs_per_kit,
                    verification_code, '{1}'
                FROM ag_handout_kits WHERE kit_id = %s LIMIT 1
                RETURNING ag_kit_id INTO k_id;
                FOR bc IN
                    SELECT barcode
                    FROM ag_handout_barcodes
                    WHERE kit_id = %s
                LOOP
                    INSERT INTO ag_kit_barcodes
                        (ag_kit_id, barcode, sample_barcode_file)
                    VALUES (k_id, bc, bc || '.jpg');
                END LOOP;
                DELETE FROM ag_handout_barcodes WHERE kit_id = %s;
                DELETE FROM ag_handout_kits WHERE kit_id = %s;
            END $do$;
            """.format(ag_login_id, printresults)
        TRN.add(sql, [supplied_kit_id] * 4)
        try:
            TRN.execute()
        except psycopg2.IntegrityError:
            # e.g. the kit was already registered concurrently
            logging.exception('Error on skid %s:' % ag_login_id)
            return False
    return True
5,
24,
5,
54,
1407188842
] |
def deleteAGParticipantSurvey(self, ag_login_id, participant_name):
    """Delete a participant's surveys, associated answers and barcodes,
    and record revoked consent for the login.

    NOTE(review): if the login/participant pair has no surveys,
    ``tuple(survey_ids)`` is empty and the ``IN %s`` clauses would produce
    invalid SQL — confirm callers only invoke this for existing sources.
    """
    # Remove user from new schema
    with TRN:
        sql = """SELECT survey_id, participant_email
                 FROM ag_login_surveys
                 JOIN ag_consent USING (ag_login_id, participant_name)
                 WHERE ag_login_id = %s AND participant_name = %s"""
        TRN.add(sql, (ag_login_id, participant_name))
        # collect all survey_ids and participant_names, since at least the
        # former might be more than one.
        survey_ids = set()
        participant_emails = set()
        for hit in TRN.execute_fetchindex():
            survey_ids.add(hit[0])
            participant_emails.add(hit[1])
        # Remember which barcodes belong to these surveys before the
        # association rows are deleted below.
        sql = """SELECT barcode
                 FROM ag.source_barcodes_surveys
                 WHERE survey_id IN %s"""
        TRN.add(sql, [tuple(survey_ids)])
        barcodes = [x[0] for x in TRN.execute_fetchindex()]
        sql = "DELETE FROM survey_answers WHERE survey_id IN %s"
        TRN.add(sql, [tuple(survey_ids)])
        sql = "DELETE FROM survey_answers_other WHERE survey_id IN %s"
        TRN.add(sql, [tuple(survey_ids)])
        # Reset survey attached to barcode(s)
        for info in self.getParticipantSamples(ag_login_id,
                                               participant_name):
            self.deleteSample(info['barcode'], ag_login_id)
        # Delete last due to foreign keys
        sql = """DELETE FROM ag.source_barcodes_surveys
                 WHERE survey_id IN %s"""
        TRN.add(sql, [tuple(survey_ids)])
        if len(barcodes) != 0:
            # only delete barcode information, if this is the
            # last survey for the given source, i.e. ag_login_id,
            # participant_name combination
            if len(survey_ids) == 1:
                sql = """DELETE FROM ag.ag_kit_barcodes
                         WHERE barcode IN %s"""
                TRN.add(sql, [tuple(barcodes)])
        sql = "DELETE FROM ag_login_surveys WHERE survey_id IN %s"
        TRN.add(sql, [tuple(survey_ids)])
        sql = """DELETE FROM ag_consent
                 WHERE ag_login_id = %s AND participant_name = %s"""
        TRN.add(sql, [ag_login_id, participant_name])
        # checks if user has previously been
        # removed and is has still revoked consent
        sql = """SELECT ag_login_id FROM ag.consent_revoked"""
        TRN.add(sql)
        revoked = {result[0] for result in TRN.execute_fetchindex()}
        # only inserts to ag.consent_revoked if not already there
        if ag_login_id not in revoked:
            sql = """INSERT INTO ag.consent_revoked
                     (ag_login_id, participant_name, participant_email)
                     VALUES (%s, %s, %s)"""
            sql_args = [[ag_login_id, participant_name, pemail]
                        for pemail in participant_emails]
            TRN.add(sql, sql_args, many=True)
        TRN.execute()
5,
24,
5,
54,
1407188842
] |
def getConsent(self, survey_id):
    """Return the consent record attached to *survey_id* as a dict.

    Raises
    ------
    ValueError
        If the survey ID does not exist in the database.
    """
    with TRN:
        TRN.add("""SELECT agc.participant_name,
                          agc.participant_email,
                          agc.parent_1_name,
                          agc.parent_2_name,
                          agc.is_juvenile,
                          agc.deceased_parent,
                          agc.ag_login_id,
                          agc.date_signed,
                          agc.assent_obtainer,
                          agc.age_range,
                          agl.survey_id
                   FROM ag_consent agc
                   JOIN ag_login_surveys agl
                       USING (ag_login_id, participant_name)
                   WHERE agl.survey_id = %s""", [survey_id])
        result = TRN.execute_fetchindex()
        if not result:
            raise ValueError("Survey ID does not exist in DB: %s" %
                             survey_id)
        # Only one consent row is expected per survey; return the first.
        return dict(result[0])
5,
24,
5,
54,
1407188842
] |
def deleteSample(self, barcode, ag_login_id):
    """ Removes by either releasing barcode back for relogging or withdraw

    Parameters
    ----------
    barcode : str
        Barcode to delete
    ag_login_id : UUID4
        Login ID for the barcode

    Notes
    -----
    Strictly speaking the ag_login_id isn't needed but it makes it really
    hard to hack the function when you would need to know someone else's
    login id (a GUID) to delete something maliciously.

    If the barcode has never been scanned, assume a mis-log and wipe it so
    barcode can be logged again. If barcode has been scanned, that means we
    have recieved it and must withdraw it to delete it from the system.
    """
    with TRN:
        # Figure out if we've received the barcode or not
        sql = "SELECT scan_date FROM barcode WHERE barcode = %s"
        TRN.add(sql, [barcode])
        received = TRN.execute_fetchlast()
        if not received:
            # Not recieved, so we release the barcode back to be relogged:
            # clear the sampling fields on ag_kit_barcodes (set_text is
            # used in the UPDATE below) and reset the barcode status here.
            set_text = """site_sampled = NULL,
                          sample_time = NULL, sample_date = NULL,
                          environment_sampled = NULL, notes = NULL"""
            sql = "UPDATE barcode SET status = NULL WHERE barcode = %s"
            TRN.add(sql, [barcode])
        else:
            # barcode already recieved, so we withdraw the barcode
            set_text = "withdrawn = 'Y'"
        # Apply the branch-specific SET clause; the nested SELECT scopes
        # the update to barcodes actually owned by this login.
        sql = """UPDATE ag_kit_barcodes
                 SET {}
                 WHERE barcode IN (
                     SELECT akb.barcode
                     FROM ag_kit_barcodes akb
                     INNER JOIN ag_kit ak USING (ag_kit_id)
                     WHERE ak.ag_login_id = %s
                         AND akb.barcode = %s)""".format(set_text)
        TRN.add(sql, [ag_login_id, barcode])
        # Drop any survey association for the barcode in either case.
        sql = """DELETE FROM ag.source_barcodes_surveys
                 WHERE barcode = %s"""
        TRN.add(sql, [barcode])
5,
24,
5,
54,
1407188842
] |
def associate_barcode_to_survey_id(self, ag_login_id, participant_name,
                                   barcode, survey_id):
    """Associate a barcode to a survey ID

    If the (login, participant, barcode) association already exists, the
    call is a no-op; otherwise the barcode/kit ownership is validated
    before the links are inserted.

    Parameters
    ----------
    ag_login_id : str
        A valid AG login ID
    participant_name : str
        The name of a participant associated with the login
    barcode : str
        A valid barcode associated with the login
    survey_id : str
        A valid survey ID

    Raises
    ------
    ValueError
        If the barcode does not belong to a kit of this login, or is
        already assigned to a participant within the kit.
    """
    with TRN:
        # test first if the barcode is already associated to a participant
        sql = """SELECT ag_login_id, participant_name, barcode
                 FROM ag.ag_login_surveys
                 JOIN ag.source_barcodes_surveys USING(survey_id)
                 WHERE ag_login_id=%s
                     AND participant_name=%s
                     AND barcode=%s"""
        TRN.add(sql, [ag_login_id, participant_name, barcode])
        results = TRN.execute_fetchflatten()
        if len(results) == 0:
            # this implies the barcode was unassigned, and this is a new
            # assignment.
            # Let's verify the barcode is associated to the kit and login
            sql = """SELECT 1
                     FROM ag.ag_login
                     JOIN ag.ag_kit USING (ag_login_id)
                     JOIN ag.ag_kit_barcodes USING (ag_kit_id)
                     WHERE ag_login_id=%s
                         AND barcode=%s"""
            TRN.add(sql, [ag_login_id, barcode])
            results = TRN.execute_fetchflatten()
            if len(results) == 0:
                # the barcode is not part of a kit with the login ID
                raise ValueError("Unexpected barcode / kit relationship")
            # the barcode should also not already be linked to a
            # participant within the kit
            sql = """SELECT 1
                     FROM ag.ag_login_surveys
                     JOIN ag.source_barcodes_surveys USING(survey_id)
                     WHERE ag_login_id=%s
                         AND barcode=%s"""
            TRN.add(sql, [ag_login_id, barcode])
            results = TRN.execute_fetchflatten()
            if len(results) > 0:
                # the barcode is already assigned to someone on the kit
                raise ValueError("Barcode already assigned")
            # record the participant <-> survey association ...
            sql = """INSERT INTO ag_login_surveys
                     (ag_login_id, survey_id, participant_name)
                     VALUES (%s, %s, %s)"""
            TRN.add(sql, [ag_login_id, survey_id, participant_name])
            # ... and the survey <-> barcode association
            sql = """INSERT INTO ag.source_barcodes_surveys
                     (survey_id, barcode)
                     VALUES (%s, %s)"""
            TRN.add(sql, [survey_id, barcode])
5,
24,
5,
54,
1407188842
] |
def get_vioscreen_status(self, survey_id):
    """Look up the vioscreen status recorded for a survey.

    Parameters
    ----------
    survey_id : str
        The survey to get status for

    Returns
    -------
    int
        Vioscreen status

    Raises
    ------
    ValueError
        If the survey_id is not present in the database.
    """
    query = """SELECT vioscreen_status
               FROM ag.ag_login_surveys
               WHERE survey_id = %s"""
    with TRN:
        TRN.add(query, [survey_id])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError("Survey ID %s not in database" % survey_id)
        return rows[0][0]
5,
24,
5,
54,
1407188842
] |
def getParticipantSamples(self, ag_login_id, participant_name):
    """Return sample details for every logged barcode of one participant.

    Only barcodes with a non-empty site_sampled are returned, as a list of
    dicts with barcode, site_sampled, sample_date, sample_time, notes and
    status keys.
    """
    with TRN:
        TRN.add(
            """SELECT DISTINCT
                   ag_kit_barcodes.barcode,
                   ag_kit_barcodes.site_sampled,
                   ag_kit_barcodes.sample_date,
                   ag_kit_barcodes.sample_time,
                   ag_kit_barcodes.notes,
                   barcodes.barcode.status
               FROM ag.ag_login_surveys
               JOIN ag.source_barcodes_surveys USING (survey_id)
               JOIN ag.ag_kit_barcodes USING (barcode)
               JOIN barcodes.barcode USING (barcode)
               WHERE ag_login_id = %s
                   AND participant_name = %s
                   AND (site_sampled IS NOT NULL
                       AND site_sampled::text <> '')""",
            [ag_login_id, participant_name])
        return [dict(record) for record in TRN.execute_fetchindex()]
5,
24,
5,
54,
1407188842
] |
def getAvailableBarcodes(self, ag_login_id):
    """List barcodes of verified kits for a login with no sample date yet."""
    with TRN:
        TRN.add("""SELECT barcode
                   FROM ag_kit_barcodes
                   INNER JOIN ag_kit USING (ag_kit_id)
                   WHERE coalesce(sample_date::text, '') = ''
                       AND kit_verified = 'y' AND ag_login_id = %s""",
                [ag_login_id])
        return TRN.execute_fetchflatten()
5,
24,
5,
54,
1407188842
] |
def _get_unverified_kits(self):
    """Return supplied_kit_ids that are not yet verified (test helper)."""
    with TRN:
        TRN.add("""SELECT supplied_kit_id
                   FROM AG_KIT
                   WHERE NOT kit_verified = 'y'""")
        return TRN.execute_fetchflatten()
5,
24,
5,
54,
1407188842
] |
def handoutCheck(self, username, password):
    """Validate a handout kit ID / password pair.

    Returns True when the kit exists and the password matches its stored
    bcrypt hash, False otherwise (including unknown kit IDs).
    """
    with TRN:
        TRN.add("SELECT password FROM ag.ag_handout_kits WHERE kit_id = %s",
                [username])
        stored = TRN.execute_fetchindex()
        if not stored:
            # Unknown kit ID: treat as an authentication failure.
            return False
        return bcrypt.checkpw(password.encode('utf-8'), stored[0][0])
5,
24,
5,
54,
1407188842
] |
def getAGKitIDsByEmail(self, email):
    """Return all kit IDs registered to an email address.

    The email is lower-cased before the lookup; an empty list is returned
    when no kits are associated with it.
    """
    query = """SELECT supplied_kit_id
               FROM ag_kit
               INNER JOIN ag_login USING (ag_login_id)
               WHERE email = %s"""
    with TRN:
        TRN.add(query, [email.lower()])
        return TRN.execute_fetchflatten()
5,
24,
5,
54,
1407188842
] |
def ag_update_kit_password(self, kit_id, password):
    """Hash and store a new password for a kit, clearing any reset code.

    kit_id is the supplied_kit_id in the ag_kit table; password is the new
    plain-text password.
    """
    # Hashing is pure CPU work, so it can happen before the transaction.
    hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
    with TRN:
        TRN.add("""UPDATE AG_KIT
                   SET kit_password = %s, pass_reset_code = NULL
                   WHERE supplied_kit_id = %s""",
                [hashed, kit_id])
5,
24,
5,
54,
1407188842
] |
def getBarcodesByKit(self, kitid):
    """Return every barcode belonging to a kit.

    kitid is the supplied_kit_id from the ag_kit table.
    """
    with TRN:
        TRN.add("""SELECT barcode
                   FROM ag_kit_barcodes
                   INNER JOIN ag_kit USING (ag_kit_id)
                   WHERE supplied_kit_id = %s""",
                [kitid])
        return TRN.execute_fetchflatten()
5,
24,
5,
54,
1407188842
] |
def checkPrintResults(self, kit_id):
    """Report whether results are available for a handout kit.

    Parameters
    ----------
    kit_id : str
        The supplied kit identifier to check for results availability.

    Returns
    -------
    bool
        Whether or not the results are ready for the supplied kit_id.

    Notes
    -----
    An unknown kit_id yields False, since no results can exist for a
    non-existent kit.
    """
    with TRN:
        TRN.add("SELECT print_results FROM ag_handout_kits WHERE kit_id = %s",
                [kit_id])
        rows = TRN.execute_fetchindex()
        if not rows:
            return False
        return rows[0][0]
5,
24,
5,
54,
1407188842
] |
def get_menu_items(self, supplied_kit_id):
    """Collect the per-login sample data used to populate the site menu.

    Returns a tuple of (human_samples, animal_samples,
    environmental_samples, kit_verified).
    """
    with TRN:
        login_id = self.get_user_for_kit(supplied_kit_id)
        kit_info = self.getAGKitDetails(supplied_kit_id)
        kit_verified = kit_info['kit_verified'] == 'y'
        humans = {name: self.getParticipantSamples(login_id, name)
                  for name in self.getHumanParticipants(login_id)}
        animals = {name: self.getParticipantSamples(login_id, name)
                   for name in self.getAnimalParticipants(login_id)}
        environmental = self.getEnvironmentalSamples(login_id)
        return humans, animals, environmental, kit_verified
5,
24,
5,
54,
1407188842
] |
def get_user_info(self, supplied_kit_id):
    """Return login/contact details for the owner of a supplied kit ID.

    Raises
    ------
    ValueError
        If the supplied kit id is not present in the database.
    """
    query = """SELECT CAST(ag_login_id AS VARCHAR(100)) AS ag_login_id,
                   email, name, address, city, state, zip, country
               FROM ag_login
               INNER JOIN ag_kit USING(ag_login_id)
               WHERE supplied_kit_id = %s"""
    with TRN:
        TRN.add(query, [supplied_kit_id])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('Supplied kit id is not in DB: %s' %
                             supplied_kit_id)
        info = dict(rows[0])
        # Normalize the login id (a UUID) to a plain string for callers.
        info['ag_login_id'] = str(info['ag_login_id'])
        return info
5,
24,
5,
54,
1407188842
] |
def get_login_info(self, ag_login_id):
    """Get kit registration information

    Parameters
    ----------
    ag_login_id : str
        A valid login ID, expected to be a valid UUID.

    Returns
    -------
    list of dict
        Registration information associated with a common login ID.

    Raises
    ------
    ValueError
        Unknown ag_login_id passed
    """
    query = """SELECT ag_login_id, email, name, address, city, state,
                   zip, country
               FROM ag_login
               WHERE ag_login_id = %s"""
    with TRN:
        TRN.add(query, [ag_login_id])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('ag_login_id not in database: %s' %
                             ag_login_id)
        return [dict(record) for record in rows]
5,
24,
5,
54,
1407188842
] |
def get_participants_surveys(self, ag_login_id, participant_name,
                             locale='american'):
    """Returns all surveys (except external) for one participant for a
    AG login.

    Parameters
    ----------
    ag_login_id : str
        A valid login ID, expected to be a valid UUID.
    participant_name : str
        A participant name.
    locale : str
        The names for the surveys are fetched from table ag.survey_group.
        For localization, there are columns for each language, which is set
        by locale. Must be a plain identifier (a column name).

    Returns
    -------
    List of lists or None
        A list of surveys for the given participant of the given
        ag_login_id. Each element is a list again [int, str, str]. Where
        the first element is the survey group id, the second the survey_id
        and the third is a speaking name for the survey.

    Raises
    ------
    ValueError
        Unknown ag_login_id or participant_name passed, or locale is not
        a valid identifier.
    """
    # `locale` selects a column of ag.survey_group and therefore must be
    # interpolated into the SQL text itself. Guard against SQL injection
    # by only accepting a plain Python identifier (column names are).
    if not locale.isidentifier():
        raise ValueError("Invalid locale: %s" % locale)
    with TRN:
        sql = """SELECT DISTINCT gq.survey_group, als.survey_id, sg.{0}
                 FROM ag.ag_login_surveys als
                 LEFT JOIN ag.survey_answers sa USING (survey_id)
                 LEFT JOIN ag.group_questions gq USING (survey_question_id)
                 LEFT JOIN ag.survey_group sg ON (survey_group=group_order)
                 WHERE als.ag_login_id = %s AND als.participant_name = %s
                     AND gq.survey_group < 0""".format(locale)
        TRN.add(sql, [ag_login_id, participant_name])
        surveys = TRN.execute_fetchindex()
        if not surveys:
            raise ValueError("No survey IDs found!")
        return surveys
5,
24,
5,
54,
1407188842
] |
def get_countries(self):
    """
    Returns
    -------
    list of str
        All country names in the database, sorted alphabetically.
    """
    with TRN:
        TRN.add('SELECT country FROM ag.iso_country_lookup ORDER BY country')
        return TRN.execute_fetchflatten()
5,
24,
5,
54,
1407188842
] |
def ut_get_arbitrary_supplied_kit_id_scanned_unconsented(self):
    """ Returns arbitrarily chosen supplied_kit_id and barcode which has
    been scanned but is without consent.
    For unit testing only!

    Returns
    -------
    list of str: [supplied_kit_id, barcode]
        example: ['fNIYa', '000001053']

    Raises
    ------
    ValueError
        If no kits can be found in the DB that have been scanned and
        are without consent."""
    query = """SELECT supplied_kit_id, barcode
               FROM barcodes.barcode
               JOIN ag.ag_kit_barcodes USING (barcode)
               JOIN ag.ag_kit USING (ag_kit_id)
               LEFT JOIN ag.source_barcodes_surveys USING (barcode)
               WHERE barcodes.barcode.scan_date IS NOT NULL
                   AND ag.source_barcodes_surveys.survey_id IS NULL
               LIMIT 1"""
    with TRN:
        TRN.add(query, [])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('No kits found.')
        return rows[0]
5,
24,
5,
54,
1407188842
] |
def ut_get_arbitrary_email(self):
    """ Return arbitrarily chosen email.
    For unit testing only!

    Returns
    -------
    str: email
        Example: 'a03E9u6ZAu@glA+)./Vn'

    Raises
    ------
    ValueError
        If no emails can be found in the DB."""
    with TRN:
        TRN.add("""SELECT email
                   FROM ag.ag_login
                   LIMIT 1""", [])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('No emails found.')
        return rows[0][0]
5,
24,
5,
54,
1407188842
] |
def ut_get_email_from_ag_login_id(self, ag_login_id):
    """ Returns email for a given ag_login_id.
    For unit testing only!

    Parameters
    ----------
    ag_login_id : str
        Existing ag_login_id.

    Returns
    -------
    str: email
        Example: 'xX/tEv7O+T@6Ri7C.)LO'

    Raises
    ------
    ValueError
        If ag_login_id is not in DB.
    """
    with TRN:
        sql = """SELECT email
                 FROM ag.ag_login
                 WHERE ag_login_id=%s"""
        TRN.add(sql, [ag_login_id])
        info = TRN.execute_fetchindex()
        if not info:
            # Name the offending ID, consistent with the sibling lookups
            # (was a generic "No emails found." that hid the cause).
            raise ValueError('ag_login_id not in database: %s' %
                             ag_login_id)
        return info[0][0]
5,
24,
5,
54,
1407188842
] |
def ut_get_participant_names_from_ag_login_id(self, ag_login_id):
    """ Returns all participant_name(s) for a given ag_login_id.
    For unit testing only!

    Parameters
    ----------
    ag_login_id : str
        Existing ag_login_id.

    Returns
    -------
    list of str
        The participant names registered under the login.

    Raises
    ------
    ValueError
        If ag_login_id is not in DB.
    """
    query = """SELECT participant_name
               FROM ag.ag_login_surveys
               WHERE ag_login_id = %s"""
    with TRN:
        TRN.add(query, [ag_login_id])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('ag_login_id not in database: %s' %
                             ag_login_id)
        return [record[0] for record in rows]
5,
24,
5,
54,
1407188842
] |
def ut_get_arbitrary_supplied_kit_id_unverified(self):
    """ Returns an arbitrarily chosen supplied_kit_id that is unverified.
    For unit testing only!

    Returns
    -------
    str: supplied_kit_id
        Example: 'FajNh'

    Raises
    ------
    ValueError
        If no unverified supplied_kit_id can be found in the DB.
    """
    with TRN:
        TRN.add("""SELECT supplied_kit_id
                   FROM ag.ag_kit
                   WHERE ag.ag_kit.kit_verified = 'n'
                   LIMIT 1""", [])
        rows = TRN.execute_fetchindex()
        if not rows:
            raise ValueError('No unverified kits in DB')
        return rows[0][0]
5,
24,
5,
54,
1407188842
] |
def test_model_definition_pickle():
    """Round-trip a model_definition through pickle; metadata must survive."""
    original = model_definition(10, [bb, niw(3)])
    restored = pickle.loads(pickle.dumps(original))
    assert_equals(original.n(), restored.n())
    assert_equals(len(original.models()), len(restored.models()))
    for left, right in zip(original.models(), restored.models()):
        assert_equals(left.name(), right.name())
12,
3,
12,
2,
1403576375
] |
def extractToomtummootstranslationsWordpressCom(item):
    '''
    Parser for 'toomtummootstranslations.wordpress.com'

    Returns None for previews or titles without chapter/volume info,
    a release message for recognised tags, and False otherwise.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (chp or vol):
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
191,
16,
191,
3,
1437712243
] |
def handle_sharp_command(command, user, randomuri, implant_id):
    """Dispatch a C# implant command string to its handler.

    Parameters: ``command`` is the raw operator input, ``user`` the
    operator name, ``randomuri`` the implant session identifier and
    ``implant_id`` the implant's id (unused here; kept for interface
    parity with the other handlers — TODO confirm).

    Any non-empty input that matches no known prefix falls through to
    do_shell() with the alias-expanded, pre-strip command text.
    """
    # alias mapping: swap the whole command when it equals an alias
    # (comparison ignores trailing whitespace on the input)
    for alias in cs_alias:
        if alias[0] == command[:len(command.rstrip())]:
            command = alias[1]
    # alias replace: rewrite occurrences of the alias text when the
    # command starts with it (note: str.replace replaces all occurrences)
    for alias in cs_replace:
        if command.startswith(alias[0]):
            command = command.replace(alias[0], alias[1])
    # keep the un-stripped text so do_shell sees what was actually typed
    original_command = command
    command = command.strip()
    run_autoloads_sharp(command, randomuri, user)
    # NOTE: prefix matches below are order-sensitive — e.g.
    # "loadmoduleforce" must be tested before "loadmodule".
    if command.startswith("searchhelp"):
        do_searchhelp(user, command, randomuri)
        return
    elif command.startswith("searchallhelp"):
        do_searchallhelp(user, command, randomuri)
        return
    elif command.startswith("searchhistory"):
        do_searchhistory(user, command, randomuri)
        return
    elif command.startswith("upload-file"):
        do_upload_file(user, command, randomuri)
        return
    elif command.startswith("inject-shellcode"):
        do_inject_shellcode(user, command, randomuri)
        return
    elif command.startswith("migrate"):
        do_migrate(user, command, randomuri)
        return
    elif command == "kill-implant" or command == "exit":
        do_kill_implant(user, command, randomuri)
        return
    elif command == "sharpsocks":
        do_sharpsocks(user, command, randomuri)
        return
    elif (command.startswith("stop-keystrokes")):
        do_stop_keystrokes(user, command, randomuri)
        return
    elif (command.startswith("start-keystrokes")):
        do_start_keystrokes(user, command, randomuri)
        return
    elif (command.startswith("get-keystrokes")):
        do_get_keystrokes(user, command, randomuri)
        return
    # "get-screenshotmulti" must be tested before "get-screenshot"
    elif (command.startswith("get-screenshotmulti")):
        do_get_screenshotmulti(user, command, randomuri)
        return
    elif command.startswith("get-screenshot"):
        do_get_screenshot(user, command, randomuri)
        return
    elif command == "getpowerstatus":
        do_get_powerstatus(user, command, randomuri)
        return
    elif command == "stoppowerstatus":
        do_stoppowerstatus(user, command, randomuri)
        return
    # special case: SharpWMI execute without an explicit payload
    elif command.startswith("run-exe SharpWMI.Program") and "execute" in command and "payload" not in command:
        do_sharpwmi_execute(user, command, randomuri)
        return
    elif (command.startswith("get-hash")):
        do_get_hash(user, command, randomuri)
        return
    elif (command.startswith("enable-rotation")):
        do_rotation(user, command, randomuri)
        return
    elif (command.startswith("safetykatz")):
        do_safetykatz(user, command, randomuri)
        return
    elif command.startswith("loadmoduleforce"):
        do_loadmoduleforce(user, command, randomuri)
        return
    elif command.startswith("loadmodule"):
        do_loadmodule(user, command, randomuri)
        return
    elif command.startswith("listmodules"):
        do_listmodules(user, command, randomuri)
        return
    elif command.startswith("modulesloaded"):
        do_modulesloaded(user, command, randomuri)
        return
    elif command.startswith("pbind-connect"):
        do_pbind_start(user, command, randomuri)
        return
    elif command.startswith("fcomm-connect"):
        do_fcomm_start(user, command, randomuri)
        return
    elif command.startswith("dynamic-code"):
        do_dynamic_code(user, command, randomuri)
        return
    elif command.startswith("startdaisy"):
        do_startdaisy(user, command, randomuri)
        return
    elif command == "help":
        do_help(user, command, randomuri)
        return
    else:
        # unknown input: run it as a shell command, preserving the
        # operator's original text; empty input is ignored
        if command:
            do_shell(user, original_command, randomuri)
        return
1460,
291,
1460,
19,
1532336012
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.