hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a11c3b7310a7b1fe080a25625b730286991b423
| 1,618
|
py
|
Python
|
tonguetwister/gui/chunkviews/memorymap.py
|
lham/tonguetwister
|
ae8cd1555782f2e551a5d4156d8d247e08e354e1
|
[
"MIT"
] | 4
|
2021-03-06T19:26:27.000Z
|
2022-02-07T16:00:01.000Z
|
tonguetwister/gui/chunkviews/memorymap.py
|
lham/tonguetwister
|
ae8cd1555782f2e551a5d4156d8d247e08e354e1
|
[
"MIT"
] | null | null | null |
tonguetwister/gui/chunkviews/memorymap.py
|
lham/tonguetwister
|
ae8cd1555782f2e551a5d4156d8d247e08e354e1
|
[
"MIT"
] | null | null | null |
from tonguetwister.disassembler.mappings.chunks import ChunkType
from tonguetwister.gui.chunkview import ResourceLink
from tonguetwister.gui.widgets.generic.labels import FixedSizeLinkLabel, FixedSizeLabel, InactiveFixedSizeLabel
from tonguetwister.gui.widgets.entrylistview import EntryListView, EntryView
class MemoryMapEntryView(EntryView):
    """One row of the memory-map listing: index, address/four-cc, link, size."""

    rows = 1
    cols = 6
    col_widths = [35, 20, 140, 20, 250, 200]

    def __init__(self, index, entry, **kwargs):
        # Resolve the entry's four-cc into a ChunkType before base-class init,
        # since the label helpers below consult it during construction.
        self.chunk_type = ChunkType(entry.four_cc)
        super().__init__(index, entry, **kwargs)

    def is_active(self):
        # Free/junk slots are rendered inactive (greyed out, no link).
        return self.chunk_type not in (ChunkType.Free, ChunkType.Junk)

    @property
    def label_class(self):
        if self.is_active():
            return FixedSizeLabel
        return InactiveFixedSizeLabel

    @property
    def link_label_class(self):
        if self.is_active():
            return FixedSizeLinkLabel
        return InactiveFixedSizeLabel

    def label_kwargs(self):
        """Per-column kwargs for the six labels making up this row."""
        index_col = {'text': f'{self.index}', 'halign': 'right'}
        separator = {'text': '--', 'halign': 'center'}
        address_col = {'text': f'0x{self.entry.chunk_address:08x} / {self.chunk_type}', 'halign': 'center'}
        arrow = {'text': '->', 'halign': 'center'}
        link_col = {
            'text': f'[{self.index:4d}]: {self.chunk_type.name}',
            'link_target': self.set_resource_link
        }
        size_col = {'text': f'(Size: {self.entry.chunk_length} bytes)'}
        return [index_col, separator, address_col, arrow, link_col, size_col]

    def set_resource_link(self):
        self.resource_link = ResourceLink(self.index)
class MemoryMapView(EntryListView):
    # List view over memory-map entries; each entry is rendered by
    # MemoryMapEntryView (one row per chunk slot).
    entry_class = MemoryMapEntryView
| 35.173913
| 111
| 0.65513
|
4a11c47573f02a66af5dac22e971ad7cc69c99f7
| 1,508
|
py
|
Python
|
tests/importer/test_simple_loader.py
|
madman-bob/python-custom-imports
|
e9d6979865bfde5f149a2190d8f2895d333ab219
|
[
"MIT"
] | null | null | null |
tests/importer/test_simple_loader.py
|
madman-bob/python-custom-imports
|
e9d6979865bfde5f149a2190d8f2895d333ab219
|
[
"MIT"
] | 1
|
2020-05-21T02:36:07.000Z
|
2020-05-21T12:55:24.000Z
|
tests/importer/test_simple_loader.py
|
madman-bob/python-custom-imports
|
e9d6979865bfde5f149a2190d8f2895d333ab219
|
[
"MIT"
] | null | null | null |
import sys
from dataclasses import dataclass
from unittest import TestCase
from custom_imports.importer import Module, ModuleSpec, SimpleLoader
# True when running on CPython 3.6; the tests branch on this to choose
# between an isinstance() check and an issubclass(type(m), ...) check
# against the Module type.
PY_36 = sys.version_info[:2] == (3, 6)
class TestSimpleLoader(TestCase):
    """Tests for SimpleLoader's create_module/exec_module protocol."""

    def _assert_is_module(self, module):
        # Helper factored out of test_simple_loader, where this exact
        # branch appeared twice. On Python 3.6 the isinstance check against
        # Module is replaced by an explicit issubclass check on the type
        # (presumably an ABC-registration quirk on 3.6 — TODO confirm).
        if PY_36:
            self.assertTrue(issubclass(type(module), Module))
        else:
            self.assertIsInstance(module, Module)

    def test_simple_loader(self):
        @dataclass
        class SimpleModule:
            value: str = ""

            def set_value(self, value):
                self.value = value

        loader = SimpleLoader(
            module_type=SimpleModule,
            module_type_kwargs={"value": "Initial value"},
            load_module=SimpleModule.set_value,
        )
        module_spec = ModuleSpec(
            "fake_module", None, loader_state="Lorem ipsum, dolor sit amet"
        )
        with self.subTest("Create module"):
            # create_module must build a SimpleModule with the constructor
            # kwargs applied, and the result must still count as a Module.
            module = loader.create_module(module_spec)
            self.assertIsInstance(module, SimpleModule)
            self.assertEqual("Initial value", module.value)
            self._assert_is_module(module)
        with self.subTest("Load module"):
            # exec_module must forward the spec's loader_state through the
            # configured load_module callback (SimpleModule.set_value).
            module.__spec__ = module_spec
            loader.exec_module(module)
            self.assertIsInstance(module, SimpleModule)
            self.assertEqual("Lorem ipsum, dolor sit amet", module.value)
            self._assert_is_module(module)
| 29.568627
| 75
| 0.605438
|
4a11c496d40f241232e1318ce2568f8d4169443f
| 2,918
|
py
|
Python
|
tests/skip_test_velomesh.py
|
Krissmedt/imprunko
|
94171d0d47171cc4b199cd52f5f29385cbff903e
|
[
"MIT"
] | 5
|
2018-10-26T07:08:16.000Z
|
2019-05-10T06:47:37.000Z
|
tests/skip_test_velomesh.py
|
Krissmedt/imprunko
|
94171d0d47171cc4b199cd52f5f29385cbff903e
|
[
"MIT"
] | 9
|
2018-11-09T08:50:48.000Z
|
2019-06-06T20:11:12.000Z
|
tests/skip_test_velomesh.py
|
Krissmedt/imprunko
|
94171d0d47171cc4b199cd52f5f29385cbff903e
|
[
"MIT"
] | null | null | null |
import unittest
import sys
sys.path.append('python')
import numpy as np
import plasmatools as plasma
class Params:
    # Simple bag of velocity-mesh extents, filled in by the test fixtures.
    mins = None  # lower corner of the velocity box, [x, y, z]
    maxs = None  # upper corner of the velocity box, [x, y, z]
    lens = None  # box side lengths; never assigned by the tests in this file
def cellID2index(cellID, dvs):
    """Convert a 1-based linear cell ID into 0-based (i, j, k) block indices.

    Args:
        cellID: 1-based linear cell identifier.
        dvs: blocks per dimension, [Nx, Ny, Nz] (only Nx and Ny are used).

    Returns:
        Tuple (i, j, k) of 0-based indices along x, y and z.
    """
    # `np.int` was a deprecated alias removed in NumPy 1.24; plain integer
    # floor division gives the same values exactly, without float round-off
    # from the old true-division-then-truncate formulation.
    cellID -= 1
    k = cellID // (dvs[0] * dvs[1])
    j = (cellID // dvs[0]) % dvs[1]
    i = cellID % dvs[0]
    return (i, j, k)
def populate_mesh(mesh):
    """Fill every block of the velocity mesh from the analytic distribution.

    Each block is assigned a 4-element list holding the same sampled value,
    evaluated at the block's center via physical_vel().
    """
    nz = mesh.Nblocks[2]
    ny = mesh.Nblocks[1]
    nx = mesh.Nblocks[0]
    for kk in range(nz):
        for jj in range(ny):
            for ii in range(nx):
                block_id = mesh.get_block_id([ii, jj, kk])
                x, y, z = mesh.get_center(block_id)
                value = physical_vel(x, y, z)
                mesh[ii, jj, kk] = [value, value, value, value]
# Analytic "real" distribution the mesh contents are compared against.
def physical_vel(x, y, z):
    """Reference distribution: a Gaussian in x and y, constant in z."""
    center = (1.0, 2.0, 3.0)   # mux, muy, muz
    width = (2.0, 3.0, 4.0)    # sigmax, sigmay, sigmaz
    gauss_x = np.exp(-(x - center[0]) ** 2 / width[0] ** 2)
    gauss_y = np.exp(-(y - center[1]) ** 2 / width[1] ** 2)
    # The z-dependence is intentionally disabled here (flat profile); the
    # muz/sigmaz constants are kept for reference only.
    gauss_z = 1.0
    return gauss_x * gauss_y * gauss_z
class Basics(unittest.TestCase):
    """Construction and index-decomposition checks for plasma.VeloMesh."""

    def setUp(self):
        # Block counts per dimension and the velocity-space extents.
        self.Nx, self.Ny, self.Nz = 50, 30, 5
        self.params = Params()
        self.params.mins = [-10.0, -10.0, -10.0]
        self.params.maxs = [10.0, 10.0, 10.0]
        self.mesh = plasma.VeloMesh()
        self.mesh.Nblocks = [self.Nx, self.Ny, self.Nz]

    def test_zFill(self):
        # z_fill should create exactly one block per (i, j, k) slot.
        self.mesh.z_fill(self.params.mins, self.params.maxs)
        expected = self.Nx * self.Ny * self.Nz
        self.assertEqual(self.mesh.number_of_blocks, expected)

    def test_indices(self):
        # Cross-check the mesh's index decomposition against the
        # pure-Python reference formula cellID2index().
        for probe in ([1, 1, 0], [2, 2, 0], [3, 3, 0]):
            block_id = self.mesh.get_block_id(probe)
            expected = cellID2index(block_id, self.mesh.Nblocks)
            actual = self.mesh.get_indices(block_id)
            self.assertEqual(expected[0], actual[0])
            self.assertEqual(expected[1], actual[1])
            self.assertEqual(expected[2], actual[2])
class Data(unittest.TestCase):
    """Data round-trips through both mesh indexing schemes."""

    def setUp(self):
        self.Nx, self.Ny, self.Nz = 50, 30, 5
        self.params = Params()
        self.params.mins = [-10.0, -10.0, -10.0]
        self.params.maxs = [10.0, 10.0, 10.0]
        self.mesh = plasma.VeloMesh()
        self.mesh.Nblocks = [self.Nx, self.Ny, self.Nz]
        self.mesh.z_fill(self.params.mins, self.params.maxs)

    def test_meshData(self):
        payload = [1.0, 2.0, 3.0, 4.5]
        # Round-trip via the linear cell-id interface.
        self.mesh[1] = payload
        self.assertEqual(self.mesh[1], payload)
        # Round-trip via the (i, j, k) interface.
        self.mesh[2, 2, 0] = payload
        self.assertEqual(self.mesh[2, 2, 0], payload)
# Run the test suite when this module is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 21.614815
| 79
| 0.53427
|
4a11c5156a6d7468d88c1730648d6f3e7faab9cf
| 18,946
|
py
|
Python
|
tools/grit/grit/node/base.py
|
bluebellzhy/chromium
|
008c4fef2676506869a0404239da31e83fd6ccc7
|
[
"BSD-3-Clause"
] | 1
|
2016-05-08T15:35:17.000Z
|
2016-05-08T15:35:17.000Z
|
tools/grit/grit/node/base.py
|
bluebellzhy/chromium
|
008c4fef2676506869a0404239da31e83fd6ccc7
|
[
"BSD-3-Clause"
] | null | null | null |
tools/grit/grit/node/base.py
|
bluebellzhy/chromium
|
008c4fef2676506869a0404239da31e83fd6ccc7
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Base types for nodes in a GRIT resource tree.
'''
import os
import types
from xml.sax import saxutils
from grit import exception
from grit import util
from grit import clique
import grit.format.interface
class Node(grit.format.interface.ItemFormatter):
  '''An item in the tree that has children. Also implements the
  ItemFormatter interface to allow formatting a node as a GRD document.'''

  # Valid content types that can be returned by _ContentType()
  _CONTENT_TYPE_NONE = 0   # No CDATA content but may have children
  _CONTENT_TYPE_CDATA = 1  # Only CDATA, no children.
  _CONTENT_TYPE_MIXED = 2  # CDATA and children, possibly intermingled

  def __init__(self):
    self.children = []       # A list of child elements
    self.mixed_content = []  # A list of u'' and/or child elements (this
                             # duplicates 'children' but is needed to
                             # preserve markup-type content).
    self.name = u''          # The name of this element
    self.attrs = {}          # The set of attributes (keys to values)
    self.parent = None       # Our parent unless we are the root element.
    self.uberclique = None   # Allows overriding uberclique for parts of tree

  def __iter__(self):
    '''An in-order iteration through the tree that this node is the
    root of.'''
    return self.inorder()

  def inorder(self):
    '''Generator that generates first this node, then the same generator for
    any child nodes.'''
    yield self
    for child in self.children:
      for iterchild in child.inorder():
        yield iterchild

  def GetRoot(self):
    '''Returns the root Node in the tree this Node belongs to.'''
    curr = self
    while curr.parent:
      curr = curr.parent
    return curr

    # TODO(joi) Use this (currently untested) optimization?:
    #if hasattr(self, '_root'):
    #  return self._root
    #curr = self
    #while curr.parent and not hasattr(curr, '_root'):
    #  curr = curr.parent
    #if curr.parent:
    #  self._root = curr._root
    #else:
    #  self._root = curr
    #return self._root

  def StartParsing(self, name, parent):
    '''Called at the start of parsing.

    Args:
      name: u'elementname'
      parent: grit.node.base.Node or subclass or None
    '''
    assert isinstance(name, types.StringTypes)
    assert not parent or isinstance(parent, Node)
    self.name = name
    self.parent = parent

  def AddChild(self, child):
    '''Adds a child to the list of children of this node, if it is a valid
    child for the node.

    Raises:
      exception.UnexpectedChild: if the child is invalid here, or this node
        only allows CDATA content.
    '''
    assert isinstance(child, Node)
    if (not self._IsValidChild(child) or
        self._ContentType() == self._CONTENT_TYPE_CDATA):
      if child.parent:
        explanation = 'child %s of parent %s' % (child.name, child.parent.name)
      else:
        explanation = 'node %s with no parent' % child.name
      raise exception.UnexpectedChild(explanation)
    self.children.append(child)
    self.mixed_content.append(child)

  def RemoveChild(self, child_id):
    '''Removes the first node that has a "name" attribute which
    matches "child_id" in the list of immediate children of
    this node.

    Args:
      child_id: String identifying the child to be removed
    '''
    index = 0
    # Safe not to copy since we only remove the first element found
    for child in self.children:
      name_attr = child.attrs['name']
      if name_attr == child_id:
        # Keep 'children' and 'mixed_content' in sync. NOTE(review): the
        # index is a position in 'children'; this assumes the child sits at
        # the same position in 'mixed_content' — holds only when no CDATA
        # precedes it. TODO confirm intended for element-only nodes.
        self.children.pop(index)
        self.mixed_content.pop(index)
        break
      index += 1

  def AppendContent(self, content):
    '''Appends a chunk of text as content of this node.

    Args:
      content: u'hello'

    Return:
      None

    Raises:
      exception.UnexpectedContent: if this node allows no CDATA and the
        chunk is not pure whitespace.
    '''
    assert isinstance(content, types.StringTypes)
    if self._ContentType() != self._CONTENT_TYPE_NONE:
      self.mixed_content.append(content)
    elif content.strip() != '':
      raise exception.UnexpectedContent()

  def HandleAttribute(self, attrib, value):
    '''Informs the node of an attribute that was parsed out of the GRD file
    for it.

    Args:
      attrib: 'name'
      value: 'fooblat'

    Return:
      None

    Raises:
      exception.UnexpectedAttribute: if the attribute is not valid here.
    '''
    assert isinstance(attrib, types.StringTypes)
    assert isinstance(value, types.StringTypes)
    if self._IsValidAttribute(attrib, value):
      self.attrs[attrib] = value
    else:
      raise exception.UnexpectedAttribute(attrib)

  def EndParsing(self):
    '''Called at the end of parsing.  Trims whitespace and the optional
    leading/trailing triple-quote whitespace delimiter from mixed content,
    validates mandatory attributes and fills in attribute defaults.'''
    # TODO(joi) Rewrite this, it's extremely ugly!
    if len(self.mixed_content):
      if isinstance(self.mixed_content[0], types.StringTypes):
        # Remove leading and trailing chunks of pure whitespace.
        while (len(self.mixed_content) and
               isinstance(self.mixed_content[0], types.StringTypes) and
               self.mixed_content[0].strip() == ''):
          self.mixed_content = self.mixed_content[1:]
        # Strip leading and trailing whitespace from mixed content chunks
        # at front and back.
        if (len(self.mixed_content) and
            isinstance(self.mixed_content[0], types.StringTypes)):
          self.mixed_content[0] = self.mixed_content[0].lstrip()
        # Remove leading and trailing ''' (used to demarcate whitespace)
        if (len(self.mixed_content) and
            isinstance(self.mixed_content[0], types.StringTypes)):
          if self.mixed_content[0].startswith("'''"):
            self.mixed_content[0] = self.mixed_content[0][3:]
    if len(self.mixed_content):
      if isinstance(self.mixed_content[-1], types.StringTypes):
        # Same stuff all over again for the tail end.
        while (len(self.mixed_content) and
               isinstance(self.mixed_content[-1], types.StringTypes) and
               self.mixed_content[-1].strip() == ''):
          self.mixed_content = self.mixed_content[:-1]
        if (len(self.mixed_content) and
            isinstance(self.mixed_content[-1], types.StringTypes)):
          self.mixed_content[-1] = self.mixed_content[-1].rstrip()
        if (len(self.mixed_content) and
            isinstance(self.mixed_content[-1], types.StringTypes)):
          if self.mixed_content[-1].endswith("'''"):
            self.mixed_content[-1] = self.mixed_content[-1][:-3]
    # Check that all mandatory attributes are there.
    for node_mandatt in self.MandatoryAttributes():
      mandatt_list = []
      if node_mandatt.find('|') >= 0:
        # A '|'-separated entry means "exactly one of these must be set".
        mandatt_list = node_mandatt.split('|')
      else:
        mandatt_list.append(node_mandatt)
      mandatt_option_found = False
      for mandatt in mandatt_list:
        assert mandatt not in self.DefaultAttributes().keys()
        if mandatt in self.attrs:
          if not mandatt_option_found:
            mandatt_option_found = True
          else:
            raise exception.MutuallyExclusiveMandatoryAttribute(mandatt)
      if not mandatt_option_found:
        raise exception.MissingMandatoryAttribute(mandatt)
    # Add default attributes if not specified in input file.
    for defattr in self.DefaultAttributes():
      if not defattr in self.attrs:
        self.attrs[defattr] = self.DefaultAttributes()[defattr]

  def GetCdata(self):
    '''Returns all CDATA of this element, concatenated into a single
    string.  Note that this ignores any elements embedded in CDATA.'''
    return ''.join(filter(lambda c: isinstance(c, types.StringTypes),
                          self.mixed_content))

  def __unicode__(self):
    '''Returns this node and all nodes below it as an XML document in a Unicode
    string.'''
    header = u'<?xml version="1.0" encoding="UTF-8"?>\n'
    return header + self.FormatXml()

  # Compliance with ItemFormatter interface.
  def Format(self, item, lang_re = None, begin_item=True):
    if not begin_item:
      return ''
    else:
      return item.FormatXml()

  def FormatXml(self, indent = u'', one_line = False):
    '''Returns this node and all nodes below it as an XML
    element in a Unicode string.  This differs from __unicode__ in that it does
    not include the <?xml> stuff at the top of the string.  If one_line is true,
    children and CDATA are layed out in a way that preserves internal
    whitespace.
    '''
    assert isinstance(indent, types.StringTypes)
    content_one_line = (one_line or
                        self._ContentType() == self._CONTENT_TYPE_MIXED)
    inside_content = self.ContentsAsXml(indent, content_one_line)
    # Then the attributes for this node.
    attribs = u' '
    for (attrib, value) in self.attrs.iteritems():
      # Only print an attribute if it is other than the default value.
      if (not self.DefaultAttributes().has_key(attrib) or
          value != self.DefaultAttributes()[attrib]):
        attribs += u'%s=%s ' % (attrib, saxutils.quoteattr(value))
    attribs = attribs.rstrip()  # if no attribs, we end up with '', otherwise
                                # we end up with a space-prefixed string
    # Finally build the XML for our node and return it
    if len(inside_content) > 0:
      if one_line:
        return u'<%s%s>%s</%s>' % (self.name, attribs, inside_content, self.name)
      elif content_one_line:
        return u'%s<%s%s>\n%s %s\n%s</%s>' % (
          indent, self.name, attribs,
          indent, inside_content,
          indent, self.name)
      else:
        return u'%s<%s%s>\n%s\n%s</%s>' % (
          indent, self.name, attribs,
          inside_content,
          indent, self.name)
    else:
      # Self-closing form for empty elements.
      return u'%s<%s%s />' % (indent, self.name, attribs)

  def ContentsAsXml(self, indent, one_line):
    '''Returns the contents of this node (CDATA and child elements) in XML
    format.  If 'one_line' is true, the content will be laid out on one line.'''
    assert isinstance(indent, types.StringTypes)
    # Build the contents of the element.
    inside_parts = []
    last_item = None
    for mixed_item in self.mixed_content:
      if isinstance(mixed_item, Node):
        inside_parts.append(mixed_item.FormatXml(indent + u' ', one_line))
        if not one_line:
          inside_parts.append(u'\n')
      else:
        message = mixed_item
        # If this is the first item and it starts with whitespace, we add
        # the ''' delimiter.
        if not last_item and message.lstrip() != message:
          message = u"'''" + message
        inside_parts.append(util.EncodeCdata(message))
      last_item = mixed_item
    # If there are only child nodes and no cdata, there will be a spurious
    # trailing \n
    if len(inside_parts) and inside_parts[-1] == '\n':
      inside_parts = inside_parts[:-1]
    # If the last item is a string (not a node) and ends with whitespace,
    # we need to add the ''' delimiter.
    if (isinstance(last_item, types.StringTypes) and
        last_item.rstrip() != last_item):
      inside_parts[-1] = inside_parts[-1] + u"'''"
    return u''.join(inside_parts)

  def RunGatherers(self, recursive=0, debug=False):
    '''Runs all gatherers on this object, which may add to the data stored
    by the object.  If 'recursive' is true, will call RunGatherers() recursively
    on all child nodes first.  If 'debug' is True, will print out information
    as it is running each nodes' gatherers.

    Gatherers for <translations> child nodes will always be run after all other
    child nodes have been gathered.
    '''
    if recursive:
      process_last = []
      for child in self.children:
        if child.name == 'translations':
          process_last.append(child)
        else:
          child.RunGatherers(recursive=recursive, debug=debug)
      for child in process_last:
        child.RunGatherers(recursive=recursive, debug=debug)

  def ItemFormatter(self, type):
    '''Returns an instance of the item formatter for this object of the
    specified type, or None if not supported.

    Args:
      type: 'rc-header'

    Return:
      (object RcHeaderItemFormatter)
    '''
    # The base class only knows how to format itself as XML.
    if type == 'xml':
      return self
    else:
      return None

  def SatisfiesOutputCondition(self):
    '''Returns true if this node is either not a child of an <if> element
    or if it is a child of an <if> element and the conditions for it being
    output are satisfied.

    Used to determine whether to return item formatters for formats that
    obey conditional output of resources (e.g. the RC formatters).
    '''
    # Imported here to avoid a circular import at module load time.
    from grit.node import misc
    if not self.parent or not isinstance(self.parent, misc.IfNode):
      return True
    else:
      return self.parent.IsConditionSatisfied()

  def _IsValidChild(self, child):
    '''Returns true if 'child' is a valid child of this node.
    Overridden by subclasses.'''
    return False

  def _IsValidAttribute(self, name, value):
    '''Returns true if 'name' is the name of a valid attribute of this element
    and 'value' is a valid value for that attribute.  Overriden by
    subclasses unless they have only mandatory attributes.'''
    return (name in self.MandatoryAttributes() or
            name in self.DefaultAttributes())

  def _ContentType(self):
    '''Returns the type of content this element can have.  Overridden by
    subclasses.  The content type can be one of the _CONTENT_TYPE_XXX constants
    above.'''
    return self._CONTENT_TYPE_NONE

  def MandatoryAttributes(self):
    '''Returns a list of attribute names that are mandatory (non-optional)
    on the current element.  One can specify a list of
    "mutually exclusive mandatory" attributes by specifying them as one
    element in the list, separated by a "|" character.
    '''
    return []

  def DefaultAttributes(self):
    '''Returns a dictionary of attribute names that have defaults, mapped to
    the default value.  Overridden by subclasses.'''
    return {}

  def GetCliques(self):
    '''Returns all MessageClique objects belonging to this node.  Overridden
    by subclasses.

    Return:
      [clique1, clique2] or []
    '''
    return []

  def ToRealPath(self, path_from_basedir):
    '''Returns a real path (which can be absolute or relative to the current
    working directory), given a path that is relative to the base directory
    set for the GRIT input file.

    Args:
      path_from_basedir: '..'

    Return:
      'resource'
    '''
    return util.normpath(os.path.join(self.GetRoot().GetBaseDir(),
                                      path_from_basedir))

  def FilenameToOpen(self):
    '''Returns a path, either absolute or relative to the current working
    directory, that points to the file the node refers to.  This is only valid
    for nodes that have a 'file' or 'path' attribute.  Note that the attribute
    is a path to the file relative to the 'base-dir' of the .grd file, whereas
    this function returns a path that can be used to open the file.'''
    # Prefer the 'file' attribute, falling back to 'path'.
    file_attribute = 'file'
    if not file_attribute in self.attrs:
      file_attribute = 'path'
    return self.ToRealPath(self.attrs[file_attribute])

  def UberClique(self):
    '''Returns the uberclique that should be used for messages originating in
    a given node.  If the node itself has its uberclique set, that is what we
    use, otherwise we search upwards until we find one.  If we do not find one
    even at the root node, we set the root node's uberclique to a new
    uberclique instance.
    '''
    node = self
    while not node.uberclique and node.parent:
      node = node.parent
    if not node.uberclique:
      # Lazily created on the root so the whole tree shares one instance.
      node.uberclique = clique.UberClique()
    return node.uberclique

  def IsTranslateable(self):
    '''Returns False if the node has contents that should not be translated,
    otherwise returns True (even if the node has no contents).
    '''
    if not 'translateable' in self.attrs:
      return True
    else:
      return self.attrs['translateable'] == 'true'

  def GetNodeById(self, id):
    '''Returns the node in the subtree parented by this node that has a 'name'
    attribute matching 'id'.  Returns None if no such node is found.
    '''
    for node in self:
      if 'name' in node.attrs and node.attrs['name'] == id:
        return node
    return None

  def GetTextualIds(self):
    '''Returns the textual ids of this node, if it has some.
    Otherwise it just returns None.
    '''
    if 'name' in self.attrs:
      return [self.attrs['name']]
    return None

  def EvaluateCondition(self, expr):
    '''Returns true if and only if the Python expression 'expr' evaluates
    to true.

    The expression is given a few local variables:
      - 'lang' is the language currently being output
      - 'defs' is a map of C preprocessor-style define names to their values
      - 'pp_ifdef(define)' which behaves just like the C preprocessors #ifdef,
        i.e. it is shorthand for "define in defs"
      - 'pp_if(define)' which behaves just like the C preprocessor's #if, i.e.
        it is shorthand for "define in defs and defs[define]".

    NOTE(review): this uses eval() on the expression string; expressions must
    come only from trusted .grd input.
    '''
    root = self.GetRoot()
    lang = ''
    defs = {}
    def pp_ifdef(define):
      return define in defs
    def pp_if(define):
      return define in defs and defs[define]
    if hasattr(root, 'output_language'):
      lang = root.output_language
    if hasattr(root, 'defines'):
      defs = root.defines
    return eval(expr, {},
                {'lang' : lang,
                 'defs' : defs,
                 'pp_ifdef' : pp_ifdef,
                 'pp_if' : pp_if})

  def OnlyTheseTranslations(self, languages):
    '''Turns off loading of translations for languages not in the provided list.

    Attrs:
      languages: ['fr', 'zh_cn']
    '''
    for node in self:
      if (hasattr(node, 'IsTranslation') and
          node.IsTranslation() and
          node.GetLang() not in languages):
        node.DisableLoading()

  def PseudoIsAllowed(self):
    '''Returns true if this node is allowed to use pseudo-translations.  This
    is true by default, unless this node is within a <release> node that has
    the allow_pseudo attribute set to false.
    '''
    # Walk up the ancestor chain; the nearest 'allow_pseudo' wins.
    p = self.parent
    while p:
      if 'allow_pseudo' in p.attrs:
        return (p.attrs['allow_pseudo'].lower() == 'true')
      p = p.parent
    return True

  def ShouldFallbackToEnglish(self):
    '''Returns true iff this node should fall back to English when
    pseudotranslations are disabled and no translation is available for a
    given message.
    '''
    # Walk up the ancestor chain; the nearest 'fallback_to_english' wins.
    p = self.parent
    while p:
      if 'fallback_to_english' in p.attrs:
        return (p.attrs['fallback_to_english'].lower() == 'true')
      p = p.parent
    return False
class ContentNode(Node):
  '''Convenience baseclass for nodes that can have content.'''
  def _ContentType(self):
    # Content nodes accept intermingled CDATA and child elements.
    return self._CONTENT_TYPE_MIXED
| 36.087619
| 81
| 0.649425
|
4a11c53e53027d30026380006f70834ae292e1e3
| 245
|
py
|
Python
|
map_matching/map_match.py
|
ianberg-volpe/map_matching
|
5f3db9f9adae0320dfbb7b39b8f0f9dc0ebe82a5
|
[
"Apache-2.0"
] | 30
|
2017-05-15T01:43:36.000Z
|
2022-03-30T14:11:08.000Z
|
map_matching/map_match.py
|
ianberg-volpe/map_matching
|
5f3db9f9adae0320dfbb7b39b8f0f9dc0ebe82a5
|
[
"Apache-2.0"
] | 7
|
2018-01-06T08:11:57.000Z
|
2020-08-26T04:24:52.000Z
|
map_matching/map_match.py
|
ianberg-volpe/map_matching
|
5f3db9f9adae0320dfbb7b39b8f0f9dc0ebe82a5
|
[
"Apache-2.0"
] | 7
|
2018-01-14T21:28:15.000Z
|
2022-03-22T07:11:29.000Z
|
from find_stops import find_stops
from finding_network_links import find_network_links
from finding_routes import find_route
def map_match(trip, network):
    """Run the three-stage map-matching pipeline for a single trip.

    Stages run in order: stop detection, candidate network-link
    identification, then route finding. No values are returned, so the
    stages presumably annotate `trip` in place — confirm in find_stops /
    find_network_links / find_route.
    """
    find_stops(trip)
    find_network_links(trip, network)
    find_route(trip, network)
| 27.222222
| 52
| 0.816327
|
4a11c7ec3999e399036183173f4825d6496e7188
| 565
|
py
|
Python
|
molsysmt/item/mdtraj_Trajectory/to_mdtraj_Topology.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
molsysmt/item/mdtraj_Trajectory/to_mdtraj_Topology.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
molsysmt/item/mdtraj_Trajectory/to_mdtraj_Topology.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
from molsysmt._private.digestion import digest_item, digest_atom_indices, digest_structure_indices
def to_mdtraj_Topology(item, atom_indices='all', check=True):
    """Extract an mdtraj.Topology from an mdtraj.Trajectory.

    Args:
        item: mdtraj.Trajectory to take the topology from.
        atom_indices: atoms to keep in the extracted topology, or 'all'.
        check: when True, validate/digest the inputs first.

    Returns:
        mdtraj.Topology restricted to atom_indices.
    """
    if check:
        digest_item(item, 'mdtraj.Trajectory')
        atom_indices = digest_atom_indices(atom_indices)
        # BUG FIX: the original also called
        # digest_structure_indices(structure_indices), but structure_indices
        # is not a parameter of this function, so every check=True call
        # raised NameError. A topology has no frame dimension, so the call
        # is simply dropped.
    from ..mdtraj_Topology import extract as extract_mdtraj_Topology
    tmp_item = item.topology
    tmp_item = extract_mdtraj_Topology(tmp_item, atom_indices=atom_indices, check=False)
    return tmp_item
| 31.388889
| 98
| 0.780531
|
4a11ca051e41910990713008f3efa645e7405a08
| 7,472
|
py
|
Python
|
esprima/tokenizer.py
|
SeaHOH/esprima-python
|
a72db2895a991b88141ee2ee4870f9d41d344220
|
[
"BSD-2-Clause"
] | 174
|
2017-07-18T12:02:37.000Z
|
2022-03-10T21:04:41.000Z
|
esprima/tokenizer.py
|
SeaHOH/esprima-python
|
a72db2895a991b88141ee2ee4870f9d41d344220
|
[
"BSD-2-Clause"
] | 17
|
2017-08-15T14:50:34.000Z
|
2022-02-05T22:51:27.000Z
|
esprima/tokenizer.py
|
SeaHOH/esprima-python
|
a72db2895a991b88141ee2ee4870f9d41d344220
|
[
"BSD-2-Clause"
] | 43
|
2017-08-17T06:15:14.000Z
|
2022-02-05T23:02:27.000Z
|
# -*- coding: utf-8 -*-
# Copyright JS Foundation and other contributors, https://js.foundation/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, unicode_literals
from collections import deque
from .objects import Object
from .error_handler import ErrorHandler
from .scanner import Scanner, SourceLocation, Position, RegExp
from .token import Token, TokenName
class BufferEntry(Object):
    """Token/comment record buffered and returned by the Tokenizer."""
    def __init__(self, type, value, regex=None, range=None, loc=None):
        self.type = type    # token type name, e.g. 'Punctuator', 'LineComment'
        self.value = value  # the raw source text of the token/comment
        self.regex = regex  # RegExp(pattern, flags) for regex literals only
        self.range = range  # [start, end] offsets when range tracking is on
        self.loc = loc      # SourceLocation when location tracking is on
class Reader(object):
    """Tracks recently seen token values to decide whether a '/' begins a
    regular expression literal or is a division operator."""
    def __init__(self):
        # History of significant token values; None for non-keyword,
        # non-punctuator tokens.
        self.values = []
        # Positions in `values` of the most recent '{' and '(' tokens.
        self.curly = self.paren = -1
    # A function following one of those tokens is an expression.
    def beforeFunctionExpression(self, t):
        return t in (
            '(', '{', '[', 'in', 'typeof', 'instanceof', 'new',
            'return', 'case', 'delete', 'throw', 'void',
            # assignment operators
            '=', '+=', '-=', '*=', '**=', '/=', '%=', '<<=', '>>=', '>>>=',
            '&=', '|=', '^=', ',',
            # binary/unary operators
            '+', '-', '*', '**', '/', '%', '++', '--', '<<', '>>', '>>>', '&',
            '|', '^', '!', '~', '&&', '||', '?', ':', '===', '==', '>=',
            '<=', '<', '>', '!=', '!=='
        )
    # Determine if forward slash (/) is an operator or part of a regular expression
    # https://github.com/mozilla/sweet.js/wiki/design
    def isRegexStart(self):
        if not self.values:
            return True
        previous = self.values[-1]
        # A '/' after an identifier/literal (recorded as None) is division.
        regex = previous is not None
        if previous in (
            'this',
            ']',
        ):
            regex = False
        elif previous == ')':
            # ')/...' can start a regex only when the parenthesized part is
            # the condition of one of these statement keywords.
            keyword = self.values[self.paren - 1]
            regex = keyword in ('if', 'while', 'for', 'with')
        elif previous == '}':
            # Dividing a function by anything makes little sense,
            # but we have to check for that.
            regex = True
            if len(self.values) >= 3 and self.values[self.curly - 3] == 'function':
                # Anonymous function, e.g. function(){} /42
                check = self.values[self.curly - 4]
                regex = not self.beforeFunctionExpression(check) if check else False
            elif len(self.values) >= 4 and self.values[self.curly - 4] == 'function':
                # Named function, e.g. function f(){} /42/
                check = self.values[self.curly - 5]
                regex = not self.beforeFunctionExpression(check) if check else True
        return regex
    def append(self, token):
        # Record keyword/punctuator values (remembering where '{' and '('
        # occurred); all other tokens are recorded as None.
        if token.type in (Token.Punctuator, Token.Keyword):
            if token.value == '{':
                self.curly = len(self.values)
            elif token.value == '(':
                self.paren = len(self.values)
            self.values.append(token.value)
        else:
            self.values.append(None)
class Config(Object):
    """Tokenizer options: tolerant errors, comment tracking, range/loc
    tracking, plus any extra options passed through verbatim."""
    def __init__(self, tolerant=None, comment=None, range=None, loc=None, **options):
        self.tolerant = tolerant
        self.comment = comment
        self.range = range
        self.loc = loc
        # Preserve unrecognized options as attributes without validation.
        for k, v in options.items():
            setattr(self, k, v)
class Tokenizer(object):
    """Buffered token producer built on Scanner; emits BufferEntry items,
    including comment entries when comment tracking is enabled."""
    def __init__(self, code, options):
        self.config = Config(**options)
        self.errorHandler = ErrorHandler()
        self.errorHandler.tolerant = self.config.tolerant
        self.scanner = Scanner(code, self.errorHandler)
        self.scanner.trackComment = self.config.comment
        self.trackRange = self.config.range
        self.trackLoc = self.config.loc
        self.buffer = deque()  # FIFO of BufferEntry not yet handed out
        self.reader = Reader()
    def errors(self):
        # Errors collected by the handler (populated in tolerant mode).
        return self.errorHandler.errors
    def getNextToken(self):
        """Return the next BufferEntry, or None at end of input."""
        if not self.buffer:
            # Skip (and optionally buffer) any comments before the token.
            comments = self.scanner.scanComments()
            if self.scanner.trackComment:
                for e in comments:
                    value = self.scanner.source[e.slice[0]:e.slice[1]]
                    comment = BufferEntry(
                        type='BlockComment' if e.multiLine else 'LineComment',
                        value=value
                    )
                    if self.trackRange:
                        comment.range = e.range
                    if self.trackLoc:
                        comment.loc = e.loc
                    self.buffer.append(comment)
            if not self.scanner.eof():
                if self.trackLoc:
                    loc = SourceLocation(
                        start=Position(
                            line=self.scanner.lineNumber,
                            column=self.scanner.index - self.scanner.lineStart
                        ),
                        end=Position(),
                    )
                # A leading '/' is ambiguous (division vs. regex literal);
                # the Reader decides from the preceding token context.
                maybeRegex = self.scanner.source[self.scanner.index] == '/' and self.reader.isRegexStart()
                if maybeRegex:
                    state = self.scanner.saveState()
                    try:
                        token = self.scanner.scanRegExp()
                    except Exception:
                        # Not a valid regex after all: rewind and lex normally.
                        self.scanner.restoreState(state)
                        token = self.scanner.lex()
                else:
                    token = self.scanner.lex()
                self.reader.append(token)
                entry = BufferEntry(
                    type=TokenName[token.type],
                    value=self.scanner.source[token.start:token.end]
                )
                if self.trackRange:
                    entry.range = [token.start, token.end]
                if self.trackLoc:
                    loc.end = Position(
                        line=self.scanner.lineNumber,
                        column=self.scanner.index - self.scanner.lineStart
                    )
                    entry.loc = loc
                if token.type is Token.RegularExpression:
                    entry.regex = RegExp(
                        pattern=token.pattern,
                        flags=token.flags,
                    )
                self.buffer.append(entry)
        return self.buffer.popleft() if self.buffer else None
| 38.515464
| 106
| 0.54644
|
4a11cab9f2dbb435f508413de0aad50366fc5dfc
| 3,858
|
py
|
Python
|
histogram/scripts/3_estimateResults.py
|
KastnerRG/spector
|
a24a9cc1e25128430adbea0481b5d223e5cf649c
|
[
"BSD-3-Clause"
] | 45
|
2017-03-31T07:28:08.000Z
|
2022-02-08T20:28:33.000Z
|
histogram/scripts/3_estimateResults.py
|
kongqi-best/spector
|
a24a9cc1e25128430adbea0481b5d223e5cf649c
|
[
"BSD-3-Clause"
] | 4
|
2017-05-17T19:35:51.000Z
|
2018-05-23T18:09:10.000Z
|
histogram/scripts/3_estimateResults.py
|
kongqi-best/spector
|
a24a9cc1e25128430adbea0481b5d223e5cf649c
|
[
"BSD-3-Clause"
] | 21
|
2016-11-25T02:40:00.000Z
|
2021-09-06T09:29:37.000Z
|
#!/usr/bin/python
# ----------------------------------------------------------------------
# Copyright (c) 2016, The Regents of the University of California All
# rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of The Regents of the University of California
# nor the names of its contributors may be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL REGENTS OF THE
# UNIVERSITY OF CALIFORNIA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
# ----------------------------------------------------------------------
# Filename: 3_estimateResults.py
# Version: 1.0
# Description: Python script to run AOC estimations on all the designs.
# Author: Quentin Gautier
import subprocess
import os
import multiprocessing as mp
import random
import sys
logName = "estimateResults.log" # Log of processed folders (also enables resume)
rootDir = "../benchmarks" # Directory containing all the designs
dir_basename = "hist_design" # Prefix of the per-design folder names
cl_basename = "histogram" # Basename of the OpenCL source file (without .cl)
def launchCommand(path):
    """Run an AOC estimation compile for one design folder, clean up the
    generated artifacts, and record the folder in the shared log file.

    Args:
        path (str): design folder containing <cl_basename>.cl
    """
    # Compile only (-c) with report generation (-g); aoc writes its output
    # into a subfolder named after the kernel.
    command = "aoc " + cl_basename + ".cl -c -g"
    subprocess.call(command, cwd=path, shell=True)
    pathDelete = os.path.join(path, cl_basename)
    # Delete everything aoc generated except the .log estimation report.
    # NOTE(review): shell=True with a composed command is intentional here
    # (uses shell globbing and `find`); paths are script-controlled.
    commandDelete = "rm -R -- */ ; find ! \( -name " + cl_basename + ".log \) -type f -exec rm {} + ; rm ../" + cl_basename + ".aoco"
    subprocess.call(commandDelete, cwd=pathDelete, shell=True)
    # Append to the resume log; the context manager guarantees the handle
    # is flushed and closed (the original leaked it).
    with open(logName, 'at') as outFile:
        outFile.write(path + '\n')
def main():
    """Estimate all designs under rootDir, skipping folders already listed
    in the resume log, using a multiprocessing pool."""
    # Number of worker processes (0 or absent => use all available cores).
    num_processes = 0
    if len(sys.argv) >= 2:
        num_processes = int(sys.argv[1])
    if num_processes > 0:
        print("Using " + str(num_processes) + " processes")
    else:
        print("Using all available processes")
    # Candidate design folders.
    folders = [os.path.join(rootDir, d) for d in os.listdir(rootDir) if d.startswith(dir_basename)]
    # Folders already processed by a previous (possibly interrupted) run.
    compiled = set()
    if os.path.isfile(logName):
        print("Reading processed folders from log file")
        with open(logName, 'rt') as logFile:
            for line in logFile:
                compiled.add(line.split('\n')[0])
    # Filter instead of list.remove(): the original raised ValueError when a
    # logged folder no longer exists on disk.
    folders = [f for f in folders if f not in compiled]
    # Start processes
    print("Processing " + str(len(folders)) + " folders")
    pool = mp.Pool(num_processes) if num_processes > 0 else mp.Pool()
    try:
        pool.map(launchCommand, folders)
    finally:
        # Release worker processes (the original never closed the pool).
        pool.close()
        pool.join()
# Script entry point: run the estimations when invoked directly.
if __name__=="__main__":
    main()
| 31.884298
| 133
| 0.659668
|
4a11ceb0d4e69ee0b072bbc770c7452057d25d43
| 983
|
py
|
Python
|
dfas/migrations/0001_initial.py
|
sumeyyekilic/FoursquareAPI_DjangoApp
|
91e9376cd6a47bcf090e27181df562a38883e93a
|
[
"PostgreSQL"
] | 1
|
2020-02-23T20:11:11.000Z
|
2020-02-23T20:11:11.000Z
|
dfas/migrations/0001_initial.py
|
sumeyyekilic/FoursquareAPI_DjangoApp
|
91e9376cd6a47bcf090e27181df562a38883e93a
|
[
"PostgreSQL"
] | 10
|
2019-12-08T19:27:10.000Z
|
2021-06-10T19:31:02.000Z
|
dfas/migrations/0001_initial.py
|
sumeyyekilic/FoursquareAPI_DjangoApp
|
91e9376cd6a47bcf090e27181df562a38883e93a
|
[
"PostgreSQL"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-11-29 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration of the app: creates the QueryModel table that stores
    # a user's Foursquare search parameters (location, venue type, limit).
    initial = True
    dependencies = [
        # The concrete user model must exist before the ForeignKey below.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='QueryModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.CharField(max_length=150, verbose_name='Konum Bilgisi')),
                ('venue', models.CharField(max_length=150, verbose_name='Mekan Türü')),
                ('limit', models.CharField(max_length=40, verbose_name='Listelenecek Limit.')),
                # Owner of the query; nullable so queries can exist without a user.
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 35.107143
| 141
| 0.645982
|
4a11ceb966bca89316308899f5e637f080d2b1ca
| 4,057
|
py
|
Python
|
src/biokbase/narrative/handlers/authhandlers.py
|
pranjan77/narrative
|
5714d199c7ca3d65cbfc1110b3d0641e250e62f9
|
[
"MIT"
] | null | null | null |
src/biokbase/narrative/handlers/authhandlers.py
|
pranjan77/narrative
|
5714d199c7ca3d65cbfc1110b3d0641e250e62f9
|
[
"MIT"
] | 220
|
2020-07-13T11:13:03.000Z
|
2022-03-28T11:01:18.000Z
|
src/biokbase/narrative/handlers/authhandlers.py
|
pranjan77/narrative
|
5714d199c7ca3d65cbfc1110b3d0641e250e62f9
|
[
"MIT"
] | null | null | null |
from tornado.escape import url_escape
from notebook.base.handlers import IPythonHandler
from traitlets.config import Application
from notebook.auth.login import LoginHandler
from notebook.auth.logout import LogoutHandler
from biokbase.narrative.common.kblogging import (
get_logger, log_event
)
from biokbase.narrative.common.util import kbase_env
import tornado.log
import os
import urllib.parse
import logging
from biokbase.auth import (
get_user_info,
init_session_env,
set_environ_token
)
"""
KBase handlers for authentication in the Jupyter notebook.
"""
__author__ = 'Bill Riehl <wjriehl@lbl.gov>'
# Set logging up globally.
g_log = get_logger("biokbase.narrative")
app_log = tornado.log.app_log # alias
if Application.initialized:
app_log = Application.instance().log
if os.environ.get('KBASE_DEBUG', False):
app_log.setLevel(logging.DEBUG)
auth_cookie_name = "kbase_session"
class KBaseLoginHandler(LoginHandler):
    """KBase-specific login handler.
    This should get the cookie and put it where it belongs.
    A (not-so-distant) future version will return a session token.
    """
    def get(self):
        """
        Initializes the KBase session from the cookie passed into it.

        Reads the kbase_session cookie, validates the token via
        get_user_info, initializes the session environment, then redirects
        to the `next` argument (or base_url) when a user is established.
        """
        # cookie_regex = re.compile('([^ =|]+)=([^\|]*)')
        client_ip = self.request.remote_ip
        http_headers = self.request.headers
        ua = http_headers.get('User-Agent', 'unknown')
        # save client ip in environ for later logging
        kbase_env.client_ip = client_ip
        auth_cookie = self.cookies.get(auth_cookie_name, None)
        if auth_cookie:
            # The cookie value is URL-encoded; decode before validating.
            token = urllib.parse.unquote(auth_cookie.value)
            auth_info = dict()
            try:
                auth_info = get_user_info(token)
            except Exception as e:
                # Log then re-raise: an invalid token must not be ignored.
                app_log.error("Unable to get user information from authentication token!")
                raise
            # re-enable if token logging info is needed.
            # if app_log.isEnabledFor(logging.DEBUG):
            #     app_log.debug("kbase cookie = {}".format(cookie_val))
            #     app_log.debug("KBaseLoginHandler.get: user_id={uid} token={tok}"
            #                   .format(uid=auth_info.get('user', 'none'),
            #                           tok=token))
            init_session_env(auth_info, client_ip)
            self.current_user = kbase_env.user
            log_event(g_log, 'session_start', {'user': kbase_env.user, 'user_agent': ua})
        app_log.info("KBaseLoginHandler.get(): user={}".format(kbase_env.user))
        if self.current_user:
            self.redirect(self.get_argument('next', default=self.base_url))
        else:
            self.write('This is a test?')
    def post(self):
        # No-op: login is established via the cookie on GET.
        pass
    @classmethod
    def get_user(cls, handler):
        """Resolve the current user id, falling back to 'anonymous'."""
        user_id = kbase_env.user
        if user_id == '':
            user_id = 'anonymous'
        if user_id is None:
            # NOTE(review): `handler.login_available` is truth-tested here
            # although it is defined below as a classmethod — confirm the
            # framework resolves it to a value at this point.
            handler.clear_login_cookie()
            if not handler.login_available:
                user_id = 'anonymous'
        return user_id
    @classmethod
    def password_from_settings(cls, settings):
        # No password-based login: always empty.
        return ''
    @classmethod
    def login_available(cls, settings):
        """Whether this LoginHandler is needed - and therefore whether the login page should be displayed."""
        return True
class KBaseLogoutHandler(LogoutHandler):
    """Logout handler: resets the KBase session environment and renders
    the logout page."""
    def get(self):
        client_ip = self.request.remote_ip
        http_headers = self.request.headers
        # Capture the user before clearing the environment, for logging.
        user = kbase_env.user
        ua = http_headers.get('User-Agent', 'unknown')
        # Reset every piece of session state to its anonymous default.
        kbase_env.auth_token = 'none'
        kbase_env.narrative = 'none'
        kbase_env.session = 'none'
        kbase_env.user = 'anonymous'
        kbase_env.workspace = 'none'
        set_environ_token(None)
        app_log.info('Successfully logged out')
        log_event(g_log, 'session_close', {'user': user, 'user_agent': ua})
        self.write(self.render_template('logout.html', message={'info': 'Successfully logged out'}))
| 31.695313
| 109
| 0.640868
|
4a11cfef817bb462c45702f37739585785621eb4
| 10,574
|
py
|
Python
|
vissl/utils/extract_features_utils.py
|
dmitryvinn/vissl-1
|
1d8befefbbeabf9fec6a34b4581080d4221fde9d
|
[
"MIT"
] | null | null | null |
vissl/utils/extract_features_utils.py
|
dmitryvinn/vissl-1
|
1d8befefbbeabf9fec6a34b4581080d4221fde9d
|
[
"MIT"
] | null | null | null |
vissl/utils/extract_features_utils.py
|
dmitryvinn/vissl-1
|
1d8befefbbeabf9fec6a34b4581080d4221fde9d
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
import re
from typing import List, NamedTuple
import numpy as np
from iopath.common.file_io import g_pathmgr
from vissl.utils.io import load_file, makedir
class ExtractedFeaturesShardPaths(NamedTuple):
    """
    The file paths needed to load one shard (one GPU's dump) of the
    extracted features: features, targets and sample indices.
    """
    # Path to the "*_features.npy" file of the shard.
    feature_file: str
    # Path to the "*_targets.npy" file of the shard.
    targets_file: str
    # Path to the "*_inds.npy" file of the shard.
    indices_file: str
class ExtractedFeatures(NamedTuple):
    """
    The content of one shard of extracted features: the feature array, the
    associated targets, and the dataset indices of the samples.
    """
    features: np.ndarray
    targets: np.ndarray
    indices: np.ndarray
    @property
    def num_samples(self) -> int:
        """Number of samples in this shard (length of the first axis)."""
        return self.features.shape[0]
class ExtractedFeaturesLoader:
    """
    Utility class to deal with features extracted with extract_engine
    For multi-gpu feature extraction, each GPU saves features corresponding to its
    share of the data. This class offers an API to abstract away the loading of
    these extracted features.
    """
    @staticmethod
    def get_shard_file_names(
        input_dir: str,
        split: str,
        layer: str,
        sorted: bool = True,
    ) -> List[ExtractedFeaturesShardPaths]:
        """
        Get the list of files needed to load the extracted features

        Matches "<prefix>_<split>_<layer>_features.npy" files in input_dir
        and returns, per prefix (one per GPU rank), the paths to the
        features/targets/indices files.
        """
        # List all the files that are containing the features for a given
        # dataset split and a given layer
        feature_regex = re.compile(rf"(.*)_{split}_{layer}_features.npy")
        prefixes = []
        for file_path in g_pathmgr.ls(input_dir):
            match = feature_regex.match(file_path)
            if match is not None:
                prefixes.append(match.group(1))
        # Sort the shards by file name if required: it might be useful
        # if the algorithm that uses the shards is influenced by ordering
        if sorted:
            prefixes.sort()
        # Yield all the files needed to merge the features dumped on
        # the different GPUs
        shard_paths = []
        for prefix in prefixes:
            feat_file = os.path.join(
                input_dir, f"{prefix}_{split}_{layer}_features.npy"
            )
            targets_file = os.path.join(
                input_dir, f"{prefix}_{split}_{layer}_targets.npy"
            )
            indices_file = os.path.join(input_dir, f"{prefix}_{split}_{layer}_inds.npy")
            shard_paths.append(
                ExtractedFeaturesShardPaths(
                    feature_file=feat_file,
                    targets_file=targets_file,
                    indices_file=indices_file,
                )
            )
        return shard_paths
    @classmethod
    def load_feature_shard(
        cls, paths: ExtractedFeaturesShardPaths
    ) -> ExtractedFeatures:
        """
        Load a shard of the extracted features and returns its content:
        features, targets and indices.
        """
        logging.info(
            f"Loading:\n{paths.feature_file}\n{paths.targets_file}\n{paths.indices_file}"
        )
        return ExtractedFeatures(
            features=load_file(paths.feature_file),
            targets=load_file(paths.targets_file),
            indices=load_file(paths.indices_file),
        )
    @classmethod
    def load_features(
        cls, input_dir: str, split: str, layer: str, flatten_features: bool = False
    ):
        """
        Merge the features across all GPUs to get the features for the full data.
        Args:
            input_dir (str): input path where the features are dumped
            split (str): whether the features are train or test data features
            layer (str): the features correspond to what layer of the model
            flatten_features (bool): whether or not to flatten the features
        Returns:
            output (Dict): contains features, targets, inds as the keys
        """
        logging.info(f"Merging features: {split} {layer}")
        logging.info(f"input_dir: {input_dir}")
        # Reassemble each feature shard (dumped by a given rank)
        output_feats, output_targets = {}, {}
        shard_paths = cls.get_shard_file_names(input_dir, split=split, layer=layer)
        if not shard_paths:
            raise ValueError(f"No features found for {split} {layer}")
        for shard_path in shard_paths:
            shard_content = cls.load_feature_shard(shard_path)
            # Keying by sample index de-duplicates samples that appear in
            # several shards (the last shard read wins).
            for idx in range(shard_content.num_samples):
                index = shard_content.indices[idx]
                output_feats[index] = shard_content.features[idx]
                output_targets[index] = shard_content.targets[idx]
        # Sort the entries by sample index
        indices = np.array(sorted(output_targets.keys()))
        features = np.array([output_feats[i] for i in indices])
        targets = np.array([output_targets[i] for i in indices])
        # Return the outputs
        N = len(indices)
        if flatten_features:
            features = features.reshape(N, -1)
        output = {
            "features": features,
            "targets": targets,
            "inds": indices,
        }
        logging.info(f"Features: {output['features'].shape}")
        logging.info(f"Targets: {output['targets'].shape}")
        logging.info(f"Indices: {output['inds'].shape}")
        return output
    @classmethod
    def map_features_to_img_filepath(
        cls, image_paths: List[str], input_dir: str, split: str, layer: str
    ):
        """
        Map the features across all GPUs to the respective filenames.
        Args:
            image_paths (List[str]): list of image paths. Obtained by dataset.get_image_paths()
            input_dir (str): input path where the features are dumped
            split (str): whether the features are train or test data features
            layer (str): the features correspond to what layer of the model
        """
        logging.info(f"Merging features: {split} {layer}")
        output_dir = f"{input_dir}/features_to_image/{split}/{layer}"
        makedir(output_dir)
        logging.info(f"Saving the mapped features to dir: {output_dir} ...")
        shard_paths = cls.get_shard_file_names(input_dir, split=split, layer=layer)
        if not shard_paths:
            raise ValueError(f"No features found for {split} {layer}")
        for shard_path in shard_paths:
            shard_content = cls.load_feature_shard(shard_path)
            for idx in range(shard_content.num_samples):
                img_index = shard_content.indices[idx]
                img_feat = shard_content.features[idx]
                # One output file per image, named after the image file
                # (without extension).
                img_filename = os.path.splitext(
                    os.path.basename(image_paths[img_index])
                )[0]
                out_feat_filename = os.path.join(output_dir, img_filename + ".npy")
                with g_pathmgr.open(out_feat_filename, "wb") as fopen:
                    np.save(fopen, np.expand_dims(img_feat, axis=0))
    @classmethod
    def sample_features(
        cls,
        input_dir: str,
        split: str,
        layer: str,
        num_samples: int,
        seed: int,
        flatten_features: bool = False,
    ):
        """
        This function sample N features across all GPUs in an optimized way, using
        reservoir sampling, to avoid loading all features in memory.
        This is especially useful if the number of feature is huge, cannot hold into
        memory, and we need a small number of them to do an estimation (example of
        k-means on a 1B dataset: we can use a few random million samples to compute
        relatively good centroids)
        Args:
            input_dir (str): input path where the features are dumped
            split (str): whether the features are train or test data features
            layer (str): the features correspond to what layer of the model
            num_samples (int): how many features to sample, if negative load everything
            seed (int): the random seed used for sampling
            flatten_features (bool): whether or not to flatten the features
        Returns:
            output (Dict): contains features, targets, inds as the keys
        """
        if num_samples < 0:
            return cls.load_features(
                input_dir=input_dir,
                split=split,
                layer=layer,
                flatten_features=flatten_features,
            )
        features = []
        targets = []
        indices = []
        # Find the shards containing the features to samples
        count = 0
        shard_paths = cls.get_shard_file_names(input_dir, split=split, layer=layer)
        if not shard_paths:
            raise ValueError(f"No features found for {split} {layer}")
        # Use reservoir sampling to sample some features
        rng = np.random.default_rng(seed)
        for shard_path in shard_paths:
            shard_content = cls.load_feature_shard(shard_path)
            for idx in range(shard_content.num_samples):
                count += 1
                # Fill the reservoir of samples
                if len(features) < num_samples:
                    features.append(shard_content.features[idx])
                    targets.append(shard_content.targets[idx])
                    indices.append(shard_content.indices[idx])
                # Else implement reservoir sampling substitution
                else:
                    pos = rng.integers(low=0, high=count)
                    if pos < num_samples:
                        features[pos] = shard_content.features[idx]
                        targets[pos] = shard_content.targets[idx]
                        indices[pos] = shard_content.indices[idx]
        # Cast the output to numpy arrays
        features = np.stack(features)
        targets = np.stack(targets)
        indices = np.stack(indices)
        # Sort the entries by sample index
        sorted_indices = np.argsort(indices)
        indices = indices[sorted_indices]
        features = features[sorted_indices]
        targets = targets[sorted_indices]
        # Return the output
        if flatten_features:
            N = len(indices)
            features = features.reshape(N, -1)
        output = {"features": features, "targets": targets, "inds": indices}
        logging.info(f"Features: {output['features'].shape}")
        logging.info(f"Targets: {output['targets'].shape}")
        logging.info(f"Indices: {output['inds'].shape}")
        return output
| 36.843206
| 95
| 0.610365
|
4a11d09f6ffe7aa94ba4032abb78b66f619c3da4
| 10,235
|
py
|
Python
|
crf-seq/sets/sets/4/seq_detect_1i.py
|
roma-patel/lstm-crf
|
25012b1218b60090f467fe5ed5a15d7a28b3134c
|
[
"Apache-2.0"
] | 1
|
2020-02-24T06:25:17.000Z
|
2020-02-24T06:25:17.000Z
|
crf-seq/sets/sets/4/seq_detect_1i.py
|
roma-patel/lstm-crf
|
25012b1218b60090f467fe5ed5a15d7a28b3134c
|
[
"Apache-2.0"
] | null | null | null |
crf-seq/sets/sets/4/seq_detect_1i.py
|
roma-patel/lstm-crf
|
25012b1218b60090f467fe5ed5a15d7a28b3134c
|
[
"Apache-2.0"
] | null | null | null |
import pycrfsuite
import sklearn
from itertools import chain
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.preprocessing import LabelBinarizer
import re
import json
# PICO annotation types handled by this script.
annotypes = ['Participants', 'Intervention', 'Outcome']
# The annotation type this run trains/evaluates ('Intervention').
annotype = annotypes[1]
# Root directory with the CRF input files and output locations.
path = '/nlp/data/romap/crf/'
#path = '/Users/romapatel/Desktop/crf/'
def run():
    """Train a CRF on the train split, then tag the test split and dump
    predicted/gold spans via get_results()."""
    train_sents, test_sents = get_train_test_sents = get_train_test_sets()[0], get_train_test_sets()[1] if False else get_train_test_sets()
    print len(test_sents)
    indwords_list = get_ind_words()
    patterns_list = get_patterns()
    X_train = [sent_features(train_sents[docid], indwords_list, patterns_list) for docid in train_sents.keys()]
    y_train = [sent_labels(train_sents[docid]) for docid in train_sents.keys()]
    X_test = [sent_features(test_sents[docid], indwords_list, patterns_list) for docid in test_sents.keys()]
    y_test = [sent_labels(test_sents[docid]) for docid in test_sents.keys()]
    trainer = pycrfsuite.Trainer(verbose=False)
    for xseq, yseq in zip(X_train, y_train):
        trainer.append(xseq, yseq)
    # L1/L2 regularization, capped iterations, and possible transitions on.
    trainer.set_params({'c1': 1.0,'c2': 1e-3, 'max_iterations': 50, 'feature.possible_transitions': True})
    trainer.train('PICO.crfsuite')
    tagger = pycrfsuite.Tagger()
    tagger.open('PICO.crfsuite')
    get_results(test_sents, tagger, indwords_list, patterns_list)
def _extract_spans(labels):
    """Convert a '0'/'1' label sequence into [start, stop] spans.

    Reproduces the original inline extraction exactly: a span opens at the
    first '1' and, when the terminating '0' is found at index i, closes as
    [start, i + 1]. Labels other than '0'/'1' are ignored.
    NOTE(review): a span still open at the end of the sequence is dropped,
    as in the original loops -- confirm whether that is intended.
    """
    spans, start = [], None
    for i, label in enumerate(labels):
        if label == '1' and start is None:
            start = i
        elif label == '0' and start is not None:
            spans.append([start, i + 1])
            start = None
    return spans
def get_results(test_sents, tagger, indwords_list, patterns_list):
    """Tag every test document and write predicted and gold spans as JSON.

    Args:
        test_sents: dict of docid -> list of token tuples
        tagger: an opened pycrfsuite.Tagger
        indwords_list: indicator-word lists (see get_ind_words)
        patterns_list: pattern lists (see get_patterns)
    """
    pred_dict, correct_dict = {}, {}
    for docid in test_sents:
        features = sent_features(test_sents[docid], indwords_list, patterns_list)
        pred_dict[docid] = _extract_spans(tagger.tag(features))
        correct_dict[docid] = _extract_spans(sent_labels(test_sents[docid]))
    # Context managers guarantee the output files are flushed and closed
    # (the original left both handles open).
    with open(path + 'sets/4/' + annotype + '-test_pred.json', 'w+') as f1:
        f1.write(json.dumps(pred_dict))
    with open(path + 'sets/4/' + annotype + '-test_correct.json', 'w+') as f2:
        f2.write(json.dumps(correct_dict))
def get_ind_words():
    """Load indicator-word lists for each annotation type.

    Reads "<annotype>_unigrams.tsv" (word in the second column, trailing
    newline stripped) for every annotation type; for Intervention the drug
    name list is merged in as well.

    Returns:
        list of three word lists: [participants, intervention, outcome]
    """
    fin_list = []
    for anno in annotypes:
        words = []
        filename = anno.lower() + '_unigrams.tsv'
        with open(path + 'crf_files/' + filename, 'r') as f:
            for line in f:
                items = line.split('\t')
                # Second column holds the word; [:-1] drops the newline.
                word = items[1][:-1]
                if word not in words:
                    words.append(word)
        if anno == 'Intervention':
            # Drug names are additional intervention indicators.
            with open(path + 'crf_files/drug_names.txt', 'r') as f:
                for line in f:
                    word = line[:-1]
                    if word not in words:
                        words.append(word)
        fin_list.append(words)
    return [fin_list[0], fin_list[1], fin_list[2]]
# NOTE: pattern files are expected to be all lowercased.
def get_patterns():
    """Load n-gram pattern lists for each annotation type.

    Reads "<annotype>_trigrams3.tsv" (pattern in the second column).

    Returns:
        list of three pattern lists: [participants, intervention, outcome]
    """
    fin_list = []
    for anno in annotypes:
        patterns = []
        filename = anno.lower() + '_trigrams3.tsv'
        with open(path + 'crf_files/' + filename, 'r') as f:
            for line in f:
                fields = line[:-1].split('\t')
                pattern = fields[1]
                if pattern not in patterns:
                    patterns.append(pattern)
        fin_list.append(patterns)
    return [fin_list[0], fin_list[1], fin_list[2]]
def isindword(word, annotype, indwords_list):
    """Return True if *word* matches the indicator-word list for *annotype*.

    A word matches when its lowercase form, that form minus its final
    character, or its last three characters appear in the list.
    indwords_list is ordered [Participants, Intervention, Outcome].
    """
    # Select the sub-list for the annotation type (no longer shadows the
    # builtin `list` as the original did).
    if annotype == 'Participants':
        words = indwords_list[0]
    elif annotype == 'Intervention':
        words = indwords_list[1]
    else:
        words = indwords_list[2]
    lowered = word.lower()
    return lowered in words or lowered[:-1] in words or lowered[-3:] in words
def ispattern(word, pos, annotype, pattern_list):
    """Return True if *word* or *pos* (lowercased) occurs inside any pattern
    string for the given annotation type.

    Bug fix: the original selected the annotype sub-list but then iterated
    over ``pattern_list`` itself (the list of three lists), so the selection
    was dead code and the `in` test was list membership against whole
    sub-lists. We now scan the annotype-specific pattern strings and do a
    substring match, which is what the trigram pattern files imply.
    """
    if annotype == 'Participants':
        patterns = pattern_list[0]
    elif annotype == 'Intervention':
        patterns = pattern_list[1]
    else:
        patterns = pattern_list[2]
    word_l, pos_l = word.lower(), pos.lower()
    for pattern in patterns:
        if word_l in pattern or pos_l in pattern:
            return True
    return False
def word_features(sent, i, indwords_list, pattern_list):
    """Build the CRF feature list for token *i* of *sent*.

    Combines surface features of the token (case, suffixes, POS) with
    indicator-word and pattern membership, plus context features for the
    two previous and two next tokens.
    """
    word = sent[i][0]
    postag = sent[i][2]
    features = ['bias', 'word.lower=' + word.lower(),'word[-3:]=' + word[-3:],
                'word[-4:]=' + word[-4:],'word.isupper=%s' % word.isupper(),
                'word.istitle=%s' % word.istitle(), 'word.isdigit=%s' % word.isdigit(),
                'postag=' + postag, 'isindword=%s' % isindword(word, annotype, indwords_list),
                'word[0:4]=' + word[0:4], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)]
    #prev previous word
    if i > 1:
        word1 = sent[i-2][0]
        postag1 = sent[i-2][2]
        # NOTE(review): the 'word[0:4]'/'word[-3:]' entries below use the
        # current `word`, not `word1`, and the 'isindword' key lacks the
        # '-1:' prefix -- looks like copy/paste; confirm before changing.
        features.extend(['-1:word.lower=' + word1.lower(), '-1:word.istitle=%s' % word1.istitle(),
                         '-1:word.isupper=%s' % word1.isupper(), '-1:postag=' + postag1,
                         'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
                         'word[-3:]=' + word[-3:]])
    #previous word
    if i > 0:
        word1 = sent[i-1][0]
        postag1 = sent[i-1][2]
        features.extend(['-1:word.lower=' + word1.lower(), '-1:word.istitle=%s' % word1.istitle(),
                         '-1:word.isupper=%s' % word1.isupper(), '-1:postag=' + postag1,
                         'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
                         'word[-3:]=' + word[-3:], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)])
    else:
        # Beginning-of-sequence marker.
        features.append('BOS')
    #next to next word
    if i < len(sent)-2:
        word1 = sent[i+2][0]
        postag1 = sent[i+2][2]
        features.extend(['+1:word.lower=' + word1.lower(), '+1:word.istitle=%s' % word1.istitle(),
                         '+1:word.isupper=%s' % word1.isupper(), '+1:postag=' + postag1,
                         'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
                         'word[-3:]=' + word[-3:]])
    #next word
    if i < len(sent)-1:
        word1 = sent[i+1][0]
        postag1 = sent[i+1][2]
        features.extend(['+1:word.lower=' + word1.lower(), '+1:word.istitle=%s' % word1.istitle(),
                         '+1:word.isupper=%s' % word1.isupper(), '+1:postag=' + postag1,
                         'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
                         'word[-3:]=' + word[-3:], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)])
    else:
        # End-of-sequence marker.
        features.append('EOS')
    return features
def sent_features(sent, indwords_list, patterns_list):
    """Feature lists for every token position of *sent*."""
    all_features = []
    for position in range(len(sent)):
        all_features.append(word_features(sent, position, indwords_list, patterns_list))
    return all_features
def sent_labels(sent):
    """Return the Intervention label (i_label) of each token, as strings.

    Each token is a 6-tuple (token, ner, postag, p_label, i_label, o_label).
    """
    labels = []
    for token, ner, postag, p_label, i_label, o_label in sent:
        labels.append(str(i_label))
    return labels
def sent_tokens(sent):
    """Return just the token strings of *sent*.

    Each token is a 6-tuple (token, ner, postag, p_label, i_label, o_label).
    """
    tokens = []
    for token, ner, postag, p_label, i_label, o_label in sent:
        tokens.append(token)
    return tokens
def print_results(example_sent, tagger, indwords_list, docid, dict):
    """Print predicted vs. gold spans for one sentence (debugging helper).

    NOTE(review): calls sent_features with only two arguments while its
    definition takes three (patterns_list missing) -- this would raise a
    TypeError if executed; presumably stale code from before the pattern
    features were added. Confirm before use.
    """
    pred, correct = tagger.tag(sent_features(example_sent, indwords_list)), sent_labels(example_sent)
    # Extract predicted spans: open at a '1', close as [start, i+1] at a '0'.
    spans, span, outside = [], [], True
    for i in range(len(pred)):
        if pred[i] == '0' and outside is True: continue
        elif pred[i] == '0' and outside is False:
            span.append(i+1)
            spans.append(span)
            span, outside = [], True
        elif pred[i] == '1' and outside is False: continue
        elif pred[i] == '1' and outside is True:
            outside = False
            span.append(i)
    # NOTE(review): this handle is opened but never written or closed.
    f = open(path + annotype + '-test.json', 'w+')
    print '\n\nPredicted: ' + str(spans)
    for span in spans:
        s = ' '
        for i in range(span[0], span[1]):
            s += example_sent[i][0] + ' '
        print s
    # Same extraction for the gold labels.
    spans, span, outside = [], [], True
    for i in range(len(correct)):
        if correct[i] == '0' and outside is True: continue
        elif correct[i] == '0' and outside is False:
            span.append(i+1)
            spans.append(span)
            span, outside = [], True
        elif correct[i] == '1' and outside is False: continue
        elif correct[i] == '1' and outside is True:
            outside = False
            span.append(i)
    print '\n\nCorrect: ' + str(spans)
    for span in spans:
        s = ' '
        for i in range(span[0], span[1]):
            s += example_sent[i][0] + ' '
        print s
def get_training_data():
    """Load the annotated training data from its single-line JSON dump.

    Returns:
        dict mapping docid -> list of token tuples (or None if the file
        is empty, matching the original behavior).
    """
    # The file holds one JSON object on a single line; return the parse of
    # the first line, exactly as the original did, but close the handle.
    with open(path + 'crf_files/difficulty_crf_mv.json', 'r') as f:
        for line in f:
            return json.loads(line)
def get_train_test_sets():
    """Split the annotated documents into train/test dicts.

    Documents whose ids appear in gold_docids.txt become the test set;
    the rest are training data, augmented with extra documents from
    difficulty_new.json.

    Returns:
        (train_sents, test_sents): two dicts of docid -> token tuples
    """
    test_docids = []
    f = open(path + 'crf_files/gold_docids.txt', 'r')
    for line in f:
        test_docids.append(line[:-1])
    doc_dict = get_training_data()
    test_sents, train_sents = {}, {}
    count = 0
    for docid in doc_dict:
        sents = doc_dict[docid]
        # Skip documents with no annotated tokens.
        if len(sents) == 0: continue
        count += 1
        #if count >= 100: break
        if docid not in test_docids:
            train_sents[docid] = sents
        else:
            test_sents[docid] = sents
    # Augment training data with extra documents (single-line JSON file).
    f = open(path + 'difficulty_new.json', 'r')
    for line in f:
        doc_dict_new = json.loads(line)
    count = 1
    for docid in doc_dict_new:
        if docid in train_sents.keys(): continue
        # NOTE(review): only documents from position 9481 onward are added;
        # the magic constant presumably resumes/limits a prior run — and dict
        # iteration order decides which docs those are. Confirm intent.
        if count < 9481:
            count += 1
            continue
        train_sents[docid] = doc_dict_new[docid]
        count += 1
    return train_sents, test_sents
# Script entry point: train and evaluate when invoked directly.
if __name__ == '__main__':
    run()
| 37.907407
| 130
| 0.575965
|
4a11d0e1870f4cdd24111b8a58401e82ead5dbd9
| 384
|
py
|
Python
|
munge_image_windows.py
|
tobyriddell/self-driving-car
|
48d2ef0590fed9201a6c59fb44db6b7e913d7130
|
[
"MIT"
] | null | null | null |
munge_image_windows.py
|
tobyriddell/self-driving-car
|
48d2ef0590fed9201a6c59fb44db6b7e913d7130
|
[
"MIT"
] | null | null | null |
munge_image_windows.py
|
tobyriddell/self-driving-car
|
48d2ef0590fed9201a6c59fb44db6b7e913d7130
|
[
"MIT"
] | null | null | null |
import os
import cv2
import numpy as np
# Working directory containing the captured training images.
path = 'C:\\Users\\toby\\Downloads\\self-driving-car\\training_images_3'
os.chdir(path)
img = cv2.imread("training_image_1502733794000_left.png")
# Keep only the bottom half of the 640x480 frame — presumably the road
# area in front of the car; TODO confirm camera framing.
img = img[240:480, 0:640]
# Smooth before thresholding to suppress pixel noise.
img = cv2.blur(img, (5, 5))
# Work on the blue channel only (OpenCV loads images as BGR).
b,g,r = cv2.split(img)
img = b
# Binarize: pixels above 140 become 255, the rest 0.
retval, img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY)
cv2.imshow("Image", img)
| 19.2
| 73
| 0.669271
|
4a11d0fb0ca0a24db2947d0fcb8c76dedef5db7c
| 6,390
|
py
|
Python
|
tests/python/unittest/test_auto_scheduler_layout_rewrite.py
|
cgerum/incubator-tvm
|
01b98c1d9c59c1b75078599248bfc5cae3f93974
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null |
tests/python/unittest/test_auto_scheduler_layout_rewrite.py
|
cgerum/incubator-tvm
|
01b98c1d9c59c1b75078599248bfc5cae3f93974
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null |
tests/python/unittest/test_auto_scheduler_layout_rewrite.py
|
cgerum/incubator-tvm
|
01b98c1d9c59c1b75078599248bfc5cae3f93974
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test AutoScheduler Layout Rewrite"""
import tempfile
import numpy as np
import tvm
from tvm import topi
from tvm import auto_scheduler, te
from test_auto_scheduler_common import get_tiled_matmul, matmul_auto_scheduler_test
def test_apply_steps_with_layout_rewrite():
    """Check the weight buffer shape under each layout-rewrite mode of a
    tiled 512x512 matmul schedule."""
    dag, s = get_tiled_matmul()
    # No rewrite: the weight tensor keeps its original 512x512 layout.
    _, bufs = dag.apply_steps_from_state(s)
    assert bufs[1].shape[0] == 512
    assert bufs[1].shape[1] == 512
    # RewriteForPreTransformed: the weight placeholder itself is replaced
    # by the tiled 5-D layout (4x8x4x4x512 == 512x512 elements).
    _, bufs = dag.apply_steps_from_state(
        s, layout_rewrite=auto_scheduler.compute_dag.ComputeDAG.RewriteForPreTransformed
    )
    assert bufs[1].shape[0] == 4
    assert bufs[1].shape[1] == 8
    assert bufs[1].shape[2] == 4
    assert bufs[1].shape[3] == 4
    assert bufs[1].shape[4] == 512
    # InsertTransformStage: a transform stage is inserted instead, so the
    # input buffer keeps the original 2-D shape.
    _, bufs = dag.apply_steps_from_state(
        s, layout_rewrite=auto_scheduler.compute_dag.ComputeDAG.InsertTransformStage
    )
    assert bufs[1].shape[0] == 512
    assert bufs[1].shape[1] == 512
@tvm.testing.requires_llvm
def test_correctness_layout_rewrite_rewrite_for_preTransformed():
N = 128
target = tvm.target.Target("llvm")
task = auto_scheduler.create_task(matmul_auto_scheduler_test, (N, N, N), target)
dag = task.compute_dag
with tempfile.NamedTemporaryFile() as fp:
log_file = fp.name
search_policy = auto_scheduler.SketchPolicy(task)
measure_ctx = auto_scheduler.LocalRPCMeasureContext()
tuning_options = auto_scheduler.TuningOptions(
num_measure_trials=2,
runner=measure_ctx.runner,
verbose=1,
measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
)
auto_scheduler.auto_schedule(task, search_policy, tuning_options)
inp, _ = auto_scheduler.load_best(log_file, task.workload_key, target)
s, bufs = dag.apply_steps_from_state(
inp.state, layout_rewrite=auto_scheduler.compute_dag.ComputeDAG.RewriteForPreTransformed
)
s_ref, bufs_ref = dag.apply_steps_from_state(inp.state)
np_args = [np.random.randn(*topi.get_const_tuple(x.shape)).astype(x.dtype) for x in bufs]
np_args_ref = [np.array(x) for x in np_args]
weight = np_args_ref[1]
# infer shape for the rewritten layout
if len(weight.shape) >= 6:
# For cpu tile structure SSRSRS
base = len(weight.shape) - 6
red_dim = weight.shape[2 + base] * weight.shape[4 + base]
out_dim = weight.shape[3 + base] * weight.shape[5 + base]
for i in range(base + 2):
out_dim *= weight.shape[i]
new_order = (
[
2 + base,
4 + base,
]
+ list(range(base + 2))
+ [
3 + base,
5 + base,
]
)
np_args_ref[1] = np_args_ref[1].transpose(new_order)
np_args_ref[1] = np_args_ref[1].reshape((red_dim, out_dim))
func = tvm.build(s, bufs, target=target)
func_ref = tvm.build(s_ref, bufs_ref, target=target)
ctx = tvm.context(str(target))
ctx_ref = tvm.cpu()
args = [tvm.nd.array(x, ctx=ctx) for x in np_args]
args_ref = [tvm.nd.array(x, ctx=ctx_ref) for x in np_args_ref]
ctx.sync()
func(*args)
func_ref(*args_ref)
ctx.sync()
tvm.testing.assert_allclose(args[0].asnumpy(), args_ref[0].asnumpy(), rtol=1e-4)
tvm.testing.assert_allclose(args[2].asnumpy(), args_ref[2].asnumpy(), rtol=1e-4)
del measure_ctx
@tvm.testing.requires_llvm
def test_correctness_layout_rewrite_insert_transform_stage():
N = 128
target = tvm.target.Target("llvm")
task = auto_scheduler.create_task(matmul_auto_scheduler_test, (N, N, N), target)
dag = task.compute_dag
with tempfile.NamedTemporaryFile() as fp:
log_file = fp.name
search_policy = auto_scheduler.SketchPolicy(task)
measure_ctx = auto_scheduler.LocalRPCMeasureContext()
tuning_options = auto_scheduler.TuningOptions(
num_measure_trials=2,
runner=measure_ctx.runner,
verbose=1,
measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
)
auto_scheduler.auto_schedule(task, search_policy, tuning_options)
inp, _ = auto_scheduler.load_best(log_file, task.workload_key, target)
s, bufs = dag.apply_steps_from_state(
inp.state, layout_rewrite=auto_scheduler.compute_dag.ComputeDAG.InsertTransformStage
)
s_ref, bufs_ref = dag.apply_steps_from_state(inp.state)
np_args = [np.random.randn(*topi.get_const_tuple(x.shape)).astype(x.dtype) for x in bufs]
func = tvm.build(s, bufs, target=target)
func_ref = tvm.build(s_ref, bufs_ref, target=target)
ctx = tvm.context(str(target))
ctx_ref = tvm.cpu()
args = [tvm.nd.array(x, ctx=ctx) for x in np_args]
args_ref = [tvm.nd.array(x, ctx=ctx_ref) for x in np_args]
ctx.sync()
func(*args)
func_ref(*args_ref)
ctx.sync()
tvm.testing.assert_allclose(args[0].asnumpy(), args_ref[0].asnumpy(), rtol=1e-4)
tvm.testing.assert_allclose(args[1].asnumpy(), args_ref[1].asnumpy(), rtol=1e-4)
tvm.testing.assert_allclose(args[2].asnumpy(), args_ref[2].asnumpy(), rtol=1e-4)
del measure_ctx
if __name__ == "__main__":
test_apply_steps_with_layout_rewrite()
test_correctness_layout_rewrite_rewrite_for_preTransformed()
test_correctness_layout_rewrite_insert_transform_stage()
| 37.368421
| 100
| 0.658216
|
4a11d18cac545e74e72448b3a2de99c549c511ca
| 792
|
pyde
|
Python
|
class05-06/paint/paint.pyde
|
Wuradclan/programming-course
|
d87f9f5567d9f46d2213079e061bde8820cd5712
|
[
"MIT"
] | 7
|
2019-11-26T18:23:08.000Z
|
2020-06-14T17:56:33.000Z
|
class05-06/paint/paint.pyde
|
Wuradclan/programming-course
|
d87f9f5567d9f46d2213079e061bde8820cd5712
|
[
"MIT"
] | null | null | null |
class05-06/paint/paint.pyde
|
Wuradclan/programming-course
|
d87f9f5567d9f46d2213079e061bde8820cd5712
|
[
"MIT"
] | 4
|
2019-12-10T20:01:01.000Z
|
2020-02-25T18:12:07.000Z
|
stroke_width = 5
def setup():
size(400, 400)
background(255)
def draw():
global stroke_width
delta = .5
if keyPressed:
if keyCode == DOWN:
stroke_width = stroke_width - delta
if keyCode == UP:
stroke_width = stroke_width + delta
if stroke_width < 1:
stroke_width = 1
print(stroke_width)
strokeWeight(stroke_width)
if mousePressed:
if mouseButton == LEFT:
stroke(0)
if mouseButton == CENTER:
stroke(0, 0, 255)
if mouseButton == RIGHT:
# change stroke color to hot pink (255, 102, 189)
stroke(255, 102, 189)
line(pmouseX, pmouseY, mouseX, mouseY)
| 22
| 61
| 0.510101
|
4a11d1f769fb0278c23e2d8ef56a076fbec26e6e
| 923
|
py
|
Python
|
release/stubs.min/System/ComponentModel/__init___parts/ComponentEditor.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/ComponentModel/__init___parts/ComponentEditor.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/ComponentModel/__init___parts/ComponentEditor.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
class ComponentEditor(object):
""" Provides the base class for a custom component editor. """
def EditComponent(self, *__args):
"""
EditComponent(self: ComponentEditor,context: ITypeDescriptorContext,component: object) -> bool
Edits the component and returns a value indicating whether the component was modified based upon
a given context.
context: An optional context object that can be used to obtain further information about the edit.
component: The component to be edited.
Returns: true if the component was modified; otherwise,false.
EditComponent(self: ComponentEditor,component: object) -> bool
Edits the component and returns a value indicating whether the component was modified.
component: The component to be edited.
Returns: true if the component was modified; otherwise,false.
"""
pass
| 24.945946
| 102
| 0.696641
|
4a11d396bdd831fb625f524d6aeb06524351c98a
| 2,983
|
py
|
Python
|
contrib/testgen/base58.py
|
bitcoin-black/bitcoinblack
|
ad87d315c635ef439d4664da46e6672153f91b79
|
[
"MIT"
] | null | null | null |
contrib/testgen/base58.py
|
bitcoin-black/bitcoinblack
|
ad87d315c635ef439d4664da46e6672153f91b79
|
[
"MIT"
] | null | null | null |
contrib/testgen/base58.py
|
bitcoin-black/bitcoinblack
|
ad87d315c635ef439d4664da46e6672153f91b79
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2012-2017 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
DigiByte base58 encoding and decoding.
Based on https://digibytetalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
new = hashlib.sha256
if str != bytes:
# Python 3.x
def ord(c):
return c
def chr(n):
return bytes( (n,) )
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += (256**i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# DigiByte does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0': nPad += 1
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length = None):
""" decode v into a string of len bytes
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = bytes()
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(result) != length:
return None
return result
def checksum(v):
"""Return 32-bit checksum based on SHA256"""
return SHA256.new(SHA256.new(v).digest()).digest()[0:4]
def b58encode_chk(v):
"""b58encode a string, with 32-bit checksum"""
return b58encode(v + checksum(v))
def b58decode_chk(v):
"""decode a base58 string, check and remove checksum"""
result = b58decode(v)
if result is None:
return None
if result[-4:] == checksum(result[:-4]):
return result[:-4]
else:
return None
def get_bcaddress_version(strAddress):
""" Returns None if strAddress is invalid. Otherwise returns integer version of address. """
addr = b58decode_chk(strAddress)
if addr is None or len(addr)!=21: return None
version = addr[0]
return ord(version)
if __name__ == '__main__':
# Test case (from http://gitorious.org/digibyte/python-base58.git)
assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') is 0
_ohai = 'o hai'.encode('ascii')
_tmp = b58encode(_ohai)
assert _tmp == 'DYB3oMS'
assert b58decode(_tmp, 5) == _ohai
print("Tests passed")
| 27.878505
| 97
| 0.639625
|
4a11d4025591dc0e69f2c3e25faea8cc04a01783
| 800
|
py
|
Python
|
mysite/urls.py
|
Amos-lii/404Lab04
|
fe7dc13ef033b8e43ff2d5354734c2112c4befe1
|
[
"Apache-2.0"
] | null | null | null |
mysite/urls.py
|
Amos-lii/404Lab04
|
fe7dc13ef033b8e43ff2d5354734c2112c4befe1
|
[
"Apache-2.0"
] | null | null | null |
mysite/urls.py
|
Amos-lii/404Lab04
|
fe7dc13ef033b8e43ff2d5354734c2112c4befe1
|
[
"Apache-2.0"
] | null | null | null |
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include,path
urlpatterns = [
path('polls/', include('polls.urls')),
path('admin/', admin.site.urls),
]
| 33.333333
| 77
| 0.7025
|
4a11d41c7b42d9f4de45cd62103d793a861b32b3
| 395
|
py
|
Python
|
test/test_block.py
|
micetti/TweetsOnBlocks
|
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
|
[
"MIT"
] | null | null | null |
test/test_block.py
|
micetti/TweetsOnBlocks
|
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
|
[
"MIT"
] | null | null | null |
test/test_block.py
|
micetti/TweetsOnBlocks
|
e9355a0d35c0bfe1755d80fc64ae2d781c762e30
|
[
"MIT"
] | null | null | null |
from blockchain.block import Block
class TestBlock:
def test_block_setup(self):
previous_hash = '0000000000000000000000000000000000000000000000000000000000000000'
block = Block(0, previous_hash, 'TestBlock')
assert block.index == 0
assert block.previous_hash == previous_hash
assert block.data == 'TestBlock'
assert len(block.hash) == 64
| 26.333333
| 90
| 0.696203
|
4a11d490759fc42486cda38198fe65ad3dd32a52
| 1,657
|
py
|
Python
|
sina_login/sina_login_by_selenium.py
|
wtus/smart_login
|
436b43a36f1a9e50c2ca38ab3488ff670edde88f
|
[
"MIT"
] | 1
|
2017-09-01T07:11:33.000Z
|
2017-09-01T07:11:33.000Z
|
sina_login/sina_login_by_selenium.py
|
wtus/smart_login
|
436b43a36f1a9e50c2ca38ab3488ff670edde88f
|
[
"MIT"
] | null | null | null |
sina_login/sina_login_by_selenium.py
|
wtus/smart_login
|
436b43a36f1a9e50c2ca38ab3488ff670edde88f
|
[
"MIT"
] | 1
|
2019-10-28T15:39:57.000Z
|
2019-10-28T15:39:57.000Z
|
import re
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
def login(account, passwd, url):
# 如果driver没加入环境变量中,那么就需要明确指定其路径
# 验证于2017年4月11日
# 直接登陆新浪微博
driver = webdriver.Chrome(executable_path='/Users/resolvewang/Documents/program/driver/chromedriver')
driver.maximize_window()
# locator = (By.)
driver.get(url)
print('开始登陆')
name_field = driver.find_element_by_id('loginname')
name_field.clear()
name_field.send_keys(account)
password_field = driver.find_element_by_class_name('password').find_element_by_name('password')
password_field.clear()
password_field.send_keys(passwd)
submit = driver.find_element_by_xpath('//*[@id="pl_login_form"]/div/div[3]/div[6]/a/span')
ActionChains(driver).double_click(submit).perform()
time.sleep(5)
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, 'WB_miniblog')))
source = driver.page_source
if is_login(source):
print('登录成功')
sina_cookies = driver.get_cookies()
driver.quit()
return sina_cookies
def is_login(source):
rs = re.search("CONFIG\['islogin'\]='(\d)'", source)
if rs:
return int(rs.group(1)) == 1
else:
return False
if __name__ == '__main__':
url = 'http://weibo.com/login.php'
name_input = input('请输入你的账号\n')
passwd_input = input('请输入你的密码\n')
cookies = login(name_input, passwd_input, url)
| 26.301587
| 105
| 0.71213
|
4a11d4a355143fb2ef3ccb4981fe7f03124fa2c8
| 48,643
|
py
|
Python
|
labelfactory/labeling/datamanager.py
|
Orieus/one_def_classification
|
3269290e1fa06ec104a38810c5dffa5401f34ef1
|
[
"MIT"
] | null | null | null |
labelfactory/labeling/datamanager.py
|
Orieus/one_def_classification
|
3269290e1fa06ec104a38810c5dffa5401f34ef1
|
[
"MIT"
] | null | null | null |
labelfactory/labeling/datamanager.py
|
Orieus/one_def_classification
|
3269290e1fa06ec104a38810c5dffa5401f34ef1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python libraries
from __future__ import print_function
import os
import sys
# import cPickle as pickle
import pickle
import json
import shutil
import pandas as pd
import numpy as np
# import copy
import time
from datetime import datetime
from pymongo import MongoClient
import ipdb
# Services from the project
# sys.path.append(os.getcwd())
class DataManager(object):
"""
DataManager is the class providing read and write facilities to access and
update the dataset of labels and predictions
It assumes that data will be stored in files or in a databes.
If files, the following data structure is assumed
project_path/.
/label_dataset_fname.pkl
/preds_dataset_fname.pkl
/labelhistory_fname.pkl
/input/.
/labels_fname
/preds_fname
/output/.
/used_/.
(the specific file and folder names can be configured)
If project_path does not exist, an error is returned.
The class provides facilities to:
- Read and write data in .pkl files or a mongo database
- Read labels from the /input/ folder in csv format
- Read preds from the /input/ folder in pkl files
- Write outputs (tipically, new labels) in the desired format in
/ouput/
"""
def __init__(self, source_type, dest_type, file_info, db_info,
categories, parentcat, ref_class, alphabet,
compute_wid='yes', unknown_pred=0):
"""
Stores files, folder and path names into the data struture of the
DataManager object.
:Args:
:source_type: 'file' if the data sources are in files
'db' if data is stored in a database
:dest_type: 'file' if the data sources are in files
'db' if data is stored in a database
:file_info: a dictionary containing strings about the names of
path and folders. Fields are:
:project_path: string with the path to all input and ouput
files
:input_folder: Name of folder containing input data
:output_folder: Name of folder containing output data
:used_folder: Name of the folder containing copies of old data
files
:dataset_fname: Name of file containing the data
:labelhistory_fname: Name of the label history file
:labels_endname: Suffix of the label file
:preds_endname: Suffix of the prediction files
:urls_fname: Name of the file containing urls only
:db_info: a dictionary containing information about the database.
Fields are:
:name: Name of the database
:hostname: Name of the database host
:user: User name
:pwd: Password
:label_coll_name: Name of the label collection
:history_coll_name: Name of the history collection
:port: Port
:mode: mode of saving data in the database.
:categories: Set of categories
:parentcat: Dictionary defining the hyerarchical category structure
:ref_class: Name of the category that the predictions refer to
:alphabet: Possible labels for each category
:compute_wid: Type of wid. If yes, the wid is a transformed url.
In no, the wid is equal to the url.
:unknown_pred: Default value for unknown predictions.
"""
self.source_type = source_type
# Set variables about files and folders
if file_info is not None:
project_path = file_info['project_path']
input_folder = file_info['input_folder']
output_folder = file_info['output_folder']
used_folder = file_info['used_folder']
# Revise path and folder terminations
if not project_path.endswith('/'):
project_path = project_path + '/'
if not input_folder.endswith('/'):
input_folder = input_folder + '/'
if not output_folder.endswith('/'):
output_folder = output_folder + '/'
if not used_folder.endswith('/'):
used_folder = used_folder + '/'
# Folder containing all files related to labeling.
self.directory = project_path
# Check input and output folders
self.input_path = os.path.join(self.directory, input_folder)
if not os.path.isdir(self.input_path):
os.makedirs(self.input_path)
self.output_path = os.path.join(self.directory, output_folder)
if not os.path.isdir(self.output_path):
os.makedirs(self.output_path)
self.used_path = os.path.join(self.directory, used_folder)
if not os.path.isdir(self.used_path):
os.makedirs(self.used_path)
# Store names of files in the input folder
self.labels_endname = file_info['labels_endname']
self.preds_endname = file_info['preds_endname']
self.urls_fname = file_info['urls_fname']
self.export_labels_fname = file_info['export_labels_fname']
# Store file names
self.dataset_fname = file_info['dataset_fname']
self.datalabels_fname = self.dataset_fname + self.labels_endname
self.datapreds_fname = self.dataset_fname + self.preds_endname
self.dataset_file = os.path.join(
self.directory, self.dataset_fname + '.pkl')
self.datalabels_file = os.path.join(
self.directory, self.datalabels_fname + '.pkl')
self.datapreds_file = os.path.join(
self.directory, self.datapreds_fname + '.pkl')
self.labelhistory_fname = file_info['labelhistory_fname']
self.labelhistory_file = os.path.join(
self.directory, self.labelhistory_fname + '.pkl')
self.exportlabels_file = os.path.join(
self.output_path, self.export_labels_fname + '.csv')
# Store info about the database
self.db_info = db_info
# Type of wid
self.compute_wid = compute_wid
# Store category names
self.categories = categories
self.ref_class = ref_class
self._yes = alphabet['yes']
self._no = alphabet['no']
self._unknown = alphabet['unknown']
self._error = alphabet['error']
# Default value for predictions
self._unknown_p = unknown_pred
def loadData(self):
""" Load data and label history from file.
This is the basic method to read the information about labels, urls
and predictions from files in the standard format.
If the dataset file or the labelhistory file does not exist, no
error is returned, though empty data variables are returned.
:Returns:
:df_labels: Multi-index Pandas dataframe containing labels.
Fields are:
'info': With columns marker', 'relabel', 'weight',
'userId', 'date'
'label': One column per categorie, containing the labels
:df_preds: Pandas dataframa indexed by the complete list of
wids, with one column of urls and one addicional
column per category containing predictions.
:labelhistory: Dictionary containing, for each wid, a record of
the labeling events up to date.
"""
if self.source_type == 'file':
# Read label history
if os.path.isfile(self.labelhistory_file):
if sys.version_info.major == 3:
try:
with open(self.labelhistory_file, 'rb') as f:
labelhistory = pickle.load(f)
except:
print("---- Cannot read a pkl file version for " +
"Python 2. Trying to load a json file.")
print("---- An ERROR will arise if json file does " +
"not exist.")
print("---- IF THIS IS THE CASE, YOU SHOULD DO \n" +
" python run_pkl2json.py [path to "
"labelhistory]\n" +
" FROM PYTHON 2 BEFORE RUNNING THIS SCRIPT.")
fname = self.labelhistory_file.replace('.pkl', '.json')
with open(fname, 'r', encoding='latin1') as f:
labelhistory = json.load(f)
# Convert date field, which is in string format, to
# datetime format.
for url, events in labelhistory.items():
for idx, record in events.items():
labelhistory[url][idx]['date'] = (
datetime.strptime(
labelhistory[url][idx]['date'],
"%Y-%m-%dT%H:%M:%S.%f"))
else:
with open(self.labelhistory_file, 'r') as f:
labelhistory = pickle.load(f)
else:
labelhistory = {}
# Load dataset files.
if (os.path.isfile(self.datalabels_file) and
os.path.isfile(self.datapreds_file)):
# Load label and prediction dataframes stored in pickle files
df_labels = pd.read_pickle(self.datalabels_file)
df_preds = pd.read_pickle(self.datapreds_file)
elif os.path.isfile(self.dataset_file):
# If there is an old dataset structure, read data there and
# convert it into the label and preds dataframes
with open(self.dataset_file, 'r') as handle:
data = pickle.load(handle)
df_labels, df_preds = self.get_df(data, labelhistory)
else:
# Warning: the next 4 commands are duplicated in importData.
# Make sure taht any changes here are also done there
# (I know, this is not a good programming style..)
info = ['marker', 'relabel', 'weight', 'userId', 'date']
arrays = [len(info)*['info'] + len(self.categories)*['label'],
info + self.categories]
tuples = list(zip(*arrays))
mindex = pd.MultiIndex.from_tuples(tuples)
# Create empty pandas dataframe
df_labels = pd.DataFrame(self._unknown, index=[],
columns=mindex)
# df_labels = None
# df_preds = None
cols = ['url'] + self.categories
df_preds = pd.DataFrame(index=[], columns=cols)
print(df_preds)
else:
dbName = self.db_info['name']
hostname = self.db_info['hostname']
user = self.db_info['user']
pwd = self.db_info['pwd']
label_coll_name = self.db_info['label_coll_name']
history_coll_name = self.db_info['history_coll_name']
port = self.db_info['port']
try:
print("Trying connection...")
client = MongoClient(hostname)
client[dbName].authenticate(user, pwd)
db = client[dbName]
print("Connected to mongodb @ {0}:[{1}]".format(
hostname, port))
except Exception as E:
print("Fail to connect mongodb @ {0}:{1}, {2}".format(
hostname, port, E))
exit()
# Read label collection
collection = db[label_coll_name]
num_urls = collection.count()
data = {}
if num_urls > 0:
dataDB = collection.find({})
for i in range(num_urls):
wid = dataDB[i]['idna']
data[wid] = dataDB[i]['value']
if 'url' not in data[wid]:
data[wid]['url'] = wid
# Read history
collection = db[history_coll_name]
num_events = collection.count()
labelhistory = {}
if num_events > 0:
dataDB = collection.find({})
for i in range(num_events):
wid = dataDB[i]['idna']
labelhistory[wid] = dataDB[i]['value']
df_labels, df_preds = self.get_df(data, labelhistory)
# In the current version, predictions are not being stored in the
# mongo db. They must be loaded from files.
if os.path.isfile(self.datapreds_file):
# Load prediction dataframes stored in pickle files
df_preds = pd.read_pickle(self.datapreds_file)
return df_labels, df_preds, labelhistory
def get_df(self, data, labelhistory):
""" Converts the data dictionary used in former versions of the web
labeler into the label and prediction dataframes.
:Args:
:data: Data dictionary of labels and predicts.
:labelhistory: The labelhistory is used to get the date of the
last labelling event for each wid.
:Returns:
:df_labels:
:df_preds:
"""
# #######################
# Compute preds dataframe
# Create pandas dataframe structure
wids = data.keys()
cols = ['url'] + self.categories
df_preds = pd.DataFrame(index=wids, columns=cols)
# Fill urls
urls = [data[w]['url'] for w in wids]
df_preds['url'] = urls
for w in wids:
if 'pred' in data[w]:
for c in self.categories:
df_preds[c] = [data[w]['pred'][c] for w in wids]
# ########################
# Compute labels dataframe
# Create multiindex for the label dataframe
info = ['marker', 'relabel', 'weight', 'userId', 'date']
arrays = [len(info)*['info'] + len(self.categories)*['label'],
info + self.categories]
tuples = list(zip(*arrays))
mindex = pd.MultiIndex.from_tuples(tuples)
# Create empty pandas dataframe
df_labels = pd.DataFrame(columns=mindex)
# Fill dataframe with wids that have some known label
for w in data:
# Only the wids with some label are stored.
lab_list = data[w]['label'].values()
if self._yes in lab_list or self._no in lab_list:
# Fill row with category labels
for c in self.categories:
if c in data[w]['label']:
df_labels.loc[w, ('label', c)] = data[w]['label'][c]
else:
df_labels.loc[w, ('label', c)] = self._unknown
# Fill rows with category info
for i in info:
if i != 'date':
df_labels.loc[w, ('info', i)] = data[w][i]
else:
# Read the date, if it exists, in the label history
if w in labelhistory:
# Take the most recent labeling date
record = max(labelhistory[w])
df_labels.loc[w, ('info', i)] = (
labelhistory[w][record]['date'])
return df_labels, df_preds
def df2data(self, df_labels, df_preds):
""" Converts the label and prediction dataframes into the data
dictionary used in former versions of the web labeler.
:Args:
:df_labels:
:df_preds:
:Returns:
:data: Data dictionary of labels and predicts.
:labelhistory: The labelhistory is used to get the date of the
last labelling event for each wid.
"""
data = {}
wids = df_preds.index.values
for i, w in enumerate(wids):
print('Processing {0} wids out of {1}\r'.format(i, len(wids)),
end="")
dataw = df_preds.loc[w]
data[w] = {}
data[w]['url'] = dataw['url']
data[w]['pred'] = {}
for c in self.categories:
data[w]['pred'][c] = dataw[c]
wids = df_labels.index.values
for w in wids:
print ('Processing {0} wids out of {1}\r').format(i, len(wids)),
dataw = df_preds.loc[w]
data[w]['marker'] = dataw['marker']
data[w]['relabel'] = dataw['relabel']
data[w]['weight'] = dataw['weight']
data[w]['userId'] = dataw['userId']
data[w]['date'] = dataw['date']
for c in self.categories:
data[w]['pred'][c] = dataw['label'][c]
return data
def importData(self):
""" Read data from the input folder.
Only labels with positive or negative label are loaded
:Args:
:-: None. File locations and the set of categories are taken
from the class attributes
:Returns:
:df_labels: Pandas dataframe of labels
:df_preds: Pandas dataframe of predictions
"""
# Warning.
if 'url' in self.categories or 'uid' in self.categories:
sys.exit("ERROR: url and uid are reserved words. They cannot be " +
"used as category names.")
# Initialize dictionaries
labels = {}
preds = {}
# ###############
# Read data files
# Read new labels and predictions
for cat in self.categories:
labels[cat] = self.importLabels(cat)
preds[cat] = self.importPredicts(cat)
# Read predictions for a csv file directly in a pandas dataframe
df_imported_preds = self.importPredicts()
if df_imported_preds is not None:
wid_list_csv = df_imported_preds.index.tolist()
else:
wid_list_csv = []
# # Transfer the dictionary [wid][cat] in predsall into preds, which is
# # a dictionary [cat][wid]
# for wid in predsall:
# for cat in predsall[wid]:
# if cat in self.categories:
# # Note that these predictions override those in the
# # 'pkl' files
# preds[cat][wid] = predsall[wid][cat]
# else:
# print("---- WARNING: The prediction file contains " +
# "unknown category " + cat + ", ignored")
# Read new urls (without preditions or labels)
urls_dict = self.importURLs()
# ####################################
# Import predictions from pickle files
# Capture all wids in pred or label files
print("---- Capturing wids from prediction files ")
wid_set = set(urls_dict.keys())
for cat in self.categories:
wid_set = wid_set | set(preds[cat].keys())
wid_set = wid_set | set(wid_list_csv)
wid_set = list(wid_set)
# Create the dictionary structure for data2
print("---- Building predictions structure ")
cols = ['url'] + self.categories
df2_preds = pd.DataFrame(index=wid_set, columns=cols)
# Join all urls and predictions in a dataset struture
# First, insert data from the dictionary of urls
df2_preds['url'].update(pd.Series(urls_dict))
# Second, insert data from dictionaries of predictions
ntot = len(wid_set)
for cat in self.categories:
urls_dict = {}
pred_dict = {}
for nk, wid in enumerate(preds[cat]):
if ntot > 10000 and nk % 100 == 0:
print(('---- ---- Processing {0} wids out of {1} from ' +
'category {2} \r').format(nk, ntot, cat),
end="")
urls_dict[wid] = preds[cat][wid]['url']
pred_dict[wid] = preds[cat][wid]['pred']
df2_preds['url'].update(pd.Series(urls_dict))
df2_preds[cat].update(pd.Series(pred_dict))
# ################################
# Import predictions from csv file
if df_imported_preds is not None:
# Capture all wids in the dataframe
print("---- Capturing wids from prediction csv file ")
# Categories to import:
cat_list = list(set(self.categories) &
set(df_imported_preds.columns.tolist()))
cat_unk = (set(df_imported_preds.columns.tolist()) -
set(self.categories))
if len(cat_unk) > 0:
print("WARNING: There as unknown categories in the " +
"prediction files: {}".format(cat_unk))
# Insert predictions imported from csv file in to df2_preds
# ntot = len(wid_list_csv)
# TO-DO: maybe a loop is not necessary to merge these dataframes.
print('---- ---- Processing {} wids...'.format(ntot))
df2_preds.loc[wid_list_csv, cat_list] = (
df_imported_preds.loc[wid_list_csv, cat_list])
# for cat in cat_set:
# ipdb.set_trace()
# df2_preds.loc[wid_list_csv, cat_list] = (
# df_imported_preds.loc[wid_list_csv, cat])
print('... done.')
# for n, w in enumerate(wid_set):
# if ntot > 10000 and n % 100 == 0:
# print('Processing {0} wids out of {1}\r'.format(n, ntot),
# end="")
# for cat in cat_set:
# df2_preds.loc[w, cat] = df_imported_preds.loc[w, cat]
# #############
# Import labels
# Capture all wids in pred or lfiles
print("---- Capturing wids from label files ")
wid_set = set()
for cat in self.categories:
wid_set = wid_set | set(labels[cat].keys())
wid_set = list(wid_set)
# Create the dictionary structure for data2
print("---- Building dataset structure ")
# Warning: the next 4 commands are duplicated in loadData.
# Make sure taht any changes here are also done there
# (I know, this is not a good programming style..)
info = ['marker', 'relabel', 'weight', 'userId', 'date']
arrays = [len(info)*['info'] + len(self.categories)*['label'],
info + self.categories]
tuples = list(zip(*arrays))
mindex = pd.MultiIndex.from_tuples(tuples)
# Create empty pandas dataframe
df2_labels = pd.DataFrame(self._unknown, index=wid_set,
columns=mindex)
# Second, insert data from dictionaries of predictions and labels
ntot = len(wid_set)
for cat in self.categories:
for nk, w in enumerate(labels[cat]):
if ntot > 10000 and nk % 100 == 0:
print(('Processing {0} wids out of {1} from ' +
'category {2} \r').format(nk, ntot, cat),
end="")
df2_labels.loc[w, ('label', cat)] = labels[cat][wid]
print("---- End of import ")
return df2_labels, df2_preds
def importLabels(self, category):
""" Get dictionary of labels relative to a given category
:Args:
:category: The category to load.
:Returns:
:labels: Dictionary of labels
"""
# If there are no labels to return, an empty dict is returned.
labels = {}
# Read raw data from file, if it exists
labels_file = self.input_path + category + self.labels_endname + '.csv'
if os.path.isfile(labels_file):
print("---- Importing labels from category " + category)
data = []
with open(labels_file, "r") as f:
data = f.readlines()
f.close()
# Structure raw data into labels_dict dictionary
data = [d.replace("\n", "") for d in data]
for d in data:
d = d.split(";")
# Remove \r appearing in some labels.
d[1] = d[1].replace("\r", "")
# Store the label. Note that if some wid is duplicated.
# The latter label records overwrite the former ones.
labels.update({d[0]: d[1]})
# WARNING: In former versions, labels were converted to
# integers using
# labels.update({d[0]: int(d[1])})
# Now they are stored as strings.
# Move the label file to the "used" folder.
date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
dest_file = self.used_path + category + self.labels_endname + \
'_in' + date_str + '.csv'
shutil.move(labels_file, dest_file)
return labels
def importPredicts(self, category=None):
""" Get dictionary of predictions relative to a given category
:Args:
:category: The category to load (from a pkl file)
If None, all categories are read from a unique
csv file
:Returns:
:preds: Dictionary of predictions
- If category is not None, preds[wid] has the
prediction for url wid about the given category.
- If category is not None, preds is a dataframe
with the wid as uid column and one column with
predictions for each category.
"""
# The default category is the reference class used by the
# active learning algorithm
if category is None:
# Default output
preds = None
# Read raw data
# preds_file = self.input_path + self.preds_endname + '.json'
preds_file = self.input_path + self.preds_endname + '.csv'
# Load predictions from file, if it exists
if os.path.isfile(preds_file):
print("---- Importing multicategory predictions")
with open(preds_file, 'r') as f:
# preds = json.load(f)
preds = pd.read_csv(f)
preds.set_index('uid', inplace=True)
# Move the preditions file to the "used" folder.
date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
dest_file = self.used_path + self.preds_endname + \
'_in' + date_str + '.csv'
# '_in' + date_str + '.json'
shutil.move(preds_file, dest_file)
else:
# Default output
preds = {}
# Read raw data
preds_file = (self.input_path + category + self.preds_endname +
'.pkl')
# Load predictions from file, if it exists
if os.path.isfile(preds_file):
print("---- Importing predictions from category " + category)
with open(preds_file, 'r') as f:
preds = pickle.load(f)
# Move the preditions file to the "used" folder.
date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
dest_file = self.used_path + category + self.preds_endname + \
'_in' + date_str + '.pkl'
shutil.move(preds_file, dest_file)
return preds
def importURLs(self):
""" Reads a list of urls from a file, computes a wid (web identifier)
for each one of them and returns it in a dictionary {wid:url}.
If self.computeWID is None, the wid is equal to the url.
:Returns:
:url_dict: Dictionary of urls
"""
# Initialize ouput dictionary (this is the default output if no
# urlfile exists)
url_dict = {}
# Read raw data from file, if it exists
urls_file = self.input_path + self.urls_fname + '.csv'
if os.path.isfile(urls_file):
print("---- Reading new URLs")
# data = []
# with open(urls_file, "r") as f:
# data = f.readlines()
# f.close()
data = pd.read_csv(urls_file, header=None)
data = data[0].tolist()
data = [d.replace("\r", "") for d in data]
data = [d.replace("\n", "") for d in data]
for url in data:
# Transform url into wid
if self.compute_wid in ['yes', 'www']:
wid = self.computeWID(url, mode=self.compute_wid)
else:
wid = url
# Store the pair wid:url in the ouput dictionary.
# The latter wid records overwrite the former ones.
url_dict.update({wid: url})
# Move the urls file to the "used" folder.
date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
dest_file = self.used_path + self.urls_fname + \
'_in' + date_str + '.csv'
shutil.move(urls_file, dest_file)
return url_dict
def computeWID(self, url, mode='yes'):
""" Computes a web identifier for a given url
The computations are based in a code used by another software
project (IaD2015_B2C)
:Args:
url: Input url
mode: If 'yes', a complete transformation is done, by removing
the part of 'http://www.' that exists in the original
url
If 'www', only an initial 'www.' is removed
WARNING:
The url-to-wid transformation is not one-to-one: in some bad-luck
cases, two different urls could be transformed into the same wid.
"""
if mode == 'yes':
wid = url.lower()
wid = wid.replace("http://", '')
wid = wid.replace("//", '')
wid = wid.replace("www.", '')
wid = wid.replace(".", '_')
# This replacement does not affect the url if it is a domain site.
# But it may transform the url of specific web pages.
wid = wid.replace("/", "__")
elif mode == 'www':
if url[0:4] == 'www.':
wid = url[4:]
else:
wid = url
else:
print('---- WARNING: The transformation mode is unknown.')
print('---- The wid is taken as the url without changes')
wid = url
return wid
    def getDataset(self, df_labels, df_preds):
        """ Extract dictionaries of predictions, labels and labeling
            metadata from the label and prediction dataframes.

        :Args:
            :df_labels: Pandas dataframe of labels; multiindex columns
                ('info', <field>) and ('label', <category>)
            :df_preds: Pandas dataframe of predictions; one column per
                category plus a 'url' column
        :Returns:
            :preds: Dict of predictions, one sub-dict per category
            :labels: Dict of labels, one sub-dict per category
            :urls: Dict of urls
            :markers: Dict of markers
            :relabels: Dict of relabels
            :weights: Dict of weights
            :userIds: Dict of user ids, or None for older datasets
        """
        # Initialize dictionaries (one empty dict per category)
        preds = dict((c, {}) for c in self.categories)
        labels = dict((c, {}) for c in self.categories)
        # Read labels and predictions for all categories
        for cat in self.categories:
            preds[cat] = df_preds[cat].to_dict()
            for wid in preds[cat]:
                # Replace missing predictions (None/NaN) by the default
                # "unknown" prediction value.
                if preds[cat][wid] is None or np.isnan(preds[cat][wid]):
                    # Default value for none predictions. Not clear if this
                    # is a good options.
                    preds[cat][wid] = self._unknown_p
            labels[cat] = df_labels[('label', cat)].to_dict()
        # Get urls, markers and relabels
        urls = df_preds['url'].to_dict()
        markers = df_labels[('info', 'marker')].to_dict()
        relabels = df_labels[('info', 'relabel')].to_dict()
        weights = df_labels[('info', 'weight')].to_dict()
        # The following assignment can cause an error because dataset files
        # from older versions of this sw did not include a 'userId' entry.
        if 'userId' in df_labels:
            userIds = df_labels[('info', 'userId')].to_dict()
        else:
            userIds = None
        return preds, labels, urls, markers, relabels, weights, userIds
def getHistory(self):
""" Loads the history file, and creates a dictionary recording the last
labelling event for each url.
If the history file does not exist, an empty dictionary is returned
:Returns:
:hdict: A dictionary containing, for every url identifier (wid)
the record of the last time it was labeled
"""
#################################
# Read the whole labeling history
# Name of the fie containing the recorded labeling history
file_labelh = self.directory + self.labelh_filename + '.pkl'
if os.path.isfile(file_labelh):
# Read data from history pickle file.
with open(file_labelh, 'r') as handle:
labelh_list = pickle.load(handle)
else:
print('Histórico de etiquetado no disponible.')
# Create an incomplete history list from the current label
# dictionary
labelh_list = []
######################################################
# Create dictionary with the last record for every wid
hdict = {}
for record in labelh_list:
wid = record['wid']
hdict[wid] = {'url': record['url'],
'label': record['label'],
'marker': record['marker'],
'date': record['date']}
if 'userId' in record:
hdict[wid]['userId'] = record['userId']
else:
hdict[wid]['userId'] = None
return hdict
    def saveData(self, df_labels, df_preds, labelhistory, dest='file'):
        """ Save label and prediction dataframes and labelhistory pickle files.

            If dest='mongodb', they are saved in a mongo database instead,
            in the mode specified in self.db_info['mode']:
                'rewrite' :The existing db collection is removed and data are
                           saved in a new one
                'update'  :The data are upserted to the existing db.

        :Args:
            :df_labels: Pandas dataframe of labels
            :df_preds: Pandas dataframe of predictions
            :labelhistory: History object to pickle alongside the dataframes
            :dest: Type of destination: 'file' (data is saved in files) or
                   'mongodb'
        """
        if dest == 'file':
            # Keep a copy of the original datasets, just in case some
            # mistakes are made during labelling
            date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
            if os.path.isfile(self.dataset_file):
                dest_file = (self.used_path + self.dataset_fname + '_' +
                             date_str + '.pkl')
                shutil.move(self.dataset_file, dest_file)
            if os.path.isfile(self.datalabels_file):
                dest_file = (self.used_path + self.datalabels_fname + '_' +
                             date_str + '.pkl')
                shutil.move(self.datalabels_file, dest_file)
            if os.path.isfile(self.datapreds_file):
                dest_file = (self.used_path + self.datapreds_fname + '_' +
                             date_str + '.pkl')
                shutil.move(self.datapreds_file, dest_file)
            if os.path.isfile(self.labelhistory_file):
                dest_file = (self.used_path + self.labelhistory_fname + '_' +
                             date_str + '.pkl')
                shutil.move(self.labelhistory_file, dest_file)
            # Save label history
            with open(self.labelhistory_file, 'wb') as f:
                pickle.dump(labelhistory, f)
            # Save dataframes to files
            df_labels.to_pickle(self.datalabels_file)
            df_preds.to_pickle(self.datapreds_file)
        else:
            # Start a db connection
            dbName = self.db_info['name']
            hostname = self.db_info['hostname']
            user = self.db_info['user']
            pwd = self.db_info['pwd']
            label_coll_name = self.db_info['label_coll_name']
            mode = self.db_info['mode']
            # history_coll_name = self.db_info['history_coll_name']
            port = self.db_info['port']
            try:
                print("Trying db connection...")
                client = MongoClient(hostname)
                client[dbName].authenticate(user, pwd)
                db = client[dbName]
                # history_collection = db[history_coll_name]
                print("Connected to mongodb @ {0}:[{1}]".format(
                    hostname, port))
            except Exception as E:
                print("Fail to connect mongodb @ {0}:{1}, {2}".format(
                    hostname, port, E))
                # NOTE(review): exit() kills the whole process on a
                # connection failure; consider raising instead.
                exit()
            start_time = time.time()
            print("Saving database. This might take a while...")
            if mode == 'rewrite':
                # The database is deleted completely and the whole set of
                # labels and predictions in data are loaded
                label_collection = db[label_coll_name]
                label_collection.drop()
            # Open collection, or create it, if it does not exist.
            label_collection = db[label_coll_name]
            for i, w in enumerate(df_labels.index):
                # For each wid, create the corresponding data dictionary to
                # send to the db
                dataw = {}
                dataw['relabel'] = df_labels.loc[w, ('info', 'relabel')]
                dataw['marker'] = df_labels.loc[w, ('info', 'marker')]
                dataw['userId'] = df_labels.loc[w, ('info', 'userId')]
                dataw['date'] = df_labels.loc[w, ('info', 'date')]
                dataw['weight'] = df_labels.loc[w, ('info', 'weight')]
                dataw['label'] = {}
                for c in self.categories:
                    dataw['label'][c] = df_labels.loc[w, ('label', c)]
                # Store in db.
                if mode == 'rewrite':
                    # Insert data in the database.
                    # NOTE(review): Collection.insert() is deprecated in
                    # pymongo 3; insert_one() is the modern equivalent.
                    label_collection.insert({'idna': w, 'value': dataw})
                else:  # mode == 'update'
                    # The database is updated. Only the wids in dataw are
                    # modified.
                    label_collection.replace_one(
                        {'idna': w}, {'idna': w, 'value': dataw}, upsert=True)
                print(("\rSaving entry {0} out of {1}. Speed {2} entries" +
                       "/min").format(i + 1, len(df_labels), 60 * (i+1) /
                                      (time.time() - start_time)), end="")
    def exportLabels(self, df_labels, category):
        """ Export labels to a csv file.

        :Args:
            :df_labels: Pandas dataframe of labels.
            :category: Category to be exported.
        :Returns:
            :-: None. The result is the saved csv file.
        """
        # path_labels :Keeps web identifiers and labels only.
        path_labels = self.output_path + category + self.labels_endname + \
            '.csv'
        # Keep a copy of the original file of labels, just in case some
        # mistakes are made during labelling
        # date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
        # dest_file = self.used_path + category + self.labels_endname + \
        #             '_out' + date_str + '.csv'
        # if os.path.isfile(path_labels):
        #     shutil.move(path_labels, dest_file)
        # Copy the data to be exported into a list of 'wid;label' rows.
        # NOTE(review): every row of the dataframe is exported, including
        # wids whose label is still unknown -- an earlier comment claimed
        # unknown labels were skipped; confirm which behavior is intended.
        data_out = []
        for wid in df_labels.index:
            data_out.append(
                wid + ";" + str(df_labels.loc[wid, ('label', category)]))
        # Export data
        with open(path_labels, "w") as f:
            f.writelines(list("%s\r\n" % item for item in data_out))
def exportHistory(self, labelhistory):
""" Saves label history in a pickle and a csv files.
Args:
:labelhistory:
Returns:
Nothing
"""
# Three label files will be stored:
# path_labelh (.csv): stores a whole record for each label.
path_labelh = self.output_path + self.labelhistory_fname + '.csv'
# Keep a copy of the original file of labels, just in case some
# mistakes are made during labelling
date_str = datetime.now().strftime("%Y%m%d%H%M%S%f")
dest_file = self.used_path + self.labelhistory_fname + '_out' + \
date_str + '.csv'
if os.path.isfile(path_labelh):
shutil.move(path_labelh, dest_file)
# Append new label record to the history file
print('Updating history file')
data_out = []
for wid in labelhistory:
for tid in labelhistory[wid]:
tags = list(labelhistory[wid][tid])
tags.sort()
text = ''.join(str(t) + ";" + str(labelhistory[wid][tid][t]) +
";" for t in tags)
data_out.append(text)
with open(path_labelh, "w") as f:
f.writelines(list("%s\r\n" % item for item in data_out))
    def migrate2DB(self, df_labels):
        """ Migrate all labeled urls in df_labels to a mongo db.

            The db collection, if it exists, is droped when
            self.db_info['file2db_mode'] is 'rewrite'.

            This function is deprecated because the 'data' dictionary is no
            longer used.

        :Args:
            :df_labels: Pandas dataframe containing the labels to save.
        """
        # Start a db connection
        dbName = self.db_info['name']
        hostname = self.db_info['hostname']
        user = self.db_info['user']
        pwd = self.db_info['pwd']
        label_coll_name = self.db_info['label_coll_name']
        file2db_mode = self.db_info['file2db_mode']
        # history_coll_name = self.db_info['history_coll_name']
        port = self.db_info['port']
        try:
            print("Trying db connection...")
            client = MongoClient(hostname)
            client[dbName].authenticate(user, pwd)
            db = client[dbName]
            print("Connected to mongodb @ {0}:[{1}]".format(hostname, port))
        except Exception as E:
            sys.exit("Fail to connect mongodb @ {0}:{1}, {2}".format(
                hostname, port, E))
        print("Saving database. This might take a while...")
        start_time = time.time()
        if file2db_mode == 'rewrite':
            # The database is deleted completely and the whole set of
            # labels and predictions in data are loaded
            label_collection = db[label_coll_name]
            label_collection.drop()
        label_collection = db[label_coll_name]
        for i, w in enumerate(df_labels.index):
            # For each wid, create the corresponding data dictionary to
            # send to the db
            dataw = {}
            dataw['relabel'] = df_labels.loc[w, ('info', 'relabel')]
            dataw['marker'] = df_labels.loc[w, ('info', 'marker')]
            dataw['userId'] = df_labels.loc[w, ('info', 'userId')]
            dataw['date'] = df_labels.loc[w, ('info', 'date')]
            dataw['weight'] = df_labels.loc[w, ('info', 'weight')]
            dataw['label'] = {}
            for c in self.categories:
                dataw['label'][c] = df_labels.loc[w, ('label', c)]
            # Store in db.
            if file2db_mode == 'rewrite':
                # Insert data in the database.
                # NOTE(review): Collection.insert() is deprecated in
                # pymongo 3; insert_one() is the modern equivalent.
                label_collection.insert({'idna': w, 'value': dataw})
            else:  # mode == 'update'
                # The database is updated. Only the wids in dataw are
                # modified.
                label_collection.replace_one(
                    {'idna': w}, {'idna': w, 'value': dataw}, upsert=True)
            print(("\rSaving entry {0} out of {1}. Speed {2} entries" +
                   "/min").format(i + 1, len(df_labels), 60 * (i+1) /
                                  (time.time() - start_time)), end="")
    def migrate2file(self):
        ''' Migrate all labeled urls in the mongo db to a pickle data file.

            WARNING: THIS METHOD IS UNDER CONSTRUCTION.
            It currently only opens the db connection and the label
            collection; the actual export loop below is still commented
            out (it referenced the removed 'data' dictionary).
        '''
        # Start a db connection
        dbName = self.db_info['name']
        hostname = self.db_info['hostname']
        user = self.db_info['user']
        pwd = self.db_info['pwd']
        label_coll_name = self.db_info['label_coll_name']
        # history_coll_name = self.db_info['history_coll_name']
        port = self.db_info['port']
        try:
            print("Trying db connection...")
            client = MongoClient(hostname)
            client[dbName].authenticate(user, pwd)
            db = client[dbName]
            print("Connected to mongodb @ {0}:[{1}]".format(hostname, port))
        except Exception as E:
            sys.exit("Fail to connect mongodb @ {0}:{1}, {2}".format(
                hostname, port, E))
        print("Open collection...")
        label_collection = db[label_coll_name]
        start_time = time.time()
        # count = 0
        # for i, w in enumerate(data):
        #     # Only the wids with at least some label are migrated to the db.
        #     lab_list = [data[w]['label'][c] for c in self.categories]
        #     if self._yes in lab_list or self._no in lab_list:
        #         count += 1
        #         label_collection.insert({'idna': w, 'value': data[w]})
        #     print ("\rSaving entry {0} out of {1}. Speed {2} entries" +
        #            "/min").format(i + 1, len(data), 60 * (i+1) /
        #                           (time.time() - start_time)),
        # print ""
        # print "Migration finished: {0} out of {1} wids saved in DB".format(
        #     count, len(data))
| 40.16763
| 80
| 0.509816
|
4a11d4ac72167965bae7590b44f1a621c792e1eb
| 506
|
py
|
Python
|
setup.py
|
nhuthai/SAN
|
f35ab6b48947ad8663b722a13a60c8769357893e
|
[
"MIT"
] | null | null | null |
setup.py
|
nhuthai/SAN
|
f35ab6b48947ad8663b722a13a60c8769357893e
|
[
"MIT"
] | null | null | null |
setup.py
|
nhuthai/SAN
|
f35ab6b48947ad8663b722a13a60c8769357893e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='SAN',
version='0.0',
author='Nhut Hai Huynh',
author_email='nhut.h.huynh@fh-kiel.de',
packages=find_packages(),
install_requires=[
'click',
'colorama',
'matplotlib',
'natsort',
'numpy',
'scipy',
'scikit-image',
'SimpleITK',
'voluptuous'
],
entry_points='''
[console_scripts]
SAN=SAN.cli:main
''',
)
| 18.071429
| 43
| 0.533597
|
4a11d5009d5bb312d284002e2c59ce212109531d
| 1,624
|
py
|
Python
|
alipay/aop/api/response/MybankPaymentTradeNormalpayTransferResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/response/MybankPaymentTradeNormalpayTransferResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/response/MybankPaymentTradeNormalpayTransferResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class MybankPaymentTradeNormalpayTransferResponse(AlipayResponse):
    """Response object for the mybank.payment.trade.normalpay.transfer API.

    Exposes the fields returned by the gateway (order_no,
    request_accept_time, request_no, retry) as read/write properties.
    """

    # Names of the payload fields copied verbatim from the response.
    _FIELDS = ('order_no', 'request_accept_time', 'request_no', 'retry')

    def __init__(self):
        super(MybankPaymentTradeNormalpayTransferResponse, self).__init__()
        # Backing attributes, populated by parse_response_content().
        self._order_no = None
        self._request_accept_time = None
        self._request_no = None
        self._retry = None

    @property
    def order_no(self):
        return self._order_no

    @order_no.setter
    def order_no(self, new_value):
        self._order_no = new_value

    @property
    def request_accept_time(self):
        return self._request_accept_time

    @request_accept_time.setter
    def request_accept_time(self, new_value):
        self._request_accept_time = new_value

    @property
    def request_no(self):
        return self._request_no

    @request_no.setter
    def request_no(self, new_value):
        self._request_no = new_value

    @property
    def retry(self):
        return self._retry

    @retry.setter
    def retry(self, new_value):
        self._retry = new_value

    def parse_response_content(self, response_content):
        """Populate every known field present in the parsed payload."""
        response = super(MybankPaymentTradeNormalpayTransferResponse, self).parse_response_content(response_content)
        for field in self._FIELDS:
            if field in response:
                setattr(self, field, response[field])
| 29
| 116
| 0.676724
|
4a11d5857cea1f9b0e2951b5de9a348cf4dfd27b
| 2,406
|
py
|
Python
|
edward2/tensorflow/losses_test.py
|
google/edward2
|
5574e773ca4ff5f36a5d9bf3b75ac8505973aa4b
|
[
"Apache-2.0"
] | 591
|
2019-08-13T20:48:00.000Z
|
2022-03-31T03:34:32.000Z
|
edward2/tensorflow/losses_test.py
|
google/edward2
|
5574e773ca4ff5f36a5d9bf3b75ac8505973aa4b
|
[
"Apache-2.0"
] | 162
|
2019-08-15T00:39:09.000Z
|
2022-03-25T07:19:15.000Z
|
edward2/tensorflow/losses_test.py
|
google/edward2
|
5574e773ca4ff5f36a5d9bf3b75ac8505973aa4b
|
[
"Apache-2.0"
] | 78
|
2019-08-13T20:45:13.000Z
|
2022-03-10T11:54:18.000Z
|
# coding=utf-8
# Copyright 2021 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ed.losses."""
from absl.testing import parameterized
import edward2 as ed
import numpy as np
import tensorflow as tf
def test_cases_uce():
  """Build named test cases for the UCE loss over every combination of
  label sparsity, number of classes and entropy regularizer."""
  cases = [
      {
          'testcase_name': f'_uce_loss_{sparse}_{num_classes}_{entropy_reg}',
          'sparse': sparse,
          'num_classes': num_classes,
          'entropy_reg': entropy_reg,
      }
      for sparse in (True, False)
      for num_classes in (2, 6)
      for entropy_reg in (1e-5, 1e-2)
  ]
  return parameterized.named_parameters(*cases)
class LossesTest(tf.test.TestCase, parameterized.TestCase):
  """Smoke and value tests for ed.losses.uce_loss."""

  def _generate_data(self, sparse, num_classes):
    """Draw random (alphas, labels) tensors; labels are one-hot encoded
    unless sparse labels were requested."""
    targets = np.random.randint(low=0, high=num_classes, size=(16,))
    if not sparse:
      targets = np.eye(num_classes)[targets]
    concentrations = np.random.random(size=(16, num_classes))
    return (tf.convert_to_tensor(concentrations, dtype=tf.float32),
            tf.convert_to_tensor(targets, dtype=tf.float32))

  @test_cases_uce()
  def test_uce_loss(self, sparse, num_classes, entropy_reg):
    """The UCE loss evaluates without error for every configuration."""
    alphas, labels = self._generate_data(sparse, num_classes)
    loss_fn = ed.losses.uce_loss(entropy_reg=entropy_reg,
                                 sparse=sparse,
                                 num_classes=num_classes)
    self.assertIsNotNone(loss_fn(labels, alphas))

  def test_uce_value(self):
    """The UCE loss matches a precomputed reference value."""
    labels = np.random.randint(low=0, high=5, size=(16,))
    alphas = np.eye(5)[labels].astype(np.float32) + 1.
    loss_fn = ed.losses.uce_loss(sparse=True, entropy_reg=1e-6, num_classes=5)
    self.assertAllClose(loss_fn(labels, alphas), 1.2833369,
                        rtol=1e-3, atol=1e-3)
if __name__ == '__main__':
  # Run the test suite when executed as a script.
  tf.test.main()
| 34.371429
| 79
| 0.692436
|
4a11d624c369e7e52d4025f1e90770267d1b2a6b
| 2,041
|
py
|
Python
|
profiles_api/models.py
|
vasicbre/profiles-rest-api
|
6487199e70ed05a6f8a3786b0a543a0414167163
|
[
"MIT"
] | null | null | null |
profiles_api/models.py
|
vasicbre/profiles-rest-api
|
6487199e70ed05a6f8a3786b0a543a0414167163
|
[
"MIT"
] | 8
|
2019-12-04T23:48:45.000Z
|
2022-02-10T15:58:17.000Z
|
profiles_api/models.py
|
vasicbre/profiles-rest-api
|
6487199e70ed05a6f8a3786b0a543a0414167163
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.conf import settings
class UserProfileManager(BaseUserManager):
    """Manager for user profiles"""

    def create_user(self, email, name, password=None):
        """Create and save a new user profile.

        :param email: unique email address (required)
        :param name: display name
        :param password: optional plain-text password, stored hashed
        :raises ValueError: if no email is given
        :return: the saved user instance
        """
        if not email:
            raise ValueError('User must specify an email address')

        # Lowercase the domain part of the address for consistency.
        email = self.normalize_email(email)
        user = self.model(email=email, name=name)

        # Store a salted hash, never the plain-text password.
        user.set_password(password)
        user.save(using=self._db)

        return user

    def create_superuser(self, email, name, password):
        """Create and save a new superuser with given details."""
        user = self.create_user(email, name, password)

        user.is_superuser = True
        user.is_staff = True
        user.save(using=self._db)

        # Bug fix: Django's manager contract expects the created user to
        # be returned (e.g. by the createsuperuser command).
        return user
class UserProfile(AbstractBaseUser, PermissionsMixin):
    """Database model for users in the system"""
    # Unique login identifier (see USERNAME_FIELD below).
    email = models.EmailField(max_length=255, unique=True)
    name = models.CharField(max_length=255)
    # Inactive users cannot authenticate.
    is_active = models.BooleanField(default=True)
    # Grants access to the Django admin site.
    is_staff = models.BooleanField(default=False)
    # All object queries go through the custom manager.
    objects = UserProfileManager()
    # Authenticate with the email address instead of a username.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['name']
    def get_full_name(self):
        """Retrieve full name of user"""
        return self.name
    def get_short_name(self):
        """Retrieve short name of user"""
        return self.name
    def __str__(self):
        """Return string representation of user"""
        return self.email
class ProfileFeedItem(models.Model):
    """Profile status update"""
    # Author of the status; deleting the user removes their feed items.
    user_profile = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE
    )
    status_text = models.CharField(max_length=255)
    # Timestamp set automatically on creation.
    created_on = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        """Return the model as a string"""
        return self.status_text
| 30.462687
| 66
| 0.682999
|
4a11d624d88df1b7ce9b546957f89cc1b088f47d
| 15,357
|
py
|
Python
|
nltk/inference/prover9.py
|
tousif/nltk-gae
|
9181f8991d0566e693f82d0bb0479219c3fc8768
|
[
"Apache-2.0"
] | 3
|
2017-03-07T10:54:56.000Z
|
2019-10-18T20:36:56.000Z
|
nltk/inference/prover9.py
|
tousif/nltk-gae
|
9181f8991d0566e693f82d0bb0479219c3fc8768
|
[
"Apache-2.0"
] | null | null | null |
nltk/inference/prover9.py
|
tousif/nltk-gae
|
9181f8991d0566e693f82d0bb0479219c3fc8768
|
[
"Apache-2.0"
] | null | null | null |
# Natural Language Toolkit: Interface to the Prover9 Theorem Prover
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Dan Garrette <dhgarrette@gmail.com>
# Ewan Klein <ewan@inf.ed.ac.uk>
#
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
A theorem prover that makes use of the external 'Prover9' package.
"""
from __future__ import print_function
import os
import subprocess
import nltk
from nltk.sem.logic import LogicParser, ExistsExpression, AllExpression, \
NegatedExpression, AndExpression, IffExpression, OrExpression, \
EqualityExpression, ImpExpression
from api import BaseProverCommand, Prover
#
# Following is not yet used. Return code for 2 actually realized as 512.
#
# Map from prover9 exit codes to either a boolean proof outcome or a
# symbolic error name used when building exception messages.
p9_return_codes = {
    0: True,
    1: "(FATAL)",  # A fatal error occurred (user's syntax error).
    2: False,  # (SOS_EMPTY) Prover9 ran out of things to do
               #   (sos list exhausted).
    3: "(MAX_MEGS)",  # The max_megs (memory limit) parameter was exceeded.
    4: "(MAX_SECONDS)",  # The max_seconds parameter was exceeded.
    5: "(MAX_GIVEN)",  # The max_given parameter was exceeded.
    6: "(MAX_KEPT)",  # The max_kept parameter was exceeded.
    7: "(ACTION)",  # A Prover9 action terminated the search.
    101: "(SIGSEGV)",  # Prover9 crashed, most probably due to a bug.
}
class Prover9CommandParent(object):
    """
    A common base class used by both ``Prover9Command`` and ``MaceCommand``,
    which is responsible for maintaining a goal and a set of assumptions,
    and generating prover9-style input files from them.
    """
    def print_assumptions(self, output_format='nltk'):
        """
        Print the current assumptions, either in NLTK's native notation
        ('nltk') or in Prover9 syntax ('prover9').
        """
        fmt = output_format.lower()
        if fmt == 'nltk':
            for assumption in self.assumptions():
                print(assumption)
        elif fmt == 'prover9':
            for assumption in convert_to_prover9(self.assumptions()):
                print(assumption)
        else:
            raise NameError("Unrecognized value for 'output_format': %s" %
                            output_format)
class Prover9Command(Prover9CommandParent, BaseProverCommand):
    """
    A ``ProverCommand`` specific to the ``Prover9`` prover.  Inherits
    print_assumptions() from Prover9CommandParent so the assumption list
    can be printed in multiple formats.
    """
    def __init__(self, goal=None, assumptions=None, timeout=60, prover=None):
        """
        :param goal: Input expression to prove
        :type goal: sem.Expression
        :param assumptions: Input expressions to use as assumptions in
            the proof.
        :type assumptions: list(sem.Expression)
        :param timeout: number of seconds before timeout; set to 0 for
            no timeout.
        :type timeout: int
        :param prover: a prover.  If not set, one will be created.
        :type prover: Prover9
        """
        if prover is None:
            prover = Prover9(timeout)
        else:
            # A caller-supplied prover must really be a Prover9 instance.
            assert isinstance(prover, Prover9)
        BaseProverCommand.__init__(self, prover, goal, assumptions or [])

    def decorate_proof(self, proof_string, simplify=True):
        """
        :see BaseProverCommand.decorate_proof()
        """
        if not simplify:
            return proof_string.rstrip()
        # Strip prooftrans labels to get the simplified proof.
        return self._prover._call_prooftrans(proof_string,
                                             ['striplabels'])[0].rstrip()
class Prover9Parent(object):
    """
    A common class extended by both ``Prover9`` and ``Mace <mace.Mace>``.
    It contains the functionality required to convert NLTK-style
    expressions into Prover9-style expressions.
    """
    _binary_location = None

    def config_prover9(self, binary_location, verbose=False):
        """Configure where the prover9 executable lives.

        :param binary_location: path containing the prover9 binary, or
            None to reset the configuration.
        """
        if binary_location is None:
            self._binary_location = None
            self._prover9_bin = None
        else:
            name = 'prover9'
            self._prover9_bin = nltk.internals.find_binary(
                name,
                path_to_bin=binary_location,
                env_vars=['PROVER9HOME'],
                url='http://www.cs.unm.edu/~mccune/prover9/',
                binary_names=[name, name + '.exe'],
                verbose=verbose)
            # Bug fix: rsplit() returns a [directory, basename] list;
            # only the directory string should be stored, since
            # _find_binary() extends a list of directory strings with it.
            self._binary_location = self._prover9_bin.rsplit(os.path.sep, 1)[0]

    def prover9_input(self, goal, assumptions):
        """
        :return: The input string that should be provided to the
        prover9 binary. This string is formed based on the goal,
        assumptions, and timeout value of this object.
        """
        s = ''

        if assumptions:
            s += 'formulas(assumptions).\n'
            for p9_assumption in convert_to_prover9(assumptions):
                s += '    %s.\n' % p9_assumption
            s += 'end_of_list.\n\n'

        if goal:
            s += 'formulas(goals).\n'
            s += '    %s.\n' % convert_to_prover9(goal)
            s += 'end_of_list.\n\n'

        return s

    def binary_locations(self):
        """
        A list of directories that should be searched for the prover9
        executables. This list is used by ``config_prover9`` when searching
        for the prover9 executables.
        """
        return ['/usr/local/bin/prover9',
                '/usr/local/bin/prover9/bin',
                '/usr/local/bin',
                '/usr/bin',
                '/usr/local/prover9',
                '/usr/local/share/prover9']

    def _find_binary(self, name, verbose=False):
        """Locate *name*, honouring any location set via config_prover9."""
        binary_locations = self.binary_locations()
        if self._binary_location is not None:
            binary_locations += [self._binary_location]
        return nltk.internals.find_binary(name,
            searchpath=binary_locations,
            env_vars=['PROVER9HOME'],
            url='http://www.cs.unm.edu/~mccune/prover9/',
            binary_names=[name, name + '.exe'],
            verbose=verbose)

    def _call(self, input_str, binary, args=None, verbose=False):
        """
        Call the binary with the given input.

        :param input_str: A string whose contents are used as stdin.
        :param binary: The location of the binary to call
        :param args: A list of command-line arguments.
        :return: A tuple (stdout, returncode)
        :see: ``config_prover9``
        """
        # Bug fix: avoid a shared mutable default argument.
        if args is None:
            args = []
        if verbose:
            print('Calling:', binary)
            print('Args:', args)
            print('Input:\n', input_str, '\n')

        # Call prover9 via a subprocess
        cmd = [binary] + args
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             stdin=subprocess.PIPE)
        (stdout, stderr) = p.communicate(input_str)

        if verbose:
            print('Return code:', p.returncode)
            if stdout: print('stdout:\n', stdout, '\n')
            if stderr: print('stderr:\n', stderr, '\n')

        return (stdout, p.returncode)
def convert_to_prover9(input):
    """
    Convert a ``logic.Expression`` (or a list of them) to Prover9 format.

    :param input: a single expression or a list of expressions
    :return: a Prover9-formatted string, or a list of them
    """
    if isinstance(input, list):
        result = []
        for s in input:
            # Simplify each expression before conversion; report which
            # input failed and re-raise if the conversion errors out.
            try:
                result.append(_convert_to_prover9(s.simplify()))
            except Exception:
                # Bug fix: was a bare 'except:', which also intercepted
                # KeyboardInterrupt/SystemExit before re-raising.
                print('input %s cannot be converted to Prover9 input syntax' % input)
                raise
        return result
    else:
        try:
            return _convert_to_prover9(input.simplify())
        except Exception:
            print('input %s cannot be converted to Prover9 input syntax' % input)
            raise
def _convert_to_prover9(expression):
    """
    Convert ``logic.Expression`` to Prover9 formatted string.

    Recurses over the expression tree, emitting Prover9 syntax for each
    quantifier ('exists'/'all'), negation ('-'), and binary connective
    ('&', '|', '->', '<->', '=').  Anything unrecognized falls through
    to str(expression).
    """
    if isinstance(expression, ExistsExpression):
        return 'exists ' + str(expression.variable) + ' ' + _convert_to_prover9(expression.term)
    elif isinstance(expression, AllExpression):
        return 'all ' + str(expression.variable) + ' ' + _convert_to_prover9(expression.term)
    elif isinstance(expression, NegatedExpression):
        return '-(' + _convert_to_prover9(expression.term) + ')'
    elif isinstance(expression, AndExpression):
        return '(' + _convert_to_prover9(expression.first) + ' & ' + \
               _convert_to_prover9(expression.second) + ')'
    elif isinstance(expression, OrExpression):
        return '(' + _convert_to_prover9(expression.first) + ' | ' + \
               _convert_to_prover9(expression.second) + ')'
    elif isinstance(expression, ImpExpression):
        return '(' + _convert_to_prover9(expression.first) + ' -> ' + \
               _convert_to_prover9(expression.second) + ')'
    elif isinstance(expression, IffExpression):
        return '(' + _convert_to_prover9(expression.first) + ' <-> ' + \
               _convert_to_prover9(expression.second) + ')'
    elif isinstance(expression, EqualityExpression):
        return '(' + _convert_to_prover9(expression.first) + ' = ' + \
               _convert_to_prover9(expression.second) + ')'
    else:
        # Atoms and applications already render correctly via str().
        return str(expression)
class Prover9(Prover9Parent, Prover):
    """Interface to the external 'prover9' theorem prover binary."""
    _prover9_bin = None
    _prooftrans_bin = None

    def __init__(self, timeout=60):
        """
        :param timeout: number of seconds before prover9 gives up and
            returns false; use 0 for no timeout.
        """
        # Fix: the timeout description used to be a no-op string
        # statement after this assignment; it is now real documentation.
        self._timeout = timeout

    def _prove(self, goal=None, assumptions=None, verbose=False):
        """
        Use Prover9 to prove a theorem.
        :return: A pair whose first element is a boolean indicating if the
        proof was successful (i.e. returns value of 0) and whose second element
        is the output of the prover.
        """
        if not assumptions:
            assumptions = []

        stdout, returncode = self._call_prover9(self.prover9_input(goal, assumptions),
                                                verbose=verbose)
        return (returncode == 0, stdout)

    def prover9_input(self, goal, assumptions):
        """
        :see: Prover9Parent.prover9_input
        """
        s = 'clear(auto_denials).\n' #only one proof required
        return s + Prover9Parent.prover9_input(self, goal, assumptions)

    def _call_prover9(self, input_str, args=None, verbose=False):
        """
        Call the ``prover9`` binary with the given input.

        :param input_str: A string whose contents are used as stdin.
        :param args: A list of command-line arguments.
        :return: A tuple (stdout, returncode)
        :raises Prover9LimitExceededException: when a resource limit
            (memory, time, given, kept) was hit.
        :raises Prover9FatalException: on any other prover9 error.
        :see: ``config_prover9``
        """
        # Bug fix: avoid a shared mutable default argument.
        if args is None:
            args = []
        if self._prover9_bin is None:
            self._prover9_bin = self._find_binary('prover9', verbose)

        updated_input_str = ''
        if self._timeout > 0:
            updated_input_str += 'assign(max_seconds, %d).\n\n' % self._timeout
        updated_input_str += input_str

        stdout, returncode = self._call(updated_input_str, self._prover9_bin, args, verbose)

        # 0 = proof found, 2 = sos exhausted; anything else is an error.
        if returncode not in [0, 2]:
            errormsgprefix = '%%ERROR:'
            if errormsgprefix in stdout:
                msgstart = stdout.index(errormsgprefix)
                errormsg = stdout[msgstart:].strip()
            else:
                errormsg = None
            if returncode in [3, 4, 5, 6]:
                # Resource-limit codes (see p9_return_codes).
                raise Prover9LimitExceededException(returncode, errormsg)
            else:
                raise Prover9FatalException(returncode, errormsg)

        return stdout, returncode

    def _call_prooftrans(self, input_str, args=None, verbose=False):
        """
        Call the ``prooftrans`` binary with the given input.

        :param input_str: A string whose contents are used as stdin.
        :param args: A list of command-line arguments.
        :return: A tuple (stdout, returncode)
        :see: ``config_prover9``
        """
        # Bug fix: avoid a shared mutable default argument.
        if args is None:
            args = []
        if self._prooftrans_bin is None:
            self._prooftrans_bin = self._find_binary('prooftrans', verbose)

        return self._call(input_str, self._prooftrans_bin, args, verbose)
class Prover9Exception(Exception):
    """Base class for errors reported by the prover9 binaries."""

    def __init__(self, returncode, message):
        # Translate the exit code into its human-readable description,
        # appending any error text extracted from prover9's output.
        details = p9_return_codes[returncode]
        if message:
            details += '\n%s' % message
        Exception.__init__(self, details)
class Prover9FatalException(Prover9Exception):
    """Raised for prover9 exit codes other than the limit codes (3-6)."""
    pass
class Prover9LimitExceededException(Prover9Exception):
    """Raised for prover9 exit codes 3-6, treated as exceeded search limits."""
    pass
######################################################################
#{ Tests and Demos
######################################################################
def test_config():
    """Smoke-test that the prover9 binary can be located and invoked."""
    a = LogicParser().parse('(walk(j) & sing(j))')
    g = LogicParser().parse('walk(j)')
    p = Prover9Command(g, assumptions=[a])
    # Clear any cached binary location so the search machinery is exercised
    # from scratch — presumably forcing re-discovery; TODO confirm.
    p._executable_path = None
    p.prover9_search=[]
    p.prove()
    #config_prover9('/usr/local/bin')
    print(p.prove())
    print(p.proof())
def test_convert_to_prover9(expr):
    """Parse each formula in *expr* and print its Prover9 rendering."""
    for formula in expr:
        parsed = LogicParser().parse(formula)
        print(convert_to_prover9(parsed))
def test_prove(arguments):
    """
    Try some proofs and exhibit the results.
    """
    for goal, assumptions in arguments:
        goal_expr = LogicParser().parse(goal)
        assumption_exprs = [LogicParser().parse(a) for a in assumptions]
        proved = Prover9Command(goal_expr, assumptions=assumption_exprs).prove()
        for a in assumption_exprs:
            print(' %s' % a)
        print('|- %s: %s\n' % (goal_expr, proved))
# (goal, assumptions) pairs exercised by test_prove(); some goals (e.g.
# '(man(x) & (not man(x)))') are contradictions and thus not provable.
arguments = [
    ('(man(x) <-> (not (not man(x))))', []),
    ('(not (man(x) & (not man(x))))', []),
    ('(man(x) | (not man(x)))', []),
    ('(man(x) & (not man(x)))', []),
    ('(man(x) -> man(x))', []),
    ('(not (man(x) & (not man(x))))', []),
    ('(man(x) | (not man(x)))', []),
    ('(man(x) -> man(x))', []),
    ('(man(x) <-> man(x))', []),
    ('(not (man(x) <-> (not man(x))))', []),
    ('mortal(Socrates)', ['all x.(man(x) -> mortal(x))', 'man(Socrates)']),
    ('((all x.(man(x) -> walks(x)) & man(Socrates)) -> some y.walks(y))', []),
    ('(all x.man(x) -> all x.man(x))', []),
    ('some x.all y.sees(x,y)', []),
    ('some e3.(walk(e3) & subj(e3, mary))',
     ['some e1.(see(e1) & subj(e1, john) & some e2.(pred(e1, e2) & walk(e2) & subj(e2, mary)))']),
    ('some x e1.(see(e1) & subj(e1, x) & some e2.(pred(e1, e2) & walk(e2) & subj(e2, mary)))',
     ['some e1.(see(e1) & subj(e1, john) & some e2.(pred(e1, e2) & walk(e2) & subj(e2, mary)))'])
]
# Formulas exercised by test_convert_to_prover9().
expressions = [r'some x y.sees(x,y)',
               r'some x.(man(x) & walks(x))',
               r'\x.(man(x) & walks(x))',
               r'\x y.sees(x,y)',
               r'walks(john)',
               r'\x.big(x, \y.mouse(y))',
               r'(walks(x) & (runs(x) & (threes(x) & fours(x))))',
               r'(walks(x) -> runs(x))',
               r'some x.(PRO(x) & sees(John, x))',
               r'some x.(man(x) & (not walks(x)))',
               r'all x.(man(x) -> walks(x))']
def spacer(num=45):
    """Print a horizontal rule made of ``num`` dash characters."""
    print('-' * num)
def demo():
    """Run the demonstration suite: config check, conversion, and proofs."""
    print("Testing configuration")
    spacer()
    test_config()
    print()
    print("Testing conversion to Prover9 format")
    spacer()
    test_convert_to_prover9(expressions)
    print()
    print("Testing proofs")
    spacer()
    test_prove(arguments)
if __name__ == '__main__':
demo()
| 35.880841
| 101
| 0.573094
|
4a11d653bc58e03daf03e07b4ac573455efce68b
| 1,855
|
py
|
Python
|
jet_bridge/utils/settings.py
|
kingemma0430/jet-bridge
|
7cb68a0232850c28ebf9c474b031db507845feb5
|
[
"MIT"
] | 2
|
2020-07-27T22:18:42.000Z
|
2021-11-15T10:03:14.000Z
|
jet_bridge/utils/settings.py
|
kingemma0430/jet-bridge
|
7cb68a0232850c28ebf9c474b031db507845feb5
|
[
"MIT"
] | null | null | null |
jet_bridge/utils/settings.py
|
kingemma0430/jet-bridge
|
7cb68a0232850c28ebf9c474b031db507845feb5
|
[
"MIT"
] | null | null | null |
import os
from six.moves import configparser
from tornado.options import Error
def parse_environment(self, final=True):
    """Parse option values from ``os.environ`` into ``self._options``.

    Variable names are normalized via ``self._normalize_name`` and
    lower-cased before matching; unknown variables are ignored.

    :param final: accepted for interface parity; not used here.
    """
    for name in os.environ:
        normalized = self._normalize_name(name).lower()
        if normalized not in self._options:
            continue
        option = self._options[normalized]
        value = os.environ[name]
        if option.multiple and not isinstance(value, (list, str)):
            # NOTE(review): os.environ values are always str, so this branch
            # looks unreachable; kept for parity with parse_config_file.
            raise Error("Option %r is required to be a list of %s "
                        "or a comma-separated string" %
                        (option.name, option.type.__name__))
        # Fix: use isinstance() instead of the type(x) == str anti-pattern.
        if isinstance(value, str) and option.type != str:
            option.parse(value)
        else:
            option.set(value)
def parse_config_file(self, path, section, final=True):
    """Parse option values from one section of an ini-style config file.

    :param path: config file path; raises ``IOError`` if it cannot be read.
    :param section: section name to load; raises ``ValueError`` if missing.
    :param final: accepted for interface parity; not used here.
    """
    config_parser = configparser.ConfigParser()
    # ConfigParser.read returns the list of files successfully parsed;
    # an empty list means the file was absent or unreadable.
    if not config_parser.read(path):
        raise IOError('Config file at path "{}" not found'.format(path))
    try:
        config = config_parser[section]
    except KeyError:
        # Fix: the original message had a stray trailing bracket ("section]").
        raise ValueError('Config file does not have [{}] section'.format(section))
    for name in config:
        normalized = self._normalize_name(name).lower()
        if normalized not in self._options:
            continue
        option = self._options[normalized]
        value = config[name]
        if option.multiple and not isinstance(value, (list, str)):
            raise Error("Option %r is required to be a list of %s "
                        "or a comma-separated string" %
                        (option.name, option.type.__name__))
        # Fix: use isinstance() instead of the type(x) == str anti-pattern.
        if isinstance(value, str) and option.type != str:
            option.parse(value)
        else:
            option.set(value)
| 37.1
| 83
| 0.568733
|
4a11d831e8def86905738dc15ddebd7faaba21cd
| 638
|
py
|
Python
|
NotificationSystem/manage.py
|
ValentinaRodrigues/notification-system
|
35dbcea4e72d5e1f8cd5d4f5a435d83905611f31
|
[
"MIT"
] | 2
|
2020-03-14T22:03:48.000Z
|
2020-04-07T14:04:50.000Z
|
NotificationSystem/manage.py
|
ValentinaRodrigues/notification-system
|
35dbcea4e72d5e1f8cd5d4f5a435d83905611f31
|
[
"MIT"
] | null | null | null |
NotificationSystem/manage.py
|
ValentinaRodrigues/notification-system
|
35dbcea4e72d5e1f8cd5d4f5a435d83905611f31
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for the NotificationSystem project."""
    # Ensure the settings module is set before Django is imported.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'NotificationSystem.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 29
| 82
| 0.688088
|
4a11d8600507902864884470a5d40861fe7551e1
| 853
|
py
|
Python
|
shopping_app/db/managers/manager.py
|
gr1d99/flask-shopping-list
|
de5aa20b3378eb63e040440da77afd79c48f2fd6
|
[
"MIT"
] | 1
|
2017-10-16T17:09:30.000Z
|
2017-10-16T17:09:30.000Z
|
shopping_app/db/managers/manager.py
|
gr1d99/flask-shopping-list
|
de5aa20b3378eb63e040440da77afd79c48f2fd6
|
[
"MIT"
] | 5
|
2017-09-19T07:50:11.000Z
|
2021-06-01T21:54:45.000Z
|
shopping_app/db/managers/manager.py
|
gr1d99/flask-shopping-list
|
de5aa20b3378eb63e040440da77afd79c48f2fd6
|
[
"MIT"
] | 1
|
2017-09-29T07:37:37.000Z
|
2017-09-29T07:37:37.000Z
|
"""Contain User class managers"""
from ..user import User
from .base_manager import BaseUserManager
class UserManager(BaseUserManager):
    """a manager class for User class, responsible for creation of user instances"""

    def __init__(self):
        super(UserManager, self).__init__()
        self.model = User()

    def create_user(self, username=None, email=None, password=None):
        """
        create user instance but first ensure that required data is provided

        :param username: desired user name; required.
        :param email: user email address; may be omitted.
        :param password: plain-text password; required.
        :raises ValueError: if ``username`` or ``password`` is empty/missing.
        :return: result of ``self._create_user``.
        """
        # Fix: the original interpolated the *value* of the missing field
        # (which is falsy here), producing messages such as
        # "None cannot be empty"; report the field name instead.
        if not username:
            raise ValueError("username cannot be empty")

        if not password:
            raise ValueError("password cannot be empty")

        return self._create_user(username, email, password)
| 31.592593
| 84
| 0.644783
|
4a11d92706ff48f356d251b9a8a902bfdd3d6374
| 9,973
|
py
|
Python
|
official/vision/beta/modeling/retinanet_model_test.py
|
agentmorris/models
|
f2d90e1a1e146420dceff439cf1ecc2162c7a103
|
[
"Apache-2.0"
] | 2
|
2017-10-26T06:23:51.000Z
|
2020-09-11T21:09:41.000Z
|
official/vision/beta/modeling/retinanet_model_test.py
|
whittyman98/models
|
3dacd474364813781c0154d881055502ce09d79c
|
[
"Apache-2.0"
] | 2
|
2018-06-18T17:08:12.000Z
|
2021-04-12T05:39:04.000Z
|
official/vision/beta/modeling/retinanet_model_test.py
|
whittyman98/models
|
3dacd474364813781c0154d881055502ce09d79c
|
[
"Apache-2.0"
] | 2
|
2020-04-11T19:31:17.000Z
|
2021-04-07T12:53:28.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for RetinaNet models."""
# Import libraries
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import strategy_combinations
from official.vision.beta.modeling import retinanet_model
from official.vision.beta.modeling.backbones import resnet
from official.vision.beta.modeling.decoders import fpn
from official.vision.beta.modeling.heads import dense_prediction_heads
from official.vision.beta.modeling.layers import detection_generator
from official.vision.beta.ops import anchor
class RetinaNetTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.parameters(
{
'use_separable_conv': True,
'build_anchor_boxes': True,
'is_training': False,
'has_att_heads': False
},
{
'use_separable_conv': False,
'build_anchor_boxes': True,
'is_training': False,
'has_att_heads': False
},
{
'use_separable_conv': False,
'build_anchor_boxes': False,
'is_training': False,
'has_att_heads': False
},
{
'use_separable_conv': False,
'build_anchor_boxes': False,
'is_training': True,
'has_att_heads': False
},
{
'use_separable_conv': False,
'build_anchor_boxes': True,
'is_training': True,
'has_att_heads': True
},
{
'use_separable_conv': False,
'build_anchor_boxes': True,
'is_training': False,
'has_att_heads': True
},
)
def test_build_model(self, use_separable_conv, build_anchor_boxes,
is_training, has_att_heads):
num_classes = 3
min_level = 3
max_level = 7
num_scales = 3
aspect_ratios = [1.0]
anchor_size = 3
fpn_num_filters = 256
head_num_convs = 4
head_num_filters = 256
num_anchors_per_location = num_scales * len(aspect_ratios)
image_size = 384
images = np.random.rand(2, image_size, image_size, 3)
image_shape = np.array([[image_size, image_size], [image_size, image_size]])
if build_anchor_boxes:
anchor_boxes = anchor.Anchor(
min_level=min_level,
max_level=max_level,
num_scales=num_scales,
aspect_ratios=aspect_ratios,
anchor_size=anchor_size,
image_size=(image_size, image_size)).multilevel_boxes
for l in anchor_boxes:
anchor_boxes[l] = tf.tile(
tf.expand_dims(anchor_boxes[l], axis=0), [2, 1, 1, 1])
else:
anchor_boxes = None
if has_att_heads:
attribute_heads = {'depth': ('regression', 1)}
else:
attribute_heads = None
backbone = resnet.ResNet(model_id=50)
decoder = fpn.FPN(
input_specs=backbone.output_specs,
min_level=min_level,
max_level=max_level,
num_filters=fpn_num_filters,
use_separable_conv=use_separable_conv)
head = dense_prediction_heads.RetinaNetHead(
min_level=min_level,
max_level=max_level,
num_classes=num_classes,
attribute_heads=attribute_heads,
num_anchors_per_location=num_anchors_per_location,
use_separable_conv=use_separable_conv,
num_convs=head_num_convs,
num_filters=head_num_filters)
generator = detection_generator.MultilevelDetectionGenerator(
max_num_detections=10)
model = retinanet_model.RetinaNetModel(
backbone=backbone,
decoder=decoder,
head=head,
detection_generator=generator,
min_level=min_level,
max_level=max_level,
num_scales=num_scales,
aspect_ratios=aspect_ratios,
anchor_size=anchor_size)
_ = model(images, image_shape, anchor_boxes, training=is_training)
@combinations.generate(
combinations.combine(
strategy=[
strategy_combinations.cloud_tpu_strategy,
strategy_combinations.one_device_strategy_gpu,
],
image_size=[
(128, 128),
],
training=[True, False],
has_att_heads=[True, False],
))
def test_forward(self, strategy, image_size, training, has_att_heads):
"""Test for creation of a R50-FPN RetinaNet."""
tf.keras.backend.set_image_data_format('channels_last')
num_classes = 3
min_level = 3
max_level = 7
num_scales = 3
aspect_ratios = [1.0]
num_anchors_per_location = num_scales * len(aspect_ratios)
images = np.random.rand(2, image_size[0], image_size[1], 3)
image_shape = np.array(
[[image_size[0], image_size[1]], [image_size[0], image_size[1]]])
with strategy.scope():
anchor_gen = anchor.build_anchor_generator(
min_level=min_level,
max_level=max_level,
num_scales=num_scales,
aspect_ratios=aspect_ratios,
anchor_size=3)
anchor_boxes = anchor_gen(image_size)
for l in anchor_boxes:
anchor_boxes[l] = tf.tile(
tf.expand_dims(anchor_boxes[l], axis=0), [2, 1, 1, 1])
backbone = resnet.ResNet(model_id=50)
decoder = fpn.FPN(
input_specs=backbone.output_specs,
min_level=min_level,
max_level=max_level)
if has_att_heads:
attribute_heads = {'depth': ('regression', 1)}
else:
attribute_heads = None
head = dense_prediction_heads.RetinaNetHead(
min_level=min_level,
max_level=max_level,
num_classes=num_classes,
attribute_heads=attribute_heads,
num_anchors_per_location=num_anchors_per_location)
generator = detection_generator.MultilevelDetectionGenerator(
max_num_detections=10)
model = retinanet_model.RetinaNetModel(
backbone=backbone,
decoder=decoder,
head=head,
detection_generator=generator)
model_outputs = model(
images,
image_shape,
anchor_boxes,
training=training)
if training:
cls_outputs = model_outputs['cls_outputs']
box_outputs = model_outputs['box_outputs']
for level in range(min_level, max_level + 1):
self.assertIn(str(level), cls_outputs)
self.assertIn(str(level), box_outputs)
self.assertAllEqual([
2,
image_size[0] // 2**level,
image_size[1] // 2**level,
num_classes * num_anchors_per_location
], cls_outputs[str(level)].numpy().shape)
self.assertAllEqual([
2,
image_size[0] // 2**level,
image_size[1] // 2**level,
4 * num_anchors_per_location
], box_outputs[str(level)].numpy().shape)
if has_att_heads:
att_outputs = model_outputs['att_outputs']
for att in att_outputs.values():
self.assertAllEqual([
2, image_size[0] // 2**level, image_size[1] // 2**level,
1 * num_anchors_per_location
], att[str(level)].numpy().shape)
else:
self.assertIn('detection_boxes', model_outputs)
self.assertIn('detection_scores', model_outputs)
self.assertIn('detection_classes', model_outputs)
self.assertIn('num_detections', model_outputs)
self.assertAllEqual(
[2, 10, 4], model_outputs['detection_boxes'].numpy().shape)
self.assertAllEqual(
[2, 10], model_outputs['detection_scores'].numpy().shape)
self.assertAllEqual(
[2, 10], model_outputs['detection_classes'].numpy().shape)
self.assertAllEqual(
[2,], model_outputs['num_detections'].numpy().shape)
if has_att_heads:
self.assertIn('detection_attributes', model_outputs)
self.assertAllEqual(
[2, 10, 1],
model_outputs['detection_attributes']['depth'].numpy().shape)
def test_serialize_deserialize(self):
"""Validate the network can be serialized and deserialized."""
num_classes = 3
min_level = 3
max_level = 7
num_scales = 3
aspect_ratios = [1.0]
num_anchors_per_location = num_scales * len(aspect_ratios)
backbone = resnet.ResNet(model_id=50)
decoder = fpn.FPN(
input_specs=backbone.output_specs,
min_level=min_level,
max_level=max_level)
head = dense_prediction_heads.RetinaNetHead(
min_level=min_level,
max_level=max_level,
num_classes=num_classes,
num_anchors_per_location=num_anchors_per_location)
generator = detection_generator.MultilevelDetectionGenerator(
max_num_detections=10)
model = retinanet_model.RetinaNetModel(
backbone=backbone,
decoder=decoder,
head=head,
detection_generator=generator,
min_level=min_level,
max_level=max_level,
num_scales=num_scales,
aspect_ratios=aspect_ratios,
anchor_size=3)
config = model.get_config()
new_model = retinanet_model.RetinaNetModel.from_config(config)
# Validate that the config can be forced to JSON.
_ = new_model.to_json()
# If the serialization was successful, the new config should match the old.
self.assertAllEqual(model.get_config(), new_model.get_config())
if __name__ == '__main__':
tf.test.main()
| 33.80678
| 80
| 0.652762
|
4a11d9a7e669ca3a5cc23d51b2487250e6f4c09e
| 2,165
|
py
|
Python
|
python-client/test/test_deleted.py
|
gabisurita/kinto-codegen-tutorial
|
b5921ec603df031f9ff25683b7a3fbed5af79094
|
[
"MIT"
] | 2
|
2017-03-02T13:12:24.000Z
|
2017-10-22T10:31:09.000Z
|
python-client/test/test_deleted.py
|
gabisurita/kinto-codegen-tutorial
|
b5921ec603df031f9ff25683b7a3fbed5af79094
|
[
"MIT"
] | null | null | null |
python-client/test/test_deleted.py
|
gabisurita/kinto-codegen-tutorial
|
b5921ec603df031f9ff25683b7a3fbed5af79094
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
kinto
Kinto is a minimalist JSON storage service with synchronisation and sharing abilities. It is meant to be easy to use and easy to self-host. **Limitations of this OpenAPI specification:** 1. Validation on OR clauses is not supported (e.g. provide `data` or `permissions` in patch operations). 2. [Filtering](http://kinto.readthedocs.io/en/stable/api/1.x/filtering.html) is supported on any field by using `?{prefix}{field_name}={value}`. 3. [Backoff headers](http://kinto.readthedocs.io/en/stable/api/1.x/backoff.html) may occur with any response, but they are only present if the server is under in heavy load, so we cannot validate them on every request. They are listed only on the default error message. 4. [Collection schemas](http://kinto.readthedocs.io/en/stable/api/1.x/collections.html#collection-json-schema) can be provided when defining a collection, but they are not validated by this specification.
OpenAPI spec version: 1.13
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.deleted import Deleted
class TestDeleted(unittest.TestCase):
    """ Deleted unit test stubs """

    def setUp(self):
        # No fixtures required for this generated stub.
        pass

    def tearDown(self):
        pass

    def testDeleted(self):
        """
        Test Deleted
        """
        # Generated stub: currently only checks the model can be instantiated.
        model = swagger_client.models.deleted.Deleted()
if __name__ == '__main__':
unittest.main()
| 40.092593
| 938
| 0.725635
|
4a11da0857aa04533906984d1f9e8bb4859219a6
| 711
|
py
|
Python
|
diamond_agent/tests/test.py
|
cloudify-cosmo/cloudify-diamond-plugin
|
2d5cd1bbb8e5b272d13b26e3ddd45759cde5e8a7
|
[
"Apache-2.0"
] | 4
|
2016-02-28T17:01:34.000Z
|
2019-07-15T08:01:19.000Z
|
diamond_agent/tests/test.py
|
cloudify-cosmo/cloudify-diamond-plugin
|
2d5cd1bbb8e5b272d13b26e3ddd45759cde5e8a7
|
[
"Apache-2.0"
] | 5
|
2015-10-06T14:46:24.000Z
|
2020-09-10T05:49:43.000Z
|
diamond_agent/tests/test.py
|
cloudify-cosmo/cloudify-diamond-plugin
|
2d5cd1bbb8e5b272d13b26e3ddd45759cde5e8a7
|
[
"Apache-2.0"
] | 10
|
2015-01-21T17:10:36.000Z
|
2019-07-22T06:30:28.000Z
|
#########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import unittest
class TestHelperFunctions(unittest.TestCase):
    # Placeholder test case; no helper-function tests are implemented yet.
    pass
| 33.857143
| 77
| 0.745429
|
4a11db681d16eb178c4c9000e616df78c3ead4d5
| 8,053
|
py
|
Python
|
openpeerpower/helpers/storage.py
|
pcaston/core
|
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
|
[
"Apache-2.0"
] | 1
|
2021-07-08T20:09:55.000Z
|
2021-07-08T20:09:55.000Z
|
openpeerpower/helpers/storage.py
|
pcaston/core
|
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
|
[
"Apache-2.0"
] | 47
|
2021-02-21T23:43:07.000Z
|
2022-03-31T06:07:10.000Z
|
openpeerpower/helpers/storage.py
|
OpenPeerPower/core
|
f673dfac9f2d0c48fa30af37b0a99df9dd6640ee
|
[
"Apache-2.0"
] | null | null | null |
"""Helper to help store data."""
from __future__ import annotations
import asyncio
from contextlib import suppress
from json import JSONEncoder
import logging
import os
from typing import Any, Callable
from openpeerpower.const import EVENT_OPENPEERPOWER_FINAL_WRITE
from openpeerpower.core import CALLBACK_TYPE, CoreState, Event, OpenPeerPower, callback
from openpeerpower.helpers.event import async_call_later
from openpeerpower.loader import bind_opp
from openpeerpower.util import json as json_util
# mypy: allow-untyped-calls, allow-untyped-defs, no-warn-return-any
# mypy: no-check-untyped-defs
STORAGE_DIR = ".storage"
_LOGGER = logging.getLogger(__name__)
@bind_opp
async def async_migrator(
    opp,
    old_path,
    store,
    *,
    old_conf_load_func=None,
    old_conf_migrate_func=None,
):
    """Migrate old data to a store and then load data.

    async def old_conf_migrate_func(old_data)

    :param opp: OpenPeerPower instance, used to run blocking file I/O in the
        executor.
    :param old_path: path of the legacy config file; removed after migration.
    :param store: ``Store`` instance that receives the migrated data.
    :param old_conf_load_func: optional loader for the legacy file; defaults
        to ``json_util.load_json``.
    :param old_conf_migrate_func: optional async transform applied to the
        legacy data before saving.
    """
    store_data = await store.async_load()

    # If we already have store data we have already migrated in the past.
    if store_data is not None:
        return store_data

    def load_old_config():
        """Load old config."""
        # Runs in the executor: file access must not block the event loop.
        if not os.path.isfile(old_path):
            return None

        if old_conf_load_func is not None:
            return old_conf_load_func(old_path)

        return json_util.load_json(old_path)

    config = await opp.async_add_executor_job(load_old_config)

    # Nothing to migrate: neither store data nor a legacy file exists.
    if config is None:
        return None

    if old_conf_migrate_func is not None:
        config = await old_conf_migrate_func(config)

    # Persist the migrated data, then delete the legacy file.
    await store.async_save(config)
    await opp.async_add_executor_job(os.remove, old_path)
    return config
@bind_opp
class Store:
    """Class to help storing data."""

    def __init__(
        self,
        opp: OpenPeerPower,
        version: int,
        key: str,
        private: bool = False,
        *,
        encoder: type[JSONEncoder] | None = None,
    ) -> None:
        """Initialize storage class.

        :param opp: running OpenPeerPower instance (executor jobs, task
            creation, event-bus listeners).
        :param version: schema version stored with the data; a mismatch on
            load triggers ``_async_migrate_func``.
        :param key: storage key, also the file name under ``.storage``.
        :param private: forwarded to ``json_util.save_json`` when writing.
        :param encoder: optional ``JSONEncoder`` subclass for serializing.
        """
        self.version = version
        self.key = key
        self.opp = opp
        self._private = private
        # Pending payload awaiting a write; also served to concurrent loads.
        self._data: dict[str, Any] | None = None
        self._unsub_delay_listener: CALLBACK_TYPE | None = None
        self._unsub_final_write_listener: CALLBACK_TYPE | None = None
        # Serializes writers inside _async_handle_write_data.
        self._write_lock = asyncio.Lock()
        # In-flight load task shared by concurrent async_load callers.
        self._load_task: asyncio.Future | None = None
        self._encoder = encoder

    @property
    def path(self):
        """Return the config path."""
        return self.opp.config.path(STORAGE_DIR, self.key)

    async def async_load(self) -> dict | list | None:
        """Load data.

        If the expected version does not match the given version, the migrate
        function will be invoked with await migrate_func(version, config).

        Will ensure that when a call comes in while another one is in progress,
        the second call will wait and return the result of the first call.
        """
        if self._load_task is None:
            self._load_task = self.opp.async_create_task(self._async_load())

        return await self._load_task

    async def _async_load(self):
        """Load the data and ensure the task is removed."""
        try:
            return await self._async_load_data()
        finally:
            # Clear the shared task so a later load re-reads from disk.
            self._load_task = None

    async def _async_load_data(self):
        """Load the data."""
        # Check if we have a pending write
        if self._data is not None:
            data = self._data

            # If we didn't generate data yet, do it now.
            if "data_func" in data:
                data["data"] = data.pop("data_func")()
        else:
            data = await self.opp.async_add_executor_job(json_util.load_json, self.path)

            # load_json yields an empty dict when the file does not exist.
            if data == {}:
                return None
        if data["version"] == self.version:
            stored = data["data"]
        else:
            _LOGGER.info(
                "Migrating %s storage from %s to %s",
                self.key,
                data["version"],
                self.version,
            )
            stored = await self._async_migrate_func(data["version"], data["data"])

        return stored

    async def async_save(self, data: dict | list) -> None:
        """Save data."""
        self._data = {"version": self.version, "key": self.key, "data": data}

        # While shutting down, defer the write to the final-write event.
        if self.opp.state == CoreState.stopping:
            self._async_ensure_final_write_listener()
            return

        await self._async_handle_write_data()

    @callback
    def async_delay_save(self, data_func: Callable[[], dict], delay: float = 0) -> None:
        """Save data with an optional delay."""
        # data_func is called lazily at write time, not now.
        self._data = {"version": self.version, "key": self.key, "data_func": data_func}

        self._async_cleanup_delay_listener()
        self._async_ensure_final_write_listener()

        if self.opp.state == CoreState.stopping:
            return

        self._unsub_delay_listener = async_call_later(
            self.opp, delay, self._async_callback_delayed_write
        )

    @callback
    def _async_ensure_final_write_listener(self) -> None:
        """Ensure that we write if we quit before delay has passed."""
        if self._unsub_final_write_listener is None:
            self._unsub_final_write_listener = self.opp.bus.async_listen_once(
                EVENT_OPENPEERPOWER_FINAL_WRITE, self._async_callback_final_write
            )

    @callback
    def _async_cleanup_final_write_listener(self) -> None:
        """Clean up a stop listener."""
        if self._unsub_final_write_listener is not None:
            self._unsub_final_write_listener()
            self._unsub_final_write_listener = None

    @callback
    def _async_cleanup_delay_listener(self) -> None:
        """Clean up a delay listener."""
        if self._unsub_delay_listener is not None:
            self._unsub_delay_listener()
            self._unsub_delay_listener = None

    async def _async_callback_delayed_write(self, _now):
        """Handle a delayed write callback."""
        # catch the case where a call is scheduled and then we stop Open Peer Power
        if self.opp.state == CoreState.stopping:
            self._async_ensure_final_write_listener()
            return

        await self._async_handle_write_data()

    async def _async_callback_final_write(self, _event: Event) -> None:
        """Handle a write because Open Peer Power is in final write state."""
        self._unsub_final_write_listener = None
        await self._async_handle_write_data()

    async def _async_handle_write_data(self, *_args):
        """Handle writing the config."""
        async with self._write_lock:
            self._async_cleanup_delay_listener()
            self._async_cleanup_final_write_listener()

            if self._data is None:
                # Another write already consumed the data
                return

            data = self._data

            # Materialize lazily-provided data just before writing.
            if "data_func" in data:
                data["data"] = data.pop("data_func")()

            self._data = None

            try:
                await self.opp.async_add_executor_job(self._write_data, self.path, data)
            except (json_util.SerializationError, json_util.WriteError) as err:
                # Best-effort persistence: log, do not crash the caller.
                _LOGGER.error("Error writing config for %s: %s", self.key, err)

    def _write_data(self, path: str, data: dict) -> None:
        """Write the data."""
        # Runs in the executor; safe to do blocking filesystem work here.
        if not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))

        _LOGGER.debug("Writing data for %s to %s", self.key, path)
        json_util.save_json(path, data, self._private, encoder=self._encoder)

    async def _async_migrate_func(self, old_version, old_data):
        """Migrate to the new version. Subclasses must override."""
        raise NotImplementedError

    async def async_remove(self) -> None:
        """Remove all data."""
        self._async_cleanup_delay_listener()
        self._async_cleanup_final_write_listener()

        with suppress(FileNotFoundError):
            await self.opp.async_add_executor_job(os.unlink, self.path)
| 32.869388
| 88
| 0.637402
|
4a11dc0cf42f1592a46c0600f2b230d7174e4baf
| 6,624
|
py
|
Python
|
zerver/templatetags/app_filters.py
|
Wowol/zulip
|
b46d1c8d07279987fc0cbf06aec419c43b830e44
|
[
"Apache-2.0"
] | null | null | null |
zerver/templatetags/app_filters.py
|
Wowol/zulip
|
b46d1c8d07279987fc0cbf06aec419c43b830e44
|
[
"Apache-2.0"
] | null | null | null |
zerver/templatetags/app_filters.py
|
Wowol/zulip
|
b46d1c8d07279987fc0cbf06aec419c43b830e44
|
[
"Apache-2.0"
] | 1
|
2020-07-06T11:43:28.000Z
|
2020-07-06T11:43:28.000Z
|
from typing import Any, Dict, List, Optional
import markdown
import markdown.extensions.admonition
import markdown.extensions.codehilite
import markdown.extensions.extra
import markdown.extensions.toc
from django.template import Library, engines
from django.utils.safestring import mark_safe
from jinja2.exceptions import TemplateNotFound
import zerver.lib.bugdown.fenced_code
import zerver.lib.bugdown.api_arguments_table_generator
import zerver.lib.bugdown.api_code_examples
import zerver.lib.bugdown.nested_code_blocks
import zerver.lib.bugdown.tabbed_sections
import zerver.lib.bugdown.help_settings_links
import zerver.lib.bugdown.help_relative_links
import zerver.lib.bugdown.help_emoticon_translations_table
import zerver.lib.bugdown.include
from zerver.lib.cache import ignore_unhashable_lru_cache, dict_to_items_tuple, items_tuple_to_dict
register = Library()
def and_n_others(values: List[str], limit: int) -> str:
    """Return the " and N other(s)" suffix for a truncated display list,
    with the appropriate pluralization."""
    remaining = len(values) - limit
    plural_suffix = "" if remaining == 1 else "s"
    return " and %d other%s" % (remaining, plural_suffix)
@register.filter(name='display_list', is_safe=True)
def display_list(values: List[str], display_limit: int) -> str:
    """
    Given a list of values, return a string nicely formatting those values,
    summarizing when you have more than `display_limit`. Eg, for a
    `display_limit` of 3 we get the following possible cases:

    Jessica
    Jessica and Waseem
    Jessica, Waseem and Tim
    Jessica, Waseem, Tim and 1 other
    Jessica, Waseem, Tim and 2 others
    """
    if len(values) == 1:
        # Single value: no joining needed.
        return "%s" % (values[0],)

    if len(values) <= display_limit:
        # Everything fits: join all but the last, then append "and <last>".
        shown = ", ".join("%s" % (value,) for value in values[:-1])
        return shown + " and %s" % (values[-1],)

    # Too many values: show the first `display_limit` and summarize the rest.
    shown = ", ".join("%s" % (value,) for value in values[:display_limit])
    return shown + and_n_others(values, display_limit)
md_extensions: Optional[List[Any]] = None
md_macro_extension: Optional[Any] = None
# Prevent the automatic substitution of macros in these docs. If
# they contain a macro, it is always used literally for documenting
# the macro system.
docs_without_macros = [
"incoming-webhooks-walkthrough.md",
]
# render_markdown_path is passed a context dictionary (unhashable), which
# results in the calls not being cached. To work around this, we convert the
# dict to a tuple of dict items to cache the results.
@dict_to_items_tuple
@ignore_unhashable_lru_cache(512)
@items_tuple_to_dict
@register.filter(name='render_markdown_path', is_safe=True)
def render_markdown_path(markdown_file_path: str,
context: Optional[Dict[Any, Any]]=None,
pure_markdown: Optional[bool]=False) -> str:
"""Given a path to a markdown file, return the rendered html.
Note that this assumes that any HTML in the markdown file is
trusted; it is intended to be used for documentation, not user
data."""
if context is None:
context = {}
# We set this global hackishly
from zerver.lib.bugdown.help_settings_links import set_relative_settings_links
set_relative_settings_links(bool(context.get('html_settings_links')))
from zerver.lib.bugdown.help_relative_links import set_relative_help_links
set_relative_help_links(bool(context.get('html_settings_links')))
global md_extensions
global md_macro_extension
if md_extensions is None:
md_extensions = [
markdown.extensions.extra.makeExtension(),
markdown.extensions.toc.makeExtension(),
markdown.extensions.admonition.makeExtension(),
markdown.extensions.codehilite.makeExtension(
linenums=False,
guess_lang=False
),
zerver.lib.bugdown.fenced_code.makeExtension(
run_content_validators=context.get('run_content_validators', False)
),
zerver.lib.bugdown.api_arguments_table_generator.makeExtension(
base_path='templates/zerver/api/'),
zerver.lib.bugdown.nested_code_blocks.makeExtension(),
zerver.lib.bugdown.tabbed_sections.makeExtension(),
zerver.lib.bugdown.help_settings_links.makeExtension(),
zerver.lib.bugdown.help_relative_links.makeExtension(),
zerver.lib.bugdown.help_emoticon_translations_table.makeExtension(),
]
if md_macro_extension is None:
md_macro_extension = zerver.lib.bugdown.include.makeExtension(
base_path='templates/zerver/help/include/')
extensions = md_extensions
if 'api_url' in context:
# We need to generate the API code examples extension each
# time so the `api_url` config parameter can be set dynamically.
#
# TODO: Convert this to something more efficient involving
# passing the API URL as a direct parameter.
extensions = extensions + [zerver.lib.bugdown.api_code_examples.makeExtension(
api_url=context["api_url"],
)]
if not any(doc in markdown_file_path for doc in docs_without_macros):
extensions = extensions + [md_macro_extension]
md_engine = markdown.Markdown(extensions=extensions)
md_engine.reset()
jinja = engines['Jinja2']
try:
# By default, we do both Jinja2 templating and markdown
# processing on the file, to make it easy to use both Jinja2
# context variables and markdown includes in the file.
markdown_string = jinja.env.loader.get_source(jinja.env, markdown_file_path)[0]
except TemplateNotFound as e:
if pure_markdown:
# For files such as /etc/zulip/terms.md where we don't intend
# to use Jinja2 template variables, we still try to load the
# template using Jinja2 (in case the file path isn't absolute
# and does happen to be in Jinja's recognized template
# directories), and if that fails, we try to load it directly
# from disk.
with open(markdown_file_path) as fp:
markdown_string = fp.read()
else:
raise e
html = md_engine.convert(markdown_string)
rendered_html = jinja.from_string(html).render(context)
return mark_safe(rendered_html)
| 41.660377
| 98
| 0.695954
|
4a11df11c2df455410fb8ef2d3816bca436db696
| 278
|
py
|
Python
|
Design Lab/designLab09/tk.py
|
huangyz0918/MIT-EECS-6.01
|
4856cc17a306760d6e79b7dc22696a795b518c2c
|
[
"MIT"
] | 1
|
2021-04-22T09:39:17.000Z
|
2021-04-22T09:39:17.000Z
|
Design Lab/designLab09/tk.py
|
huangyz0918/MIT-EECS-6.01
|
4856cc17a306760d6e79b7dc22696a795b518c2c
|
[
"MIT"
] | null | null | null |
Design Lab/designLab09/tk.py
|
huangyz0918/MIT-EECS-6.01
|
4856cc17a306760d6e79b7dc22696a795b518c2c
|
[
"MIT"
] | 2
|
2020-06-19T11:17:55.000Z
|
2020-11-12T08:43:57.000Z
|
import Tkinter
if not globals().has_key('__tk_inited'):
global __tk_inited
__tk_inited = False
def init():
global __tk_inited
if not __tk_inited:
w = Tkinter.Tk()
w.withdraw()
def setInited():
global __tk_inited
__tk_inited = True
| 17.375
| 40
| 0.643885
|
4a11dfdce27c518e2c082d303c1ca51c05d23eca
| 157
|
py
|
Python
|
py/t2/demo2.py
|
firekyrin/train-demo
|
a949a461bb9324fc2732cdf80c42c26379442c96
|
[
"MIT"
] | null | null | null |
py/t2/demo2.py
|
firekyrin/train-demo
|
a949a461bb9324fc2732cdf80c42c26379442c96
|
[
"MIT"
] | null | null | null |
py/t2/demo2.py
|
firekyrin/train-demo
|
a949a461bb9324fc2732cdf80c42c26379442c96
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import re
sentence = "from 12/22/1629 to 11/14/1643"
a2 = re.sub(r'(\d{2})/(\d{2})/(\d{4})', r'\3-\1-\2', sentence)
print "a2=", a2
| 17.444444
| 62
| 0.56051
|
4a11e00821b6764b79f67b2617996595e985c165
| 2,408
|
py
|
Python
|
src/main/python/rpcserver/server.py
|
iottyys/ttyys-core
|
241942245c3d1bcf46c533a6fb221df4a0eddfc7
|
[
"MIT"
] | null | null | null |
src/main/python/rpcserver/server.py
|
iottyys/ttyys-core
|
241942245c3d1bcf46c533a6fb221df4a0eddfc7
|
[
"MIT"
] | null | null | null |
src/main/python/rpcserver/server.py
|
iottyys/ttyys-core
|
241942245c3d1bcf46c533a6fb221df4a0eddfc7
|
[
"MIT"
] | null | null | null |
# -*-coding:utf-8-*-
import psutil
import time
from threading import Thread
import socketserver
from socketserver import TCPServer, ThreadingMixIn
from typing import Tuple
from enum import Enum, unique
from rpcserver.avro.proxy import AvroProxyFactory
from rpcserver.logger import logger
from rpcserver.protobuf import handler as protobuf_handler
from rpcserver.avro import handler as avro_handler
@unique
class Protocol(Enum):
Protobuf = 0
Avro = 1
def process_check(pid, server):
while 1:
try:
psutil.Process(pid)
time.sleep(1)
except psutil.NoSuchProcess:
break
server.shutdown()
class SocketServer:
def __init__(self, port, host='0.0.0.0', protocol=Protocol.Avro):
self.host = host
self.port = port
self.serviceMap = {}
self.protocols = []
self.avro_proxy_factory = None
self.proto = protocol
def register_service(self, service=None) -> None:
if service is None:
return
self.serviceMap[service.GetDescriptor().full_name] = service
def register_avro_protocols(self, protocols) -> None:
self.protocols = protocols
self.avro_proxy_factory = AvroProxyFactory()
for protocol in protocols:
self.avro_proxy_factory.load(protocol)
def run(self, pid) -> None:
logger.info('starting server on host: %s - port: %d' % (self.host, self.port))
handler = avro_handler.RequestHandler
if self.proto == Protocol.Protobuf:
handler = protobuf_handler.RequestHandler
server = None
try:
server = ThreadingTCPServer((self.host, self.port), handler, self)
if pid is not None:
Thread(target=process_check, args=(pid, server), daemon=True).start()
server.serve_forever()
except KeyboardInterrupt:
if server is not None:
server.shutdown()
class ThreadingTCPServer(ThreadingMixIn, TCPServer):
socketserver.allow_reuse_address = True
def __init__(self, server_address, handler, server):
socketserver.TCPServer.__init__(self, server_address, handler)
self.server = server
def finish_request(self, request: bytes,
client_address: Tuple[str, int]) -> None:
self.RequestHandlerClass(request, client_address, self, self.server)
| 30.481013
| 86
| 0.658638
|
4a11e0345f6dde948bf8d03296dfa83ae98a612e
| 2,382
|
py
|
Python
|
aiohttp_devtools/start/main.py
|
Dreamsorcerer/aiohttp-devtools
|
b5e9ffa9a9077d224640e6aad92cc5e0798d4ab7
|
[
"MIT"
] | null | null | null |
aiohttp_devtools/start/main.py
|
Dreamsorcerer/aiohttp-devtools
|
b5e9ffa9a9077d224640e6aad92cc5e0798d4ab7
|
[
"MIT"
] | null | null | null |
aiohttp_devtools/start/main.py
|
Dreamsorcerer/aiohttp-devtools
|
b5e9ffa9a9077d224640e6aad92cc5e0798d4ab7
|
[
"MIT"
] | null | null | null |
import base64
import os
import re
from pathlib import Path
from ..exceptions import AiohttpDevConfigError
from ..logs import main_logger as logger
THIS_DIR = Path(__file__).parent
TEMPLATE_DIR = THIS_DIR / 'template'
def check_dir_clean(d: Path):
if d.exists():
existing_paths = {p.name for p in d.iterdir()}
new_paths = {p.name for p in TEMPLATE_DIR.iterdir()}
conflicts = existing_paths & new_paths
if conflicts:
raise AiohttpDevConfigError('The path "{}" already has files/directories which would conflict '
'with the new project: {}'.format(d, ', '.join(sorted(conflicts))))
class StartProject:
def __init__(self, *, path: str, name: str, template_dir: Path = TEMPLATE_DIR) -> None:
self.project_root = Path(path)
self.template_dir = template_dir
check_dir_clean(self.project_root)
try:
display_path = self.project_root.relative_to(Path('.').resolve())
except ValueError:
display_path = self.project_root
logger.info('Starting new aiohttp project "%s" at "%s"', name, display_path)
self.ctx = {
'name': name,
'cookie_name': re.sub(r'[^\w_]', '', re.sub(r'[.-]', '_', name)),
'auth_key': base64.urlsafe_b64encode(os.urandom(32)).decode(),
}
self.ctx_regex = re.compile(r'\{\{ ?(%s) ?\}\}' % '|'.join(self.ctx.keys()))
self.files_created = 0
self.generate_directory(TEMPLATE_DIR)
logger.info('project created, %d files generated', self.files_created)
def generate_directory(self, p: Path):
for pp in p.iterdir():
if pp.is_dir():
self.generate_directory(pp)
else:
assert pp.is_file()
if not pp.name.endswith('.pyc'):
self.generate_file(pp)
def generate_file(self, p: Path):
text = p.read_text()
new_path = self.project_root / p.relative_to(self.template_dir)
logger.debug('creating "%s"', new_path)
if p.name == 'settings.py':
text = self.ctx_regex.sub(self.ctx_replace, text)
new_path.parent.mkdir(parents=True, exist_ok=True)
new_path.write_text(text)
self.files_created += 1
def ctx_replace(self, m):
return self.ctx[m.group(1)]
| 35.029412
| 107
| 0.601175
|
4a11e06767dad48d13e9726c97daa19157063ab7
| 360
|
py
|
Python
|
codebase/Exposure Fusion/MRB/utils.py
|
SecretMG/NTIRE21
|
1d22afc46a792f56693e32ed5df484fd910aa056
|
[
"MIT"
] | null | null | null |
codebase/Exposure Fusion/MRB/utils.py
|
SecretMG/NTIRE21
|
1d22afc46a792f56693e32ed5df484fd910aa056
|
[
"MIT"
] | null | null | null |
codebase/Exposure Fusion/MRB/utils.py
|
SecretMG/NTIRE21
|
1d22afc46a792f56693e32ed5df484fd910aa056
|
[
"MIT"
] | null | null | null |
import cv2 as cv
import numpy as np
INF = 255
def readImageSeq(locations):
assert len(locations) > 1 # 至少要读到多于1张图片
imgs = []
for loc in locations:
img = cv.imread(loc, 1)
# img = cv.resize(img, (1024, 512))
# cv.imwrite(loc, img)
imgs.append(img)
imgs = np.asarray(imgs) # 从列表转换成numpy数组
return imgs
| 17.142857
| 44
| 0.591667
|
4a11e10c5e26b44e9feb1dabc3664ddf0410685a
| 12,252
|
py
|
Python
|
plasTeX/Renderers/ManPage/__init__.py
|
perfectbark/LaTex2Docx
|
e32f9dcc59cce7bea4e7b114687b2300c623d8c0
|
[
"MIT"
] | 23
|
2019-06-16T06:00:39.000Z
|
2022-03-29T14:44:32.000Z
|
plasTeX/Renderers/ManPage/__init__.py
|
hao-han/LaTex2Docx
|
e32f9dcc59cce7bea4e7b114687b2300c623d8c0
|
[
"MIT"
] | null | null | null |
plasTeX/Renderers/ManPage/__init__.py
|
hao-han/LaTex2Docx
|
e32f9dcc59cce7bea4e7b114687b2300c623d8c0
|
[
"MIT"
] | 12
|
2019-05-27T06:32:06.000Z
|
2022-03-15T10:22:07.000Z
|
#!/usr/bin/env python
from plasTeX.Renderers import Renderer as BaseRenderer
import textwrap, re, string
class ManPageRenderer(BaseRenderer):
""" Renderer for UNIX man pages """
outputType = unicode
fileExtension = '.man'
aliases = {
'superscript': 'active::^',
'subscript': 'active::_',
'dollar': '$',
'percent': '%',
'opencurly': '{',
'closecurly': '}',
'underscore': '_',
'ampersand': '&',
'hashmark': '#',
'space': ' ',
'tilde': 'active::~',
'at': '@',
'backslash': '\\',
}
def __init__(self, *args, **kwargs):
BaseRenderer.__init__(self, *args, **kwargs)
# Load dictionary with methods
for key in vars(type(self)):
if key.startswith('do__'):
self[self.aliases[key[4:]]] = getattr(self, key)
elif key.startswith('do_'):
self[key[3:]] = getattr(self, key)
self['default-layout'] = self['document-layout'] = self.default
self.footnotes = []
self.blocks = []
def default(self, node):
""" Rendering method for all non-text nodes """
# Handle characters like \&, \$, \%, etc.
if len(node.nodeName) == 1 and node.nodeName not in string.letters:
return self.textDefault(node.nodeName)
# Render child nodes
return unicode(node)
def textDefault(self, node):
return unicode(node)
def processFileContent(self, document, s):
s = BaseRenderer.processFileContent(self, document, s)
# Clean up newlines
s = re.sub(r'\s*\n(\s*\n)+', r'\n\n', s)
s = re.sub(r'(\s*\n)+(\.B[ld])', r'\n\2', s)
s = re.sub(r'(\.E[ld])\s*(\.B[ld])', r'\1\n\n\2', s)
s = re.sub(r'\.Ed\s*\.Bd', r'.Ed\n.Bd', s)
s = s.lstrip()
return s
# Alignment
def do_flushleft(self, node):
return u'\n.Bd -ragged\n%s\n.Ed\n' % node
do_raggedbottom = do_raggedright = do_leftline = do_flushleft
def center(self, text):
return u'\n.Bd -centered\n%s\n.Ed\n' % text
def do_center(self, node):
return self.center(unicode(node))
do_centering = do_centerline = do_center
def do_flushright(self, node):
return u'\n.Bd -offset right\n%s\n.Ed\n' % node
do_raggedleft = do_llap = do_flushright
# Arrays
def do_array(self, node, render=unicode):
output = ['.TS']
# Process colspecs
if node.colspec:
alignments = [x.style['text-align'] for x in node.colspec]
else:
alignments = ['l']*100
for row in node:
colspec = []
for i, cell in enumerate(row):
colspec.append(cell.style.get('text-align', alignments[i])[0])
output.append(' '.join(colspec))
output[-1] += '.'
# Render table
for row in node:
content = []
for cell in row:
content.append(render(cell).strip())
output.append('\t'.join(content))
output.append('.TE')
output.append('')
return re.sub(r'\s*.TE\s*', r'\n.TE\n', u'\n'.join(output))
do_tabular = do_tabularx = do_longtable = do_array
def do_cline(self, node):
return ''
def do_multicolumn(self, node):
return unicode(node)
# Bibliography
def do_thebibliography(self, node):
output = ['','.Sh Bibliography','']
output.append('.Bl -tag -width indent')
for item in node:
output.append('.It %s' % unicode(item.bibcite).strip())
output.append(unicode(item).strip())
output.append('.El')
output.append('')
return u'\n'.join(output)
def do_bibliographystyle(self, node):
return u''
def do_bibliography(self, node):
return self.default(node)
def do_cite(self, node):
output = []
for item in node.citation():
output.append(unicode(item))
return u''.join(output)
def do_bibliographyref(self, node):
return self.default(node)
# Boxes
do_mbax = do_makebox = do_fbox = do_framebox = do_parbox = default
do_minipage = do_raisebox = do_rule = default
# Breaking
def do_linebreak(self, node):
return u'\n\n'
do_newline = do_pagebreak = do_newpage = do_clearpage = do_cleardoublepage = do_linebreak
# Crossref
def do_ref(self, node):
return unicode(node.idref['label'].ref)
def do_pageref(self, node):
return u'*'
def do_label(self, node):
return u''
# Floats
def do_figure(self, node):
return unicode(node)
do_table = do_marginpar = do_figure
def do_caption(self, node):
return u'\n%s %s: %s\n' % (node.title, node.ref, unicode(node).strip())
# Font Selection
do_sffamily = do_textsf = default
do_upshape = do_textup = default
do_scshape = do_textsc = default
do_sc = default
do_tiny = do_scriptsize = do_footnotesize = do_small = default
do_normalsize = do_large = do_Large = do_LARGE = do_huge = do_HUGE = default
def do_textbf(self, node):
return u'\\fB%s\\fP' % node
do_bfseries = do_bf = do_textbf
def do_textit(self, node):
return u'\\fI%s\\fP' % node
do_itshape = do_it = do_slshape = do_textsl = do_sl = do_cal = do_textit
def do_texttt(self, node):
return u'\\fC%s\\fP' % node
do_ttfamily = do_tt = do_texttt
def do_textmd(self, node):
return u'\\fR%s\\fP' % node
do_mdseries = do_rmfamily = do_textrm = do_textnormal = do_rm = do_textmd
def do_symbol(self, node):
return u'*'
# Footnotes
def do_footnote(self, node):
mark = u'[%s]' % (len(self.footnotes)+1)
self.footnotes.append(unicode(node))
return mark
def do_footnotetext(self, node):
self.do_footnote(self, node)
return ''
def do_footnotemark(self, node):
return u'[%s]' % (len(self.footnotes)+1)
# Index
def do_theindex(self, node):
return u''
do_printindex = do_index = do_theindex
# Lists
def do_itemize(self, node):
output =['','.Bl -bullet -offset 3n -compact']
for item in node:
output.append('.It')
output.append(unicode(item).strip())
output.append('.El')
output.append('')
return u'\n'.join(output)
def do_enumerate(self, node):
output = ['','.Bl -enum -offset 3n -compact']
for item in node:
output.append('.It')
output.append(unicode(item).strip())
output.append('.El')
output.append('')
return u'\n'.join(output)
def do_description(self, node):
output = ['','.Bl -tag -width 3n']
for item in node:
output.append('.It %s' % unicode(item.attributes.get('term','')).strip())
output.append(unicode(item).strip())
output.append('.El')
output.append('')
return u'\n'.join(output)
do_list = do_trivlist = do_description
# Math
def do_math(self, node):
return re.sub(r'\s*(_|\^)\s*', r'\1', node.source.replace('\\','\\\\'))
do_ensuremath = do_math
def do_equation(self, node):
s = u' %s' % re.compile(r'^\s*\S+\s*(.*?)\s*\S+\s*$', re.S).sub(r'\1', node.source.replace('\\','\\\\'))
return re.sub(r'\s*(_|\^)\s*', r'\1', s)
do_displaymath = do_equation
def do_eqnarray(self, node):
def render(node):
s = re.compile(r'^\$\\\\displaystyle\s*(.*?)\s*\$\s*$', re.S).sub(r'\1', node.source.replace('\\','\\\\'))
return re.sub(r'\s*(_|\^)\s*', r'\1', s)
return self.do_array(node, render=render)
do_align = do_gather = do_falign = do_multiline = do_eqnarray
do_multline = do_alignat = do_split = do_eqnarray
# Misc
do_bgroup = default
def do_def(self, node):
return u''
do_tableofcontents = do_input = do_protect = do_let = do_def
do_newcommand = do_hfill = do_hline = do_openout = do_renewcommand = do_def
do_write = do_hspace = do_appendix = do_global = do_noindent = do_def
do_include = do_markboth = do_setcounter = do_refstepcounter = do_def
do_medskip = do_smallskip = do_parindent = do_indent = do_setlength = do_def
do_settowidth = do_addtolength = do_nopagebreak = do_newwrite = do_def
do_newcounter = do_typeout = do_sloppypar = do_hfil = do_thispagestyle = do_def
def do_egroup(self, node):
return u''
# Pictures
def do_picture(self, node):
return u''
# Primitives
def do_par(self, node):
return u'\n%s\n' % unicode(node).strip()
def do__superscript(self, node):
return self.default(node)
def do__subscript(self, node):
return self.default(node)
# Quotations
def do_quote(self, node):
return self.center(node)
do_quotation = do_verse = do_quote
# Sectioning
def do_document(self, node):
content = unicode(node).rstrip()
footnotes = ''
if self.footnotes:
output = ['','.Bl -tag -offset indent']
for i, item in enumerate(self.footnotes):
output.append('.It [%s]' % (i+1))
output.append(item)
output.append('.El')
output.append('')
footnotes = '\n'.join(output)
return u'%s%s' % (content, footnotes)
def do_maketitle(self, node):
output = []
metadata = node.ownerDocument.userdata
if 'date' in metadata:
output.append('.Dd %s' % metadata['date'])
if 'title' in metadata:
output.append('.Dt %s' % unicode(metadata['title']).upper())
output.append('')
return u'\n'.join(output)
def do_section(self, node):
return u'.Sh %s\n%s' % (node.title, node)
do_part = do_chapter = do_section
def do_subsection(self, node):
return u'.Ss %s\n%s' % (node.title, node)
do_subsubsection = do_paragraph = do_subparagraph = do_subsubparagraph = do_subsection
def do_title(self, node):
return u''
do_author = do_date = do_thanks = do_title
def do_abstract(self, node):
return self.center(unicode(node).strip())
# Sentences
def do__dollar(self, node):
return u'$'
def do__percent(self, node):
return u'%'
def do__opencurly(self, node):
return u'{'
def do__closecurly(self, node):
return u'}'
def do__underscore(self, node):
return u'_'
def do__ampersand(self, node):
return u'&'
def do__hashmark(self, node):
return u'#'
def do__space(self, node):
return u' '
def do_LaTeX(self, node):
return u'LaTeX'
def do_TeX(self, node):
return u'TeX'
def do_emph(self, node):
return self.default(node)
do_em = do_emph
def do__tilde(self, node):
return u' '
def do_enspace(self, node):
return u' '
do_quad = do_qquad = do_enspace
def do_enskip(self, node):
return u''
do_thinspace = do_enskip
def do_underbar(self, node):
return self.default(node)
# Space
def do_hspace(self, node):
return u' '
def do_vspace(self, node):
return u''
do_bigskip = do_medskip = do_smallskip = do_vspace
# Tabbing - not implemented yet
# Verbatim
def do_verbatim(self, node):
return u'\n.Bd -literal%s.Ed\n' % node
do_alltt = do_verbatim
def do_mbox(self, node):
return self.default(node)
def do__at(self, node):
return u''
def do__backslash(self, node):
return u'\\'
Renderer = ManPageRenderer
| 27.046358
| 118
| 0.550522
|
4a11e163090f18dd2f82d7219e74f1fb6a891b92
| 2,579
|
py
|
Python
|
telegram_bot/buttons.py
|
brain4tech/b4t-python-telegram-bot
|
f7b9e300ab25ecf5a6ee05976c42034d12fe93e3
|
[
"MIT"
] | null | null | null |
telegram_bot/buttons.py
|
brain4tech/b4t-python-telegram-bot
|
f7b9e300ab25ecf5a6ee05976c42034d12fe93e3
|
[
"MIT"
] | null | null | null |
telegram_bot/buttons.py
|
brain4tech/b4t-python-telegram-bot
|
f7b9e300ab25ecf5a6ee05976c42034d12fe93e3
|
[
"MIT"
] | null | null | null |
# classes for inline and reply keyboards
class InlineButton:
def __init__(self, text_, callback_data_ = "", url_=""):
self.text = text_
self.callback_data = callback_data_
self.url = url_
if not self.callback_data and not self.url:
raise TypeError("Either callback_data or url must be given")
def __str__(self):
return str(self.toDict())
def toDict(self):
return self.__dict__
class KeyboardButton:
def __init__(self, text_):
self.text = text_
def __str__(self):
return str(self.toDict())
def toDict(self):
return self.__dict__
class ButtonList:
def __init__(self, button_type_: type, button_list_: list = []):
self.__button_type = None
self.__button_type_str = ""
self.__button_list = []
if button_type_ == InlineButton:
self.__button_type = button_type_
self.__button_type_str = "inline"
elif button_type_ == KeyboardButton:
self.__button_type = button_type_
self.__button_type_str = "keyboard"
else:
raise TypeError(
"given button_type is not type InlineButton or KeyboardButton")
if button_list_:
for element in button_list_:
if isinstance(element, self.__button_type):
self.__button_list.append(element)
def __str__(self):
return str(self.toDict())
def toDict(self):
return [button.toDict() for button in self.__button_list]
def addCommand(self, button_):
if isinstance(button_, self.__button_type):
self.__button_list.append(button_)
def bulkAddCommands(self, button_list: list):
for element in button_list:
if isinstance(element, self.__button_type):
self.__button_list.append(element)
def getButtonType(self):
return self.__button_type
def toBotDict(self, special_button: type = None, column_count: int = 3):
button_list = []
button_row = []
for button in self.__button_list:
button_row.append(button.toDict())
if len(button_row) >= column_count or button == self.__button_list[-1]:
button_list.append(button_row[:])
button_row.clear()
if special_button and isinstance(special_button, self.__button_type):
button_list.append([special_button.toDict()])
return {f'{"inline_" if self.__button_type_str == "inline" else ""}keyboard': button_list}
| 29.988372
| 98
| 0.625436
|
4a11e24837291efd30ace0220238b4a375920855
| 4,841
|
py
|
Python
|
user_mgmt/handler.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 1
|
2021-09-23T14:39:36.000Z
|
2021-09-23T14:39:36.000Z
|
user_mgmt/handler.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | 38
|
2020-08-31T22:53:09.000Z
|
2022-03-28T20:55:39.000Z
|
user_mgmt/handler.py
|
WIPACrepo/keycloak-rest-services
|
2661b0db2dd320bdb8eefc62c805188bec52ecc7
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
import logging
from tornado.web import HTTPError
from tornado.escape import json_decode, json_encode
from rest_tools.server import RestHandler
import krs.email
from krs.users import user_info
class MyHandler(RestHandler):
def initialize(self, db=None, krs_client=None, group_cache=None, **kwargs):
super().initialize(**kwargs)
self.db = db
self.krs_client = krs_client
self.group_cache = group_cache
def write(self, chunk):
"""
Writes the given chunk to the output buffer.
A copy of the Tornado src, without the list json restriction.
"""
if self._finished:
raise RuntimeError("Cannot write() after finish()")
if not isinstance(chunk, (bytes, str, dict, list)):
message = "write() only accepts bytes, str, dict, and list objects"
raise TypeError(message)
if isinstance(chunk, (dict, list)):
chunk = json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = chunk if isinstance(chunk, bytes) else chunk.encode("utf-8")
self._write_buffer.append(chunk)
def json_filter(self, req_fields, opt_fields):
"""
Filter json body data.
Args:
req_fields (dict): required fields and type
opt_fields (dict): optional fields and type
Returns:
dict: data
"""
incoming_data = json_decode(self.request.body)
data = {}
for f in req_fields:
if f not in incoming_data:
raise HTTPError(400, f'missing field "{f}"', reason=f'missing field "{f}"')
elif not isinstance(incoming_data[f], req_fields[f]):
raise HTTPError(400, reason=f'invalid type for field "{f}"')
data[f] = incoming_data[f]
for f in opt_fields:
if f in incoming_data:
if not isinstance(incoming_data[f], opt_fields[f]):
raise HTTPError(400, reason=f'invalid type for field "{f}"')
data[f] = incoming_data[f]
extra_fields = set(incoming_data)-set(req_fields)-set(opt_fields)
if extra_fields:
raise HTTPError(400, f'invalid fields: {extra_fields}', reason='extra invalid fields in request')
return data
async def get_admins(self, group_path):
ret = await self.group_cache.get_members(group_path+'/_admin')
users = {}
for username in ret:
ret2 = await user_info(username, rest_client=self.krs_client)
users[username] = ret2
logging.info(f'get_admins: {users}')
return users
async def send_admin_email(self, group_path, body):
subject = 'IceCube Account '
if group_path.startswith('/institutions'):
subject += 'Institution'
else:
subject += 'Group'
subject += ' Request'
try:
admin_users = await self.get_admins(group_path)
for user in admin_users.values():
krs.email.send_email(
recipient={'name': f'{user["firstName"]} {user["lastName"]}', 'email': user['email']},
subject=subject,
content=body)
except Exception:
logging.warning(f'failed to send email for approval to {group_path}', exc_info=True)
async def get_admin_groups(self):
if '/admin' in self.auth_data['groups']: # super admin - all groups
admin_groups = await self.group_cache.list_groups()
else:
admin_groups = [g[:-7] for g in self.auth_data['groups'] if g.endswith('/_admin')]
groups = set()
for group in admin_groups:
val = group.strip('/').split('/')
if len(val) >= 1 and val[0] != 'institutions':
groups.add(group)
logging.info(f'get_admin_groups: {groups}')
return groups
async def get_admin_institutions(self):
if '/admin' in self.auth_data['groups']: # super admin - all institutions
admin_groups = await self.group_cache.list_institutions()
insts = defaultdict(list)
for group in admin_groups:
val = group.split('/')
insts[val[2]].append(val[3])
else:
admin_groups = [g[:-7] for g in self.auth_data['groups'] if g.endswith('/_admin')]
insts = defaultdict(list)
for group in admin_groups:
val = group.strip('/').split('/')
logging.debug(f'eval group: {group} | val: {val}')
if len(val) == 3 and val[0] == 'institutions':
insts[val[1]].append(val[2])
logging.info(f'get_admin_instutitons: {insts}')
return insts
| 40.008264
| 109
| 0.587482
|
4a11e282864d953baa87ba0d3d6f0a1c9a7dc90a
| 170
|
py
|
Python
|
scripts/item/consume_2435430.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
scripts/item/consume_2435430.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
scripts/item/consume_2435430.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
# Blue Fire Damage Skin
success = sm.addDamageSkin(2435430)
if success:
sm.chat("The Blue Fire Damage Skin has been added to your account's damage skin collection.")
| 34
| 97
| 0.758824
|
4a11e2b4ef0ee87c11936fd54319aea26366481f
| 1,365
|
py
|
Python
|
JobGenie/urls.py
|
atharvaze/Algorithm-6.0
|
2f47a5e0b260b128195f23b6a722cad5aa591134
|
[
"MIT"
] | null | null | null |
JobGenie/urls.py
|
atharvaze/Algorithm-6.0
|
2f47a5e0b260b128195f23b6a722cad5aa591134
|
[
"MIT"
] | 2
|
2020-06-06T01:33:51.000Z
|
2021-06-10T22:38:26.000Z
|
JobGenie/urls.py
|
atharvaze/Algorithm-6.0
|
2f47a5e0b260b128195f23b6a722cad5aa591134
|
[
"MIT"
] | null | null | null |
"""JobGenie URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from .views import base,home,homepage,skillset,skills
from django.contrib.auth import views as auth_views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('login/', auth_views.LoginView.as_view(template_name='login.html'), name='Login'),
path('home', home, name='home'),
path('', homepage, name='homepage'),
path('skillfinder/',skillset,name='js wala page'),
path('skillfinder/skill/',skills, name='skills'),
]
urlpatterns+=static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns+=static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 39
| 91
| 0.732601
|
4a11e3a65d55cec50cf4815daa3d96b88f96d98d
| 3,481
|
py
|
Python
|
third-party/thrift/src/thrift/compiler/test/compiler_test.py
|
hkirsman/hhvm_centos7_builds
|
2a1fd6de0d2d289c1575f43f10018f3bec23bb13
|
[
"PHP-3.01",
"Zend-2.0"
] | 5
|
2015-11-23T00:26:06.000Z
|
2020-07-31T12:56:08.000Z
|
third-party/thrift/src/thrift/compiler/test/compiler_test.py
|
hkirsman/hhvm_centos7_builds
|
2a1fd6de0d2d289c1575f43f10018f3bec23bb13
|
[
"PHP-3.01",
"Zend-2.0"
] | 2
|
2017-05-10T15:43:34.000Z
|
2018-01-04T22:36:04.000Z
|
third-party/thrift/src/thrift/compiler/test/compiler_test.py
|
hkirsman/hhvm_centos7_builds
|
2a1fd6de0d2d289c1575f43f10018f3bec23bb13
|
[
"PHP-3.01",
"Zend-2.0"
] | 7
|
2017-09-01T01:30:25.000Z
|
2019-02-04T17:46:24.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import re
import unittest
import shlex
import shutil
import subprocess
import sys
import tempfile
def ascend_find_exe(path, target):
if not os.path.isdir(path):
path = os.path.dirname(path)
while True:
test = os.path.join(path, target)
if os.access(test, os.X_OK):
return test
parent = os.path.dirname(path)
if os.path.samefile(parent, path):
return None
path = parent
def read_file(path):
with open(path, 'r') as f:
return f.read()
def read_lines(path):
with open(path, 'r') as f:
return f.readlines()
def mkdir_p(path, mode):
try:
os.makedirs(path, mode)
except OSError:
pass
def parse_manifest(raw):
manifest = {}
for line in raw.splitlines():
fixture, filename = line.split('/', 1)
if fixture not in manifest:
manifest[fixture] = []
manifest[fixture].append(filename)
return manifest
exe = os.path.join(os.getcwd(), sys.argv[0])
thrift = ascend_find_exe(exe, 'thrift')
fixtureDir = os.path.join(os.path.dirname(exe), 'fixtures')
manifest = parse_manifest(read_file(os.path.join(fixtureDir, 'MANIFEST')))
fixtureNames = manifest.keys()
class MyTest(unittest.TestCase):
    """Golden-file test: run the thrift compiler over each fixture and
    compare the generated output against the checked-in fixture files.
    """
    # Shown when any fixture comparison fails, telling the developer how to
    # regenerate the fixtures.
    MSG = " ".join([
        "One or more fixtures are out of sync with the thrift compiler.",
        "To sync them, build thrift and then run:",
        "`thrift/compiler/test/build_fixtures <build-dir>`, where",
        "<build-dir> is a path where the program `thrift/compiler/thrift`",
        "may be found.",
    ])
    def setUp(self):
        """Create a scratch directory, removed automatically after the test."""
        tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp, True)
        self.tmp = tmp
        self.maxDiff = None
    def runTest(self, name):
        """Compile fixture *name* in the scratch dir and diff the output.

        Copies the fixture's ``cmd`` file and any ``src/`` inputs, runs every
        compiler invocation listed in ``cmd``, then checks that exactly the
        manifest-listed files were produced with identical contents.
        """
        fixtureChildDir = os.path.join(fixtureDir, name)
        shutil.copy2(os.path.join(fixtureChildDir, 'cmd'), self.tmp)
        for fn in manifest[name]:
            if fn.startswith('src/'):
                dn = os.path.dirname(os.path.join(self.tmp, fn))
                mkdir_p(dn, 0o700)
                shutil.copy2(os.path.join(fixtureChildDir, fn), dn)
        cmds = read_lines(os.path.join(self.tmp, 'cmd'))
        for cmd in cmds:
            subprocess.check_call(
                [thrift, '-r', '--gen'] + shlex.split(cmd.strip()),
                cwd=self.tmp,
                close_fds=True,
            )
        # List every file the compiler produced, relative to the scratch dir.
        # NOTE(review): on Python 3 check_output returns bytes, so the
        # split('/') below would mix bytes and str — presumably this script
        # runs under Python 2; confirm the intended interpreter.
        gens = subprocess.check_output(
            ["find", ".", "-type", "f"],
            cwd=self.tmp,
            close_fds=True,
        ).splitlines()
        # Strip the leading "./" component from each reported path.
        gens = [gen.split('/', 1)[1] for gen in gens]
        try:
            self.assertEqual(sorted(gens), sorted(manifest[name]))
            for gen in gens:
                genc = read_file(os.path.join(self.tmp, gen))
                fixc = read_file(os.path.join(fixtureChildDir, gen))
                # Cheap length check first gives an early failure signal.
                self.assertEqual(len(genc), len(fixc))
                self.assertMultiLineEqual(genc, fixc)
        except Exception as e:
            print(self.MSG, file=sys.stderr)
            raise e
def add_fixture(klazz, name):
    """Attach a ``test_<name>`` method to *klazz* that runs fixture *name*.

    Non-alphanumeric characters in *name* are mapped to underscores so the
    generated attribute is a valid Python identifier.
    """
    method_name = str('test_' + re.sub('[^0-9a-zA-Z]', '_', name))
    def test_method(self):
        self.runTest(name)
    test_method.__name__ = method_name
    setattr(klazz, method_name, test_method)
# Generate one test_* method on MyTest per fixture listed in the MANIFEST.
for name in fixtureNames:
    add_fixture(MyTest, name)
| 31.080357
| 75
| 0.599253
|
4a11e4210371f706fee2eeab0d40340f24d56530
| 205
|
py
|
Python
|
Mundo 1/ex011.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | 1
|
2021-03-10T21:53:38.000Z
|
2021-03-10T21:53:38.000Z
|
Mundo 1/ex011.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
Mundo 1/ex011.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
# Read the wall dimensions (metres) and report the paint needed, assuming
# one litre of paint covers two square metres.
altura = float(input("altura da parede:"))
largura = float(input("largura:"))
area = largura * altura  # float * float is already a float; no cast needed
tinta = area / 2
# Fixed typo in the output message: "litrso" -> "litros".
print("a area é {:.2f} e gasta {:.2f} litros de tinta".format(area, tinta))
| 25.625
| 74
| 0.682927
|
4a11e4bdd0ff5ea166538ba2d3e18e643f49e010
| 3,124
|
py
|
Python
|
tests/test_basic_auth.py
|
rra/aiohttp-remotes
|
a1aa6916c1713af40688370ab48bfffb350574fd
|
[
"MIT"
] | null | null | null |
tests/test_basic_auth.py
|
rra/aiohttp-remotes
|
a1aa6916c1713af40688370ab48bfffb350574fd
|
[
"MIT"
] | null | null | null |
tests/test_basic_auth.py
|
rra/aiohttp-remotes
|
a1aa6916c1713af40688370ab48bfffb350574fd
|
[
"MIT"
] | null | null | null |
import base64
import aiohttp
from aiohttp import web
from aiohttp_remotes import BasicAuth
from aiohttp_remotes import setup as _setup
async def test_basic_auth_ok(aiohttp_client):
    """Valid credentials must pass straight through to the handler (200)."""
    async def handler(request):
        return web.Response()

    application = web.Application()
    application.router.add_get('/', handler)
    await _setup(application, BasicAuth('user', 'pass', 'realm'))
    client = await aiohttp_client(application)
    response = await client.get('/', auth=aiohttp.BasicAuth('user', 'pass'))
    assert response.status == 200
async def test_basic_auth_request_auth(aiohttp_client):
async def handler(request):
return web.Response()
app = web.Application()
app.router.add_get('/', handler)
await _setup(app, BasicAuth('user', 'pass', 'realm'))
cl = await aiohttp_client(app)
resp = await cl.get('/')
assert resp.status == 401
assert resp.headers['WWW-Authenticate'] == 'Basic realm=realm'
async def test_basic_auth_wrong_creds(aiohttp_client):
async def handler(request):
return web.Response()
app = web.Application()
app.router.add_get('/', handler)
await _setup(app, BasicAuth('user', 'pass', 'realm'))
cl = await aiohttp_client(app)
resp = await cl.get('/', auth=aiohttp.BasicAuth('user', 'badpass'))
assert resp.status == 401
assert resp.headers['WWW-Authenticate'] == 'Basic realm=realm'
async def test_basic_auth_malformed_req(aiohttp_client):
async def handler(request):
return web.Response()
app = web.Application()
app.router.add_get('/', handler)
await _setup(app, BasicAuth('user', 'pass', 'realm'))
cl = await aiohttp_client(app)
resp = await cl.get('/', headers={'Authorization': 'Basic nonbase64'})
assert resp.status == 401
assert resp.headers['WWW-Authenticate'] == 'Basic realm=realm'
async def test_basic_auth_malformed_req2(aiohttp_client):
    """A header whose payload is valid base64 but lacks the ``user:pass``
    separator must be rejected with a 401 challenge.

    This was previously a byte-for-byte duplicate of
    ``test_basic_auth_malformed_req`` (non-base64 payload); it now covers a
    distinct malformation: decodable base64 containing no colon.
    """
    async def handler(request):
        return web.Response()
    app = web.Application()
    app.router.add_get('/', handler)
    await _setup(app, BasicAuth('user', 'pass', 'realm'))
    cl = await aiohttp_client(app)
    creds = base64.b64encode(b'nocolonhere').decode('utf-8')
    resp = await cl.get('/', headers={'Authorization': 'Basic ' + creds})
    assert resp.status == 401
    assert resp.headers['WWW-Authenticate'] == 'Basic realm=realm'
async def test_basic_auth_malformed_req3(aiohttp_client):
async def handler(request):
return web.Response()
app = web.Application()
app.router.add_get('/', handler)
await _setup(app, BasicAuth('user', 'pass', 'realm'))
cl = await aiohttp_client(app)
creds = base64.encodebytes(b'a:b:c').decode('utf-8')
resp = await cl.get('/', headers={'Authorization': 'Basic '+creds})
assert resp.status == 401
assert resp.headers['WWW-Authenticate'] == 'Basic realm=realm'
async def test_basic_auth_white_path(aiohttp_client):
async def handler(request):
return web.Response()
app = web.Application()
app.router.add_get('/', handler)
await _setup(app, BasicAuth('user', 'pass', 'realm', white_paths=['/']))
cl = await aiohttp_client(app)
resp = await cl.get('/')
assert resp.status == 200
| 31.877551
| 76
| 0.672855
|
4a11e4e2b0e16e5885dcacf273518f57e22da2b7
| 15,044
|
py
|
Python
|
trafficdb/blueprint/api.py
|
rjw57/trafficdb
|
7c895e14a52c8c313981243e36732a5e8dcc909a
|
[
"MIT"
] | 1
|
2016-12-12T21:23:26.000Z
|
2016-12-12T21:23:26.000Z
|
trafficdb/blueprint/api.py
|
rjw57/trafficdb
|
7c895e14a52c8c313981243e36732a5e8dcc909a
|
[
"MIT"
] | null | null | null |
trafficdb/blueprint/api.py
|
rjw57/trafficdb
|
7c895e14a52c8c313981243e36732a5e8dcc909a
|
[
"MIT"
] | null | null | null |
"""API to access and modify traffic data records.
"""
import base64
import datetime
try:
from urllib.parse import urljoin, urlencode, parse_qs
except ImportError:
from urlparse import urljoin, parse_qs
from urllib import urlencode
import uuid
from flask import *
import six
from sqlalchemy import func
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
import pytz
from werkzeug.exceptions import NotFound, BadRequest
from trafficdb.models import *
from trafficdb.queries import (
observation_date_range,
observations_for_link,
prepare_resolve_link_aliases,
resolve_link_aliases,
)
__all__ = ['api']
# Create a Blueprint for the web api
api = Blueprint('api', __name__)
# alias api as "app" for use below
app = api
# Maximum number of results to return
PAGE_LIMIT = 20
# Maximum duration to query over in *milliseconds*
MAX_DURATION = 3*24*60*60*1000
JAVASCRIPT_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
def javascript_timestamp_to_datetime(ts):
return JAVASCRIPT_EPOCH + datetime.timedelta(milliseconds=ts)
def datetime_to_javascript_timestamp(dt):
return int((dt - JAVASCRIPT_EPOCH).total_seconds() * 1000)
def uuid_to_urlsafe_id(uuid_text):
    """Encode a UUID (any text form) as a 22-character URL-safe base64 id.

    The two trailing ``=`` padding characters are stripped; they are
    re-added by ``urlsafe_id_to_uuid`` when decoding.
    """
    raw = uuid.UUID(uuid_text).bytes
    encoded = base64.urlsafe_b64encode(raw).rstrip(b'=')
    try:
        return str(encoded, 'utf8')
    except TypeError:
        # Python 2: str() takes a single argument, so decode explicitly.
        return encoded.decode('utf8')
def urlsafe_id_to_uuid(urlsafe_id):
    """Decode a URL-safe base64 id (from ``uuid_to_urlsafe_id``) back to a
    32-character lowercase UUID hex string.  Accepts str or bytes.
    """
    if not isinstance(urlsafe_id, bytes):
        urlsafe_id = urlsafe_id.encode('utf8')
    # Restore the '=' padding that was stripped at encode time.
    pad = b'=' * (4 - (len(urlsafe_id) % 4))
    decoded = base64.urlsafe_b64decode(urlsafe_id + pad)
    return uuid.UUID(bytes=decoded).hex
def verify_link_id(unverified_link_id):
    """Return a ``(Link.id, Link.uuid)`` row for an id taken from a URL.

    Parameters
    ----------
    unverified_link_id : str
        The URL-safe base64 link id exactly as received from the client.

    Raises
    ------
    NotFound
        If the id cannot be decoded or no matching link exists.  Decoding
        failures deliberately surface as 404 (not 400) so the id encoding
        scheme is not exposed to probing clients.
    """
    try:
        link_uuid = urlsafe_id_to_uuid(unverified_link_id)
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception covers every decode error here.
        return abort(404)
    link_q = db.session.query(Link.id, Link.uuid).filter(Link.uuid == link_uuid).limit(1)
    try:
        return link_q.one()
    except NoResultFound:
        # 404 on non-existent link.  (The dead ``assert False`` that
        # followed this try/except was unreachable and has been removed.)
        raise NotFound()
class ApiBadRequest(BadRequest):
def __init__(self, message):
resp = dict(error=dict(message=message))
super(ApiBadRequest, self).__init__(
description=message,
response=make_response(jsonify(resp), 400)
)
@app.route('/')
def index():
return jsonify(dict(
version=1,
resources=dict(
links=url_for('.links', _external=True),
linkAliases=url_for('.link_aliases', _external=True),
),
))
def extend_request_query(base_url, query):
qs = parse_qs(request.query_string)
for k, v in query.items():
if not isinstance(k, bytes):
k = k.encode('utf8')
qs[k] = [v,]
return urljoin(base_url, '?' + urlencode(qs, doseq=True))
@app.route('/links/')
def links():
    """Return a paginated GeoJSON FeatureCollection of all links.

    Query parameters:
        count -- maximum number of features to return (clamped to PAGE_LIMIT).
        from  -- opaque link id to resume pagination from (inclusive).

    The ``properties.page`` object carries ``count`` and, when more results
    remain, a ``next`` URL for the following page.
    """
    try:
        requested_count = int(request.args.get('count', PAGE_LIMIT))
    except ValueError:
        # requested count was not an integer
        raise ApiBadRequest('count parameter must be an integer')
    # Limit count to the maximum we're prepared to give
    requested_count = min(PAGE_LIMIT, requested_count)
    # Count must be +ve
    if requested_count < 0:
        raise ApiBadRequest('count parameter must be positive')
    # Query link objects
    links_q = db.session.query(Link.uuid, func.ST_AsGeoJSON(Link.geom)).order_by(Link.uuid)
    unverified_from_id = request.args.get('from')
    if unverified_from_id is not None:
        try:
            from_uuid = urlsafe_id_to_uuid(unverified_from_id)
        except Exception:
            # Was a bare ``except:``.  If the from id is invalid, this is a
            # bad request but raise a 404 to avoid exposing details of the
            # link id encoding.
            raise NotFound()
        links_q = links_q.filter(Link.uuid >= from_uuid)
    # Fetch one extra row so we can tell whether another page exists.
    links_q = links_q.limit(requested_count + 1)
    def row_to_dict(row):
        # Convert a (uuid, geojson-text) row into a GeoJSON Feature dict.
        id_string = uuid_to_urlsafe_id(row[0])
        properties = dict(
            observationsUrl=url_for(
                '.observations', unverified_link_id=id_string, _external=True),
            url=url_for(
                '.link', unverified_link_id=id_string, _external=True),
        )
        return dict(
            type='Feature',
            id=id_string,
            geometry=json.loads(row[1]),
            properties=properties,
        )
    links = list(row_to_dict(l) for l in links_q)
    # How many links to return and do we still have more?
    count = min(requested_count, len(links))
    # Limit size of output; the sentinel extra row (if any) supplies the
    # first id of the next page.
    next_link_id = links[requested_count]['id'] if len(links) > requested_count else None
    links = links[:requested_count]
    # Form response
    page = dict(count=count)
    feature_collection = dict(
        type='FeatureCollection',
        features=links,
        properties=dict(page=page),
    )
    # Form next url if necessary.  (A dead, unused ``next_args`` computation
    # that duplicated extend_request_query's work has been removed.)
    if next_link_id is not None:
        page['next'] = extend_request_query(
            url_for('.links', _external=True),
            {'from': next_link_id}
        )
    return jsonify(feature_collection)
@app.route('/links/', methods=['PATCH'])
def patch_links():
# Get request body as JSON document
body = request.get_json()
# Sanitise body
if body is None:
raise ApiBadRequest('request body must be non-empty')
if not isinstance(body, dict):
raise ApiBadRequest('request body must be a JSON object')
# Extract create requests
try:
create_requests = body['create']
except KeyError:
create_requests = []
if not isinstance(create_requests, list) or len(create_requests) > PAGE_LIMIT:
raise ApiBadRequest('create request must be an array of at most {0} items'.format(PAGE_LIMIT))
# Process create requests
created_links = []
for r in create_requests:
geom_geojson = json.dumps(dict(type='LineString', coordinates=r['coordinates']))
created_links.append(Link(
uuid=uuid.uuid4().hex,
geom=func.ST_SetSRID(func.ST_GeomFromGeoJSON(geom_geojson), 4326)))
db.session.add_all(created_links)
def make_create_response(l):
id = uuid_to_urlsafe_id(l.uuid)
return dict(id=id, url=url_for('.link', unverified_link_id=id, _external=True))
create_responses = list(make_create_response(l) for l in created_links)
db.session.commit()
response = dict(create=create_responses)
return jsonify(response)
@app.route('/links/<unverified_link_id>/observations')
def observations(unverified_link_id):
# Verify link id
link_id, link_uuid = verify_link_id(unverified_link_id)
link_urlsafe_id=uuid_to_urlsafe_id(link_uuid)
link_data = dict(id=link_urlsafe_id)
# Work out if a time range has been specified
try:
duration = int(request.args.get('duration', MAX_DURATION))
except ValueError:
# If duration can't be parsed as an integer, that's a bad request
raise ApiBadRequest('duration parameter must be an integer')
# Restrict duration to the maximum we're comfortable with
duration = min(MAX_DURATION, duration)
if duration < 0:
raise ApiBadRequest('duration parameter must be positive')
start_ts = request.args.get('start')
if start_ts is None:
# Get minimum and maximum times
min_d, max_d = observation_date_range(db.session).one()
# Corner case: if there are no observations in the database, it doesn't
# really matter what start time we use so just use now.
if min_d is None:
start_ts = datetime_to_javascript_timestamp(
pytz.utc.localize(datetime.datetime.utcnow()))
else:
start_ts = datetime_to_javascript_timestamp(
max_d - datetime.timedelta(milliseconds=duration))
else:
# Verify start ts is indeed an integer
try:
start_ts = int(start_ts)
except ValueError:
raise ApiBadRequest('start timestamp must be an integer')
# Record parameters of sanitised query
query_params = dict(start=start_ts, duration=duration)
query_params['earlier'] = extend_request_query(
url_for('.observations', unverified_link_id=link_urlsafe_id, _external=True),
dict(start=start_ts-duration),
)
query_params['later'] = extend_request_query(
url_for('.observations', unverified_link_id=link_urlsafe_id, _external=True),
dict(start=start_ts+duration),
)
start_date = javascript_timestamp_to_datetime(start_ts)
end_date = javascript_timestamp_to_datetime(start_ts + duration)
data = {}
for type in ObservationType:
values = []
q = observations_for_link(db.session, link_id, type, start_date, end_date)
for obs in q:
values.append((datetime_to_javascript_timestamp(obs.observed_at), obs.value))
data[type.value] = dict(values=values)
response = dict(link=link_data, data=data, query=query_params)
return jsonify(response)
@app.route('/links/<unverified_link_id>/')
def link(unverified_link_id):
link_id, link_uuid = verify_link_id(unverified_link_id)
link_url_id = uuid_to_urlsafe_id(link_uuid)
# Query aliases
aliases = list(r[0] for r in
db.session.query(LinkAlias.name).filter(LinkAlias.link_id==link_id))
# Query geometry
geom = json.loads(
db.session.query(func.ST_AsGeoJSON(Link.geom)).filter(Link.id==link_id).one()[0]
)
response = dict(
type='Feature',
id=link_url_id,
geometry=geom,
properties=dict(
observationsUrl=url_for('.observations', unverified_link_id=link_url_id, _external=True),
aliases=aliases,
),
)
return jsonify(response)
@app.route('/aliases/')
def link_aliases():
    """Return a paginated listing of link aliases ordered by name.

    Query parameters:
        count -- maximum number of aliases to return (clamped to PAGE_LIMIT).
        from  -- alias name to resume pagination from (inclusive).
    """
    try:
        requested_count = int(request.args.get('count', PAGE_LIMIT))
    except ValueError:
        # requested count was not an integer
        raise ApiBadRequest('count parameter must be an integer')
    # Limit count to the maximum we're prepared to give
    requested_count = min(PAGE_LIMIT, requested_count)
    # Count must be +ve
    if requested_count < 0:
        raise ApiBadRequest('count parameter must be positive')
    # Query alias rows joined to their link so we can build link URLs.
    aliases_q = db.session.query(LinkAlias.name, Link.uuid).join(Link).\
            order_by(LinkAlias.name)
    from_id = request.args.get('from', None)
    if from_id is not None:
        aliases_q = aliases_q.filter(LinkAlias.name >= str(from_id))
    # Fetch one extra row so we can tell whether another page exists.
    aliases_q = aliases_q.limit(requested_count + 1)
    def row_to_item(row):
        link_id = uuid_to_urlsafe_id(row[1])
        link_url = url_for('.link', unverified_link_id=link_id, _external=True)
        return dict(id=row[0], linkId=link_id, linkUrl=link_url)
    aliases = list(row_to_item(l) for l in aliases_q)
    # How many aliases to return and do we still have more?
    count = min(requested_count, len(aliases))
    # Limit size of output; the sentinel extra row supplies the next cursor.
    next_link_id = aliases[requested_count]['id'] if len(aliases) > requested_count else None
    aliases = aliases[:requested_count]
    # Form response
    page = dict(count=count)
    # Form next url if necessary.  (A dead, unused ``next_args`` computation
    # that duplicated extend_request_query's work has been removed.)
    if next_link_id is not None:
        page['next'] = extend_request_query(
            url_for('.link_aliases', _external=True),
            {'from': next_link_id}
        )
    return jsonify(dict(aliases=aliases, page=page))
@app.route('/aliases/resolve', methods=['POST'])
def link_aliases_resolve():
# Request body should be JSON
req_body = request.get_json()
if req_body is None:
raise ApiBadRequest('request body must be non-empty')
# Retrieve and sanitise alias list
try:
aliases = req_body['aliases']
except KeyError:
raise ApiBadRequest('request body must have an "aliases" field')
if not isinstance(aliases, list):
raise ApiBadRequest('aliases must be an array')
if len(aliases) > PAGE_LIMIT:
raise ApiBadRequest('aliases may only have at most {0} entries'.format(PAGE_LIMIT))
if any(not isinstance(a, six.string_types) for a in aliases):
raise ApiBadRequest('aliases must contain only strings')
def link_from_uuid(link_uuid):
if link_uuid is None:
return None
link_id = uuid_to_urlsafe_id(link_uuid)
return dict(id=link_id, url=url_for('.link', unverified_link_id=link_id, _external=True))
db.session.commit() # HACK: seems that temporary table sometimes is not created without this
tmp_table = prepare_resolve_link_aliases(db.session)
q = resolve_link_aliases(db.session, aliases, tmp_table)
resolutions = list((r[0], link_from_uuid(r[2])) for r in q)
response = dict(resolutions=resolutions)
return jsonify(response)
@app.route('/aliases/', methods=['PATCH'])
def patch_aliases():
# Get request body as JSON document
body = request.get_json()
# Sanitise body
if body is None:
raise ApiBadRequest('request body must be non-empty')
if not isinstance(body, dict):
raise ApiBadRequest('request body must be a JSON object')
# Extract create requests
try:
create_requests = body['create']
except KeyError:
create_requests = []
if not isinstance(create_requests, list) or len(create_requests) > PAGE_LIMIT:
raise ApiBadRequest('create request must be an array of at most {0} items'.format(PAGE_LIMIT))
# Process create requests
created_aliases = []
for r in create_requests:
try:
req_name, req_link = r['name'], r['link']
assert isinstance(req_name, six.string_types)
assert isinstance(req_link, six.string_types)
except:
raise ApiBadRequest('create request number {0} is malformed'.format(
len(created_aliases)+1))
try:
link_id, _ = verify_link_id(req_link)
except NotFound:
raise ApiBadRequest(
'create request number {0} references non-existent link "{1}"'.format(
len(created_aliases)+1, req_link))
created_aliases.append(LinkAlias(name=req_name, link_id=link_id))
db.session.add_all(created_aliases)
try:
db.session.commit()
except IntegrityError:
raise ApiBadRequest('invalid request (perhaps identical alias names?)')
response = dict(create={ 'status': 'ok', 'count': len(created_aliases) })
return jsonify(response)
| 33.730942
| 102
| 0.667974
|
4a11e59fdd65f55af3f7f6ad53b678579975e44e
| 4,279
|
py
|
Python
|
oscar/apps/catalogue/reviews/views.py
|
LUKKIEN/django-oscar
|
e3a418e07910806b0605a591106eff2a51d27c83
|
[
"BSD-3-Clause"
] | null | null | null |
oscar/apps/catalogue/reviews/views.py
|
LUKKIEN/django-oscar
|
e3a418e07910806b0605a591106eff2a51d27c83
|
[
"BSD-3-Clause"
] | null | null | null |
oscar/apps/catalogue/reviews/views.py
|
LUKKIEN/django-oscar
|
e3a418e07910806b0605a591106eff2a51d27c83
|
[
"BSD-3-Clause"
] | null | null | null |
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.views.generic import ListView, DetailView, CreateView, View
from django.contrib import messages
from django.db.models import get_model
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_classes
from oscar.apps.catalogue.reviews.signals import review_added
ProductReviewForm, VoteForm = get_classes(
'catalogue.reviews.forms', ['ProductReviewForm', 'VoteForm'])
Vote = get_model('reviews', 'vote')
ProductReview = get_model('reviews', 'ProductReview')
Product = get_model('catalogue', 'product')
class CreateProductReview(CreateView):
template_name = "catalogue/reviews/review_form.html"
model = get_model('reviews', 'ProductReview')
product_model = get_model('catalogue', 'product')
form_class = ProductReviewForm
view_signal = review_added
def dispatch(self, request, *args, **kwargs):
self.product = get_object_or_404(
self.product_model, pk=kwargs['product_pk'])
if self.product.has_review_by(request.user):
messages.warning(
self.request, _("You have already reviewed this product!"))
return HttpResponseRedirect(self.product.get_absolute_url())
return super(CreateProductReview, self).dispatch(
request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(CreateProductReview, self).get_context_data(**kwargs)
context['product'] = self.product
return context
def get_form_kwargs(self):
kwargs = super(CreateProductReview, self).get_form_kwargs()
kwargs['product'] = self.product
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
response = super(CreateProductReview, self).form_valid(form)
self.send_signal(self.request, response, self.object)
return response
def get_success_url(self):
messages.success(
self.request, _("Thank you for reviewing this product"))
return self.product.get_absolute_url()
def send_signal(self, request, response, review):
self.view_signal.send(sender=self, review=review, user=request.user,
request=request, response=response)
class ProductReviewDetail(DetailView):
template_name = "catalogue/reviews/review_detail.html"
context_object_name = 'review'
model = ProductReview
def get_context_data(self, **kwargs):
context = super(ProductReviewDetail, self).get_context_data(**kwargs)
context['product'] = get_object_or_404(
Product, pk=self.kwargs['product_pk'])
return context
class AddVoteView(View):
    """Record an up-vote on a product review, then redirect back to the
    referring page (falling back to the product page)."""
    def post(self, request, *args, **kwargs):
        product = get_object_or_404(Product, pk=self.kwargs['product_pk'])
        review = get_object_or_404(ProductReview, pk=self.kwargs['pk'])
        # The unsaved Vote instance lets VoteForm run its validation
        # (e.g. duplicate-vote checks) against this review/user pair.
        instance = Vote(review=review, user=request.user)
        form = VoteForm(data=request.POST, instance=instance)
        if form.is_valid():
            # NOTE(review): only vote_up is ever applied here, regardless of
            # any direction the submitted form carries — confirm down-votes
            # are intentionally unsupported by this view.
            review.vote_up(request.user)
            messages.success(request, _("Thanks for voting!"))
        else:
            for error in form.errors:
                messages.error(request, error)
        return HttpResponseRedirect(
            request.META.get('HTTP_REFERER', product.get_absolute_url()))
class ProductReviewList(ListView):
"""
Browse reviews for a product
"""
template_name = 'catalogue/reviews/review_list.html'
context_object_name = "reviews"
model = get_model('reviews', 'productreview')
product_model = get_model('catalogue', 'product')
paginate_by = 20
def get_queryset(self):
qs = self.model.approved.filter(product=self.kwargs['product_pk'])
if 'sort_by' in self.request.GET \
and self.request.GET['sort_by'] == 'score':
return qs.order_by('-score')
return qs.order_by('-date_created')
def get_context_data(self, **kwargs):
context = super(ProductReviewList, self).get_context_data(**kwargs)
context['product'] = get_object_or_404(
self.product_model, pk=self.kwargs['product_pk'])
return context
| 37.867257
| 77
| 0.681701
|
4a11e6864168cae82f5d91621afcf41538df5851
| 460
|
py
|
Python
|
config.py
|
Gluuz/architecture-patterns-with-python
|
c5ff170c8079c8bb302e4a4f83ff0cb29130a742
|
[
"MIT"
] | 1
|
2022-03-07T03:14:42.000Z
|
2022-03-07T03:14:42.000Z
|
config.py
|
Gluuz/architecture-patterns-with-python
|
c5ff170c8079c8bb302e4a4f83ff0cb29130a742
|
[
"MIT"
] | null | null | null |
config.py
|
Gluuz/architecture-patterns-with-python
|
c5ff170c8079c8bb302e4a4f83ff0cb29130a742
|
[
"MIT"
] | null | null | null |
import os
def get_postgres_uri():
    """Build the Postgres connection URI from DB_HOST / DB_PASSWORD env vars.

    A local dev database listens on the mapped port 54321; any other host
    uses the standard 5432.
    """
    host = os.environ.get("DB_HOST", "localhost")
    if host == "localhost":
        port = 54321
    else:
        port = 5432
    password = os.environ.get("DB_PASSWORD", "abc123")
    user = "allocation"
    db_name = "allocation"
    return "postgresql://{}:{}@{}:{}/{}".format(user, password, host, port, db_name)
def get_api_url():
    """Return the base URL of the API, derived from the API_HOST env var.

    The local dev server listens on 5005; any other host uses port 80.
    """
    host = os.environ.get("API_HOST", "localhost")
    if host == "localhost":
        port = 5005
    else:
        port = 80
    return "http://{}:{}".format(host, port)
| 30.666667
| 68
| 0.647826
|
4a11ea0104d511ea6fc4a34483f8fa19caacffba
| 7,240
|
py
|
Python
|
jina/peapods/runtimes/__init__.py
|
tadej-redstone/jina
|
07f341e791e011a4eb3f13fd7718caadb164a512
|
[
"Apache-2.0"
] | 1
|
2020-12-23T08:58:49.000Z
|
2020-12-23T08:58:49.000Z
|
jina/peapods/runtimes/__init__.py
|
4un98737/jina
|
199445efe66f9fea307dc4cb49edaec5f2149ea9
|
[
"Apache-2.0"
] | null | null | null |
jina/peapods/runtimes/__init__.py
|
4un98737/jina
|
199445efe66f9fea307dc4cb49edaec5f2149ea9
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import multiprocessing
import threading
from multiprocessing.synchronize import Event
from typing import Dict, Union
from jina.peapods.zmq import send_ctrl_message, Zmqlet
from jina.enums import PeaRoleType
from jina.excepts import PeaFailToStart
from jina.helper import typename
from jina.logging import JinaLogger
__all__ = ['BaseRuntime']
def _get_event(obj: 'BaseRuntime') -> Event:
    """Return an Event of the flavour matching *obj*'s concurrency backend.

    Thread-backed runtimes get a :class:`threading.Event`; process-backed
    ones get a :class:`multiprocessing.Event`; anything else is unsupported.
    """
    if isinstance(obj, multiprocessing.Process):
        return multiprocessing.Event()
    if isinstance(obj, threading.Thread):
        return threading.Event()
    raise NotImplementedError
def _make_or_event(obj: 'BaseRuntime', *events) -> Event:
    """Build an event that mirrors the logical OR of all *events*.

    Each source event is monkey-patched so its ``set``/``clear`` also
    refreshes the combined event; the combined event's backend (thread vs
    process) is chosen to match *obj*.
    """
    or_event = _get_event(obj)
    def or_set(self):
        # Delegate to the original set(), then re-evaluate the OR.
        self._set()
        self.changed()
    def or_clear(self):
        # Delegate to the original clear(), then re-evaluate the OR.
        self._clear()
        self.changed()
    def orify(e, changed_callback):
        # Swap e.set/e.clear for wrappers that also fire the callback; the
        # originals are preserved on _set/_clear.
        e._set = e.set
        e._clear = e.clear
        e.changed = changed_callback
        e.set = lambda: or_set(e)
        e.clear = lambda: or_clear(e)
    def changed():
        # Recompute the OR over every source event's current state.
        bools = [e.is_set() for e in events]
        if any(bools):
            or_event.set()
        else:
            or_event.clear()
    for e in events:
        orify(e, changed)
    changed()  # initialise or_event from the sources' current state
    return or_event
class RuntimeMeta(type):
    """Meta class of :class:`BaseRuntime` to enable switching between ``thread`` and ``process`` backend. """
    # Registry of every class created through this metaclass, keyed by class
    # name; __call__ uses it to rebuild the hierarchy on a new backend.
    _dct = {}
    def __new__(cls, name, bases, dct):
        _cls = super().__new__(cls, name, bases, dct)
        # Remember the ingredients of the class so it can be recreated later
        # on top of a different base (Thread vs Process).
        RuntimeMeta._dct.update({name: {'cls': cls,
                                        'name': name,
                                        'bases': bases,
                                        'dct': dct}})
        return _cls
    def __call__(cls, *args, **kwargs) -> 'RuntimeMeta':
        # switch to the new backend: args[0] is expected to carry a
        # ``runtime`` attribute selecting 'thread' (default) or 'process'
        _cls = {
            'thread': threading.Thread,
            'process': multiprocessing.Process,
        }.get(getattr(args[0], 'runtime', 'thread'))
        # rebuild the class according to mro, re-deriving each ancestor from
        # the chosen backend class so the final object inherits from it
        for c in cls.mro()[-2::-1]:
            arg_cls = RuntimeMeta._dct[c.__name__]['cls']
            arg_name = RuntimeMeta._dct[c.__name__]['name']
            arg_dct = RuntimeMeta._dct[c.__name__]['dct']
            _cls = super().__new__(arg_cls, arg_name, (_cls,), arg_dct)
        return type.__call__(_cls, *args, **kwargs)
class BaseRuntime(metaclass=RuntimeMeta):
"""BaseRuntime is a process or thread providing the support to run different :class:`BasePea` in different environments.
It manages the lifetime of these `BasePea` objects living in `Local`, `Remote`, or `Container` environment.
Inherited classes must define their own `run` method that is the one that will be run in a separate process or thread than the main process
"""
def __init__(self, args: Union['argparse.Namespace', Dict]):
super().__init__()
self.args = args
self.name = self.__class__.__name__ #: this is the process name
self.is_ready_event = _get_event(self)
self.is_shutdown = _get_event(self)
self.ready_or_shutdown = _make_or_event(self, self.is_ready_event, self.is_shutdown)
self.is_shutdown.clear()
if 'daemon' in args:
self.daemon = args.daemon
if 'name' in self.args and self.args.name:
self.name = f'runtime-{self.args.name}'
if 'role' in self.args and self.args.role == PeaRoleType.PARALLEL:
self.name = f'runtime-{self.args.name}-{self.args.pea_id}'
if 'role' in self.args and self.args.role == PeaRoleType.HEAD:
self.name = f'runtime-{self.args.name}-head'
if 'role' in self.args and self.args.role == PeaRoleType.TAIL:
self.name = f'runtime-{self.args.name}-tail'
if 'host' in self.args and 'port_ctrl' in self.args and 'ctrl_with_ipc' in self.args:
self.ctrl_addr, self.ctrl_with_ipc = Zmqlet.get_ctrl_address(self.args.host, self.args.port_ctrl,
self.args.ctrl_with_ipc)
if 'log_id' in self.args and 'log_config' in self.args:
self.logger = JinaLogger(self.name,
log_id=self.args.log_id,
log_config=self.args.log_config)
else:
self.logger = JinaLogger(self.name)
def run(self):
raise NotImplementedError
def start(self):
super().start()
if isinstance(self.args, dict):
_timeout = getattr(self.args['peas'][0], 'timeout_ready', -1)
else:
_timeout = getattr(self.args, 'timeout_ready', -1)
if _timeout <= 0:
_timeout = None
else:
_timeout /= 1e3
if self.ready_or_shutdown.wait(_timeout):
if self.is_shutdown.is_set():
# return too early and the shutdown is set, means something fails!!
self.logger.critical(f'fails to start {typename(self)} with name {self.name}, '
f'this often means the executor used in the pod is not valid')
raise PeaFailToStart
else:
self.logger.info(f'ready to listen')
return self
else:
raise TimeoutError(
f'{typename(self)} with name {self.name} can not be initialized after {_timeout * 1e3}ms')
def set_ready(self):
"""Set the `is_ready_event` to indicate that the `BasePea` managed by the Runtime is ready to start
receiving messages"""
self.is_ready_event.set()
def unset_ready(self):
"""Clear the `is_ready_event` to indicate that the `BasePea` managed by the Runtime is not anymore ready to start
receiving messages"""
self.is_ready_event.clear()
def set_shutdown(self):
"""Set the `is_shutdown` event to indicate that the `BasePea` managed by the Runtime is closed and the parallel process
can be shutdown"""
self.is_shutdown.set()
@property
def status(self):
"""Send the control signal ``STATUS`` to the manages `BasePea` and return the status """
return send_ctrl_message(self.ctrl_addr, 'STATUS', timeout=self.args.timeout_ctrl)
@property
def is_ready(self) -> bool:
status = self.status
return status and status.is_ready
@property
def is_idle(self) -> bool:
raise NotImplementedError
def send_terminate_signal(self):
"""Send a terminate signal to the `BasePea` supported by this `Runtime` """
return send_ctrl_message(self.ctrl_addr, 'TERMINATE', timeout=self.args.timeout_ctrl)
def close(self) -> None:
"""Close this `Runtime` by sending a `terminate signal` to the managed `BasePea`. Wait to
be sure that the `BasePea` is properly closed to join the parallel process """
self.send_terminate_signal()
self.is_shutdown.wait()
self.logger.close()
if not self.daemon:
self.join()
def __enter__(self):
return self.start()
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
self.close()
| 36.2
| 143
| 0.60663
|
4a11eac3933a4c01904989b2d5ef48b358ef548a
| 3,288
|
py
|
Python
|
enaml/wx/wx_date_selector.py
|
pberkes/enaml
|
cbcbee929e3117dfe56c0b06dc2385acc832b0e8
|
[
"BSD-3-Clause-Clear"
] | 11
|
2015-03-14T14:30:51.000Z
|
2022-03-15T13:01:44.000Z
|
enaml/wx/wx_date_selector.py
|
pberkes/enaml
|
cbcbee929e3117dfe56c0b06dc2385acc832b0e8
|
[
"BSD-3-Clause-Clear"
] | 3
|
2015-01-31T11:12:56.000Z
|
2022-03-14T00:53:25.000Z
|
enaml/wx/wx_date_selector.py
|
pberkes/enaml
|
cbcbee929e3117dfe56c0b06dc2385acc832b0e8
|
[
"BSD-3-Clause-Clear"
] | 4
|
2015-01-27T01:56:14.000Z
|
2021-02-23T07:21:20.000Z
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
import wx
from atom.api import Typed
from enaml.widgets.date_selector import ProxyDateSelector
from .wx_bounded_date import (
WxBoundedDate, CHANGED_GUARD, as_wx_date, as_py_date
)
class WxDateSelector(WxBoundedDate, ProxyDateSelector):
    """ A Wx implementation of an Enaml ProxyDateSelector.
    """
    #: A reference to the widget created by the proxy.
    widget = Typed(wx.DatePickerCtrl)
    #--------------------------------------------------------------------------
    # Initialization API
    #--------------------------------------------------------------------------
    def create_widget(self):
        """ Create the wx.DatePickerCtrl widget.
        """
        self.widget = wx.DatePickerCtrl(self.parent_widget())
    def init_widget(self):
        """ Initialize the widget.

        Applies the declaration's format/popup settings, then binds the wx
        date-change event to the WxBoundedDate handler.
        """
        super(WxDateSelector, self).init_widget()
        d = self.declaration
        self.set_date_format(d.date_format)
        self.set_calendar_popup(d.calendar_popup)
        self.widget.Bind(wx.EVT_DATE_CHANGED, self.on_date_changed)
    #--------------------------------------------------------------------------
    # Abstract API Implementation
    #--------------------------------------------------------------------------
    def get_date(self):
        """ Return the current date in the control.
        Returns
        -------
        result : date
            The current control date as a date object.
        """
        return as_py_date(self.widget.GetValue())
    def set_minimum(self, date):
        """ Set the widget's minimum date.
        Parameters
        ----------
        date : date
            The date object to use for setting the minimum date.
        """
        # wx exposes no setter for just the lower bound: replace the lower
        # limit while preserving the current upper limit of the range.
        widget = self.widget
        widget.SetRange(as_wx_date(date), widget.GetUpperLimit())
    def set_maximum(self, date):
        """ Set the widget's maximum date.
        Parameters
        ----------
        date : date
            The date object to use for setting the maximum date.
        """
        # mirror of set_minimum: keep the lower limit, replace the upper.
        widget = self.widget
        widget.SetRange(widget.GetLowerLimit(), as_wx_date(date))
    def set_date(self, date):
        """ Set the widget's current date.
        Parameters
        ----------
        date : date
            The date object to use for setting the date.
        """
        # Guard against feedback: a programmatic SetValue fires
        # EVT_DATE_CHANGED, which would otherwise echo back into Enaml.
        self._guard |= CHANGED_GUARD
        try:
            self.widget.SetValue(as_wx_date(date))
        finally:
            self._guard &= ~CHANGED_GUARD
    def set_date_format(self, format):
        """ Set the widget's date format.
        Parameters
        ----------
        format : string
            A Python time formatting string.
        .. note:: Changing the format on wx is not supported.
           See http://trac.wxwidgets.org/ticket/10988
        """
        pass
    def set_calendar_popup(self, popup):
        """ This is not supported on Wx.
        """
        pass
| 27.864407
| 79
| 0.512165
|
4a11eb3b2ce06d47461203438d052865b52c7f7c
| 25,339
|
py
|
Python
|
facemap_flask_backend/.venv/Lib/site-packages/engineio/asyncio_client.py
|
PuneethRegonda/realtime_surveillance_system
|
bf0f7a80c8a0f8bbaaf3dcb6fd7ee419708a5f4f
|
[
"Apache-2.0"
] | 2
|
2021-07-05T03:53:41.000Z
|
2021-07-09T02:03:16.000Z
|
facemap_flask_backend/.venv/Lib/site-packages/engineio/asyncio_client.py
|
PuneethRegonda/realtime_surveillance_system
|
bf0f7a80c8a0f8bbaaf3dcb6fd7ee419708a5f4f
|
[
"Apache-2.0"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
Lib/site-packages/engineio/asyncio_client.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 4
|
2020-10-18T05:37:47.000Z
|
2022-03-25T12:28:06.000Z
|
import asyncio
import signal
import ssl
import threading
try:
import aiohttp
except ImportError: # pragma: no cover
aiohttp = None
from . import client
from . import exceptions
from . import packet
from . import payload
# Tracks whether the SIGINT handler has already been installed.
async_signal_handler_set = False
def async_signal_handler():
    """SIGINT handler.

    Stops the event loop and disconnects every active asyncio-based
    Engine.IO client.
    """
    async def _shutdown():
        asyncio.get_event_loop().stop()
        # iterate over a snapshot: disconnect() mutates connected_clients
        for conn in list(client.connected_clients):
            if not conn.is_asyncio_based():  # pragma: no cover
                continue
            await conn.disconnect()
    asyncio.ensure_future(_shutdown())
class AsyncClient(client.Client):
    """An Engine.IO client for asyncio.
    This class implements a fully compliant Engine.IO web client with support
    for websocket and long-polling transports, compatible with the asyncio
    framework on Python 3.5 or newer.
    :param logger: To enable logging set to ``True`` or pass a logger object to
                   use. To disable logging set to ``False``. The default is
                   ``False``. Note that fatal errors are logged even when
                   ``logger`` is ``False``.
    :param json: An alternative json module to use for encoding and decoding
                 packets. Custom json modules must have ``dumps`` and ``loads``
                 functions that are compatible with the standard library
                 versions.
    :param request_timeout: A timeout in seconds for requests. The default is
                            5 seconds.
    :param http_session: an initialized ``aiohttp.ClientSession`` object to be
                         used when sending requests to the server. Use it if
                         you need to add special client options such as proxy
                         servers, SSL certificates, etc.
    :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
                       skip SSL certificate verification, allowing
                       connections to servers with self signed certificates.
                       The default is ``True``.
    """
    def is_asyncio_based(self):
        # Distinguishes this client from the threaded client.Client variant.
        return True
    async def connect(self, url, headers=None, transports=None,
                      engineio_path='engine.io'):
        """Connect to an Engine.IO server.
        :param url: The URL of the Engine.IO server. It can include custom
                    query string parameters if required by the server.
        :param headers: A dictionary with custom headers to send with the
                        connection request.
        :param transports: The list of allowed transports. Valid transports
                           are ``'polling'`` and ``'websocket'``. If not
                           given, the polling transport is connected first,
                           then an upgrade to websocket is attempted.
        :param engineio_path: The endpoint where the Engine.IO server is
                              installed. The default value is appropriate for
                              most cases.
        Note: this method is a coroutine.
        Example usage::
            eio = engineio.Client()
            await eio.connect('http://localhost:5000')
        """
        global async_signal_handler_set
        # Install the SIGINT handler once, and only from the main thread:
        # signal handlers cannot be registered from worker threads.
        if not async_signal_handler_set and \
                threading.current_thread() == threading.main_thread():
            try:
                asyncio.get_event_loop().add_signal_handler(
                    signal.SIGINT, async_signal_handler)
                async_signal_handler_set = True
            except NotImplementedError:  # pragma: no cover
                self.logger.warning('Signal handler is unsupported')
        if self.state != 'disconnected':
            raise ValueError('Client is not in a disconnected state')
        valid_transports = ['polling', 'websocket']
        if transports is not None:
            if isinstance(transports, str):
                transports = [transports]
            # keep only recognized transport names, preserving caller order
            transports = [transport for transport in transports
                          if transport in valid_transports]
            if not transports:
                raise ValueError('No valid transports provided')
        self.transports = transports or valid_transports
        self.queue = self.create_queue()
        # dispatch to _connect_polling() or _connect_websocket() depending
        # on the first requested transport
        return await getattr(self, '_connect_' + self.transports[0])(
            url, headers or {}, engineio_path)
async def wait(self):
"""Wait until the connection with the server ends.
Client applications can use this function to block the main thread
during the life of the connection.
Note: this method is a coroutine.
"""
if self.read_loop_task:
await self.read_loop_task
async def send(self, data):
"""Send a message to a client.
:param data: The data to send to the client. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
Note: this method is a coroutine.
"""
await self._send_packet(packet.Packet(packet.MESSAGE, data=data))
    async def disconnect(self, abort=False):
        """Disconnect from the server.
        :param abort: If set to ``True``, do not wait for background tasks
                      associated with the connection to end.
        Note: this method is a coroutine.
        """
        if self.state == 'connected':
            await self._send_packet(packet.Packet(packet.CLOSE))
            # a None entry in the queue signals the write loop to exit
            await self.queue.put(None)
            self.state = 'disconnecting'
            await self._trigger_event('disconnect', run_async=False)
            if self.current_transport == 'websocket':
                await self.ws.close()
            if not abort:
                # wait for the read loop (which also awaits the write loop)
                await self.read_loop_task
        self.state = 'disconnected'
        try:
            client.connected_clients.remove(self)
        except ValueError: # pragma: no cover
            pass
        self._reset()
def start_background_task(self, target, *args, **kwargs):
"""Start a background task.
This is a utility function that applications can use to start a
background task.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object compatible with the `Thread` class in
the Python standard library. The `start()` method on this object is
already called by this function.
Note: this method is a coroutine.
"""
return asyncio.ensure_future(target(*args, **kwargs))
async def sleep(self, seconds=0):
"""Sleep for the requested amount of time.
Note: this method is a coroutine.
"""
return await asyncio.sleep(seconds)
def create_queue(self):
"""Create a queue object."""
q = asyncio.Queue()
q.Empty = asyncio.QueueEmpty
return q
def create_event(self):
"""Create an event object."""
return asyncio.Event()
    def _reset(self):
        # Close the aiohttp session asynchronously before clearing state;
        # the base class resets sid/state/etc.
        if self.http: # pragma: no cover
            asyncio.ensure_future(self.http.close())
        super()._reset()
    async def _connect_polling(self, url, headers, engineio_path):
        """Establish a long-polling connection to the Engine.IO server."""
        if aiohttp is None: # pragma: no cover
            self.logger.error('aiohttp not installed -- cannot make HTTP '
                              'requests!')
            return
        self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
        self.logger.info('Attempting polling connection to ' + self.base_url)
        r = await self._send_request(
            'GET', self.base_url + self._get_url_timestamp(), headers=headers,
            timeout=self.request_timeout)
        if r is None:
            self._reset()
            raise exceptions.ConnectionError(
                'Connection refused by the server')
        if r.status < 200 or r.status >= 300:
            self._reset()
            try:
                arg = await r.json()
            except aiohttp.ClientError:
                arg = None
            raise exceptions.ConnectionError(
                'Unexpected status code {} in server response'.format(
                    r.status), arg)
        try:
            p = payload.Payload(encoded_payload=(await r.read()).decode(
                'utf-8'))
        except ValueError:
            raise exceptions.ConnectionError(
                'Unexpected response from server') from None
        # the handshake response must start with an OPEN packet carrying the
        # session id, allowed upgrades and ping settings
        open_packet = p.packets[0]
        if open_packet.packet_type != packet.OPEN:
            raise exceptions.ConnectionError(
                'OPEN packet not returned by server')
        self.logger.info(
            'Polling connection accepted with ' + str(open_packet.data))
        self.sid = open_packet.data['sid']
        self.upgrades = open_packet.data['upgrades']
        # server values are in milliseconds; store seconds
        self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
        self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
        self.current_transport = 'polling'
        self.base_url += '&sid=' + self.sid
        self.state = 'connected'
        client.connected_clients.append(self)
        await self._trigger_event('connect', run_async=False)
        # any packets that arrived together with OPEN are delivered now
        for pkt in p.packets[1:]:
            await self._receive_packet(pkt)
        if 'websocket' in self.upgrades and 'websocket' in self.transports:
            # attempt to upgrade to websocket
            if await self._connect_websocket(url, headers, engineio_path):
                # upgrade to websocket succeeded, we're done here
                return
        self.write_loop_task = self.start_background_task(self._write_loop)
        self.read_loop_task = self.start_background_task(
            self._read_loop_polling)
    async def _connect_websocket(self, url, headers, engineio_path):
        """Establish or upgrade to a WebSocket connection with the server.

        Returns ``True`` on success. When called as an upgrade (a polling
        session id already exists) failures return ``False`` so polling can
        continue; on a direct connection failures raise ConnectionError.
        """
        if aiohttp is None: # pragma: no cover
            self.logger.error('aiohttp package not installed')
            return False
        websocket_url = self._get_engineio_url(url, engineio_path,
                                               'websocket')
        if self.sid:
            self.logger.info(
                'Attempting WebSocket upgrade to ' + websocket_url)
            upgrade = True
            websocket_url += '&sid=' + self.sid
        else:
            upgrade = False
            self.base_url = websocket_url
            self.logger.info(
                'Attempting WebSocket connection to ' + websocket_url)
        if self.http is None or self.http.closed: # pragma: no cover
            self.http = aiohttp.ClientSession()
        # extract any new cookies passed in a header so that they can also be
        # sent to the WebSocket route
        cookies = {}
        for header, value in headers.items():
            if header.lower() == 'cookie':
                cookies = dict(
                    [cookie.split('=', 1) for cookie in value.split('; ')])
                del headers[header]
                break
        self.http.cookie_jar.update_cookies(cookies)
        try:
            if not self.ssl_verify:
                # disable certificate validation on request
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                ws = await self.http.ws_connect(
                    websocket_url + self._get_url_timestamp(),
                    headers=headers, ssl=ssl_context)
            else:
                ws = await self.http.ws_connect(
                    websocket_url + self._get_url_timestamp(),
                    headers=headers)
        except (aiohttp.client_exceptions.WSServerHandshakeError,
                aiohttp.client_exceptions.ServerConnectionError,
                aiohttp.client_exceptions.ClientConnectionError):
            if upgrade:
                self.logger.warning(
                    'WebSocket upgrade failed: connection error')
                return False
            else:
                raise exceptions.ConnectionError('Connection error')
        if upgrade:
            # upgrade handshake: send PING 'probe', expect PONG 'probe',
            # then confirm with an UPGRADE packet
            p = packet.Packet(packet.PING, data='probe').encode()
            try:
                await ws.send_str(p)
            except Exception as e: # pragma: no cover
                self.logger.warning(
                    'WebSocket upgrade failed: unexpected send exception: %s',
                    str(e))
                return False
            try:
                p = (await ws.receive()).data
            except Exception as e: # pragma: no cover
                self.logger.warning(
                    'WebSocket upgrade failed: unexpected recv exception: %s',
                    str(e))
                return False
            pkt = packet.Packet(encoded_packet=p)
            if pkt.packet_type != packet.PONG or pkt.data != 'probe':
                self.logger.warning(
                    'WebSocket upgrade failed: no PONG packet')
                return False
            p = packet.Packet(packet.UPGRADE).encode()
            try:
                await ws.send_str(p)
            except Exception as e: # pragma: no cover
                self.logger.warning(
                    'WebSocket upgrade failed: unexpected send exception: %s',
                    str(e))
                return False
            self.current_transport = 'websocket'
            self.logger.info('WebSocket upgrade was successful')
        else:
            # direct websocket connection: the first packet must be OPEN
            try:
                p = (await ws.receive()).data
            except Exception as e: # pragma: no cover
                raise exceptions.ConnectionError(
                    'Unexpected recv exception: ' + str(e))
            open_packet = packet.Packet(encoded_packet=p)
            if open_packet.packet_type != packet.OPEN:
                raise exceptions.ConnectionError('no OPEN packet')
            self.logger.info(
                'WebSocket connection accepted with ' + str(open_packet.data))
            self.sid = open_packet.data['sid']
            self.upgrades = open_packet.data['upgrades']
            # server values are in milliseconds; store seconds
            self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
            self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
            self.current_transport = 'websocket'
            self.state = 'connected'
            client.connected_clients.append(self)
            await self._trigger_event('connect', run_async=False)
        self.ws = ws
        self.write_loop_task = self.start_background_task(self._write_loop)
        self.read_loop_task = self.start_background_task(
            self._read_loop_websocket)
        return True
    async def _receive_packet(self, pkt):
        """Handle incoming packets from the server."""
        packet_name = packet.packet_names[pkt.packet_type] \
            if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
        self.logger.info(
            'Received packet %s data %s', packet_name,
            pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
        if pkt.packet_type == packet.MESSAGE:
            await self._trigger_event('message', pkt.data, run_async=True)
        elif pkt.packet_type == packet.PING:
            # answer server pings with a PONG echoing the same payload
            await self._send_packet(packet.Packet(packet.PONG, pkt.data))
        elif pkt.packet_type == packet.CLOSE:
            await self.disconnect(abort=True)
        elif pkt.packet_type == packet.NOOP:
            pass
        else:
            self.logger.error('Received unexpected packet of type %s',
                              pkt.packet_type)
async def _send_packet(self, pkt):
"""Queue a packet to be sent to the server."""
if self.state != 'connected':
return
await self.queue.put(pkt)
self.logger.info(
'Sending packet %s data %s',
packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
    async def _send_request(
            self, method, url, headers=None, body=None,
            timeout=None): # pragma: no cover
        # Issue an HTTP request through the shared aiohttp session.
        # Returns the aiohttp response, or None when the request failed
        # (connection error or timeout).
        if self.http is None or self.http.closed:
            self.http = aiohttp.ClientSession()
        http_method = getattr(self.http, method.lower())
        try:
            if not self.ssl_verify:
                return await http_method(
                    url, headers=headers, data=body,
                    timeout=aiohttp.ClientTimeout(total=timeout), ssl=False)
            else:
                return await http_method(
                    url, headers=headers, data=body,
                    timeout=aiohttp.ClientTimeout(total=timeout))
        except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
            self.logger.info('HTTP %s request to %s failed with error %s.',
                             method, url, exc)
async def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
ret = None
if event in self.handlers:
if asyncio.iscoroutinefunction(self.handlers[event]) is True:
if run_async:
return self.start_background_task(self.handlers[event],
*args)
else:
try:
ret = await self.handlers[event](*args)
except asyncio.CancelledError: # pragma: no cover
pass
except:
self.logger.exception(event + ' async handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
else:
if run_async:
async def async_handler():
return self.handlers[event](*args)
return self.start_background_task(async_handler)
else:
try:
ret = self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
return ret
    async def _read_loop_polling(self):
        """Read packets by polling the Engine.IO server."""
        while self.state == 'connected':
            self.logger.info(
                'Sending polling GET request to ' + self.base_url)
            r = await self._send_request(
                'GET', self.base_url + self._get_url_timestamp(),
                timeout=max(self.ping_interval, self.ping_timeout) + 5)
            if r is None:
                self.logger.warning(
                    'Connection refused by the server, aborting')
                # a None in the queue tells the write loop to exit
                await self.queue.put(None)
                break
            if r.status < 200 or r.status >= 300:
                self.logger.warning('Unexpected status code %s in server '
                                    'response, aborting', r.status)
                await self.queue.put(None)
                break
            try:
                p = payload.Payload(encoded_payload=(await r.read()).decode(
                    'utf-8'))
            except ValueError:
                self.logger.warning(
                    'Unexpected packet from server, aborting')
                await self.queue.put(None)
                break
            for pkt in p.packets:
                await self._receive_packet(pkt)
        # drain the companion write loop before tearing the connection down
        self.logger.info('Waiting for write loop task to end')
        await self.write_loop_task
        if self.state == 'connected':
            # the loop ended without an explicit disconnect() call
            await self._trigger_event('disconnect', run_async=False)
            try:
                client.connected_clients.remove(self)
            except ValueError: # pragma: no cover
                pass
            self._reset()
        self.logger.info('Exiting read loop task')
    async def _read_loop_websocket(self):
        """Read packets from the Engine.IO WebSocket connection."""
        while self.state == 'connected':
            p = None
            try:
                # if no data arrives within ping_interval + ping_timeout the
                # server is considered unresponsive
                p = await asyncio.wait_for(
                    self.ws.receive(),
                    timeout=self.ping_interval + self.ping_timeout)
                p = p.data
                if p is None: # pragma: no cover
                    await self.queue.put(None)
                    break # the connection is broken
            except asyncio.TimeoutError:
                self.logger.warning(
                    'Server has stopped communicating, aborting')
                await self.queue.put(None)
                break
            except aiohttp.client_exceptions.ServerDisconnectedError:
                self.logger.info(
                    'Read loop: WebSocket connection was closed, aborting')
                await self.queue.put(None)
                break
            except Exception as e:
                self.logger.info(
                    'Unexpected error receiving packet: "%s", aborting',
                    str(e))
                await self.queue.put(None)
                break
            try:
                pkt = packet.Packet(encoded_packet=p)
            except Exception as e: # pragma: no cover
                self.logger.info(
                    'Unexpected error decoding packet: "%s", aborting', str(e))
                await self.queue.put(None)
                break
            await self._receive_packet(pkt)
        # drain the companion write loop before tearing the connection down
        self.logger.info('Waiting for write loop task to end')
        await self.write_loop_task
        if self.state == 'connected':
            # the loop ended without an explicit disconnect() call
            await self._trigger_event('disconnect', run_async=False)
            try:
                client.connected_clients.remove(self)
            except ValueError: # pragma: no cover
                pass
            self._reset()
        self.logger.info('Exiting read loop task')
    async def _write_loop(self):
        """This background task sends packages to the server as they are
        pushed to the send queue.
        """
        while self.state == 'connected':
            # to simplify the timeout handling, use the maximum of the
            # ping interval and ping timeout as timeout, with an extra 5
            # seconds grace period
            timeout = max(self.ping_interval, self.ping_timeout) + 5
            packets = None
            try:
                packets = [await asyncio.wait_for(self.queue.get(), timeout)]
            except (self.queue.Empty, asyncio.TimeoutError,
                    asyncio.CancelledError):
                self.logger.error('packet queue is empty, aborting')
                break
            if packets == [None]:
                # a None entry is the sentinel pushed by disconnect()/read
                # loop to request shutdown
                self.queue.task_done()
                packets = []
            else:
                # batch any further queued packets into the same request
                while True:
                    try:
                        packets.append(self.queue.get_nowait())
                    except self.queue.Empty:
                        break
                    if packets[-1] is None:
                        packets = packets[:-1]
                        self.queue.task_done()
                        break
            if not packets:
                # empty packet list returned -> connection closed
                break
            if self.current_transport == 'polling':
                p = payload.Payload(packets=packets)
                r = await self._send_request(
                    'POST', self.base_url, body=p.encode(),
                    headers={'Content-Type': 'application/octet-stream'},
                    timeout=self.request_timeout)
                for pkt in packets:
                    self.queue.task_done()
                if r is None:
                    self.logger.warning(
                        'Connection refused by the server, aborting')
                    break
                if r.status < 200 or r.status >= 300:
                    self.logger.warning('Unexpected status code %s in server '
                                        'response, aborting', r.status)
                    self._reset()
                    break
            else:
                # websocket
                try:
                    for pkt in packets:
                        if pkt.binary:
                            await self.ws.send_bytes(pkt.encode())
                        else:
                            await self.ws.send_str(pkt.encode())
                        self.queue.task_done()
                except (aiohttp.client_exceptions.ServerDisconnectedError,
                        BrokenPipeError, OSError):
                    self.logger.info(
                        'Write loop: WebSocket connection was closed, '
                        'aborting')
                    break
        self.logger.info('Exiting write loop task')
| 41.539344
| 79
| 0.548127
|
4a11eb6902e0e7f17698ae5fb98d1baaaf322a78
| 3,002
|
py
|
Python
|
harvardcs50/project1env/lib/python3.7/site-packages/passwordmeter/cli.py
|
snowyrainkun/webdevelop
|
9792d8d05b70f35586616a95ba26ed0daf1f72b3
|
[
"MIT"
] | 22
|
2015-03-18T20:33:59.000Z
|
2021-05-13T15:01:33.000Z
|
passwordmeter/cli.py
|
vsajip/passwordmeter
|
72c7f4b0ff69149fa0dd221a938d0ac3ec72bc52
|
[
"MIT"
] | 2
|
2015-04-03T19:48:46.000Z
|
2015-10-01T23:23:19.000Z
|
passwordmeter/cli.py
|
vsajip/passwordmeter
|
72c7f4b0ff69149fa0dd221a938d0ac3ec72bc52
|
[
"MIT"
] | 10
|
2015-06-19T03:21:56.000Z
|
2021-08-31T15:02:58.000Z
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: Philip J Grabner <grabner@cadit.com>
# date: 2013/10/26
# copy: (C) Copyright 2013 Cadit Health Inc., All Rights Reserved.
#------------------------------------------------------------------------------
from __future__ import print_function
import sys
import argparse
import getpass
from .__init__ import Meter
from .i18n import _
# Human-readable strength labels, from weakest to strongest; indexed by
# int(score * len(ratings)), clamped to the last entry in main().
ratings = (
  'Infinitely weak',
  'Extremely weak',
  'Very weak',
  'Weak',
  'Moderately strong',
  'Strong',
  'Very strong',
)
#------------------------------------------------------------------------------
def main(argv=None):
  # CLI entry point: parses arguments, obtains the password (argument,
  # STDIN, or interactive prompt), scores it with Meter and prints the
  # rating plus suggested improvements.
  # Returns a process exit status: 0 when the score meets --minimum,
  # otherwise 100 + the score scaled to 0-100.
  cli = argparse.ArgumentParser(
    description = _(
      'Password strength meter - gives a coefficient of how strong'
      ' a password is (0 = extremely weak, 1 = extremely strong)'
      ' and lists ways that a password can be improved.')
  )
  # note: option strings are wrapped in _() so they can be localized
  cli.add_argument(
    _('-v'), _('--verbose'),
    dest='verbose', default=0, action='count',
    help=_('enable verbose output to STDERR (for per-factor scoring)'))
  cli.add_argument(
    _('-i'), _('--ini'), metavar=_('FILENAME'),
    dest='inifile', default=None,
    help=_('INI settings filename'))
  cli.add_argument(
    _('-s'), _('--setting'), metavar=_('NAME=VALUE'),
    dest='settings', default=[], action='append',
    help=_('override a specific setting'))
  cli.add_argument(
    _('-m'), _('--minimum'), metavar=_('FLOAT'),
    dest='minimum', default=0.75, type=float,
    help=_('minimum password strength for %(prog)s to return'
           ' a success status (default: %(default)s)'))
  cli.add_argument(
    'password', metavar=_('PASSWORD'), nargs='?',
    help=_('password to test; if exactly "-", the password is read from'
           ' STDIN and if not specified, it will be prompted for'))
  options = cli.parse_args(args=argv)
  settings = dict()
  # todo: load ini file...
  settings.update(dict([s.split('=', 1) for s in options.settings]))
  if options.password == '-':
    # NOTE(review): stdin input keeps any trailing newline in the password;
    # confirm this is intended before changing it.
    options.password = sys.stdin.read()
  elif options.password is None:
    options.password = getpass.getpass()
  if options.verbose:
    # todo: use `logging`?...
    class mylogger(object):
      def debug(self, msg, *args, **kw):
        if args:
          msg = msg % args
        sys.stderr.write(msg + '\n')
    settings['logger'] = mylogger()
  meter = Meter(settings=settings)
  result = meter.test(options.password)
  # result[0] is the 0..1 score; map it onto the ratings labels, clamping
  # a perfect score of 1.0 to the last label
  print('Password strength: {} ({})'.format(
    result[0],
    _(ratings[min(len(ratings) - 1, int(result[0] * len(ratings)))])
  ))
  if result[1]:
    print('Possible improvements:')
    for item in result[1].values():
      print(' -',item)
  if result[0] >= options.minimum:
    return 0
  return 100 + int(round(result[0] * 100))
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
| 28.865385
| 79
| 0.542305
|
4a11ebda9f52480574d9f4d8b3a5e22d3d4ae09b
| 2,634
|
py
|
Python
|
training/network_training/competitions_with_custom_Trainers/MMS/nnUNetTrainerV2_MMS.py
|
rylezhou/sunet-pytorch
|
46473f4ba6ce442335f318b45aee50a357af92bf
|
[
"Apache-2.0"
] | null | null | null |
training/network_training/competitions_with_custom_Trainers/MMS/nnUNetTrainerV2_MMS.py
|
rylezhou/sunet-pytorch
|
46473f4ba6ce442335f318b45aee50a357af92bf
|
[
"Apache-2.0"
] | null | null | null |
training/network_training/competitions_with_custom_Trainers/MMS/nnUNetTrainerV2_MMS.py
|
rylezhou/sunet-pytorch
|
46473f4ba6ce442335f318b45aee50a357af92bf
|
[
"Apache-2.0"
] | null | null | null |
import torch
from network_architecture.generic_UNet import Generic_UNet
from network_architecture.initialization import InitWeights_He
from training.network_training.nnUNet_variants.data_augmentation.nnUNetTrainerV2_insaneDA import \
nnUNetTrainerV2_insaneDA
from utilities.nd_softmax import softmax_helper
from torch import nn
class nnUNetTrainerV2_MMS(nnUNetTrainerV2_insaneDA):
    """Trainer variant for the M&Ms challenge: strengthens the insane-DA
    augmentation defaults and builds a Generic_UNet with BatchNorm."""
    def setup_DA_params(self):
        """Override augmentation probabilities/ranges on top of the
        insane-DA defaults."""
        super().setup_DA_params()
        self.data_aug_params["p_rot"] = 0.7
        self.data_aug_params["p_eldef"] = 0.1
        self.data_aug_params["p_scale"] = 0.3
        # scale each axis independently (anisotropic scaling augmentation)
        self.data_aug_params["independent_scale_factor_for_each_axis"] = True
        self.data_aug_params["p_independent_scale_per_axis"] = 0.3
        self.data_aug_params["do_additive_brightness"] = True
        self.data_aug_params["additive_brightness_mu"] = 0
        self.data_aug_params["additive_brightness_sigma"] = 0.2
        self.data_aug_params["additive_brightness_p_per_sample"] = 0.3
        self.data_aug_params["additive_brightness_p_per_channel"] = 1
        self.data_aug_params["elastic_deform_alpha"] = (0., 300.)
        self.data_aug_params["elastic_deform_sigma"] = (9., 15.)
        self.data_aug_params['gamma_range'] = (0.5, 1.6)
    def initialize_network(self):
        """Build the Generic_UNet, choosing 3D or 2D ops by self.threeD.

        NOTE(review): uses BatchNorm here (nnUNetTrainerV2 variants often
        use InstanceNorm) -- this appears deliberate for M&Ms.
        """
        if self.threeD:
            conv_op = nn.Conv3d
            dropout_op = nn.Dropout3d
            norm_op = nn.BatchNorm3d
        else:
            conv_op = nn.Conv2d
            dropout_op = nn.Dropout2d
            norm_op = nn.BatchNorm2d
        norm_op_kwargs = {'eps': 1e-5, 'affine': True}
        # dropout disabled (p=0)
        dropout_op_kwargs = {'p': 0, 'inplace': True}
        net_nonlin = nn.LeakyReLU
        net_nonlin_kwargs = {'negative_slope': 1e-2, 'inplace': True}
        self.network = Generic_UNet(self.num_input_channels, self.base_num_features, self.num_classes,
                                    len(self.net_num_pool_op_kernel_sizes),
                                    self.conv_per_stage, 2, conv_op, norm_op, norm_op_kwargs, dropout_op,
                                    dropout_op_kwargs,
                                    net_nonlin, net_nonlin_kwargs, True, False, lambda x: x, InitWeights_He(1e-2),
                                    self.net_num_pool_op_kernel_sizes, self.net_conv_kernel_sizes, False, True, True)
        if torch.cuda.is_available():
            self.network.cuda()
        self.network.inference_apply_nonlin = softmax_helper
    # NOTE(review): leftover debugging helper kept below as an inert string
    # literal; consider deleting it.
    """def run_training(self):
        from batchviewer import view_batch
        a = next(self.tr_gen)
        view_batch(a['data'])
        import IPython;IPython.embed()"""
| 43.180328
| 117
| 0.655657
|
4a11ec80522bcdaf2d8215d671ba2f989c7dddfb
| 6,807
|
py
|
Python
|
src/python/ilp_common_classes.py
|
hhelm10/distPURL
|
735480bceff38b7a10ea618c13fe93a5b3d26910
|
[
"MIT"
] | 2
|
2021-04-27T15:26:00.000Z
|
2021-09-12T23:15:02.000Z
|
src/python/ilp_common_classes.py
|
hhelm10/distPURL
|
735480bceff38b7a10ea618c13fe93a5b3d26910
|
[
"MIT"
] | null | null | null |
src/python/ilp_common_classes.py
|
hhelm10/distPURL
|
735480bceff38b7a10ea618c13fe93a5b3d26910
|
[
"MIT"
] | 2
|
2021-04-27T15:26:33.000Z
|
2021-09-13T12:32:24.000Z
|
"""
Copyright (c) Microsoft Corporation.
Licensed under the MIT license.
"""
import json
from typing import *
from typing import List
from enum import Enum
from copy import copy
from ilp_common import *
class ILPError(Exception):
    '''
    ILPError Exception class.

    An optional first positional argument becomes the error message;
    without one, a generic message is reported.
    '''
    def __init__(self, *args):
        self.message = args[0] if args else None

    def __str__(self):
        return self.message if self.message else 'ILPError has been raised'
class FileExtension(Enum):
    '''
    FileExtension Enum class: Represents different accepted file extensions for data file inputs.
    '''
    CSV = 'csv'
    PKL = 'pkl'
    NPY = 'npy'
    JSON = 'json'
class InputFormat(Enum):
    '''
    InputFormat Enum class: Represents different input format types for data file inputs.
    '''
    ONE_SET = 'ONE SET'
    HEADER_VOIS = 'HEADER VOIS'
    COLUMN_VOIS = 'COLUMN VOIS'
class ILPCat(Enum):
    '''
    ILPCat Enum class: Represents different cateory types for ILP processing.
    '''
    # values match the category names used by LP solvers ("Integer" vs
    # "Continuous" variables)
    INT = 'Integer'
    CONT = 'Continuous'
class DataInputMode(Enum):
    '''
    DataInputMode Enum class: Represents different modes in which the training and eval data is generated.
    '''
    FIXED = 'FIXED'
    RANDOM = 'RANDOM'
class RunMode(Enum):
    '''
    RunMode Enum class: Represents different modes in which code and output can run.
    '''
    DEBUG = 'DEBUG'
    RELEASE = 'RELEASE'
class EvalParam(Enum):
    '''
    EvalParam Enum class: Represents the parameter to evaluate in relation to.
    '''
    # values match ILPParams attribute names (see load_data)
    WAR = 'weight_approx_resolution'
    TRAINING_SIZE = 'training_sets_size'
class CombiningInputs:
    """
    CombiningInputs class: Contains all parameters needed by the ILP Algorithim to combine embeddings of a distance matrix.
    """
    def __init__(self):
        self.other_node_indices: List[int] = []
        self.similar_node_indices: List[int] = []
        self.dist_matrix: List[float] = []
        # NOTE(review): the statements below are bare annotations -- they do
        # NOT assign values, so these attributes only exist after callers
        # set them explicitly. Accessing them beforehand raises
        # AttributeError.
        self.num_embeddings: int
        self.num_other_nodes: int
        self.max_dist: int
        self.up_bound: int
        self.cat: ILPCat
        self.solver: str
        self.gurobi_outputflag: int
        self.gurobi_logfile: str
        self.time_limit: int
        self.num_threads: int
        self.mode: str
    def create_copy(self, gurobi_log_file_path: str) -> object:
        # Shallow copy with its own Gurobi log path; the list attributes
        # remain shared with the original object.
        obj_copy = copy(self)
        obj_copy.gurobi_logfile = gurobi_log_file_path
        return obj_copy
class ILPParams:
    """
    ILPParams class contains:
    1- All changeable parameters used for running ILP experiments.
    2- JSON representing and reading functions needed to read the class parameters.
    """
    def __init__(self):
        # defaults target the bundled "test_set" experiment
        self.dataset: str = "test_set"
        self.path_to_root: str = "../../"
        self.path_to_metadata: str = "../data/test_set_data/metadata.json"
        self.path_to_output: str = "../output_data/output_test_set/"
        self.num_vois: int = 1
        self.training_sets_sizes: List[int] = [10,15]
        self.minimum_ranking_thresholds: List = [None]
        self.solvers_and_apis: List = [["pulp","coin_cmd"]]
        self.weight_approx_resolutions: List = [None]
        self.num_cores: int = 3
        self.persist_data: bool = True
        self.mode: RunMode = RunMode("DEBUG")
        self.eval_data_mode: DataInputMode = DataInputMode("RANDOM")
        self.training_data_mode: DataInputMode = DataInputMode("RANDOM")
        # NOTE(review): annotated as bool but assigned the int 1 -- confirm
        # whether True was intended.
        self.gurobi_outputflag: bool = 1
        self.time_limit: int = 120
        self.num_threads: int = 1
        self.eval_size: int = 10
        self.eval_param: EvalParam = EvalParam("training_sets_size")
    def __repr__(self) -> str:
        # JSON rendering of all parameters; enums are serialized by value
        ilp_params_data = {
            "dataset" : self.dataset,
            "path_to_root" : self.path_to_root,
            "path_to_metadata" : self.path_to_metadata,
            "path_to_output" : self.path_to_output,
            "num_vois" : self.num_vois,
            "training_sets_sizes" : self.training_sets_sizes,
            "minimum_ranking_thresholds": self.minimum_ranking_thresholds,
            "solvers_and_apis" : self.solvers_and_apis,
            "weight_approx_resolutions" : self.weight_approx_resolutions,
            "num_cores" : self.num_cores,
            "persist_data" : self.persist_data,
            "mode" : self.mode.value,
            "eval_data_mode" : self.eval_data_mode.value,
            "training_data_mode" : self.training_data_mode.value,
            "gurobi_outputflag" : self.gurobi_outputflag,
            "time_limit" : self.time_limit,
            "num_threads" : self.num_threads,
            "eval_size" : self.eval_size,
            "eval_param" : self.eval_param.value
        }
        return json.dumps(ilp_params_data, indent=2)
    def load_data(self, path: str):
        # Populate all attributes from a JSON file; mode fields are
        # upper-cased before enum conversion, so lowercase values in the
        # file are accepted.
        with open(path) as f:
            data = json.load(f)
        self.dataset = data['dataset']
        self.path_to_root = data['path_to_root']
        self.path_to_metadata = data['path_to_metadata']
        self.path_to_output = data['path_to_output']
        self.num_vois = data['num_vois']
        self.training_sets_sizes = data['training_sets_sizes']
        self.minimum_ranking_thresholds = data['minimum_ranking_thresholds']
        self.solvers_and_apis = data['solvers_and_apis']
        self.weight_approx_resolutions = data['weight_approx_resolutions']
        self.num_cores = data['num_cores']
        self.persist_data = data['persist_data']
        self.mode = RunMode(data['mode'].upper())
        self.eval_data_mode = DataInputMode(data['eval_data_mode'].upper())
        self.training_data_mode = DataInputMode(data['training_data_mode'].upper())
        self.gurobi_outputflag = data['gurobi_outputflag']
        self.time_limit = data['time_limit']
        self.num_threads = data['num_threads']
        self.eval_size = data['eval_size']
        self.eval_param = EvalParam(data['eval_param'])
| 38.241573
| 123
| 0.56486
|
4a11ef769f05d791559c74286390c221e9096330
| 1,011
|
py
|
Python
|
boardom/imaging/tone_map.py
|
dmarnerides/boardom
|
b7eec437b43398ac8dd8d2755dca9bbe7a8ff8a3
|
[
"BSD-2-Clause"
] | 2
|
2021-10-05T15:52:50.000Z
|
2021-11-22T00:05:03.000Z
|
boardom/imaging/tone_map.py
|
dmarnerides/boardom
|
b7eec437b43398ac8dd8d2755dca9bbe7a8ff8a3
|
[
"BSD-2-Clause"
] | null | null | null |
boardom/imaging/tone_map.py
|
dmarnerides/boardom
|
b7eec437b43398ac8dd8d2755dca9bbe7a8ff8a3
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
import cv2
import boardom as bd
# These functions assume inputs are hdr images in range [0, 1] with linear luminance
# Outputs are scaled to [0,1]
def culling(x, low=10, high=90, gamma=2.2):
    """Percentile-clip *x*, rescale with ``bd.map_range``, then gamma-encode.

    ``low``/``high`` are percentile cut points (not absolute values); the
    clipped image is remapped to [0, 1] and raised to 1/gamma for display.
    """
    lo_val, hi_val = np.percentile(x, (low, high))
    clipped = np.clip(x, lo_val, hi_val)
    return bd.map_range(clipped) ** (1.0 / gamma)
def exposure(x, exposure=0, gamma=2.2):
    """Apply a gain of 2**exposure stops, clamp to [0, 1], then gamma-encode."""
    scaled = np.clip(x * 2.0 ** exposure, 0, 1)
    return scaled ** (1.0 / gamma)
def reinhard(x, intensity=0.0, light_adapt=0.9, color_adapt=0.1, gamma=2.2):
    """Tone-map *x* with OpenCV's Reinhard operator.

    ``light_adapt``/``color_adapt`` blend global vs. local and luminance vs.
    per-channel adaptation; see cv2.createTonemapReinhard.
    """
    operator = cv2.createTonemapReinhard(
        gamma=gamma,
        intensity=intensity,
        light_adapt=light_adapt,
        color_adapt=color_adapt,
    )
    return operator.process(x)
def drago(x, saturation=1.0, gamma=2.2, bias=0.85):
    """Tone-map *x* with OpenCV's Drago adaptive-logarithmic operator."""
    return cv2.createTonemapDrago(
        gamma=gamma, saturation=saturation, bias=bias
    ).process(x)
def mantiuk(x, saturation=1.0, scale=0.75, gamma=2.2):
    """Tone-map *x* with OpenCV's Mantiuk contrast-domain operator."""
    return cv2.createTonemapMantiuk(
        gamma=gamma, saturation=saturation, scale=scale
    ).process(x)
| 28.083333
| 84
| 0.673591
|
4a11f01759ae0b785f550c0bd4335ba56b57d615
| 30,120
|
py
|
Python
|
networking_oneview/tests/unit/ml2/drivers/oneview/test_oneview_mech_driver.py
|
HewlettPackard/networking-oneview
|
21881596eeb39565a027022921d4ec4f71e3278b
|
[
"Apache-2.0"
] | 6
|
2018-02-02T17:37:15.000Z
|
2018-09-20T14:06:16.000Z
|
networking_oneview/tests/unit/ml2/drivers/oneview/test_oneview_mech_driver.py
|
HewlettPackard/networking-oneview
|
21881596eeb39565a027022921d4ec4f71e3278b
|
[
"Apache-2.0"
] | 18
|
2018-02-06T14:54:22.000Z
|
2018-02-27T13:34:37.000Z
|
networking_oneview/tests/unit/ml2/drivers/oneview/test_oneview_mech_driver.py
|
HewlettPackard/networking-oneview
|
21881596eeb39565a027022921d4ec4f71e3278b
|
[
"Apache-2.0"
] | 2
|
2020-04-28T14:36:12.000Z
|
2020-07-22T13:09:44.000Z
|
# Copyright 2017 Hewlett Packard Enterprise Development LP.
# Copyright 2017 Universidade Federal de Campina Grande
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from neutron.tests.unit.plugins.ml2 import _test_mech_agent as base
from networking_oneview.ml2.drivers.oneview import common
from networking_oneview.ml2.drivers.oneview import database_manager
from networking_oneview.ml2.drivers.oneview import exceptions
from networking_oneview.ml2.drivers.oneview import mech_oneview
from networking_oneview.ml2.drivers.oneview import neutron_oneview_client
# --- Neutron network fixtures --------------------------------------------
# Flat network whose physnet appears in FLAT_NET_MAPPINGS below, i.e. it is
# pre-mapped to an existing OneView network and must not be created.
FAKE_FLAT_ONEVIEW_NETWORK = {
    'id': '1',
    'provider:physical_network': 'physnet-mapped',
    'provider:network_type': 'flat'
}
# Flat network on a physnet managed through uplink-set mappings.
FAKE_FLAT_NETWORK = {
    'id': '2',
    'provider:physical_network': 'physnet',
    'provider:network_type': 'flat'
}
# VLAN network on the managed physnet.
FAKE_VLAN_NETWORK = {
    'id': '3',
    'provider:segmentation_id': '123',
    'provider:physical_network': 'physnet',
    'provider:network_type': 'vlan'
}
# VXLAN network: a type the driver does not support.
# NOTE(review): shares id '3' with FAKE_VLAN_NETWORK — presumably a
# copy-paste artifact; harmless since the two are never used together.
FAKE_VXLAN_NETWORK = {
    'id': '3',
    'provider:segmentation_id': '123',
    'provider:physical_network': 'physnet',
    'provider:network_type': 'vxlan'
}
# Flat network on a physnet that appears in neither mapping.
FAKE_NETWORK_NOT_MAPPED = {
    'id': '4',
    'provider:physical_network': 'not_mapped_phys',
    'provider:network_type': 'flat'
}
# --- Network segment fixtures (as returned by the database manager) ------
FAKE_NETWORK_SEGMENT = {
    'physical_network': 'physnet',
    'network_type': 'flat'
}
FAKE_NETWORK_SEGMENT_NOT_MAPPED = {
    'physical_network': 'not_mapped_phys',
    'network_type': 'flat'
}
# --- OneView uplink-set / LIG fixtures -----------------------------------
FAKE_UNTAGGED_UPLINKSET = {
    'name': 'uplinkset_flat',
    'ethernetNetworkType': 'untagged',
    'networkUris': ['fake_net_uri']
}
FAKE_TAGGED_UPLINKSET = {
    'name': 'uplinkset_vlan',
    'ethernetNetworkType': 'tagged',
    'networkUris': ['fake_net_uri2']
}
# Flattened (lig_id, uplinkset_name) pairs per physical network.
UPLINKSET_MAPPINGS = {
    'physnet': ['lig_123', 'uplinkset_flat', 'lig_123', 'uplinkset_vlan']
}
# physnet -> pre-provisioned OneView network ids.
FLAT_NET_MAPPINGS = {'physnet-mapped': ['112233AA']}
FAKE_LIG = {
    'uplinkSets': [FAKE_TAGGED_UPLINKSET, FAKE_UNTAGGED_UPLINKSET]
}
# --- Port / server fixtures ----------------------------------------------
# Baremetal neutron port whose local_link_information points at a OneView
# server hardware, marked bootable.
FAKE_PORT = {
    'id': '1',
    'mac_address': 'aa:11:cc:33:ee:44',
    'network_id': '1',
    'binding:vnic_type': 'baremetal',
    'binding:profile': {
        'local_link_information': [{
            "switch_info": {
                "server_hardware_id": "1122AA",
                "bootable": "true"
            },
            "port_id": "",
            "switch_id": "aa:bb:cc:dd:ee:ff"
        }]
    }
}
# Server profile holding one primary-boot connection on the port's MAC.
FAKE_SERVER_PROFILE = {
    'uri': '/fake_sp_uri',
    'status': 'ok',
    'connections': [{
        'portId': '1234',
        'networkUri': '/fake_net_uri',
        'mac': 'aa:11:cc:33:ee:44',
        'boot': {'priority': 'Primary'}
    }]
}
# Enclosure-based (locationUri set) server hardware exposing the port's MAC
# through its port map.
FAKE_SERVER_HARDWARE = {
    'uuid': '1122AA',
    'powerState': 'On',
    'serverProfileUri': '/fake_sp_uri',
    'locationUri': '/fake_enclosure_uri',
    'powerLock': False,
    'portMap': {
        'deviceSlots': [{
            'slotNumber': '1',
            'location': 'Flb',
            'physicalPorts': [{
                'portNumber': '1',
                'virtualPorts': [{
                    'mac': 'aa:11:cc:33:ee:44',
                    'portFunction': 'a',
                }]
            }]
        }]
    }
}
# --- Expected OneView ethernet-network payloads --------------------------
FAKE_OV_FLAT_NETWORK = {
    'name': 'Neutron [%s]' % FAKE_FLAT_NETWORK.get('id'),
    'ethernetNetworkType': 'Untagged',
    'vlanId': None,
    'purpose': 'General',
    'smartLink': False,
    'privateNetwork': False,
}
FAKE_OV_VLAN_NETWORK = {
    'name': 'Neutron [%s]' % FAKE_VLAN_NETWORK.get('id'),
    'ethernetNetworkType': 'Tagged',
    'vlanId': '%s' % FAKE_VLAN_NETWORK.get('provider:segmentation_id'),
    'purpose': 'General',
    'smartLink': False,
    'privateNetwork': False,
}
class FakeContext(object):
    """Minimal stand-in for an ML2 driver context.

    Exposes only the attributes the OneView driver reads: a plugin context
    carrying a DB session, the network dict under test, and a baremetal port.
    """
    def __init__(self):
        self._plugin_context = FakePlugin()
        self._network = None  # set by each test to the network fixture in play
        self._port = copy.deepcopy(FAKE_PORT)
        self.current = copy.deepcopy(FAKE_PORT)
        self.segments_to_bind = []
class FakePlugin(object):
    """Stand-in for the neutron core plugin; only carries a DB session."""
    def __init__(self):
        self._session = 'fake_session'
class FakeNetwork(object):
    """Stand-in for a neutron<->OneView network mapping database row."""
    def __init__(self):
        self.oneview_network_id = '12345'
        self.neutron_network_id = '54321'
        self.manageable = True  # False means the network is pre-mapped, not driver-managed
class OneViewMechanismDriverTestCase(base.AgentMechanismBaseTestCase):
    """Unit tests for the OneView ML2 mechanism driver.

    The OneView SDK client and the database manager are mocked; each test
    drives one driver hook (network create/delete postcommit, port bind,
    port delete postcommit) and asserts which OneView calls were made.
    """
    def setUp(self):
        """Wire a driver to a mocked OneView client and fresh fixture copies."""
        super(OneViewMechanismDriverTestCase, self).setUp()
        common.get_oneview_client = mock.MagicMock()
        oneview_client = common.get_oneview_client()
        oneview_client.logical_interconnect_groups.get.return_value = FAKE_LIG
        database_manager.get_neutron_oneview_network = mock.Mock(
            return_value=False
        )
        self.driver = mech_oneview.OneViewDriver()
        self.driver.oneview_client = oneview_client
        self.driver.neutron_oneview_client = neutron_oneview_client.Client(
            oneview_client, UPLINKSET_MAPPINGS, FLAT_NET_MAPPINGS
        )
        # deepcopy so tests can mutate hardware/profile without cross-talk
        self.server_hardware = copy.deepcopy(FAKE_SERVER_HARDWARE)
        self.server_profile = copy.deepcopy(FAKE_SERVER_PROFILE)
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_flat_mapping(self, mock_map_net):
        """Pre-mapped flat net: no OneView create, mapped as unmanageable."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_ONEVIEW_NETWORK
        client = self.driver.oneview_client
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        # NOTE(nicodemos) parameters: session, network_id, oneview_network_id,
        # manageable, mapping
        mock_map_net.assert_called_with(
            network_context._plugin_context._session,
            FAKE_FLAT_ONEVIEW_NETWORK.get('id'),
            ['112233AA'], False, [])
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_flat(self, mock_map_net):
        """Managed flat net: created in OneView and mapped as manageable."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_NETWORK
        client = self.driver.oneview_client
        client.ethernet_networks.get_by.return_value = []
        self.driver.create_network_postcommit(network_context)
        client.ethernet_networks.create.assert_called_with(
            FAKE_OV_FLAT_NETWORK
        )
        # NOTE(nicodemos) parameters: session, network_id, oneview_network_id,
        # manageable, mapping
        mock_map_net.assert_called_with(
            network_context._plugin_context._session,
            FAKE_FLAT_NETWORK.get('id'),
            mock.ANY, True, ['lig_123', 'uplinkset_flat'])
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_already_existing_network_postcommit_flat(
        self, mock_map_net
    ):
        """Flat net already present in OneView: reused, not re-created."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_NETWORK
        client = self.driver.oneview_client
        client.ethernet_networks.get_by.return_value = [FAKE_OV_FLAT_NETWORK]
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        # NOTE(gustavo) parameters: session, network_id, oneview_network_id,
        # manageable, mapping
        mock_map_net.assert_called_with(
            network_context._plugin_context._session,
            FAKE_FLAT_NETWORK.get('id'),
            mock.ANY, True, ['lig_123', 'uplinkset_flat'])
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_vlan(self, mock_map_net):
        """Managed VLAN net: created as Tagged and mapped to the vlan uplinkset."""
        network_context = FakeContext()
        network_context._network = FAKE_VLAN_NETWORK
        client = self.driver.oneview_client
        client.ethernet_networks.get_by.return_value = []
        self.driver.create_network_postcommit(network_context)
        client.ethernet_networks.create.assert_called_with(
            FAKE_OV_VLAN_NETWORK
        )
        # NOTE(nicodemos) parameters: session, network_id, oneview_network_id,
        # manageable, mapping
        mock_map_net.assert_called_with(
            network_context._plugin_context._session,
            FAKE_VLAN_NETWORK.get('id'),
            mock.ANY, True, ['lig_123', 'uplinkset_vlan'])
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_already_existing_network_postcommit_vlan(
        self, mock_map_net
    ):
        """VLAN net already present in OneView: reused, not re-created."""
        network_context = FakeContext()
        network_context._network = FAKE_VLAN_NETWORK
        client = self.driver.oneview_client
        client.ethernet_networks.get_by.return_value = [FAKE_OV_VLAN_NETWORK]
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        # NOTE(gustavo) parameters: session, network_id, oneview_network_id,
        # manageable, mapping
        mock_map_net.assert_called_with(
            network_context._plugin_context._session,
            FAKE_VLAN_NETWORK.get('id'),
            mock.ANY, True, ['lig_123', 'uplinkset_vlan'])
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_not_mapped(self, mock_map_net):
        """Unmapped physnet: driver must neither create nor map anything."""
        network_context = FakeContext()
        network_context._network = FAKE_NETWORK_NOT_MAPPED
        client = self.driver.oneview_client
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        self.assertFalse(mock_map_net.called)
    @mock.patch.object(neutron_oneview_client.Network, '_add_to_ligs')
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_in_lig(self, mock_map_net, mock_add):
        """LIG update failure: created net is rolled back and error raised."""
        network_context = FakeContext()
        network_context._network = FAKE_VLAN_NETWORK
        client = self.driver.oneview_client
        mock_add.side_effect = Exception("BOOM")
        vlan_network = {
            'name': 'Neutron [%s]' % FAKE_VLAN_NETWORK.get('id'),
            'ethernetNetworkType': 'Tagged',
            'vlanId': '%s' % FAKE_VLAN_NETWORK.get('provider:segmentation_id'),
            'purpose': 'General',
            'smartLink': False,
            'privateNetwork': False,
        }
        self.assertRaises(
            exceptions.NetworkCreationException,
            self.driver.create_network_postcommit,
            network_context
        )
        client.ethernet_networks.create.assert_called_with(vlan_network)
        self.assertTrue(client.ethernet_networks.delete.called)
        self.assertFalse(mock_map_net.called)
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    def test_create_network_postcommit_net_created(
            self, mock_get_net, mock_map_net):
        """Network already tracked in the DB: creation is a no-op."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_NETWORK
        client = self.driver.oneview_client
        mock_get_net.return_value = True
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        self.assertFalse(mock_map_net.called)
    # NOTE(nicodemos): See bug when creating a unsupported network type
    @mock.patch.object(database_manager, 'map_neutron_network_to_oneview')
    def test_create_network_postcommit_unsupported_type(self, mock_map_net):
        """Unsupported (vxlan) type: silently ignored by the driver."""
        network_context = FakeContext()
        network_context._network = FAKE_VXLAN_NETWORK
        client = self.driver.oneview_client
        self.driver.create_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.create.called)
        self.assertFalse(mock_map_net.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_oneview_network_lig')
    def test_delete_network_postcommit(self, mock_del_lig,
                                       mock_del_net, mock_get_net):
        """Manageable net deletion: removed from OneView and from the DB."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_NETWORK
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.delete_network_postcommit(network_context)
        client.ethernet_networks.delete.assert_called_with(
            fake_network_obj.oneview_network_id)
        mock_del_net.assert_called_with(
            network_context._plugin_context._session,
            neutron_network_id=FAKE_FLAT_NETWORK.get('id')
        )
        mock_del_lig.assert_called_with(
            network_context._plugin_context._session,
            oneview_network_id=fake_network_obj.oneview_network_id
        )
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_oneview_network_lig')
    def test_delete_network_postcommit_flat_mapping(
            self, mock_del_lig, mock_del_net, mock_get_net):
        """Unmanageable (pre-mapped) net: DB rows removed, OneView untouched."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_ONEVIEW_NETWORK
        fake_network_obj = FakeNetwork()
        fake_network_obj.manageable = False
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.delete_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.delete.called)
        mock_del_net.assert_called_with(
            network_context._plugin_context._session,
            neutron_network_id=FAKE_FLAT_ONEVIEW_NETWORK.get('id')
        )
        mock_del_lig.assert_called_with(
            network_context._plugin_context._session,
            oneview_network_id=fake_network_obj.oneview_network_id
        )
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_neutron_oneview_network')
    @mock.patch.object(database_manager, 'delete_oneview_network_lig')
    def test_delete_network_postcommit_no_network(
            self, mock_del_lig, mock_del_net, mock_get_net):
        """Unknown network: deletion is a complete no-op."""
        network_context = FakeContext()
        network_context._network = FAKE_FLAT_ONEVIEW_NETWORK
        mock_get_net.return_value = None
        client = self.driver.oneview_client
        self.driver.delete_network_postcommit(network_context)
        self.assertFalse(client.ethernet_networks.delete.called)
        self.assertFalse(mock_del_net.called)
        self.assertFalse(mock_del_lig.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port(self, mock_net_segment, mock_get_net):
        """Happy-path bind: a connection is added to the server profile."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        client.server_hardware.get.return_value = self.server_hardware
        client.server_profiles.get.return_value = self.server_profile
        old_connections = copy.deepcopy(self.server_profile['connections'])
        self.driver.bind_port(port_context)
        new_connections = self.server_profile['connections']
        self.assertNotEqual(old_connections, new_connections)
        client.server_profiles.update.assert_called_with(
            id_or_uri=self.server_profile.get('uri'),
            resource={
                'uri': self.server_profile.get('uri'),
                'status': self.server_profile.get('status'),
                'connections': self.server_profile['connections']
            })
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_existing_conn(self, mock_net_segment, mock_get_net):
        """Bind with a matching MAC connection: it is updated in place."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        client.server_hardware.get.return_value = self.server_hardware
        client.server_profiles.get.return_value = self.server_profile
        self.server_profile["connections"][0]["portId"] = "Flb 1:1-a"
        old_connections = copy.deepcopy(self.server_profile['connections'])
        self.driver.bind_port(port_context)
        new_connections = self.server_profile['connections']
        # Only networkUri may change; portId and boot settings are preserved.
        for old_conn in old_connections:
            for new_conn in new_connections:
                if old_conn.get('mac') == new_conn.get('mac'):
                    self.assertEqual(old_conn.get('portId'),
                                     new_conn.get('portId'))
                    self.assertNotEqual(old_conn.get('networkUri'),
                                        new_conn.get('networkUri'))
                    self.assertEqual(old_conn.get('boot'),
                                     new_conn.get('boot'))
        self.assertEqual(len(old_connections), len(new_connections))
        client.server_profiles.update.assert_called_with(
            id_or_uri=self.server_profile.get('uri'),
            resource={
                'uri': self.server_profile.get('uri'),
                'status': self.server_profile.get('status'),
                'connections': self.server_profile['connections']
            })
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_net_not_mapped(self, mock_net_segment):
        """Bind on an unmapped physnet: no OneView interaction."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT_NOT_MAPPED
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_not_baremetal(self, mock_net_segment):
        """Non-baremetal vnic_type: the driver ignores the port."""
        port_context = FakeContext()
        port_context._port['binding:vnic_type'] = 'not_baremetal'
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_not_in_database(self, mock_net_segment, mock_get_net):
        """Network not tracked in the DB: bind does nothing."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        mock_get_net.return_value = None
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_no_link_info(self, mock_net_segment, mock_get_net):
        """Missing local_link_information: bind does nothing."""
        port_context = FakeContext()
        port_context._port['binding:profile']['local_link_information'] = None
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_more_link_info(self, mock_net_segment, mock_get_net):
        """More than one local_link_information entry: bind does nothing."""
        port_context = FakeContext()
        port_context._port['binding:profile']['local_link_information'].append(
            {'fake_local_link_info': True}
        )
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_no_switch_info(self, mock_net_segment, mock_get_net):
        """Missing switch_info: bind does nothing."""
        port_context = FakeContext()
        port_context._port[
            'binding:profile']['local_link_information'][0][
            'switch_info'] = None
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_not_bootable(self, mock_net_segment, mock_get_net):
        """Non-bootable port: the new connection gets priority NotBootable."""
        port_context = FakeContext()
        switch_info = port_context._port[
            'binding:profile']['local_link_information'][0]['switch_info']
        switch_info['bootable'] = False
        port_context._port[
            'binding:profile']['local_link_information'][0][
            'switch_info'] = switch_info
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        client.server_hardware.get.return_value = self.server_hardware
        client.server_profiles.get.return_value = self.server_profile
        old_connections = copy.deepcopy(self.server_profile['connections'])
        self.driver.bind_port(port_context)
        new_connections = self.server_profile['connections']
        boot_info = new_connections[1].get('boot').get('priority')
        self.assertNotEqual(old_connections, new_connections)
        self.assertTrue(client.server_hardware.get.called)
        self.assertTrue(client.server_profiles.get.called)
        self.assertEqual(boot_info, 'NotBootable')
        client.server_profiles.update.assert_called_with(
            id_or_uri=self.server_profile.get('uri'),
            resource={
                'uri': self.server_profile.get('uri'),
                'status': self.server_profile.get('status'),
                'connections': self.server_profile['connections']
            })
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_no_hardware(self, mock_net_segment, mock_get_net):
        """Missing server_hardware_id: bind does nothing."""
        port_context = FakeContext()
        switch_info = port_context._port[
            'binding:profile']['local_link_information'][0]['switch_info']
        switch_info['server_hardware_id'] = None
        port_context._port[
            'binding:profile']['local_link_information'][0][
            'switch_info'] = switch_info
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.bind_port(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_no_profile(self, mock_net_segment, mock_get_net):
        """Hardware without a server profile: bind stops after the HW lookup."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.server_hardware['serverProfileUri'] = None
        client.server_hardware.get.return_value = self.server_hardware
        self.driver.bind_port(port_context)
        self.assertTrue(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_rack_server(self, mock_net_segment, mock_get_net):
        """Rack server (no locationUri): profile is fetched but never updated."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.server_hardware['locationUri'] = None
        client.server_hardware.get.return_value = self.server_hardware
        self.driver.bind_port(port_context)
        self.assertTrue(client.server_hardware.get.called)
        self.assertTrue(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_create_port_no_pxe_bootable_available(
            self, mock_net_segment, mock_get_net):
        """No free boot priority left: the profile update is skipped."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        client.server_hardware.get.return_value = self.server_hardware
        client.server_profiles.get.return_value = self.server_profile
        new_connection = {
            'portId': '231',
            'networkUri': '/fake_net_uri_2',
            'mac': 'aa:11:22:33:ee:44',
            'boot': {'priority': 'Secondary'}
        }
        self.server_profile['connections'].append(new_connection)
        self.driver.bind_port(port_context)
        self.assertTrue(client.server_hardware.get.called)
        self.assertTrue(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_delete_port_postcommit(self, mock_net_segment, mock_get_net):
        """Port deletion: the server profile is updated in OneView."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        client.server_hardware.get.return_value = self.server_hardware
        client.server_profiles.get.return_value = self.server_profile
        self.driver.delete_port_postcommit(port_context)
        client.server_profiles.update.assert_called_with(
            id_or_uri=self.server_profile.get('uri'),
            resource={
                'uri': self.server_profile.get('uri'),
                'status': self.server_profile.get('status'),
                'connections': self.server_profile['connections']
            })
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_delete_port_postcommit_not_valid(
            self, mock_net_segment, mock_get_net):
        """Port without link info: deletion touches nothing in OneView."""
        port_context = FakeContext()
        port_context._port['binding:profile']['local_link_information'] = None
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.driver.delete_port_postcommit(port_context)
        self.assertFalse(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
    @mock.patch.object(database_manager, 'get_neutron_oneview_network')
    @mock.patch.object(database_manager, 'get_network_segment')
    def test_delete_port_rack_server(self, mock_net_segment, mock_get_net):
        """Rack server port deletion: stops after the hardware lookup."""
        port_context = FakeContext()
        mock_net_segment.return_value = FAKE_NETWORK_SEGMENT
        fake_network_obj = FakeNetwork()
        mock_get_net.return_value = fake_network_obj
        client = self.driver.oneview_client
        self.server_hardware['locationUri'] = None
        client.server_hardware.get.return_value = self.server_hardware
        self.driver.delete_port_postcommit(port_context)
        self.assertTrue(client.server_hardware.get.called)
        self.assertFalse(client.server_profiles.get.called)
        self.assertFalse(client.server_profiles.update.called)
| 41.717452
| 79
| 0.7
|
4a11f05c23237defd1221c2f4a7d36b41989b72a
| 49,325
|
py
|
Python
|
spyder/app/tour.py
|
StefRe/spyder
|
210495f5b691cc2986a437c237cce8de4ab06b79
|
[
"MIT"
] | 3
|
2019-09-27T21:00:00.000Z
|
2021-03-07T23:28:32.000Z
|
spyder/app/tour.py
|
StefRe/spyder
|
210495f5b691cc2986a437c237cce8de4ab06b79
|
[
"MIT"
] | 3
|
2020-10-13T21:15:23.000Z
|
2020-10-13T21:15:24.000Z
|
spyder/app/tour.py
|
StefRe/spyder
|
210495f5b691cc2986a437c237cce8de4ab06b79
|
[
"MIT"
] | 2
|
2021-04-30T01:18:22.000Z
|
2021-09-19T06:31:42.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Spyder interactive tours"""
# pylint: disable=C0103
# pylint: disable=R0903
# pylint: disable=R0911
# pylint: disable=R0201
# Standard library imports
from __future__ import division
import sys
from math import ceil
# Third party imports
from qtpy.QtCore import (QEasingCurve, QPoint, QPropertyAnimation, QRectF, Qt,
Signal)
from qtpy.QtGui import (QBrush, QColor, QIcon, QPainter, QPainterPath, QPen,
QPixmap, QRegion)
from qtpy.QtWidgets import (QAction, QApplication, QComboBox, QDialog,
QGraphicsOpacityEffect, QHBoxLayout, QLabel,
QLayout, QMainWindow, QMenu, QPushButton,
QSpacerItem, QToolButton, QVBoxLayout, QWidget)
# Local imports
from spyder.config.base import _, get_image_path
from spyder.config.gui import is_dark_interface
from spyder.py3compat import to_binary_string
from spyder.utils.qthelpers import add_actions, create_action
from spyder.utils import icon_manager as ima
# Tour panel colors follow the active interface theme: a single dark tone
# for dark mode, separate top/background tones for light mode.
if is_dark_interface():
    MAIN_TOP_COLOR = MAIN_BG_COLOR = QColor.fromRgb(25, 35, 45)
else:
    MAIN_TOP_COLOR = QColor.fromRgb(230, 230, 230)
    MAIN_BG_COLOR = QColor.fromRgb(255, 255, 255)
# FIXME: Known issues
# How to handle if an specific dockwidget does not exists/load, like ipython
# on python3.3, should that frame be removed? should it display a warning?
class SpyderWidgets(object):
    """List of supported widgets to highlight/decorate.

    Each value is an attribute path (relative to the main window) that the
    tour evaluates to resolve the live widget at runtime.
    """
    # Panes
    ipython_console = 'ipyconsole'
    editor = 'editor'
    editor_line_number_area = 'editor.get_current_editor().linenumberarea'
    editor_scroll_flag_area = 'editor.get_current_editor().scrollflagarea'
    file_explorer = 'explorer'
    help_plugin = 'help'
    variable_explorer = 'variableexplorer'
    history_log = "historylog"
    # Toolbars — paths left empty, presumably not yet supported by the tour
    toolbars = ''
    toolbars_active = ''
    toolbar_file = ''
    toolbar_edit = ''
    toolbar_run = ''
    toolbar_debug = ''
    toolbar_main = ''
    status_bar = ''
    menu_bar = ''
    menu_file = ''
    menu_edit = ''
def get_tours(index=None):
    """Return all available tours, or a single tour when *index* is given.

    Thin wrapper over get_tour(), which handles both cases.
    """
    return get_tour(index)
def get_tour(index):
    """
    This function generates a list of tours.

    The index argument is used to retrieve a particular tour. If None is
    passed, it will return the full list of tours. If instead -1 is given,
    this function will return a test tour.

    To add more tours a new variable needs to be created to hold the list of
    dicts and the tours variable at the bottom of this function needs to be
    updated accordingly.
    """
    sw = SpyderWidgets
    qtconsole_link = "https://qtconsole.readthedocs.io/en/stable/index.html"

    # This test should serve as example of keys to use in the tour frame
    # dicts.  Supported keys: 'title', 'content', 'image', 'widgets',
    # 'decoration', 'interact' and 'run' (see AnimatedTour._set_data).
    test = [{'title': "Welcome to Spyder introduction tour",
             'content': "<b>Spyder</b> is an interactive development \
                         environment. This tip panel supports rich text. <br>\
                         <br> it also supports image insertion to the right so\
                         far",
             'image': 'tour-spyder-logo.png'},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'decoration': ['button2'],
             'interact': True},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'decoration': ['button1'],
             'interact': True},

            {'title': "Widget display",
             'content': ("This show how a widget is displayed. The tip panel "
                         "is adjusted based on the first widget in the list"),
             'widgets': ['button1'],
             'interact': True},

            {'title': "Widget display and highlight",
             'content': "This shows how a highlighted widget looks",
             'widgets': ['button'],
             'decoration': ['button'],
             'interact': False},
            ]

    intro = [{'title': _("Welcome to the Introduction tour"),
              'content': _("<b>Spyder</b> is a powerful Interactive "
                           "Development Environment (or IDE) for the Python "
                           "programming language.<br><br>"
                           "Here we are going to guide you through its most "
                           "important features.<br><br>"
                           "Please use the arrow keys or click on the buttons "
                           "below to move along the tour."),
              'image': 'tour-spyder-logo.png'},

             {'title': _("The Editor"),
              'content': _("This is the pane where you write Python code before "
                           "evaluating it. You can get automatic suggestions "
                           "and completions while writing, by pressing the "
                           "<b>Tab</b> key next to a given text.<br><br>"
                           "The Editor comes "
                           "with a line number area (highlighted here in red), "
                           "where Spyder shows warnings and syntax errors. They "
                           "can help you to detect potential problems before "
                           "running the code.<br><br>"
                           "You can also set debug breakpoints in the line "
                           "number area, by doing a double click next to "
                           "a non-empty line."),
              'widgets': [sw.editor],
              'decoration': [sw.editor_line_number_area]},

             {'title': _("The IPython console"),
              'content': _("This is one of panes where you can run or "
                           "execute the code you wrote on the Editor. To do it "
                           "you need to press the <b>F5</b> key.<br><br>"
                           "This console comes with several "
                           "useful features that greatly improve your "
                           "programming workflow (like syntax highlighting and "
                           "inline plots). If you want to know more about them, "
                           "please follow this <a href=\"{0}\">link</a>.<br><br>"
                           "Please click on the button below to run some simple "
                           "code in this console. This will be useful to show "
                           "you other important features.").format(
                               qtconsole_link),
              'widgets': [sw.ipython_console],
              'run': ["li = list(range(100))", "d = {'a': 1, 'b': 2}"]
              },

             {'title': _("The Variable Explorer"),
              'content': _("In this pane you can view and edit the variables "
                           "generated during the execution of a program, or "
                           "those entered directly in one of Spyder "
                           "consoles.<br><br>"
                           "As you can see, the Variable Explorer is showing "
                           "the variables generated during the last step of "
                           "this tour. By doing a double-click on any "
                           "of them, a new window will be opened, where you "
                           "can inspect and modify their contents."),
              'widgets': [sw.variable_explorer],
              'interact': True},

             {'title': _("Help"),
              'content': _("This pane displays documentation of the "
                           "functions, classes, methods or modules you are "
                           "currently using in the Editor or the Consoles.<br><br>"
                           "To use it, you need to press <b>Ctrl+I</b> in "
                           "front of an object. If that object has some "
                           "documentation associated with it, it will be "
                           "displayed here."),
              'widgets': [sw.help_plugin],
              'interact': True},

             {'title': _("The File Explorer"),
              'content': _("This pane lets you navigate through the directories "
                           "and files present in your computer.<br><br>"
                           "You can also open any of these files with its "
                           "corresponding application, by doing a double "
                           "click on it.<br><br>"
                           "There is one exception to this rule: plain-text "
                           "files will always be opened in the Spyder Editor."),
              'widgets': [sw.file_explorer],
              'interact': True},

             {'title': _("The History Log"),
              'content': _("This pane records all commands introduced in "
                           "the Python and IPython consoles."),
              'widgets': [sw.history_log],
              'interact': True},
             ]

#                   ['The run toolbar',
#                       'Should be short',
#                       ['self.run_toolbar'], None],
#                   ['The debug toolbar',
#                       '',
#                       ['self.debug_toolbar'], None],
#                   ['The main toolbar',
#                       '',
#                       ['self.main_toolbar'], None],
#                   ['The editor',
#                       'Spyder has differnet bla bla bla',
#                       ['self.editor.dockwidget'], None],
#                   ['The editor',
#                       'Spyder has differnet bla bla bla',
#                       ['self.outlineexplorer.dockwidget'], None],
#
#                   ['The menu bar',
#                       'Spyder has differnet bla bla bla',
#                       ['self.menuBar()'], None],
#
#                   ['The menu bar',
#                       'Spyder has differnet bla bla bla',
#                       ['self.statusBar()'], None],
#
#
#                   ['The toolbars!',
#                       'Spyder has differnet bla bla bla',
#                       ['self.variableexplorer.dockwidget'], None],
#                   ['The toolbars MO!',
#                       'Spyder has differnet bla bla bla',
#                       ['self.extconsole.dockwidget'], None],
#                   ['The whole window?!',
#                       'Spyder has differnet bla bla bla',
#                       ['self'], None],
#                   ['Lets try something!',
#                       'Spyder has differnet bla bla bla',
#                       ['self.extconsole.dockwidget',
#                        'self.variableexplorer.dockwidget'], None]
#
#                  ]

    feat30 = [{'title': "New features in Spyder 3.0",
               'content': _("<b>Spyder</b> is an interactive development "
                            "environment based on bla"),
               'image': 'spyder.png'},

              {'title': _("Welcome to Spyder introduction tour"),
               'content': _("Spyder is an interactive development environment "
                            "based on bla"),
               'widgets': ['variableexplorer']},
              ]

    tours = [{'name': _('Introduction tour'), 'tour': intro},
             {'name': _('New features in version 3.0'), 'tour': feat30}]

    if index is None:
        return tours
    elif index == -1:
        return [test]
    else:
        return [tours[index]]
class FadingDialog(QDialog):
    """A QDialog that fades itself in and out via opacity animations.

    Lists of callables can be registered to run before/after each fade
    transition.
    """

    sig_key_pressed = Signal()

    def __init__(self, parent, opacity, duration, easing_curve):
        super(FadingDialog, self).__init__(parent)

        self.parent = parent
        # Animation parameters: opacity bounds, per-direction durations and
        # easing curves (first entry = fade in, last entry = fade out).
        self.opacity_min = min(opacity)
        self.opacity_max = max(opacity)
        self.duration_fadein = duration[0]
        self.duration_fadeout = duration[-1]
        self.easing_curve_in = easing_curve[0]
        self.easing_curve_out = easing_curve[-1]
        self.effect = None
        self.anim = None

        self._fade_running = False
        # Hooks executed around each transition.
        self._funcs_before_fade_in = []
        self._funcs_after_fade_in = []
        self._funcs_before_fade_out = []
        self._funcs_after_fade_out = []

        self.setModal(False)

    def _run(self, funcs):
        """Invoke every callable in *funcs*, in order."""
        for callback in funcs:
            callback()

    def _run_before_fade_in(self):
        """Run the hooks registered for just before fading in."""
        self._run(self._funcs_before_fade_in)

    def _run_after_fade_in(self):
        """Run the hooks registered for just after fading in."""
        self._run(self._funcs_after_fade_in)

    def _run_before_fade_out(self):
        """Run the hooks registered for just before fading out."""
        self._run(self._funcs_before_fade_out)

    def _run_after_fade_out(self):
        """Run the hooks registered for just after fading out."""
        self._run(self._funcs_after_fade_out)

    def _set_fade_finished(self):
        """Mark the current fade transition as finished."""
        self._fade_running = False

    def _fade_setup(self):
        """Create a fresh opacity effect/animation pair for a transition."""
        self._fade_running = True
        self.effect = QGraphicsOpacityEffect(self)
        self.setGraphicsEffect(self.effect)
        self.anim = QPropertyAnimation(self.effect, to_binary_string("opacity"))

    def _start_fade(self, curve, start, end, duration, on_finished_connect,
                    after_hook):
        """Configure the opacity animation and launch it."""
        self.anim.setEasingCurve(curve)
        self.anim.setStartValue(start)
        self.anim.setEndValue(end)
        self.anim.setDuration(duration)
        self.anim.finished.connect(on_finished_connect)
        self.anim.finished.connect(self._set_fade_finished)
        self.anim.finished.connect(after_hook)
        self.anim.start()

    # --- public api
    def fade_in(self, on_finished_connect):
        """Show the dialog and animate its opacity from min to max."""
        self._run_before_fade_in()
        self._fade_setup()
        self.show()
        self.raise_()
        self._start_fade(self.easing_curve_in, self.opacity_min,
                         self.opacity_max, self.duration_fadein,
                         on_finished_connect, self._run_after_fade_in)

    def fade_out(self, on_finished_connect):
        """Animate the dialog opacity from max down to min."""
        self._run_before_fade_out()
        self._fade_setup()
        self._start_fade(self.easing_curve_out, self.opacity_max,
                         self.opacity_min, self.duration_fadeout,
                         on_finished_connect, self._run_after_fade_out)

    def is_fade_running(self):
        """Return True while a fade transition is in progress."""
        return self._fade_running

    def set_funcs_before_fade_in(self, funcs):
        """Set the hooks to run right before fading in."""
        self._funcs_before_fade_in = funcs

    def set_funcs_after_fade_in(self, funcs):
        """Set the hooks to run right after fading in."""
        self._funcs_after_fade_in = funcs

    def set_funcs_before_fade_out(self, funcs):
        """Set the hooks to run right before fading out."""
        self._funcs_before_fade_out = funcs

    def set_funcs_after_fade_out(self, funcs):
        """Set the hooks to run right after fading out."""
        self._funcs_after_fade_out = funcs
class FadingCanvas(FadingDialog):
    """The black semi transparent canvas that covers the application."""

    def __init__(self, parent, opacity, duration, easing_curve, color,
                 tour=None):
        """Create a black semi transparent canvas that covers the app."""
        super(FadingCanvas, self).__init__(parent, opacity, duration,
                                           easing_curve)
        self.parent = parent
        self.tour = tour

        self.color = color              # Canvas color
        self.color_decoration = Qt.red  # Decoration color
        self.stroke_decoration = 2      # width in pixels for decoration

        self.region_mask = None
        self.region_subtract = None
        self.region_decoration = None

        self.widgets = None             # The widget to uncover
        self.decoration = None          # The widget to draw decoration
        self.interaction_on = False

        # Painter paths rebuilt by update_canvas() on every step.
        self.path_current = None
        self.path_subtract = None
        self.path_full = None
        self.path_decoration = None

        # widget setup
        self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint)
        self.setAttribute(Qt.WA_TranslucentBackground)
        self.setAttribute(Qt.WA_TransparentForMouseEvents)
        self.setModal(False)
        self.setFocusPolicy(Qt.NoFocus)

        self.set_funcs_before_fade_in([self.update_canvas])
        self.set_funcs_after_fade_out([lambda: self.update_widgets(None),
                                       lambda: self.update_decoration(None)])

    def set_interaction(self, value):
        """Enable/disable mouse interaction with the uncovered widgets."""
        self.interaction_on = value

    def update_canvas(self):
        """Rebuild the mask and painter paths for the current step.

        Cuts holes in the dark overlay for each widget in `self.widgets`
        and builds a thin frame region for each widget in `self.decoration`.
        """
        w, h = self.parent.size().width(), self.parent.size().height()

        self.path_full = QPainterPath()
        self.path_subtract = QPainterPath()
        self.path_decoration = QPainterPath()
        self.region_mask = QRegion(0, 0, w, h)

        self.path_full.addRect(0, 0, w, h)

        # Add the path
        if self.widgets is not None:
            for widget in self.widgets:
                temp_path = QPainterPath()
                # if widget is not found... find more general way to handle
                if widget is not None:
                    widget.raise_()
                    widget.show()
                    geo = widget.frameGeometry()
                    width, height = geo.width(), geo.height()
                    point = widget.mapTo(self.parent, QPoint(0, 0))
                    x, y = point.x(), point.y()

                    temp_path.addRect(QRectF(x, y, width, height))

                    temp_region = QRegion(x, y, width, height)

                # When interactive, punch the widget's rectangle out of the
                # event mask so mouse clicks reach the widget underneath.
                if self.interaction_on:
                    self.region_mask = self.region_mask.subtracted(temp_region)
                self.path_subtract = self.path_subtract.united(temp_path)

            self.path_current = self.path_full.subtracted(self.path_subtract)
        else:
            self.path_current = self.path_full

        if self.decoration is not None:
            for widget in self.decoration:
                temp_path = QPainterPath()

                widget.raise_()
                widget.show()

                geo = widget.frameGeometry()
                width, height = geo.width(), geo.height()
                point = widget.mapTo(self.parent, QPoint(0, 0))
                x, y = point.x(), point.y()
                temp_path.addRect(QRectF(x, y, width, height))

                # The decoration frame is the 2px ring between a rectangle
                # grown by 1px and one shrunk by 1px.
                temp_region_1 = QRegion(x-1, y-1, width+2, height+2)
                temp_region_2 = QRegion(x+1, y+1, width-2, height-2)
                temp_region = temp_region_1.subtracted(temp_region_2)

                if self.interaction_on:
                    self.region_mask = self.region_mask.united(temp_region)

                self.path_decoration = self.path_decoration.united(temp_path)
        else:
            self.path_decoration.addRect(0, 0, 0, 0)

        # Add a decoration stroke around widget
        self.setMask(self.region_mask)
        self.update()
        self.repaint()

    def update_widgets(self, widgets):
        """Set the widgets to uncover on the next update_canvas() call."""
        self.widgets = widgets

    def update_decoration(self, widgets):
        """Set the widgets to decorate on the next update_canvas() call."""
        self.decoration = widgets

    def paintEvent(self, event):
        """Override Qt method"""
        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing)
        # Decoration
        painter.fillPath(self.path_current, QBrush(self.color))
        painter.strokePath(self.path_decoration, QPen(self.color_decoration,
                                                      self.stroke_decoration))
#        decoration_fill = QColor(self.color_decoration)
#        decoration_fill.setAlphaF(0.25)
#        painter.fillPath(self.path_decoration, decoration_fill)

    def reject(self):
        """Override Qt method"""
        # Forward Escape to the tour instead of closing the dialog.
        if not self.is_fade_running():
            key = Qt.Key_Escape
            self.key_pressed = key
            self.sig_key_pressed.emit()

    def mousePressEvent(self, event):
        """Override Qt method"""
        pass

    def focusInEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        if self.hasFocus():
            self.tour.gain_focus()

    def focusOutEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        if self.tour.step_current != 0:
            self.tour.lost_focus()
class FadingTipBox(FadingDialog):
    """Frameless dialog that shows the content of a single tour step."""

    def __init__(self, parent, opacity, duration, easing_curve, tour=None,
                 color_top=None, color_back=None, combobox_background=None):
        super(FadingTipBox, self).__init__(parent, opacity, duration,
                                           easing_curve)
        self.holder = self.anim  # needed for qt to work
        self.parent = parent
        self.tour = tour

        self.frames = None
        self.offset_shadow = 0
        self.fixed_width = 300

        self.key_pressed = None

        self.setAttribute(Qt.WA_TranslucentBackground)
        self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint |
                            Qt.WindowStaysOnTopHint)
        self.setModal(False)

        # Widgets
        def toolbutton(icon):
            bt = QToolButton()
            bt.setAutoRaise(True)
            bt.setIcon(icon)
            return bt

        self.button_close = toolbutton(ima.icon("tour.close"))
        self.button_home = toolbutton(ima.icon("tour.home"))
        self.button_previous = toolbutton(ima.icon("tour.previous"))
        self.button_end = toolbutton(ima.icon("tour.end"))
        self.button_next = toolbutton(ima.icon("tour.next"))
        self.button_run = QPushButton(_('Run code'))
        self.button_disable = None
        self.button_current = QToolButton()
        self.label_image = QLabel()

        self.label_title = QLabel()
        self.combo_title = QComboBox()
        self.label_current = QLabel()
        self.label_content = QLabel()

        self.label_content.setMinimumWidth(self.fixed_width)
        self.label_content.setMaximumWidth(self.fixed_width)

        self.label_current.setAlignment(Qt.AlignCenter)

        self.label_content.setWordWrap(True)

        self.widgets = [self.label_content, self.label_title,
                        self.label_current, self.combo_title,
                        self.button_close, self.button_run, self.button_next,
                        self.button_previous, self.button_end,
                        self.button_home, self.button_current]

        arrow = get_image_path('hide.png')

        self.color_top = color_top
        self.color_back = color_back
        self.combobox_background = combobox_background
        # NOTE(review): there appears to be a missing ';' after
        # 'background-color: {}' below, so Qt parses the color and the
        # following 'border-width' as one invalid declaration — confirm
        # and fix separately.
        self.stylesheet = '''QComboBox {{
                             padding-left: 5px;
                             background-color: {}
                             border-width: 0px;
                             border-radius: 0px;
                             min-height:20px;
                             max-height:20px;
                             }}

                             QComboBox::drop-down {{
                             subcontrol-origin: padding;
                             subcontrol-position: top left;
                             border-width: 0px;
                             }}

                             QComboBox::down-arrow {{
                             image: url({});
                             }}
                             '''.format(self.combobox_background.name(), arrow)
        # Windows fix, slashes should be always in unix-style
        self.stylesheet = self.stylesheet.replace('\\', '/')

        self.setFocusPolicy(Qt.StrongFocus)
        for widget in self.widgets:
            widget.setFocusPolicy(Qt.NoFocus)
            widget.setStyleSheet(self.stylesheet)

        layout_top = QHBoxLayout()
        layout_top.addWidget(self.combo_title)
        layout_top.addStretch()
        layout_top.addWidget(self.button_close)
        layout_top.addSpacerItem(QSpacerItem(self.offset_shadow,
                                             self.offset_shadow))

        layout_content = QHBoxLayout()
        layout_content.addWidget(self.label_content)
        layout_content.addWidget(self.label_image)
        layout_content.addSpacerItem(QSpacerItem(5, 5))

        layout_run = QHBoxLayout()
        layout_run.addStretch()
        layout_run.addWidget(self.button_run)
        layout_run.addStretch()
        layout_run.addSpacerItem(QSpacerItem(self.offset_shadow,
                                             self.offset_shadow))

        layout_navigation = QHBoxLayout()
        layout_navigation.addWidget(self.button_home)
        layout_navigation.addWidget(self.button_previous)
        layout_navigation.addStretch()
        layout_navigation.addWidget(self.label_current)
        layout_navigation.addStretch()
        layout_navigation.addWidget(self.button_next)
        layout_navigation.addWidget(self.button_end)
        layout_navigation.addSpacerItem(QSpacerItem(self.offset_shadow,
                                                    self.offset_shadow))

        layout = QVBoxLayout()
        layout.addLayout(layout_top)
        layout.addStretch()
        layout.addSpacerItem(QSpacerItem(15, 15))
        layout.addLayout(layout_content)
        layout.addLayout(layout_run)
        layout.addStretch()
        layout.addSpacerItem(QSpacerItem(15, 15))
        layout.addLayout(layout_navigation)
        layout.addSpacerItem(QSpacerItem(self.offset_shadow,
                                         self.offset_shadow))

        layout.setSizeConstraint(QLayout.SetFixedSize)

        self.setLayout(layout)

        self.set_funcs_before_fade_in([self._disable_widgets])
        self.set_funcs_after_fade_in([self._enable_widgets, self.setFocus])
        self.set_funcs_before_fade_out([self._disable_widgets])

        self.setContextMenuPolicy(Qt.CustomContextMenu)

        # signals and slots
        # These are defined every time by the AnimatedTour Class

    def _disable_widgets(self):
        """Disable every interactive widget during fade transitions."""
        for widget in self.widgets:
            widget.setDisabled(True)

    def _enable_widgets(self):
        """Re-enable widgets after a fade in, honoring `button_disable`."""
        self.setWindowFlags(Qt.Dialog | Qt.FramelessWindowHint |
                            Qt.WindowStaysOnTopHint)
        for widget in self.widgets:
            widget.setDisabled(False)

        # Keep navigation buttons disabled at the tour's boundaries.
        if self.button_disable == 'previous':
            self.button_previous.setDisabled(True)
            self.button_home.setDisabled(True)
        elif self.button_disable == 'next':
            self.button_next.setDisabled(True)
            self.button_end.setDisabled(True)

    def set_data(self, title, content, current, image, run, frames=None,
                 step=None):
        """Fill the tip box with the data of one tour step.

        `run` is either None (hide the Run button) or a list of code lines.
        `frames` is the list of combo-box entries and `step` the index to
        select.
        """
        self.label_title.setText(title)
        self.combo_title.clear()
        self.combo_title.addItems(frames)
        self.combo_title.setCurrentIndex(step)
#        min_content_len = max([len(f) for f in frames])
#        self.combo_title.setMinimumContentsLength(min_content_len)

        # Fix and try to see how it looks with a combo box
        self.label_current.setText(current)
        self.button_current.setText(current)
        self.label_content.setText(content)
        self.image = image

        if image is None:
            self.label_image.setFixedHeight(1)
            self.label_image.setFixedWidth(1)
        else:
            extension = image.split('.')[-1]
            self.image = QPixmap(get_image_path(image), extension)
            self.label_image.setPixmap(self.image)
            self.label_image.setFixedSize(self.image.size())

        if run is None:
            self.button_run.setVisible(False)
        else:
            self.button_run.setDisabled(False)
            self.button_run.setVisible(True)

        # Refresh layout
        self.layout().activate()

    def set_pos(self, x, y):
        """Move the tip box to (x, y), rounding coordinates up."""
        self.x = ceil(x)
        self.y = ceil(y)
        self.move(QPoint(self.x, self.y))

    def build_paths(self):
        """Build the body and header painter paths used by paintEvent()."""
        geo = self.geometry()
        radius = 0
        shadow = self.offset_shadow
        x0, y0 = geo.x(), geo.y()
        width, height = geo.width() - shadow, geo.height() - shadow

        left, top = 0, 0
        right, bottom = width, height

        self.round_rect_path = QPainterPath()
        self.round_rect_path.moveTo(right, top + radius)
        self.round_rect_path.arcTo(right-radius, top, radius, radius, 0.0,
                                   90.0)
        self.round_rect_path.lineTo(left+radius, top)
        self.round_rect_path.arcTo(left, top, radius, radius, 90.0, 90.0)
        self.round_rect_path.lineTo(left, bottom-radius)
        self.round_rect_path.arcTo(left, bottom-radius, radius, radius, 180.0,
                                   90.0)
        self.round_rect_path.lineTo(right-radius, bottom)
        self.round_rect_path.arcTo(right-radius, bottom-radius, radius, radius,
                                   270.0, 90.0)
        self.round_rect_path.closeSubpath()

        # Top path
        header = 36
        offset = 2
        left, top = offset, offset
        right = width - (offset)
        self.top_rect_path = QPainterPath()
        self.top_rect_path.lineTo(right, top + radius)
        self.top_rect_path.moveTo(right, top + radius)
        self.top_rect_path.arcTo(right-radius, top, radius, radius, 0.0, 90.0)
        self.top_rect_path.lineTo(left+radius, top)
        self.top_rect_path.arcTo(left, top, radius, radius, 90.0, 90.0)
        self.top_rect_path.lineTo(left, top + header)
        self.top_rect_path.lineTo(right, top + header)

    def paintEvent(self, event):
        """Override Qt method."""
        self.build_paths()

        painter = QPainter(self)
        painter.setRenderHint(QPainter.Antialiasing)

        painter.fillPath(self.round_rect_path, self.color_back)
        painter.fillPath(self.top_rect_path, self.color_top)
        painter.strokePath(self.round_rect_path, QPen(Qt.gray, 1))

        # TODO: Build the pointing arrow?

    def keyReleaseEvent(self, event):
        """Override Qt method."""
        key = event.key()
        self.key_pressed = key

        # Navigation keys the tour reacts to (see AnimatedTour._key_pressed).
        keys = [Qt.Key_Right, Qt.Key_Left, Qt.Key_Down, Qt.Key_Up,
                Qt.Key_Escape, Qt.Key_PageUp, Qt.Key_PageDown,
                Qt.Key_Home, Qt.Key_End, Qt.Key_Menu]

        if key in keys:
            if not self.is_fade_running():
                self.sig_key_pressed.emit()

    def mousePressEvent(self, event):
        """override Qt method"""
        # Raise the main application window on click
        self.parent.raise_()
        self.raise_()

        if event.button() == Qt.RightButton:
            pass
#            clicked_widget = self.childAt(event.x(), event.y())
#            if clicked_widget == self.label_current:
#            self.context_menu_requested(event)

    def focusOutEvent(self, event):
        """Override Qt method."""
        # To be used so tips do not appear outside spyder
        self.tour.lost_focus()

    def context_menu_requested(self, event):
        """Show a (currently minimal) step-navigation context menu."""
        pos = QPoint(event.x(), event.y())
        menu = QMenu(self)

        actions = []
        action_title = create_action(self, _('Go to step: '), icon=QIcon())
        action_title.setDisabled(True)
        actions.append(action_title)
#        actions.append(create_action(self, _(': '), icon=QIcon()))

        add_actions(menu, actions)

        menu.popup(self.mapToGlobal(pos))

    def reject(self):
        """Qt method to handle escape key event"""
        # Forward Escape to the tour instead of closing the dialog.
        if not self.is_fade_running():
            key = Qt.Key_Escape
            self.key_pressed = key
            self.sig_key_pressed.emit()
class AnimatedTour(QWidget):
    """Widget that orchestrates a tour: the dark canvas plus the tip box."""

    def __init__(self, parent):
        QWidget.__init__(self, parent)

        self.parent = parent

        # Variables to adjust
        self.duration_canvas = [666, 666]
        self.duration_tips = [333, 333]
        self.opacity_canvas = [0.0, 0.7]
        self.opacity_tips = [0.0, 1.0]
        self.color = Qt.black
        self.easing_curve = [QEasingCurve.Linear]

        # NOTE(review): 'current_step' is assigned here but never read
        # again; the class tracks position in 'step_current'.
        self.current_step = 0
        self.step_current = 0
        self.steps = 0
        self.canvas = None
        self.tips = None
        self.frames = None
        self.spy_window = None

        self.widgets = None
        self.dockwidgets = None
        self.decoration = None
        self.run = None

        self.is_tour_set = False
        self.is_running = False

        # Widgets
        self.canvas = FadingCanvas(self.parent, self.opacity_canvas,
                                   self.duration_canvas, self.easing_curve,
                                   self.color, tour=self)
        self.tips = FadingTipBox(self.parent, self.opacity_tips,
                                 self.duration_tips, self.easing_curve,
                                 tour=self, color_top=MAIN_TOP_COLOR,
                                 color_back=MAIN_BG_COLOR,
                                 combobox_background=MAIN_TOP_COLOR)

        # Widgets setup
        # Needed to fix spyder-ide/spyder#2204.
        self.setAttribute(Qt.WA_TransparentForMouseEvents)

        # Signals and slots
        self.tips.button_next.clicked.connect(self.next_step)
        self.tips.button_previous.clicked.connect(self.previous_step)
        self.tips.button_close.clicked.connect(self.close_tour)
        self.tips.button_run.clicked.connect(self.run_code)
        self.tips.button_home.clicked.connect(self.first_step)
        self.tips.button_end.clicked.connect(self.last_step)
        self.tips.button_run.clicked.connect(
            lambda: self.tips.button_run.setDisabled(True))
        self.tips.combo_title.currentIndexChanged.connect(self.go_to_step)

        # Main window move or resize
        self.parent.sig_resized.connect(self._resized)
        self.parent.sig_moved.connect(self._moved)

        # To capture the arrow keys that allow moving the tour
        self.tips.sig_key_pressed.connect(self._key_pressed)

        # To control the focus of tour
        self.setting_data = False
        self.hidden = False

    def _resized(self, event):
        """Resize the canvas along with the main window."""
        if self.is_running:
            size = event.size()
            self.canvas.setFixedSize(size)
            self.canvas.update_canvas()

            if self.is_tour_set:
                self._set_data()

    def _moved(self, event):
        """Move the canvas along with the main window."""
        if self.is_running:
            pos = event.pos()
            self.canvas.move(QPoint(pos.x(), pos.y()))

            if self.is_tour_set:
                self._set_data()

    def _close_canvas(self):
        """Hide the tips and fade the canvas out."""
        self.tips.hide()
        self.canvas.fade_out(self.canvas.hide)

    def _clear_canvas(self):
        """Black out the canvas (no holes, no decoration)."""
        # TODO: Add option to also make it white... might be useful?
        # Make canvas black before transitions
        self.canvas.update_widgets(None)
        self.canvas.update_decoration(None)
        self.canvas.update_canvas()

    def _move_step(self):
        """Load the current step's data and reposition the tip box."""
        self._set_data()

        # Show/raise the widget so it is located first!
        widgets = self.dockwidgets
        if widgets is not None:
            widget = widgets[0]
            if widget is not None:
                widget.show()
                widget.raise_()

        self._locate_tip_box()

        # Change in canvas only after fadein finishes, for visual aesthetics
        self.tips.fade_in(self.canvas.update_canvas)
        self.tips.raise_()

    def _set_modal(self, value, widgets):
        """Set modality of *widgets*; only actually needed on Windows."""
        platform = sys.platform.lower()

        # NOTE(review): on macOS sys.platform is 'darwin', which contains
        # the substring 'win', so the Windows branch runs there too and the
        # 'darwin' branch is unreachable — confirm and reorder/startswith.
        if 'linux' in platform:
            pass
        elif 'win' in platform:
            for widget in widgets:
                widget.setModal(value)
                widget.hide()
                widget.show()
        elif 'darwin' in platform:
            pass
        else:
            pass

    def _process_widgets(self, names, spy_window):
        """Resolve attribute-path *names* into (widgets, dockwidgets)."""
        widgets = []
        dockwidgets = []

        for name in names:
            base = name.split('.')[0]
            temp = getattr(spy_window, base)

            # Check if it is the current editor
            if 'get_current_editor()' in name:
                temp = temp.get_current_editor()
                temp = getattr(temp, name.split('.')[-1])

            widgets.append(temp)

            # Check if it is a dockwidget and make the widget a dockwidget
            # If not return the same widget
            temp = getattr(temp, 'dockwidget', temp)
            dockwidgets.append(temp)

        return widgets, dockwidgets

    def _set_data(self):
        """Set data that is displayed in each step of the tour."""
        self.setting_data = True
        step, steps, frames = self.step_current, self.steps, self.frames
        current = '{0}/{1}'.format(step + 1, steps)
        frame = frames[step]

        combobox_frames = [u"{0}. {1}".format(i+1, f['title'])
                           for i, f in enumerate(frames)]

        title, content, image = '', '', None
        widgets, dockwidgets, decoration = None, None, None
        run = None

        # Check if entry exists in dic and act accordingly
        if 'title' in frame:
            title = frame['title']

        if 'content' in frame:
            content = frame['content']

        if 'widgets' in frame:
            widget_names = frames[step]['widgets']
            # Get the widgets based on their name
            widgets, dockwidgets = self._process_widgets(widget_names,
                                                         self.spy_window)
        self.widgets = widgets
        self.dockwidgets = dockwidgets

        if 'decoration' in frame:
            widget_names = frames[step]['decoration']
            deco, decoration = self._process_widgets(widget_names,
                                                     self.spy_window)
            self.decoration = decoration

        if 'image' in frame:
            image = frames[step]['image']

        if 'interact' in frame:
            self.canvas.set_interaction(frame['interact'])
            if frame['interact']:
                self._set_modal(False, [self.tips])
            else:
                self._set_modal(True, [self.tips])
        else:
            self.canvas.set_interaction(False)
            self._set_modal(True, [self.tips])

        if 'run' in frame:
            # Assume that the first widget is the console
            run = frame['run']
            self.run = run

        self.tips.set_data(title, content, current, image, run,
                           frames=combobox_frames, step=step)
        self._check_buttons()

        # Make canvas black when starting a new place of decoration
        self.canvas.update_widgets(dockwidgets)
        self.canvas.update_decoration(decoration)
        self.setting_data = False

    def _locate_tip_box(self):
        """Choose the best position for the tip box around the step widget."""
        dockwidgets = self.dockwidgets

        # Store the dimensions of the main window
        geo = self.parent.frameGeometry()
        x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()
        self.width_main = width
        self.height_main = height
        self.x_main = x
        self.y_main = y

        delta = 20

        # Here is the tricky part to define the best position for the
        # tip widget
        if dockwidgets is not None:
            if dockwidgets[0] is not None:
                geo = dockwidgets[0].geometry()
                x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()

                point = dockwidgets[0].mapToGlobal(QPoint(0, 0))
                x_glob, y_glob = point.x(), point.y()

                # Check if is too tall and put to the side
                y_fac = (height / self.height_main) * 100

                if y_fac > 60:  # FIXME:
                    if x < self.tips.width():
                        x = x_glob + width + delta
                        y = y_glob + height/2 - self.tips.height()/2
                    else:
                        x = x_glob - self.tips.width() - delta
                        y = y_glob + height/2 - self.tips.height()/2
                else:
                    if y < self.tips.height():
                        x = x_glob + width/2 - self.tips.width()/2
                        y = y_glob + height + delta
                    else:
                        x = x_glob + width/2 - self.tips.width()/2
                        y = y_glob - delta - self.tips.height()
        else:
            # Center on parent
            x = self.x_main + self.width_main/2 - self.tips.width()/2
            y = self.y_main + self.height_main/2 - self.tips.height()/2

        self.tips.set_pos(x, y)

    def _check_buttons(self):
        """Flag which navigation buttons must stay disabled at boundaries."""
        step, steps = self.step_current, self.steps
        self.tips.button_disable = None

        if step == 0:
            self.tips.button_disable = 'previous'

        if step == steps - 1:
            self.tips.button_disable = 'next'

    def _key_pressed(self):
        """Dispatch the key captured by the tip box to a navigation action."""
        key = self.tips.key_pressed

        if ((key == Qt.Key_Right or key == Qt.Key_Down or
                key == Qt.Key_PageDown) and self.step_current != self.steps - 1):
            self.next_step()
        elif ((key == Qt.Key_Left or key == Qt.Key_Up or
                key == Qt.Key_PageUp) and self.step_current != 0):
            self.previous_step()
        elif key == Qt.Key_Escape:
            self.close_tour()
        elif key == Qt.Key_Home and self.step_current != 0:
            self.first_step()
        elif key == Qt.Key_End and self.step_current != self.steps - 1:
            self.last_step()
        elif key == Qt.Key_Menu:
            pos = self.tips.label_current.pos()
            self.tips.context_menu_requested(pos)

    def _hiding(self):
        # Mark the tips as hidden (used while the main window lacks focus).
        self.hidden = True
        self.tips.hide()

    # --- public api
    def run_code(self):
        """Run the current frame's code lines in the step's first widget."""
        # NOTE(review): assumes self.widgets[0] is a console exposing
        # execute_code() — holds for frames that define both 'widgets'
        # and 'run'.
        codelines = self.run
        console = self.widgets[0]
        for codeline in codelines:
            console.execute_code(codeline)

    def set_tour(self, index, frames, spy_window):
        """Select the tour to play; *frames* is {'last': int, 'tour': list}."""
        self.spy_window = spy_window
        self.active_tour_index = index
        self.last_frame_active = frames['last']
        self.frames = frames['tour']
        self.steps = len(self.frames)

        self.is_tour_set = True

    def start_tour(self):
        """Show the canvas over the main window and begin the tour."""
        geo = self.parent.geometry()
        x, y, width, height = geo.x(), geo.y(), geo.width(), geo.height()
#        self.parent_x = x
#        self.parent_y = y
#        self.parent_w = width
#        self.parent_h = height

        # FIXME: reset step to last used value
        # Reset step to beginning
        self.step_current = self.last_frame_active

        # Adjust the canvas size to match the main window size
        self.canvas.setFixedSize(width, height)
        self.canvas.move(QPoint(x, y))

        self.canvas.fade_in(self._move_step)
        self._clear_canvas()

        self.is_running = True

    def close_tour(self):
        """Fade everything out and remember the last visited frame."""
        self.tips.fade_out(self._close_canvas)
        self.canvas.set_interaction(False)
        self._set_modal(True, [self.tips])
        self.canvas.hide()

        try:
            # set the last played frame by updating the available tours in
            # parent. This info will be lost on restart.
            self.parent.tours_available[self.active_tour_index]['last'] =\
                self.step_current
        # NOTE(review): bare 'except:' also swallows KeyboardInterrupt and
        # SystemExit; 'except Exception:' would be safer here.
        except:
            pass

        self.is_running = False

    def hide_tips(self):
        """Hide tips dialog when the main window loses focus."""
        self._clear_canvas()
        self.tips.fade_out(self._hiding)

    def unhide_tips(self):
        """Unhide tips dialog when the main window loses focus."""
        self._clear_canvas()
        self._move_step()
        self.hidden = False

    def next_step(self):
        """Advance the tour one step forward."""
        self._clear_canvas()
        self.step_current += 1
        self.tips.fade_out(self._move_step)

    def previous_step(self):
        """Move the tour one step back."""
        self._clear_canvas()
        self.step_current -= 1
        self.tips.fade_out(self._move_step)

    def go_to_step(self, number, id_=None):
        """Jump directly to step *number* (combo-box slot)."""
        self._clear_canvas()
        self.step_current = number
        self.tips.fade_out(self._move_step)

    def last_step(self):
        """Jump to the final step of the tour."""
        self.go_to_step(self.steps - 1)

    def first_step(self):
        """Jump back to the first step of the tour."""
        self.go_to_step(0)

    def lost_focus(self):
        """Confirm if the tour loses focus and hides the tips."""
        if (self.is_running and not self.any_has_focus() and
                not self.setting_data and not self.hidden):
            self.hide_tips()

    def gain_focus(self):
        """Confirm if the tour regains focus and unhides the tips."""
        if (self.is_running and self.any_has_focus() and
                not self.setting_data and self.hidden):
            self.unhide_tips()

    def any_has_focus(self):
        """Returns if tour or any of its components has focus."""
        f = (self.hasFocus() or self.parent.hasFocus() or
             self.tips.hasFocus() or self.canvas.hasFocus())
        return f
# ----------------------------------------------------------------------------
# Used for testing the functionality
class TourTestWindow(QMainWindow):
    """Minimal main window used to exercise the tour manually.

    It provides the `sig_resized`/`sig_moved` signals AnimatedTour expects
    from the Spyder main window, plus a few buttons the test tour targets.
    """

    sig_resized = Signal("QResizeEvent")
    sig_moved = Signal("QMoveEvent")

    def __init__(self):
        super(TourTestWindow, self).__init__()
        self.setGeometry(300, 100, 400, 600)
        self.setWindowTitle('Exploring QMainWindow')

        self.exit = QAction('Exit', self)
        self.exit.setStatusTip('Exit program')

        # create the menu bar
        menubar = self.menuBar()
        file_ = menubar.addMenu('&File')
        file_.addAction(self.exit)

        # create the status bar
        self.statusBar()

        # QWidget or its instance needed for box layout
        self.widget = QWidget(self)

        self.button = QPushButton('test')
        self.button1 = QPushButton('1')
        self.button2 = QPushButton('2')

        effect = QGraphicsOpacityEffect(self.button2)
        self.button2.setGraphicsEffect(effect)
        self.anim = QPropertyAnimation(effect, to_binary_string("opacity"))
        self.anim.setStartValue(0.01)
        self.anim.setEndValue(1.0)
        self.anim.setDuration(500)

        lay = QVBoxLayout()
        lay.addWidget(self.button)
        lay.addStretch()
        lay.addWidget(self.button1)
        lay.addWidget(self.button2)

        self.widget.setLayout(lay)
        self.setCentralWidget(self.widget)

        self.button.clicked.connect(self.action1)
        self.button1.clicked.connect(self.action2)

        self.tour = AnimatedTour(self)

    def action1(self):
        """Start the built-in test tour."""
        # BUG FIX: get_tour() takes an integer index; the previous call
        # get_tour('test') raised TypeError (list indices must be integers).
        # -1 is the documented sentinel for the test tour, and get_tour()
        # wraps it in a one-element list, hence the [0].
        frames = get_tour(-1)[0]
        index = 0
        dic = {'last': 0, 'tour': frames}
        self.tour.set_tour(index, dic, self)
        self.tour.start_tour()

    def action2(self):
        """Fade button2 in, to check opacity animations work at all."""
        self.anim.start()

    def resizeEvent(self, event):
        """Reimplement Qt method"""
        QMainWindow.resizeEvent(self, event)
        # Forward to the tour so it can resize its canvas.
        self.sig_resized.emit(event)

    def moveEvent(self, event):
        """Reimplement Qt method"""
        QMainWindow.moveEvent(self, event)
        # Forward to the tour so it can move its canvas.
        self.sig_moved.emit(event)
def test():
    """Launch a standalone window for manually exercising the tour."""
    application = QApplication([])
    window = TourTestWindow()
    window.show()
    application.exec_()
# Run the manual test window when executed as a script.
if __name__ == '__main__':
    test()
| 36.700149
| 84
| 0.545241
|
4a11f17c6aeb04a417493d0b1fe0b228cbaf7e38
| 12,644
|
py
|
Python
|
awp5/api/pool.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | 2
|
2019-04-10T16:46:19.000Z
|
2020-08-18T21:57:59.000Z
|
awp5/api/pool.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
awp5/api/pool.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Thomas Waldinger. All rights reserved.
# Licensed under the Apache License, Version 2.0. See
# License.txt in the project root for license
# information.
# ---------------
"""
Pool
This resource tracks volume pools. Volume pools are collections of labeled
media that can be used for archive and/or backup tasks.
"""
from awp5.base.connection import P5Resource, exec_nsdchat
from awp5.base.helpers import resourcelist, onereturnvalue
from awp5.api.client import Client
from awp5.api.volume import Volume
module_name = "Pool"
def names(as_object=False, p5_connection=None):
    """List the names of all configured media pools (CLI: ``Pool names``).

    Returns the raw name list (the server reports "<empty>" when no
    pools are configured), or ``Pool`` objects when ``as_object`` is
    true.
    """
    command = [module_name, "names"]
    result = exec_nsdchat(command, p5_connection)
    if as_object is False:
        return result
    return resourcelist(result, Pool, p5_connection)
@onereturnvalue
def create(pool_name, option_value_list=None, as_object=False,
           p5_connection=None):
    """Create a media pool named *pool_name* (CLI: ``Pool create``).

    The name must not contain blanks, special punctuation or national
    characters, and must not already exist (an error is thrown
    otherwise). Options passed via ``option_value_list``:
    ``usage`` Archive|Backup (default Archive), ``mediatype`` TAPE|DISK
    (default TAPE), and ``blocksize`` -- one of 32768, 65536, 131072,
    262144, 524288 -- for all volumes labeled for this pool. The new
    pool is configured without parallelism (one media device); use the
    P5 Web-GUI to change that.

    Example: ``Pool create MyPool usage Archive mediatype TAPE``
    Returns the created pool's name (or a ``Pool`` object).
    """
    command = [module_name, "create", pool_name, option_value_list]
    result = exec_nsdchat(command, p5_connection)
    if as_object is False:
        return result
    return resourcelist(result, Pool, p5_connection)
@onereturnvalue
def disabled(pool_name, p5_connection=None):
    """Return "1" when pool *pool_name* is disabled, "0" otherwise
    (CLI: ``Pool <name> disabled``).
    """
    command = [module_name, pool_name, "disabled"]
    return exec_nsdchat(command, p5_connection)
@onereturnvalue
def drivecount(pool_name, count=None, p5_connection=None):
    """Set the drives-per-stream the pool may use and return it as a
    string (CLI: ``Pool <name> drivecount <count>``).
    """
    command = [module_name, pool_name, "drivecount", count]
    return exec_nsdchat(command, p5_connection)
@onereturnvalue
def enabled(pool_name, p5_connection=None):
    """
    Syntax: Pool <name> enabled
    Description: Queries the pool Enabled status.
    Return Values:
    -On Success: the string "1" (enabled) or "0" (not enabled)
    """
    # BUG FIX: this function previously sent the "drivecount" command
    # (copy/paste error) and therefore queried the wrong property. The
    # Pool.enabled method in this module correctly sends "enabled".
    method_name = "enabled"
    return exec_nsdchat([module_name, pool_name, method_name], p5_connection)
@onereturnvalue
def mediatype(pool_name, p5_connection=None):
    """Return "TAPE" or "DISK" -- the media type of volumes labeled for
    pool *pool_name* (CLI: ``Pool <name> mediatype``).
    """
    command = [module_name, pool_name, "mediatype"]
    return exec_nsdchat(command, p5_connection)
@onereturnvalue
def totalsize(pool_name, p5_connection=None):
    """Return the estimated capacity of pool *pool_name* in kbytes
    (CLI: ``Pool <name> totalsize``).

    The true capacity depends on wear and faulty blocks and degrades
    with time and usage, so treat the value as an estimate.
    """
    command = [module_name, pool_name, "totalsize"]
    return exec_nsdchat(command, p5_connection)
@onereturnvalue
def usage(pool_name, p5_connection=None):
    """Return the pool's purpose, either "Archive" or "Backup"
    (CLI: ``Pool <name> usage``).
    """
    command = [module_name, pool_name, "usage"]
    return exec_nsdchat(command, p5_connection)
@onereturnvalue
def usedsize(pool_name, p5_connection=None):
    """Return the number of kbytes already written to pool *pool_name*
    (CLI: ``Pool <name> usedsize``); zero means nothing was written.
    """
    command = [module_name, pool_name, "usedsize"]
    return exec_nsdchat(command, p5_connection)
def volumes(pool_name, as_object=False, p5_connection=None):
    """List the IDs of all volumes labeled for pool *pool_name*
    (CLI: ``Pool <name> volumes``); "<empty>" when there are none.

    With ``as_object`` true, returns ``Volume`` objects instead of IDs.
    """
    command = [module_name, pool_name, "volumes"]
    result = exec_nsdchat(command, p5_connection)
    if as_object is False:
        return result
    return resourcelist(result, Volume, p5_connection)
class Pool(P5Resource):
    """Object wrapper for a single P5 media pool resource.

    ``names`` and ``create`` are factories that do not operate on an
    instance and are now declared ``@staticmethod`` (previously they
    were plain functions in the class body, so calling them on an
    instance silently passed ``self`` into the first parameter). All
    other methods query the wrapped pool via the stored connection.
    """

    def __init__(self, pool_name, p5_connection):
        super().__init__(pool_name, p5_connection)

    @staticmethod
    def names(as_object=True, p5_connection=None):
        """List all configured media pools (CLI: ``Pool names``).

        Returns ``Pool`` objects by default, or the raw name list when
        ``as_object`` is false; the server reports "<empty>" when no
        pools are configured.
        """
        method_name = "names"
        result = exec_nsdchat([module_name, method_name], p5_connection)
        if as_object is False:
            return result
        else:
            return resourcelist(result, Pool, p5_connection)

    @staticmethod
    @onereturnvalue
    def create(pool_name, option_value_list=None, as_object=False,
               p5_connection=None):
        """Create a media pool named *pool_name* (CLI: ``Pool create``).

        The name must not contain blanks, special punctuation or
        national characters, and must not already exist. Options passed
        via ``option_value_list``: ``usage`` Archive|Backup (default
        Archive), ``mediatype`` TAPE|DISK (default TAPE), ``blocksize``
        one of 32768, 65536, 131072, 262144, 524288. The pool is
        created without parallelism; use the P5 Web-GUI to change that.

        Example: ``Pool create MyPool usage Archive mediatype TAPE``
        Returns the created pool's name (or a ``Pool`` object).
        """
        method_name = "create"
        result = exec_nsdchat([module_name, method_name, pool_name,
                               option_value_list], p5_connection)
        if as_object is False:
            return result
        else:
            return resourcelist(result, Pool, p5_connection)

    @onereturnvalue
    def disabled(self):
        """Return "1" when this pool is disabled, "0" otherwise."""
        method_name = "disabled"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def drivecount(self, count=None):
        """Set the drives-per-stream for this pool; returns it as string."""
        method_name = "drivecount"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name, count])

    @onereturnvalue
    def enabled(self):
        """Return "1" when this pool is enabled, "0" otherwise."""
        method_name = "enabled"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def mediatype(self):
        """Return "TAPE" or "DISK" -- this pool's labeled media type."""
        method_name = "mediatype"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def totalsize(self):
        """Return the estimated pool capacity in kbytes.

        The true capacity depends on wear and faulty blocks; treat the
        value as an estimate.
        """
        method_name = "totalsize"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def usage(self):
        """Return the pool's purpose: "Archive" or "Backup"."""
        method_name = "usage"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def usedsize(self):
        """Return kbytes already written to this pool (0 = unused)."""
        method_name = "usedsize"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    def volumes(self, as_object=True):
        """List volume IDs labeled for this pool ("<empty>" when none).

        Returns ``Volume`` objects by default, raw IDs when
        ``as_object`` is false.
        """
        method_name = "volumes"
        result = self.p5_connection.nsdchat_call([module_name, self.name,
                                                  method_name])
        if as_object is False:
            return result
        else:
            return resourcelist(result, Volume, self.p5_connection)

    def __repr__(self):
        return ": ".join([module_name, self.name])
| 36.755814
| 80
| 0.630101
|
4a11f2f4f970acafd14d6e9572833d214604b12b
| 951
|
py
|
Python
|
lhotse/bin/modes/recipes/rir_noise.py
|
rosrad/lhotse
|
177ce3a6b963d4ac56a87843a0130ccfc74b3a57
|
[
"Apache-2.0"
] | 353
|
2020-10-31T10:38:51.000Z
|
2022-03-30T05:22:52.000Z
|
lhotse/bin/modes/recipes/rir_noise.py
|
rosrad/lhotse
|
177ce3a6b963d4ac56a87843a0130ccfc74b3a57
|
[
"Apache-2.0"
] | 353
|
2020-10-27T23:25:12.000Z
|
2022-03-31T22:16:05.000Z
|
lhotse/bin/modes/recipes/rir_noise.py
|
rosrad/lhotse
|
177ce3a6b963d4ac56a87843a0130ccfc74b3a57
|
[
"Apache-2.0"
] | 66
|
2020-11-01T06:08:08.000Z
|
2022-03-29T02:03:07.000Z
|
import click
from lhotse.bin.modes import download, prepare
from lhotse.recipes import download_rir_noise, prepare_rir_noise
from lhotse.utils import Pathlike
from typing import Sequence, Union
__all__ = ["rir_noise"]
# Registered on the ``prepare`` group: builds Lhotse manifests from an
# already-downloaded RIRS_NOISES corpus directory.
@prepare.command()
@click.argument("corpus_dir", type=click.Path(exists=True, dir_okay=True))
@click.argument("output_dir", type=click.Path())
@click.option(
    "--parts",
    "-p",
    type=str,
    multiple=True,
    default=["point_noise", "iso_noise", "real_rir", "sim_rir"],
    show_default=True,
    help="Parts to prepare.",
)
def rir_noise(
    corpus_dir: Pathlike, output_dir: Pathlike, parts: Union[str, Sequence[str]]
):
    """RIRS and noises data preparation."""
    prepare_rir_noise(corpus_dir, output_dir=output_dir, parts=parts)
# NOTE(review): this reuses the function name ``rir_noise`` from the
# prepare command above (flake8 F811). Click registers each command at
# decoration time so both CLI commands still work, but the module-level
# attribute now refers only to this download command -- confirm intended.
@download.command()
@click.argument("target_dir", type=click.Path())
def rir_noise(target_dir: Pathlike):
    """RIRS and noises download."""
    download_rir_noise(target_dir)
| 26.416667
| 80
| 0.724501
|
4a11f472b7795f1c8fa3a16d97d21dc9695df4c8
| 234
|
py
|
Python
|
sandbox/finetuning/envs/mujoco/gather/ant_half_gravity_gather_env.py
|
andrewli77/rllab-finetuning
|
2dae9141d0fdc284d04f18931907131d66b43023
|
[
"MIT"
] | 23
|
2020-04-27T23:53:44.000Z
|
2022-03-10T03:13:16.000Z
|
sandbox/finetuning/envs/mujoco/gather/ant_half_gravity_gather_env.py
|
WeiChengTseng/rllab-finetuning
|
2dae9141d0fdc284d04f18931907131d66b43023
|
[
"MIT"
] | 1
|
2021-11-14T13:30:22.000Z
|
2021-11-14T13:30:22.000Z
|
sandbox/finetuning/envs/mujoco/gather/ant_half_gravity_gather_env.py
|
WeiChengTseng/rllab-finetuning
|
2dae9141d0fdc284d04f18931907131d66b43023
|
[
"MIT"
] | 8
|
2020-06-17T03:28:34.000Z
|
2022-03-09T03:13:03.000Z
|
from rllab.envs.mujoco.gather.gather_env import GatherEnv
from sandbox.finetuning.envs.mujoco.half_gravity_ant import HalfGravityAntEnv
class HalfGravityAntGatherEnv(GatherEnv):
    """Gather task using the half-gravity Ant as the agent model."""
    MODEL_CLASS = HalfGravityAntEnv
    # Index of the agent's orientation entry consumed by GatherEnv --
    # presumably into the qpos/observation vector; TODO confirm against
    # GatherEnv's orientation lookup.
    ORI_IND = 3
| 23.4
| 77
| 0.824786
|
4a11f50c099f3fae533e5fff4f0276ca31dc3cb6
| 409
|
py
|
Python
|
backend/click_time_28533/wsgi.py
|
crowdbotics-apps/click-time-28533
|
89f4f7e05bf04623b2822899677b6c3606968151
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/click_time_28533/wsgi.py
|
crowdbotics-apps/click-time-28533
|
89f4f7e05bf04623b2822899677b6c3606968151
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/click_time_28533/wsgi.py
|
crowdbotics-apps/click-time-28533
|
89f4f7e05bf04623b2822899677b6c3606968151
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
"""
WSGI config for click_time_28533 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Use the project settings module unless the environment already names one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "click_time_28533.settings")

# Module-level WSGI callable imported by servers (gunicorn, uWSGI, ...).
application = get_wsgi_application()
| 24.058824
| 78
| 0.794621
|
4a11f5dc99161ce6ed7ef01bff03084d88d675a0
| 435
|
py
|
Python
|
project/Support/Code/Sheets/models/components/settings.py
|
fael07/Django-Helper
|
bcc9de58f0453452b017b2e219130fbf5a3d48d7
|
[
"MIT"
] | null | null | null |
project/Support/Code/Sheets/models/components/settings.py
|
fael07/Django-Helper
|
bcc9de58f0453452b017b2e219130fbf5a3d48d7
|
[
"MIT"
] | null | null | null |
project/Support/Code/Sheets/models/components/settings.py
|
fael07/Django-Helper
|
bcc9de58f0453452b017b2e219130fbf5a3d48d7
|
[
"MIT"
] | null | null | null |
from abc import ABC
class AppSettings(ABC):
    """Mixin with helpers that edit a Django project's settings file.

    Relies on the host class providing ``self.settings`` (an editor with
    ``insert_code``/``add_in_end``), ``self.app`` (the app name) and
    ``self.response`` (user feedback) -- TODO confirm against host class.
    """

    def register_app(self):
        # Insert the app's AppConfig entry after the "# My apps" marker
        # (presumably inside INSTALLED_APPS; verify in the template).
        self.settings.insert_code(' # My apps', f" '{self.app}.{self.app.title()}Config',")
        self.response('app foi registrado')

    def register_abstract_user(self):
        # Point AUTH_USER_MODEL at the custom accounts.User model.
        self.settings.add_in_end(f"\nAUTH_USER_MODEL = 'accounts.User'")
        self.response('Registrado modelo padrão de usuário')
| 31.071429
| 97
| 0.597701
|
4a11f6a3a00ff3d08d4770ee5bcfebfbe9a2dd66
| 4,691
|
py
|
Python
|
tiled/structures/structured_array.py
|
AustralianSynchrotron/tiled
|
307d2f3b7e9b841afdf5af716f218584e4c3d530
|
[
"BSD-3-Clause"
] | null | null | null |
tiled/structures/structured_array.py
|
AustralianSynchrotron/tiled
|
307d2f3b7e9b841afdf5af716f218584e4c3d530
|
[
"BSD-3-Clause"
] | null | null | null |
tiled/structures/structured_array.py
|
AustralianSynchrotron/tiled
|
307d2f3b7e9b841afdf5af716f218584e4c3d530
|
[
"BSD-3-Clause"
] | null | null | null |
from dataclasses import dataclass
import json
from typing import Tuple, List, Union, Optional
import numpy
from .array import MachineDataType as BuiltinType, ArrayMacroStructure
from ..media_type_registration import serialization_registry
@dataclass
class Field:
    """One named field of a structured numpy dtype.

    ``dtype`` is either a scalar machine type or a nested struct;
    ``shape`` is the per-item subarray shape, or None for scalars.
    """

    name: str
    dtype: Union[BuiltinType, "StructDtype"]
    shape: Optional[Tuple[int, ...]]

    @classmethod
    def from_numpy_descr(cls, field):
        """Build a Field from one entry of ``numpy.dtype.descr``."""
        name, *rest = field
        # An empty name means we were handed the descr of a base dtype or
        # subdtype rather than a structured field.
        if name == "":
            raise ValueError(
                f"You seem to have gotten descr of a base or subdtype: {field}"
            )
        if len(rest) == 1:
            (f_type,) = rest
            shape = None
        else:
            f_type, shape = rest
        # A plain format string (e.g. "<i8") is a builtin type; otherwise
        # the field is itself a nested structured dtype.
        if isinstance(f_type, str):
            FType = BuiltinType.from_numpy_dtype(numpy.dtype(f_type))
        else:
            FType = StructDtype.from_numpy_dtype(numpy.dtype(f_type))
        return cls(name=name, dtype=FType, shape=shape)

    def to_numpy_descr(self):
        """Inverse of ``from_numpy_descr``: one ``dtype.descr`` entry."""
        if isinstance(self.dtype, BuiltinType):
            base = [self.name, self.dtype.to_numpy_str()]
        else:
            base = [self.name, self.dtype.to_numpy_descr()]
        if self.shape is None:
            return tuple(base)
        else:
            return tuple(base + [self.shape])

    @classmethod
    def from_json(cls, structure):
        """Rebuild a Field from its JSON (dict) representation."""
        name = structure["name"]
        # Nested structs are distinguished by the presence of "fields".
        if "fields" in structure["dtype"]:
            ftype = StructDtype.from_json(structure["dtype"])
        else:
            ftype = BuiltinType.from_json(structure["dtype"])
        return cls(name=name, dtype=ftype, shape=structure["shape"])
@dataclass
class StructDtype:
    """JSON-serializable description of a structured numpy dtype.

    ``itemsize`` is the total bytes per item; ``fields`` lists the named
    member fields in order.
    """

    itemsize: int
    fields: List[Field]

    @classmethod
    def from_numpy_dtype(cls, dtype):
        """Build from a structured ``numpy.dtype``; raises on non-struct."""
        # subdtypes push extra dimensions into arrays, we should handle these
        # a layer up and report an array with bigger dimensions.
        if dtype.subdtype is not None:
            raise ValueError(f"We do not know how to encode subdtypes: {dtype}")
        # If this is a builtin type, require the use of BuiltinType (nee .array.MachineDataType)
        if dtype.fields is None:
            raise ValueError(f"You have a base type: {dtype}")
        return cls(
            itemsize=dtype.itemsize,
            fields=[Field.from_numpy_descr(f) for f in dtype.descr],
        )

    def to_numpy_dtype(self):
        """Reconstruct the equivalent ``numpy.dtype``."""
        return numpy.dtype(self.to_numpy_descr())

    def to_numpy_descr(self):
        """Return the ``dtype.descr``-style list of field tuples."""
        return [f.to_numpy_descr() for f in self.fields]

    def max_depth(self):
        """Return the nesting depth: 1 when no field is itself a struct."""
        return max(
            1 if isinstance(f.dtype, BuiltinType) else 1 + f.dtype.max_depth()
            for f in self.fields
        )

    @classmethod
    def from_json(cls, structure):
        """Rebuild from the JSON (dict) representation."""
        return cls(
            itemsize=structure["itemsize"],
            fields=[Field.from_json(f) for f in structure["fields"]],
        )
@dataclass
class StructuredArrayGenericStructure:
    """Structure description for an N-d structured (record) array."""

    macro: ArrayMacroStructure
    micro: StructDtype

    @classmethod
    def from_json(cls, structure):
        """Rebuild from a JSON dict with "macro" and "micro" keys."""
        return cls(
            macro=ArrayMacroStructure.from_json(structure["macro"]),
            micro=StructDtype.from_json(structure["micro"]),
        )
@dataclass
class ArrayTabularMacroStructure:
    """
    Similar to ArrayMacroStructure, but must be 1D

    Unlike DataFrameMacroStructure, it knows its exact length and chunk
    sizes; dataframes only know their number of partitions.
    """

    chunks: Tuple[Tuple[int]]
    shape: Tuple[int]

    @classmethod
    def from_json(cls, structure):
        """Deserialize from a JSON dict with "chunks" and "shape" keys."""
        chunk_tuples = tuple(tuple(chunk) for chunk in structure["chunks"])
        return cls(chunks=chunk_tuples, shape=tuple(structure["shape"]))
@dataclass
class StructuredArrayTabularStructure:
    """Structure description for a 1-D (tabular) structured array.

    ``macro`` carries length/chunking; ``micro`` the structured dtype.
    """

    macro: ArrayTabularMacroStructure
    micro: StructDtype

    @classmethod
    def from_json(cls, structure):
        """Rebuild from a JSON dict.

        Bug fix: the macrostructure was previously parsed with
        ``ArrayMacroStructure.from_json`` (copy/paste from the generic
        variant) even though the field is declared as
        ``ArrayTabularMacroStructure``.
        """
        return cls(
            macro=ArrayTabularMacroStructure.from_json(structure["macro"]),
            micro=StructDtype.from_json(structure["micro"]),
        )
# Register serializers for both structured-array flavors. The
# octet-stream form dumps the raw C-contiguous buffer; the JSON form
# serializes via numpy's tolist().
serialization_registry.register(
    "structured_array_generic",
    "application/octet-stream",
    lambda array, metadata: memoryview(numpy.ascontiguousarray(array)),
)
serialization_registry.register(
    "structured_array_generic",
    "application/json",
    lambda array, metadata: json.dumps(array.tolist()).encode(),
)
serialization_registry.register(
    "structured_array_tabular",
    "application/octet-stream",
    lambda array, metadata: memoryview(numpy.ascontiguousarray(array)),
)
serialization_registry.register(
    "structured_array_tabular",
    "application/json",
    lambda array, metadata: json.dumps(array.tolist()).encode(),
)
| 29.136646
| 96
| 0.646557
|
4a11f6c15694735007ea726608b3e3d7663b2e68
| 2,293
|
py
|
Python
|
kubernetes/test/test_io_xk8s_cluster_v1alpha4_cluster_class_spec_control_plane_machine_infrastructure_ref.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_v1alpha4_cluster_class_spec_control_plane_machine_infrastructure_ref.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_v1alpha4_cluster_class_spec_control_plane_machine_infrastructure_ref.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.io_xk8s_cluster_v1alpha4_cluster_class_spec_control_plane_machine_infrastructure_ref import IoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef # noqa: E501
from kubernetes.client.rest import ApiException
class TestIoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef(unittest.TestCase):
    """IoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a model instance for testing.

        With ``include_optional`` False only required parameters are
        set; with True both required and optional parameters are set.
        """
        if include_optional:
            return IoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef(
                api_version='0',
                field_path='0',
                kind='0',
                name='0',
                namespace='0',
                resource_version='0',
                uid='0',
            )
        return IoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef()

    def testIoXK8sClusterV1alpha4ClusterClassSpecControlPlaneMachineInfrastructureRef(self):
        """Smoke-test construction with and without optional parameters."""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 38.864407
| 217
| 0.737462
|
4a11f700ac03162bdee7baa4b33ca642e852cfa7
| 11,306
|
py
|
Python
|
examples/HED/hed.py
|
awesome-archive/tensorpack
|
55f640f70e19d538e5082a4712241ee966fcb201
|
[
"Apache-2.0"
] | 2
|
2021-01-31T11:18:49.000Z
|
2021-02-24T14:42:16.000Z
|
examples/HED/hed.py
|
yangxue0827/tensorpack
|
e5e54e07bb47f85fc7efe9c78bde3e153ef0d49b
|
[
"Apache-2.0"
] | null | null | null |
examples/HED/hed.py
|
yangxue0827/tensorpack
|
e5e54e07bb47f85fc7efe9c78bde3e153ef0d49b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: hed.py
# Author: Yuxin Wu
import argparse
import numpy as np
import os
import cv2
import tensorflow as tf
from six.moves import zip
from tensorpack import *
from tensorpack.dataflow import dataset
from tensorpack.tfutils import gradproc, optimizer
from tensorpack.tfutils.summary import add_moving_summary, add_param_summary
from tensorpack.utils.gpu import get_num_gpu
from tensorpack.utils import logger
def class_balanced_sigmoid_cross_entropy(logits, label, name='cross_entropy_loss'):
    """
    The class-balanced cross entropy loss,
    as in `Holistically-Nested Edge Detection
    <http://arxiv.org/abs/1504.06375>`_.

    Args:
        logits: of shape (b, ...).
        label: of the same shape. the ground truth in {0,1}.

    Returns:
        class-balanced cross entropy loss.
    """
    with tf.name_scope('class_balanced_sigmoid_cross_entropy'):
        y = tf.cast(label, tf.float32)

        # Edge pixels are rare: beta is the fraction of negative
        # (non-edge) pixels, used to re-weight the two classes.
        count_neg = tf.reduce_sum(1. - y)
        count_pos = tf.reduce_sum(y)
        beta = count_neg / (count_neg + count_pos)

        # pos_weight scales the positive term inside the weighted cross
        # entropy; the extra (1 - beta) factor rescales the mean.
        pos_weight = beta / (1 - beta)
        cost = tf.nn.weighted_cross_entropy_with_logits(logits=logits, targets=y, pos_weight=pos_weight)
        cost = tf.reduce_mean(cost * (1 - beta))

        # Guard against the division by zero above: with no positive
        # labels at all, define the loss as 0.
        zero = tf.equal(count_pos, 0.0)
        return tf.where(zero, 0.0, cost, name=name)
@layer_register(log_shape=True)
def CaffeBilinearUpSample(x, shape):
    """
    Deterministic bilinearly-upsample the input images.
    It is implemented by deconvolution with "BilinearFiller" in Caffe.
    It is aimed to mimic caffe behavior.

    Args:
        x (tf.Tensor): a NCHW tensor
        shape (int): the upsample factor

    Returns:
        tf.Tensor: a NCHW tensor.
    """
    inp_shape = x.shape.as_list()
    ch = inp_shape[1]
    assert ch == 1, "This layer only works for channel=1"
    shape = int(shape)
    filter_shape = 2 * shape

    def bilinear_conv_filler(s):
        """
        s: width, height of the conv filter
        https://github.com/BVLC/caffe/blob/99bd99795dcdf0b1d3086a8d67ab1782a8a08383/include/caffe/filler.hpp#L219-L268
        """
        f = np.ceil(float(s) / 2)
        c = float(2 * f - 1 - f % 2) / (2 * f)
        ret = np.zeros((s, s), dtype='float32')
        # NOTE: x/y here shadow the outer tensor name; they are just
        # filter coordinates local to this helper.
        for x in range(s):
            for y in range(s):
                ret[x, y] = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
        return ret
    # Fixed (non-trainable) bilinear kernel of size 2*shape, HWIO layout.
    w = bilinear_conv_filler(filter_shape)
    w = np.repeat(w, ch * ch).reshape((filter_shape, filter_shape, ch, ch))

    weight_var = tf.constant(w, tf.float32,
                             shape=(filter_shape, filter_shape, ch, ch),
                             name='bilinear_upsample_filter')
    # Symmetric padding before the transposed conv, then crop the extra
    # border afterwards -- presumably matching Caffe's border handling
    # (TODO confirm against Caffe's Deconvolution layer).
    x = tf.pad(x, [[0, 0], [0, 0], [shape - 1, shape - 1], [shape - 1, shape - 1]], mode='SYMMETRIC')
    out_shape = tf.shape(x) * tf.constant([1, 1, shape, shape], tf.int32)
    deconv = tf.nn.conv2d_transpose(x, weight_var, out_shape,
                                    [1, 1, shape, shape], 'SAME', data_format='NCHW')
    edge = shape * (shape - 1)
    deconv = deconv[:, :, edge:-edge, edge:-edge]

    # Restore a static shape (upsampled H/W) when it was known.
    if inp_shape[2]:
        inp_shape[2] *= shape
    if inp_shape[3]:
        inp_shape[3] *= shape
    deconv.set_shape(inp_shape)
    return deconv
class Model(ModelDesc):
    """HED edge-detection network: VGG-style backbone with 5 side outputs
    plus a learned fusion layer, trained with class-balanced loss."""

    def inputs(self):
        # image: NHWC float; edgemap: per-pixel {0,1} ground truth.
        return [tf.placeholder(tf.float32, [None, None, None, 3], 'image'),
                tf.placeholder(tf.int32, [None, None, None], 'edgemap')]

    def build_graph(self, image, edgemap):
        # Subtract the per-channel mean and switch to NCHW for the convs.
        image = image - tf.constant([104, 116, 122], dtype='float32')
        image = tf.transpose(image, [0, 3, 1, 2])
        edgemap = tf.expand_dims(edgemap, 3, name='edgemap4d')

        def branch(name, l, up):
            # Side-output branch: 1x1 conv to a single channel, then
            # repeated 2x bilinear upsampling back to input resolution.
            with tf.variable_scope(name):
                l = Conv2D('convfc', l, 1, kernel_size=1, activation=tf.identity,
                           use_bias=True,
                           kernel_initializer=tf.constant_initializer())
                while up != 1:
                    l = CaffeBilinearUpSample('upsample{}'.format(up), l, 2)
                    up = up // 2
                return l

        with argscope(Conv2D, kernel_size=3, activation=tf.nn.relu), \
                argscope([Conv2D, MaxPooling], data_format='NCHW'):
            l = Conv2D('conv1_1', image, 64)
            l = Conv2D('conv1_2', l, 64)
            b1 = branch('branch1', l, 1)
            l = MaxPooling('pool1', l, 2)

            l = Conv2D('conv2_1', l, 128)
            l = Conv2D('conv2_2', l, 128)
            b2 = branch('branch2', l, 2)
            l = MaxPooling('pool2', l, 2)

            l = Conv2D('conv3_1', l, 256)
            l = Conv2D('conv3_2', l, 256)
            l = Conv2D('conv3_3', l, 256)
            b3 = branch('branch3', l, 4)
            l = MaxPooling('pool3', l, 2)

            l = Conv2D('conv4_1', l, 512)
            l = Conv2D('conv4_2', l, 512)
            l = Conv2D('conv4_3', l, 512)
            b4 = branch('branch4', l, 8)
            l = MaxPooling('pool4', l, 2)

            l = Conv2D('conv5_1', l, 512)
            l = Conv2D('conv5_2', l, 512)
            l = Conv2D('conv5_3', l, 512)
            b5 = branch('branch5', l, 16)

        # Fuse the 5 side outputs with a fixed-init 1x1 conv (0.2 each).
        final_map = Conv2D('convfcweight',
                           tf.concat([b1, b2, b3, b4, b5], 1), 1, kernel_size=1,
                           kernel_initializer=tf.constant_initializer(0.2),
                           use_bias=False, activation=tf.identity)
        costs = []
        # One class-balanced loss per side output plus the fused map.
        for idx, b in enumerate([b1, b2, b3, b4, b5, final_map]):
            b = tf.transpose(b, [0, 2, 3, 1])
            output = tf.nn.sigmoid(b, name='output{}'.format(idx + 1))
            xentropy = class_balanced_sigmoid_cross_entropy(
                b, edgemap,
                name='xentropy{}'.format(idx + 1))
            costs.append(xentropy)

        # some magic threshold
        pred = tf.cast(tf.greater(output, 0.5), tf.int32, name='prediction')
        wrong = tf.cast(tf.not_equal(pred, edgemap), tf.float32)
        wrong = tf.reduce_mean(wrong, name='train_error')

        # Decaying L2 weight regularization on all conv weights.
        wd_w = tf.train.exponential_decay(2e-4, get_global_step_var(),
                                          80000, 0.7, True)
        wd_cost = tf.multiply(wd_w, regularize_cost('.*/W', tf.nn.l2_loss), name='wd_cost')
        costs.append(wd_cost)

        add_param_summary(('.*/W', ['histogram']))   # monitor W
        total_cost = tf.add_n(costs, name='cost')
        add_moving_summary(wrong, total_cost, *costs)
        return total_cost

    def optimizer(self):
        """Adam with a runtime-tunable LR and per-scope gradient scaling."""
        lr = tf.get_variable('learning_rate', initializer=3e-5, trainable=False)
        opt = tf.train.AdamOptimizer(lr, epsilon=1e-3)
        # Damp the fusion layer's gradients, boost conv5_*'s.
        return optimizer.apply_grad_processors(
            opt, [gradproc.ScaleGradient(
                [('convfcweight.*', 0.1), ('conv5_.*', 5)])])
def get_data(name):
    """Build the BSDS500 dataflow for split *name* ('train' gets heavy
    augmentation and batching by shape; other splits get a center crop
    and batch size 1)."""
    isTrain = name == 'train'
    ds = dataset.BSDS500(name, shuffle=True)

    class CropMultiple16(imgaug.ImageAugmentor):
        """Randomly crop so both spatial dims become multiples of 16."""

        def _get_augment_params(self, img):
            newh = img.shape[0] // 16 * 16
            neww = img.shape[1] // 16 * 16
            assert newh > 0 and neww > 0
            diffh = img.shape[0] - newh
            h0 = 0 if diffh == 0 else self.rng.randint(diffh)
            diffw = img.shape[1] - neww
            w0 = 0 if diffw == 0 else self.rng.randint(diffw)
            return (h0, w0, newh, neww)

        def _augment(self, img, param):
            h0, w0, newh, neww = param
            return img[h0:h0 + newh, w0:w0 + neww]

    if isTrain:
        shape_aug = [
            imgaug.RandomResize(xrange=(0.7, 1.5), yrange=(0.7, 1.5),
                                aspect_ratio_thres=0.15),
            imgaug.RotationAndCropValid(90),
            CropMultiple16(),
            imgaug.Flip(horiz=True),
            imgaug.Flip(vert=True)
        ]
    else:
        # the original image shape (321x481) in BSDS is not a multiple of 16
        IMAGE_SHAPE = (320, 480)
        shape_aug = [imgaug.CenterCrop(IMAGE_SHAPE)]
    # Apply the geometric augmentors to image and edgemap together.
    ds = AugmentImageComponents(ds, shape_aug, (0, 1), copy=False)

    def f(m):  # thresholding
        # Binarize the (float) edge annotations at 0.5.
        m[m >= 0.50] = 1
        m[m < 0.50] = 0
        return m
    ds = MapDataComponent(ds, f, 1)

    if isTrain:
        # Photometric augmentation on the image only.
        augmentors = [
            imgaug.Brightness(63, clip=False),
            imgaug.Contrast((0.4, 1.5)),
        ]
        ds = AugmentImageComponent(ds, augmentors, copy=False)
        # Variable image sizes: batch together samples of equal shape.
        ds = BatchDataByShape(ds, 8, idx=0)
        ds = PrefetchDataZMQ(ds, 1)
    else:
        ds = BatchData(ds, 1)
    return ds
def view_data():
    """Visually inspect augmented training pairs (image + edge map)."""
    dataflow = RepeatedData(get_data('train'), -1)
    dataflow.reset_state()
    for image_batch, edgemap_batch in dataflow:
        for image, edgemap in zip(image_batch, edgemap_batch):
            # The pipeline crops to multiples of 16; verify that here.
            assert image.shape[0] % 16 == 0 and image.shape[1] % 16 == 0, image.shape
            cv2.imshow("im", image / 255.0)
            cv2.waitKey(1000)
            cv2.imshow("edge", edgemap)
            cv2.waitKey(1000)
def get_config():
    """Build the TrainConfig: dataflow, LR schedule, validation stats."""
    logger.auto_set_dir()
    dataset_train = get_data('train')
    # 40 passes over the (small) BSDS training set per reported "epoch".
    steps_per_epoch = len(dataset_train) * 40
    dataset_val = get_data('val')

    return TrainConfig(
        dataflow=dataset_train,
        callbacks=[
            ModelSaver(),
            # Staged learning-rate drops at epochs 30/45/60, plus a hook
            # for changing the LR manually while training.
            ScheduledHyperParamSetter('learning_rate', [(30, 6e-6), (45, 1e-6), (60, 8e-7)]),
            HumanHyperParamSetter('learning_rate'),
            InferenceRunner(dataset_val,
                            BinaryClassificationStats('prediction', 'edgemap4d'))
        ],
        model=Model(),
        steps_per_epoch=steps_per_epoch,
        max_epoch=100,
    )
def run(model_path, image_path, output):
    """Run inference on one image and write the predicted edge map(s) to disk."""
    predictor = OfflinePredictor(PredictConfig(
        model=Model(),
        session_init=get_model_loader(model_path),
        input_names=['image'],
        output_names=['output' + str(k) for k in range(1, 7)]))
    image = cv2.imread(image_path)
    assert image is not None
    # Round both sides down to a multiple of 16 and add a batch axis.
    width = image.shape[1] // 16 * 16
    height = image.shape[0] // 16 * 16
    batch = cv2.resize(image, (width, height))[None, :, :, :].astype('float32')
    outputs = predictor(batch)
    if output is None:
        # Dump every side output; the sixth (k == 5) is the fused prediction.
        for k in range(6):
            suffix = '-fused' if k == 5 else str(k + 1)
            cv2.imwrite("out{}.png".format(suffix), outputs[k][0] * 255)
        logger.info("Results saved to out*.png")
    else:
        cv2.imwrite(output, outputs[5][0] * 255)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.')
    parser.add_argument('--load', help='load model')
    parser.add_argument('--view', help='view dataset', action='store_true')
    parser.add_argument('--run', help='run model on images')
    parser.add_argument('--output', help='fused output filename. default to out-fused.png')
    args = parser.parse_args()
    if args.gpu:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
    # Three modes: --view shows augmented data, --run performs inference on
    # one image, otherwise train (optionally resuming from --load).
    if args.view:
        view_data()
    elif args.run:
        run(args.load, args.run, args.output)
    else:
        config = get_config()
        if args.load:
            config.session_init = get_model_loader(args.load)
        launch_train_with_config(
            config,
            SyncMultiGPUTrainer(max(get_num_gpu(), 1)))
| 35.553459
| 118
| 0.56669
|
4a11f72e3871c2b40155b036cac14c43a296b3e6
| 3,212
|
py
|
Python
|
recommendation_system_demos/Basic-CMN-Demo/util/evaluation.py
|
sweetpand/tensorflow_mri
|
7a483cbbbe515ad395928311759505707bd72503
|
[
"MIT"
] | 2
|
2021-02-06T11:27:17.000Z
|
2021-05-29T10:45:22.000Z
|
recommendation_system_demos/Basic-CMN-Demo/util/evaluation.py
|
sweetpand/tensorflow_mri
|
7a483cbbbe515ad395928311759505707bd72503
|
[
"MIT"
] | null | null | null |
recommendation_system_demos/Basic-CMN-Demo/util/evaluation.py
|
sweetpand/tensorflow_mri
|
7a483cbbbe515ad395928311759505707bd72503
|
[
"MIT"
] | 1
|
2020-10-23T20:26:36.000Z
|
2020-10-23T20:26:36.000Z
|
import numpy as np
import tensorflow as tf
from tqdm import tqdm
def get_model_scores(sess, test_data, neighborhood, input_user_handle, input_item_handle,
                     input_neighborhood_handle, input_neighborhood_length_handle,
                     dropout_handle, score_op, max_neighbors, return_scores=False):
    """Score each user's positive item against its sampled negatives.

    test_data maps user -> (positive_item, np.array of negative_items).
    Returns one flat score array per user, with the positive item scored
    LAST (get_eval relies on that position). With return_scores=True also
    returns a tab-separated printable dump of per-item scores.
    Note: dropout_handle is accepted but not used in this function.
    """
    out = ''
    scores = []
    progress = tqdm(test_data.items(), total=len(test_data),
                    leave=False, desc=u'Evaluate || ')
    for user, (pos, neg) in progress:
        # Positive item appended last so its index is len(neg).
        item_indices = list(neg) + [pos]
        feed = {
            input_user_handle: [user] * (len(neg) + 1),
            input_item_handle: item_indices,
        }
        if neighborhood is not None:
            # Pad each item's neighborhood to max_neighbors entries;
            # lengths start at 1 so empty neighborhoods stay valid.
            neighborhoods, neighborhood_length = np.zeros((len(neg) + 1, max_neighbors),
                                                          dtype=np.int32), np.ones(len(neg) + 1, dtype=np.int32)
            for _idx, item in enumerate(item_indices):
                _len = min(len(neighborhood[item]), max_neighbors)
                if _len > 0:
                    neighborhoods[_idx, :_len] = neighborhood[item][:_len]
                    neighborhood_length[_idx] = _len
                else:
                    # Items with no neighbors fall back to the user itself.
                    neighborhoods[_idx, :1] = user
            feed.update({
                input_neighborhood_handle: neighborhoods,
                input_neighborhood_length_handle: neighborhood_length
            })
        score = sess.run(score_op, feed)
        scores.append(score.ravel())
        if return_scores:
            s = ' '.join(["{}:{}".format(n, s) for s, n in zip(score.ravel().tolist(), item_indices)])
            out += "{}\t{}\n".format(user, s)
    if return_scores:
        return scores, out
    return scores
def evaluate_model(sess, test_data, neighborhood, input_user_handle, input_item_handle,
                   input_neighborhood_handle, input_neighborhood_length_handle,
                   dropout_handle, score_op, max_neighbors, EVAL_AT=(1, 5, 10)):
    """Compute HR@k and NDCG@k for every cutoff k in EVAL_AT.

    Scores come from get_model_scores(), where the positive item is the
    last column of each score row. Returns (hrs, ndcgs): lists parallel to
    EVAL_AT. A small results table is written via tf.logging.
    EVAL_AT defaults to a tuple to avoid the mutable-default-argument trap.
    """
    scores = get_model_scores(sess, test_data, neighborhood, input_user_handle, input_item_handle,
                              input_neighborhood_handle, input_neighborhood_length_handle,
                              dropout_handle, score_op, max_neighbors)
    hrs = []
    ndcgs = []
    report = '\n'
    for k in EVAL_AT:
        # The positive item is at the last index of every score row.
        hr, ndcg = get_eval(scores, len(scores[0]) - 1, k)
        report += "{:<14} {:<14.6f}{:<14} {:.6f}\n".format('HR@%s' % k, hr, 'NDCG@%s' % k, ndcg)
        hrs.append(hr)
        ndcgs.append(ndcg)
    tf.logging.info(report + '\n')
    return hrs, ndcgs
def get_eval(scores, index, top_n=10):
    """Compute (HR@top_n, NDCG@top_n) given per-user score rows.

    `index` is the position of the correct item inside every score row
    (the caller places it last, i.e. len(scores[0]) - 1).
    """
    assert len(scores[0]) > index and index >= 0
    hits = 0.0
    gain = 0.0
    for row in scores:
        # Indices of the top_n highest-scored items, best first.
        ranked = np.argsort(-row)[:top_n].tolist()
        if index in ranked:
            hits += 1.0
            gain += np.log(2.0) / np.log(ranked.index(index) + 2.0)
    total = len(scores)
    return hits / total, gain / total
| 36.5
| 112
| 0.56538
|
4a11f73e01157fb86d2cd1fbcc1c489543df10e1
| 3,049
|
py
|
Python
|
api/app/schemas/weather_models.py
|
bcgov/wps
|
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
|
[
"Apache-2.0"
] | 19
|
2020-01-31T21:51:31.000Z
|
2022-01-07T14:40:03.000Z
|
api/app/schemas/weather_models.py
|
bcgov/wps
|
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
|
[
"Apache-2.0"
] | 1,680
|
2020-01-24T23:25:08.000Z
|
2022-03-31T23:50:27.000Z
|
api/app/schemas/weather_models.py
|
bcgov/wps
|
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
|
[
"Apache-2.0"
] | 6
|
2020-04-28T22:41:08.000Z
|
2021-05-05T18:16:06.000Z
|
""" This module contains pydandict schemas relating to weather models for the API.
"""
from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel

from app.schemas.stations import WeatherStation
class WeatherPredictionModel(BaseModel):
    """ The full name & acronym for a weather prediction model """
    # Human-readable model name and its short code (e.g. used in URLs).
    name: str
    abbrev: str
class CHainesModelRunPredictions(BaseModel):
    """ Prediction timestamps available for one model run of a model. """
    model: WeatherPredictionModel
    model_run_timestamp: datetime
    prediction_timestamps: List[datetime]
class CHainesModelRuns(BaseModel):
    """ List of model runs, each with its prediction timestamps. """
    model_runs: List[CHainesModelRunPredictions]
class CHainesRequest(BaseModel):
    """ Request for a particular prediction of a particular model run. """
    model_run_timestamp: datetime
    prediction_timestamp: datetime
class WeatherModelPredictionSummaryValues(BaseModel):
    """ Summary of model prediction values at one prediction time. """
    datetime: datetime
    # 5th/90th percentile and median of predicted temperature
    # (tmp_tgl_2 presumably = temperature 2 m above ground — verify naming).
    tmp_tgl_2_5th: float
    tmp_tgl_2_90th: float
    tmp_tgl_2_median: float
    # 5th/90th percentile and median of predicted relative humidity.
    rh_tgl_2_5th: float
    rh_tgl_2_90th: float
    rh_tgl_2_median: float
class WeatherModelPredictionSummary(BaseModel):
    """ Summary of weather predictions for a given model.
    Detail: For the global model, we end up with 20 different predictions for every three hours of any given
    day, this represents a summary of that data. """
    station: WeatherStation
    model: WeatherPredictionModel
    # Pydantic deep-copies field defaults, so the shared [] is safe here.
    values: List[WeatherModelPredictionSummaryValues] = []
class WeatherModelPredictionSummaryResponse(BaseModel):
    """ Response containing prediction summaries for a given weather model."""
    summaries: List[WeatherModelPredictionSummary]
class WeatherModelPredictionValues(BaseModel):
    """ The predicted weather values at one prediction time.

    Every field except datetime may be absent (None).
    """
    datetime: datetime
    # Explicit Optional[...] instead of the implicit `float = None` form:
    # pydantic v1 silently converts it, but static checkers and pydantic v2
    # reject the bare annotation. Runtime behavior is unchanged.
    temperature: Optional[float] = None
    bias_adjusted_temperature: Optional[float] = None
    relative_humidity: Optional[float] = None
    bias_adjusted_relative_humidity: Optional[float] = None
    wind_speed: Optional[float] = None
    wind_direction: Optional[float] = None
    delta_precipitation: Optional[float] = None
class WeatherModelRun(BaseModel):
    """ Detail about the model run """
    # Run timestamp plus the model's identity and grid projection.
    datetime: datetime
    name: str
    abbreviation: str
    projection: str
class WeatherModelPrediction(BaseModel):
    """ Weather model prediction for a particular weather station. """
    station: WeatherStation
    # Explicit Optional[...]: pydantic v1 tolerates `= None` with a bare
    # annotation, pydantic v2 and static checkers do not. Behavior unchanged.
    model_run: Optional[WeatherModelRun] = None
    # Pydantic deep-copies field defaults, so the shared [] is safe here.
    values: List[WeatherModelPredictionValues] = []
class ModelRunPredictions(BaseModel):
    """ Predictions for a model run """
    # Explicit Optional[...] rather than the implicit `= None` form
    # (pydantic v1 converts it silently; v2 and static checkers reject it).
    model_run: Optional[WeatherModelRun] = None
    values: List[WeatherModelPredictionValues] = []
class WeatherStationModelRunsPredictions(BaseModel):
    """ Weather model run and predictions for a station. """
    station: WeatherStation
    model_runs: List[ModelRunPredictions]
class WeatherStationsModelRunsPredictionsResponse(BaseModel):
    """ Response containing a number of weather predictions for a number of weather model runs for a number
    of stations."""
    stations: List[WeatherStationModelRunsPredictions]
| 30.188119
| 108
| 0.750738
|
4a11f8301f99df641f1f2ca48e5ba6caac918ade
| 636
|
py
|
Python
|
LeetCode/1-1000/101-200/101-125/108. Convert Sorted Array to Binary Search Tree/solution-python3.py
|
adubois85/coding_challenge_websites
|
7867a05847a216661eff3b24b1cb1480fb7d3030
|
[
"Apache-2.0"
] | null | null | null |
LeetCode/1-1000/101-200/101-125/108. Convert Sorted Array to Binary Search Tree/solution-python3.py
|
adubois85/coding_challenge_websites
|
7867a05847a216661eff3b24b1cb1480fb7d3030
|
[
"Apache-2.0"
] | null | null | null |
LeetCode/1-1000/101-200/101-125/108. Convert Sorted Array to Binary Search Tree/solution-python3.py
|
adubois85/coding_challenge_websites
|
7867a05847a216661eff3b24b1cb1480fb7d3030
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
# Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
    def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
        """Convert a sorted array into a height-balanced BST.

        Index-based recursion replaces the original list slicing, which
        copied O(n) elements at every level (O(n log n) extra work and
        memory overall). The midpoint choice (upper middle for even-length
        segments) matches the original `len(nums) // 2`, so the resulting
        tree shape is identical.
        """
        if not nums:
            return None
        return self._build(nums, 0, len(nums) - 1)

    def _build(self, nums: List[int], lo: int, hi: int) -> TreeNode:
        """Build the subtree for nums[lo..hi] inclusive; None when empty."""
        if lo > hi:
            return None
        # lo + (segment_length // 2): upper middle, same as the slicing code.
        mid = lo + (hi - lo + 1) // 2
        node = TreeNode(nums[mid])
        node.left = self._build(nums, lo, mid - 1)
        node.right = self._build(nums, mid + 1, hi)
        return node
| 26.5
| 78
| 0.591195
|
4a11f8e0cb33d375563084a254c988e470ee01ce
| 8,012
|
py
|
Python
|
sdks/python/http_client/v1/polyaxon_sdk/models/v1_operation_body.py
|
denisoliveirac/polyaxon
|
21d33e50bbbd4bfbe474683f977eecf0b741ddd4
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/http_client/v1/polyaxon_sdk/models/v1_operation_body.py
|
denisoliveirac/polyaxon
|
21d33e50bbbd4bfbe474683f977eecf0b741ddd4
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/http_client/v1/polyaxon_sdk/models/v1_operation_body.py
|
denisoliveirac/polyaxon
|
21d33e50bbbd4bfbe474683f977eecf0b741ddd4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.5.2
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1OperationBody(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'content': 'str',
'is_managed': 'bool',
'is_approved': 'bool',
'name': 'str',
'description': 'str',
'tags': 'list[str]',
'meta_info': 'object'
}
attribute_map = {
'content': 'content',
'is_managed': 'is_managed',
'is_approved': 'is_approved',
'name': 'name',
'description': 'description',
'tags': 'tags',
'meta_info': 'meta_info'
}
def __init__(self, content=None, is_managed=None, is_approved=None, name=None, description=None, tags=None, meta_info=None, local_vars_configuration=None): # noqa: E501
"""V1OperationBody - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._content = None
self._is_managed = None
self._is_approved = None
self._name = None
self._description = None
self._tags = None
self._meta_info = None
self.discriminator = None
if content is not None:
self.content = content
if is_managed is not None:
self.is_managed = is_managed
if is_approved is not None:
self.is_approved = is_approved
if name is not None:
self.name = name
if description is not None:
self.description = description
if tags is not None:
self.tags = tags
if meta_info is not None:
self.meta_info = meta_info
@property
def content(self):
"""Gets the content of this V1OperationBody. # noqa: E501
:return: The content of this V1OperationBody. # noqa: E501
:rtype: str
"""
return self._content
@content.setter
def content(self, content):
"""Sets the content of this V1OperationBody.
:param content: The content of this V1OperationBody. # noqa: E501
:type: str
"""
self._content = content
@property
def is_managed(self):
"""Gets the is_managed of this V1OperationBody. # noqa: E501
:return: The is_managed of this V1OperationBody. # noqa: E501
:rtype: bool
"""
return self._is_managed
@is_managed.setter
def is_managed(self, is_managed):
"""Sets the is_managed of this V1OperationBody.
:param is_managed: The is_managed of this V1OperationBody. # noqa: E501
:type: bool
"""
self._is_managed = is_managed
@property
def is_approved(self):
"""Gets the is_approved of this V1OperationBody. # noqa: E501
:return: The is_approved of this V1OperationBody. # noqa: E501
:rtype: bool
"""
return self._is_approved
@is_approved.setter
def is_approved(self, is_approved):
"""Sets the is_approved of this V1OperationBody.
:param is_approved: The is_approved of this V1OperationBody. # noqa: E501
:type: bool
"""
self._is_approved = is_approved
@property
def name(self):
"""Gets the name of this V1OperationBody. # noqa: E501
:return: The name of this V1OperationBody. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1OperationBody.
:param name: The name of this V1OperationBody. # noqa: E501
:type: str
"""
self._name = name
@property
def description(self):
"""Gets the description of this V1OperationBody. # noqa: E501
:return: The description of this V1OperationBody. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1OperationBody.
:param description: The description of this V1OperationBody. # noqa: E501
:type: str
"""
self._description = description
@property
def tags(self):
"""Gets the tags of this V1OperationBody. # noqa: E501
:return: The tags of this V1OperationBody. # noqa: E501
:rtype: list[str]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""Sets the tags of this V1OperationBody.
:param tags: The tags of this V1OperationBody. # noqa: E501
:type: list[str]
"""
self._tags = tags
@property
def meta_info(self):
"""Gets the meta_info of this V1OperationBody. # noqa: E501
:return: The meta_info of this V1OperationBody. # noqa: E501
:rtype: object
"""
return self._meta_info
@meta_info.setter
def meta_info(self, meta_info):
"""Sets the meta_info of this V1OperationBody.
:param meta_info: The meta_info of this V1OperationBody. # noqa: E501
:type: object
"""
self._meta_info = meta_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1OperationBody):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1OperationBody):
return True
return self.to_dict() != other.to_dict()
| 27.251701
| 173
| 0.594358
|
4a11f930a29e412b67508cfc443a5835528407d3
| 90
|
py
|
Python
|
abc/abc150/abc150a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
abc/abc150/abc150a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
abc/abc150/abc150a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
# Read the number of 500-yen coins K and the price X.
K, X = map(int, input().split())
# Affordable exactly when the coins' total value covers the price.
print('Yes' if 500 * K >= X else 'No')
| 12.857143
| 32
| 0.488889
|
4a11f9cb44e71d13aac99dfc0edd05445ff1b823
| 9,887
|
py
|
Python
|
jackal/config.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 10
|
2018-01-17T20:11:30.000Z
|
2022-02-20T21:31:37.000Z
|
jackal/config.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | null | null | null |
jackal/config.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 1
|
2018-06-21T16:47:16.000Z
|
2018-06-21T16:47:16.000Z
|
"""
Configuration of jackal
"""
from builtins import input
from os.path import expanduser
import os
import configparser
def input_with_default(question, default):
    """Prompt the user; return *default* when the reply is empty."""
    answer = input('{} [{}] '.format(question, default))
    if answer:
        return answer
    return default
def required_input(question):
    """Keep prompting until the user enters a non-empty answer."""
    answer = input(question)
    while not answer:
        print("This option is required")
        answer = input(question)
    return answer
def manual_configure():
    """
    Function to manually configure jackal.
    Walks the user through Elasticsearch, index, nmap, nessus and
    named-pipe settings, then persists them via Config.write_config().
    """
    print("Manual configuring jackal")
    # Translate the stored '1'/'0' flags into the y/n answers shown as defaults.
    mapping = { '1': 'y', '0': 'n'}
    config = Config()
    # Host
    host = input_with_default("What is the Elasticsearch host?", config.get('jackal', 'host'))
    config.set('jackal', 'host', host)
    # SSL
    if input_with_default("Use SSL?", mapping[config.get('jackal', 'use_ssl')]) == 'y':
        config.set('jackal', 'use_ssl', '1')
        if input_with_default("Setup custom server cert?", 'y') == 'y':
            ca_certs = input_with_default("Server certificate location?", config.get('jackal', 'ca_certs'))
            config.set('jackal', 'ca_certs', ca_certs)
        else:
            # Empty value means: use the system CA bundle.
            config.set('jackal', 'ca_certs', '')
    else:
        config.set('jackal', 'use_ssl', '0')
    if input_with_default("Setup client certificates?", mapping[config.get('jackal', 'client_certs')]) == 'y':
        config.set('jackal', 'client_certs', '1')
        client_cert = input_with_default("Client cert location?", config.get('jackal', 'client_cert'))
        config.set('jackal', 'client_cert', client_cert)
        client_key = input_with_default("Client key location?", config.get('jackal', 'client_key'))
        config.set('jackal', 'client_key', client_key)
    else:
        config.set('jackal', 'client_certs', '0')
    # Index
    index = input_with_default("What index prefix should jackal use?", config.get('jackal', 'index'))
    config.set('jackal', 'index', index)
    initialize_indices = (input_with_default("Do you want to initialize the indices?", 'y').lower() == 'y')
    # Nmap
    nmap_dir = input_with_default("What directory do you want to place the nmap results in?", config.get('nmap', 'directory'))
    if not os.path.exists(nmap_dir):
        os.makedirs(nmap_dir)
    config.set('nmap', 'directory', nmap_dir)
    nmap_options = input_with_default("What nmap options do you want to set for 'custom' (for example '-p 22,445')?", config.get('nmap', 'options'))
    config.set('nmap', 'options', nmap_options)
    # Nessus
    configure_nessus = (input_with_default("Do you want to setup nessus?", 'n').lower() == 'y')
    if configure_nessus:
        nessus_host = input_with_default("What is the nessus host?", config.get('nessus', 'host'))
        nessus_template = input_with_default("What template should jackal use?", config.get('nessus', 'template_name'))
        nessus_access = input_with_default("What api access key should jackal use?", config.get('nessus', 'access_key'))
        nessus_secret = input_with_default("What api secret key should jackal use?", config.get('nessus', 'secret_key'))
        config.set('nessus', 'host', nessus_host)
        config.set('nessus', 'template_name', nessus_template)
        config.set('nessus', 'access_key', nessus_access)
        config.set('nessus', 'secret_key', nessus_secret)
    # Named pipes
    configure_pipes = (input_with_default("Do you want to setup named pipes?", 'n').lower() == 'y')
    if configure_pipes:
        directory = input_with_default("What directory do you want to place the named pipes in?", config.get('pipes', 'directory'))
        config.set('pipes', 'directory', directory)
        config_file = input_with_default("What is the name of the named pipe config?", config.get('pipes', 'config_file'))
        config.set('pipes', 'config_file', config_file)
        if not os.path.exists(directory):
            create = (input_with_default("Do you want to create the directory?", 'n').lower() == 'y')
            if create:
                os.makedirs(directory)
        # Touch the pipes config file so later reads find it.
        if not os.path.exists(os.path.join(config.config_dir, config_file)):
            f = open(os.path.join(config.config_dir, config_file), 'a')
            f.close()
    config.write_config(initialize_indices)
def add_named_pipe():
    """
    Interactively add one named-pipe section to the pipes config file.
    Aborts with a hint when named pipes were never configured.
    """
    config = Config()
    # NOTE: `pipes_config` first holds the file NAME, then is rebound to a
    # ConfigParser for that file a few lines below.
    pipes_config = config.get('pipes', 'config_file')
    pipes_config_path = os.path.join(config.config_dir, pipes_config)
    if not os.path.exists(pipes_config_path):
        print("First configure named pipes with jk-configure")
        return
    pipes_config = configparser.ConfigParser()
    pipes_config.read(pipes_config_path)
    name = required_input("What is the name of the named pipe? ")
    pipes_config[name] = {}
    object_type = input_with_default("What is the type of the named pipe? Pick from: [host, range, service, user]", 'service')
    pipes_config[name]['type'] = object_type
    # Host/range/service pipes can filter on ports, tags and up-state.
    if object_type in ['host', 'range', 'service']:
        ports = input_with_default("What ports do you want to filter on? Empty for disable", '')
        if ports:
            pipes_config[name]['ports'] = ports
        tags = input_with_default("What tags do you want to filter on? Empty for disable", '')
        if tags:
            pipes_config[name]['tags'] = tags
        up = (input_with_default("Do you want to include only up hosts/services?", 'n').lower() == 'y')
        if up:
            pipes_config[name]['up'] = '1'
    # User pipes filter on group membership instead.
    elif object_type == 'user':
        groups = input_with_default("What group do you want to filter on? Empty for disable", '')
        if groups:
            pipes_config[name]['groups'] = groups
    search = input_with_default("What search query do you want to use?", '')
    if search:
        pipes_config[name]['search'] = search
    unique = (input_with_default("Do you want to only show unique results?", 'n').lower() == 'y')
    if unique:
        pipes_config[name]['unique'] = '1'
    output_format = input_with_default("How do you want the results to be formatted?", '{address}')
    pipes_config[name]['format'] = output_format
    print("Adding new named pipe")
    with open(pipes_config_path, 'w') as f:
        pipes_config.write(f)
class Config(object):
    """
    The class that represents the jackal configuration.
    Reads the config file from the user's home directory, creating it
    from the built-in defaults on first run. Reads fall back to the
    defaults for any missing section/option.
    """

    @property
    def defaults(self):
        """Built-in default values for every known section and option."""
        return {
            'jackal':
            {
                'host': 'localhost',
                'index': 'jackal',
                'use_ssl': '0',
                'client_certs': '0',
                'ca_certs': '',
                'client_cert': '',
                'client_key': '',
            },
            'nessus':
            {
                'host': 'https://localhost:8834',
                'template_name': 'advanced',
                'access_key': '',
                'secret_key': '',
            },
            'pipes':
            {
                'directory': os.getcwd(),
                'config_file': 'pipes.ini'
            },
            'nmap':
            {
                'options': '',
                'directory': os.path.join(self.config_dir, 'nmap'),
            }
        }

    def __init__(self):
        """Load the config file, creating it with defaults on first run."""
        self.config = configparser.ConfigParser()
        if not os.path.exists(self.config_file):
            if not os.path.exists(self.config_dir):
                os.makedirs(self.config_dir)
            self.config.read_dict(self.defaults)
            with open(self.config_file, 'w') as configfile:
                self.config.write(configfile)
        self.config.read(self.config_file)

    def set(self, section, key, value):
        """
        Creates the section value if it does not exists and sets the value.
        Use write_config to actually set the value.
        """
        # Idiom fix: `section not in` instead of `not section in`.
        if section not in self.config:
            self.config.add_section(section)
        self.config.set(section, key, value)

    def get(self, section, key):
        """
        This function tries to retrieve the value from the configfile
        otherwise will return a default.
        """
        try:
            return self.config.get(section, key)
        except (configparser.NoSectionError, configparser.NoOptionError):
            # Unknown section/option: fall through to the defaults below.
            pass
        return self.defaults[section][key]

    @property
    def config_file(self):
        """
        Returns the configuration file name
        """
        config_file = os.path.join(self.config_dir, 'config.ini')
        return config_file

    @property
    def config_dir(self):
        """
        Returns the configuration directory (~/.jackal)
        """
        home = expanduser('~')
        config_dir = os.path.join(home, '.jackal')
        return config_dir

    def write_config(self, initialize_indices=False):
        """
        Write the current config to disk to store them.
        Optionally initializes the per-type Elasticsearch indices.
        """
        if not os.path.exists(self.config_dir):
            # makedirs (not mkdir) for consistency with __init__.
            os.makedirs(self.config_dir)
        with open(self.config_file, 'w') as configfile:
            self.config.write(configfile)
        if initialize_indices:
            index = self.get('jackal', 'index')
            # Imported lazily — presumably to avoid a circular import at
            # module load time; confirm before hoisting to the top of the file.
            from jackal import Host, Range, Service, User, Credential, Log
            from jackal.core import create_connection
            create_connection(self)
            Host.init(index="{}-hosts".format(index))
            Range.init(index="{}-ranges".format(index))
            Service.init(index="{}-services".format(index))
            User.init(index="{}-users".format(index))
            Credential.init(index="{}-creds".format(index))
            Log.init(index="{}-log".format(index))
| 39.234127
| 148
| 0.59472
|
4a11fb62a3439a5bfa28bf234b2690933707d0db
| 75
|
py
|
Python
|
src/__main__.py
|
navneet-arya/Boomerang
|
fc6cba31ed59956b7ab535d60115f8e96a09d02d
|
[
"MIT"
] | 3
|
2020-11-05T09:16:34.000Z
|
2021-04-15T17:29:26.000Z
|
src/__main__.py
|
navneet-arya/Witcher
|
fc6cba31ed59956b7ab535d60115f8e96a09d02d
|
[
"MIT"
] | null | null | null |
src/__main__.py
|
navneet-arya/Witcher
|
fc6cba31ed59956b7ab535d60115f8e96a09d02d
|
[
"MIT"
] | null | null | null |
import sys
from witcher import main
if __name__ == "__main__":
    # Delegate to the witcher CLI entry point when executed as a script.
    main()
| 12.5
| 26
| 0.693333
|
4a11fcab662234682f673e1bc65a25bdb677dcd7
| 7,512
|
py
|
Python
|
modules/unit_tests/s3/s3crud.py
|
andygimma/eden
|
716d5e11ec0030493b582fa67d6f1c35de0af50d
|
[
"MIT"
] | 1
|
2019-08-20T16:32:33.000Z
|
2019-08-20T16:32:33.000Z
|
modules/unit_tests/s3/s3crud.py
|
andygimma/eden
|
716d5e11ec0030493b582fa67d6f1c35de0af50d
|
[
"MIT"
] | null | null | null |
modules/unit_tests/s3/s3crud.py
|
andygimma/eden
|
716d5e11ec0030493b582fa67d6f1c35de0af50d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# S3CRUD Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/tests/unit_tests/modules/s3/s3crud.py
#
import unittest
try:
from cStringIO import StringIO
except:
from StringIO import StringIO
# =============================================================================
class ValidateTests(unittest.TestCase):
    """ Test S3CRUD/validate

    NOTE(review): relies on globals injected by the Eden/web2py test
    runner (current, Storage, s3db) and on Python 2 names (basestring,
    cStringIO) — not runnable stand-alone.
    """
    # -------------------------------------------------------------------------
    def setUp(self):
        s3db = current.s3db
        # Create a fake request
        self.resource = s3db.resource("org_organisation")
        self.request = Storage(prefix="org",
                               name="organisation",
                               resource=self.resource,
                               table=self.resource.table,
                               tablename=self.resource.tablename,
                               method="validate",
                               get_vars=Storage(),
                               representation="json",
                               http="GET")
    # -------------------------------------------------------------------------
    def testValidateMainTable(self):
        """ Test successful main table validation """
        request = self.request
        crud = self.resource.crud
        jsonstr = """{"name":"TestOrganisation", "acronym":"TO"}"""
        request.body = StringIO(jsonstr)
        output = crud.validate(request)
        # validate() returns a JSON string
        self.assertTrue(isinstance(output, basestring))
        from gluon.contrib import simplejson as json
        data = json.loads(output)
        self.assertTrue(isinstance(data, dict))
        self.assertEqual(len(data), 2)
        # Each validated field maps to {"value": ..., "text": ...} on success.
        self.assertTrue("name" in data)
        name = data["name"]
        self.assertTrue(isinstance(name, dict))
        self.assertTrue("value" in name)
        self.assertTrue("text" in name)
        self.assertTrue(isinstance(name["text"], basestring))
        self.assertFalse("_error" in name)
        acronym = data["acronym"]
        self.assertTrue(isinstance(acronym, dict))
        self.assertTrue("value" in acronym)
        self.assertTrue("text" in acronym)
        self.assertTrue(isinstance(acronym["text"], basestring))
        self.assertFalse("_error" in acronym)
    # -------------------------------------------------------------------------
    def testValidateMainTableError(self):
        """ Test error in main table validation """
        request = self.request
        crud = self.resource.crud
        jsonstr = """{"name":"", "acronym":"TO"}"""
        request.body = StringIO(jsonstr)
        output = crud.validate(request)
        self.assertTrue(isinstance(output, basestring))
        from gluon.contrib import simplejson as json
        data = json.loads(output)
        self.assertTrue(isinstance(data, dict))
        self.assertEqual(len(data), 2)
        self.assertTrue("name" in data)
        # Empty name: field carries "_error" instead of "text".
        name = data["name"]
        self.assertTrue(isinstance(name, dict))
        self.assertTrue("value" in name)
        self.assertFalse("text" in name)
        self.assertTrue("_error" in name)
        acronym = data["acronym"]
        self.assertTrue(isinstance(acronym, dict))
        self.assertTrue("value" in acronym)
        self.assertTrue("text" in acronym)
        self.assertTrue(isinstance(acronym["text"], basestring))
        self.assertFalse("_error" in acronym)
    # -------------------------------------------------------------------------
    def testValidateComponentTable(self):
        """ Test successful component validation """
        request = self.request
        crud = self.resource.crud
        jsonstr = """{"name":"TestOffice"}"""
        request.body = StringIO(jsonstr)
        # Validate against the "office" component instead of the main table.
        request.get_vars["component"] = "office"
        output = crud.validate(request)
        self.assertTrue(isinstance(output, basestring))
        from gluon.contrib import simplejson as json
        data = json.loads(output)
        self.assertTrue(isinstance(data, dict))
        self.assertEqual(len(data), 1)
        self.assertTrue("name" in data)
        name = data["name"]
        self.assertTrue(isinstance(name, dict))
        self.assertTrue("value" in name)
        self.assertTrue("text" in name)
        self.assertTrue(isinstance(name["text"], basestring))
        self.assertFalse("_error" in name)
    # -------------------------------------------------------------------------
    def testValidateComponentTableFailure(self):
        """ Test error in component validation """
        request = self.request
        crud = self.resource.crud
        jsonstr = """{"name":"", "acronym":"test"}"""
        request.body = StringIO(jsonstr)
        request.get_vars["component"] = "office"
        output = crud.validate(request)
        self.assertTrue(isinstance(output, basestring))
        from gluon.contrib import simplejson as json
        data = json.loads(output)
        self.assertTrue(isinstance(data, dict))
        self.assertEqual(len(data), 2)
        self.assertTrue("name" in data)
        name = data["name"]
        self.assertTrue(isinstance(name, dict))
        self.assertTrue("value" in name)
        self.assertFalse("text" in name)
        self.assertTrue("_error" in name)
        self.assertTrue("acronym" in data)
        acronym = data["acronym"]
        self.assertTrue(isinstance(acronym, dict))
        self.assertTrue("value" in acronym)
        self.assertFalse("text" in acronym)
        self.assertTrue("_error" in acronym)
    # -------------------------------------------------------------------------
    def testTypeConversionFeature(self):
        """ Check that values get converted into the field type during validation """
        s3db = current.s3db
        # Create a fake request
        resource = s3db.resource("project_organisation")
        request = Storage(prefix="project",
                          name="organisation",
                          resource=resource,
                          table=resource.table,
                          tablename=resource.tablename,
                          method="validate",
                          get_vars=Storage(),
                          representation="json",
                          http="GET")
        crud = resource.crud
        jsonstr = """{"organisation_id":"1", "role":"1"}"""
        request.body = StringIO(jsonstr)
        output = crud.validate(request)
        self.assertTrue(isinstance(output, basestring))
        from gluon.contrib import simplejson as json
        data = json.loads(output)
        self.assertTrue(isinstance(data, dict))
        self.assertEqual(len(data), 2)
        self.assertTrue("role" in data)
        role = data["role"]
        self.assertTrue(isinstance(role, dict))
        self.assertTrue("value" in role)
        # The JSON string "1" must come back as an int after validation.
        self.assertTrue(isinstance(role["value"], int))
# =============================================================================
def run_suite(*test_classes):
""" Run the test suite """
loader = unittest.TestLoader()
suite = unittest.TestSuite()
for test_class in test_classes:
tests = loader.loadTestsFromTestCase(test_class)
suite.addTests(tests)
if suite is not None:
unittest.TextTestRunner(verbosity=2).run(suite)
return
# Run the validation test suite when this file is executed as a script.
if __name__ == "__main__":
    run_suite(
        ValidateTests,
    )
# END ========================================================================
| 35.267606
| 88
| 0.538072
|
4a11fce2c8e99af60098e8967d6807ee57b4a275
| 4,123
|
py
|
Python
|
runtime/python/Tools/scripts/find_recursionlimit.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 207
|
2018-10-01T08:53:01.000Z
|
2022-03-14T12:15:54.000Z
|
runtime/python/Tools/scripts/find_recursionlimit.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 8
|
2019-06-29T14:18:51.000Z
|
2022-02-19T07:30:27.000Z
|
runtime/python/Tools/scripts/find_recursionlimit.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 76
|
2020-03-16T01:47:46.000Z
|
2022-03-21T16:37:07.000Z
|
#! /usr/bin/env python3
"""Find the maximum recursion limit that prevents interpreter termination.
This script finds the maximum safe recursion limit on a particular
platform. If you need to change the recursion limit on your system,
this script will tell you a safe upper bound. To use the new limit,
call sys.setrecursionlimit().
This module implements several ways to create infinite recursion in
Python. Different implementations end up pushing different numbers of
C stack frames, depending on how many calls through Python's abstract
C API occur.
After each round of tests, it prints a message:
"Limit of NNNN is fine".
The highest printed value of "NNNN" is therefore the highest potentially
safe limit for your system (which depends on the OS, architecture, but also
the compilation flags). Please note that it is practically impossible to
test all possible recursion paths in the interpreter, so the results of
this test should not be trusted blindly -- although they give a good hint
of which values are reasonable.
NOTE: When the C stack space allocated by your system is exceeded due
to excessive recursion, exact behaviour depends on the platform, although
the interpreter will always fail in a likely brutal way: either a
segmentation fault, a MemoryError, or just a silent abort.
NB: A program that does not use __methods__ can set a higher limit.
"""
import sys
import itertools
# Each RecursiveBlowup class triggers unbounded recursion through a different
# special method, so each exercises a different C-stack call path.
class RecursiveBlowup1:
    def __init__(self):
        # Constructor calls itself forever.
        self.__init__()
def test_init():
    # Recurse through object construction (__init__).
    return RecursiveBlowup1()
class RecursiveBlowup2:
    def __repr__(self):
        # repr() of self re-enters __repr__ forever.
        return repr(self)
def test_repr():
    # Recurse through repr() / __repr__.
    return repr(RecursiveBlowup2())
class RecursiveBlowup4:
    def __add__(self, x):
        # x + self re-dispatches to __add__ forever.
        return x + self
def test_add():
    # Recurse through the binary + operator (__add__).
    return RecursiveBlowup4() + RecursiveBlowup4()
class RecursiveBlowup5:
    def __getattr__(self, attr):
        # getattr() on a missing attribute re-enters __getattr__ forever.
        return getattr(self, attr)
def test_getattr():
    # Recurse through attribute lookup (__getattr__).
    return RecursiveBlowup5().attr
class RecursiveBlowup6:
    def __getitem__(self, item):
        # Fibonacci-style double recursion through indexing.
        return self[item - 2] + self[item - 1]
def test_getitem():
    # Recurse through item access (__getitem__).
    return RecursiveBlowup6()[5]
def test_recurse():
    # Plain function self-recursion (the simplest call path).
    return test_recurse()
def test_cpickle(_cache={}):
    # NOTE: the mutable default `_cache` is deliberate here — it memoizes,
    # across calls, the nesting depths that already pickled successfully.
    import io
    try:
        import _pickle
    except ImportError:
        print("cannot import _pickle, skipped!")
        return
    k, l = None, None
    for n in itertools.count():
        try:
            l = _cache[n]
            continue  # Already tried and it works, let's save some time
        except KeyError:
            # Deepen the structure by another 100 levels of list/dict nesting.
            for i in range(100):
                l = [k, l]
                k = {i: l}
        # Pickling the deeply nested structure recurses in C; this is where
        # the stack blows up once the structure is deep enough.
        _pickle.Pickler(io.BytesIO(), protocol=-1).dump(l)
        _cache[n] = l
def test_compiler_recursion():
    # The compiler uses a scaling factor to support additional levels
    # of recursion. This is a sanity check of that scaling to ensure
    # it still raises RecursionError even at higher recursion limits
    compile("()" * (10 * sys.getrecursionlimit()), "<single>", "single")
def check_limit(n, test_func_name):
    """Run the named recursion test with the recursion limit set to *n*.

    Prints the test name (minus any leading "test_" prefix), then expects the
    test to blow the stack; prints "Yikes!" if it unexpectedly completes.
    """
    sys.setrecursionlimit(n)
    # Strip the "test_" prefix for display, if present.
    label = test_func_name[5:] if test_func_name.startswith("test_") else test_func_name
    print(label)
    test_func = globals()[test_func_name]
    try:
        test_func()
    # AttributeError can be raised because of the way e.g. PyDict_GetItem()
    # silences all exceptions and returns NULL, which is usually interpreted
    # as "missing attribute".
    except (RecursionError, AttributeError):
        pass
    else:
        print("Yikes!")
if __name__ == '__main__':
    limit = 1000
    # Probe ever-higher limits until the interpreter dies; the last printed
    # "Limit of NNNN is fine" is the highest value observed to be safe.
    while 1:
        check_limit(limit, "test_recurse")
        check_limit(limit, "test_add")
        check_limit(limit, "test_repr")
        check_limit(limit, "test_init")
        check_limit(limit, "test_getattr")
        check_limit(limit, "test_getitem")
        check_limit(limit, "test_cpickle")
        check_limit(limit, "test_compiler_recursion")
        print("Limit of %d is fine" % limit)
        limit = limit + 100
| 31.96124
| 77
| 0.670871
|
4a11feb9ae79deace303ce46e5f30de0d65a419f
| 1,061
|
py
|
Python
|
opsdroid/cli/logs.py
|
JiahnChoi/opsdroid.kr
|
0893456b0f9f6c70edf7c330a7593d87450538cc
|
[
"Apache-2.0"
] | 712
|
2016-08-09T21:30:07.000Z
|
2022-03-24T09:38:21.000Z
|
opsdroid/cli/logs.py
|
JiahnChoi/opsdroid.kr
|
0893456b0f9f6c70edf7c330a7593d87450538cc
|
[
"Apache-2.0"
] | 1,767
|
2016-07-27T13:01:25.000Z
|
2022-03-29T04:25:10.000Z
|
opsdroid/cli/logs.py
|
JiahnChoi/opsdroid.kr
|
0893456b0f9f6c70edf7c330a7593d87450538cc
|
[
"Apache-2.0"
] | 536
|
2016-07-31T14:23:41.000Z
|
2022-03-22T17:35:15.000Z
|
"""The logs subcommand for opsdroid cli."""
import click
import tailer
from opsdroid.const import DEFAULT_LOG_FILENAME
@click.group(invoke_without_command=True)
@click.option("-f", "follow", is_flag=True, help="Print the logs in real time")
@click.pass_context
def logs(ctx, follow):
    """Print the content of the log file into the terminal.

    Opens the opsdroid log file and prints its contents to the terminal.
    With the `-f` flag the command instead follows the log in real time
    (blocking until interrupted, e.g. with CTRL+C).

    Args:
        ctx (:obj:`click.Context`): The current click cli context.
        follow (bool): Set by the `-f` flag to print the logs in real time.

    Returns:
        int: the exit code. Always returns 0 in this case.
    """
    with open(DEFAULT_LOG_FILENAME, "r") as log:
        if follow:
            click.echo("Now following logs in real time, press CTRL+C to stop.")
            # tailer.follow blocks and yields each new line appended to the
            # file; it only returns when interrupted.
            for line in tailer.follow(log):
                click.echo(line)
        # Non-follow mode: dump whatever the file currently contains.
        click.echo(log.read())
        ctx.exit(0)
| 31.205882
| 89
| 0.663525
|
4a11ff40bddb03bd4d44f2d08cf22f63a96fcc26
| 2,931
|
py
|
Python
|
jupiter/domain/metrics/metric.py
|
horia141/jupiter
|
2c721d1d44e1cd2607ad9936e54a20ea254741dc
|
[
"MIT"
] | 15
|
2019-05-05T14:34:58.000Z
|
2022-02-25T09:57:28.000Z
|
jupiter/domain/metrics/metric.py
|
horia141/jupiter
|
2c721d1d44e1cd2607ad9936e54a20ea254741dc
|
[
"MIT"
] | 3
|
2020-02-22T16:09:39.000Z
|
2021-12-18T21:33:06.000Z
|
jupiter/domain/metrics/metric.py
|
horia141/jupiter
|
2c721d1d44e1cd2607ad9936e54a20ea254741dc
|
[
"MIT"
] | null | null | null |
"""A metric."""
from dataclasses import dataclass
from typing import Optional
from jupiter.domain.entity_name import EntityName
from jupiter.domain.metrics.metric_key import MetricKey
from jupiter.domain.metrics.metric_unit import MetricUnit
from jupiter.domain.recurring_task_gen_params import RecurringTaskGenParams
from jupiter.framework.aggregate_root import AggregateRoot
from jupiter.framework.base.entity_id import BAD_REF_ID
from jupiter.framework.base.timestamp import Timestamp
@dataclass()
class Metric(AggregateRoot):
    """A metric."""
    @dataclass(frozen=True)
    class Created(AggregateRoot.Created):
        """Created event."""
    @dataclass(frozen=True)
    class Updated(AggregateRoot.Updated):
        """Updated event."""
    _key: MetricKey  # external identifier, immutable after creation
    _name: EntityName  # human-readable display name
    _collection_params: Optional[RecurringTaskGenParams]  # None = no recurring collection task
    _metric_unit: Optional[MetricUnit]  # None = unitless metric
    @staticmethod
    def new_metric(
            key: MetricKey, name: EntityName, collection_params: Optional[RecurringTaskGenParams],
            metric_unit: Optional[MetricUnit], created_time: Timestamp) -> 'Metric':
        """Create a metric and record its Created event."""
        metric = Metric(
            _ref_id=BAD_REF_ID,  # real id is assigned by the repository on save
            _archived=False,
            _created_time=created_time,
            _archived_time=None,
            _last_modified_time=created_time,
            _events=[],
            _key=key,
            _name=name,
            _collection_params=collection_params,
            _metric_unit=metric_unit)
        # NOTE(review): make_event_from_frame_args presumably captures the
        # caller's frame arguments into the event — confirm in framework code.
        metric.record_event(Metric.Created.make_event_from_frame_args(created_time))
        return metric
    def change_name(self, name: EntityName, modification_time: Timestamp) -> 'Metric':
        """Change the name of the metric."""
        # No-op (and no event recorded) when the name is unchanged.
        if self._name == name:
            return self
        self._name = name
        self.record_event(Metric.Updated.make_event_from_frame_args(modification_time))
        return self
    def change_collection_params(
            self, collection_params: Optional[RecurringTaskGenParams], modification_time: Timestamp) -> 'Metric':
        """Change the collection period of the metric."""
        # No-op (and no event recorded) when the params are unchanged.
        if self._collection_params == collection_params:
            return self
        self._collection_params = collection_params
        self.record_event(Metric.Updated.make_event_from_frame_args(modification_time))
        return self
    @property
    def key(self) -> MetricKey:
        """The key of the metric."""
        return self._key
    @property
    def name(self) -> EntityName:
        """The name of the metric."""
        return self._name
    @property
    def collection_params(self) -> Optional[RecurringTaskGenParams]:
        """The collection parameters of the metric."""
        return self._collection_params
    @property
    def metric_unit(self) -> Optional[MetricUnit]:
        """The metric unit of the metric."""
        return self._metric_unit
| 33.689655
| 113
| 0.683385
|
4a11ffe51f80d835ff06ea68b1bafbcb0f2fe959
| 2,951
|
py
|
Python
|
models/CNN_custom_global.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
models/CNN_custom_global.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
models/CNN_custom_global.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
# Adapted from: https://github.com/castorini/hedwig/tree/master/models/kim_cnn
import torch as T
import torch.nn as nn
import torch.nn.functional as F
from models.modules.custom import routing
class Classifier(nn.Module):
    """Kim-CNN style text classifier with a capsule-routing pooling layer.

    Three parallel 1D convolutions (kernel heights 3/4/5) over word embeddings
    feed a capsule routing module whose output is linearly classified with a
    per-class sigmoid (multi-label output).
    """
    def __init__(self, embeddings, pad_idx, classes_num,
                 config, device):
        super(Classifier, self).__init__()
        trainable_embeddings = config.trainable_embeddings
        # embeddings: pretrained matrix, presumably (vocab, words_dim) — wrap
        # in nn.Parameter only when fine-tuning is enabled.
        if trainable_embeddings:
            self.embeddings = nn.Parameter(T.tensor(embeddings).float().to(device))
        else:
            self.embeddings = T.tensor(embeddings).float().to(device)
        self.embedding_dropout = nn.Dropout(config.embedding_dropout)
        # (vocab, 1) column of ones; dropout on it zeroes whole embedding rows
        # (word-level dropout) in forward().
        self.embedding_ones = T.ones(self.embeddings.size(0), 1).float().to(device)
        self.output_channel = config.output_channel
        words_dim = self.embeddings.size(-1)
        self.ks = 3 # There are three conv nets here
        self.pad_idx = pad_idx
        self.classes_num = classes_num
        self.D = config.D
        self.loss_all = 0.0
        input_channel = 1
        # Kernel heights 3/4/5 over full embedding width; paddings chosen so
        # each conv output has (approximately) the input sequence length.
        self.conv1 = nn.Conv2d(input_channel, self.output_channel, (3, words_dim), padding=(1, 0))
        self.conv2 = nn.Conv2d(input_channel, self.output_channel, (4, words_dim), padding=(2, 0))
        self.conv3 = nn.Conv2d(input_channel, self.output_channel, (5, words_dim), padding=(2, 0))
        self.dropout1 = nn.Dropout(config.dropout)
        # Capsule routing pools per-position conv features into a single
        # 8*D-dim vector per example (see models.modules.custom.routing).
        self.capsulize = routing(D=self.ks*self.output_channel,
                                 n_in=1,
                                 n_out=1,
                                 in_dim=self.D,
                                 out_dim=8*self.D,
                                 device=device)
        self.dropout2 = nn.Dropout(config.dropout)
        self.classifier = nn.Linear(8*self.D, self.classes_num)
    def cnn(self, x):
        # x: (N, S, words_dim) -> add channel dim for Conv2d.
        x = x.unsqueeze(1)
        x = [F.relu(self.conv1(x)).squeeze(3),
             F.relu(self.conv2(x)).squeeze(3),
             F.relu(self.conv3(x)).squeeze(3)]
        # conv2 (kernel 4, pad 2) yields S+1 positions; drop the last one so
        # all three branches align at length S before concatenation.
        x = [x[0], x[1][:, :, 0:-1], x[2]]
        x = T.cat(x, dim=1)
        # (N, 3*output_channel, S) -> (N, S, 3*output_channel)
        x = x.permute(0, 2, 1).contiguous()
        return x
    def forward(self, x, mask):
        # x: (N, S) token ids; mask: (N, S) — presumably 1 for real tokens,
        # 0 for padding (TODO confirm against routing module).
        N, S = x.size()
        max_len = 600
        # Hard truncation to bound memory for very long documents.
        if S > max_len:
            x = x[:, 0:max_len]
            S = max_len
            mask = mask[:, 0:S]
        # Word-level embedding dropout: zero entire rows of the embedding
        # matrix, shared across the batch for this forward pass.
        embeddings_dropout_mask = self.embedding_dropout(self.embedding_ones)
        dropped_embeddings = self.embeddings*embeddings_dropout_mask
        x = F.embedding(x, dropped_embeddings, padding_idx=self.pad_idx)
        x = self.cnn(x)
        x = x.view(N, S, self.ks*self.output_channel)
        x = self.dropout1(x)
        capsule_out = self.capsulize(x, mask).view(N, -1)
        #capsule_out = self.dropout2(capsule_out)
        logit = self.classifier(capsule_out)
        logit = logit.view(N, self.classes_num)
        # Independent per-class sigmoid (multi-label), not softmax.
        logit = T.sigmoid(logit)
        return logit
| 31.393617
| 98
| 0.584209
|
4a1200112c2ee742bb9b9a7360ab5a60cfdbe118
| 699
|
py
|
Python
|
200914/02.lists.py
|
Inclementia/python-basics
|
0b327ad7ef121d5d5483b092907b970ec19adf9c
|
[
"MIT"
] | null | null | null |
200914/02.lists.py
|
Inclementia/python-basics
|
0b327ad7ef121d5d5483b092907b970ec19adf9c
|
[
"MIT"
] | null | null | null |
200914/02.lists.py
|
Inclementia/python-basics
|
0b327ad7ef121d5d5483b092907b970ec19adf9c
|
[
"MIT"
] | null | null | null |
# Lesson script: compute the average of a list of student marks.

# # data read (interactive version, kept for reference)
# student_marks = []
# while True:
#     mark = input('enter a student mark:\n')
#     if mark:
#         student_marks.append(int(mark))
#     else:
#         break
#
# print('input finished')
# print(student_marks)

# mock_student_marks = ['5', '4', '3', '2', '5'] # wrong mock: strings, not ints
mock_student_marks = [5, 4, 3, 2, 5]  # correct mock
student_marks = mock_student_marks

# data processing: the builtin sum() replaces the manual while/for
# accumulation loops from the original lesson notes — same result,
# one obvious line. (Assumes a non-empty list, as the mock guarantees.)
avg_mark = sum(student_marks) / len(student_marks)
print('средний балл', avg_mark)
| 22.548387
| 62
| 0.635193
|
4a12005ef66725aa2c52c76b12ef45a6ed1cc29e
| 694
|
py
|
Python
|
xmltok/setup.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | 2
|
2021-02-24T13:59:02.000Z
|
2021-04-06T22:25:03.000Z
|
xmltok/setup.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | null | null | null |
xmltok/setup.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | 3
|
2021-02-24T12:16:11.000Z
|
2022-01-04T08:35:59.000Z
|
# Packaging script for the micropython-xmltok distribution (uploaded to PyPI
# via the custom sdist_upip command).
import sys
# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup
# The shared sdist_upip helper lives one directory up in micropython-lib.
sys.path.append("..")
import sdist_upip
setup(name='micropython-xmltok',
      version='0.2',
      description='xmltok module for MicroPython',
      long_description='Simple XML tokenizer',
      url='https://github.com/micropython/micropython-lib',
      author='Paul Sokolovsky',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      # Custom sdist that produces micropython-style compressed packages.
      cmdclass={'sdist': sdist_upip.sdist},
      py_modules=['xmltok'])
| 33.047619
| 73
| 0.707493
|
4a12017bbce2d8cfb854353b562d9235c3de7816
| 16,993
|
py
|
Python
|
tensorflow/compiler/tf2xla/python/xla.py
|
anonymous-313/tensorflow
|
b82785818b6b020d62340eaaece32b9c75858185
|
[
"Apache-2.0"
] | 9
|
2019-06-05T06:48:07.000Z
|
2020-09-29T07:08:02.000Z
|
tensorflow/compiler/tf2xla/python/xla.py
|
anonymous-313/tensorflow
|
b82785818b6b020d62340eaaece32b9c75858185
|
[
"Apache-2.0"
] | 2
|
2021-11-10T20:21:47.000Z
|
2022-02-10T04:12:28.000Z
|
tensorflow/compiler/tf2xla/python/xla.py
|
anonymous-313/tensorflow
|
b82785818b6b020d62340eaaece32b9c75858185
|
[
"Apache-2.0"
] | 3
|
2019-06-28T02:28:27.000Z
|
2021-07-06T08:16:19.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental library that exposes XLA operations directly in TensorFlow.
It is sometimes useful to be able to build HLO programs directly from
TensorFlow. This file provides Tensorflow operators that mirror the semantics of
HLO operators as closely as possible.
Note: There is no promise of backward or forward compatibility for operators
defined in this module. This is primarily because the underlying HLO operators
do not promise backward or forward compatibility.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.compiler.tf2xla.ops import gen_xla_ops
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import bitwise_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_random_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import special_math_ops
# TODO(phawkins): provide wrappers for all XLA operators. Currently the missing
# ops include:
# infeed/outfeed (available via tf.contrib.tpu)
# collectives, e.g., cross-replica-sum (available via tf.contrib.tpu)
# conditional
# gather/scatter
# collapse
# This file reuses builtin names (following XLA's names, so we can call things
# like xla.max), so we capture the builtin versions here.
# pylint: disable=redefined-builtin
# Stash the shadowed builtins under private names so this module can still
# call them internally after `max`, `min`, `slice` etc. are redefined below.
_max = max
_min = min
_slice = slice # pylint: disable=invalid-name
constant = constant_op.constant
# Unary operators.
# For most arithmetic operators there is a TensorFlow operator
# that exactly corresponds to each XLA operator. Rather than defining
# XLA-specific variants, we reuse the corresponding TensorFlow operator.
# TODO(phawkins): It would be even better to have TensorFlow operators that 1:1
# wrap every HLO operator, because that would allow us to be confident that the
# semantics match.
def _unary_op(fn):
"""Wrapper that restricts `fn` to have the correct signature."""
def unary_op_wrapper(x, name=None):
return fn(x, name=name)
return unary_op_wrapper
# Unary element-wise aliases: each XLA unary op maps 1:1 onto an existing
# TensorFlow op with the same semantics.
abs = _unary_op(math_ops.abs)
# TODO(phawkins): implement clz.
conj = _unary_op(math_ops.conj)
cos = _unary_op(math_ops.cos)
ceil = _unary_op(math_ops.ceil)
digamma = _unary_op(math_ops.digamma)
erf = _unary_op(math_ops.erf)
erfc = _unary_op(math_ops.erfc)
erfinv = _unary_op(math_ops.erfinv)
ndtri = _unary_op(math_ops.ndtri)
exp = _unary_op(math_ops.exp)
expm1 = _unary_op(math_ops.expm1)
floor = _unary_op(math_ops.floor)
imag = _unary_op(math_ops.imag)
is_finite = _unary_op(math_ops.is_finite)
lgamma = _unary_op(math_ops.lgamma)
log = _unary_op(math_ops.log)
log1p = _unary_op(math_ops.log1p)
logical_not = _unary_op(math_ops.logical_not)
neg = _unary_op(math_ops.neg)
real = _unary_op(math_ops.real)
# TODO(phawkins): unlike xla::Round, this rounds to even instead of zero for
# numbers halfway between two integers.
round = _unary_op(math_ops.round)
sin = _unary_op(math_ops.sin)
sign = _unary_op(math_ops.sign)
tanh = _unary_op(math_ops.tanh)
# Bessel
bessel_i0e = _unary_op(special_math_ops.bessel_i0e)
bessel_i1e = _unary_op(special_math_ops.bessel_i1e)
# Binary operators
# The main difference between TensorFlow and XLA binary ops is the broadcasting
# semantics. TensorFlow uses Numpy-style broadcasting semantics, whereas XLA
# requires an explicit specification of which dimensions to broadcast if the
# arguments have different ranks.
def _broadcasting_binary_op(fn):
  """Wraps a binary Tensorflow operator and performs XLA-style broadcasting.

  The returned wrapper takes `(x, y, broadcast_dims=None, name=None)`, where
  `broadcast_dims` lists the dimensions of the higher-rank operand that the
  lower-rank operand's dimensions map onto (XLA semantics).
  """
  def broadcasting_binary_op_wrapper(x, y, broadcast_dims=None, name=None):
    """Inner wrapper function."""
    broadcast_dims = broadcast_dims or []
    broadcast_dims = ops.convert_to_tensor(broadcast_dims, dtypes.int64)
    # Rather than relying on having static shape information in the TensorFlow
    # graph, we use an XlaBroadcastHelper op that can compute the correct shapes
    # at JIT compilation time.
    x, y = gen_xla_ops.xla_broadcast_helper(x, y, broadcast_dims)
    return fn(x, y, name=name)
  return broadcasting_binary_op_wrapper
# Map from TF signed types to TF unsigned types.
_SIGNED_TO_UNSIGNED_TABLE = {
    dtypes.int8: dtypes.uint8,
    dtypes.int16: dtypes.uint16,
    dtypes.int32: dtypes.uint32,
    dtypes.int64: dtypes.uint64,
}
# Map from TF unsigned types to TF signed types.
_UNSIGNED_TO_SIGNED_TABLE = {
    dtypes.uint8: dtypes.int8,
    dtypes.uint16: dtypes.int16,
    dtypes.uint32: dtypes.int32,
    dtypes.uint64: dtypes.int64,
}
# TF's right_shift follows the operand's signedness (logical for unsigned,
# arithmetic for signed); the two helpers below cast through the opposite
# signedness to force the desired shift behavior for any integer input type.
def _shift_right_logical_helper(x, y, name=None):
  """Performs an integer right logical shift irrespective of input type."""
  assert y.dtype == x.dtype
  dtype = x.dtype
  signed = dtype in _SIGNED_TO_UNSIGNED_TABLE
  if signed:
    unsigned_dtype = _SIGNED_TO_UNSIGNED_TABLE[dtype]
    x = math_ops.cast(x, unsigned_dtype)
    y = math_ops.cast(y, unsigned_dtype)
  output = bitwise_ops.right_shift(x, y, name=name)
  if signed:
    # Cast back so the result dtype matches the input dtype.
    output = math_ops.cast(output, dtype)
  return output
def _shift_right_arithmetic_helper(x, y, name=None):
  """Performs an integer right arithmetic shift irrespective of input type."""
  assert y.dtype == x.dtype
  dtype = x.dtype
  unsigned = dtype in _UNSIGNED_TO_SIGNED_TABLE
  if unsigned:
    signed_dtype = _UNSIGNED_TO_SIGNED_TABLE[dtype]
    x = math_ops.cast(x, signed_dtype)
    y = math_ops.cast(y, signed_dtype)
  output = bitwise_ops.right_shift(x, y, name=name)
  if unsigned:
    # Cast back so the result dtype matches the input dtype.
    output = math_ops.cast(output, dtype)
  return output
# Binary element-wise aliases; all take `(x, y, broadcast_dims=None, name=None)`
# with XLA-style explicit broadcasting (see _broadcasting_binary_op).
add = _broadcasting_binary_op(math_ops.add)
sub = _broadcasting_binary_op(math_ops.sub)
mul = _broadcasting_binary_op(math_ops.mul)
div = _broadcasting_binary_op(math_ops.div)
rem = _broadcasting_binary_op(gen_math_ops.mod)
max = _broadcasting_binary_op(math_ops.maximum)
min = _broadcasting_binary_op(math_ops.minimum)
atan2 = _broadcasting_binary_op(math_ops.atan2)
complex = _broadcasting_binary_op(math_ops.complex)
logical_and = _broadcasting_binary_op(math_ops.logical_and)
logical_or = _broadcasting_binary_op(math_ops.logical_or)
logical_xor = _broadcasting_binary_op(math_ops.logical_xor)
eq = _broadcasting_binary_op(math_ops.equal)
ne = _broadcasting_binary_op(math_ops.not_equal)
ge = _broadcasting_binary_op(math_ops.greater_equal)
gt = _broadcasting_binary_op(math_ops.greater)
le = _broadcasting_binary_op(math_ops.less_equal)
lt = _broadcasting_binary_op(math_ops.less)
pow = _broadcasting_binary_op(math_ops.pow)
shift_left = _broadcasting_binary_op(bitwise_ops.left_shift)
shift_right_logical = _broadcasting_binary_op(_shift_right_logical_helper)
shift_right_arithmetic = _broadcasting_binary_op(_shift_right_arithmetic_helper)
igamma = _broadcasting_binary_op(math_ops.igamma)
igamma_grad_a = _broadcasting_binary_op(gen_math_ops.igamma_grad_a)
random_gamma_grad = _broadcasting_binary_op(gen_random_ops.random_gamma_grad)
igammac = _broadcasting_binary_op(math_ops.igammac)
polygamma = _broadcasting_binary_op(math_ops.polygamma)
zeta = _broadcasting_binary_op(math_ops.zeta)
def _binary_op(fn):
  """Wrapper that restricts `fn` to have the correct signature."""
  def binary_op_wrapper(x, y, name=None):
    return fn(x, y, name=name)
  return binary_op_wrapper
transpose = _binary_op(array_ops.transpose)
rev = _binary_op(array_ops.reverse)
bitcast_convert_type = array_ops.bitcast
def broadcast(x, dims, name=None):
  """Prepends the dimensions `dims` to the shape of `x` (XLA Broadcast)."""
  x = ops.convert_to_tensor(x)
  shape = array_ops.concat([constant_op.constant(dims),
                            array_ops.shape(x)],
                           axis=0)
  return array_ops.broadcast_to(x, shape, name=name)
def clamp(a, x, b, name=None):
  """Clamps `x` element-wise to the range [a, b] (XLA Clamp)."""
  return min(max(a, x, name=name), b, name=name)
concatenate = array_ops.concat
def conv(lhs,
         rhs,
         window_strides,
         padding,
         lhs_dilation,
         rhs_dilation,
         dimension_numbers,
         feature_group_count=1,
         precision_config=None,
         name=None):
  """Wraps the XLA ConvGeneralDilated operator.
  ConvGeneralDilated is the most general form of XLA convolution and is
  documented at
  https://www.tensorflow.org/performance/xla/operation_semantics#conv_convolution
  Args:
    lhs: the input tensor
    rhs: the kernel tensor
    window_strides: the inter-window strides
    padding: the padding to apply at the start and end of each input dimensions
    lhs_dilation: dilation to apply between input elements
    rhs_dilation: dilation to apply between kernel elements
    dimension_numbers: a `ConvolutionDimensionNumbers` proto.
    feature_group_count: number of feature groups for grouped convolution.
    precision_config: a `xla.PrecisionConfig` proto.
    name: an optional name for the operator
  Returns:
    A tensor representing the output of the convolution.
  """
  # Protos are passed across the op boundary as serialized strings; an empty
  # string means "no precision config".
  precision_config_proto = ""
  if precision_config:
    precision_config_proto = precision_config.SerializeToString()
  return gen_xla_ops.xla_conv(
      lhs,
      rhs,
      window_strides=window_strides,
      padding=padding,
      lhs_dilation=lhs_dilation,
      rhs_dilation=rhs_dilation,
      feature_group_count=feature_group_count,
      dimension_numbers=dimension_numbers.SerializeToString(),
      precision_config=precision_config_proto,
      name=name)
convert_element_type = math_ops.cast
def dot(lhs, rhs, name=None):
  """Wraps XLA Dot: contraction over one axis (matrix/vector product)."""
  return math_ops.tensordot(lhs, rhs, axes=1, name=name)
def dot_general(lhs, rhs, dimension_numbers, precision_config=None, name=None):
  """Wraps XLA DotGeneral; `dimension_numbers` is a DotDimensionNumbers proto."""
  precision_config_proto = ""
  if precision_config:
    precision_config_proto = precision_config.SerializeToString()
  return gen_xla_ops.xla_dot(
      lhs,
      rhs,
      dimension_numbers=dimension_numbers.SerializeToString(),
      precision_config=precision_config_proto,
      name=name)
def self_adjoint_eig(a, lower, max_iter, epsilon):
  """Wraps the XLA self-adjoint eigendecomposition operator."""
  return gen_xla_ops.xla_self_adjoint_eig(a, lower, max_iter, epsilon)
def svd(a, max_iter, epsilon, precision_config=None):
  """Wraps the XLA SVD operator."""
  precision_config_proto = ""
  if precision_config:
    precision_config_proto = precision_config.SerializeToString()
  return gen_xla_ops.xla_svd(a, max_iter, epsilon, precision_config_proto)
dynamic_slice = gen_xla_ops.xla_dynamic_slice
dynamic_update_slice = gen_xla_ops.xla_dynamic_update_slice
einsum = gen_xla_ops.xla_einsum
# TODO(phawkins): generalize tf.pad to support interior padding, and then remove
# the XLA-specific pad operator.
pad = gen_xla_ops.xla_pad
def random_normal(mu, sigma, dims, name=None):
  """Normal random values of shape `dims`; dtype follows `mu`."""
  mu = ops.convert_to_tensor(mu)
  return random_ops.random_normal(
      dims, mean=mu, stddev=sigma, dtype=mu.dtype, name=name)
def random_uniform(minval, maxval, dims, name=None):
  """Uniform random values in [minval, maxval) of shape `dims`."""
  minval = ops.convert_to_tensor(minval)
  return random_ops.random_uniform(
      dims, minval, maxval, dtype=minval.dtype, name=name)
recv = gen_xla_ops.xla_recv
reduce = gen_xla_ops.xla_reduce
variadic_reduce = gen_xla_ops.xla_variadic_reduce
ops.no_gradient("XlaVariadicReduce")
def reduce_window(operand,
                  init,
                  reducer,
                  window_dimensions,
                  window_strides=None,
                  base_dilations=None,
                  window_dilations=None,
                  padding=None,
                  name=None):
  """Wraps the XLA ReduceWindow operator.
  ReduceWindow is documented at
  https://www.tensorflow.org/performance/xla/operation_semantics#reducewindow .
  Args:
    operand: the input tensor
    init: a scalar tensor representing the initial value for the reduction
    reducer: a reduction function that combines a pair of scalars.
    window_dimensions: shape of the window, as a list of integers
    window_strides: inter-window strides, as a list of integers. Optional; if
      omitted, defaults to strides of 1.
    base_dilations: dilation to apply between input elements, as a list of
      integers. Optional; if omitted, defaults to no dilation (all 1s).
    window_dilations: dilation to apply between window elements, as a list of
      integers. Optional; if omitted, defaults to no dilation (all 1s).
    padding: padding to apply to 'operand'. List of (low, high) pairs of
      integers that specify the padding to apply before and after each
      dimension. Optional; if omitted, defaults to no padding.
    name: the operator name, or None.
  Returns:
    A tensor that represents the output of the reduce_window operator.
  """
  # Fill in the documented defaults for every omitted optional argument.
  window_strides = window_strides or [1] * len(window_dimensions)
  base_dilations = base_dilations or [1] * len(window_dimensions)
  window_dilations = window_dilations or [1] * len(window_dimensions)
  padding = padding or [(0, 0)] * len(window_dimensions)
  return gen_xla_ops.xla_reduce_window(
      input=operand,
      init_value=init,
      window_dimensions=window_dimensions,
      window_strides=window_strides,
      base_dilations=base_dilations,
      window_dilations=window_dilations,
      padding=padding,
      computation=reducer,
      name=name)
replica_id = gen_xla_ops.xla_replica_id
# Set a static bound for the given input value as a hint to Xla compiler,
# returns the same value.
# Usage:
# def f(t, p):
#   p = xla.set_bound(p, 3) # Tells xla the constraint that p <= 3.
#   return t[:p] # xla knows the bound of the slice is 3.
set_bound = gen_xla_ops.xla_set_bound
# Make a static dimension into a xla bounded dynamic dimension. The current
# static dimension size will become the bound and the second operand becomes the
# dynamic size of the dimension.
#
# This should mostly be used for testing.
#
# def f():
#   array = tf.convert_to_tensor([[1, 2, 3, 4, 5]])
#   # Tells xla the valid size of the array is 3.
#   dim = 0
#   p = xla_set_dynamic_dimension_size(array, dim, 3)
#   assert(reduce_sum(p) == 6) # xla knows only the first 3 elements are valid.
set_dynamic_dimension_size = gen_xla_ops.xla_set_dynamic_dimension_size
def reshape(x, new_sizes, dimensions=None, name=None):
  """XLA Reshape: optionally transposes by `dimensions`, then reshapes."""
  if dimensions is not None:
    x = array_ops.transpose(x, dimensions)
  x = array_ops.reshape(x, new_sizes, name=name)
  return x
def select(condition, x, y, name=None):
  """XLA Select: element-wise `x` where `condition` is true, else `y`."""
  return array_ops.where(condition, x, y, name)
select_and_scatter = gen_xla_ops.xla_select_and_scatter
send = gen_xla_ops.xla_send
def slice(x, start_dims, limit_dims, strides):
  """XLA Slice: per-dimension (start, limit, stride) static slicing."""
  # _slice is the captured builtin `slice` (the name is shadowed here).
  spec = [
      _slice(start, limit, stride)
      for (start, limit, stride) in zip(start_dims, limit_dims, strides)
  ]
  return x[tuple(spec)]
sharding = gen_xla_ops.xla_sharding
@ops.RegisterGradient("XlaSharding")
def _sharding_grad(op, grad):
  """Gradient of XlaSharding: re-applies the same sharding to the gradient."""
  sharding_attr = op.get_attr("sharding")
  grad_sharding = gen_xla_ops.xla_sharding(grad, sharding=sharding_attr)
  # pylint: disable=protected-access
  # Also tag the gradient op itself with the sharding attribute so the
  # compiler bridge picks it up.
  grad_sharding.op._set_attr("_XlaSharding",
                             attr_value_pb2.AttrValue(s=sharding_attr))
  return [grad_sharding]
spmd_full_to_shard_shape = gen_xla_ops.xla_spmd_full_to_shard_shape
spmd_shard_to_full_shape = gen_xla_ops.xla_spmd_shard_to_full_shape
@ops.RegisterGradient("XlaSpmdFullToShardShape")
def _spmd_full_to_shard_shape_grad(op, grad):
  """Gradient of full->shard reshaping: the inverse shard->full op."""
  s2f = gen_xla_ops.xla_spmd_shard_to_full_shape(
      grad,
      manual_sharding=op.get_attr("manual_sharding"),
      full_shape=op.inputs[0].shape.as_list())
  return [s2f]
@ops.RegisterGradient("XlaSpmdShardToFullShape")
def _spmd_shard_to_full_shape_grad(op, grad):
  """Gradient of shard->full reshaping: the inverse full->shard op."""
  f2s = gen_xla_ops.xla_spmd_full_to_shard_shape(
      grad, manual_sharding=op.get_attr("manual_sharding"))
  return [f2s]
sort = gen_xla_ops.xla_sort
key_value_sort = gen_xla_ops.xla_key_value_sort
variadic_sort = gen_xla_ops.xla_variadic_sort
while_loop = gen_xla_ops.xla_while
dequantize = gen_xla_ops.xla_dequantize
def gather(operand, start_indices, dimension_numbers, slice_sizes,
           indices_are_sorted=False, name=None):
  """Wraps XLA Gather; `dimension_numbers` is a GatherDimensionNumbers proto."""
  return gen_xla_ops.xla_gather(
      operand,
      start_indices,
      slice_sizes=slice_sizes,
      dimension_numbers=dimension_numbers.SerializeToString(),
      indices_are_sorted=indices_are_sorted,
      name=name)
def scatter(operand, scatter_indices, updates, update_computation,
            dimension_numbers, indices_are_sorted=False, name=None):
  """Wraps XLA Scatter; `dimension_numbers` is a ScatterDimensionNumbers proto."""
  return gen_xla_ops.xla_scatter(
      operand,
      scatter_indices,
      updates,
      update_computation=update_computation,
      dimension_numbers=dimension_numbers.SerializeToString(),
      indices_are_sorted=indices_are_sorted,
      name=name)
| 33.7833
| 81
| 0.7569
|
4a12023489009d092f293805e331424f50ae339b
| 2,616
|
py
|
Python
|
ava/commands/set_user_title.py
|
MCYBA/m-cyba
|
9051674a0e5192e0c8f0235f725311baadd187ec
|
[
"MIT"
] | 1
|
2020-04-23T14:18:16.000Z
|
2020-04-23T14:18:16.000Z
|
ava/commands/set_user_title.py
|
MCYBA/m-cyba
|
9051674a0e5192e0c8f0235f725311baadd187ec
|
[
"MIT"
] | null | null | null |
ava/commands/set_user_title.py
|
MCYBA/m-cyba
|
9051674a0e5192e0c8f0235f725311baadd187ec
|
[
"MIT"
] | 1
|
2021-02-09T22:27:30.000Z
|
2021-02-09T22:27:30.000Z
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: set_user_title
:platform: Unix
:synopsis: the top-level submodule of Dragonfire.commands that contains the classes related to Dragonfire's simple if-else struct of setting
user title ability.
.. moduleauthors:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
Cem Baybars GÜÇLÜ <cem.baybars@gmail.com>
"""
from tinydb import Query # TinyDB is a lightweight document oriented database optimized for your happiness
class SetUserTitleCommands():
    """Commands for setting the user's title/gender, as a simple if-else chain.
    """
    def compare(self, doc, h, args, userin, config_file):
        """Match the parsed command against the set-user-title patterns.

        Args:
            doc: spaCy doc of the command from __init__.py
            h: doc helper from __init__.py
            userin: :class:`ava.utilities.TextToAction` instance.
            args: Command-line arguments.
            config_file: virtual assistant config file (TinyDB instance)

        Returns:
            The spoken response on a match, otherwise None.
        """
        # "I am a lady/woman/girl": store female gender, drop any custom title.
        if h.check_lemma("be") and h.check_lemma("-PRON-") and (
                h.check_lemma("lady") or h.check_lemma("woman") or h.check_lemma("girl")):
            config_file.update({'gender': 'female'}, Query().datatype == 'gender')
            config_file.remove(Query().datatype == 'callme')
            user_prefix = "my lady"
            return userin.say("Pardon, " + user_prefix + ".")
        # "I am a sir/man/boy": store male gender, drop any custom title.
        if h.check_lemma("be") and h.check_lemma("-PRON-") and (
                h.check_lemma("sir") or h.check_lemma("man") or h.check_lemma("boy")):
            config_file.update({'gender': 'male'}, Query().datatype == 'gender')
            config_file.remove(Query().datatype == 'callme')
            user_prefix = "sir"
            return userin.say("Pardon, " + user_prefix + ".")
        # "Call me X": collect the nouns of the command as the custom title.
        if h.check_lemma("call") and h.check_lemma("-PRON-"):
            title = ""
            for token in doc:
                if token.pos_ == "NOUN":
                    title += ' ' + token.text
            title = title.strip()
            # Only persist the title outside server mode; upsert the
            # 'callme' record in the TinyDB config.
            if not args["server"]:
                callme_config = config_file.search(Query().datatype == 'callme')
                if callme_config:
                    config_file.update({'title': title}, Query().datatype == 'callme')
                else:
                    config_file.insert({'datatype': 'callme', 'title': title})
            user_prefix = title
            return userin.say("OK, " + user_prefix + ".")
        # No pattern matched: let the caller try other command handlers.
        return None
| 44.338983
| 144
| 0.559251
|
4a1203aec46a82e5760742c1b8a16105464a99cd
| 12,374
|
py
|
Python
|
vtscan.py
|
JavaScriptDude/vtscan
|
790d9a0e8b9beb5cb53400e4f6e600cf18b66fa7
|
[
"MIT"
] | 1
|
2020-03-07T06:35:45.000Z
|
2020-03-07T06:35:45.000Z
|
vtscan.py
|
JavaScriptDude/vtscan
|
790d9a0e8b9beb5cb53400e4f6e600cf18b66fa7
|
[
"MIT"
] | null | null | null |
vtscan.py
|
JavaScriptDude/vtscan
|
790d9a0e8b9beb5cb53400e4f6e600cf18b66fa7
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
#########################################
# .: vtscan :.
# Verifies a file using VirusTotal API
# .: install dependencies :.
# python3 -m pip install -r requirements.txt
# .: Sample :.
# export VT_API_KEY=<virus_total_api_key>
# .: usage :.
# vtscan <path_to_file>
# -or-
# python3 vtscan.py <path_to_file>
# .: deployment :.
# # put vtscan.py in a folder on your computer by hand or using git
# % alias vtscan="python3 <path_to_vtscan_folder>/vtscan.py"
# .: Other :.
# Author: Timothy C. Quinn
# Home: https://github.com/JavaScriptDude/vtscan
# Licence: https://opensource.org/licenses/MIT
# .: Todo :.
# [.] Get Hyperlink working
# [.] Add CLI Arg for GUI
#########################################
import os, sys, json, hashlib, traceback, pathlib, argparse, subprocess, shutil, qrcode, tempfile, webbrowser, pyperclip, time
from virus_total_apis import PublicApi as VirusTotalPublicApi
from PySide2.QtCore import QObject
from PySide2.QtQml import QQmlApplicationEngine
from PySide2.QtWidgets import QApplication
# True when running under a Windows CPython build (only Windows exposes
# sys.getwindowsversion).
_is_windows = hasattr(sys, 'getwindowsversion')
def main():
    """Command-line entry point for vtscan.

    Hashes the target file (md5/sha1/sha256), queries the VirusTotal public
    API for an existing report keyed on the sha256 digest, prints a summary
    plus any warnings, and optionally opens the report in a browser and/or
    shows the Qt GUI.

    Exits early (via the module-level ``exit`` helper) when VT_API_KEY is
    unset/empty or the target file does not exist.
    """
    argp = argparse.ArgumentParser(prog="vtscan")
    argp.add_argument("--verbose", "-v", action='store_true')
    argp.add_argument("--nogui", "-n", action='store_true')
    argp.add_argument("--links", "-L", action='store_true')
    argp.add_argument("--browser", "-b", type=str, help="Browser to launch for Virus Total Info or other searches")
    argp.add_argument("file", type=str, help="File to scan")
    args = argp.parse_args()
    api_call_failed : bool = False
    got_results : bool = False
    result_issues : int = -1
    warnings : list = []
    # BUGFIX: initialize res so the GUI branch below cannot raise NameError
    # when the API call failed before res was assigned.
    res = None
    _script_name, _script_path = splitPath(os.path.realpath(__file__))
    _qr_png_path = None
    # Check for Api key
    if "VT_API_KEY" not in os.environ:
        argp.print_help()
        exit("\nMissing Virus total API Key. Please set VT_API_KEY environment variable!", 1)
    API_KEY=os.environ["VT_API_KEY"]
    if API_KEY.strip() == "":
        argp.print_help()
        exit("\nMissing Virus total API Key. Please set VT_API_KEY environment variable!", 1)
    # Verify that file exists
    if not os.path.isfile(args.file):
        argp.print_help()
        exit("\nPlease specify path to an existing file", 1)
    # Get args.file (first arg)
    fname, fpath = splitPath(args.file)
    # Get checksums of file; sha256 is the VirusTotal lookup key.
    digest_md5 = getChecksum(args.file, 'md5')
    digest_sha1 = getChecksum(args.file, 'sha1')
    digest_sha256 = getChecksum(args.file, 'sha256')
    vt = VirusTotalPublicApi(API_KEY)
    response = vt.get_file_report(digest_sha256)
    if not 'response_code' in response:
        api_call_failed = True
        warnings.append("Call to Virus Total API Failed")
        if 'error' in response:
            err_msg = response['error']
            if err_msg.find("Max retries exceeded with url") > -1:
                warnings.append("Please check your network connection")
    elif not response['response_code'] == 200:
        api_call_failed = True
        warnings.append("Bad general response_code from Virus Total")
    if not api_call_failed: # Dig into the results...
        res = response['results']
        if not res['response_code'] == 1:
            if res['verbose_msg'] == 'The requested resource is not among the finished, queued or pending scans':
                warnings.append("File not found in VirusTotal database. Therefore its safety is unknown.")
                warnings.append("Alternate verifications may be required")
            else:
                api_call_failed = True
                # BUGFIX: the {} placeholder was never filled in.
                warnings.append("Bad result response_code from virus total: {}".format(res['response_code']))
        if len(warnings) == 0:
            # Lets be paranoid and verify the checksums found
            if not res['md5'] == digest_md5:
                warnings.append("MD5 Checksums do not match:\n - Original: {}\n - Virus Total: {}".format(digest_md5, res['md5']))
            if not res['sha1'] == digest_sha1:
                warnings.append("SHA1 Checksums do not match:\n - Original: {}\n - Virus Total: {}".format(digest_sha1, res['sha1']))
            if not res['sha256'] == digest_sha256:
                warnings.append("SHA256 Checksums do not match:\n - Original: {}\n - Virus Total: {}".format(digest_sha256, res['sha256']))
            got_results = True
            result_issues = res['positives']
    if api_call_failed or args.verbose:
        print(".: Raw Virus Total Response :.\n" + json.dumps(response, sort_keys=False, indent=4) + "\n")
    print("""
.: Details :.
- md5: {0}
- sha1: {1}
- sha256: {2}""".format(digest_md5, digest_sha1, digest_sha256) )
    if got_results:
        print("- Permalink: " + res['permalink'])
        if not args.nogui:
            # Encode the permalink as a QR code image for the GUI.
            img = qrcode.make(res['permalink'])
            # Get Temp dir (windows only)
            _tmp_dir = None
            if _is_windows:
                if 'TEMP' not in os.environ:
                    raise Exception("TEMP Variable does not exist in users environment! cannot continue.")
                # BUGFIX: was `_tmp_dir = API_KEY=os.environ["TEMP"]`, which
                # silently clobbered API_KEY with the temp-dir path.
                _tmp_dir = os.environ["TEMP"]
                if not os.path.isdir(_tmp_dir):
                    raise Exception(f"TEMP dir does not exist: {_tmp_dir}")
            # Create Temp File name (dir=None lets tempfile pick the default).
            _tf = tempfile.NamedTemporaryFile(suffix='.png', prefix='_qrcode_', dir=_tmp_dir)
            _qr_png_path = _tf.name
            _tf.close()
            # Saving as an image file
            img.save(_qr_png_path)
    print("\n.: File :.\n- File: {0}\n- Path: {1}".format(fname, fpath) )
    if got_results:
        print("\n.: Virus Total Summary :.")
        if result_issues == 0:
            print("- Detections: 0 out of {} (100% pass)".format(res['total']))
        else:
            print("- Detections: {} out of {} (Go to VirusTotal for more details)".format(result_issues, res['total']))
    if len(warnings) > 0:
        print("\n.: Warnings :.")
        for warning in warnings:
            print("- {}".format(warning))
    if not api_call_failed and args.browser:
        exe = shutil.which(args.browser)
        if exe is None:
            print("\n Note: Browser not launched executable not found: " + args.browser)
        else:
            if not got_results:
                print("""
 Signature not found in Virus Total so will search in google
 and bing for hash signatures. If no results are found,
 it is strongly recommended to take care with this file.""")
            else:
                print("\n Note: Launching Virus Total in " + args.browser)
            # NOTE(review): despite the message above, only the permalink URL
            # is opened; when got_results is False res may lack 'permalink' —
            # confirm the hash-search URLs were intended here.
            urls = [res['permalink']]
            for url in urls:
                cmd = [exe, url]
                subprocess.Popen(cmd, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
    if not api_call_failed and not got_results:
        print("""
 If this is an installer, executable, or other file that does not contain
 personal information (for example not a zip archive of personal files),
 you may want to consider uploading to VirusTotal to do a deep scan at:
 - https://www.virustotal.com/gui/home/upload""")
    if not args.nogui:
        # Hand everything the GUI needs over in a plain attribute bag.
        vtdata = VTData()
        vtdata.fname = fname
        vtdata.fpath = fpath
        vtdata.digest_md5 = digest_md5
        vtdata.digest_sha1 = digest_sha1
        vtdata.digest_sha256 = digest_sha256
        vtdata.got_results = got_results
        vtdata.result_issues = result_issues
        vtdata.qr_png_path = _qr_png_path
        vtdata.permalink = res['permalink'] if res and 'permalink' in res else '-'
        vtdata.total = res['total'] if res and 'total' in res else '-'
        vtdata.script_path = _script_path
        app = MyQtApp(vtdata)
        app.start()
class VTData():
    """Plain attribute bag carrying scan results from main() to the Qt GUI.

    Attributes (fname, fpath, digests, permalink, etc.) are assigned by the
    caller after construction.
    """
    def __init__(self):
        pass
class MyQtApp():
    """Qt/QML front end that renders the VirusTotal scan results.

    Loads ``vtscan.qml`` from the script directory, fills in the labels from
    the supplied :class:`VTData`, and wires the permalink click handler.
    """
    def __init__(self, vtdata:VTData):
        self._vtdata = vtdata
        self._app = QApplication([])
        self._engine = QQmlApplicationEngine()
        # Constants exposed to QML under the context name "C".
        _constants = {
            'Labels': {
                'Indent': 140
            }
        }
        self._engine.rootContext().setContextProperty("C", _constants)
        qml = None
        # Read the QML source, strip newlines per line and re-join, then feed
        # the bytes to the engine (loadData expects a bytes object).
        with open(f'{vtdata.script_path}/vtscan.qml') as f:
            lines = [ line.strip('\n') for line in list(f) ]
            qml = str.encode('\n'.join(lines))
        self._engine.loadData(qml)
        root_obj = self._engine.rootObjects()
        if not len(root_obj) == 1:
            raise Exception("Issue Parsing QML. Exiting Program")
        self._win = root_obj[0]
        # Populate the static file/digest fields.
        self.setText("txtFile", vtdata.fname)
        self.setText("txtPath", vtdata.fpath)
        self.setText("txtMd5", vtdata.digest_md5)
        self.setText("txtSha1", vtdata.digest_sha1)
        self.setText("txtSha256", vtdata.digest_sha256)
        if vtdata.got_results:
            self.setText("txtLink", f"""<a href="{vtdata.permalink}">VirusTotal.com</a>""")
            # NOTE(review): this value is immediately overwritten by the
            # branch below — looks like leftover debug code; confirm.
            self.setText("txtRes", vtdata.digest_sha256)
            if vtdata.result_issues == 0:
                self.setText("txtRes", "Detections: 0 out of {} (100% pass)".format(vtdata.total))
            else:
                self.setText("txtRes", "Detections: {} out of {} (Go to VirusTotal for more details)".format(vtdata.result_issues, vtdata.total))
            # Point the QML Image element at the generated QR png. Windows
            # paths are rewritten for the file:// URL scheme.
            o = self._win.findChild(QObject, "qrcode")
            _qr_png_path_c = vtdata.qr_png_path.replace('\\', '/').replace(':', '::') if _is_windows else vtdata.qr_png_path
            o.setProperty('source', f'file://{_qr_png_path_c}')
            # Link signal in qml to python event
            self._win.maTxtLink_click.connect(self.maTxtLink_click)
            # self._maTxtLink = self._win.findChild(QObject, 'maTxtLink')
            # self._maTxtLink.clicked.connect(self.maTxtLink_click)
            # self._maTxtLink.pressed.connect(self.maTxtLink_pressed)
        else:
            self.setText("txtLink", 'n/a')
            self.setText("txtRes", "Not Registered in VirusTotal")
    def start(self):
        """Enter the Qt event loop; blocks until the window is closed."""
        self._app.exec_()
    def setText(self, name, value):
        """Set the ``text`` property on the named QML child object."""
        o = self._win.findChild(QObject, name)
        o.setProperty("text", value)
    def maTxtLink_click(self, right_click):
        """Right click copies the permalink; left click opens it in a browser."""
        if right_click:
            self.setText('txtStatusBar', "Permalink copied to clipboard")
            pyperclip.copy(self._vtdata.permalink)
        else:
            webbrowser.open(self._vtdata.permalink)
    def maTxtLink_pressed(self):
        # NOTE(review): debug leftover; not connected anywhere above.
        print("HERE")
def Foobar():
    """Leftover debug stub; prints a marker and returns None."""
    print("HERE")
def getChecksum(path, csumtype):
    """Return the hex digest of the file at *path*.

    csumtype must be one of 'md5', 'sha1' or 'sha256'; anything else raises
    Exception. The file is read unbuffered in 128 KiB chunks through a
    reusable memoryview so large files never fully load into memory.
    """
    factories = {'md5': hashlib.md5, 'sha1': hashlib.sha1, 'sha256': hashlib.sha256}
    if csumtype not in factories:
        raise Exception("Unexpected csumtype: {}".format(csumtype))
    digest = factories[csumtype]()
    chunk = bytearray(128 * 1024)
    view = memoryview(chunk)
    with open(path, 'rb', buffering=0) as fh:
        while True:
            count = fh.readinto(view)
            if count == 0:
                break
            digest.update(view[:count])
    return digest.hexdigest()
def exit(s, exitCode=1):
    """Print an optional message plus a '~' terminator, flush both standard
    streams, and terminate the process with *exitCode*.

    Note: intentionally shadows the builtin ``exit``; kept for compatibility
    with existing call sites in this script.
    """
    if s is not None:
        print(s)
    print('~')
    sys.stdout.flush()
    sys.stderr.flush()
    sys.exit(exitCode)
def splitPath(s):
    """Split path *s* into (basename, parent directory).

    The parent is returned as an absolute, forward-slash path via
    getAbsPath/toPosixPath.
    """
    base = os.path.basename(s)
    parent = s[:-(len(base)) - 1]
    return base, toPosixPath(getAbsPath(parent))
def toPosixPath(s:str, strip_slash:bool=False, ensure_slash:bool=False):
    """Normalize *s* to forward slashes, trimming surrounding whitespace.

    strip_slash removes a single trailing '/', ensure_slash appends one if
    missing; when both flags apply, strip_slash wins.
    """
    normalized = s.strip().replace('\\', '/')
    has_trailing = normalized[-1:] == '/'
    if strip_slash and has_trailing:
        return normalized[:-1]
    if ensure_slash and not has_trailing:
        return normalized + '/'
    return normalized
def getAbsPath(s:str):
    """Expand a leading '~' in *s* and return its absolute form."""
    expanded = pathlib.Path(s).expanduser()
    return os.path.abspath(expanded)
if __name__ == '__main__':
    iExit = 0
    try:
        main()
    except Exception:
        # Route the full traceback through exit() so the trailing '~'
        # terminator is still printed and a non-zero code is returned.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        aTB = traceback.format_tb(exc_traceback)
        exit("Program Exception:\nStack:\n{}\n Error: {} - {}".format('\n'.join(aTB), exc_type.__name__, exc_value), exitCode=1)
    sys.exit(iExit)
| 33.263441
| 145
| 0.598513
|
4a12044422e52e01d2a3d30d80cb298e815d623f
| 48,192
|
py
|
Python
|
opensfm/dataset.py
|
whuaegeanse/OpenSfM
|
1c261fbd1330f9a4483597ceb5cb1098fcdbd97f
|
[
"BSD-2-Clause"
] | null | null | null |
opensfm/dataset.py
|
whuaegeanse/OpenSfM
|
1c261fbd1330f9a4483597ceb5cb1098fcdbd97f
|
[
"BSD-2-Clause"
] | null | null | null |
opensfm/dataset.py
|
whuaegeanse/OpenSfM
|
1c261fbd1330f9a4483597ceb5cb1098fcdbd97f
|
[
"BSD-2-Clause"
] | null | null | null |
import gzip
import json
import logging
import os
import pickle
from abc import ABC, abstractmethod
from io import BytesIO
from typing import Dict, List, Tuple, Optional, IO, Any
import cv2
import numpy as np
from opensfm import config, features, geo, io, upright, pygeometry, types, pymap
logger = logging.getLogger(__name__)
class DataSetBase(ABC):
    """Base for dataset classes providing i/o access to persistent data.

    Pure abstract interface covering images/masks/segmentations, EXIF,
    features, matches, tracks, reconstructions, reference coordinates,
    cameras, rigs, and reports. It is possible to store data remotely or
    in different formats by subclassing this class and overloading its
    methods.
    """
    # --- filesystem / configuration ---
    @property
    @abstractmethod
    def io_handler(self) -> io.IoFilesystemBase:
        pass
    @property
    @abstractmethod
    def config(self) -> Dict[str, Any]:
        pass
    # --- images, masks, segmentations ---
    @abstractmethod
    def images(self) -> List[str]:
        pass
    @abstractmethod
    def open_image_file(self, image: str) -> IO[Any]:
        pass
    @abstractmethod
    def load_image(
        self,
        image: str,
        unchanged: bool = False,
        anydepth: bool = False,
        grayscale: bool = False,
    ) -> np.ndarray:
        pass
    @abstractmethod
    def image_size(self, image: str) -> Tuple[int, int]:
        pass
    @abstractmethod
    def load_mask(self, image: str) -> Optional[np.ndarray]:
        pass
    @abstractmethod
    def load_features_mask(self, image: str, points: np.ndarray) -> np.ndarray:
        pass
    @abstractmethod
    def load_instances(self, image: str) -> Optional[np.ndarray]:
        pass
    @abstractmethod
    def segmentation_labels(self) -> List[Any]:
        pass
    @abstractmethod
    def load_segmentation(self, image: str) -> Optional[np.ndarray]:
        pass
    @abstractmethod
    def segmentation_ignore_values(self, image: str) -> List[int]:
        pass
    @abstractmethod
    def mask_from_segmentation(
        self, segmentation: np.ndarray, ignore_values: List[int]
    ) -> np.ndarray:
        pass
    @abstractmethod
    def combine_masks(
        self, mask: Optional[np.ndarray], smask: Optional[np.ndarray]
    ) -> Optional[np.ndarray]:
        pass
    # --- EXIF metadata ---
    @abstractmethod
    def load_exif(self, image: str) -> Dict[str, Any]:
        pass
    @abstractmethod
    def save_exif(self, image: str, data: Dict[str, Any]) -> None:
        pass
    @abstractmethod
    def exif_exists(self, image: str) -> bool:
        pass
    # --- local features and bag-of-words ---
    @abstractmethod
    def feature_type(self) -> str:
        pass
    @abstractmethod
    def features_exist(self, image: str) -> bool:
        pass
    @abstractmethod
    def load_features(self, image: str) -> Optional[features.FeaturesData]:
        pass
    @abstractmethod
    def save_features(self, image: str, features_data: features.FeaturesData) -> None:
        pass
    @abstractmethod
    def words_exist(self, image: str) -> bool:
        pass
    @abstractmethod
    def load_words(self, image: str) -> np.ndarray:
        pass
    @abstractmethod
    def save_words(self, image: str, words: np.ndarray) -> None:
        pass
    # --- pairwise matches ---
    @abstractmethod
    def matches_exists(self, image: str) -> bool:
        pass
    @abstractmethod
    def load_matches(self, image: str) -> Dict[str, np.ndarray]:
        pass
    @abstractmethod
    def save_matches(self, image: str, matches: Dict[str, np.ndarray]) -> None:
        pass
    # --- tracks and reconstructions ---
    @abstractmethod
    def load_tracks_manager(
        self, filename: Optional[str] = None
    ) -> pymap.TracksManager:
        pass
    @abstractmethod
    def save_tracks_manager(
        self, tracks_manager: pymap.TracksManager, filename: Optional[str] = None
    ) -> None:
        pass
    @abstractmethod
    def load_reconstruction(
        self, filename: Optional[str] = None
    ) -> List[types.Reconstruction]:
        pass
    @abstractmethod
    def save_reconstruction(
        self,
        reconstruction: List[types.Reconstruction],
        filename: Optional[str] = None,
        minify=False,
    ) -> None:
        pass
    # --- geographic reference ---
    @abstractmethod
    def invent_reference_lla(
        self, images: Optional[List[str]] = None
    ) -> Dict[str, float]:
        pass
    @abstractmethod
    def load_reference(self) -> geo.TopocentricConverter:
        pass
    @abstractmethod
    def reference_lla_exists(self) -> bool:
        pass
    # --- camera models and overrides ---
    @abstractmethod
    def load_camera_models(self) -> Dict[str, pygeometry.Camera]:
        pass
    @abstractmethod
    def save_camera_models(self, camera_models: Dict[str, pygeometry.Camera]) -> None:
        pass
    @abstractmethod
    def camera_models_overrides_exists(self) -> bool:
        pass
    @abstractmethod
    def load_camera_models_overrides(self) -> Dict[str, pygeometry.Camera]:
        pass
    @abstractmethod
    def save_camera_models_overrides(
        self, camera_models: Dict[str, pygeometry.Camera]
    ) -> None:
        pass
    @abstractmethod
    def exif_overrides_exists(self) -> bool:
        pass
    @abstractmethod
    def load_exif_overrides(self) -> Dict[str, Any]:
        pass
    # --- camera rigs ---
    @abstractmethod
    def load_rig_cameras(
        self,
    ) -> Dict[str, pymap.RigCamera]:
        pass
    @abstractmethod
    def save_rig_cameras(self, rig_cameras: Dict[str, pymap.RigCamera]) -> None:
        pass
    @abstractmethod
    def load_rig_assignments(self) -> List[List[Tuple[str, str]]]:
        pass
    @abstractmethod
    def load_rig_assignments_per_image(
        self,
    ) -> Dict[str, Tuple[int, str, List[str]]]:
        pass
    @abstractmethod
    def save_rig_assignments(self, rig_assignments: List[List[Tuple[str, str]]]):
        pass
    # --- profiling, reports and ground control points ---
    # TODO(pau): switch this to save_profile_log
    @abstractmethod
    def profile_log(self) -> str:
        pass
    @abstractmethod
    def load_report(self, path: str) -> str:
        pass
    @abstractmethod
    def save_report(self, report_str: str, path: str) -> None:
        pass
    @abstractmethod
    def load_ground_control_points(
        self,
    ) -> List[pymap.GroundControlPoint]:
        pass
class DataSet(DataSetBase):
"""Accessors to the main input and output data.
Data include input images, masks, and segmentation as well
temporary data such as features and matches and the final
reconstructions.
All data is stored inside a single folder with a specific subfolder
structure.
It is possible to store data remotely or in different formats
by subclassing this class and overloading its methods.
"""
io_handler: io.IoFilesystemBase = io.IoFilesystemDefault()
config = None
image_files: Dict[str, str] = {}
mask_files: Dict[str, str] = {}
image_list: List[str] = []
    def __init__(self, data_path: str, io_handler=io.IoFilesystemDefault):
        """Init dataset associated to a folder.

        Loads config, the image list and the mask list eagerly.
        """
        # NOTE(review): the default here is the *class* io.IoFilesystemDefault,
        # whereas the class attribute default is an instance — callers appear
        # to pass an instance; confirm the intended default.
        self.io_handler = io_handler
        self.data_path = data_path
        self.load_config()
        self.load_image_list()
        self.load_mask_list()
def _config_file(self):
return os.path.join(self.data_path, "config.yaml")
def load_config(self):
config_file_path = self._config_file()
if self.io_handler.isfile(config_file_path):
with self.io_handler.open(config_file_path) as f:
self.config = config.load_config_from_fileobject(f)
else:
self.config = config.default_config()
def _image_list_file(self):
return os.path.join(self.data_path, "image_list.txt")
def load_image_list(self):
"""Load image list from image_list.txt or list images/ folder."""
image_list_file = self._image_list_file()
if self.io_handler.isfile(image_list_file):
with self.io_handler.open_rt(image_list_file) as fin:
lines = fin.read().splitlines()
self._set_image_list(lines)
else:
self._set_image_path(os.path.join(self.data_path, "images"))
def images(self):
"""List of file names of all images in the dataset."""
return self.image_list
def _image_file(self, image: str):
"""Path to the image file."""
return self.image_files[image]
def open_image_file(self, image: str) -> IO[Any]:
"""Open image file and return file object."""
return self.io_handler.open(self._image_file(image), "rb")
def load_image(
self,
image: str,
unchanged: bool = False,
anydepth: bool = False,
grayscale: bool = False,
) -> np.ndarray:
"""Load image pixels as numpy array.
The array is 3D, indexed by y-coord, x-coord, channel.
The channels are in RGB order.
"""
return self.io_handler.imread(
self._image_file(image),
unchanged=unchanged,
anydepth=anydepth,
grayscale=grayscale,
)
def image_size(self, image: str) -> Tuple[int, int]:
"""Height and width of the image."""
return self.io_handler.image_size(self._image_file(image))
def load_mask_list(self) -> None:
"""Load mask list from mask_list.txt or list masks/ folder."""
mask_list_file = os.path.join(self.data_path, "mask_list.txt")
if self.io_handler.isfile(mask_list_file):
with self.io_handler.open_rt(mask_list_file) as fin:
lines = fin.read().splitlines()
self._set_mask_list(lines)
else:
self._set_mask_path(os.path.join(self.data_path, "masks"))
def load_mask(self, image: str) -> Optional[np.ndarray]:
"""Load image mask if it exists, otherwise return None."""
if image in self.mask_files:
mask_path = self.mask_files[image]
mask = self.io_handler.imread(mask_path, grayscale=True)
if mask is None:
raise IOError(
"Unable to load mask for image {} "
"from file {}".format(image, mask_path)
)
else:
mask = None
return mask
def load_features_mask(self, image: str, points: np.ndarray) -> np.ndarray:
"""Load a feature-wise mask.
This is a binary array true for features that lie inside the
combined mask.
The array is all true when there's no mask.
"""
if points is None or len(points) == 0:
return np.array([], dtype=bool)
mask_image = self.load_combined_mask(image)
if mask_image is None:
logger.debug("No segmentation for {}, no features masked.".format(image))
return np.ones((points.shape[0],), dtype=bool)
exif = self.load_exif(image)
width = exif["width"]
height = exif["height"]
orientation = exif["orientation"]
new_height, new_width = mask_image.shape
ps = upright.opensfm_to_upright(
points[:, :2],
width,
height,
orientation,
new_width=new_width,
new_height=new_height,
).astype(int)
mask = mask_image[ps[:, 1], ps[:, 0]]
n_removed = np.sum(mask == 0)
logger.debug(
"Masking {} / {} ({:.2f}) features for {}".format(
n_removed, len(mask), n_removed / len(mask), image
)
)
return np.array(mask, dtype=bool)
def _detection_path(self) -> str:
return os.path.join(self.data_path, "detections")
def _detection_file(self, image: str) -> str:
return os.path.join(self._detection_path(), image + ".png")
def load_detection(self, image: str) -> Optional[np.ndarray]:
"""Load image detection if it exists, otherwise return None."""
detection_file = self._detection_file(image)
if self.io_handler.isfile(detection_file):
detection = self.io_handler.imread(detection_file, grayscale=True)
else:
detection = None
return detection
def _instances_path(self) -> str:
return os.path.join(self.data_path, "instances")
def _instances_file(self, image: str) -> str:
return os.path.join(self._instances_path(), image + ".png")
def load_instances(self, image: str) -> Optional[np.ndarray]:
"""Load image instances file if it exists, otherwise return None."""
instances_file = self._instances_file(image)
if self.io_handler.isfile(instances_file):
instances = self.io_handler.imread(instances_file, grayscale=True)
else:
instances = None
return instances
def _segmentation_path(self) -> str:
return os.path.join(self.data_path, "segmentations")
def _segmentation_file(self, image: str) -> str:
return os.path.join(self._segmentation_path(), image + ".png")
def segmentation_labels(self) -> List[Any]:
return []
def load_segmentation(self, image: str) -> Optional[np.ndarray]:
"""Load image segmentation if it exists, otherwise return None."""
segmentation_file = self._segmentation_file(image)
if self.io_handler.isfile(segmentation_file):
segmentation = self.io_handler.imread(segmentation_file, grayscale=True)
else:
segmentation = None
return segmentation
def segmentation_ignore_values(self, image: str) -> List[int]:
"""List of label values to ignore.
Pixels with this labels values will be masked out and won't be
processed when extracting features or computing depthmaps.
"""
return self.config.get("segmentation_ignore_values", [])
def load_segmentation_mask(self, image: str) -> Optional[np.ndarray]:
"""Build a mask from segmentation ignore values.
The mask is non-zero only for pixels with segmentation
labels not in segmentation_ignore_values.
"""
ignore_values = self.segmentation_ignore_values(image)
if not ignore_values:
return None
segmentation = self.load_segmentation(image)
if segmentation is None:
return None
return self.mask_from_segmentation(segmentation, ignore_values)
def mask_from_segmentation(
self, segmentation: np.ndarray, ignore_values: List[int]
) -> np.ndarray:
mask = np.ones(segmentation.shape, dtype=np.uint8)
for value in ignore_values:
mask &= segmentation != value
return mask
def load_combined_mask(self, image: str) -> Optional[np.ndarray]:
"""Combine binary mask with segmentation mask.
Return a mask that is non-zero only where the binary
mask and the segmentation mask are non-zero.
"""
mask = self.load_mask(image)
smask = self.load_segmentation_mask(image)
return self.combine_masks(mask, smask)
def combine_masks(
self, mask: Optional[np.ndarray], smask: Optional[np.ndarray]
) -> Optional[np.ndarray]:
if mask is None:
if smask is None:
return None
else:
return smask
else:
if smask is None:
return mask
else:
mask, smask = self._resize_masks_to_match(mask, smask)
return mask & smask
    def _resize_masks_to_match(
        self,
        im1: np.ndarray,
        im2: np.ndarray,
    ) -> Tuple[np.ndarray, np.ndarray]:
        # Bring both masks to a common size; nearest-neighbour keeps label
        # values intact.
        # NOTE(review): max() compares the shape tuples lexicographically,
        # not element-wise, so for masks with conflicting aspect ratios the
        # target may not dominate both axes — confirm inputs always share
        # orientation.
        h, w = max(im1.shape, im2.shape)
        if im1.shape != (h, w):
            im1 = cv2.resize(im1, (w, h), interpolation=cv2.INTER_NEAREST)
        if im2.shape != (h, w):
            im2 = cv2.resize(im2, (w, h), interpolation=cv2.INTER_NEAREST)
        return im1, im2
def _is_image_file(self, filename: str) -> bool:
extensions = {"jpg", "jpeg", "png", "tif", "tiff", "pgm", "pnm", "gif"}
return filename.split(".")[-1].lower() in extensions
def _set_image_path(self, path: str) -> None:
"""Set image path and find all images in there"""
self.image_list = []
self.image_files = {}
if self.io_handler.exists(path):
for name in self.io_handler.ls(path):
if self._is_image_file(name):
self.image_list.append(name)
self.image_files[name] = os.path.join(path, name)
def _set_image_list(self, image_list: List[str]) -> None:
self.image_list = []
self.image_files = {}
for line in image_list:
path = os.path.join(self.data_path, line)
name = os.path.basename(path)
self.image_list.append(name)
self.image_files[name] = path
def _set_mask_path(self, path: str) -> None:
"""Set mask path and find all masks in there"""
self.mask_files = {}
if self.io_handler.isdir(path):
files = set(self.io_handler.ls(path))
for image in self.images():
mask = image + ".png"
if mask in files:
self.mask_files[image] = os.path.join(path, mask)
def _set_mask_list(self, mask_list_lines: List[str]) -> None:
self.mask_files = {}
for line in mask_list_lines:
image, relpath = line.split(None, 1)
path = os.path.join(self.data_path, relpath.strip())
self.mask_files[image.strip()] = path
def _exif_path(self) -> str:
"""Return path of extracted exif directory"""
return os.path.join(self.data_path, "exif")
def _exif_file(self, image: str) -> str:
"""
Return path of exif information for given image
:param image: Image name, with extension (i.e. 123.jpg)
"""
return os.path.join(self._exif_path(), image + ".exif")
def load_exif(self, image: str) -> Dict[str, Any]:
"""Load pre-extracted image exif metadata."""
with self.io_handler.open_rt(self._exif_file(image)) as fin:
return json.load(fin)
def save_exif(self, image: str, data: Dict[str, Any]) -> None:
self.io_handler.mkdir_p(self._exif_path())
with self.io_handler.open_wt(self._exif_file(image)) as fout:
io.json_dump(data, fout)
def exif_exists(self, image: str) -> bool:
return self.io_handler.isfile(self._exif_file(image))
def feature_type(self) -> str:
"""Return the type of local features (e.g. AKAZE, SURF, SIFT)"""
feature_name = self.config["feature_type"].lower()
if self.config["feature_root"]:
feature_name = "root_" + feature_name
return feature_name
def _feature_path(self) -> str:
"""Return path of feature descriptors and FLANN indices directory"""
return os.path.join(self.data_path, "features")
def _feature_file(self, image: str) -> str:
"""
Return path of feature file for specified image
:param image: Image name, with extension (i.e. 123.jpg)
"""
return os.path.join(self._feature_path(), image + ".features.npz")
def _feature_file_legacy(self, image: str) -> str:
"""
Return path of a legacy feature file for specified image
:param image: Image name, with extension (i.e. 123.jpg)
"""
return os.path.join(self._feature_path(), image + ".npz")
def _save_features(
self, filepath: str, features_data: features.FeaturesData
) -> None:
self.io_handler.mkdir_p(self._feature_path())
with self.io_handler.open(filepath, "wb") as fwb:
features_data.save(fwb, self.config)
def features_exist(self, image: str) -> bool:
return self.io_handler.isfile(
self._feature_file(image)
) or self.io_handler.isfile(self._feature_file_legacy(image))
def load_features(self, image: str) -> Optional[features.FeaturesData]:
features_filepath = (
self._feature_file_legacy(image)
if self.io_handler.isfile(self._feature_file_legacy(image))
else self._feature_file(image)
)
with self.io_handler.open(features_filepath, "rb") as f:
return features.FeaturesData.from_file(f, self.config)
def save_features(self, image: str, features_data: features.FeaturesData) -> None:
self._save_features(self._feature_file(image), features_data)
def _words_file(self, image: str) -> str:
return os.path.join(self._feature_path(), image + ".words.npz")
def words_exist(self, image: str) -> bool:
return self.io_handler.isfile(self._words_file(image))
def load_words(self, image: str) -> np.ndarray:
with self.io_handler.open(self._words_file(image), "rb") as f:
s = np.load(f)
return s["words"].astype(np.int32)
def save_words(self, image: str, words: np.ndarray) -> None:
with self.io_handler.open(self._words_file(image), "wb") as f:
np.savez_compressed(f, words=words.astype(np.uint16))
def _matches_path(self) -> str:
"""Return path of matches directory"""
return os.path.join(self.data_path, "matches")
def _matches_file(self, image: str) -> str:
"""File for matches for an image"""
return os.path.join(self._matches_path(), "{}_matches.pkl.gz".format(image))
def matches_exists(self, image: str) -> bool:
return self.io_handler.isfile(self._matches_file(image))
def load_matches(self, image: str) -> Dict[str, np.ndarray]:
with self.io_handler.open(self._matches_file(image), "rb") as fin:
matches = pickle.load(BytesIO(gzip.decompress(fin.read())))
return matches
def save_matches(self, image: str, matches: Dict[str, np.ndarray]) -> None:
self.io_handler.mkdir_p(self._matches_path())
with BytesIO() as buffer:
with gzip.GzipFile(fileobj=buffer, mode="w") as fzip:
# pyre-fixme[6]: Expected `IO[bytes]` for 2nd param but got `GzipFile`.
pickle.dump(matches, fzip)
with self.io_handler.open(self._matches_file(image), "wb") as fw:
fw.write(buffer.getvalue())
def find_matches(self, im1: str, im2: str) -> np.ndarray:
if self.matches_exists(im1):
im1_matches = self.load_matches(im1)
if im2 in im1_matches:
return im1_matches[im2]
if self.matches_exists(im2):
im2_matches = self.load_matches(im2)
if im1 in im2_matches:
if len(im2_matches[im1]):
return im2_matches[im1][:, [1, 0]]
return np.array([])
def _tracks_manager_file(self, filename: Optional[str] = None) -> str:
"""Return path of tracks file"""
return os.path.join(self.data_path, filename or "tracks.csv")
def load_tracks_manager(
self, filename: Optional[str] = None
) -> pymap.TracksManager:
"""Return the tracks manager"""
with self.io_handler.open(self._tracks_manager_file(filename), "r") as f:
return pymap.TracksManager.instanciate_from_string(f.read())
def tracks_exists(self, filename: Optional[str] = None) -> bool:
return self.io_handler.isfile(self._tracks_manager_file(filename))
def save_tracks_manager(
self, tracks_manager: pymap.TracksManager, filename: Optional[str] = None
) -> None:
with self.io_handler.open(self._tracks_manager_file(filename), "w") as fw:
fw.write(tracks_manager.as_string())
def _reconstruction_file(self, filename: Optional[str]) -> str:
"""Return path of reconstruction file"""
return os.path.join(self.data_path, filename or "reconstruction.json")
def reconstruction_exists(self, filename: Optional[str] = None) -> bool:
return self.io_handler.isfile(self._reconstruction_file(filename))
def load_reconstruction(
    self, filename: Optional[str] = None
) -> List[types.Reconstruction]:
    """Load the list of reconstructions from a JSON reconstruction file."""
    with self.io_handler.open_rt(self._reconstruction_file(filename)) as fin:
        reconstructions = io.reconstructions_from_json(io.json_load(fin))
    return reconstructions
def save_reconstruction(
    self,
    reconstruction: List[types.Reconstruction],
    filename: Optional[str] = None,
    minify: bool = False,
) -> None:
    """Serialize reconstructions to JSON (compact output when *minify*)."""
    with self.io_handler.open_wt(self._reconstruction_file(filename)) as fout:
        io.json_dump(io.reconstructions_to_json(reconstruction), fout, minify)
def _reference_lla_path(self) -> str:
return os.path.join(self.data_path, "reference_lla.json")
def invent_reference_lla(
    self, images: Optional[List[str]] = None
) -> Dict[str, float]:
    """Derive and persist a reference lat/lon from image GPS metadata.

    Latitude and longitude are averaged over the given images (all
    images by default), weighting each by the inverse of its GPS DOP.
    When no image carries usable GPS, ground control points are
    averaged (unweighted) instead.  The stored altitude is
    deliberately fixed to 0 regardless of the computed average.
    """
    lat, lon, alt = 0.0, 0.0, 0.0
    wlat, wlon, walt = 0.0, 0.0, 0.0
    if images is None:
        images = self.images()
    for image in images:
        d = self.load_exif(image)
        if "gps" in d and "latitude" in d["gps"] and "longitude" in d["gps"]:
            # Weight by inverse dilution of precision, clamped to avoid
            # division by ~0; missing DOP defaults to 15.
            w = 1.0 / max(0.01, d["gps"].get("dop", 15))
            lat += w * d["gps"]["latitude"]
            lon += w * d["gps"]["longitude"]
            wlat += w
            wlon += w
            if "altitude" in d["gps"]:
                alt += w * d["gps"]["altitude"]
                walt += w
    if not wlat and not wlon:
        # No usable GPS data: fall back to ground control points.
        for gcp in self.load_ground_control_points_impl(None):
            lat += gcp.lla["latitude"]
            lon += gcp.lla["longitude"]
            wlat += 1
            wlon += 1
            if gcp.has_altitude:
                alt += gcp.lla["altitude"]
                walt += 1
    if wlat:
        lat /= wlat
    if wlon:
        lon /= wlon
    if walt:
        alt /= walt
    reference = {
        "latitude": lat,
        "longitude": lon,
        "altitude": 0,
    }  # Set altitude manually.
    self.save_reference_lla(reference)
    return reference
def save_reference_lla(self, reference: Dict[str, float]) -> None:
    """Write the lat/lon/alt reference dictionary as JSON."""
    with self.io_handler.open_wt(self._reference_lla_path()) as fout:
        io.json_dump(reference, fout)
def load_reference_lla(self) -> Dict[str, float]:
    """Read the lat/lon/alt reference dictionary from JSON."""
    with self.io_handler.open_rt(self._reference_lla_path()) as fin:
        return io.json_load(fin)
def load_reference(self) -> geo.TopocentricConverter:
    """Load reference as a topocentric converter.

    NOTE(review): fails if reference_lla.json is absent — callers can
    guard with reference_lla_exists().
    """
    lla = self.load_reference_lla()
    return geo.TopocentricConverter(
        lla["latitude"], lla["longitude"], lla["altitude"]
    )
def reference_lla_exists(self) -> bool:
    """Check whether the reference LLA file has been written."""
    return self.io_handler.isfile(self._reference_lla_path())
def _camera_models_file(self) -> str:
"""Return path of camera model file"""
return os.path.join(self.data_path, "camera_models.json")
def load_camera_models(self) -> Dict[str, pygeometry.Camera]:
    """Return camera models data, keyed by camera identifier."""
    with self.io_handler.open_rt(self._camera_models_file()) as fin:
        obj = json.load(fin)
        return io.cameras_from_json(obj)
def save_camera_models(self, camera_models: Dict[str, pygeometry.Camera]) -> None:
    """Save camera models data as JSON."""
    with self.io_handler.open_wt(self._camera_models_file()) as fout:
        obj = io.cameras_to_json(camera_models)
        io.json_dump(obj, fout)
def _camera_models_overrides_file(self) -> str:
    """Return the path to the camera model overrides file."""
    return os.path.join(self.data_path, "camera_models_overrides.json")
def camera_models_overrides_exists(self) -> bool:
    """Check if the camera model overrides file exists."""
    return self.io_handler.isfile(self._camera_models_overrides_file())
def load_camera_models_overrides(self) -> Dict[str, pygeometry.Camera]:
    """Load camera model overrides, keyed by camera identifier."""
    with self.io_handler.open_rt(self._camera_models_overrides_file()) as fin:
        obj = json.load(fin)
        return io.cameras_from_json(obj)
def save_camera_models_overrides(
    self, camera_models: Dict[str, pygeometry.Camera]
) -> None:
    """Save camera model overrides data as JSON."""
    with self.io_handler.open_wt(self._camera_models_overrides_file()) as fout:
        obj = io.cameras_to_json(camera_models)
        io.json_dump(obj, fout)
def _exif_overrides_file(self) -> str:
    """Return the path to the EXIF overrides file."""
    return os.path.join(self.data_path, "exif_overrides.json")
def exif_overrides_exists(self) -> bool:
    """Check if the EXIF overrides file exists."""
    return self.io_handler.isfile(self._exif_overrides_file())
def load_exif_overrides(self) -> Dict[str, Any]:
    """Load the raw EXIF overrides JSON as a dictionary."""
    with self.io_handler.open_rt(self._exif_overrides_file()) as fin:
        return json.load(fin)
def _rig_cameras_file(self) -> str:
    """Return the path of the rig cameras JSON file."""
    return os.path.join(self.data_path, "rig_cameras.json")
def load_rig_cameras(self) -> Dict[str, pymap.RigCamera]:
    """Return rig cameras data, keyed by rig camera identifier."""
    # A missing file means the dataset has no rigs: return empty.
    if not self.io_handler.exists(self._rig_cameras_file()):
        return {}
    with self.io_handler.open_rt(self._rig_cameras_file()) as fin:
        return io.rig_cameras_from_json(json.load(fin))
def save_rig_cameras(self, rig_cameras: Dict[str, pymap.RigCamera]) -> None:
    """Save rig cameras data as JSON."""
    with self.io_handler.open_wt(self._rig_cameras_file()) as fout:
        io.json_dump(io.rig_cameras_to_json(rig_cameras), fout)
def _rig_assignments_file(self) -> str:
    """Return the path of the rig assignments JSON file."""
    return os.path.join(self.data_path, "rig_assignments.json")
def load_rig_assignments(self) -> List[List[Tuple[str, str]]]:
    """Return rig assignments: one list of (shot_id, rig_camera_id) per rig instance."""
    # A missing file means no rig assignments: return empty.
    if not self.io_handler.exists(self._rig_assignments_file()):
        return []
    with self.io_handler.open_rt(self._rig_assignments_file()) as fin:
        return json.load(fin)
def load_rig_assignments_per_image(
    self,
) -> Dict[str, Tuple[int, str, List[str]]]:
    """Return rig assignments indexed by shot id.

    Each shot maps to (rig instance index, rig camera id, list of all
    shot ids in the same rig instance).
    """
    per_image = {}
    for idx, instance in enumerate(self.load_rig_assignments()):
        shots = [shot_id for shot_id, _ in instance]
        for shot_id, camera_id in instance:
            per_image[shot_id] = (idx, camera_id, shots)
    return per_image
def save_rig_assignments(self, rig_assignments: List[List[Tuple[str, str]]]) -> None:
    """Save rig assignments data as JSON."""
    with self.io_handler.open_wt(self._rig_assignments_file()) as fout:
        io.json_dump(rig_assignments, fout)
def profile_log(self) -> str:
    """Return the file path used to record timing information."""
    log_name = "profile.log"
    return os.path.join(self.data_path, log_name)
def _report_path(self) -> str:
    """Return the directory where report files are stored."""
    return os.path.join(self.data_path, "reports")
def load_report(self, path: str) -> str:
    """Load a report file as a string.

    *path* is relative to the dataset's reports directory.
    """
    with self.io_handler.open_rt(os.path.join(self._report_path(), path)) as fin:
        return fin.read()
def save_report(self, report_str: str, path: str) -> None:
    """Save a report string to a file under the reports directory.

    *path* is relative to the reports directory; missing parent
    directories are created.
    """
    filepath = os.path.join(self._report_path(), path)
    self.io_handler.mkdir_p(os.path.dirname(filepath))
    with self.io_handler.open_wt(filepath) as fout:
        # Do not propagate write()'s character count: the declared
        # return type is None (the original leaked the int here).
        fout.write(report_str)
def _ply_file(self, filename: Optional[str]) -> str:
    """Return the path of the PLY export (default name if None)."""
    return os.path.join(self.data_path, filename or "reconstruction.ply")
def save_ply(
    self,
    reconstruction: types.Reconstruction,
    tracks_manager: pymap.TracksManager,
    filename: Optional[str] = None,
    no_cameras: bool = False,
    no_points: bool = False,
    point_num_views: bool = False,
) -> None:
    """Save a reconstruction in PLY format.

    Camera and point output can be suppressed individually; when
    point_num_views is True the per-point view count is included.
    """
    ply = io.reconstruction_to_ply(
        reconstruction, tracks_manager, no_cameras, no_points, point_num_views
    )
    with self.io_handler.open_wt(self._ply_file(filename)) as fout:
        fout.write(ply)
def _ground_control_points_file(self) -> str:
return os.path.join(self.data_path, "ground_control_points.json")
def _gcp_list_file(self) -> str:
return os.path.join(self.data_path, "gcp_list.txt")
def load_ground_control_points(self) -> List[pymap.GroundControlPoint]:
    """Load ground control points.

    It uses reference_lla to convert the coordinates
    to topocentric reference frame.
    """
    reference = self.load_reference()
    return self.load_ground_control_points_impl(reference)
def load_ground_control_points_impl(
    self, reference: Optional[geo.TopocentricConverter]
) -> List[pymap.GroundControlPoint]:
    """Load ground control points.

    It might use reference to convert the coordinates
    to topocentric reference frame.
    If reference is None, it won't initialize topocentric data,
    thus allowing loading raw data only.
    """
    exif = {image: self.load_exif(image) for image in self.images()}
    gcp = []
    # Text-based gcp_list.txt format.
    if self.io_handler.isfile(self._gcp_list_file()):
        with self.io_handler.open_rt(self._gcp_list_file()) as fin:
            gcp = io.read_gcp_list(fin, reference, exif)
    pcs = []
    # JSON format; points from both sources are concatenated.
    if self.io_handler.isfile(self._ground_control_points_file()):
        with self.io_handler.open_rt(self._ground_control_points_file()) as fin:
            pcs = io.read_ground_control_points(fin, reference)
    return gcp + pcs
def image_as_array(self, image: str) -> np.ndarray:
    """Deprecated alias for load_image()."""
    logger.warning("image_as_array() is deprecated. Use load_image() instead.")
    return self.load_image(image)
def mask_as_array(self, image: str) -> Optional[np.ndarray]:
    """Deprecated alias for load_mask()."""
    logger.warning("mask_as_array() is deprecated. Use load_mask() instead.")
    return self.load_mask(image)
def subset(self, name: str, images_subset: List[str]) -> "DataSet":
    """Create a subset of this dataset by symlinking input data.

    A new dataset folder *name* is created under this dataset and
    populated with symlinks to the shared metadata files and to the
    image/segmentation files of *images_subset*.
    """
    subset_dataset_path = os.path.join(self.data_path, name)
    self.io_handler.mkdir_p(subset_dataset_path)
    self.io_handler.mkdir_p(os.path.join(subset_dataset_path, "images"))
    self.io_handler.mkdir_p(os.path.join(subset_dataset_path, "segmentations"))
    subset_dataset = DataSet(subset_dataset_path, self.io_handler)
    # Pair up (source, destination) paths; sources that do not exist
    # are simply skipped below.
    files = []
    for method in [
        "_camera_models_file",
        "_config_file",
        "_camera_models_overrides_file",
        "_exif_overrides_file",
    ]:
        files.append(
            (
                getattr(self, method)(),
                getattr(subset_dataset, method)(),
            )
        )
    for image in images_subset:
        files.append(
            (
                self._image_file(image),
                os.path.join(subset_dataset_path, "images", image),
            )
        )
        files.append(
            (
                self._segmentation_file(image),
                os.path.join(subset_dataset_path, "segmentations", image + ".png"),
            )
        )
    for src, dst in files:
        if not self.io_handler.exists(src):
            continue
        self.io_handler.rm_if_exist(dst)
        self.io_handler.symlink(src, dst)
    # NOTE(review): a fresh DataSet is returned (rather than the
    # subset_dataset built above), presumably so construction happens
    # after the config symlink exists — confirm before simplifying.
    return DataSet(subset_dataset_path, self.io_handler)
def undistorted_dataset(self) -> "UndistortedDataSet":
    """Return the companion dataset rooted at the 'undistorted' subfolder."""
    return UndistortedDataSet(
        self, os.path.join(self.data_path, "undistorted"), self.io_handler
    )
class UndistortedDataSet(object):
    """Accessors to the undistorted data of a dataset.

    Data include undistorted images, masks, and segmentation as well
    as the undistorted reconstruction, tracks graph and computed depth
    maps.

    All data is stored inside the single folder ``undistorted_data_path``.
    By default, this path is set to the ``undistorted`` subfolder.
    """

    base: DataSetBase
    # NOTE(review): mutable class-level default; __init__ always rebinds
    # it per-instance, so instances do not share state in practice.
    config: Dict[str, Any] = {}
    data_path: str

    def __init__(
        self,
        base_dataset: DataSetBase,
        undistorted_data_path: str,
        io_handler=io.IoFilesystemDefault,
    ) -> None:
        """Init dataset associated to a folder."""
        self.base = base_dataset
        self.config = self.base.config
        self.data_path = undistorted_data_path
        self.io_handler = io_handler

    def load_undistorted_shot_ids(self) -> Dict[str, List[str]]:
        """Map each original shot id to its undistorted shot ids."""
        filename = os.path.join(self.data_path, "undistorted_shot_ids.json")
        with self.io_handler.open_rt(filename) as fin:
            return io.json_load(fin)

    def save_undistorted_shot_ids(self, ushot_dict: Dict[str, List[str]]) -> None:
        """Persist the original-to-undistorted shot id mapping."""
        filename = os.path.join(self.data_path, "undistorted_shot_ids.json")
        self.io_handler.mkdir_p(self.data_path)
        with self.io_handler.open_wt(filename) as fout:
            io.json_dump(ushot_dict, fout, minify=False)

    def _undistorted_image_path(self) -> str:
        return os.path.join(self.data_path, "images")

    def _undistorted_image_file(self, image: str) -> str:
        """Path of undistorted version of an image."""
        return os.path.join(self._undistorted_image_path(), image)

    def load_undistorted_image(self, image: str) -> np.ndarray:
        """Load undistorted image pixels as a numpy array."""
        return self.io_handler.imread(self._undistorted_image_file(image))

    def save_undistorted_image(self, image: str, array: np.ndarray) -> None:
        """Save undistorted image pixels."""
        self.io_handler.mkdir_p(self._undistorted_image_path())
        self.io_handler.imwrite(self._undistorted_image_file(image), array)

    def undistorted_image_size(self, image: str) -> Tuple[int, int]:
        """Height and width of the undistorted image."""
        return self.io_handler.image_size(self._undistorted_image_file(image))

    def _undistorted_mask_path(self) -> str:
        return os.path.join(self.data_path, "masks")

    def _undistorted_mask_file(self, image: str) -> str:
        """Path of undistorted version of a mask."""
        return os.path.join(self._undistorted_mask_path(), image + ".png")

    def undistorted_mask_exists(self, image: str) -> bool:
        """Check if the undistorted mask file exists."""
        return self.io_handler.isfile(self._undistorted_mask_file(image))

    def load_undistorted_mask(self, image: str) -> np.ndarray:
        """Load undistorted mask pixels as a numpy array."""
        return self.io_handler.imread(
            self._undistorted_mask_file(image), grayscale=True
        )

    def save_undistorted_mask(self, image: str, array: np.ndarray) -> None:
        """Save the undistorted image mask."""
        self.io_handler.mkdir_p(self._undistorted_mask_path())
        self.io_handler.imwrite(self._undistorted_mask_file(image), array)

    def _undistorted_detection_path(self) -> str:
        return os.path.join(self.data_path, "detections")

    def _undistorted_detection_file(self, image: str) -> str:
        """Path of undistorted version of a detection."""
        return os.path.join(self._undistorted_detection_path(), image + ".png")

    def undistorted_detection_exists(self, image: str) -> bool:
        """Check if the undistorted detection file exists."""
        return self.io_handler.isfile(self._undistorted_detection_file(image))

    def load_undistorted_detection(self, image: str) -> np.ndarray:
        """Load an undistorted image detection."""
        return self.io_handler.imread(
            self._undistorted_detection_file(image), grayscale=True
        )

    def save_undistorted_detection(self, image: str, array: np.ndarray) -> None:
        """Save the undistorted image detection."""
        self.io_handler.mkdir_p(self._undistorted_detection_path())
        self.io_handler.imwrite(self._undistorted_detection_file(image), array)

    def _undistorted_segmentation_path(self) -> str:
        return os.path.join(self.data_path, "segmentations")

    def _undistorted_segmentation_file(self, image: str) -> str:
        """Path of undistorted version of a segmentation."""
        return os.path.join(self._undistorted_segmentation_path(), image + ".png")

    def undistorted_segmentation_exists(self, image: str) -> bool:
        """Check if the undistorted segmentation file exists."""
        return self.io_handler.isfile(self._undistorted_segmentation_file(image))

    def load_undistorted_segmentation(self, image: str) -> np.ndarray:
        """Load an undistorted image segmentation."""
        return self.io_handler.imread(
            self._undistorted_segmentation_file(image), grayscale=True
        )

    def save_undistorted_segmentation(self, image: str, array: np.ndarray) -> None:
        """Save the undistorted image segmentation."""
        self.io_handler.mkdir_p(self._undistorted_segmentation_path())
        self.io_handler.imwrite(self._undistorted_segmentation_file(image), array)

    def load_undistorted_segmentation_mask(self, image: str) -> Optional[np.ndarray]:
        """Build a mask from the undistorted segmentation.

        The mask is non-zero only for pixels with segmentation
        labels not in segmentation_ignore_values.
        """
        ignore_values = self.base.segmentation_ignore_values(image)
        # No ignore values configured: no mask can be derived.
        if not ignore_values:
            return None
        segmentation = self.load_undistorted_segmentation(image)
        if segmentation is None:
            return None
        return self.base.mask_from_segmentation(segmentation, ignore_values)

    def load_undistorted_combined_mask(self, image: str) -> Optional[np.ndarray]:
        """Combine undistorted binary mask with segmentation mask.

        Return a mask that is non-zero only where the binary
        mask and the segmentation mask are non-zero.
        """
        mask = None
        if self.undistorted_mask_exists(image):
            mask = self.load_undistorted_mask(image)
        smask = None
        if self.undistorted_segmentation_exists(image):
            smask = self.load_undistorted_segmentation_mask(image)
        return self.base.combine_masks(mask, smask)

    def _depthmap_path(self) -> str:
        return os.path.join(self.data_path, "depthmaps")

    def depthmap_file(self, image: str, suffix: str) -> str:
        """Path to the depthmap file"""
        return os.path.join(self._depthmap_path(), image + "." + suffix)

    def point_cloud_file(self, filename: str = "merged.ply") -> str:
        return os.path.join(self._depthmap_path(), filename)

    def load_point_cloud(
        self, filename: str = "merged.ply"
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Load (points, normals, colors, labels, detections) from a PLY file."""
        with self.io_handler.open(self.point_cloud_file(filename), "r") as fp:
            return io.point_cloud_from_ply(fp)

    def save_point_cloud(
        self,
        points: np.ndarray,
        normals: np.ndarray,
        colors: np.ndarray,
        labels: np.ndarray,
        detections: np.ndarray,
        filename: str = "merged.ply",
    ) -> None:
        """Save a point cloud with attributes as a PLY file."""
        self.io_handler.mkdir_p(self._depthmap_path())
        with self.io_handler.open(self.point_cloud_file(filename), "w") as fp:
            io.point_cloud_to_ply(points, normals, colors, labels, detections, fp)

    def raw_depthmap_exists(self, image: str) -> bool:
        return self.io_handler.isfile(self.depthmap_file(image, "raw.npz"))

    def save_raw_depthmap(
        self,
        image: str,
        depth: np.ndarray,
        plane: np.ndarray,
        score: np.ndarray,
        nghbr: np.ndarray,
        nghbrs: np.ndarray,
    ) -> None:
        """Save the raw depthmap estimation as a compressed .npz archive."""
        self.io_handler.mkdir_p(self._depthmap_path())
        filepath = self.depthmap_file(image, "raw.npz")
        with self.io_handler.open(filepath, "wb") as f:
            np.savez_compressed(
                f, depth=depth, plane=plane, score=score, nghbr=nghbr, nghbrs=nghbrs
            )

    def load_raw_depthmap(
        self, image: str
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Load (depth, plane, score, nghbr, nghbrs) arrays for an image."""
        with self.io_handler.open(self.depthmap_file(image, "raw.npz"), "rb") as f:
            o = np.load(f)
            return o["depth"], o["plane"], o["score"], o["nghbr"], o["nghbrs"]

    def clean_depthmap_exists(self, image: str) -> bool:
        return self.io_handler.isfile(self.depthmap_file(image, "clean.npz"))

    def save_clean_depthmap(
        self, image: str, depth: np.ndarray, plane: np.ndarray, score: np.ndarray
    ) -> None:
        """Save the cleaned depthmap as a compressed .npz archive."""
        self.io_handler.mkdir_p(self._depthmap_path())
        filepath = self.depthmap_file(image, "clean.npz")
        with self.io_handler.open(filepath, "wb") as f:
            np.savez_compressed(f, depth=depth, plane=plane, score=score)

    def load_clean_depthmap(
        self, image: str
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """Load (depth, plane, score) arrays of the cleaned depthmap."""
        with self.io_handler.open(self.depthmap_file(image, "clean.npz"), "rb") as f:
            o = np.load(f)
            return o["depth"], o["plane"], o["score"]

    def pruned_depthmap_exists(self, image: str) -> bool:
        return self.io_handler.isfile(self.depthmap_file(image, "pruned.npz"))

    def save_pruned_depthmap(
        self,
        image: str,
        points: np.ndarray,
        normals: np.ndarray,
        colors: np.ndarray,
        labels: np.ndarray,
        detections: np.ndarray,
    ) -> None:
        """Save the pruned depthmap point set as a compressed .npz archive."""
        self.io_handler.mkdir_p(self._depthmap_path())
        filepath = self.depthmap_file(image, "pruned.npz")
        with self.io_handler.open(filepath, "wb") as f:
            np.savez_compressed(
                f,
                points=points,
                normals=normals,
                colors=colors,
                labels=labels,
                detections=detections,
            )

    def load_pruned_depthmap(
        self, image: str
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Load (points, normals, colors, labels, detections) of the pruned depthmap."""
        with self.io_handler.open(self.depthmap_file(image, "pruned.npz"), "rb") as f:
            o = np.load(f)
            # Older archives lack "detections": substitute zeros shaped
            # like "labels" for backward compatibility.
            if "detections" not in o:
                return (
                    o["points"],
                    o["normals"],
                    o["colors"],
                    o["labels"],
                    np.zeros(o["labels"].shape),
                )
            else:
                return (
                    o["points"],
                    o["normals"],
                    o["colors"],
                    o["labels"],
                    o["detections"],
                )

    def load_undistorted_tracks_manager(self) -> pymap.TracksManager:
        filename = os.path.join(self.data_path, "tracks.csv")
        with self.io_handler.open(filename, "r") as f:
            return pymap.TracksManager.instanciate_from_string(f.read())

    def save_undistorted_tracks_manager(
        self, tracks_manager: pymap.TracksManager
    ) -> None:
        filename = os.path.join(self.data_path, "tracks.csv")
        with self.io_handler.open(filename, "w") as fw:
            fw.write(tracks_manager.as_string())

    def load_undistorted_reconstruction(self) -> List[types.Reconstruction]:
        filename = os.path.join(self.data_path, "reconstruction.json")
        with self.io_handler.open_rt(filename) as fin:
            return io.reconstructions_from_json(io.json_load(fin))

    def save_undistorted_reconstruction(
        self, reconstruction: List[types.Reconstruction]
    ) -> None:
        filename = os.path.join(self.data_path, "reconstruction.json")
        self.io_handler.mkdir_p(self.data_path)
        with self.io_handler.open_wt(filename) as fout:
            io.json_dump(io.reconstructions_to_json(reconstruction), fout, minify=True)
| 36.18018
| 87
| 0.624523
|
4a1204a8f26338359303db9f151743171801e27f
| 678
|
py
|
Python
|
GenyTreeApp/migrations/0004_auto_20190607_1620.py
|
carlos-el/ETSIIT_TFG_2019
|
0a3d65c5ae5abc3ec0af80e24c9eb9f257761e87
|
[
"MIT"
] | null | null | null |
GenyTreeApp/migrations/0004_auto_20190607_1620.py
|
carlos-el/ETSIIT_TFG_2019
|
0a3d65c5ae5abc3ec0af80e24c9eb9f257761e87
|
[
"MIT"
] | 1
|
2021-06-09T18:18:24.000Z
|
2021-06-09T18:18:24.000Z
|
GenyTreeApp/migrations/0004_auto_20190607_1620.py
|
carlos-el/ETSIIT_TFG_2019
|
0a3d65c5ae5abc3ec0af80e24c9eb9f257761e87
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2019-06-07 14:20
from django.db import migrations, models
# Auto-generated Django migration: edit with care — the recorded schema
# history depends on it.
class Migration(migrations.Migration):

    dependencies = [
        ('GenyTreeApp', '0003_auto_20190607_1619'),
    ]

    operations = [
        # Re-declare the two Person M2M fields with explicit through
        # models and reverse accessor names.
        migrations.AlterField(
            model_name='person',
            name='child',
            field=models.ManyToManyField(related_name='parents', through='GenyTreeApp.Child', to='GenyTreeApp.Person'),
        ),
        migrations.AlterField(
            model_name='person',
            name='couple',
            field=models.ManyToManyField(related_name='couples', through='GenyTreeApp.Union', to='GenyTreeApp.Person'),
        ),
    ]
| 28.25
| 119
| 0.620944
|
4a1204ebd3575d4e644e6e26cb16404259e91684
| 2,084
|
py
|
Python
|
autofit/tools/util.py
|
jonathanfrawley/PyAutoFit
|
818384a6eb3926b18247e16efcf0db6008193bd4
|
[
"MIT"
] | null | null | null |
autofit/tools/util.py
|
jonathanfrawley/PyAutoFit
|
818384a6eb3926b18247e16efcf0db6008193bd4
|
[
"MIT"
] | null | null | null |
autofit/tools/util.py
|
jonathanfrawley/PyAutoFit
|
818384a6eb3926b18247e16efcf0db6008193bd4
|
[
"MIT"
] | null | null | null |
import numpy as np
from contextlib import contextmanager
import sys, os
import json
@contextmanager
def suppress_stdout():
    """Temporarily redirect sys.stdout to os.devnull.

    The previous stdout is restored even if the wrapped code raises.
    """
    devnull = open(os.devnull, "w")
    saved_stdout = sys.stdout
    sys.stdout = devnull
    try:
        yield
    finally:
        sys.stdout = saved_stdout
        devnull.close()
def numpy_array_to_json(
    array: np.ndarray, file_path: str, overwrite: bool = False
):
    """
    Write a NumPy array to a json file.

    Parameters
    ----------
    array : np.ndarray
        The array that is written to json.
    file_path : str
        The full path of the file that is output, including the file name and `.json` extension.
    overwrite : bool
        If `True` and a file already exists at file_path, the existing
        file is removed before writing.  If `False`, any existing file
        is simply overwritten in place (no error is raised).

    Returns
    -------
    None

    Examples
    --------
    array_2d = np.ones((5,5))
    numpy_array_to_json(array_2d=array_2d, file_path='/path/to/file/filename.json', overwrite=True)
    """
    file_dir = os.path.split(file_path)[0]
    # Only create a directory when the path actually has one:
    # os.makedirs("") raises for bare filenames.  exist_ok avoids a
    # race between the existence check and the creation.
    if file_dir:
        os.makedirs(file_dir, exist_ok=True)

    if overwrite and os.path.exists(file_path):
        os.remove(file_path)

    with open(file_path, "w+") as f:
        json.dump(array.tolist(), f)
def numpy_array_from_json(file_path: str):
    """
    Read a NumPy array from a .json file.

    NOTE(review): contrary to the previous docstring, no np.flipud is
    applied here — the array is returned exactly as stored in the file.

    Parameters
    ----------
    file_path : str
        The full path of the file that is loaded, including the file name and ``.json`` extension.

    Returns
    -------
    ndarray
        The NumPy array that is loaded from the .json file.

    Examples
    --------
    array_2d = numpy_array_from_json(file_path='/path/to/file/filename.json')
    """
    with open(file_path, "r") as f:
        return np.asarray(json.load(f))
| 27.421053
| 116
| 0.609885
|
4a12056f9d6757537f67ebb21ca386921f58b599
| 7,218
|
py
|
Python
|
src/buspirate_uart_monitor/bp_monitor.py
|
e28eta/buspirate-uart-monitor
|
31aa50058405beb3272d98ae4355195e368de124
|
[
"MIT"
] | null | null | null |
src/buspirate_uart_monitor/bp_monitor.py
|
e28eta/buspirate-uart-monitor
|
31aa50058405beb3272d98ae4355195e368de124
|
[
"MIT"
] | null | null | null |
src/buspirate_uart_monitor/bp_monitor.py
|
e28eta/buspirate-uart-monitor
|
31aa50058405beb3272d98ae4355195e368de124
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
Connect to a Bus Pirate, put it into UART mode compatible with esphome logger, and print everything received
"""
import argparse
import sys
from typing import Optional
import serial
from serial.tools.list_ports_common import ListPortInfo
from serial.tools.miniterm import key_description
class DoNotTx(serial.tools.miniterm.Transform):
    """Miniterm filter that drops all outgoing (typed) characters."""

    def tx(self, text):
        # Swallow everything the user types: the monitor is read-only.
        return ""
# Register the transform so Miniterm(filters=["do-not-tx"]) can use it.
serial.tools.miniterm.TRANSFORMATIONS["do-not-tx"] = DoNotTx

# Bus Pirate binary-mode command bytes.
commands = {
    "BBIO1": b"\x00",  # Enter reset binary mode
    "ART1": b"\x03",  # Enter binary UART mode
    "RESET": b"\x0F",  # Reset Bus Pirate
}
def EnterBinaryMode(connection: serial.Serial) -> bool:
    """Put the Bus Pirate into raw bitbang (binary) mode.

    Flushes buffers, sends <enter> ten times plus '#' to escape any
    menu state, then sends 0x00 repeatedly until the device answers
    "BBIO1".  Returns True on success, False if the version string
    never arrives.
    """
    connection.reset_output_buffer()
    connection.reset_input_buffer()
    # > The Bus Pirate user terminal could be stuck in a configuration menu when your program attempts to enter binary mode.
    # > One way to ensure that you're at the command line is to send <enter> at least 10 times, and then send '#' to reset.
    # > Next, send 0x00 to the command line 20+ times until you get the BBIOx version string
    connection.write(b"\n" * 10 + b"#" + b"\n")
    connection.reset_input_buffer()
    if connection.in_waiting:
        # Drain whatever the reset sequence produced.
        connection.read(connection.in_waiting)
    # Conflicting docs on entering binary mode:
    # > Send 0x00 to the user terminal (max.) 20 times
    # > you must now enter 0x00 at least 20 times to enter raw bitbang mode
    for _ in range(25):
        connection.write(commands["BBIO1"])
        if connection.read(5) == b"BBIO1":
            return True
    return False
def Send(
    connection: serial.Serial,
    message: bytes,
    expected_response: bytes,
    error_message: Optional[str] = None,
    raise_on_fail: bool = True,
):
    """Write *message* and verify the device answers *expected_response*.

    Reads exactly len(expected_response) bytes back.  On mismatch,
    either closes the connection and raises RuntimeError
    (raise_on_fail=True) or writes the error to stderr.
    """
    connection.write(message)
    response = connection.read(len(expected_response))
    if response != expected_response:
        error_message = (
            error_message
            or f'Response to "{message.hex()}" was "{response.hex()}" instead of expected value "{expected_response.hex()}"'
        )
        if raise_on_fail:
            connection.close()
            raise RuntimeError(error_message)
        else:
            sys.stderr.write(error_message + "\n")
def is_possible_buspirate(port: ListPortInfo) -> bool:
    """True if the port's USB vendor/product ids match known Bus Pirate hardware."""
    known_ids = {(0x0403, 0x6001), (0x04D8, 0xFB00)}
    return (port.vid, port.pid) in known_ids
def find_buspirate_port():
    """Locate the Bus Pirate serial port, asking the user if ambiguous.

    If exactly one connected port matches the known vendor+product ids
    it is returned directly; otherwise every port is listed and the
    user picks one by index or by literal port name.
    """
    all_ports_found = sorted(serial.tools.list_ports.comports())
    ports_matching_filter = list(filter(is_possible_buspirate, all_ports_found))
    if len(ports_matching_filter) == 1:
        pirate_port = ports_matching_filter[0].device
        sys.stderr.write(
            f"Found only one serial port matching expected Bus Pirate vendor & product id: {pirate_port}\n"
        )
        sys.stderr.write(
            "To use a different port, you'll need to explicitly specify desired port with -p or --port\n"
        )
        return pirate_port
    sys.stderr.write(
        f'\nFound {"zero" if len(ports_matching_filter) == 0 else "multiple"} possible Bus Pirates\n'
    )
    sys.stderr.write("--- All available ports:\n")
    ports = []
    for n, port in enumerate(all_ports_found, 1):
        # Index into a 2-tuple with the boolean membership test.
        matches_pirate = ("", " -- Possible Bus Pirate!")[port in ports_matching_filter]
        sys.stderr.write(
            "--- {:2}: {:20} {!r}{}\n".format(
                n, port.device, port.description, matches_pirate
            )
        )
        ports.append(port.device)
    while True:
        port = input("--- Enter port index or full name: ")
        try:
            index = int(port) - 1
            if not 0 <= index < len(ports):
                sys.stderr.write("--- Invalid index!\n")
                continue
        except ValueError:
            # Not an integer: treat the input as a literal port name.
            pass
        else:
            port = ports[index]
        return port
def main():
    """Connect to a Bus Pirate, switch it to UART mode and mirror RX.

    Configures 115200 baud, enables UART RX echo, runs a read-only
    miniterm session, then restores the device (echo off, bitbang
    mode, hardware reset) on exit.
    """
    parser = argparse.ArgumentParser(description="Bus Pirate UART monitor")
    parser.add_argument("--port", "-p", help="TTY device for Bus Pirate", default=None)
    args = parser.parse_args()
    if args.port is None or args.port == "-":
        try:
            args.port = find_buspirate_port()
        except KeyboardInterrupt:
            sys.stderr.write("\n")
            parser.error("user aborted")
        else:
            if not args.port:
                parser.error(
                    "could not find Bus Pirate port, and user did not specify one"
                )

    baud = 115200
    # also 115200, despite error on http://dangerousprototypes.com/docs/UART_(binary)#0110xxxx_-_Set_UART_speed
    uart_connection_speed_command = b"\x69"

    sys.stderr.write(f"Connecting to: {args.port} at baudrate {baud}\n")
    try:
        connection = serial.Serial(args.port, baud, timeout=0.1)
    except Exception as e:
        sys.stderr.write(
            f"Connection cannot be opened\nError({e.errno}): {e.strerror}\n"
        )
        return

    sys.stderr.write("Entering binary mode\n")
    if not EnterBinaryMode(connection):
        connection.close()
        raise RuntimeError("Bus Pirate failed to enter binary mode")

    # Device configuration: UART mode, speed, 8/N/1, then RX echo on.
    sys.stderr.write("Entering UART Mode\n")
    Send(connection, commands["ART1"], b"ART1", "Bus Pirate failed to enter UART mode")
    Send(
        connection,
        uart_connection_speed_command,
        b"\x01",
        "Failed to set connection speed",
    )
    Send(
        connection,
        b"\x80",
        b"\x01",
        "Failed to configure UART settings (HiZ, 8/N/1, RX idle high)",
    )
    Send(connection, b"\x02", b"\x01", "Turning on echo UART RX failed")

    # Switch to blocking reads for the interactive session.
    connection.timeout = None
    _ = connection.read(
        connection.in_waiting
    )  # pretty reliably getting a 0x00 from the device, ignore it if there

    miniterm = serial.tools.miniterm.Miniterm(connection, filters=["do-not-tx"])
    miniterm.exit_character = chr(0x03)  # Ctrl-C
    miniterm.set_rx_encoding("UTF-8")
    miniterm.set_tx_encoding("UTF-8")
    sys.stderr.write(
        f"--- UART Log | Quit: {key_description(miniterm.exit_character)} ---\n"
    )
    miniterm.start()
    try:
        miniterm.join(True)
    except KeyboardInterrupt:
        pass
    sys.stderr.write("\n--- exit ---\n")
    miniterm.join()

    # Cleanup: echo off, back to bitbang mode, hardware reset.
    connection.reset_output_buffer()
    connection.reset_input_buffer()
    connection.write(b"\x03")
    response = connection.read(connection.in_waiting or 1)
    if not response.endswith(b"\x01"):
        # I haven't yet figured out how to make this robust
        sys.stderr.write(
            f"Turning off echo UART RX failed, the rest of cleanup may not go correctly. Response: {response.hex()}\n"
        )
    sys.stderr.write("Closing connection.\n")
    Send(
        connection, commands["BBIO1"], b"BBIO1", raise_on_fail=False
    )  # 'Switching back to bitbang mode failed'
    Send(
        connection, commands["RESET"], b"\x01", raise_on_fail=False
    )  # 'Resetting Bus Pirate hardware failed'
    connection.close()
# Script entry point: translate uncaught RuntimeErrors into exit code 2.
if __name__ == "__main__":
    try:
        main()
    except RuntimeError as e:
        sys.stderr.write(f"\nA fatal error of some sort occurred: { repr(e) }\n")
        sys.exit(2)
| 32.958904
| 124
| 0.630784
|
4a12057a81e2fa63e1069be8f802fcb4a762d8b6
| 4,237
|
py
|
Python
|
octavia_tempest_plugin/tests/api/v2/test_availability_zone_capabilities.py
|
NeCTAR-RC/octavia-tempest-plugin
|
5506c00b8d8972e6223499dd5a5da4c85c1ff836
|
[
"Apache-2.0"
] | null | null | null |
octavia_tempest_plugin/tests/api/v2/test_availability_zone_capabilities.py
|
NeCTAR-RC/octavia-tempest-plugin
|
5506c00b8d8972e6223499dd5a5da4c85c1ff836
|
[
"Apache-2.0"
] | null | null | null |
octavia_tempest_plugin/tests/api/v2/test_availability_zone_capabilities.py
|
NeCTAR-RC/octavia-tempest-plugin
|
5506c00b8d8972e6223499dd5a5da4c85c1ff836
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Rackspace US Inc. All rights reserved.
# Copyright 2019 Verizon Media
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.lib import decorators
from tempest.lib import exceptions
from octavia_tempest_plugin.common import constants as const
from octavia_tempest_plugin.tests import test_base
CONF = config.CONF
class AvailabilityZoneCapabilitiesAPITest(test_base.LoadBalancerBaseTest):
"""Test the provider availability zone capabilities API."""
@decorators.idempotent_id('cb3e4c59-4114-420b-9837-2666d4d5fef4')
def test_availability_zone_capabilities_list(self):
"""Tests provider availability zone capabilities list API/filtering.
* Validates that non-lb admin accounts cannot list the capabilities.
* List the availability zone capablilities.
* Validate that the "loadbalancer_topology" capablility is present.
* List the providers returning one field at a time.
"""
# We have to do this here as the api_version and clients are not
# setup in time to use a decorator or the skip_checks mixin
if not self.mem_provider_client.is_version_supported(
self.api_version, '2.14'):
raise self.skipException(
'Availability zone capabilities are only available '
'on Octavia API version 2.14 or newer.')
# Test that a user without the load balancer admin role cannot
# list provider availability zone capabilities.
if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
os_primary_capabilities_client = (
self.os_primary.availability_zone_capabilities_client)
self.assertRaises(
exceptions.Forbidden,
(os_primary_capabilities_client
.list_availability_zone_capabilities),
CONF.load_balancer.provider)
# Check for an expected availability zone capability for the
# configured provider
admin_capabilities_client = (
self.lb_admin_availability_zone_capabilities_client)
capabilities = (
admin_capabilities_client.list_availability_zone_capabilities(
CONF.load_balancer.provider))
expected_name = list(
CONF.load_balancer.expected_availability_zone_capability)[0]
expected_description = (
CONF.load_balancer.expected_availability_zone_capability[
expected_name])
for capability in capabilities:
if capability[const.NAME] == expected_name:
self.assertEqual(expected_description,
capability[const.DESCRIPTION])
# Test fields
capabilities = (
admin_capabilities_client.list_availability_zone_capabilities(
CONF.load_balancer.provider,
query_params='{fields}={field}&{field}={exp_name}'.format(
fields=const.FIELDS, field=const.NAME,
exp_name=expected_name)))
self.assertEqual(1, len(capabilities[0]))
self.assertEqual(expected_name, capabilities[0][const.NAME])
capabilities = (
admin_capabilities_client.list_availability_zone_capabilities(
CONF.load_balancer.provider,
query_params='{fields}={field}&{name}={exp_name}'.format(
fields=const.FIELDS, field=const.DESCRIPTION,
name=const.NAME, exp_name=expected_name)))
self.assertEqual(1, len(capabilities[0]))
self.assertEqual(expected_description,
capabilities[0][const.DESCRIPTION])
| 45.074468
| 78
| 0.67359
|
4a1205888907386fc637052a76f3963e3861ce04
| 2,302
|
py
|
Python
|
app/db.py
|
LambdaTheda/final_CitySpire_TeamC
|
8c2a2fa76464633b9520c929ecdfc28fe30c41b2
|
[
"MIT"
] | null | null | null |
app/db.py
|
LambdaTheda/final_CitySpire_TeamC
|
8c2a2fa76464633b9520c929ecdfc28fe30c41b2
|
[
"MIT"
] | 4
|
2021-01-25T04:01:05.000Z
|
2021-03-03T02:24:38.000Z
|
app/db.py
|
LambdaTheda/final_CitySpire_TeamC
|
8c2a2fa76464633b9520c929ecdfc28fe30c41b2
|
[
"MIT"
] | 4
|
2021-01-28T02:16:06.000Z
|
2021-03-10T03:31:50.000Z
|
#app/db.py
"""Database functions"""
#Imports
import os
from dotenv import load_dotenv
from fastapi import APIRouter, Depends
import sqlalchemy
import psycopg2
from psycopg2.extras import execute_values
import json
# Router
router = APIRouter()
# Load environment variables aka secrets from .env
load_dotenv()
DATABASE_URL = os.getenv("DATABASE_URL")
#DATABASE_URL = os.getenv['DATABASE_URL'] # for AWS EB Environment Variable
# Connect to AWS RDS PG DB with FastAPI on Heroku (Hosted on AWS)
connection = psycopg2.connect(DATABASE_URL)
# Cursor for making SQL queries
cursor = connection.cursor()
# Get a SQLAlchemy database connection (we are using Postgres...)
async def get_db() -> sqlalchemy.engine.base.Connection:
"""Get a SQLAlchemy database connection.
Uses this environment variable if it exists:
DATABASE_URL=dialect://user:password@host/dbname
Otherwise uses a SQLite database for initial local development.
"""
load_dotenv()
database_url = os.getenv('DATABASE_URL', default='sqlite:///temporary.db')
engine = sqlalchemy.create_engine(database_url)
connection = engine.connect()
try:
yield connection
finally:
connection.close()
# Verify we can connect to the database
@router.get('/info')
async def get_url(connection=Depends(get_db)):
"""Verify we can connect to the database,
and return the database URL in this format:
dialect://user:password@host/dbname
The password will be hidden with ***
"""
url_without_password = repr(connection.engine.url)
return {'database_url': url_without_password}
# This looks like trash and it is slow, but it is a start for a more refined serve of data
@router.get('/cityspire')
async def get_table(connection=Depends(get_db)):
"""Return table of all data from CitySpire DB in json object"""
select_query = "SELECT * from cityspire01"
cursor.execute(select_query)
records = cursor.fetchall()
#cursor.close()
#connection.close()
return json.dumps(records)
# Elastic Load Balancing health checks
@router.get('/healthcheck')
def healthcheck():
msg = ("This is a health check message.")
return {"message": msg}
| 25.577778
| 91
| 0.694179
|
4a12059bac27b4bb5fa455c940461a21fb65eaaa
| 8,777
|
py
|
Python
|
back-end/shiftapp/serializers.py
|
BloomTech-Labs/CS10-employee-shift
|
bb45c8e0ece121b8e38db9f156bddd8ac204d875
|
[
"MIT"
] | null | null | null |
back-end/shiftapp/serializers.py
|
BloomTech-Labs/CS10-employee-shift
|
bb45c8e0ece121b8e38db9f156bddd8ac204d875
|
[
"MIT"
] | 6
|
2022-02-12T15:59:53.000Z
|
2022-03-08T22:53:17.000Z
|
back-end/shiftapp/serializers.py
|
BloomTech-Labs/CS10-employee-shift
|
bb45c8e0ece121b8e38db9f156bddd8ac204d875
|
[
"MIT"
] | 4
|
2018-09-10T16:13:48.000Z
|
2021-01-30T20:04:37.000Z
|
from django.contrib.auth.models import User, Group, Permission
from rest_framework.serializers import ModelSerializer, ChoiceField
from rest_framework import serializers
from django.core.exceptions import ValidationError
from django.contrib.auth.password_validation import validate_password, get_password_validators
from django.contrib.auth.hashers import make_password, check_password
from difflib import SequenceMatcher
from shiftapp.models import Profile, Account, Availability, RequestedTimeOff, Shift, HourOfOperation
class PermissionSerializer(ModelSerializer):
class Meta:
model = Permission
fields = '__all__'
class GroupSerializer(ModelSerializer):
permissions = PermissionSerializer(many=True)
class Meta:
model = Group
fields = '__all__'
# fields = ('url', 'name')
class UserSerializer(ModelSerializer):
# profiles = serializers.PrimaryKeyRelatedField(many=True, queryset=Profile.objects.all())
# groups = GroupSerializer(many=True)
class Meta:
model = User
# fields = '__all__'
fields = ('url', 'id', 'username','first_name', 'last_name', 'email', 'password', 'is_staff', 'groups')
extra_kwargs = {
'password': {'write_only': True},
'is_superuser': {'read_only': True},
}
def validate_password(self, value):
data = self.get_initial()
request = self.context.get("request")
user = request.user
password = data.get("password")
re_password = request.data["re_password"]
if 'old_password' in request.data:
old_user = User.objects.get(username=user)
old_password = request.data["old_password"]
if not check_password(old_password, old_user.password):
raise serializers.ValidationError("Old password is incorrect")
if re_password != value:
raise serializers.ValidationError("The password doesn't match.")
try:
validate_password(value)
except ValidationError as exc:
print(exc)
raise exc
return value
def create(self, validated_data):
user = super(UserSerializer, self).create(validated_data)
if 'password' in validated_data:
user.set_password(validated_data['password'])
user.save()
return user
class UserGroupSerializer(ModelSerializer):
# profiles = serializers.PrimaryKeyRelatedField(many=True, queryset=Profile.objects.all())
groups = GroupSerializer(many=True)
class Meta:
model = User
# fields = '__all__'
fields = ('url', 'id', 'username','first_name', 'last_name', 'email', 'password', 'is_staff', 'groups')
extra_kwargs = {
'password': {'write_only': True},
'is_superuser': {'read_only': True},
}
def validate_password(self, value):
data = self.get_initial()
request = self.context.get("request")
user = request.user
password = data.get("password")
re_password = request.data["re_password"]
if 'old_password' in request.data:
old_user = User.objects.get(username=user)
old_password = request.data["old_password"]
if not check_password(old_password, old_user.password):
raise serializers.ValidationError("Old password is incorrect")
if re_password != value:
raise serializers.ValidationError("The password doesn't match.")
try:
validate_password(value)
except ValidationError as exc:
raise serializers.ValidationError(str(exc))
return value
def create(self, validated_data):
user = super(UserSerializer, self).create(validated_data)
if 'password' in validated_data:
user.set_password(validated_data['password'])
user.save()
return user
class AccountSerializer(ModelSerializer):
class Meta:
model = Account
fields = ('id', 'logo', 'company', 'enabled', 'plan_expires')
class ProfileSerializer(ModelSerializer):
user = UserSerializer()
class Meta:
model = Profile
fields = ('url', 'id', 'user', 'account', 'phone_number', 'notes', 'email_enabled', 'text_enabled')
def create(self, validated_data):
user_data = validated_data.pop('user')
group = Group.objects.filter(name='employee')
user_data['groups'] = group
user = UserSerializer.create(UserSerializer(), validated_data=user_data)
# profile = super(ProfileSerializer, self).create(user=user)
profile, created = Profile.objects.update_or_create(user=user,
account=validated_data.pop('account'),
phone_number=validated_data.pop('phone_number'),
notes=validated_data.pop('notes'),
email_enabled=validated_data.pop('email_enabled'),
text_enabled=validated_data.pop('text_enabled'))
return profile
def update(self, instance, validated_data):
if 'user' in validated_data:
user_data = validated_data.pop('user')
user = instance.user
for key, value in user_data.items():
if key == 'password':
setattr(user, key, make_password(value))
if value != "" and key != 'password':
setattr(user, key, value)
user.save()
for key, value in validated_data.items():
if value != "":
setattr(instance, key, value)
instance.save()
return instance
class UserProfileSerializer(ModelSerializer):
# user = UserSerializer(many=False, read_only=True)
user = UserGroupSerializer()
account = AccountSerializer()
class Meta:
model = Profile
fields = ('url', 'id', 'user', 'account', 'phone_number', 'notes', 'email_enabled', 'text_enabled')
def update(self, instance, validated_data):
if 'user' in validated_data:
user_data = validated_data.pop('user')
user = instance.user
for key, value in user_data.items():
if key == 'password':
setattr(user, key, make_password(value))
if value != "" and key != 'password':
setattr(user, key, value)
user.save()
for key, value in validated_data.items():
if value != "":
setattr(instance, key, value)
instance.save()
return instance
class AccountUserProfileSerializer(ModelSerializer):
# user = UserSerializer(many=False, read_only=True)
user = UserSerializer()
account = AccountSerializer()
class Meta:
model = Profile
fields = ('url', 'id', 'user', 'account', 'phone_number', 'notes', 'email_enabled', 'text_enabled')
def create(self, validated_data):
user_data = validated_data.pop('user')
group = Group.objects.filter(name='manager')
user_data['groups'] = group
account_data = validated_data.pop('account')
user = UserSerializer.create(UserSerializer(), validated_data=user_data)
account = AccountSerializer.create(AccountSerializer(), validated_data=account_data)
# profile = super(ProfileSerializer, self).create(user=user)
profile, created = Profile.objects.update_or_create(user=user,
account=account,
phone_number=validated_data.pop('phone_number'),
notes=validated_data.pop('notes'),
email_enabled=validated_data.pop('email_enabled'),
text_enabled=validated_data.pop('text_enabled'))
return profile
class RequestedTimeOffSerializer(ModelSerializer):
# status = serializers.ChoiceField(choices=STATUS_CHOICES, default='Pending')
class Meta:
model = RequestedTimeOff
# model.STATUS_CHOICES
fields = ('id', 'profile', 'start_datetime', 'end_datetime', 'reason', 'status')
# def create(self, validated_data):
# # TODO: check valid profile wuth user
# roo = super(RequestedTimeOffSerializer, self).create(validated_data)
# roo.save()
# return roo
class ShiftSerializer(ModelSerializer):
class Meta:
model = Shift
fields = ('id', 'account', 'profile', 'start_datetime', 'end_datetime', 'notes', 'is_open')
class HourOfOperationSerializer(ModelSerializer):
class Meta:
model = HourOfOperation
fields = ('id', 'account', 'day', 'open_time', 'close_time', 'is_open')
class AvailabilitySerializer(ModelSerializer):
class Meta:
model = Availability
fields = ('id', 'profile', 'day', 'start_time', 'end_time')
| 34.968127
| 111
| 0.632335
|
4a12064334bfffb954c7f9f1b58ea5a718da85f1
| 795
|
py
|
Python
|
visuals/periodogram.py
|
randompirate/tempesp
|
8b933ed3d9805f340858d2da08460b93cb7706e2
|
[
"MIT"
] | null | null | null |
visuals/periodogram.py
|
randompirate/tempesp
|
8b933ed3d9805f340858d2da08460b93cb7706e2
|
[
"MIT"
] | null | null | null |
visuals/periodogram.py
|
randompirate/tempesp
|
8b933ed3d9805f340858d2da08460b93cb7706e2
|
[
"MIT"
] | null | null | null |
import temp_plots as tp
import scipy.signal as sig
import matplotlib.pyplot as plt
import numpy as np
t = tp.time_array_epoch
t = (t - t[0]) / (60*60*24) # Time passed in days
y = tp.temp_array
y = y - np.mean(y)
N_samples = len(t)
max_daily_freq = 4 # At most 4 times a day (sampling rate)
max_daily_period = 10 # A period of at most 10 days
# Lomb-scargle power spectrum: https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lombscargle.html\n
f_angular = np.linspace(1/(max_daily_period*2*np.pi), max_daily_freq*2*np.pi, N_samples)
f_daily = f_angular / (2*np.pi)
ls = sig.lombscargle(t, y, f_angular)
print(t[-1])
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(1/f_daily, ls/np.max(ls))
ax.set_xlabel('Period (days)')
plt.show()
# plt.plot(freqs, ps/np.max(ps))\n
| 26.5
| 115
| 0.715723
|
4a1206e83f92f38134c4670652ba903d55870a16
| 7,456
|
py
|
Python
|
test/kitchen/tasks/kitchen.py
|
bai/datadog-agent
|
091aeb213df1568aec9edf18a728d69740042237
|
[
"Apache-2.0"
] | null | null | null |
test/kitchen/tasks/kitchen.py
|
bai/datadog-agent
|
091aeb213df1568aec9edf18a728d69740042237
|
[
"Apache-2.0"
] | 52
|
2021-05-01T01:52:30.000Z
|
2022-02-01T22:03:12.000Z
|
test/kitchen/tasks/kitchen.py
|
bai/datadog-agent
|
091aeb213df1568aec9edf18a728d69740042237
|
[
"Apache-2.0"
] | null | null | null |
import glob
import json
import os.path
import re
import traceback
import requests
from invoke import task
from invoke.exceptions import Exit
@task(iterable=['platlist'])
def genconfig(
ctx,
platform=None,
provider=None,
osversions="all",
testfiles=None,
uservars=None,
platformfile=None,
platlist=None,
fips=False,
arch="x86_64",
):
"""
Create a kitchen config
"""
if not platform and not platlist:
raise Exit(message="Must supply a platform to configure\n", code=1)
if not testfiles:
raise Exit(message="Must supply one or more testfiles to include\n", code=1)
if platlist and (platform or provider):
raise Exit(
message="Can specify either a list of specific OS images OR a platform and provider, but not both\n", code=1
)
if not platlist and not provider:
provider = "azure"
if platformfile:
with open(platformfile, "r") as f:
platforms = json.load(f)
else:
try:
print(
"Fetching the latest kitchen platforms.json from Github. Use --platformfile=platforms.json to override with a local file."
)
r = requests.get(
'https://raw.githubusercontent.com/DataDog/datadog-agent/master/test/kitchen/platforms.json',
allow_redirects=True,
)
r.raise_for_status()
platforms = r.json()
except Exception:
traceback.print_exc()
print("Warning: Could not fetch the latest kitchen platforms.json from Github, using local version.")
with open("platforms.json", "r") as f:
platforms = json.load(f)
# create the TEST_PLATFORMS environment variable
testplatformslist = []
if platform:
plat = platforms.get(platform)
if not plat:
raise Exit(
message="Unknown platform {platform}. Known platforms are {avail}\n".format(
platform=platform, avail=list(platforms.keys())
),
code=2,
)
# check to see if the OS is configured for the given provider
prov = plat.get(provider)
if not prov:
raise Exit(
message="Unknown provider {prov}. Known providers for platform {plat} are {avail}\n".format(
prov=provider, plat=platform, avail=list(plat.keys())
),
code=3,
)
ar = prov.get(arch)
if not ar:
raise Exit(
message="Unknown architecture {arch}. "
"Known architectures for platform {plat} provider {prov} are {avail}\n".format(
arch=arch, prov=provider, plat=platform, avail=list(prov.keys())
),
code=4,
)
# get list of target OSes
if osversions.lower() == "all":
osversions = ".*"
osimages = load_targets(ctx, ar, osversions)
print("Chose os targets {}\n".format(osimages))
for osimage in osimages:
testplatformslist.append("{},{}".format(osimage, ar[osimage]))
elif platlist:
# platform list should be in the form of driver,os,arch,image
for entry in platlist:
driver, os, arch, image = entry.split(",")
if provider and driver != provider:
raise Exit(
message="Can only use one driver type per config ( {} != {} )\n".format(provider, driver), code=1
)
provider = driver
# check to see if we know this one
if not platforms.get(os):
raise Exit(message="Unknown OS in {}\n".format(entry), code=4)
if not platforms[os].get(driver):
raise Exit(message="Unknown driver in {}\n".format(entry), code=5)
if not platforms[os][driver].get(arch):
raise Exit(message="Unknown architecture in {}\n".format(entry), code=5)
if not platforms[os][driver][arch].get(image):
raise Exit(message="Unknown image in {}\n".format(entry), code=6)
testplatformslist.append("{},{}".format(image, platforms[os][driver][arch][image]))
print("Using the following test platform(s)\n")
for logplat in testplatformslist:
print(" {}".format(logplat))
testplatforms = "|".join(testplatformslist)
# create the kitchen.yml file
with open('tmpkitchen.yml', 'w') as kitchenyml:
# first read the correct driver
print("Adding driver file drivers/{}-driver.yml\n".format(provider))
with open("drivers/{}-driver.yml".format(provider), 'r') as driverfile:
kitchenyml.write(driverfile.read())
# read the generic contents
with open("test-definitions/platforms-common.yml", 'r') as commonfile:
kitchenyml.write(commonfile.read())
# now open the requested test files
for f in glob.glob("test-definitions/{}.yml".format(testfiles)):
if f.lower().endswith("platforms-common.yml"):
print("Skipping common file\n")
with open(f, 'r') as infile:
print("Adding file {}\n".format(f))
kitchenyml.write(infile.read())
env = {}
if uservars:
env = load_user_env(ctx, provider, uservars)
env['TEST_PLATFORMS'] = testplatforms
if fips:
env['FIPS'] = 'true'
ctx.run("erb tmpkitchen.yml > kitchen.yml", env=env)
@task
def should_rerun_failed(_, runlog):
"""
Parse a log from kitchen run and see if we should rerun it (e.g. because of a network issue).
"""
test_result_re = re.compile(r'\d+\s+examples?,\s+(?P<failures>\d+)\s+failures?')
with open(runlog, 'r') as f:
text = f.read()
result = set(test_result_re.findall(text))
if result == {'0'} or result == set():
print("Seeing no failed tests in log, advising to rerun")
else:
raise Exit("Seeing some failed tests in log, not advising to rerun", 1)
def load_targets(_, targethash, selections):
returnlist = []
commentpattern = re.compile("^comment")
for selection in selections.split(","):
selectionpattern = re.compile("^{}$".format(selection))
matched = False
for key in targethash:
if commentpattern.match(key):
continue
if selectionpattern.search(key):
matched = True
if key not in returnlist:
returnlist.append(key)
else:
print("Skipping duplicate target key {} (matched search {})\n".format(key, selection))
if not matched:
raise Exit(message="Couldn't find any match for target {}\n".format(selection), code=7)
return returnlist
def load_user_env(_, provider, varsfile):
env = {}
commentpattern = re.compile("^comment")
if os.path.exists(varsfile):
with open("uservars.json", "r") as f:
vars = json.load(f)
for key, val in vars['global'].items():
if commentpattern.match(key):
continue
env[key] = val
for key, val in vars[provider].items():
if commentpattern.match(key):
continue
env[key] = val
return env
| 34.359447
| 138
| 0.571218
|
4a120732e81002c493e0862779855b81a4d3d239
| 8,829
|
py
|
Python
|
adversarial_models.py
|
laurensalvarez/Fooling-LIME-SHAP
|
e7505795b239406e11f838f27b2ca5da09139d6b
|
[
"MIT"
] | null | null | null |
adversarial_models.py
|
laurensalvarez/Fooling-LIME-SHAP
|
e7505795b239406e11f838f27b2ca5da09139d6b
|
[
"MIT"
] | null | null | null |
adversarial_models.py
|
laurensalvarez/Fooling-LIME-SHAP
|
e7505795b239406e11f838f27b2ca5da09139d6b
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
import sklearn
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
from sklearn.model_selection import train_test_split
from copy import deepcopy
import shap
class Adversarial_Model(object):
""" A scikit-learn style adversarial explainer base class for adversarial models. This accepts
a scikit learn style function f_obscure that serves as the _true classification rule_ for in distribution
data. Also, it accepts, psi_display: the classification rule you wish to display by explainers (e.g. LIME/SHAP).
Ideally, f_obscure will classify individual instances but psi_display will be shown by the explainer.
Parameters
----------
f_obscure : function
psi_display : function
"""
def __init__(self, f_obscure, psi_display):
self.f_obscure = f_obscure
self.psi_display = psi_display
self.cols = None
self.scaler = None
self.numerical_cols = None
def predict_proba(self, X, threshold=0.5):
""" Scikit-learn style probability prediction for the adversarial model.
Parameters
----------
X : np.ndarray
Returns
----------
A numpy array of the class probability predictions of the advesarial model.
"""
if self.perturbation_identifier is None:
raise NameError("Model is not trained yet, can't perform predictions.")
# generate the "true" predictions on the data using the "bad" model -- this is f in the paper
predictions_to_obscure = self.f_obscure.predict_proba(X)
# generate the "explain" predictions -- this is psi in the paper
predictions_to_explain_by = self.psi_display.predict_proba(X)
# in the case that we're only considering numerical columns
if self.numerical_cols:
X = X[:,self.numerical_cols]
# allow thresholding for finetuned control over psi_display and f_obscure
pred_probs = self.perturbation_identifier.predict_proba(X)
perturbation_preds = (pred_probs[:,1] >= threshold)
sol = np.where(np.array([perturbation_preds == 1,perturbation_preds==1]).transpose(), predictions_to_obscure, predictions_to_explain_by)
return sol
def predict(self, X):
""" Scikit-learn style prediction. Follows from predict_proba.
Parameters
----------
X : np.ndarray
Returns
----------
A numpy array containing the binary class predictions.
"""
pred_probs = self.predict_proba(X)
return np.argmax(pred_probs,axis=1)
def score(self, X_test, y_test):
""" Scikit-learn style accuracy scoring.
Parameters:
----------
X_test : X_test
y_test : y_test
Returns:
----------
A scalar value of the accuracy score on the task.
"""
return np.sum(self.predict(X_test)==y_test) / y_test.size
def get_column_names(self):
""" Access column names."""
if self.cols is None:
raise NameError("Train model with pandas data frame to get column names.")
return self.cols
def fidelity(self, X):
""" Get the fidelity of the adversarial model to the original predictions. High fidelity means that
we're predicting f along the in distribution data.
Parameters:
----------
X : np.ndarray
Returns:
----------
The fidelity score of the adversarial model's predictions to the model you're trying to obscure's predictions.
"""
return (np.sum(self.predict(X) == self.f_obscure.predict(X)) / X.shape[0])
class Adversarial_Lime_Model(Adversarial_Model):
""" Lime adversarial model. Generates an adversarial model for LIME style explainers using the Adversarial Model
base class.
Parameters:
----------
f_obscure : function
psi_display : function
perturbation_std : float
"""
def __init__(self, f_obscure, psi_display, perturbation_std=0.3):
super(Adversarial_Lime_Model, self).__init__(f_obscure, psi_display)
self.perturbation_std = perturbation_std
def train(self, X, y, feature_names, perturbation_multiplier=30, categorical_features=[], rf_estimators=100, estimator=None):
""" Trains the adversarial LIME model. This method trains the perturbation detection classifier to detect instances
that are either in the manifold or not if no estimator is provided.
Parameters:
----------
X : np.ndarray of pd.DataFrame
y : np.ndarray
perturbation_multiplier : int
cols : list
categorical_columns : list
rf_estimators : integer
estimator : func
"""
if isinstance(X, pd.DataFrame):
cols = [c for c in X]
X = X.values
elif not isinstance(X, np.ndarray):
raise NameError("X of type {} is not accepted. Only pandas dataframes or numpy arrays allowed".format(type(X)))
self.cols = feature_names
all_x, all_y = [], []
# loop over perturbation data to create larger data set
for _ in range(perturbation_multiplier):
perturbed_xtrain = np.random.normal(0,self.perturbation_std,size=X.shape)
p_train_x = np.vstack((X, X + perturbed_xtrain))
p_train_y = np.concatenate((np.ones(X.shape[0]), np.zeros(X.shape[0])))
all_x.append(p_train_x)
all_y.append(p_train_y)
all_x = np.vstack(all_x)
all_y = np.concatenate(all_y)
# it's easier to just work with numerical columns, so focus on them for exploiting LIME
self.numerical_cols = [feature_names.index(c) for c in feature_names if feature_names.index(c) not in categorical_features]
if self.numerical_cols == []:
raise NotImplementedError("We currently only support numerical column data. If your data set is all categorical, consider using SHAP adversarial model.")
# generate perturbation detection model as RF
xtrain = all_x[:,self.numerical_cols]
xtrain, xtest, ytrain, ytest = train_test_split(xtrain, all_y, test_size=0.2)
if estimator is not None:
self.perturbation_identifier = estimator.fit(xtrain, ytrain)
else:
self.perturbation_identifier = RandomForestClassifier(n_estimators=rf_estimators).fit(xtrain, ytrain)
ypred = self.perturbation_identifier.predict(xtest)
self.ood_training_task_ability = (ytest, ypred)
return self
class Adversarial_Kernel_SHAP_Model(Adversarial_Model):
""" SHAP adversarial model. Generates an adversarial model for SHAP style perturbations.
Parameters:
----------
f_obscure : function
psi_display : function
"""
def __init__(self, f_obscure, psi_display):
super(Adversarial_Kernel_SHAP_Model, self).__init__(f_obscure, psi_display)
def train(self, X, y, feature_names, background_distribution=None, perturbation_multiplier=10, n_samples=2e4, rf_estimators=100, n_kmeans=10, estimator=None):
""" Trains the adversarial SHAP model. This method perturbs the shap training distribution by sampling from
its kmeans and randomly adding features. These points get substituted into a test set. We also check to make
sure that the instance isn't in the test set before adding it to the out of distribution set. If an estimator is
provided this is used.
Parameters:
----------
X : np.ndarray
y : np.ndarray
features_names : list
perturbation_multiplier : int
n_samples : int or float
rf_estimators : int
n_kmeans : int
estimator : func
Returns:
----------
The model itself.
"""
if isinstance(X, pd.DataFrame):
X = X.values
elif not isinstance(X, np.ndarray):
raise NameError("X of type {} is not accepted. Only pandas dataframes or numpy arrays allowed".format(type(X)))
self.cols = feature_names
# This is the mock background distribution we'll pull from to create substitutions
if background_distribution is None:
background_distribution = shap.kmeans(X,n_kmeans).data
repeated_X = np.repeat(X, perturbation_multiplier, axis=0)
new_instances = []
equal = []
# We generate n_samples number of substutions
for _ in range(int(n_samples)):
i = np.random.choice(X.shape[0])
point = deepcopy(X[i, :])
# iterate over points, sampling and updating
for _ in range(X.shape[1]):
j = np.random.choice(X.shape[1])
point[j] = deepcopy(background_distribution[np.random.choice(background_distribution.shape[0]),j])
new_instances.append(point)
substituted_training_data = np.vstack(new_instances)
all_instances_x = np.vstack((repeated_X, substituted_training_data))
# make sure feature truly is out of distribution before labeling it
xlist = X.tolist()
ys = np.array([1 if substituted_training_data[val,:].tolist() in xlist else 0\
for val in range(substituted_training_data.shape[0])])
all_instances_y = np.concatenate((np.ones(repeated_X.shape[0]),ys))
xtrain,xtest,ytrain,ytest = train_test_split(all_instances_x, all_instances_y, test_size=0.2)
if estimator is not None:
self.perturbation_identifier = estimator.fit(xtrain,ytrain)
else:
self.perturbation_identifier = RandomForestClassifier(n_estimators=rf_estimators).fit(xtrain,ytrain)
ypred = self.perturbation_identifier.predict(xtest)
self.ood_training_task_ability = (ytest, ypred)
return self
| 32.94403
| 159
| 0.740854
|
4a1207390f6bc7c246b17a75e6f694587cfd4df9
| 5,547
|
py
|
Python
|
mars/services/scheduling/supervisor/tests/test_manager.py
|
chaokunyang/mars
|
f8d06b833603525133a62f099af159cec3ff5570
|
[
"Apache-2.0"
] | 1
|
2021-09-03T18:52:06.000Z
|
2021-09-03T18:52:06.000Z
|
mars/services/scheduling/supervisor/tests/test_manager.py
|
chaokunyang/mars
|
f8d06b833603525133a62f099af159cec3ff5570
|
[
"Apache-2.0"
] | null | null | null |
mars/services/scheduling/supervisor/tests/test_manager.py
|
chaokunyang/mars
|
f8d06b833603525133a62f099af159cec3ff5570
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
from collections import defaultdict
from typing import List, Tuple
import pytest
import mars.oscar as mo
from mars.services.cluster import MockClusterAPI
from mars.services.scheduling.supervisor import SubtaskQueueingActor, \
SubtaskManagerActor, GlobalSlotManagerActor
from mars.services.scheduling.worker import SubtaskExecutionActor
from mars.services.subtask import Subtask, SubtaskResult, SubtaskStatus
from mars.services.task.supervisor.manager import TaskManagerActor
from mars.typing import BandType
class MockTaskManagerActor(mo.Actor):
def __init__(self):
self._results = dict()
def set_subtask_result(self, result: SubtaskResult):
self._results[result.subtask_id] = result
def get_result(self, subtask_id: str) -> SubtaskResult:
return self._results[subtask_id]
class MockSubtaskQueueingActor(mo.Actor):
def __init__(self):
self._subtasks = dict()
self._error = None
def add_subtasks(self, subtasks: List[Subtask], priorities: List[Tuple]):
if self._error is not None:
raise self._error
for subtask, priority in zip(subtasks, priorities):
self._subtasks[subtask.subtask_id] = (subtask, priority)
def submit_subtasks(self, band: BandType, limit: int):
pass
def remove_queued_subtasks(self, subtask_ids: List[str]):
for stid in subtask_ids:
self._subtasks.pop(stid)
def set_error(self, error):
self._error = error
class MockSubtaskExecutionActor(mo.Actor):
    """Execution-actor stub: each "run" is a 20-second asyncio sleep task,
    so the test can cancel it and observe cancellation propagating."""
    def __init__(self):
        # subtask_id -> {band_name -> asyncio.Task}
        self._subtask_aiotasks = defaultdict(dict)
    async def run_subtask(self, subtask: Subtask, band_name: str, supervisor_address: str):
        # The long sleep stands in for real subtask work; tests cancel it
        # long before it would complete on its own.
        task = self._subtask_aiotasks[subtask.subtask_id][band_name] = \
            asyncio.create_task(asyncio.sleep(20))
        return task
    def cancel_subtask(self, subtask_id: str, kill_timeout: int = 5):
        # Cancel the fake work on every band the subtask was submitted to.
        for task in self._subtask_aiotasks[subtask_id].values():
            task.cancel()
    async def wait_subtask(self, subtask_id: str, band_name: str):
        try:
            # ``yield`` here presumably lets the oscar actor framework await
            # the stored task without blocking the actor — TODO confirm.
            # CancelledError is swallowed so callers' gather() ends cleanly.
            yield self._subtask_aiotasks[subtask_id][band_name]
        except asyncio.CancelledError:
            pass
@pytest.fixture
async def actor_pool():
    """Spin up a local actor pool with mocked supervisor-side actors plus
    the real SubtaskManagerActor under test; tear everything down after."""
    pool = await mo.create_actor_pool('127.0.0.1', n_process=0)
    async with pool:
        session_id = 'test_session'
        await MockClusterAPI.create(pool.external_address)
        # Mocks are registered under the *real* actors' uids so the manager
        # resolves and talks to them transparently.
        queue_ref = await mo.create_actor(
            MockSubtaskQueueingActor, uid=SubtaskQueueingActor.gen_uid(session_id),
            address=pool.external_address)
        slots_ref = await mo.create_actor(
            GlobalSlotManagerActor, uid=GlobalSlotManagerActor.default_uid(),
            address=pool.external_address)
        task_manager_ref = await mo.create_actor(
            MockTaskManagerActor, uid=TaskManagerActor.gen_uid(session_id),
            address=pool.external_address)
        execution_ref = await mo.create_actor(
            MockSubtaskExecutionActor,
            uid=SubtaskExecutionActor.default_uid(),
            address=pool.external_address)
        submitter_ref = await mo.create_actor(
            SubtaskManagerActor, session_id, uid=SubtaskManagerActor.gen_uid(session_id),
            address=pool.external_address)
        yield pool, session_id, execution_ref, submitter_ref, queue_ref, task_manager_ref
        # Only the slot manager needs explicit destruction; the rest dies
        # with the pool context.
        await mo.destroy_actor(slots_ref)
        await MockClusterAPI.cleanup(pool.external_address)
@pytest.mark.asyncio
async def test_subtask_manager(actor_pool):
    """Cancelling queued/submitted subtasks marks them cancelled, and a
    queueing failure is converted into an errored SubtaskResult."""
    pool, session_id, execution_ref, manager_ref, queue_ref, task_manager_ref = actor_pool
    subtask1 = Subtask('subtask1', session_id)
    subtask2 = Subtask('subtask2', session_id)
    await manager_ref.add_subtasks([subtask1, subtask2], [(1,), (2,)])
    # subtask1 runs on two bands at once; subtask2 stays queued only.
    await manager_ref.submit_subtask_to_band(
        subtask1.subtask_id, (pool.external_address, 'gpu-0'))
    await manager_ref.submit_subtask_to_band(
        subtask1.subtask_id, (pool.external_address, 'gpu-1'))
    await manager_ref.cancel_subtasks([subtask1.subtask_id, subtask2.subtask_id])
    # Both band-level mock tasks must settle (cancelled) within 10 seconds.
    await asyncio.wait_for(
        asyncio.gather(
            execution_ref.wait_subtask(subtask1.subtask_id, 'gpu-0'),
            execution_ref.wait_subtask(subtask1.subtask_id, 'gpu-1'),
        ), timeout=10)
    assert (await task_manager_ref.get_result(subtask1.subtask_id)).status \
        == SubtaskStatus.cancelled
    assert (await task_manager_ref.get_result(subtask2.subtask_id)).status \
        == SubtaskStatus.cancelled
    subtask3 = Subtask('subtask3', session_id)
    # Arm the mock queue to raise; add_subtasks is fire-and-forget (.tell),
    # so sleep briefly before checking the recorded error result.
    await queue_ref.set_error(ValueError())
    await manager_ref.add_subtasks.tell([subtask3], [(3,)])
    await asyncio.sleep(0.1)
    subtask3_result = await task_manager_ref.get_result(subtask3.subtask_id)
    assert subtask3_result.status == SubtaskStatus.errored
    assert isinstance(subtask3_result.error, ValueError)
| 37.993151
| 91
| 0.718406
|
4a1207d1cafec203c3f8c68063e5d065204ca7ef
| 1,552
|
py
|
Python
|
cogdl/tasks/__init__.py
|
kwyoke/cogdl
|
df919b4fc7db40f8b035665edbcc7ed59f9d448e
|
[
"MIT"
] | 1
|
2020-07-20T07:14:50.000Z
|
2020-07-20T07:14:50.000Z
|
cogdl/tasks/__init__.py
|
LONG-9621/cogdl
|
5e1d70240a3bced319b0f0e04af79acb72f65bed
|
[
"MIT"
] | null | null | null |
cogdl/tasks/__init__.py
|
LONG-9621/cogdl
|
5e1d70240a3bced319b0f0e04af79acb72f65bed
|
[
"MIT"
] | 1
|
2021-06-17T02:44:09.000Z
|
2021-06-17T02:44:09.000Z
|
import argparse
import importlib
import os
import torch.nn as nn
from .base_task import BaseTask
# Global mapping of task name -> task class, populated by @register_task.
TASK_REGISTRY = {}
def register_task(name):
    """
    New task types can be added to cogdl with the :func:`register_task`
    function decorator.
    For example::
        @register_task('node_classification')
        class NodeClassification(BaseTask):
            (...)
    Args:
        name (str): the name of the task
    """
    def _decorator(cls):
        # Reject duplicate names first, then enforce the BaseTask contract.
        if name in TASK_REGISTRY:
            raise ValueError("Cannot register duplicate task ({})".format(name))
        if not issubclass(cls, BaseTask):
            raise ValueError(
                "Task ({}: {}) must extend BaseTask".format(name, cls.__name__)
            )
        TASK_REGISTRY[name] = cls
        # Return the class unchanged so the decorator is transparent.
        return cls
    return _decorator
# automatically import any Python files in the tasks/ directory
for file in os.listdir(os.path.dirname(__file__)):
    if file.endswith(".py") and not file.startswith("_"):
        task_name = file[: file.find(".py")]
        # Importing each module runs its @register_task decorators,
        # populating TASK_REGISTRY as a side effect.
        module = importlib.import_module("cogdl.tasks." + task_name)
def build_task(args, dataset=None, model=None):
    """Instantiate the task registered under ``args.task``.

    ``dataset`` and ``model`` are forwarded as keyword arguments only when
    they are provided, matching the registered class's expectations.
    """
    task_cls = TASK_REGISTRY[args.task]
    extra = {}
    if dataset is not None:
        extra["dataset"] = dataset
    if model is not None:
        extra["model"] = model
    return task_cls(args, **extra)
| 28.218182
| 80
| 0.657216
|
4a1208909e78ca0e6337e10cf7fb06c8082b5306
| 556
|
py
|
Python
|
midRevisao/produto.py
|
vitorkaio/python-revision
|
f62937c829355cb5a46725ff14fc79ee8be1796d
|
[
"MIT"
] | null | null | null |
midRevisao/produto.py
|
vitorkaio/python-revision
|
f62937c829355cb5a46725ff14fc79ee8be1796d
|
[
"MIT"
] | null | null | null |
midRevisao/produto.py
|
vitorkaio/python-revision
|
f62937c829355cb5a46725ff14fc79ee8be1796d
|
[
"MIT"
] | null | null | null |
#coding: utf-8
class Produto:
    """Simple product record with name, description and value accessors."""

    def __init__(self, name, des, value):
        # Name-mangled private attributes; exposed via get/set methods only.
        self.__name = name
        self.__des = des
        self.__value = value

    def setName(self, name):
        """Replace the product name."""
        self.__name = name

    def getName(self):
        """Return the product name."""
        return self.__name

    def setDes(self, des):
        """Replace the product description."""
        self.__des = des

    def getDes(self):
        """Return the product description."""
        return self.__des

    def setValue(self, value):
        """Replace the product value (price)."""
        self.__value = value

    def getValue(self):
        """Return the product value (price)."""
        return self.__value

    def toDict(self):
        """Return a plain-dict view of the product."""
        return dict(name=self.__name, des=self.__des, value=self.__value)
| 19.857143
| 74
| 0.607914
|
4a1208a0ea17b5672f5567b8d3494c11525203ce
| 5,660
|
py
|
Python
|
Lab_5/webservice.py
|
Milka03/Linux-for-Embedded-Systems
|
447a4a833f28cb8b1e2674126f546d6d40973369
|
[
"MIT"
] | null | null | null |
Lab_5/webservice.py
|
Milka03/Linux-for-Embedded-Systems
|
447a4a833f28cb8b1e2674126f546d6d40973369
|
[
"MIT"
] | null | null | null |
Lab_5/webservice.py
|
Milka03/Linux-for-Embedded-Systems
|
447a4a833f28cb8b1e2674126f546d6d40973369
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from flask import Flask, flash, request, redirect, url_for, send_from_directory
from werkzeug.utils import secure_filename
from mpd import MPDClient
import os
import threading
ALLOWED_EXTENSIONS = {'mp3'}  # only mp3 uploads are accepted
app = Flask(__name__)
app.secret_key = 'LINES'  # required by flash()
path = '/root/music/'  # directory uploads are saved into (MPD music dir)
stopped = False  # module-level play/pause toggle used by /playpause
client = MPDClient()
client_lock = threading.Lock()
# Connect to the local MPD daemon once at import time and rescan the library.
with client_lock:
    client.connect("localhost", 6600)
    client.update()
def reconnect():
    """Ensure the global MPD client connection is alive.

    ``ping`` raises when the connection has dropped; in that case we
    reconnect and rescan the music database before continuing.
    """
    try:
        client.ping()
    except Exception:
        client.connect("localhost", 6600)
        client.update()
@app.route("/")
def hello():
reconnect()
output = ""
for song in client.playlistinfo():
if(output != ""):
output += "<br>"
output += song["file"]
return '''
<!doctype html>
<head>
<meta charset="UTF-8">
</head>
<body>
<form action="/upload" method="get">
<input type="submit" value="Upload song" name="Submit"/>
</form>
<form action="/reload">
<input type=submit value="|<" formaction="/previous">
<input type=submit value="> ||" formaction="/playpause">
<input type=submit value=">|" formaction="/next">
<input type=submit value="Volume Down" formaction="/volumedown">
<input type=submit value="Volume Up" formaction="/volumeup">
</form>
<form action="/download">
<input type=text name=file>
<input type=submit value=Download formaction="/download">
<input type=submit value="Move Up" formaction="/up">
<input type=submit value="Move Down" formaction="/down">
<input type=submit value=Delete formaction="/delete">
</form>
''' + "<h4>Current song:</h4>" + client.currentsong()["file"] + "<br><br>" + "<h4>Playlist:</h4>" + \
output + '''</body>'''
def allowed_file(filename):
    """Return True iff *filename* has an extension in ALLOWED_EXTENSIONS.

    Fix: the original indexed ``rsplit(".", 1)[1]`` unconditionally, which
    raises IndexError for a filename with no dot; guard for that first.
    """
    return "." in filename and \
        filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route('/upload', methods=['GET', 'POST'])
def upload_file():
    """GET: show the upload form. POST: save an mp3 into the music
    directory and append it to the MPD playlist."""
    if request.method == 'POST':
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # An empty filename means the form was submitted with no selection.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # secure_filename strips path components / unsafe characters.
            filename = secure_filename(file.filename)
            file.save(os.path.join(path, filename))
            reconnect()
            client.add(filename)
            return redirect(url_for("hello"))
        else:
            return "Error uploading file"
    return '''
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form method=post enctype=multipart/form-data>
      <input type=file name=file>
      <input type=submit value=Upload>
    </form>
    '''
#------- Manage Playlist ------#
@app.route("/download")
def download():
filename = request.args.get('file')
if filename in os.listdir(path):
return send_from_directory(path, filename, as_attachment=True)
else:
return redirect(url_for("hello"))
@app.route("/up")
def up():
filename = request.args.get('file')
if filename in os.listdir(path):
reconnect()
playlist = client.playlistinfo()
pos = 0
to = len(playlist) - 1
for song in playlist:
if song["file"] == filename :
break
pos += 1
if pos > 0:
to = pos - 1
reconnect()
client.move(pos, to)
return redirect(url_for("hello"))
@app.route("/down")
def down():
filename = request.args.get('file')
if filename in os.listdir(path):
reconnect()
playlist = client.playlistinfo()
pos = 0
to = 0
for song in playlist:
if song["file"] == filename :
break
pos += 1
if pos < len(playlist) - 1:
to = pos + 1
reconnect()
client.move(pos, to)
return redirect(url_for("hello"))
@app.route("/delete")
def delete():
filename = request.args.get('file')
if filename in os.listdir(path):
reconnect()
pos = 0
for song in client.playlistinfo():
if song["file"] == filename :
break
pos += 1
reconnect()
client.delete(pos)
return redirect(url_for("hello"))
#------- Current Song Actions ------#
@app.route("/reload")
def reload():
return redirect(url_for("hello"))
@app.route("/previous")
def previous():
reconnect()
client.previous()
return redirect(url_for("hello"))
@app.route("/next")
def next():
reconnect()
client.next()
return redirect(url_for("hello"))
@app.route("/playpause")
def playpause():
global stopped
if stopped == False :
stopped = True
reconnect()
client.pause()
else :
stopped = False
reconnect()
client.play()
return redirect(url_for("hello"))
@app.route("/volumedown")
def volumedown():
reconnect()
volume = (int)(client.status()['volume'])
if volume >= 20 :
reconnect()
client.setvol(volume - 20)
return redirect(url_for("hello"))
@app.route("/volumeup")
def volumeup():
reconnect()
volume = (int)(client.status()['volume'])
if volume <= 80 :
reconnect()
client.setvol(volume + 20)
return redirect(url_for("hello"))
if __name__ == '__main__':
    # NOTE(review): serving HTTP on port 22 (normally SSH) looks deliberate
    # for this embedded-lab setup, but confirm it does not clash with sshd.
    app.run(debug=True, port=22, host='0.0.0.0')
| 27.211538
| 109
| 0.566784
|
4a1209671932e1b46c2413888793a005408acab6
| 2,502
|
py
|
Python
|
hubmap/create_minimal_dataset.py
|
hubmapconsortium/cross_modality_query
|
9d38359a02c0ee7c8f74a24bfad4ff19530df6ea
|
[
"MIT"
] | null | null | null |
hubmap/create_minimal_dataset.py
|
hubmapconsortium/cross_modality_query
|
9d38359a02c0ee7c8f74a24bfad4ff19530df6ea
|
[
"MIT"
] | 29
|
2021-01-05T16:37:42.000Z
|
2022-03-29T17:57:08.000Z
|
hubmap/create_minimal_dataset.py
|
hubmapconsortium/cross_modality_query
|
9d38359a02c0ee7c8f74a24bfad4ff19530df6ea
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from argparse import ArgumentParser
from pathlib import Path
from typing import List
import pandas as pd
if __name__ == "__main__":
import django
django.setup()
def make_mini_cell_df(file):
    """Write a "mini_<stem>.hdf5" containing the first 1000 rows of
    *file*'s "cell" table and return the list of kept cell ids."""
    with pd.HDFStore(file) as store:
        cell_df = store.get("cell")
    mini_cell_df = cell_df.head(1000).copy()
    # Some modalities keep the cell id in the index rather than a column.
    if "cell_id" not in mini_cell_df.columns:
        mini_cell_df["cell_id"] = mini_cell_df.index
    cell_ids = list(mini_cell_df["cell_id"].unique())
    new_file = "mini_" + file.stem + ".hdf5"
    with pd.HDFStore(new_file) as store:
        # codex is written in fixed format; others as queryable tables
        # (format="t") — presumably codex contains table-incompatible
        # dtypes; TODO confirm.
        if file.stem == "codex":
            store.put("cell", mini_cell_df)
        else:
            store.put("cell", mini_cell_df, format="t")
    return cell_ids
def make_mini_quant_df(file, cell_ids):
    """Write a reduced copy of the "quant" table of *file*.

    Keeps the first 1000 genes (columns), scans rows in chunks of 1000 and
    retains only chunks containing at least one id from *cell_ids*.
    Returns the list of kept gene ids.
    """
    with pd.HDFStore(file) as store:
        # Fix: load the table once; the original called store.get("quant")
        # twice (once for columns, once for the index), reading the whole
        # table from disk twice.
        quant_df = store.get("quant")
        genes = list(quant_df.columns)[:1000]
        chunks = len(quant_df.index) // 1000 + 1
        # Fix: set for O(1) membership instead of O(len(cell_ids)) per row.
        cell_id_set = set(cell_ids)
        filtered_chunks = []
        for chunk_no in range(chunks):
            print("Loading chunk " + str(chunk_no) + " out of " + str(chunks))
            chunk = store.select("quant", start=chunk_no * 1000, stop=(chunk_no + 1) * 1000)
            filtered_chunk = chunk[genes]
            print(cell_ids[0])
            # Fix: the original inner loop reused ``i``, shadowing the
            # chunk counter; use any() over the set instead.
            if any(idx in cell_id_set for idx in chunk.index):
                # NOTE(review): ``filtered_chunk[cell_ids]`` selects
                # *columns* by cell id even though columns are genes here —
                # looks like a row selection was intended; preserved as-is,
                # confirm against the actual data layout.
                filtered_chunk = filtered_chunk[cell_ids]
                filtered_chunks.append(filtered_chunk)
    filtered_quant_df = pd.concat(filtered_chunks)
    new_file = "mini_" + file.stem + ".hdf5"
    with pd.HDFStore(new_file) as store:
        store.put("quant", filtered_quant_df)
    return genes
def make_mini_pval_df(file, gene_ids):
    """Write a "mini_<stem>.hdf5" "p_values" table restricted to the rows
    whose gene_id is in *gene_ids*."""
    with pd.HDFStore(file) as store:
        pval_df = store.get("p_values")
    # Index by gene_id (kept as a column too) so .loc can filter by gene.
    pval_df = pval_df.set_index("gene_id", drop=False)
    filtered_pval_df = pval_df.loc[gene_ids]
    new_file = "mini_" + file.stem + ".hdf5"
    with pd.HDFStore(new_file) as store:
        store.put("p_values", filtered_pval_df)
    return
def main(hdf_files: List[Path]):
    """Create a mini version of each HDF5 file; for the atac/rna
    modalities also reduce the quant and p-value tables."""
    for hdf_file in hdf_files:
        kept_cells = make_mini_cell_df(hdf_file)
        if hdf_file.stem not in ("atac", "rna"):
            continue
        kept_genes = make_mini_quant_df(hdf_file, kept_cells)
        make_mini_pval_df(hdf_file, kept_genes)
if __name__ == "__main__":
p = ArgumentParser()
p.add_argument("hdf_files", type=Path, nargs="+")
args = p.parse_args()
main(args.hdf_files)
| 27.8
| 78
| 0.610312
|
4a1209a9c491053cea3157070a72d2573442c045
| 1,147
|
py
|
Python
|
auth-api/migrations/versions/422daf97fd19_added_content_type_to_docs.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | null | null | null |
auth-api/migrations/versions/422daf97fd19_added_content_type_to_docs.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | null | null | null |
auth-api/migrations/versions/422daf97fd19_added_content_type_to_docs.py
|
argush3/sbc-auth
|
96a4de3a4358b3158540aea8c4d99e06909793f2
|
[
"Apache-2.0"
] | 1
|
2019-07-25T18:20:41.000Z
|
2019-07-25T18:20:41.000Z
|
"""added content type to docs
Revision ID: 422daf97fd19
Revises: 598dd27fc660
Create Date: 2020-06-09 07:58:20.103049
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import column, table
from sqlalchemy import Integer, String
# revision identifiers, used by Alembic.
revision = '422daf97fd19'
down_revision = '598dd27fc660'
branch_labels = None
depends_on = None
def upgrade():
    """Add documents.content_type, backfill existing rows as HTML, and
    seed the affidavit PDF document row."""
    # Nullable so existing rows remain valid while the column is added.
    op.add_column('documents', sa.Column('content_type', sa.String(length=50), nullable=True))
    # Backfill: every pre-existing document was stored as HTML.
    op.execute("update documents set content_type='text/html'")
    # Lightweight table construct for bulk_insert (no ORM model needed).
    documents = table('documents',
                      column('version_id', String),
                      column('type', String),
                      column('content', String),
                      column('content_type', String))
    file_name = "affidavit_v1.pdf"
    op.bulk_insert(
        documents,
        [
            {'version_id': 'a1', 'type': 'affidavit', 'content': file_name, 'content_type': 'application/pdf'}
        ]
    )
def downgrade():
    """Reverse upgrade(): delete the seeded 'a1' row, drop content_type."""
    op.execute("DELETE FROM DOCUMENTS WHERE version_id='a1'")
    op.drop_column('documents', 'content_type')
| 27.309524
| 110
| 0.646033
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.