function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def __init__(self, base_dir):
    """Create the storage rooted at *base_dir*.

    Lays out the blobs/, links/, locks/ and db/ subdirectories (created
    if missing), then opens a transactional Berkeley DB environment and
    the 'metadata' hash database used by the other methods.
    """
    self.base_dir = base_dir
    self.blobs_dir = os.path.join(base_dir, 'blobs')
    self.links_dir = os.path.join(base_dir, 'links')
    self.locks_dir = os.path.join(base_dir, 'locks')
    self.db_dir = os.path.join(base_dir, 'db')
    _makedirs(self.blobs_dir)
    _makedirs(self.links_dir)
    _makedirs(self.locks_dir)
    _makedirs(self.db_dir)

    # https://docs.oracle.com/cd/E17076_05/html/programmer_reference/transapp_env_open.html
    self.db_env = bsddb3.db.DBEnv()
    try:
        self.db_env.open(
            self.db_dir,
            bsddb3.db.DB_CREATE
            | bsddb3.db.DB_INIT_LOCK
            | bsddb3.db.DB_INIT_LOG
            | bsddb3.db.DB_INIT_MPOOL
            | bsddb3.db.DB_INIT_TXN
            | bsddb3.db.DB_REGISTER,
        )
    except bsddb3.db.DBRunRecoveryError:
        # DB_REGISTER detected an unclean shutdown; recovery is expected
        # to have been performed by the process entry point already.
        raise RuntimeError(
            'DB requires recovery! It should have run in .run.main...'
        )

    self.db = bsddb3.db.DB(self.db_env)
    self.db.open(
        'metadata',
        dbtype=bsddb3.db.DB_HASH,
        flags=bsddb3.db.DB_CREATE | bsddb3.db.DB_AUTO_COMMIT,
    )
def store(
self,
name,
data,
version,
size=0,
compressed=False,
digest=None,
logical_size=None, | sio2project/filetracker | [
7,
12,
7,
1,
1346586491
] |
def delete(self, name, version, _lock=True):
    """Removes a file from the storage.

    Args:
        name: name of the file being deleted.
            May contain slashes that are treated as path separators.
        version: file "version" that is meant to be deleted
            If the file that is stored has newer version than provided,
            it will not be deleted.
        _lock: whether or not to acquire locks
            This is for internal use only,
            normal users should always leave it set to True.

    Returns whether or not the file has been deleted.
    """
    link_path = self._link_path(name)
    # Either a real per-link flock or a no-op context manager.
    if _lock:
        file_lock = _exclusive_lock(self._lock_path('links', name))
    else:
        file_lock = _no_lock()
    with file_lock:
        logger.debug('Acquired or inherited lock for link %s.', name)
        if not _path_exists(link_path):
            raise FiletrackerFileNotFoundError
        # Refuse to delete when a newer version is already stored.
        if _file_version(link_path) > version:
            logger.info(
                'Tried to delete newer version of %s (%d < %d), ignoring.',
                name,
                version,
                _file_version(link_path),
            )
            return False
        digest = self._digest_for_link(name)
        # Lock ordering: link lock first, then blob lock.
        with _exclusive_lock(self._lock_path('blobs', digest)):
            logger.debug('Acquired lock for blob %s.', digest)
            should_delete_blob = False
            # Decrement the blob's link refcount inside a DB transaction;
            # the blob file itself is unlinked only after the commit.
            with self._db_transaction() as txn:
                logger.debug('Started DB transaction (deleting link).')
                digest_bytes = digest.encode()
                link_count = self.db.get(digest_bytes, txn=txn)
                if link_count is None:
                    raise RuntimeError("File exists but has no key in db")
                link_count = int(link_count)
                if link_count == 1:
                    # Last reference: drop both DB keys and remember to
                    # delete the blob file once the txn committed.
                    logger.debug('Deleting last link to blob %s.', digest)
                    self.db.delete(digest_bytes, txn=txn)
                    self.db.delete(
                        '{}:logical_size'.format(digest).encode(), txn=txn
                    )
                    should_delete_blob = True
                else:
                    new_count = str(link_count - 1).encode()
                    self.db.put(digest_bytes, new_count, txn=txn)
                logger.debug('Committing DB transaction (deleting link).')
            logger.debug('Committed DB transaction (deleting link).')
            os.unlink(link_path)
            logger.debug('Deleted link %s.', name)
            if should_delete_blob:
                os.unlink(self._blob_path(digest))
        logger.debug('Released lock for blob %s.', digest)
    logger.debug('Released (or gave back) lock for link %s.', name)
    return True
def logical_size(self, name):
    """Return the logical (pre-compression) size of file *name*.

    Raises RuntimeError when the blob has no :logical_size key in the
    metadata DB (a sign the DB needs recovery).
    """
    digest = self._digest_for_link(name)
    raw = self.db.get('{}:logical_size'.format(digest).encode())
    if not raw:
        raise RuntimeError('Blob doesn\'t have :logical_size in DB: try recovering')
    return int(raw.decode())
def _blob_path(self, digest):
    """Path of the blob file for *digest*, sharded by its first two characters."""
    shard = digest[0:2]
    return os.path.join(self.blobs_dir, shard, digest)
def _db_transaction(self):
    """Context-manager generator wrapping a DB transaction.

    Yields a freshly begun transaction, commits it when the managed
    block exits cleanly, and aborts it (then re-raises) on any error.
    """
    txn = self.db_env.txn_begin()
    try:
        yield txn
    except BaseException:
        # Explicit BaseException (not a bare except): abort on *any*
        # exception, including KeyboardInterrupt/SystemExit, so the
        # transaction never leaks; then propagate.
        txn.abort()
        raise
    else:
        txn.commit()
def __init__(self, data, size):
    """Hold raw *data* of *size* bytes that has not been saved anywhere yet."""
    self._data = data
    self._size = size
    # Path of the most recent on-disk copy, if any.
    self.current_path = None
    # True when current_path points at a temporary file this object owns.
    self.saved_in_temp = False
def __exit__(self, _exc_type, _exc_value, _traceback):
    """Remove the file if the last save left it in a temporary location."""
    if not self.saved_in_temp:
        return
    os.unlink(self.current_path)
def _copy_stream(src, dest, length=0):
    """Copy *src* to *dest*, optionally limited to *length* bytes.

    Similar to shutil.copyfileobj, but supports limiting data size.
    As for why this is required, refer to
    https://www.python.org/dev/peps/pep-0333/#input-and-error-streams
    Yes, there are WSGI implementations which do not support EOFs, and
    believe me, you don't want to debug this.

    Args:
        src: source file-like object
        dest: destination file-like object
        length: optional file size hint
            If not 0, at most `length` bytes will be copied.
            If 0, write will continue until EOF is encountered.
    """
    if length == 0:
        shutil.copyfileobj(src, dest)
        return

    bytes_left = length
    while bytes_left > 0:
        buf = src.read(min(_BUFFER_SIZE, bytes_left))
        if not buf:
            # Premature EOF: stop instead of looping forever on empty reads.
            break
        dest.write(buf)
        # Decrement by what was actually read — file-like objects may
        # return fewer bytes than requested (short read), and counting
        # the requested size would under-copy the remainder.
        bytes_left -= len(buf)
def _create_file_dirs(file_path):
    """Ensure the directory that should contain *file_path* exists."""
    _makedirs(os.path.dirname(file_path))
def _file_version(path):
    """Version of *path*: its mtime (lstat, so symlinks are not followed)."""
    return os.lstat(path).st_mtime
def _exclusive_lock(path):
    """A simple wrapper for fcntl exclusive lock.

    Generator used as a context manager (presumably decorated with
    contextlib.contextmanager outside this view — TODO confirm):
    acquires an exclusive flock on *path*, yields while holding it and
    always releases the lock and closes the descriptor on exit.

    Raises ConcurrentModificationError when the lock cannot be acquired
    after _LOCK_RETRIES non-blocking attempts.
    """
    _create_file_dirs(path)
    # 0o600: lock files are private to the owning user.
    fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)

    try:
        retries_left = _LOCK_RETRIES
        success = False

        while retries_left > 0:
            # try to acquire the lock in a loop
            # because gevent doesn't treat flock as IO,
            # so waiting here without yielding would get the worker killed
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                success = True
                break
            except IOError as e:
                if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
                    # This yields execution to other green threads.
                    gevent.sleep(_LOCK_SLEEP_TIME_S)
                    retries_left -= 1
                else:
                    raise

        if success:
            yield
        else:
            raise ConcurrentModificationError(path)
    finally:
        # Unlock only if we actually acquired; always close the fd.
        if success:
            fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)
def _no_lock():
    """Placeholder context manager that guards nothing.

    Used for conditional locking, when the caller opted out of a real lock.
    """
    yield
def __init__(self, nodeName):
    """Constructor.

    :param nodeName: short name describing this node; used to construct the
        nodePath. Uniqueness among siblings is currently not enforced and
        may change. The nodeName may not contain slashes (/).
    """
    check_class(nodeName, six.string_types, allow_none=False)
    assert nodeName, "nodeName may not be empty"
    assert '/' not in nodeName, "nodeName may not contain slashes"

    self._nodeName = str(nodeName)
    self._parentItem = None
    self._model = None
    self._childItems = []  # the fetched children
    self._nodePath = self._constructNodePath()
def __str__(self):
    """Short representation: class name plus node path."""
    className = type(self).__name__
    return "<{}: {}>".format(className, self.nodePath)
def model(self):
    """The ConfigTreeModel this item belongs to.

    When unset, the parent's model is looked up once and cached here, so
    make sure some ancestor (typically the invisible root item, set in the
    model constructor) holds a model reference.
    """
    if self._model is None:
        parent = self.parentItem
        if parent is not None:
            self._model = parent.model
    return self._model
def model(self, value):
    """Set the ConfigTreeModel this item belongs to."""
    self._model = value
def decoration(self):
    """Optional decoration (e.g. an icon) for this item.

    The base implementation supplies no decoration.
    """
    return None
def font(self):
    """Font for displaying this item's text in the tree.

    The base implementation returns None, meaning the default font.
    """
    return None
def backgroundBrush(self):
    """Brush for the background role in the tree.

    The base implementation returns None, meaning the default brush.
    """
    return None
def foregroundBrush(self):
    """Brush for the foreground role in the tree.

    The base implementation returns None, meaning the default brush.
    """
    return None
def sizeHint(self):
    """Size hint for displaying the item in the tree.

    Should return a QSize object or None; the base implementation gives
    no hint (None).
    """
    return None
def nodeName(self):
    """The node name; used to construct the nodePath."""
    return self._nodeName
def nodeName(self, nodeName):
    """Rename this node and rebuild the node paths of the whole subtree."""
    assert '/' not in nodeName, "nodeName may not contain slashes"
    self._nodeName = nodeName
    newPath = self._constructNodePath()
    self._recursiveSetNodePath(newPath)
def nodePath(self):
    """The sequence of nodeNames from the root to this node, slash-separated."""
    return self._nodePath
def parentItem(self):
    """The parent item (None for the root)."""
    return self._parentItem
def parentItem(self, value):
    """Set the parent item and rebuild the node paths of this subtree."""
    self._parentItem = value
    newPath = self._constructNodePath()
    self._recursiveSetNodePath(newPath)
def childItems(self):
    """List of the children fetched so far."""
    return self._childItems
def nChildren(self):  # TODO: numChildren
    """Number of (fetched) children."""
    return len(self.childItems)
def childByNodeName(self, nodeName):
    """Return the first direct child whose nodeName equals *nodeName*.

    Raises IndexError when no such child exists.
    """
    assert '/' not in nodeName, "nodeName can not contain slashes"
    match = next((c for c in self.childItems if c.nodeName == nodeName), None)
    if match is None:
        raise IndexError("No child item found having nodeName: {}".format(nodeName))
    return match
def _auxGetByPath(parts, item):
    """Recursively resolve *parts* (a split node path) starting at *item*."""
    if not parts:
        return item
    head = parts[0]
    tail = parts[1:]
    if head == '':
        # Two consecutive slashes. Just go one level deeper.
        return _auxGetByPath(tail, item)
    return _auxGetByPath(tail, item.childByNodeName(head))
def childNumber(self):
    """Index of this node in its parent's childItems (0 for a root).

    O(n) in the number of siblings.  # TODO: store row number in the items?
    """
    parent = self.parentItem
    if parent is None:
        return 0
    return parent.childItems.index(self)
def removeChild(self, position):
    """Remove the child at index *position*.

    The child's finalize() is called first so it can close its resources
    before being dropped from the children list.
    """
    # Upper bound is strict: position == len(childItems) would pass the old
    # `<=` check and then raise IndexError on the element access below.
    assert 0 <= position < len(self.childItems), \
        "position should be 0 <= {} < {}".format(position, len(self.childItems))
    self.childItems[position].finalize()
    self.childItems.pop(position)
def logBranch(self, indent=0, level=logging.DEBUG):
    """Log this item and all of its descendants, one line per node.

    Each tree level adds one unit of indentation.
    """
    # The old `if 0: print(...)` debugging toggle was dead code; log directly.
    logger.log(level, indent * " " + str(self))
    # Loop variable renamed: the old name shadowed the childItems property.
    for child in self.childItems:
        child.logBranch(indent + 1, level=level)
def __init__(self, nodeName=''):
    """Constructor. Children start out unfetched."""
    super(AbstractLazyLoadTreeItem, self).__init__(nodeName=nodeName)
    # children not yet fetched (successfully or unsuccessfully)
    self._canFetchChildren = True
def canFetchChildren(self):
    """Whether a child fetch is still pending.

    False once fetching has been attempted, whether it succeeded or not.
    """
    return self._canFetchChildren
def _fetchAllChildren(self):
    """Actually fetch and return this item's children. Subclasses must override.

    The result must be a list of RepoTreeItems. Their parents must be None,
    as that attribute will be set by BaseTreeitem.insertItem()

    :rtype: list of BaseRti objects
    """
    raise NotImplementedError
def __init__(self) -> None:
    """Populate the view's date blurb and frequently used URLs."""
    super().__init__()
    kick_off_date = tournaments.kick_off_date()
    if dtutil.now() > kick_off_date:
        # Kick off already happened this season; fall back to the generic rule.
        self.date_info = 'The Season Kick Off is on the second Saturday of the season'
    else:
        self.date_info = 'The next Season Kick Off is on ' + dtutil.display_date_with_date_and_year(kick_off_date)
    self.faqs_url = url_for('faqs')
    self.cardhoarder_loan_url = 'https://www.cardhoarder.com/free-loan-program-faq'
    self.tournaments_url = url_for('tournaments')
    self.discord_url = url_for('discord')
def setupUi(self, SelectCode):
    """Build the SelectCode window's widgets (Qt-Designer-style generated code).

    Creates three code panes, each paired with a radio button, plus start /
    return buttons, a progress bar and a status bar.  Looks machine-generated
    by pyuic — avoid hand-editing; regenerate from the .ui file if possible.
    """
    SelectCode.setObjectName("SelectCode")
    SelectCode.resize(1300, 1100)
    self.centralwidget = QtWidgets.QWidget(SelectCode)
    self.centralwidget.setObjectName("centralwidget")
    # Radio button + text pane for code candidate 3.
    self.opt_select_code_3 = QtWidgets.QRadioButton(self.centralwidget)
    self.opt_select_code_3.setGeometry(QtCore.QRect(970, 100, 21, 22))
    self.opt_select_code_3.setText("")
    self.opt_select_code_3.setObjectName("opt_select_code_3")
    self.txt_select_code_1 = QtWidgets.QPlainTextEdit(self.centralwidget)
    self.txt_select_code_1.setGeometry(QtCore.QRect(150, 140, 320, 721))
    self.txt_select_code_1.setObjectName("txt_select_code_1")
    self.opt_select_code_1 = QtWidgets.QRadioButton(self.centralwidget)
    self.opt_select_code_1.setGeometry(QtCore.QRect(310, 100, 21, 22))
    self.opt_select_code_1.setText("")
    self.opt_select_code_1.setObjectName("opt_select_code_1")
    self.txt_select_code_3 = QtWidgets.QPlainTextEdit(self.centralwidget)
    self.txt_select_code_3.setGeometry(QtCore.QRect(810, 140, 320, 721))
    self.txt_select_code_3.setObjectName("txt_select_code_3")
    # Title label, centered, bold 16pt.
    self.label = QtWidgets.QLabel(self.centralwidget)
    self.label.setGeometry(QtCore.QRect(560, 40, 201, 41))
    font = QtGui.QFont()
    font.setPointSize(16)
    font.setBold(True)
    font.setWeight(75)
    self.label.setFont(font)
    self.label.setAlignment(QtCore.Qt.AlignCenter)
    self.label.setObjectName("label")
    self.btn_compile_start = QtWidgets.QPushButton(self.centralwidget)
    self.btn_compile_start.setGeometry(QtCore.QRect(980, 890, 151, 51))
    self.btn_compile_start.setObjectName("btn_compile_start")
    self.btn_return_search = QtWidgets.QPushButton(self.centralwidget)
    self.btn_return_search.setGeometry(QtCore.QRect(980, 970, 151, 51))
    self.btn_return_search.setObjectName("btn_return_search")
    self.opt_select_code_2 = QtWidgets.QRadioButton(self.centralwidget)
    self.opt_select_code_2.setGeometry(QtCore.QRect(640, 100, 21, 22))
    self.opt_select_code_2.setText("")
    self.opt_select_code_2.setObjectName("opt_select_code_2")
    self.txt_select_code_2 = QtWidgets.QPlainTextEdit(self.centralwidget)
    self.txt_select_code_2.setGeometry(QtCore.QRect(480, 140, 320, 721))
    self.txt_select_code_2.setObjectName("txt_select_code_2")
    self.progress = QtWidgets.QProgressBar(self.centralwidget)
    self.progress.setGeometry(QtCore.QRect(150, 910, 791, 31))
    self.progress.setProperty("value", 0)
    self.progress.setObjectName("progress")
    SelectCode.setCentralWidget(self.centralwidget)
    self.statusbar = QtWidgets.QStatusBar(SelectCode)
    self.statusbar.setObjectName("statusbar")
    SelectCode.setStatusBar(self.statusbar)
    # Apply translated texts and auto-connect slots by object name.
    self.retranslateUi(SelectCode)
    QtCore.QMetaObject.connectSlotsByName(SelectCode)
def __init__(self):
    """Construct the main window, then build and initialize its UI."""
    QtWidgets.QMainWindow.__init__(self)
    self.setupUi(self)
    self.initUi()
def return_search(self):
    """Close this window and reopen the search window.

    The new window is kept in a module-level global so it is not
    garbage-collected while shown.
    """
    global window_search_code
    self.close()
    window_search_code = guiStart.GuiStart()
    window_search_code.show()
def __init__(self, elements=None):
    """Create a new empty union-find structure.

    If *elements* is an iterable, the structure starts as the discrete
    partition over that set: every element is its own root with weight 1.
    """
    self.parents = {}
    self.weights = {}
    for element in (elements if elements is not None else ()):
        self.weights[element] = 1
        self.parents[element] = element
def __iter__(self):
    """Iterate through all items ever found or unioned by this structure."""
    return iter(self.parents)
def javascript_link(*urls, **attrs):
    """Like tags.javascript_link, but with SCRIPT_NAME-aware URLs."""
    return _modTag(urls, attrs, tags.javascript_link)
def _modTag(urls, attrs, functor):
    """Prefix site-relative URLs with the WSGI SCRIPT_NAME, then call *functor*."""
    nUrls = urls
    sN = request.environ['SCRIPT_NAME']
    if sN:
        if sN.startswith("/"):
            sN = sN[1:]
        nUrls = []
        for url in urls:
            # Absolute http(s) URLs pass through untouched.
            if url.startswith("http"):
                nUrls.append(url)
            else:
                if url[0] == "/":
                    url = "/%s%s" % (sN, url)
                nUrls.append(url)
    return functor(*nUrls, **attrs)
def wrap( self = None ):
    """Render the standard DIRAC welcome HTML, embedding fn(self) in the footer.

    NOTE(review): `fn` is a closure variable from an enclosing decorator
    outside this view — confirm against the full source before reuse.
    """
    return "<html><body><img src='/images/logos/logo.png'/><br><br><br><br><p class='lrg'>\
The <a href='http://diracgrid.org'>DIRAC</a> project is a complete \
Grid solution for a community of users needing access to \
distributed computing resources.</p><br><p class='lrg'>Do you want \
to help your community? Get <a href='https://github.com/DIRACGrid'>\
involved</a>!</p><br>\
<p class='footer'>" + fn( self ) + "</p></body></html>"
def test_signal(T=100, fs=1):
    """Generate a constant unit test signal.

    Args:
        T: duration of the signal
        fs: sampling frequency
    Returns:
        2 x N array: row 0 is the time axis, row 1 is all ones.
    """
    # 1.0/fs forces true division; with integer fs under Python 2 the old
    # `1/fs` truncated to an int sample step.
    dt = 1.0 / fs
    x = np.arange(0, T, dt)
    y = np.ones(x.shape)
    return np.vstack((x, y))
def plot_graph(ax, title, x, y, xlabel, ylabel):
    """Draw y(x) on axes *ax* with the given title and axis labels."""
    ax.set_title(title)
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.plot(x, y)
def calculate_distance(position_1, position_2):
    """ Returns distance between two points or 1-dim and 2-dim vectors
    input:
        position1 -- int or 1-dim or 2-dim matrix
        position2 -- int or 1-dim or 2-dim matrix
        position1 and position2 must have the same dimension
    """
    # NOTE(review): as visible here the function only computes the element-wise
    # difference and never returns anything (it falls through returning None);
    # the remainder (presumably a norm and a return) looks truncated — confirm
    # against the original source before relying on this.
    distance = position_2 - position_1
def max_sway_AP_ML(signal):
    """Maximal sway for the mediolateral (x) and anterioposterior (y) directions.

    Input:
        signal -- 2-dim array with shape (channel, samples); index 0 holds
            channel x samples, index 1 channel y samples.
    Output:
        (max_sway, max_AP, max_ML) -- floats
    """
    centered_ml = signal[0] - np.mean(signal[0])
    centered_ap = signal[1] - np.mean(signal[1])
    radial = np.sqrt(centered_ml ** 2 + centered_ap ** 2)
    return max(radial), max(np.abs(centered_ap)), max(np.abs(centered_ml))
def COP_path(signal):
    """Total length of the COP (center of pressure) path.

    Input:
        signal -- 2-dim array with shape (channel, samples)
            (on the index 0 - samples from channel x, on index 1 - samples from channel y)
    Output:
        (cop, cop_x, cop_y) -- floats: resultant, x-only and y-only path lengths
    """
    n = signal.shape[1]
    # `range` instead of the Python-2-only `xrange` keeps this working on
    # both interpreter lines; generator expressions avoid temporary lists.
    cop = sum(calculate_distance(signal[:, i], signal[:, i + 1]) for i in range(n - 1))
    cop_x = sum(calculate_distance(signal[0, i], signal[0, i + 1]) for i in range(n - 1))
    cop_y = sum(calculate_distance(signal[1, i], signal[1, i + 1]) for i in range(n - 1))
    return cop, cop_x, cop_y
def confidence_ellipse_area(signal):
    """Area of the 95% confidence ellipse of a 2-channel (x, y) signal."""
    n_samples = signal.shape[1]
    std_ap = np.std(signal[1])
    std_ml = np.std(signal[0])
    centered_ml = signal[0] - np.mean(signal[0])
    centered_ap = signal[1] - np.mean(signal[1])
    # Biased sample covariance between the two channels.
    cov_ap_ml = np.sum(centered_ml * centered_ap) / n_samples
    return 2 * np.pi * 3.0 * np.sqrt(std_ap ** 2 * std_ml ** 2 - cov_ap_ml ** 2)
def get_percentages_being(signal, fs, grid=0.1, plot=True):
"""Return how long person was on o field grig x grid (%) | BrainTech/openbci | [
12,
9,
12,
3,
1407695699
] |
def get_percentages_values(signal, fs, plot=True):
    """
    Percentages of time spent in each of the four quadrants of the board.

    Input:
        signal -- 2-dim array with shape (channel, samples)
            (index 0 - samples from channel x, index 1 - samples from channel y)
        fs -- float -- sampling frequency
    Output:
        (top_right, top_left, bottom_right, bottom_left) -- floats
    """
    p, xedges, yedges = get_percentages_being(signal, fs, plot=plot)
    top_right = top_left = bottom_right = bottom_left = 0
    # Interior edges only; cells on an axis (x == 0 or y == 0) are skipped.
    for j, x in enumerate(xedges[1:-1]):
        for i, y in enumerate(yedges[1:-1]):
            if y > 0:
                if x > 0:
                    top_right += p[j, i]
                elif x < 0:
                    top_left += p[j, i]
            elif y < 0:
                if x < 0:
                    bottom_left += p[j, i]
                elif x > 0:
                    bottom_right += p[j, i]
    return top_right, top_left, bottom_right, bottom_left
def plot_percentages_being(grid, percentages_being, xedges, yedges, sig):
    """Plot the time-percentage histogram as an image with the COP path overlaid.

    Args (inferred from usage -- confirm against callers):
        grid: bin size in cm, used only in the title
        percentages_being: 2-dim histogram of time percentages
        xedges, yedges: histogram bin edges
        sig: 2-dim (x, y) signal, drawn in white over the image
    """
    fig = plt.figure()
    ax = fig.gca()
    ax.set_title('histogram with percentagles\nbegining in field {}cm x {}cm [time %].'.format(grid, grid))
    im = mpl.image.NonUniformImage(ax, interpolation='nearest')
    # Bin centers, since NonUniformImage positions data points, not edges.
    xcenters = xedges[:-1] + 0.5 * (xedges[1:] - xedges[:-1])
    ycenters = yedges[:-1] + 0.5 * (yedges[1:] - yedges[:-1])
    im.set_data(xcenters, ycenters, percentages_being.T)
    plt.colorbar(mappable=im, ax=ax)
    ax.images.append(im)
    ax.set_xlim(xedges[0], xedges[-1])
    ax.set_ylim(yedges[0], yedges[-1])
    ax.set_aspect('equal')
    ax.set_xlabel('x [cm]')
    ax.set_ylabel('y [cm]')
    ax.plot(sig[0], sig[1], 'w')
def apply_filter(input_filename, output_filename):
settings = {
'amount': 25,
'granularity': 5
} | aluminiumgeek/psychedelizer | [
7,
3,
7,
2,
1390165576
] |
def get_test_account_connectivity_tasks(asset):
    """Return the Ansible ping tasks suitable for *asset*'s platform.

    Unix-like assets get the unix ping tasks, Windows assets the win_ping
    tasks; any other platform is unsupported and yields an empty list.
    """
    if asset.is_unixlike():
        tasks = const.PING_UNIXLIKE_TASKS
    elif asset.is_windows():
        tasks = const.PING_WINDOWS_TASKS
    else:
        # Translate first, then format: calling .format() before _() makes
        # the gettext catalog lookup receive an already-interpolated string
        # that can never match the translation template.
        msg = _(
            "The asset {} system platform {} does not "
            "support run Ansible tasks"
        ).format(asset.hostname, asset.platform)
        logger.info(msg)
        tasks = []
    return tasks
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
    """Run the platform-appropriate ping tasks against *asset* as *username*.

    :param task_name: display name for the adhoc task
    :param asset: asset to test
    :param username: account to log in with
    :param password: optional password credential
    :param private_key: optional private key credential
    :return: (raw, summary) from run_adhoc; ({}, {}) when the asset's
        platform supports no tasks.
    """
    # Imported here, not at module level — presumably to avoid an import
    # cycle with ops.inventory; confirm before moving.
    from ops.inventory import JMSCustomInventory

    tasks = get_test_account_connectivity_tasks(asset)
    if not tasks:
        logger.debug("No tasks ")
        return {}, {}
    inventory = JMSCustomInventory(
        assets=[asset], username=username, password=password,
        private_key=private_key
    )
    raw, summary = run_adhoc(
        task_name=task_name, tasks=tasks, inventory=inventory
    )
    return raw, summary
def test_account_connectivity_util(account, task_name):
    """
    Test an account's connectivity and persist the result on the account.

    :param account: <AuthBook> object
    :param task_name: display name for the adhoc task
    :return: None
    """
    if not check_asset_can_run_ansible(account.asset):
        return

    account.load_auth()
    try:
        raw, summary = test_user_connectivity(
            task_name=task_name, asset=account.asset,
            username=account.username, password=account.password,
            private_key=account.private_key_file
        )
    except Exception as e:
        # logger.warning: `warn` is a deprecated alias of warning().
        logger.warning("Failed run adhoc {}, {}".format(task_name, e))
        return

    if summary.get('success'):
        account.set_connectivity(Connectivity.ok)
    else:
        account.set_connectivity(Connectivity.failed)
def __init__(self):
    """Start with an empty header -> sequence mapping."""
    self.fastaDict = {}
def fasta_reader(self, fastaFile):
    """Parse *fastaFile* and store {header: sequence} into self.fastaDict.

    Python 2 only (uses the iterator .next() method). Headers lose their
    leading '>' and are truncated at the first space.
    """
    fastaDict = {}

    subHeader("Fasta reader")
    fh = open(fastaFile)
    # ditch the boolean (x[0]) and just keep the header or sequence since
    # we know they alternate.
    faiter = (x[1] for x in itertools.groupby(fh, lambda line: line[0] == ">"))
    for header in faiter:
        # drop the ">"
        header = header.next()[1:].strip()
        # drop the info
        header = header.split(" ")[0]
        info("Reading " + header + "...")
        # join all sequence lines to one.
        seq = "".join(s.strip() for s in faiter.next())
        fastaDict[header] = seq

    self.fastaDict = fastaDict
def __init__(self, alignmentObj, clippedSide):
    """Initialize a cluster anchored at the clipped alignment's breakpoint.

    clippedSide is "beg" or "end"; the breakpoint position is the
    alignment's reference start or end accordingly.
    """
    self.chrom = alignmentObj.reference_name
    self.clippedSide = clippedSide
    if clippedSide == "beg":
        self.bkpPos = alignmentObj.reference_start
    else:
        self.bkpPos = alignmentObj.reference_end
    self.clippedReadDict = {}
    self.consensusSeq = ""
def nbReads(self):
    """Number of clipped reads collected in this cluster."""
    return len(self.clippedReadDict)
def addReadSeqs(self, fastaObj):
"""
"""
for readId in self.clippedReadDict.keys():
alignmentObj = self.clippedReadDict[readId]["alignmentObj"] | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def makeConsensusSeq(self, outDir):
"""
multiple sequence alignment based
"""
## A) Single sequence
if len(self.clippedReadDict.keys()) == 1:
consensusSeq = list(self.clippedReadDict.values())[0]["seq"].upper()
## B) Multiple sequence
else:
command = 'mkdir -p ' + outDir
os.system(command) # returns the exit status | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def log(label, string):
    """
    Display labelled information

    Python 2 only (print statement).
    """
    print "[" + label + "]", string
def info(string):
    """
    Display basic information with a timestamp prefix.

    Python 2 only (print statement).
    """
    timeInfo = time.strftime("%Y-%m-%d %H:%M")
    print timeInfo, string
def overlap(begA, endA, begB, endB):
"""
Check if both ranges overlap. 2 criteria for defining overlap:
## A) Begin of the range A within the range B
# *beg* <---------range_A---------->
# <---------range_B----------> | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def getClippedPairedClusters(chrPlus, begPlus, endPlus, chrMinus, begMinus, endMinus, rgType, bamFile, windowSize):
"""
"""
## 1. Extract clipped reads for positive cluster
chrom = chrPlus
if (rgType == "DUP"):
beg = int(begPlus) - windowSize
end = int(begPlus) + windowSize
else:
beg = int(endPlus) - windowSize
end = int(endPlus) + windowSize
clippedBegPlusList, clippedEndPlusList = getClippedInterval(chrom, beg, end, bamFile)
## 2. Extract clipped reads for negative cluster
chrom = chrMinus
if (rgType == "DUP"):
beg = int(endMinus) - windowSize
end = int(endMinus) + windowSize
else:
beg = int(begMinus) - windowSize
end = int(begMinus) + windowSize | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def getClippedUnpairedCluster(chrPlus, begPlus, endPlus, bamFile, windowSize):
"""
"""
## 1. Extract clipped reads for cluster beginning
chrom = chrPlus
beg = int(begPlus) - windowSize
end = int(begPlus) + windowSize | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def getClippedInterval(chrom, beg, end, bamFile):
'''
'''
#print "** pickClipped function **" | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def clusterCLipped(clippedList, clippedSide, minNbReads, maxNbReads):
'''
'''
#print "** clusterCLipped function **"
### 1. Sort the list of clipped reads in increasing coordinates order
if (clippedSide == "beg"):
clippedSortedList = sorted(clippedList, key=lambda alignmentObj: alignmentObj.reference_start, reverse=False)
else:
clippedSortedList = sorted(clippedList, key=lambda alignmentObj: alignmentObj.reference_end, reverse=False)
### 2. Make clipped read clusters:
clusterList = []
## For each clipped read alignment
for alignmentObj in clippedSortedList:
# A) No cluster in the list -> Create first cluster
if not clusterList:
clusterObj = cluster(alignmentObj, clippedSide)
clusterObj.addClippedRead(alignmentObj)
clusterList.append(clusterObj)
# B) There is already at least one cluster in the list -> Check if current clipped read within the latest cluster
else:
## Define bkp position:
bkpPos = alignmentObj.reference_start if clippedSide == "beg" else alignmentObj.reference_end
## Define cluster range for searching for overlap
lastClusterObj = clusterList[-1]
begClusterRange = lastClusterObj.bkpPos
endClusterRange = lastClusterObj.bkpPos + 3 | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def filterNbClusters(clusterBegList, clusterEndList, maxNbClusters):
'''
'''
totalNbClusters = len(clusterBegList) + len(clusterEndList) | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def filterDiscordantCluster(chrom, beg, end, readPairList, bamFile):
'''
'''
nbDiscordant = len(readPairList)
nbClippedBothSides = 0
readPairFilteredList = []
## Extract alignments in the interval
iterator = bamFile.fetch(chrom, beg, end)
## Iterate over the alignments
for alignmentObj in iterator:
## Supporting discordant paired-end read and cigar available
if (alignmentObj.query_name in readPairList) and (alignmentObj.cigartuples is not None):
firstOperation = alignmentObj.cigartuples[0][0]
lastOperation = alignmentObj.cigartuples[-1][0]
### A) Read clipped both in the beginning and ending
if ((firstOperation == 4) or (firstOperation == 5)) and ((lastOperation == 4) or (lastOperation == 5)):
nbClippedBothSides += 1 | brguez/TEIBA | [
2,
2,
2,
2,
1462206335
] |
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--pig', default='tk')
parser.add_argument('--wave')
parser.add_argument('--gallery', nargs='*', default=['.', '../gallery'])
parser.add_argument('--images', default=False, action='store_true')
parser.add_argument('--thresh', type=float, default=10.0)
parser.add_argument('--monitor', action='store_true')
parser.add_argument('--nomon', action='store_false', default=True)
parser.add_argument('--words', default='diceware.wordlist.asc')
parser.add_argument(
'--files',
nargs='+',
default=__file__)
parser.add_argument('--nomick', action='store_true')
parser.add_argument('--sense', action='store_true',
help="if you have a sense hat")
parser.add_argument('--path')
args = parser.parse_args()
# import from pig stuff here, after talking to joy
from karmapi import joy
joy.set_backend(args.pig)
from karmapi import piglet
from karmapi import widgets
from karmapi import sonogram
# what's this doing here?
#import tkinter
farm = pigfarm.PigFarm()
print('building farm')
farm.status()
from karmapi.mclock2 import GuidoClock
from karmapi.tankrain import TankRain
from karmapi import diceware as dice
from karmapi import talk
if args.monitor:
from karmapi import milk
farm.add(milk.Curio)
images = [
dict(image='climate_karma_pi_and_jupyter.png', title=''),
dict(image='gil_ly_.png', title=''),
dict(image='princess_cricket.jpg', title='Princess Cricket'),
dict(image='fork_in_road.jpg', title='Fork in the Road'),
dict(image='tree_of_hearts.jpg', title='Tree of Hearts'),
#dict(image='chess.jpg', title='Branching'),
dict(image='lock.jpg', title='Global Interpreter Lock'),
dict(image='air_water.jpg', title='async def(): await run()'),
dict(image='venus.jpg', title='Jupyter')]
from karmapi import sunny, noddy
farm.files = args.files
print('galleries', args.gallery)
im_info = dict(galleries=args.gallery)
if args.images:
for im in images:
im_info.update(im)
farm.add(piglet.Image, im_info.copy())
words = Path(args.words)
if words.exists():
words = words.open()
else:
words = None
data = None
if args.path:
path = Path(args.path)
if path.exists():
data = base.load_folder(path) | openbermuda/karmapi | [
4,
3,
4,
3,
1459170159
] |
def setUp(self):
    """Create a fresh hamster backed by its own requests manager."""
    self.hamster = TorecHashCodesHamster(RequestsManager())
def test_remove_after_max_time_passed(self):
    """Records should expire only after the hamster's retention window elapses."""
    self.hamster.add_sub_id("23703")
    self.hamster.add_sub_id("2638")
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(len(self.hamster._records), 2)
    time.sleep(10)
    # Still within the retention window: nothing evicted yet.
    self.assertEqual(len(self.hamster._records), 2)
    time.sleep(120)
    # Past the window: both records must be gone.
    self.assertEqual(len(self.hamster._records), 0)
16,
2,
16,
22,
1360016987
] |
def test_remove_after_after_request(self):
    """Removing one sub id should evict exactly that record and keep the other."""
    self.hamster.add_sub_id("23703")
    self.hamster.add_sub_id("2638")
    self.assertEqual(len(self.hamster._records), 2)
    self.hamster.remove_sub_id("2638")
    self.assertEqual(len(self.hamster._records), 1)
    # dict.keys() is a non-indexable view on Python 3; materialize it first.
    self.assertEqual(list(self.hamster._records.keys())[0], "23703")
16,
2,
16,
22,
1360016987
] |
def parse_arguments(args=None):
basic_args = plot_correlation_args()
heatmap_parser = heatmap_options()
scatter_parser = scatterplot_options()
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=""" | fidelram/deepTools | [
561,
191,
561,
107,
1373286628
] |
def plot_correlation_args():
    """Build the parser with arguments shared by the heatmap and scatterplot modes.

    Returns:
        argparse.ArgumentParser: parser created with ``add_help=False`` so it
        can be combined as a parent parser via ``parents=[...]``.
    """
    parser = argparse.ArgumentParser(add_help=False)

    required = parser.add_argument_group('Required arguments')
    # define the arguments
    required.add_argument('--corData', '-in',
                          metavar='FILE',
                          help='Compressed matrix of values generated by multiBigwigSummary or multiBamSummary',
                          required=True)

    required.add_argument('--corMethod', '-c',
                          help="Correlation method.",
                          choices=['spearman', 'pearson'],
                          required=True)

    required.add_argument('--whatToPlot', '-p',
                          help="Choose between a heatmap or pairwise scatter plots",
                          choices=['heatmap', 'scatterplot'],
                          required=True)

    optional = parser.add_argument_group('Optional arguments')
    optional.add_argument('--plotFile', '-o',
                          help='File to save the heatmap to. The file extension determines the format, '
                               'so heatmap.pdf will save the heatmap in PDF format. '
                               'The available formats are: .png, '
                               '.eps, .pdf and .svg.',
                          type=writableFile,
                          metavar='FILE')

    optional.add_argument('--skipZeros',
                          help='By setting this option, genomic regions '
                               'that have zero or missing (nan) values in all samples '
                               'are excluded.',
                          action='store_true',
                          required=False)

    optional.add_argument('--labels', '-l',
                          metavar='sample1 sample2',
                          help='User defined labels instead of default labels from '
                               'file names. '
                               'Multiple labels have to be separated by spaces, e.g. '
                               '--labels sample1 sample2 sample3',
                          nargs='+')

    optional.add_argument('--plotTitle', '-T',
                          help='Title of the plot, to be printed on top of '
                               'the generated image. Leave blank for no title. (Default: %(default)s)',
                          default='')

    optional.add_argument('--plotFileFormat',
                          metavar='FILETYPE',
                          help='Image format type. If given, this option '
                               'overrides the image format based on the plotFile '
                               'ending. The available options are: png, '
                               'eps, pdf and svg.',
                          choices=['png', 'pdf', 'svg', 'eps', 'plotly'])

    optional.add_argument(
        '--removeOutliers',
        help='If set, bins with very large counts are removed. '
             'Bins with abnormally high reads counts artificially increase '
             'pearson correlation; that\'s why, multiBamSummary tries '
             'to remove outliers using the median absolute deviation (MAD) '
             'method applying a threshold of 200 to only consider extremely '
             'large deviations from the median. The ENCODE blacklist page '
             '(https://sites.google.com/site/anshulkundaje/projects/blacklists) '
             # Missing space fixed: the help text previously rendered as
             # "countsthat may be worth removing".
             'contains useful information about regions with unusually high counts '
             'that may be worth removing.',
        action='store_true')

    optional.add_argument('--version', action='version',
                          version='%(prog)s {}'.format(__version__))

    group = parser.add_argument_group('Output optional options')

    group.add_argument('--outFileCorMatrix',
                       help='Save matrix with pairwise correlation values to a tab-separated file.',
                       metavar='FILE',
                       type=writableFile)

    return parser
561,
191,
561,
107,
1373286628
] |
def heatmap_options():
    """
    Options for generating the correlation heatmap

    Returns:
        argparse.ArgumentParser: parser (created with ``add_help=False``)
        holding a 'Heatmap options' group, intended to be used as a parent
        parser.
    """
    parser = argparse.ArgumentParser(add_help=False)
    heatmap = parser.add_argument_group('Heatmap options')

    heatmap.add_argument('--plotHeight',
                         help='Plot height in cm. (Default: %(default)s)',
                         type=float,
                         default=9.5)

    heatmap.add_argument('--plotWidth',
                         help='Plot width in cm. The minimum value is 1 cm. (Default: %(default)s)',
                         type=float,
                         default=11)

    heatmap.add_argument('--zMin', '-min',
                         default=None,
                         help='Minimum value for the heatmap intensities. '
                              'If not specified, the value is set automatically',
                         type=float)

    heatmap.add_argument('--zMax', '-max',
                         default=None,
                         # Missing space fixed: help text previously rendered as
                         # "intensities.If not specified".
                         help='Maximum value for the heatmap intensities. '
                              'If not specified, the value is set automatically',
                         type=float)

    heatmap.add_argument(
        '--colorMap', default='jet',
        metavar='',
        help='Color map to use for the heatmap. Available values can be '
             'seen here: '
             'http://matplotlib.org/examples/color/colormaps_reference.html')

    heatmap.add_argument('--plotNumbers',
                         help='If set, then the correlation number is plotted '
                              'on top of the heatmap. This option is only valid when plotting a heatmap.',
                         action='store_true',
                         required=False)

    return parser
561,
191,
561,
107,
1373286628
] |
def capture_area(area):
    """
    Capture a rectangular region of the screen.

    Args:
        area (Tuple (x,y,width,height)): Region of the screen to grab

    Returns:
        Image : Screenshot of the requested region
    """
    return pyautogui.screenshot(region=area)
2,
3,
2,
1,
1466450080
] |
def get_game_screen():
    """
    Get game screen image

    Returns:
        Image : Image of screen area

    Delegates to capture_area() with the fixed game-window rectangle stored
    in Globals.GAME_REGION (x, y, width, height).
    """
    return capture_area(area=Globals.GAME_REGION)
2,
3,
2,
1,
1466450080
] |
def glmnetPlot(x, xvar = 'norm', label = False, ptype = 'coef', **options):
import matplotlib.pyplot as plt
# process inputs
xvar = getFromList(xvar, ['norm', 'lambda', 'dev'], 'xvar should be one of ''norm'', ''lambda'', ''dev'' ')
ptype = getFromList(ptype, ['coef', '2norm'], 'ptype should be one of ''coef'', ''2norm'' ')
if x['class'] in ['elnet', 'lognet', 'coxnet', 'fishnet']:
handle = plotCoef(x['beta'], [], x['lambdau'], x['df'], x['dev'],
label, xvar, '', 'Coefficients', **options)
elif x['class'] in ['multnet', 'mrelnet']:
beta = x['beta']
if xvar == 'norm':
norm = 0
nzbeta = beta
for i in range(len(beta)):
which = nonzeroCoef(beta[i])
nzbeta[i] = beta[i][which, :]
norm = norm + scipy.sum(scipy.absolute(nzbeta[i]), axis = 0)
else:
norm = 0 | bbalasub1/glmnet_python | [
185,
91,
185,
25,
1471585049
] |
def getFromList(xvar, xvarbase, errMsg):
    """Resolve *xvar* to the first element of *xvarbase* it is a prefix of.

    The comparison lower-cases *xvar* only (matching the original behavior);
    candidates in *xvarbase* are compared as-is.

    Args:
        xvar: user-supplied (possibly abbreviated) option name.
        xvarbase: list of canonical option names.
        errMsg: message for the ValueError raised when nothing matches.

    Returns:
        The first matching canonical name from *xvarbase*.

    Raises:
        ValueError: if no element of *xvarbase* starts with *xvar*.
    """
    prefix = xvar.lower()
    # Idiomatic replacement for the boolean-mask / index-list dance.
    matches = [candidate for candidate in xvarbase if candidate.startswith(prefix)]
    if not matches:
        raise ValueError(errMsg)
    return matches[0]
185,
91,
185,
25,
1471585049
] |
def nonzeroCoef(beta, bystep = False):
result = scipy.absolute(beta) > 0
if len(result.shape) == 1:
result = scipy.reshape(result, [result.shape[0], 1])
if not bystep:
result = scipy.any(result, axis = 1) | bbalasub1/glmnet_python | [
185,
91,
185,
25,
1471585049
] |
def plotCoef(beta, norm, lambdau, df, dev, label, xvar, xlab, ylab, **options):
import matplotlib.pyplot as plt
which = nonzeroCoef(beta)
idwhich = [i for i in range(len(which)) if which[i] == True]
nwhich = len(idwhich)
if nwhich == 0:
raise ValueError('No plot produced since all coefficients are zero')
elif nwhich == 1:
raise ValueError('1 or less nonzero coefficients; glmnet plot is not meaningful') | bbalasub1/glmnet_python | [
185,
91,
185,
25,
1471585049
] |
def __init__(self, proxy_type, proxy_address, proxy_port, proxy_login, proxy_password):
    """Store the proxy connection settings on the instance."""
    settings = (
        ('proxyType', proxy_type),
        ('proxyAddress', proxy_address),
        ('proxyPort', proxy_port),
        ('proxyLogin', proxy_login),
        ('proxyPassword', proxy_password),
    )
    for attribute, value in settings:
        setattr(self, attribute, value)
43,
7,
43,
6,
1387396745
] |
def add_arguments(self, parser):
    """Register the positional arguments for this management command."""
    for name, description in (
        ('username', 'account username'),
        ('password', 'account password'),
        ('email', 'account email address'),
    ):
        parser.add_argument(name, help=description)
parser.add_argument("email", help="account email address") | mozilla/socorro | [
556,
233,
556,
19,
1314213709
] |
def onchange_list_price(self, cr, uid, ids, list_price, uos_coeff, context=None):
    """Mirror the sale price into ``list_price_copy`` whenever it changes."""
    updated_values = {'list_price_copy': list_price}
    return {'value': updated_values}
29,
16,
29,
1,
1402418161
] |
def write(self, cr, uid, ids, vals, context=None):
    """Reject price changes from users lacking the pricing permission, then delegate.

    Raises orm.except_orm when ``list_price`` is being written by a user who
    is not in the ``dt_price_security.can_modify_prices`` group.
    """
    if 'list_price' in vals:
        groups = self.pool['res.groups']
        allowed = groups.user_in_group(
            cr, uid, uid, 'dt_price_security.can_modify_prices', context=context)
        if not allowed:
            raise orm.except_orm(
                _('Violation of permissions'),
                _('You do not have the necessary permissions to modify the price of the products'))
    return super(product_product, self).write(cr, uid, ids, vals, context=context)
29,
16,
29,
1,
1402418161
] |
def test(self, parent, block):
    """Report whether this block processor should handle *block* (regex hit)."""
    match = self.RE.search(block)
    return match is not None
7,
12,
7,
25,
1443631964
] |
def extendMarkdown(self, md, md_globals):  # noqa
    # Register the custom header block processor under the name
    # 'header_emphasis', inserted just before ("<") the builtin 'hashheader'
    # processor so it gets first chance at each '#'-style block.
    md.parser.blockprocessors.add(
        'header_emphasis',
        C2CHeaderProcessor(md.parser),
        "<hashheader")
7,
12,
7,
25,
1443631964
] |
def __init__(self, maintenance_communicator=INJECTED, ssl_private_key=INJECTED, ssl_certificate=INJECTED):
    # type: (MaintenanceCommunicator, str, str) -> None
    """
    Maintenance service: forwards maintenance traffic between the
    communicator and a connected socket / registered consumers.

    :type maintenance_communicator: gateway.maintenance_communicator.MaintenanceCommunicator
    """
    # Mapping of consumer id -> callback receiving each maintenance message.
    self._consumers = {}  # type: Dict[int, Callable[[str], Any]]
    # TLS material used when serving the maintenance socket.
    self._privatekey_filename = ssl_private_key
    self._certificate_filename = ssl_certificate
    self._maintenance_communicator = maintenance_communicator
    # The communicator may be absent (e.g. injected as None); only wire the
    # data receiver when one is present.
    if self._maintenance_communicator:
        self._maintenance_communicator.set_receiver(self._received_data)
    # Currently connected maintenance client, if any.
    self._connection = None  # type: Optional[socket.socket]
    # Thread running the socket server once open_maintenace_socket() is called.
    self._server_thread = None  # type: Optional[BaseThread]
30,
12,
30,
27,
1481877206
] |
def start(self):
    # type: () -> None
    """Start the wrapped maintenance communicator, when one is configured."""
    communicator = self._maintenance_communicator
    if communicator:
        communicator.start()
30,
12,
30,
27,
1481877206
] |
def _received_data(self, message):
    # type: (str) -> None
    """Fan maintenance output out to the open socket (if any) and to every consumer."""
    stripped = message.rstrip()
    try:
        connection = self._connection
        if connection is not None:
            connection.sendall(bytearray(stripped.encode()) + bytearray(b'\n'))
    except Exception:
        logger.exception('Exception forwarding maintenance data to socket connection.')
    # Consumers are isolated from each other: one failing callback must not
    # prevent delivery to the rest.
    for consumer_id, callback in self._consumers.items():
        if callback is None:
            continue
        try:
            callback(stripped)
        except Exception:
            logger.exception('Exception forwarding maintenance data to consumer %s', str(consumer_id))
30,
12,
30,
27,
1481877206
] |
def _deactivate(self):
    # type: () -> None
    """Leave maintenance mode when it is currently active."""
    communicator = self._maintenance_communicator
    if communicator.is_active():
        communicator.deactivate()
30,
12,
30,
27,
1481877206
] |
def add_consumer(self, consumer_id, callback):
    # type: (int, Callable[[str],Any]) -> None
    """Register a maintenance-output consumer and ensure maintenance mode is on."""
    self._consumers.update({consumer_id: callback})
    self._activate()
30,
12,
30,
27,
1481877206
] |
def open_maintenace_socket(self):
    # type: () -> int
    """
    Opens a TCP/SSL socket, connecting it with the maintenance service

    Returns the port the server thread will listen on.
    """
    # NOTE(review): the port is chosen at random in [6000, 7000] without
    # checking availability -- a collision would surface in _run_socket_server.
    port = random.randint(6000, 7000)
    self._server_thread = BaseThread(name='maintenancesock', target=self._run_socket_server, args=[port])
    # Daemon thread: the socket server must not keep the process alive.
    self._server_thread.daemon = True
    self._server_thread.start()
    return port
30,
12,
30,
27,
1481877206
] |
def _handle_connection(self):
    # type: () -> None
    """
    Handles one incoming connection.

    Activates maintenance mode for the duration of the session and pumps
    data received on the socket into the maintenance communicator. The
    session ends on an empty read, an 'exit' command, or an unrecoverable
    error; maintenance mode is always deactivated and the socket closed.
    """
    assert self._connection is not None
    try:
        # Short timeout so the recv loop regularly re-checks is_active().
        self._connection.settimeout(1)
        self._connection.sendall(b'Activating maintenance mode, waiting for other actions to complete ...\n')
        self._activate()
        self._connection.sendall(b'Connected\n')
        while self._maintenance_communicator.is_active():
            try:
                try:
                    data = self._connection.recv(1024)
                    if not data:
                        # Peer closed the connection.
                        logger.info('Stopping maintenance mode due to no data.')
                        break
                    if data.startswith(b'exit'):
                        logger.info('Stopping maintenance mode due to exit.')
                        break
                    self._maintenance_communicator.write(data.decode())
                except Exception as exception:
                    # Recoverable socket errors (e.g. timeouts) are retried;
                    # anything else ends the session.
                    if System.handle_socket_exception(self._connection, exception, logger):
                        continue
                    else:
                        logger.exception('Unexpected exception receiving connection data')
                        break
            except Exception:
                logger.exception('Exception in maintenance mode')
                break
    except InMaintenanceModeException:
        # Another client already holds maintenance mode.
        self._connection.sendall(b'Maintenance mode already active.\n')
    finally:
        self._deactivate()
        logger.info('Maintenance mode deactivated')
        self._connection.close()
        self._connection = None
30,
12,
30,
27,
1481877206
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.