| commit (stringlengths 40–40) | subject (stringlengths 1–3.25k) | old_file (stringlengths 4–311) | new_file (stringlengths 4–311) | old_contents (stringlengths 0–26.3k) | lang (stringclasses, 3 values) | proba (float64, 0–1) | diff (stringlengths 0–7.82k) |
|---|---|---|---|---|---|---|---|
83e26982473176e853cf0d6b6f7d9bcd19039588 | Fix crash | enamlx/qt/qt_tree_view.py | enamlx/qt/qt_tree_view.py |
# -*- coding: utf-8 -*-
'''
Created on Aug 28, 2015
@author: jrm
'''
from atom.api import (
Typed, Instance, Property, Int
)
from enamlx.qt.qt_abstract_item_view import (
QtAbstractItemView, QAbstractAtomItemModel
)
from enamlx.widgets.tree_view import (
ProxyTreeViewItem, ProxyTreeView ,ProxyTreeViewColumn
)
from enamlx.qt.qt_abstract_item import AbstractQtWidgetItem, RESIZE_MODES
from enaml.qt.QtGui import QTreeView
from enaml.qt.QtCore import Qt, QAbstractItemModel, QModelIndex
from enaml.core.pattern import Pattern
from enaml.qt.qt_widget import QtWidget
from enaml.application import timed_call
class QAtomTreeModel(QAbstractAtomItemModel, QAbstractItemModel):
def rowCount(self, index):
#print 'rowCount',index.row(),index.column(),index.internalPointer()
d = self.declaration
if d.vertical_headers:
return len(d.vertical_headers)
item = index.internalPointer()
if not item:
return len(self.declaration.items)
d = item.declaration
#print 'rowcount ',len(d.items)
return len(d.items)
def columnCount(self, index):
d = self.declaration
if d.horizontal_headers:
return len(d.horizontal_headers)
item = index.internalPointer()
return len(item.declaration._columns)
def index(self, row, column, parent):
""" The index should point to the corresponding
QtControl in the enaml hierarchy
"""
item = parent.internalPointer()
d = item.declaration if item else self.declaration
r = row#%self.declaration.visible_rows
#print row,r,item,len(item._items)
if r<len(d._items):
return self.createIndex(row,column,d._items[r].proxy)
return QModelIndex()
def parent(self, index):
item = index.internalPointer()
if not item or item.declaration==self.declaration:
return QModelIndex()
parent = item.parent()
return self.createIndex(parent.declaration.row,0,parent)
def itemAt(self,index=None):
if not index or not index.isValid():
return
item = index.internalPointer()
d = item.declaration
try:
c = index.column()# - d.visible_column
#: First column is the item
return d._columns[c].proxy
except IndexError:
return
class QtTreeView(QtAbstractItemView, ProxyTreeView):
#: Tree widget
widget = Typed(QTreeView)
#: Root index
index = Instance(QModelIndex,())
def create_widget(self):
self.widget = QTreeView(self.parent_widget())
def init_widget(self):
super(QtTreeView, self).init_widget()
d = self.declaration
self.set_show_root(d.show_root)
def init_model(self):
self.set_model(QAtomTreeModel(parent=self.widget))
#--------------------------------------------------------------------------
# Widget Setters
#--------------------------------------------------------------------------
def set_show_root(self,show):
self.widget.setRootIsDecorated(show)
def set_cell_padding(self,padding):
self.widget.setStyleSheet("QTreeView::item { padding: %ipx }"%padding);
def set_horizontal_minimum_section_size(self,size):
self.widget.header().setMinimumSectionSize(size)
def set_horizontal_stretch(self,stretch):
self.widget.header().setStretchLastSection(stretch)
def set_horizontal_headers(self, headers):
self.widget.header().model().layoutChanged.emit()
def set_resize_mode(self,mode):
self.widget.header().setResizeMode(RESIZE_MODES[mode])
def set_show_horizontal_header(self,show):
header = self.widget.header()
header.show() if show else header.hide()
def set_model(self, model):
super(QtTreeView, self).set_model(model)
#--------------------------------------------------------------------------
# View refresh handlers
#--------------------------------------------------------------------------
def _refresh_visible_column(self, value):
self._pending_column_refreshes -=1
if self._pending_column_refreshes==0:
d = self.declaration
# TODO: What about parents???
try:
d.visible_column = max(0,min(value,self.model.columnCount(self.index)-d.visible_columns))
except RuntimeError:
pass
def _refresh_visible_row(self, value):
self._pending_row_refreshes -=1
if self._pending_row_refreshes==0:
d = self.declaration
try:
d.visible_row = max(0,min(value,self.model.rowCount(self.index)-d.visible_rows))
except RuntimeError:
pass
class AbstractQtTreeViewItem(AbstractQtWidgetItem):
""" Base TreeViewItem class """
#: Pending refreshes when loading widgets
_refresh_count = Int(0)
#: Time to wait before loading widget
_loading_interval = Int(100)
def create_widget(self):
for child in self.children():
if isinstance(child, (Pattern, QtWidget)):
self.delegate = child
def set_row(self, row):
self._update_index()
def set_column(self,column):
self._update_index()
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row,d.column,self.parent().index)
def _update_index(self):
self.index = self._default_index()
if self.delegate:
self._refresh_count +=1
timed_call(self._loading_interval,self._update_delegate)
def _update_delegate(self):
""" Update the delegate cell widget. This is deferred so it
does not get called until the user is done scrolling.
"""
self._refresh_count -=1
if self._refresh_count!=0:
return
#return # DISABLED
try:
delegate = self.delegate
if not self._is_visible():
return
print self.index.row(),self.index.column(),self.index.internalPointer()
# The table destroys when it goes out of view
# so we always have to make a new one
delegate.create_widget()
delegate.init_widget()
# Set the index widget
self.view.widget.setIndexWidget(self.index,delegate.widget)
except RuntimeError:
pass # Since this is deferred, the table could be deleted already
def _is_visible(self):
return self.index.isValid()
def data_changed(self, change):
""" Notify the model that data has changed in this cell! """
self.view.model.dataChanged.emit(self.index,self.index)
class QtTreeViewItem(AbstractQtTreeViewItem, ProxyTreeViewItem):
def _default_view(self):
""" If this is the root item, return the parent
which must be a TreeView, otherwise return the
parent Item's view.
"""
parent = self.parent()
if isinstance(parent, QtTreeView):
return parent
return parent.view
class QtTreeViewColumn(AbstractQtTreeViewItem,ProxyTreeViewColumn):
def _default_view(self):
""" Since the TreeViewColumn must be a child of a TreeViewItem,
simply return the Item's view.
"""
return self.parent().view
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row,d.column,self.parent().index)
| Python | 0.000004 |
@@ -5220,32 +5220,65 @@
e_widget(self):%0A
+ if self.declaration:%0A
for chil
@@ -5303,32 +5303,36 @@
():%0A
+
+
if isinstance(ch
@@ -5370,24 +5370,28 @@
+
self.delegat
@@ -5400,24 +5400,28 @@
= child%0A
+
%0A def set
|
b689cadb696ce07372588b368a6d3709f636ca8a | Edit descriptions | manage.py | manage.py |
from os.path import abspath
from flask import current_app as app
from app import create_app, db
# from app.model import init_db, populate_db()
from flask.ext.script import Manager
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=abspath)
@manager.command
def createdb():
with app.app_context():
"""Creates database"""
db.create_all()
print 'Database created'
@manager.command
def cleardb():
with app.app_context():
"""Deletes all database tables"""
db.drop_all()
print 'Database cleared'
@manager.command
def resetdb():
with app.app_context():
"""Removes all content from database"""
db.drop_all()
db.create_all()
print 'Database reset'
@manager.command
def initdb():
with app.app_context():
"""Initializes database with default values"""
db.drop_all()
db.create_all()
init_db()
print 'Database initialized'
@manager.command
def popdb():
with app.app_context():
"""Populates database with sample data"""
db.drop_all()
db.create_all()
init_db()
populate_db()
print 'Database populated'
if __name__ == '__main__':
manager.run()
| Python | 0.000001 |
@@ -556,28 +556,41 @@
%09%22%22%22
-Delet
+Remov
es all
+content from
database
tab
@@ -589,15 +589,8 @@
base
- tables
%22%22%22%0A
@@ -721,32 +721,55 @@
nt from database
+ and creates new tables
%22%22%22%0A%09%09db.drop_al
|
15a37d1e86d9217eec218aadbe53d633335460ae | Fix block name. | xadmin/templatetags/xadmin_tags.py | xadmin/templatetags/xadmin_tags.py |
from django import template
from django.template import Library
from django.utils import six
from django.utils.safestring import mark_safe
from xadmin.util import static, vendor as util_vendor
register = Library()
@register.simple_tag(takes_context=True)
def view_block(context, block_name, *args, **kwargs):
if 'admin_view' not in context:
return ""
admin_view = context['admin_view']
nodes = []
method_name = 'block_%s' % block_name
cls_str = str if six.PY3 else basestring
for view in [admin_view] + admin_view.plugins:
if hasattr(view, method_name) and callable(getattr(view, method_name)):
block_func = getattr(view, method_name)
result = block_func(context, nodes, *args, **kwargs)
if result and isinstance(result, cls_str):
nodes.append(result)
if nodes:
return mark_safe(''.join(nodes))
else:
return ""
@register.filter
def admin_urlname(value, arg):
return 'xadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
static = register.simple_tag(static)
@register.simple_tag(takes_context=True)
def vendor(context, *tags):
return util_vendor(*tags).render()
class BlockcaptureNode(template.Node):
"""https://chriskief.com/2013/11/06/conditional-output-of-a-django-block/"""
def __init__(self, nodelist, varname):
self.nodelist = nodelist
self.varname = varname
def render(self, context):
output = self.nodelist.render(context)
context[self.varname] = str(output)
return ''
@register.tag(name='blockcapture')
def do_blockcapture(parser, token):
try:
tag_name, args = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("'blockcapture' node requires a variable name.")
nodelist = parser.parse(('endblockcapture',))
parser.delete_first_token()
return BlockcaptureNode(nodelist, args)
| Python | 0 |
@@ -451,24 +451,42 @@
%25 block_name
+.replace('-', '_')
%0A%0A cls_st
|
1eb025811e5cc7df5b0185d34f053379d52b26ab | Remove create_admin command | manage.py | manage.py |
from flask_script import Manager
from radar.app import create_app
from radar.lib.database import db
from radar.models.users import User
from radar.lib import fixtures
app = create_app('settings.py')
manager = Manager(app)
@manager.command
def create_tables():
db.drop_all()
db.create_all()
@manager.command
def drop_tables():
db.drop_all()
@manager.command
def create_admin():
user = User()
user.username = 'admin'
user.email = 'admin@example.org'
user.set_password('password')
user.is_admin = True
db.session.add(user)
db.session.commit()
@manager.command
def load_data():
fixtures.create_fixtures()
db.session.commit()
@manager.command
def reload_data():
create_tables()
load_data()
if __name__ == '__main__':
manager.run()
| Python | 0.000003 |
@@ -98,44 +98,8 @@
db%0A
-from radar.models.users import User%0A
from
@@ -322,238 +322,8 @@
)%0A%0A%0A
-@manager.command%0Adef create_admin():%0A user = User()%0A user.username = 'admin'%0A user.email = 'admin@example.org'%0A user.set_password('password')%0A user.is_admin = True%0A db.session.add(user)%0A db.session.commit()%0A%0A%0A
@man
|
bc12ed9c9ecb108cd051feedd2dc74e9849b7e7c | Print all failures in pron rule eval test. | xh/evaluate_pronunciation_rules.py | xh/evaluate_pronunciation_rules.py |
#! /usr/bin/python2 -u
# -*- coding: utf-8 -*-
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Evaluate isiXhosa pronunciation rules against the NCHLT dictionary.
"""
from __future__ import unicode_literals
import codecs
import icu # Debian/Ubuntu: apt-get install python-pyicu
import sys
STDOUT = codecs.getwriter('utf-8')(sys.stdout)
def GetPronunciationRules(path, name):
rules = codecs.open(path, 'r', 'utf-8').read()
return icu.Transliterator.createFromRules(
name, rules, icu.UTransDirection.FORWARD)
def GetSampaToIpaMapping(path):
mapping = {}
with codecs.open(path, 'r', 'utf-8') as reader:
for line in reader:
line = line.rstrip('\n')
fields = line.split('\t')
assert len(fields) == 2
sampa, ipa = fields
assert sampa not in mapping
mapping[sampa] = ipa
return mapping
def TestPronunciationRules(xltor, mapping, dictionary):
# Batch testing against a dictionary.
with codecs.open(dictionary, 'r', 'utf-8') as reader:
for line in reader:
line = line.rstrip('\n')
fields = line.split('\t')
assert len(fields) == 2
orth, pron = fields
sampa = pron.split()
ipa = ''.join(mapping[p] for p in sampa)
if orth == 'apreli': # known issue
continue
predicted = xltor.transliterate(orth)
if predicted != ipa:
STDOUT.write('%s\t%s\t%s != %s\n' %
(orth, ' '.join(sampa), ipa, predicted))
return False
return True
def ApplyPronunciationRules(xltor):
# For interactive use.
while True:
line = sys.stdin.readline()
if not line:
break
line = line.decode('utf-8')
for orth in line.split():
predicted = xltor.transliterate(orth)
STDOUT.write('%s\t%s\n' % (orth, predicted))
return
def main(args):
if len(args) == 2:
xltor = GetPronunciationRules(args[1], 'xh-xh_FONIPA')
ApplyPronunciationRules(xltor)
elif len(args) == 4:
xltor = GetPronunciationRules(args[1], 'xh-xh_FONIPA')
mapping = GetSampaToIpaMapping(args[2])
if TestPronunciationRules(xltor, mapping, args[3]):
STDOUT.write('PASS\n')
sys.exit(0)
else:
STDOUT.write('FAIL\n')
sys.exit(1)
else:
STDOUT.write('Usage: %s RULES [MAPPING DICTIONARY]\n' % args[0])
sys.exit(1)
if __name__ == '__main__':
main(sys.argv)
| Python | 0.000001 |
@@ -1488,16 +1488,33 @@
ionary.%0A
+ success = True%0A
with c
@@ -2022,22 +2022,25 @@
-return
+success =
False%0A
@@ -2047,20 +2047,23 @@
return
-True
+success
%0A%0A%0Adef A
|
74959fa6f12d5be7491f1fbf3d99b1678486c311 | bump version for release | slacksocket/version.py | slacksocket/version.py |
version = '0.4.4'
| Python | 0 |
@@ -10,9 +10,9 @@
'0.
-4.4
+5.0
'%0A
|
022d5ec88c3156b77cf9797dfa51812bc24efb81 | Return to homescreen from launcher with START | esp32/modules/launcher.py | esp32/modules/launcher.py |
import ugfx, badge, sys, gc
import uos as os
import uerrno as errno
import ujson as json
import time
import esp
import appglue
import version
ugfx.init()
ugfx.input_init()
ugfx.clear(ugfx.BLACK)
ugfx.flush()
ugfx.clear(ugfx.WHITE)
ugfx.flush()
ugfx.string_box(148,22,148,26, "STILL", "Roboto_BlackItalic24", ugfx.BLACK, ugfx.justifyCenter)
ugfx.string_box(148,45,148,23, "Hacking", "PermanentMarker22", ugfx.BLACK, ugfx.justifyCenter)
ugfx.string_box(148,70,148,26, "Anyway", "Roboto_BlackItalic24", ugfx.BLACK, ugfx.justifyCenter)
#the line under the text
str_len = ugfx.get_string_width("Hacking","PermanentMarker22")
line_begin = 148 + int((148-str_len)/2)
line_end = str_len+line_begin
ugfx.line(line_begin, 68, line_end, 68, ugfx.BLACK)
#the cursor past the text
cursor_pos = line_end+5
ugfx.line(cursor_pos, 46, cursor_pos, 66, ugfx.BLACK)
ugfx.string_box(148,110,148,18, version.name,"Roboto_Regular12",ugfx.BLACK, ugfx.justifyLeft)
ugfx.flush()
options = None
install_path = None
def populate_it():
global options
options = ugfx.List(0,0,int(ugfx.width()/2),ugfx.height())
try:
apps = os.listdir('lib')
except OSError:
apps = []
options.add_item('installer')
options.add_item('ota_update')
for app in apps:
options.add_item(app)
def run_it(pushed):
if (pushed):
selected = options.selected_text()
options.destroy()
ugfx.clear(ugfx.BLACK)
ugfx.string_box(0, 25, 296, 25,"Running:","Roboto_BlackItalic24",ugfx.WHITE, ugfx.justifyCenter)
ugfx.string_box(0, 51, 296, 23, selected, "PermanentMarker22", ugfx.WHITE, ugfx.justifyCenter)
ugfx.flush()
badge.eink_busy_wait()
appglue.start_app(selected)
def expandhome(s):
if "~/" in s:
h = os.getenv("HOME")
s = s.replace("~/", h + "/")
return s
def get_install_path():
global install_path
if install_path is None:
# sys.path[0] is current module's path
install_path = sys.path[1]
install_path = expandhome(install_path)
return install_path
def uninstall_it(pushed):
if (pushed):
selected = options.selected_text()
if selected == 'installer':
return
if selected == 'ota_update':
return
options.destroy()
def perform_uninstall(ok):
if ok:
ugfx.clear(ugfx.BLACK)
ugfx.string_box(0, 25, 296, 25,"Uninstalling:","Roboto_BlackItalic24",ugfx.WHITE, ugfx.justifyCenter)
ugfx.string_box(0, 51, 296, 23, selected, "PermanentMarker22", ugfx.WHITE, ugfx.justifyCenter)
ugfx.flush()
install_path = get_install_path()
for rm_file in os.listdir("%s/%s" % (install_path, selected)):
os.remove("%s/%s/%s" % (install_path, selected, rm_file))
os.rmdir("%s/%s" % (install_path, selected))
badge.eink_busy_wait()
appglue.start_app('launcher')
import dialogs
uninstall = dialogs.prompt_boolean('Are you sure you want to remove %s?' % selected, cb=perform_uninstall)
populate_it()
ugfx.input_attach(ugfx.BTN_A, run_it)
ugfx.input_attach(ugfx.BTN_B, uninstall_it)
ugfx.input_attach(ugfx.JOY_UP, lambda pushed: ugfx.flush() if pushed else 0)
ugfx.input_attach(ugfx.JOY_DOWN, lambda pushed: ugfx.flush() if pushed else 0)
ugfx.set_lut(ugfx.LUT_FULL)
ugfx.flush()
ugfx.set_lut(ugfx.LUT_FASTER)
| Python | 0.000002 |
@@ -3388,16 +3388,106 @@
lse 0)%0A%0A
+ugfx.input_attach(ugfx.BTN_START, lambda pushed: appglue.start_app(%22%22) if pushed else 0)%0A%0A
ugfx.set
|
cf4fd0c08049e42f724f0a00a7385ba32b3a51cd | Remove function show_hand | blackjack/blackjack.py | blackjack/blackjack.py |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from random import shuffle
class Blackjack(object):
def __init__(self):
self.money = 2000.00
def creat_deck(self, deck=1):
'''
Create card function, it is possible to create a deck with more cards.
Increases the difficulty for the player. Maximum of 8 decks.
'''
suits = ["♣", "♦", "♥", "♠"]
numbers = ["A", "2", "3", "4", "5", "6", "7", "8", "9", "10", "Q",
"J", "K"]
self.decks = []
count = 0
while count < deck:
count += 1
for suit in suits:
for number in numbers:
self.decks.append('{}{}'.format(number, suit))
return shuffle(self.decks)
def bet(self, coin, quantity):
'''
Betting values are defined as casino chips. That is why exist the
exception error.
'''
if coin not in (1, 5, 10, 25, 50, 100):
raise Exception('Invalid coin for bet.')
elif self.money < (coin * quantity):
raise Exception('Value of bet larger what your money.')
else:
self.money -= (coin * quantity)
self.money += 0.01
return self.money
def play(self):
'''
To start the function it is necessary to have a bet, that is why the
variable self.money receives 0.01 in the betting function, ensuring
that a bet was made. Remember that function remove 4 cards of decks.
'''
self.hand = []
self.house = []
if self.money == 2000.00:
raise Exception('Bet is necessary for player.')
else:
while len(self.hand) < 2:
self.hand.append(self.decks.pop(0))
self.house.append(self.decks.pop(0))
return self.hand, self.house
def show_hand(self):
'''
This function only shows the cards to the player. His and those in the
house.
'''
msg = ('Your card: {}')
cards = ', '.join(self.hand)
return(msg.format(cards), 'House: {}, X'.format(self.house[0]))
def show_points(self, count):
'''
Function counting the value of the cards. According to the rule if you
have an Ace and a J, complete the value 21 or Blackjack.
'''
self.points = 0
for card in count:
value = card[:-1]
if value in ('A' and 'J', 'Q', 'K', 10):
self.points += 21
self.points -= 1
elif value == 'A':
self.points += 1
elif value in ('J', 'Q', 'K'):
self.points += 10
else:
self.points += int(value)
return self.points
def hit(self):
'''
Purchase function of cards. Allows purchase while value of sum of cards
is lower than 21.
'''
if self.points < 21:
self.hand.append(self.decks.pop(0))
else:
raise Exception(None)
| Python | 0.000009 |
@@ -788,35 +788,37 @@
, coin, quantity
+=1
):%0A
-
'''%0A
@@ -1889,311 +1889,26 @@
how_
-hand(self):%0A '''%0A This function only shows the cards to the player. His and those in the%0A house.%0A '''%0A msg = ('Your card: %7B%7D')%0A cards = ', '.join(self.hand)%0A return(msg.format(cards), 'House: %7B%7D, X'.format(self.house%5B0%5D))%0A%0A def show_points(self, count
+points(self, cards
):%0A
@@ -2119,20 +2119,20 @@
ard in c
-ount
+ards
:%0A
@@ -2736,32 +2736,32 @@
)%0A else:%0A
-
rais
@@ -2778,8 +2778,33 @@
n(None)%0A
+ return self.hand%0A
|
e3e3b59654133bd33c708343976825bb0c68d6f1 | use development config as default | manage.py | manage.py |
#!/usr/bin/env python
import os
import errno
import logging
from flask import current_app
from flask.ext.script import Manager
from pypi_notifier import create_app, db, models, cache
logging.basicConfig(level=logging.DEBUG)
manager = Manager(create_app)
# Must be a class name from config.py
config = os.environ['PYPI_NOTIFIER_CONFIG']
manager.add_option('-c', '--config', dest='config', required=False,
default=config)
@manager.shell
def make_shell_context():
return dict(app=current_app, db=db, models=models)
@manager.command
def init_db():
db.create_all()
@manager.command
def drop_db():
try:
os.unlink('/tmp/pypi_notifier.db')
except OSError as e:
if e.errno != errno.ENOENT:
raise
@manager.command
def fetch_package_list():
models.Package.get_all_names()
@manager.command
def clear_cache():
cache.clear()
@manager.command
def find_latest(name):
print models.Package(name).find_latest_version()
@manager.command
def update_users():
models.User.update_all_users_from_github()
@manager.command
def update_repos():
models.Repo.update_all_repos()
@manager.command
def update_packages():
models.Package.update_all_packages()
@manager.command
def send_emails():
models.User.send_emails()
if __name__ == '__main__':
manager.run()
| Python | 0.000001 |
@@ -253,16 +253,26 @@
e_app)%0A%0A
+%0Atry:%0A
# Must b
@@ -301,16 +301,20 @@
nfig.py%0A
+
config =
@@ -349,16 +349,272 @@
ONFIG'%5D%0A
+except KeyError:%0A print %22PYPI_NOTIFIER_CONFIG is not found in env, using DevelopmentConfig.%22%0A print 'If you want to use another config please set it as ' %5C%0A '%22export PYPI_NOTIFIER_CONFIG=ProductionConfig%22.'%0A config = 'DevelopmentConfig'%0A%0A
manager.
|
f63c31349b3191ae46c7fd2f66e964a7b799f4ea | Remove --detach flag from "git checkout" | slave/skia_slave_scripts/utils/gclient_utils.py | slave/skia_slave_scripts/utils/gclient_utils.py |
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module contains utilities for managing gclient checkouts."""
from common import find_depot_tools
import os
import shell_utils
GIT = 'git.bat' if os.name == 'nt' else 'git'
WHICH = 'where' if os.name == 'nt' else 'which'
SKIA_TRUNK = 'skia'
def _GetGclientPy():
""" Return the path to the gclient.py file. """
path_to_gclient = find_depot_tools.add_depot_tools_to_path()
if path_to_gclient:
return os.path.join(path_to_gclient, 'gclient.py')
print 'Falling back on using "gclient" or "gclient.bat"'
if os.name == 'nt':
return 'gclient.bat'
else:
return 'gclient'
GCLIENT_PY = _GetGclientPy()
GCLIENT_FILE = '.gclient'
def _RunCmd(cmd):
""" Run a "gclient ..." command. """
return shell_utils.Bash(['python', GCLIENT_PY] + cmd)
def Config(spec):
""" Configure a local checkout. """
return _RunCmd(['config', '--spec=%s' % spec])
def _GetLocalConfig():
""" Find and return the configuration for the local checkout. """
if not os.path.isfile(GCLIENT_FILE):
raise Exception('Unable to find %s' % GCLIENT_FILE)
config_vars = {}
exec(open(GCLIENT_FILE).read(), config_vars)
return config_vars['solutions']
def Sync(revision=None, force=False, delete_unversioned_trees=False,
branches=None, verbose=False, jobs=None, no_hooks=False,
extra_args=None):
""" Update the local checkout to the given revision, if provided, or to the
most recent revision. """
cmd = ['sync', '--no-nag-max']
if verbose:
cmd.append('--verbose')
if force:
cmd.append('--force')
if delete_unversioned_trees:
cmd.append('--delete_unversioned_trees')
if jobs:
cmd.append('-j%d' % jobs)
if no_hooks:
cmd.append('--nohooks')
if revision and branches and SKIA_TRUNK in branches:
cmd.extend(['--revision', '%s@%s' % (SKIA_TRUNK, revision)])
if extra_args:
cmd.extend(extra_args)
output = _RunCmd(cmd)
# "gclient sync" just downloads all of the commits. In order to actually sync
# to the desired commit, we have to "git reset" to that commit.
start_dir = os.path.abspath(os.curdir)
if branches and SKIA_TRUNK in branches:
os.chdir(SKIA_TRUNK)
if revision:
shell_utils.Bash([GIT, 'reset', '--hard', revision])
else:
shell_utils.Bash([GIT, 'checkout', 'origin/master', '--detach', '-f'])
os.chdir(start_dir)
return output
def GetCheckedOutHash():
""" Determine what commit we actually got. If there are local modifications,
raise an exception. """
config = _GetLocalConfig()
current_directory = os.path.abspath(os.curdir)
# Get the checked-out commit hash for the first gclient solution.
os.chdir(config[0]['name'])
try:
# "git rev-parse HEAD" returns the commit hash for HEAD.
return shell_utils.Bash([GIT, 'rev-parse', 'HEAD'],
log_in_real_time=False).rstrip('\n')
finally:
os.chdir(current_directory)
def Revert():
shell_utils.Bash([GIT, 'clean', '-f', '-d'])
shell_utils.Bash([GIT, 'reset', '--hard', 'HEAD'])
| Python | 0.000003 |
@@ -2483,20 +2483,8 @@
er',
- '--detach',
'-f
|
b96877517bedc9934905ff6b79c4158e04d5f9b7 | initialize currentPoseName to None | software/ddapp/src/python/ddapp/jointcontrol.py | software/ddapp/src/python/ddapp/jointcontrol.py |
import math
from ddapp.timercallback import TimerCallback
from ddapp.simpletimer import SimpleTimer
from ddapp import midi
class JointController(object):
def __init__(self, models, poseCollection=None):
#self.numberOfJoints = model.numberOfJoints()
self.numberOfJoints = 34
self.models = models
self.poses = {}
self.poseCollection = poseCollection
self.addPose('q_zero', [0.0 for i in xrange(self.numberOfJoints)])
def setJointPosition(self, jointId, position):
'''
Set joint position in degrees.
'''
assert jointId >= 0 and jointId < len(self.q)
self.q[jointId] = math.radians(position % 360.0)
self.push()
def push(self):
for model in self.models:
model.setJointPositions(self.q)
def reset(self):
self.q = [0.0 for i in xrange(self.numberOfJoints)]
def setPose(self, poseName):
if poseName not in self.poses:
raise Exception('Pose %r has not been defined.' % poseName)
self.q = self.poses[poseName]
self.currentPoseName = poseName
self.push()
def setZeroPose(self):
self.setPose('q_zero')
def setNominalPose(self):
self.setPose('q_nom')
def getPose(self, poseName):
return self.poses.get(poseName)
def addPose(self, poseName, poseData):
assert len(poseData) == self.numberOfJoints
self.poses[poseName] = poseData
if self.poseCollection is not None:
self.poseCollection.setItem(poseName, poseData)
def addNominalPoseFromFile(self, filename):
import scipy.io
matData = scipy.io.loadmat(filename)
#xstar = matData['xstar'][:self.numberOfJoints]
xstar = matData['xstar'][:34]
self.addPose('q_nom', xstar.flatten().tolist())
class MidiJointControl(TimerCallback):
def __init__(self, jointController):
TimerCallback.__init__(self)
self.reader = midi.MidiReader()
self.controller = jointController
self.channelToJoint = { 21: 13 }
def _scaleMidiValue(self, midiValue):
degrees = midiValue * 180.0/127.0
return degrees
def tick(self):
messages = self.reader.getMessages()
if not messages:
return
targets = {}
for message in messages:
channel = message[2]
value = message[3]
targets[channel] = value
for channel, value in targets.iteritems():
jointId = self.channelToJoint.get(channel)
position = self._scaleMidiValue(value)
if jointId is not None:
self.controller.setJointPosition(jointId, position)
class JointControlTestRamp(TimerCallback):
def __init__(self, jointController):
TimerCallback.__init__(self)
self.controller = jointController
self.testTime = 2.0
def testJoint(self, jointId):
self.jointId = jointId
self.testTimer = SimpleTimer()
self.start()
def tick(self):
if self.testTimer.elapsed() > self.testTime:
self.stop()
return
jointPosition = math.sin( (self.testTimer.elapsed() / self.testTime) * math.pi) * math.pi
self.controller.setJointPosition(self.jointId, math.degrees(jointPosition))
| Python | 0.999999 |
@@ -388,16 +388,52 @@
lection%0A
+ self.currentPoseName = None%0A
|
26fc40f3ca729147e838af4d98362484bed776df | Simplify main function in problem58.py | euler_python/problem58.py | euler_python/problem58.py |
"""
problem58.py
Starting with 1 and spiralling anticlockwise in the following way, a square
spiral with side length 7 is formed.
37 36 35 34 33 32 31
38 17 16 15 14 13 30
39 18 5 4 3 12 29
40 19 6 1 2 11 28
41 20 7 8 9 10 27
42 21 22 23 24 25 26
43 44 45 46 47 48 49
It is interesting to note that the odd squares lie along the bottom right
diagonal, but what is more interesting is that 8 out of the 13 numbers lying
along both diagonals are prime; that is, a ratio of 8/13 ≈ 62%.
If one complete new layer is wrapped around the spiral above, a square spiral
with side length 9 will be formed. If this process is continued, what is the
side length of the square spiral for which the ratio of primes along both
diagonals first falls below 10%?
"""
from itertools import count
from math import sqrt
from toolset import is_prime, quantify
def square_length(n):
"Given the bottom right corner number, return the square length"
return int(sqrt(n))
def corners(n):
"Given the bottom right corner number, return the four corner numbers"
# 49 --> [49, 43, 37, 31]
x = square_length(n) - 1
return [n, n-x, n-(2*x), n-(3*x)]
def problem58():
# Yields all four corners from each new layer, starting at fifth layer.
# next(all_corners) --> [81, 73, 65, 57], [121, 111, 101, 91], ...
all_corners = (corners(x**2) for x in count(start=9, step=2))
primes, total = 8, 13
while True:
cs = next(all_corners)
primes += quantify(cs, pred=is_prime)
total += 4
if primes / total < 0.10:
# cs[0] is the bottom right corner number
return square_length(cs[0])
| Python | 0.000004 |
@@ -1203,289 +1203,98 @@
-# Yields all four corners from each new layer, starting at fifth layer.%0A # next(all_corners) --%3E %5B81, 73, 65, 57%5D, %5B121, 111, 101, 91%5D, ...%0A all_corners = (corners(x**2) for x in count(start=9, step=2))%0A
+length = 7%0A primes = 8%0A total = 13%0A while
primes
-,
+/
total
-= 8, 13%0A while True:%0A cs = next(all_corners)
+%3E 0.1:%0A length += 2
%0A
@@ -1318,17 +1318,33 @@
antify(c
-s
+orners(length**2)
, pred=i
@@ -1379,128 +1379,18 @@
- if primes / total %3C 0.10:%0A # cs%5B0%5D is the bottom right corner number%0A return square_length(cs%5B0%5D)
+return length
%0A
|
b9e86baf8a7765a4a616ae5c4890263ea07cea14 | Add support for string filtering | zc_common/remote_resource/views.py | zc_common/remote_resource/views.py |
from django.db.models import Model
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from rest_framework import viewsets
from rest_framework.exceptions import MethodNotAllowed
from rest_framework_json_api.views import RelationshipView as OldRelView
from zc_common.remote_resource.models import RemoteResource
from zc_common.remote_resource.serializers import ResourceIdentifierObjectSerializer
class ModelViewSet(viewsets.ModelViewSet):
"""
This class overwrites the ModelViewSet's list method, which handles
requests made to the collection's base endpoint (/collection), in
order to provide support for filtering via the filter[] query parameter.
Inheriting from this class, along with adding the filter backend, will properly
handle requests made to /collection as well as /collection?filter[name]=test.
It's also possible to filter by a collection of primary keys, for example:
/collection?filter[id__in]=1,2,3
Requests to filter on keys that do not exist will return an empty set.
"""
@property
def filter_fields(self):
queryset = self.get_queryset()
# TODO: replace deprecated get_all_field_names()
field_names = queryset.model._meta.get_all_field_names()
primary_key = queryset.model._meta.pk.name
fields = {}
for name in field_names:
fields[name] = ['exact']
if name == primary_key:
fields['id'] = ['in', 'exact']
return fields
def has_ids_query_params(self):
return hasattr(self.request, 'query_params') and 'filter[id__in]' in self.request.query_params
class RelationshipView(OldRelView):
serializer_class = ResourceIdentifierObjectSerializer
def patch(self, request, *args, **kwargs):
"""
Restricting PATCH requests made to the relationship view temporarily to
prevent the possibility of data corruption when PATCH requests are made
to to-many related resources. This override will not be necessary
once a fix is made upstream.
See:
https://github.com/django-json-api/django-rest-framework-json-api/issues/242
"""
raise MethodNotAllowed('PATCH')
def _instantiate_serializer(self, instance):
if isinstance(instance, RemoteResource):
return ResourceIdentifierObjectSerializer(instance=instance)
if isinstance(instance, Model) or instance is None:
return self.get_serializer(instance=instance)
else:
if isinstance(instance, (QuerySet, Manager)):
instance = instance.all()
return self.get_serializer(instance=instance, many=True)
| Python | 0.000001 |
@@ -1,28 +1,80 @@
+from django.db.models import fields as model_fields%0A
from django.db.models import
@@ -1212,167 +1212,658 @@
-# TODO: replace deprecated get_all_field_names()%0A field_names = queryset.model._meta.get_all_field_names()%0A primary_key = queryset.model._meta.pk
+return_fields = %7B%7D%0A%0A fields = queryset.model._meta.get_fields()%0A for field in fields:%0A # For backwards compatibility GenericForeignKey should not be%0A # included in the results.%0A if field.is_relation and field.many_to_one and field.related_model is None:%0A continue%0A # Relations to child proxy models should not be included.%0A if (field.model != queryset.model._meta.model and%0A field.model._meta.concrete_model == queryset.model._meta.concrete_model):%0A continue%0A%0A name = field.attname if hasattr(field, 'attname') else field
.nam
@@ -1876,52 +1876,103 @@
+
+
field
-s = %7B%7D%0A%0A for name in field_names
+_type = type(field)%0A if hasattr(field, 'primary_key') and field.primary_key
:%0A
@@ -1985,24 +1985,41 @@
+ return_
fields%5B
-name
+'id'
%5D = %5B
+'in',
'exa
@@ -2039,30 +2039,75 @@
+el
if
-name == primary_key
+field_type in (model_fields.TextField, model_fields.CharField)
:%0A
@@ -2120,35 +2120,49 @@
+return_
fields%5B
-'id'
+name
%5D = %5B'i
-n
+contains
', 'exac
@@ -2173,23 +2173,97 @@
-return
+ else:%0A return_fields%5Bname%5D = %5B'exact'%5D%0A%0A return return_
fields%0A%0A
|
a6ce64d251effaae77efd5e74d8121d0ff8280f4 | fix style and grammar | eventful/events/models.py | eventful/events/models.py |
from collections import defaultdict
from django.contrib.auth.models import User
from django.db import models
from .managers import EventManager, EventInviteManager
class Event(models.Model):
PUBLIC = 'PB'
PRIVATE = 'PR'
FRIENDS = 'FR'
PRIVACY_CHOICES = (
(PUBLIC, 'Public'),
(PRIVATE, 'Private'),
(FRIENDS, 'Friends only')
)
created_by = models.ForeignKey(User, related_name='created_events', on_delete=models.CASCADE)
title = models.CharField(max_length=128)
description = models.TextField(max_length=500, blank=True)
start_date = models.DateTimeField()
creation_date = models.DateTimeField(auto_now_add=True)
privacy = models.CharField(max_length=2, choices=PRIVACY_CHOICES, default=PUBLIC)
views = models.IntegerField(default=0)
latitude = models.DecimalField(max_digits=10, decimal_places=7, blank=True, null=True)
longitude = models.DecimalField(max_digits=10, decimal_places=7, blank=True, null=True)
objects = EventManager()
class Meta:
ordering = ['start_date']
def __str__(self):
return self.title
def incr_views(self):
Event.objects.filter(pk=self.pk).update(views=models.F('views') + 1)
def invited_by_status(self):
invited_by_status = defaultdict(list)
invites = self.invites.select_related('event', 'to_user')
for invite in invites:
invited_by_status[invite.get_status_display()].append(invite.to_user)
return invited_by_status
def _get_user_invite(self, user_pk):
for invite in self.invites.all():
if invite.to_user_id == user_pk:
return invite
def get_permission_and_invite(self, user):
invite = self._get_user_invite(user.pk)
if invite or self.privacy == self.PUBLIC or self.created_by_id == user.id:
return True, invite
elif (self.privacy == self.FRIENDS and
user.profile.are_friends_by_filter(self.created_by_id)):
return True, None
return False, None
def self_invite_exist(self, user_pk):
return EventInvite.objects.filter(
event=self, to_user_id=user_pk, status=EventInvite.SELF
).exists()
class EventInvite(models.Model):
ACCEPTED = 'A'
REJECTED = 'R'
PENDING = 'P'
SELF = 'S'
STATUS_CHOICES = (
(ACCEPTED, 'accepted'),
(REJECTED, 'rejected'),
(PENDING, 'pending'),
(SELF, 'self'),
)
event = models.ForeignKey(
Event,
related_name='invites',
on_delete=models.CASCADE
)
from_user = models.ForeignKey(
User,
related_name='event_invites_sent',
on_delete=models.CASCADE
)
to_user = models.ForeignKey(
User,
related_name='event_invites_received',
on_delete=models.CASCADE
)
status = models.CharField(
choices=STATUS_CHOICES,
max_length=1,
default=PENDING
)
objects = EventInviteManager()
class Meta:
unique_together = ('event', 'to_user')
def __str__(self):
return 'Event: {}; From: {}; To: {}; S: {}'.format(self.event_id,
self.from_user_id,
self.to_user_id,
self.status)
| Python | 0.000005 |
@@ -1896,17 +1896,16 @@
elif
-(
self.pri
@@ -1928,32 +1928,16 @@
ENDS and
-%0A
user.pr
@@ -1983,17 +1983,16 @@
d_by_id)
-)
:%0A
@@ -2068,16 +2068,17 @@
te_exist
+s
(self, u
@@ -2128,29 +2128,16 @@
.filter(
-%0A
event=se
@@ -2183,25 +2183,16 @@
ite.SELF
-%0A
).exists
@@ -2483,31 +2483,14 @@
Key(
-%0A Event,%0A
+Event,
rel
@@ -2509,24 +2509,16 @@
nvites',
-%0A
on_dele
@@ -2530,29 +2530,24 @@
dels.CASCADE
-%0A
)%0A from_u
@@ -2570,38 +2570,21 @@
eignKey(
-%0A User,%0A
+User,
related
@@ -2610,24 +2610,16 @@
s_sent',
-%0A
on_dele
@@ -2631,29 +2631,24 @@
dels.CASCADE
-%0A
)%0A to_use
@@ -2673,30 +2673,13 @@
Key(
-%0A User,%0A
+User,
rel
@@ -2713,24 +2713,16 @@
ceived',
-%0A
on_dele
@@ -2738,21 +2738,16 @@
.CASCADE
-%0A
)%0A st
@@ -2770,25 +2770,16 @@
arField(
-%0A
choices=
@@ -2793,24 +2793,16 @@
CHOICES,
-%0A
max_len
@@ -2807,24 +2807,16 @@
ength=1,
-%0A
default
@@ -2823,21 +2823,16 @@
=PENDING
-%0A
)%0A%0A o
|
68e32ab4c763461ffbbea6a3ed698f66fdb48d4d | Use only user_id and course_id during the kNN computation: speedup is 5x Previously most time was consumed in db queries and suprisingly in __hash__ methods (seem that hashing a django model takes longer that hashing a int) | catalog/predictions.py | catalog/predictions.py |
from users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
| Python | 0.000002 |
@@ -1,34 +1,4 @@
-from users.models import User%0A
from
@@ -433,26 +433,16 @@
.objects
-%5C%0A
.filter(
@@ -470,83 +470,35 @@
ype)
-%5C%0A .select_related('user')%5C%0A .prefetch_related('follow_
+.only('user_id', '
object
+_id
')%0A%0A
@@ -612,20 +612,17 @@
user
-.net
+_
id%5D.add(
foll
@@ -621,28 +621,29 @@
add(
+int(
follow.
-follow_
object
+_id)
)%0A%0A
@@ -734,75 +734,18 @@
cts.
-all()%0A users = %7Buser.netid: user for user in User.objects.all()%7D
+only('id')
%0A
@@ -820,45 +820,16 @@
for
-netid, user in users.items():%0A
+user_id,
fol
@@ -835,17 +835,18 @@
llowing
-=
+in
users_f
@@ -853,23 +853,25 @@
ollowing
-%5Bnetid%5D
+.items():
%0A
@@ -879,19 +879,21 @@
vectors%5B
-net
+user_
id%5D = %5Bc
@@ -893,24 +893,27 @@
d%5D = %5Bcourse
+.id
in followin
@@ -970,35 +970,32 @@
ors%5Btarget_user.
-net
id%5D%0A%0A distanc
@@ -992,35 +992,37 @@
distances = %7B
-net
+user_
id: distance(tar
@@ -1041,27 +1041,29 @@
vector) for
-net
+user_
id, vector i
@@ -1107,19 +1107,21 @@
nces = %7B
-net
+user_
id: dist
@@ -1125,27 +1125,29 @@
istance for
-net
+user_
id, distance
@@ -1374,19 +1374,16 @@
et_user.
-net
id%5D%0A%0A
@@ -1387,19 +1387,21 @@
for
-net
+user_
id, scor
@@ -1458,11 +1458,13 @@
ing%5B
-net
+user_
id%5D
@@ -1529,21 +1529,101 @@
return
-best_matches
+%5B(Course.objects.get(id=course_id), hits) for course_id, hits in best_matches.most_common()%5D
%0A
|
8d70c34e0a8c384ed42e3017ba744166ec439a50 | Use the ConsumerSet so all custom queues are purged as well. Thanks to Mat Clayton. | celery/task/control.py | celery/task/control.py |
from celery import conf
from celery.messaging import TaskConsumer, BroadcastPublisher, with_connection
@with_connection
def discard_all(connection=None,
connect_timeout=conf.BROKER_CONNECTION_TIMEOUT):
"""Discard all waiting tasks.
This will ignore all tasks waiting for execution, and they will
be deleted from the messaging server.
:returns: the number of tasks discarded.
"""
consumer = TaskConsumer(connection=connection)
try:
return consumer.discard_all()
finally:
consumer.close()
def revoke(task_id, destination=None, connection=None,
connect_timeout=conf.BROKER_CONNECTION_TIMEOUT):
"""Revoke a task by id.
If a task is revoked, the workers will ignore the task and not execute
it after all.
:param task_id: Id of the task to revoke.
:keyword destination: If set, a list of the hosts to send the command to,
when empty broadcast to all workers.
:keyword connection: Custom broker connection to use, if not set,
a connection will be established automatically.
:keyword connect_timeout: Timeout for new connection if a custom
connection is not provided.
"""
return broadcast("revoke", destination=destination,
arguments={"task_id": task_id})
def rate_limit(task_name, rate_limit, destination=None, connection=None,
connect_timeout=conf.BROKER_CONNECTION_TIMEOUT):
"""Set rate limit for task by type.
:param task_name: Type of task to change rate limit for.
:param rate_limit: The rate limit as tasks per second, or a rate limit
string (``"100/m"``, etc. see :attr:`celery.task.base.Task.rate_limit`
for more information).
:keyword destination: If set, a list of the hosts to send the command to,
when empty broadcast to all workers.
:keyword connection: Custom broker connection to use, if not set,
a connection will be established automatically.
:keyword connect_timeout: Timeout for new connection if a custom
connection is not provided.
"""
return broadcast("rate_limit", destination=destination,
arguments={"task_name": task_name,
"rate_limit": rate_limit})
@with_connection
def broadcast(command, arguments=None, destination=None, connection=None,
connect_timeout=conf.BROKER_CONNECTION_TIMEOUT):
"""Broadcast a control command to the celery workers.
:param command: Name of command to send.
:param arguments: Keyword arguments for the command.
:keyword destination: If set, a list of the hosts to send the command to,
when empty broadcast to all workers.
:keyword connection: Custom broker connection to use, if not set,
a connection will be established automatically.
:keyword connect_timeout: Timeout for new connection if a custom
connection is not provided.
"""
arguments = arguments or {}
broadcast = BroadcastPublisher(connection)
try:
broadcast.send(command, arguments, destination=destination)
finally:
broadcast.close()
| Python | 0 |
@@ -49,22 +49,8 @@
port
- TaskConsumer,
Bro
@@ -68,25 +68,71 @@
sher
-, with_connection
+%0Afrom celery.messaging import with_connection, get_consumer_set
%0A%0A%0A@
@@ -453,23 +453,28 @@
umer
+s
=
-TaskC
+get_c
onsumer
+_set
(con
@@ -525,16 +525,17 @@
consumer
+s
.discard
@@ -570,16 +570,17 @@
consumer
+s
.close()
|
797c4405aa2b75da9b7bdbb7e0e26f8bae3308b6 | handle BadPickleGet on restore | coopy/restore.py | coopy/restore.py |
import logging
import fileutils
from snapshot import SnapshotManager
from foundation import RestoreClock
from cPickle import Unpickler
logger = logging.getLogger("coopy")
LOG_PREFIX = '[RESTORE] '
def restore(system, basedir):
#save current clock
current_clock = system._clock
#restore from snapshot
system = SnapshotManager(basedir).recover_snapshot()
files = fileutils.last_log_files(basedir)
logger.debug(LOG_PREFIX + "Files found: " + str(files))
if not files:
return system
actions = []
for file in files:
logger.debug(LOG_PREFIX + "Opening " + str(file))
unpickler = Unpickler(open(file,'rb'))
try:
while True:
action = unpickler.load()
logger.debug(LOG_PREFIX + action.action)
actions.append(action)
except EOFError:
pass
if not actions:
return system
logger.debug(LOG_PREFIX + "Actions re-execution")
for action in actions:
try:
system._clock = RestoreClock(action.timestamps)
action.execute_action(system)
except Exception as e:
logger.debug(LOG_PREFIX + 'Error executing :' + str(action))
system._clock = current_clock
return system
| Python | 0 |
@@ -128,17 +128,31 @@
npickler
+, BadPickleGet
%0A
-
%0Alogger
@@ -294,22 +294,17 @@
m._clock
- %0A
+%0A
%0A #re
@@ -380,21 +380,17 @@
pshot()%0A
-
%0A
+
file
@@ -523,29 +523,25 @@
turn system%0A
-
%0A
+
actions
@@ -841,32 +841,136 @@
.append(action)%0A
+ except BadPickleGet:%0A logger.error(LOG_PREFIX + %22Error unpickling %25s%22 %25 (str(file)))%0A
except E
@@ -999,21 +999,9 @@
ass%0A
-
%0A
+
@@ -1038,21 +1038,17 @@
system%0A
-
%0A
+
logg
@@ -1343,17 +1343,9 @@
n))%0A
-
%0A
+
@@ -1370,24 +1370,24 @@
rrent_clock%0A
+
return s
@@ -1396,9 +1396,4 @@
tem%0A
- %0A
|
4c9b8a55d26df7421decdc05236499f61583ab38 | fix smart folder content getter | novaideo/utilities/smart_folder_utility.py | novaideo/utilities/smart_folder_utility.py |
# -*- coding: utf8 -*-
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# avalaible on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from substanced.util import get_oid
from dace.util import get_obj
from novaideo.views.filter import (
find_entities)
from novaideo.utilities.util import (
deepcopy)
from novaideo.views.filter.util import QUERY_OPERATORS
def get_adapted_filter(folder, user):
return {'select': ['metadata_filter',
'contribution_filter', 'temporal_filter',
'text_filter', 'other_filter']}
def get_folder_content(folder, user,
add_query=None,
**args):
_filters = deepcopy(getattr(folder, 'filters', []))
objects = []
if _filters:
query = None
if add_query:
query = QUERY_OPERATORS['and'](query, add_query)
objects = find_entities(
user=user,
add_query=query,
filters=_filters,
filter_op='or',
**args)
oids = [get_oid(c) for c in folder.contents]
if args:
contents = find_entities(
user=user,
intersect=oids,
**args)
oids = contents.ids if not isinstance(contents, list) else contents
if isinstance(objects, list):
objects.extend([get_obj(o) for o in oids])
else: # ResultSet
objects.ids = list(objects.ids)
objects.ids.extend(oids)
objects.numids += len(oids)
return objects
| Python | 0.000001 |
@@ -175,18 +175,15 @@
rom
-substanced
+hypatia
.uti
@@ -195,26 +195,34 @@
ort
-get_oid%0A%0Afrom da
+ResultSet%0A%0Afrom substan
ce
+d
.uti
@@ -235,18 +235,34 @@
rt get_o
-bj
+id, find_objectmap
%0A%0Afrom n
@@ -1384,43 +1384,102 @@
ject
-s.extend(%5Bget_obj(o) for o in oids%5D
+map = find_objectmap(folder)%0A objects = ResultSet(oids, len(oids), objectmap.object_for
)%0A
|
70db54eba970f8e8f6c42587675f1525002ea12f | Update wrong status code stated in docstring | hug/redirect.py | hug/redirect.py |
"""hug/redirect.py
Implements convience redirect methods that raise a redirection exception when called
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import falcon
def to(location, code=falcon.HTTP_302):
"""Redirects to the specified location using the provided http_code (defaults to HTTP_302 FOUND)"""
raise falcon.http_status.HTTPStatus(code, {'location': location})
def permanent(location):
"""Redirects to the specified location using HTTP 301 status code"""
to(location, falcon.HTTP_301)
def found(location):
"""Redirects to the specified location using HTTP 302 status code"""
to(location, falcon.HTTP_302)
def see_other(location):
"""Redirects to the specified location using HTTP 303 status code"""
to(location, falcon.HTTP_303)
def temporary(location):
"""Redirects to the specified location using HTTP 304 status code"""
to(location, falcon.HTTP_307)
def not_found(*args, **kwargs):
"""Redirects request handling to the not found render"""
raise falcon.HTTPNotFound()
| Python | 0 |
@@ -1926,9 +1926,9 @@
P 30
-4
+7
sta
|
1db7ef99aef8691600d74b23a751c6a753e2a5da | Update : Enhancing timer function and exec times storage | hurdles/base.py | hurdles/base.py |
# -*- coding:utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
import time
from functools import wraps
from inspect import getmembers, ismethod
def time_it(func, *args, **kwargs):
"""
Decorator whichs times a function execution.
"""
start = time.time()
func(*args, **kwargs)
end = time.time()
exec_time = "%s (%0.3f ms)" % (func.func_name, (end - start) * 1000)
return exec_time
def extra_setup(setup_code):
"""Allows to setup some extra context to it's decorated function.
As a convention, the bench decorated function should always handle
*args and **kwargs. Kwargs will be updated with the extra context
set by the decorator.
Example:
@extra_setup("l = [x for x in xrange(100)]")
def bench_len(self, *args, **kwargs):
print len(kwargs['l'])
"""
def decorator(func):
@wraps(func)
def decorated_function(*args, **kwargs):
exec setup_code in {}, kwargs
return func(*args, **kwargs)
return decorated_function
return decorator
class BenchCase(object):
def __init__(self):
self._benchmarks = []
def setUp(self):
"""Hook method for setting up the benchmark
fixture before exercising it."""
pass
def tearDown(self):
"""Hook method for deconstructing the benchmark
fixture after testing it."""
pass
@property
def benchmarks(self):
if not self._benchmarks:
bench_case_methods = getmembers(self.__class__, predicate=ismethod)
for (method_name, method_value) in bench_case_methods:
if method_name.startswith('bench_'):
self._benchmarks.append((method_name, method_value))
return self._benchmarks
def run(self):
for method_name, method_value in self.benchmarks:
self.setUp()
time_it(method_value, self)
self.tearDown()
| Python | 0 |
@@ -113,16 +113,51 @@
t time%0A%0A
+from collections import namedtuple%0A
from fun
@@ -223,275 +223,81 @@
d%0A%0A%0A
-def time_it(func, *args, **kwargs):%0A %22%22%22%0A Decorator whichs times a function ex
+ExecTimeColl
ec
-u
tion
-.%0A %22%22%22%0A start = time.time()%0A func(*args, **kwargs)%0A end = time.time()%0A exec_time = %22%25s (%250.3f ms)%22 %25 (func.func_name, (end - start) * 1000)%0A%0A return exec_time
+ = namedtuple('ExecTimeCollection', %5B'times', 'scale'%5D)
%0A%0A%0Ad
@@ -1031,16 +1031,109 @@
rks = %5B%5D
+%0A self.results = %7B%0A 'exec_times': %7B%7D,%0A 'averages': %7B%7D,%0A %7D
%0A%0A de
@@ -1776,16 +1776,290 @@
def
-run(self
+tick(self, func, *args, **kwargs):%0A %22%22%22Times a function execution in miliseconds%22%22%22%0A start = time.time()%0A func(*args, **kwargs)%0A end = time.time()%0A exec_time = round(((end - start) * 1000), 2)%0A%0A return exec_time%0A%0A def run(self, repeat=10
):%0A
@@ -2140,16 +2140,17 @@
setUp()%0A
+%0A
@@ -2157,35 +2157,372 @@
-time_it(method_value, self)
+exec_times = ExecTimeCollection(times=%5Bself.tick(method_value, self) for x in %5B0.0%5D * repeat%5D,%0A scale='ms')%0A average = sum(exec_times.times) / repeat%0A%0A self.results%5B'exec_times'%5D.update(%7Bmethod_name: exec_times%7D)%0A self.results%5B'averages'%5D.update(%7Bmethod_name: average%7D)%0A
%0A
@@ -2546,9 +2546,8 @@
rDown()%0A
-%0A
|
aebe8c0b586b408b55b13d8ddd3a974c194455a6 | Update categorical_braninhoo_example.py | examples/categorical_braninhoo_example.py | examples/categorical_braninhoo_example.py |
import whetlab
import numpy as np
# Define parameters to optimize
parameters = { 'X' : {'type':'float','min':0,'max':15,'size':1},
'Y' : {'type':'float','min':-5,'max':10,'size':1},
'Z' : {'type': 'enum', 'options': ['bad','Good!','OK']}}
#access_token = ''
name = 'Categorical Braninhoo'
description = 'Optimize the categorical braninhoo optimization benchmark'
outcome = {'name':'Negative Categorical Braninhoo output', 'type':'float'}
scientist = whetlab.Experiment(name=name, description=description, parameters=parameters, outcome=outcome)
# Braninhoo function
def categorical_braninhoo(X,Y,Z):
if X > 10:
return np.nan
Z = 1 if Z == 'Good!' else 2 if Z == 'OK' else 3
return np.square(Y - (5.1/(4*np.square(np.pi)))*np.square(X) + (5/np.pi)*X - 6) + 10*(1-(1./(8*np.pi)))*np.cos(X) + 10*Z;
for i in range(10000):
# Get suggested new experiment
job = scientist.suggest()
# Perform experiment
print job
outcome = -categorical_braninhoo(**job)
print outcome
# Inform scientist about the outcome
scientist.update(job,outcome)
| Python | 0.000001 |
@@ -1,12 +1,206 @@
+# In this example we will optimize the 'Braninhoo' optimization benchmark with a small twist to %0A# demonstrate how to set up a categorical variable. There is also a constraint on the function.%0A
import whetl
|
1db096e38a15543aa93ee20357f8f5369b60443d | Exclude failed nodes from pairing | models.py | models.py |
"""Define additional classes required for the Joint Estimation Experiment."""
from dallinger.experiments import Experiment
from dallinger.models import Network, Node, Info
from sqlalchemy import Integer
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql.expression import cast
from dallinger.nodes import Source
from random import randint
import json
class Paired(Network):
"""Node <-> Node; Node <-> Node; ...
"""
__mapper_args__ = {"polymorphic_identity": "paired"}
def add_node(self, node):
"""Node <-> Node; Node <-> Node; etc. """
# get a list of all potential partners
all_nodes = self.nodes(type=type(node))
other_nodes = [n for n in all_nodes if n is not node]
available_nodes = [n for n in other_nodes if not any(n.vectors())]
# if there are available nodes
if available_nodes:
import random
# pick a partner at random
partner_node = random.choice(available_nodes)
# connect to them
node.connect(direction="both", whom=partner_node)
# grab the source created for the network
source = self.nodes(type=ListSource)[0]
            # connect the source to both nodes and send them both the same list
source.connect(whom=[node, partner_node])
source.transmit(what=source.new_list(), to_whom=[node, partner_node])
            # let both nodes receive the list that has been sent
node.receive()
partner_node.receive()
class Indexed(Node):
"""A node with an index"""
__mapper_args__ = {"polymorphic_identity": "indexed"}
@hybrid_property
def index(self):
"""Convert property1 to index."""
return int(self.property1)
@index.setter
def index(self, index):
"""Make index settable."""
self.property1 = repr(index)
@index.expression
def index(self):
"""Make index queryable."""
return cast(self.property1, Integer)
def __init__(self, network, participant=None):
"""Give the node its index."""
super(Indexed, self).__init__(network, participant)
self.index = self.network.size(type=Indexed)
@hybrid_property
def accuracy(self):
"""Convert property3 to accuracy."""
return int(self.property3)
@accuracy.setter
def accuracy(self, index):
"""Make accuracy settable."""
self.property3 = repr(accuracy)
@accuracy.expression
def accuracy(self):
"""Make index queryable."""
return cast(self.property3, Integer)
class ListSource(Source):
"""A source that generates lists of numbers randomly sampled from a uniform
distribution for each pair in a paired network. These lists are then sent
to each pair."""
__mapper_args__ = {"polymorphic_identity": "listsource"}
def new_list(self):
"""Generate a list of numbers randomly sampled from a uniform distribution."""
# create list container and specify number of trials
list_to_add = []
list_length = 100
# iterate over our desired number of trials to make a new list of the appropriate length
for new_item in range(list_length):
list_to_add.append(randint(1, 100))
# ship our list as a string (which we'll then reconstitute as a list upon reading)
return Info(origin=self, contents=json.dumps(list_to_add))
|
Python
| 0.000001
|
@@ -806,16 +806,28 @@
vectors(
+failed=%22all%22
))%5D%0A%0A
@@ -908,32 +908,45 @@
import random%0A
+ %0A
# pi
|
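The one-line fix passes failed="all" to vectors(), so a node whose only connections have failed is still counted as unpaired and can be matched again. Stripped of the Dallinger machinery, the pairing rule is simply "a candidate has no vectors at all, failed ones included"; a hedged sketch with a stub node class (only the vectors(failed="all") call is taken from the diff, the rest is illustrative):

import random

class Node(object):
    # Toy stand-in for a Dallinger node, for illustration only.
    def __init__(self, vectors=()):
        self._vectors = list(vectors)      # live and failed vectors together

    def vectors(self, failed="all"):
        # failed="all" mirrors the diff: do not hide failed vectors.
        return self._vectors

def pick_partner(node, all_nodes):
    others = [n for n in all_nodes if n is not node]
    available = [n for n in others if not any(n.vectors(failed="all"))]
    return random.choice(available) if available else None

nodes = [Node(), Node([object()]), Node()]
print(pick_partner(nodes[0], nodes) is nodes[2])   # True: nodes[1] is already connected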
09a35f644e303b625be941432193d19feb7231ac
|
Make links in posts and comments usable
|
models.py
|
models.py
|
""" Data model module for appengine_multiblog
Contains data model definitions
used by Google Cloud Datastore. These are imported
by main.py
"""
from google.appengine.ext import ndb
import bleach
import jinja2
import os
TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'templates')
JINJA_ENV = jinja2.Environment(loader=jinja2.FileSystemLoader(TEMPLATE_DIR),
autoescape=True)
def render_str(template, **params):
""" Passes data from application into jinja templates to render pages"""
template_page = JINJA_ENV.get_template(template)
return template_page.render(params)
""" Take care when modifying these as doing so may cause consistency issues """
class Post(ndb.Model):
""" Blog Post data model for datastore """
subject = ndb.StringProperty(required=True)
content = ndb.TextProperty(required=True)
created = ndb.DateTimeProperty(auto_now_add=True)
last_modified = ndb.DateTimeProperty(auto_now=True)
posting_user = ndb.StringProperty(required=True)
def render(self):
""" escape all html tags from post, then convert newlines to <br> """
self._render_text = jinja2.Markup(
bleach.clean(self.content, tags=[u'a', u'img'],
attributes={u'a': [u'href'], u'img': [u'src', u'alt']},
strip=False))
self._render_text = self._render_text.replace(
'\n', jinja2.Markup('<br>'))
return render_str("post.html", p=self)
def peek(self):
""" Show first part of long posts to not overload multi-post pages """
escaped_post = jinja2.Markup(
bleach.clean(self.content, tags=[u'a', u'img'],
attributes={u'a': [u'href'], u'img': [u'src', u'alt']},
strip=False))
marked_up_post = escaped_post.replace('\n', jinja2.Markup('<br>'))
if len(marked_up_post) > 1000:
self._render_text = marked_up_post[:1000]
return render_str("previewpost.html", p=self)
else:
self._render_text = marked_up_post
return render_str("post.html", p=self)
class Comment(ndb.Model):
""" Comments data model used for datastore """
comment_text = ndb.TextProperty(required=True)
parent_post_id = ndb.StringProperty(required=True)
created = ndb.DateTimeProperty(auto_now_add=True)
last_modified = ndb.DateTimeProperty(auto_now=True)
posting_user = ndb.StringProperty(required=True)
def render(self):
""" Draws comments """
escapedcomment = jinja2.escape(self.comment_text)
marked_up_comment = escapedcomment.replace('\n', jinja2.Markup('<br>'))
self._render_text = marked_up_comment
return render_str("comment.html", c=self)
class Secret(ndb.Model):
""" HMAC Secret Key stored in datastore. Used to verify session cookies """
key_string = ndb.StringProperty(required=True)
class User(ndb.Model):
""" User account info for auth """
username = ndb.StringProperty(required=True)
user_hash = ndb.StringProperty(required=True)
salt = ndb.StringProperty(required=True)
email = ndb.StringProperty(required=True)
current_session = ndb.StringProperty(required=False)
session_expires = ndb.DateTimeProperty(required=False)
class AntiCsrfToken(ndb.Model):
""" Anti forgery token embedded in hidden form fields used
to ensure the request came from the site and not an external site """
csrf_sync_token = ndb.StringProperty(required=True)
associated_user = ndb.StringProperty(required=True)
class ResetToken(ndb.Model):
""" Password reset token used in email when user forgot their password """
associated_acct_email = ndb.StringProperty(required=True)
token_guid = ndb.StringProperty(required=True)
expires = ndb.DateTimeProperty(required=True)
class LoginAttempt(ndb.Model):
""" Keeps track of login attempts for rate limiting """
ip_addr = ndb.StringProperty(required=True)
last_attempt = ndb.DateTimeProperty(required=True)
attempt_count = ndb.IntegerProperty(required=True)
|
Python
| 0
|
@@ -1173,32 +1173,57 @@
jinja2.Markup(%0A
+ bleach.linkify(%0A
bleach
@@ -1256,32 +1256,32 @@
%5Bu'a', u'img'%5D,%0A
-
@@ -1371,32 +1371,33 @@
strip=False))
+)
%0A self._r
@@ -1651,32 +1651,57 @@
jinja2.Markup(%0A
+ bleach.linkify(%0A
bleach
@@ -1857,16 +1857,17 @@
=False))
+)
%0A
@@ -2628,21 +2628,20 @@
t =
-jinja2.escape
+bleach.clean
(sel
@@ -2756,32 +2756,61 @@
._render_text =
+jinja2.Markup(bleach.linkify(
marked_up_commen
@@ -2810,16 +2810,18 @@
_comment
+))
%0A
|
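The change threads the cleaned post and comment text through bleach.linkify() before it is marked safe, so bare URLs render as working anchors while the whitelist from bleach.clean() still applies. A minimal sketch of that pipeline (assuming the same bleach/jinja2 stack as the model above; on Jinja2 3.x, Markup lives in markupsafe instead):

import bleach
import jinja2

def render_content(text):
    # Keep only the whitelisted tags/attributes, then turn bare URLs into links.
    cleaned = bleach.clean(text, tags=[u'a', u'img'],
                           attributes={u'a': [u'href'], u'img': [u'src', u'alt']},
                           strip=False)
    linked = bleach.linkify(cleaned)
    # Mark the result safe and keep newlines as <br>, as the models do.
    return jinja2.Markup(linked).replace('\n', jinja2.Markup('<br>'))

print(render_content(u'see https://example.com\nfor details'))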
cc5c7e7c360ff6379259ad878310bd59c47c6f29
|
mark string for translation
|
coupons/forms.py
|
coupons/forms.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2016, Raffaele Salmaso <raffaele@salmaso.org>
# Copyright (C) 2013, byteweaver
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of django-coupons nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, division, print_function, unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Coupon, CouponUser, Campaign
from .settings import COUPON_TYPES
class CouponGenerationForm(forms.Form):
quantity = forms.IntegerField(
label=_("Quantity"),
)
value = forms.IntegerField(
label=_("Value"),
)
type = forms.ChoiceField(
choices=COUPON_TYPES,
label=_("Type"),
)
valid_until = forms.SplitDateTimeField(
required=False,
label=_("Valid until"),
help_text=_("Leave empty for coupons that never expire")
)
prefix = forms.CharField(
required=False,
label="Prefix",
)
campaign = forms.ModelChoiceField(
required=False,
queryset=Campaign.objects.all(),
label=_("Campaign"),
)
class CouponForm(forms.Form):
code = forms.CharField(
label=_("Coupon code"),
)
def __init__(self, *args, **kwargs):
self.user = None
self.types = None
if "user" in kwargs:
self.user = kwargs["user"]
del kwargs["user"]
if "types" in kwargs:
self.types = kwargs["types"]
del kwargs["types"]
super(CouponForm, self).__init__(*args, **kwargs)
def clean_code(self):
code = self.cleaned_data["code"]
try:
coupon = Coupon.objects.get(code=code)
except Coupon.DoesNotExist:
raise forms.ValidationError(_("This code is not valid."))
self.coupon = coupon
if self.user is None and coupon.user_limit is not 1:
            # coupons that can be used only once may be used without tracking the user; otherwise there is no chance
# of excluding an unknown user from multiple usages.
raise forms.ValidationError(_(
"The server must provide an user to this form to allow you to use this code. Maybe you need to sign in?"
))
if coupon.is_redeemed:
raise forms.ValidationError(_("This code has already been used."))
try: # check if there is a user bound coupon existing
user_coupon = coupon.users.get(user=self.user)
if user_coupon.redeemed_at is not None:
raise forms.ValidationError(_("This code has already been used by your account."))
except CouponUser.DoesNotExist:
if coupon.user_limit is not 0: # zero means no limit of user count
# only user bound coupons left and you don't have one
if coupon.user_limit is coupon.users.filter(user__isnull=False).count():
raise forms.ValidationError(_("This code is not valid for your account."))
if coupon.user_limit is coupon.users.filter(redeemed_at__isnull=False).count(): # all coupons redeemed
raise forms.ValidationError(_("This code has already been used."))
if self.types is not None and coupon.type not in self.types:
raise forms.ValidationError(_("This code is not meant to be used here."))
if coupon.expired():
raise forms.ValidationError(_("This code is expired."))
return code
|
Python
| 0.000019
|
@@ -2399,16 +2399,18 @@
bel=
+_(
%22Prefix%22
,%0A
@@ -2405,16 +2405,17 @@
%22Prefix%22
+)
,%0A )%0A
|
c3c986e08dadf3ecdd4f94eca6abf36c22a1c209
|
Update DB/ Deploy
|
models.py
|
models.py
|
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def db_connect():
"""
Performs database connection
Returns sqlalchemy engine instance
"""
return create_engine('postgres://fbcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'
'@ec2-54-221-235-135.compute-1.amazonaws.com'
':5432/d2cc1tb2t1iges', echo=False)
def create_battletag_table(engine):
Base.metadata.create_all(engine)
class Battletags(Base):
"""
Table to store user battletags
"""
__tablename__ = 'Battletags'
disc_name = Column(String, primary_key=True)
battletag = Column(String, unique=True)
|
Python
| 0
|
@@ -292,96 +292,162 @@
://f
-bcmeskynsvati:aURfAdENt6-kumO0j224GuXRWH'%0D%0A '@ec2-54-221-235-135
+cvxvbdsuotypy:a3b010cca1fa4e6949ff39c11c6'%0D%0A 'e0b9edf9ce67a650535436dc349ba29b8c751@ec2-54-243-253-'%0D%0A '17
.com
@@ -466,16 +466,22 @@
naws.com
+:5432/
'%0D%0A
@@ -505,28 +505,22 @@
'
-:5432/d2cc1tb2t1iges
+dfl66jjoa0etqc
', e
|
6c71dd12abffefdaec062555283afc9346dacc0a
|
answering haitham questions
|
modles.py
|
modles.py
|
#encoding: utf-8
from django.db import models
#Haitham: (how models works? will the following model be defined in 1 database?)
#Haitham: (what does foreign key means?)
#Haitham: how the different model interact?
#Haitham: can we change the models (add/remove fields) in the future?
#Haitham: how to store user comments EFFICIENTLY? do we have to deal with it now?
#Haitham: how the data is stored? by a stand alone data base for each layer (can layer be added dynamically by admins)?
# or by a generic field in the data database (can we define a generic field)?
# i guess that a stand alone data base for each layer is better
# where the physical data base will be created?
# what is the optimal char fields length
class UserBasic(models.Model):
'common fields for all users'
user_name = models.CharField("user name",max_length=64, primary_key = True)
name = models.CharField("full name",max_length=64)
password = models.CharField("password",max_length=64) #Haitham: is this the right way to do it? is it safe?
email = models.EmailField("email address",blank=True, null=True)
register_data = models.DateField("registration date")
class Users(UserBasic):
'regular users aka citizens'
    address = models.CharField("home address", max_length = 100, blank = True) #may be changed for more efficient navigation, maybe by dividing the map into regions
auto_update_adresses = None # a list of addresses for the auto update
#---------------------=====================---------------------
class SuperUsers(UserBasic):
'can add data aka local authorities'
position = models.CharField("user title", max_length = 64)
region = models.ForeignKey(Regions) #The region for this user
#---------------------=====================---------------------
class Moderators(UserBasic):
'supervise the data and user comments'
position = models.CharField("user title", max_length = 64)
#---------------------=====================---------------------
class Admins(UserBasic):
'can add superusers and moderators'
#---------------------=====================---------------------
class DataLayer(models.Model):
# each layer is a table inside the database the data are defined for each Layer alone
'environmental data'
layer_name = models.CharField(max_length=64)
position = None #poisiton as defined in the GIS
date = models.DateField
data1 = None
data2 = None
#data3 ......
#---------------------=====================---------------------
class Comments(models.Model):
user = models.ForeignKey(BasicUser)
comment = models.CharField(max_length = 400)
#---------------------=====================---------------------
class Regions(models.Model):
area = None # use polygon/multipolygon from GIS
#---------------------=====================---------------------
|
Python
| 0.998489
|
@@ -129,95 +129,506 @@
?)%0A#
-Haitham: (what does foreign key means?)%0A#Haitham: how the different model interact?
+daonb: all models are deifned in one database - as define in the project's settings.py%0A#Haitham: (what does foreign key means?)%0A#daonb: a one-to-many relationship, helping you connect two models using a %0A# direct, non-exclusive link from one models to another. %0A# please use the related_name of attributed of ForeignKey field to %0A# better name the reverse relationship.%0A#Haitham: how the different model interact?%0A#daonb: using ForeignKeys and ManyToMany relatinshps for start
%0A#Ha
@@ -696,16 +696,61 @@
future?%0A
+#daonb: it's tricky, but the south app helps%0A
#Haitham
@@ -825,16 +825,169 @@
it now?%0A
+#daonb: no. %22We should forget about small efficiencies, say about 97%25 of the time: %0A# premature optimization is the root of all evil%22 Donald Knuth%0A
#Haitham
@@ -1267,99 +1267,402 @@
er%0A#
- where the physical data base will be created?%0A# what is the optimal char fields length
+daonb: not necessarly. we should keep the number of the 'moving parts' to a minimum and%0A# sql databases are fully capble of handling all our layers in one database%0A# where the physical data base will be created?%0A# daonb: in the database defined in the project's settings.py%0A# what is the optimal char fields length%0A# daonb: use TextField and don't worry about running out of chars
%0A%0Acl
|
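Most of the diff is prose answers to the inline questions; the concrete advice it gives — ForeignKey is a one-to-many link, name the reverse side with related_name, prefer TextField over guessing char lengths — can be shown in a few lines. A hedged Django sketch (model and field names here are illustrative, not taken from the file; on modern Django the on_delete argument is required):

from django.db import models

class Region(models.Model):
    name = models.TextField()          # no need to guess a max_length

class SuperUser(models.Model):
    # One-to-many: many SuperUsers may point at one Region, and
    # related_name makes the reverse side readable: region.supervisors.all()
    region = models.ForeignKey(Region, related_name='supervisors',
                               on_delete=models.CASCADE)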
fac8a172a16da011bab9e41afc52f24f833687fc
|
Simplify the date calculation
|
monday.py
|
monday.py
|
#! /usr/bin/env python
"""Run this during the week to write last week's short-form entry"""
from __future__ import print_function
from datetime import datetime, timedelta
import errno
import operator
import os
import sys
def sunday_after(dt, offset=1):
"""offset == 3 means 3rd Sunday from now, -2 means two Sundays back"""
if offset == 0:
raise ArgumentError("offset must be nonzero")
if offset > 0:
offset -= 1
dt += timedelta(days=offset * 7)
# 23:59:59 on next Sunday
days = 6 - dt.weekday()
hours = 23 - dt.hour
mins = 59 - dt.minute
sec = 59 - dt.second
s = dt + timedelta(days=days, hours=hours, minutes=mins, seconds=sec)
s = s.replace(microsecond=0)
# Watch out for DST transition
#s -= s.gmtoff - t.gmtoff
return s
class Week:
# Monday-to-Sunday week of tweets around mid_week
def __init__(self, mid_week):
latest = sunday_after(mid_week, 1)
earliest = sunday_after(mid_week, -1)
"""r = Reader.new
@tweets = []
while true do
tweet = r.next
break if tweet.time <= earliest
@tweets << tweet if tweet.time <= latest
end"""
class MockedTweet:
def __init__(self, nr):
self.time = latest - timedelta(days=nr, hours=nr * 2, minutes=nr * 3)
def __repr__(self):
return "\nMockedTweet from %s" % self.time
self.tweets = [MockedTweet(1), MockedTweet(2), MockedTweet(3), MockedTweet(4)]
self.tweets.sort(key=operator.attrgetter('time'))
@property
def sunday(self):
return sunday_after(self.tweets[0].time)
def entry(tweets, sunday):
return "blosxom entry for week ending %s:\n%r" % (sunday, tweets)
def main():
w = Week(datetime.now() - timedelta(days=7))
sunday = w.sunday
year = '%04d' % sunday.year
path = os.path.join('tweets', year[:-1] + 'x', year)
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
path = os.path.join(path, '%02d-%02d.txt' % (sunday.month, sunday.day))
with open(path, 'w') as f:
f.write(entry(w.tweets, sunday))
print("Wrote", path)
if __name__ == '__main__':
main()
|
Python
| 0.999998
|
@@ -515,15 +515,32 @@
+s = dt + timedelta(
days
- =
+=
6 -
@@ -555,138 +555,43 @@
ay()
+)
%0A
-hours = 23 - dt.hour%0A mins = 59 - dt.minute%0A sec = 59 - dt.second%0A s = dt + timedelta(days=days, hours=hours
+s = s.replace(hour=23
, minute
s=mi
@@ -590,22 +590,19 @@
nute
-s=mins
+=59
, second
s=se
@@ -601,33 +601,13 @@
cond
-s=sec)%0A s = s.replace(
+=59,
micr
|
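The simplification replaces the separate hour/minute/second subtractions with one timedelta for the day offset and a single replace() call for the time of day. The resulting function stands alone (logic taken from the hunks; ValueError is substituted here for the undefined ArgumentError in the original):

from datetime import datetime, timedelta

def sunday_after(dt, offset=1):
    # offset == 3 means 3rd Sunday from now, -2 means two Sundays back.
    if offset == 0:
        raise ValueError("offset must be nonzero")
    if offset > 0:
        offset -= 1
    dt += timedelta(days=offset * 7)
    # Jump to the coming Sunday, then pin the clock to 23:59:59.
    s = dt + timedelta(days=6 - dt.weekday())
    return s.replace(hour=23, minute=59, second=59, microsecond=0)

print(sunday_after(datetime(2024, 1, 3)))   # 2024-01-07 23:59:59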
334577d786f8fc8a839800d46cae3ac21cff7dc1
|
Add quick cimport identification
|
isort/identify.py
|
isort/identify.py
|
""""""
from typing import NamedTuple, Optional
from .comments import parse as parse_comments
from .settings import DEFAULT_CONFIG, Config
from pathlib import Path
from isort.parse import _normalize_line, _strip_syntax, skip_line
from typing import TextIO, Iterator
def import_type(line: str, config: Config = DEFAULT_CONFIG) -> Optional[str]:
"""If the current line is an import line it will return its type (from or straight)"""
if line.startswith(("import ", "cimport ")):
return "straight"
if line.startswith("from "):
return "from"
return None
class ImportIdentified(NamedTuple):
line: int
module: str
attribute: str = None
alias: Optional[str] = None
src: Optional[Path] = None
cimport: bool = False
def imports(input_stream: TextIO, config: Config = DEFAULT_CONFIG) -> Iterator[ImportIdentified]:
"""Parses a python file taking out and categorizing imports."""
in_quote = ""
indexed_input = enumerate(input_stream)
for index, line in indexed_input:
(skipping_line, in_quote) = skip_line(
line, in_quote=in_quote, index=index, section_comments=config.section_comments
)
if skipping_line:
continue
line, *end_of_line_comment = line.split("#", 1)
if ";" in line:
statements = [line.strip() for line in line.split(";")]
else:
statements = [line]
if end_of_line_comment:
statements[-1] = f"{statements[-1]}#{end_of_line_comment[0]}"
for statement in statements:
line, raw_line = _normalize_line(statement)
type_of_import = import_type(line, config) or ""
if not type_of_import:
continue
import_string, _ = parse_comments(line)
if "(" in line.split("#", 1)[0]:
while not line.split("#")[0].strip().endswith(")"):
try:
index, next_line = next(indexed_input)
except StopIteration:
break
line, _ = parse_comments(next_line)
import_string += "\n" + line
else:
while line.strip().endswith("\\"):
index, next_line = next(indexed_input)
line, _ = parse_comments(next_line)
# Still need to check for parentheses after an escaped line
if (
"(" in line.split("#")[0]
and ")" not in line.split("#")[0]
):
import_string += "\n" + line
while not line.split("#")[0].strip().endswith(")"):
try:
index, next_line = next(indexed_input)
except StopIteration:
break
line, _ = parse_comments(next_line)
import_string += "\n" + line
if import_string.strip().endswith(
(" import", " cimport")
) or line.strip().startswith(("import ", "cimport ")):
import_string += "\n" + line
else:
import_string = import_string.rstrip().rstrip("\\") + " " + line.lstrip()
if type_of_import == "from":
cimports: bool
import_string = (
import_string.replace("import(", "import (")
.replace("\\", " ")
.replace("\n", " ")
)
if " cimport " in import_string:
parts = import_string.split(" cimport ")
cimports = True
else:
parts = import_string.split(" import ")
cimports = False
from_import = parts[0].split(" ")
import_string = (" cimport " if cimports else " import ").join(
[from_import[0] + " " + "".join(from_import[1:])] + parts[1:]
)
just_imports = [
item.replace("{|", "{ ").replace("|}", " }")
for item in _strip_syntax(import_string).split()
]
direct_imports = just_imports[1:]
top_level_module = ""
if "as" in just_imports and (just_imports.index("as") + 1) < len(just_imports):
while "as" in just_imports:
attribute = None
as_index = just_imports.index("as")
if type_of_import == "from":
attribute = just_imports[as_index - 1]
top_level_module = just_imports[0]
module = top_level_module + "." + attribute
alias = just_imports[as_index + 1]
direct_imports.remove(attribute)
direct_imports.remove(alias)
direct_imports.remove("as")
if attribute == alias and config.remove_redundant_aliases:
pass
else:
yield ImportIdentified(
index,
top_level_module,
attribute,
alias=alias
)
else:
module = just_imports[as_index - 1]
alias = just_imports[as_index + 1]
if not (module == alias and config.remove_redundant_aliases):
yield ImportIdentified(index, module, alias)
else:
if type_of_import == "from":
module = just_imports.pop(0)
for attribute in just_imports:
yield ImportIdentified(index, module, attribute)
else:
for module in just_imports:
yield ImportIdentified(index, module)
|
Python
| 0
|
@@ -627,14 +627,21 @@
line
+_number
: int%0A
-
@@ -1796,24 +1796,265 @@
mments(line)
+%0A normalized_import_string = import_string.replace(%22import(%22, %22import (%22).replace(%22%5C%5C%22, %22 %22).replace(%22%5Cn%22, %22 %22)%0A cimports: bool = %22 cimport %22 in normalized_import_string or normalized_import_string.startswith(%22cimport%22)
%0A%0A
@@ -3688,39 +3688,8 @@
m%22:%0A
- cimports: bool%0A
@@ -3885,61 +3885,8 @@
)%0A
- if %22 cimport %22 in import_string:%0A
@@ -3940,165 +3940,37 @@
rt %22
-)%0A cimports = True%0A%0A else:%0A parts = import_string.split(%22 import %22)%0A cimports = False
+ if cimports else %22 import %22)
%0A%0A
@@ -5510,16 +5510,67 @@
as=alias
+,%0A cimport=cimports,
%0A
@@ -5895,16 +5895,26 @@
e, alias
+, cimports
)%0A%0A
@@ -6206,32 +6206,32 @@
n just_imports:%0A
-
@@ -6274,10 +6274,28 @@
, module
+, cimport=cimports
)%0A
|
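The diff hoists the cimport decision to the top of the loop: the raw import string is normalized (parenthesised and backslash-continued lines joined) and then flagged as a cimport if it contains " cimport " or starts with "cimport". That test isolates cleanly (names mirror the diff; the real identify.imports() does much more around it):

def is_cimport(import_string):
    # Normalize continuations the same way the diff does before testing.
    normalized = (import_string.replace("import(", "import (")
                               .replace("\\", " ")
                               .replace("\n", " "))
    return " cimport " in normalized or normalized.startswith("cimport")

print(is_cimport("from foo cimport bar"))    # True
print(is_cimport("cimport numpy"))           # True
print(is_cimport("import os"))               # False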
59257dc386f65f013ca7dd36300ac1078caf5d82
|
Fix setting biome temperature and rainfall from stack
|
burger/toppings/biomes.py
|
burger/toppings/biomes.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""
Copyright (c) 2011 Tyler Kenendy <tk@tkte.ch>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from solum import ClassFile, ConstantType
from .topping import Topping
import types
class BiomeTopping(Topping):
"""Gets most biome types."""
PROVIDES = [
"biomes"
]
DEPENDS = [
"identify.biome.superclass"
]
@staticmethod
def act(aggregate, jar, verbose=False):
biomes = aggregate.setdefault("biomes", {})
if "biome.superclass" not in aggregate["classes"]:
return
superclass = aggregate["classes"]["biome.superclass"]
cf = jar.open_class(superclass)
method = cf.methods.find_one(name="<clinit>")
tmp = None
stack = None
for ins in method.instructions:
if ins.opcode == 187: # new
if tmp is not None and tmp.has_key("name") and tmp["name"] != " and ":
biomes[tmp["name"]] = tmp
stack = []
tmp = {
"calls": {},
"rainfall": 0.5,
"height": [0.1, 0.3],
"temperature": 0.5,
"class": cf.constants[ins.operands[0][1]]["name"]["value"]
}
elif tmp is None:
continue
elif ins.opcode == 183: # invokespecial
const = cf.constants[ins.operands[0][1]]
name = const["name_and_type"]["name"]["value"]
if len(stack) == 2 and type(stack[1]) == types.FloatType:
tmp["calls"][name] = [stack.pop(), stack.pop()]
elif len(stack) >= 1 and type(stack[0]) == types.IntType: # 1, 2, 3-argument beginning with int = id
tmp["id"] = stack[0]
stack = []
elif name != "<init>":
tmp["rainfall"] = 0
elif ins.opcode == 182: # invokevirtual
if len(stack) == 1 and "color" not in tmp:
tmp["color"] = stack.pop()
# numeric values & constants
elif ins.opcode == 18 or ins.opcode == 19: # ldc, ldc_w
const = cf.constants[ins.operands[0][1]]
if const["tag"] == ConstantType.STRING:
tmp["name"] = const["string"]["value"]
if const["tag"] in (ConstantType.FLOAT,
ConstantType.INTEGER):
stack.append(const["value"])
elif ins.opcode <= 8 and ins.opcode >= 2: # iconst
stack.append(ins.opcode - 3)
elif ins.opcode >= 0xb and ins.opcode <= 0xd: # fconst
stack.append(ins.opcode - 0xb)
elif ins.opcode == 16: # bipush
stack.append(ins.operands[0][1])
if tmp is not None and tmp.has_key("name") and tmp["name"] != " and ":
biomes[tmp["name"]] = tmp
weather, height = BiomeTopping.map_methods(biomes)
for biome in biomes.itervalues():
calls = biome.pop("calls")
if height in calls:
biome["height"] = calls[height]
biome["height"].reverse()
if weather in calls:
biome["temperature"] = calls[weather][1]
biome["rainfall"] = calls[weather][0]
@staticmethod
def map_methods(biomes):
for biome in biomes.itervalues():
for call in biome["calls"]:
if biome["calls"][call][1] > 1 and len(biome["calls"]) > 1:
keys = biome["calls"].keys()
keys.remove(call)
return (call, keys[0])
return (None, None)
|
Python
| 0
|
@@ -2531,24 +2531,25 @@
k) == 2 and
+(
type(stack%5B1
@@ -2569,16 +2569,54 @@
loatType
+ or type(stack%5B0%5D) == types.FloatType)
:%0A
@@ -3103,16 +3103,155 @@
ck.pop()
+%0A if len(stack) == 2:%0A tmp%5B%22rainfall%22%5D = stack.pop()%0A tmp%5B%22temperature%22%5D = stack.pop()
%0A%0A
|
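The fix reads the weather values straight off the constant stack when a biome constructor leaves exactly two floats behind: the value pushed last is taken as rainfall, the one before it as temperature. The stack step itself is plain list handling; a small sketch (field names follow the topping, the bytecode walking is omitted):

def apply_weather(tmp, stack):
    # Two leftover floats after the call: top of stack is rainfall,
    # the value under it is temperature, mirroring the diff.
    if len(stack) == 2:
        tmp["rainfall"] = stack.pop()
        tmp["temperature"] = stack.pop()
    return tmp

print(apply_weather({"name": "Plains"}, [0.8, 0.4]))
# {'name': 'Plains', 'rainfall': 0.4, 'temperature': 0.8}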
8dbb1acb0800aef83a599fdd575c22824e5716da
|
initialize doctors field of the Act model when creating a new appointment
|
calebasse/actes/models.py
|
calebasse/actes/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from calebasse.agenda.models import Event, EventType
from calebasse.agenda.managers import EventManager
class Act(models.Model):
act_type = models.ForeignKey('ressources.ActType',
verbose_name=u'Type d\'acte')
validated = models.BooleanField(blank=True,
verbose_name=u'Validé')
date = models.DateTimeField()
patient = models.ForeignKey('dossiers.PatientRecord')
transport_company = models.ForeignKey('ressources.TransportCompany',
blank=True,
null=True,
verbose_name=u'Compagnie de transport')
transport_type = models.ForeignKey('ressources.TransportType',
blank=True,
null=True,
verbose_name=u'Type de transport')
doctors = models.ManyToManyField('personnes.Worker',
limit_choices_to={'type__intervene': True },
verbose_name=u'Thérapeutes')
def __unicode__(self):
return '{0} le {1} pour {2} avec {3}'.format(
self.act_type, self.date, self.patient,
', '.join(map(unicode, self.doctors.all())))
def __repr__(self):
return '<%s %r %r>' % (self.__class__.__name__, unicode(self), self.id)
class Meta:
verbose_name = u"Acte"
verbose_name_plural = u"Actes"
ordering = ['-date', 'patient']
class EventActManager(EventManager):
def create_patient_appointment(self, title, patient, participants, act_type,
service, start_datetime, end_datetime, description='', room=None,
note=None, **rrule_params):
"""
        This method allows you to create a new patient appointment quickly
Args:
title: patient appointment title (str)
patient: Patient object
participants: List of CalebasseUser (therapists)
act_type: ActType object
            service: Service object. Use session service by default
start_datetime: datetime with the start date and time
end_datetime: datetime with the end date and time
freq, count, until, byweekday, rrule_params:
                follow the ``dateutil`` API (see http://labix.org/python-dateutil)
Example:
Look at calebasse.agenda.tests.EventTest (test_create_appointments method)
"""
event_type, created = EventType.objects.get_or_create(
label="patient_appointment"
)
act_event = EventAct.objects.create(
title=title,
event_type=event_type,
patient=patient,
act_type=act_type,
date=start_datetime.date(),
)
return self._set_event(act_event, participants, description,
services = [service], start_datetime = start_datetime, end_datetime = end_datetime,
room = room, note = note, **rrule_params)
class EventAct(Act, Event):
objects = EventActManager()
VALIDATION_CODE_CHOICES = (
('absent', u'Absent'),
('present', u'Présent'),
)
attendance = models.CharField(max_length=16,
choices=VALIDATION_CODE_CHOICES,
default='absent',
verbose_name=u'Présence')
convocation_sent = models.BooleanField(blank=True,
verbose_name=u'Convoqué')
|
Python
| 0
|
@@ -2697,16 +2697,54 @@
date(),%0A
+ doctors=participants,%0A
|
221d5a352d587eb80b146e79e2baf0d5269f439e
|
Fix resolution of Place-based locations to known ancestor
|
carmen/resolvers/place.py
|
carmen/resolvers/place.py
|
"""Resolvers based on Twitter Places."""
from collections import defaultdict
from itertools import count
import re
import warnings
from ..location import Location, EARTH
from ..names import ALTERNATIVE_COUNTRY_NAMES, US_STATE_ABBREVIATIONS
from ..resolver import AbstractResolver
STATE_RE = re.compile(r'.+,\s*(\w+)')
class PlaceResolver(AbstractResolver):
"""A resolver that locates a tweet by matching Twitter Place
information with a known location. If *allow_unknown_locations* is
True, unknown Places are added as new locations. Otherwise, if
*resolve_to_known_ancestor* is True, tweets with unknown Places will
be resolved to the nearest known location containing that Place."""
name = 'place'
_unknown_id_start = 1000000
def __init__(self, allow_unknown_locations=False,
resolve_to_known_ancestor=False):
self.allow_unknown_locations = allow_unknown_locations
self.resolve_to_known_ancestor = resolve_to_known_ancestor
self._locations_by_name = {}
self._unknown_ids = count(self._unknown_id_start)
def _find_by_name(self, **kwargs):
return self._locations_by_name.get(Location(**kwargs).canonical())
def add_location(self, location):
self._locations_by_name[location.canonical()] = location
def resolve_tweet(self, tweet):
place = tweet['place']
if not place:
return
country = place['country']
if not country:
warnings.warn('Tweet has Place with no country')
return None
country = ALTERNATIVE_COUNTRY_NAMES.get(country.lower(), country)
name = {'country': country}
place_type = place['place_type'].lower()
if place_type in ('neighborhood', 'poi'):
full_name = place['full_name']
if full_name:
split_full_name = full_name.split(',')
if len(split_full_name) > 1:
name['city'] = split_full_name[-1]
else:
warnings.warn('Tweet has Place with no neighborhood or '
'point of interest full name')
elif place_type == 'city':
name['city'] = place['name']
if country.lower() == 'united states':
full_name = place['full_name']
if full_name:
# Attempt to extract a state name from the full_name.
match = STATE_RE.search(full_name)
if match:
state = match.group(1).lower()
name['state'] = US_STATE_ABBREVIATIONS.get(state)
else:
warnings.warn('Tweet has Place with no city full name')
elif place_type == 'admin':
name['state'] = place['name']
elif place_type == 'country':
pass
else:
warnings.warn('Tweet has unknown place type "%s"' % place_type)
return None
location = self._find_by_name(**name)
if location:
return (10, location)
if self.allow_unknown_locations:
# Remember this location for future lookups.
location = Location(
id=next(self._unknown_ids),
twitter_url=place['url'], twitter_id=place['id'],
**name)
self.add_location(location)
return (10, location)
if self.resolve_to_known_ancestor:
ancestor = location
while True:
ancestor = ancestor.parent()
if ancestor == EARTH:
break
known_ancestor = self.id_to_location.get(
self.location_to_id.get(ancestor))
if known_ancestor:
return (1, known_ancestor)
return None
|
Python
| 0.000004
|
@@ -1098,16 +1098,125 @@
start)%0A%0A
+ def _find_by_location(self, location):%0A return self._locations_by_name.get(location.canonical())%0A%0A
def
@@ -3208,110 +3208,8 @@
on)%0A
- if self.allow_unknown_locations:%0A # Remember this location for future lookups.%0A
@@ -3245,20 +3245,16 @@
-
-
id=next(
@@ -3273,20 +3273,16 @@
n_ids),%0A
-
@@ -3351,19 +3351,113 @@
-
**name)
+%0A if self.allow_unknown_locations:%0A # Remember this location for future lookups.
%0A
@@ -3777,13 +3777,16 @@
elf.
-id_to
+_find_by
_loc
@@ -3794,57 +3794,8 @@
tion
-.get(%0A self.location_to_id.get
(anc
@@ -3800,17 +3800,16 @@
ncestor)
-)
%0A
|
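The hunks add a _find_by_location() lookup keyed on Location.canonical() and rewrite the fallback so the resolver walks ancestor.parent() upward until it reaches a location it already knows, instead of consulting the removed id maps. A hedged sketch of that ancestor walk with a toy location type (carmen's real Location and EARTH objects are assumed, not reproduced):

class Loc(object):
    # Toy stand-in for carmen's Location, for illustration only.
    def __init__(self, name, parent=None):
        self.name, self._parent = name, parent

    def parent(self):
        return self._parent if self._parent is not None else EARTH

    def canonical(self):
        return self.name

EARTH = Loc('earth')
known = {}                      # canonical form -> location, like _locations_by_name

def resolve_to_known_ancestor(location):
    ancestor = location
    while True:
        ancestor = ancestor.parent()
        if ancestor is EARTH:
            return None
        hit = known.get(ancestor.canonical())
        if hit is not None:
            return (1, hit)     # lower confidence than a direct match

usa = Loc('united states')
known[usa.canonical()] = usa
print(resolve_to_known_ancestor(Loc('austin, texas', parent=usa)))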
a1abbc0ab186eb0560e8924dd85298c931185b37
|
Fix to git.lookaside messages.
|
fedmsg_meta_fedora_infrastructure/scm.py
|
fedmsg_meta_fedora_infrastructure/scm.py
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
from fedmsg.meta.base import BaseProcessor
from fasshim import gravatar_url
from hashlib import md5
import urllib
class SCMProcessor(BaseProcessor):
__name__ = "git"
__description__ = "the Fedora version control system"
__link__ = "http://pkgs.fedoraproject.org/cgit"
__docs__ = "https://fedoraproject.org/wiki/Using_Fedora_GIT"
__obj__ = "Package Commits"
__icon__ = "http://git-scm.com/images/logo.png"
def secondary_icon(self, msg, **config):
if '.git.receive' in msg['topic']:
query_string = urllib.urlencode({
's': 64,
'd': "http://git-scm.com/images/logo.png",
})
email = msg['msg']['commit']['email']
hash = md5(email).hexdigest()
tmpl = "http://www.gravatar.com/avatar/%s?%s"
return tmpl % (hash, query_string)
def subtitle(self, msg, **config):
if '.git.receive' in msg['topic']:
try:
repo = msg['msg']['commit']['repo']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
user = msg['msg']['commit']['username']
summ = msg['msg']['commit']['summary']
whole = msg['msg']['commit']['message']
if summ.strip() != whole.strip():
summ += " (..more)"
branch = msg['msg']['commit']['branch']
tmpl = self._('{user} pushed to {repo} ({branch}). "{summary}"')
return tmpl.format(user=user, repo=repo,
branch=branch, summary=summ)
elif '.git.branch' in msg['topic']:
try:
repo = msg['msg']['name']
branch = msg['msg']['branch']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
branch = msg['topic'].split('.')[-1]
agent = msg['msg']['agent']
tmpl = self._(
"{agent} created branch '{branch}' for the '{repo}' package"
)
return tmpl.format(agent=agent, branch=branch, repo=repo)
elif '.git.lookaside.' in msg['topic']:
name = msg['msg']['name']
agent = msg['msg']['agent']
filename = msg['msg']['filename']
tmpl = self._(
"{agent} uploaded {filename} for {name}"
)
return tmpl.format(agent=agent, name=name, filename=filename)
elif '.git.mass_branch.start' in msg['topic']:
tmpl = self._('{agent} started a mass branch')
elif '.git.mass_branch.complete' in msg['topic']:
tmpl = self._('mass branch started by {agent} completed')
elif '.git.pkgdb2branch.start' in msg['topic']:
tmpl = self._('{agent} started a run of pkgdb2branch')
elif '.git.pkgdb2branch.complete' in msg['topic']:
errors = len(msg['msg']['unbranchedPackages'])
if errors == 0:
tmpl = self._(
'run of pkgdb2branch started by {agent} completed')
elif errors == 1:
tmpl = self._(
'run of pkgdb2branch started by {agent} completed' +
' with 1 error'
)
else:
tmpl = self._(
'run of pkgdb2branch started by {agent} completed' +
' with %i errors'
) % errors
agent = msg['msg']['agent']
return tmpl.format(agent=agent)
def link(self, msg, **config):
prefix = "http://pkgs.fedoraproject.org/cgit"
if '.git.receive' in msg['topic']:
try:
repo = msg['msg']['commit']['repo']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
rev = msg['msg']['commit']['rev']
branch = msg['msg']['commit']['branch']
tmpl = "{prefix}/{repo}.git/commit/?h={branch}&id={rev}"
return tmpl.format(prefix=prefix, repo=repo,
branch=branch, rev=rev)
elif '.git.branch' in msg['topic']:
try:
repo = msg['msg']['name']
branch = msg['msg']['branch']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
branch = msg['topic'].split('.')[-1]
tmpl = "{prefix}/{repo}.git/log/?h={branch}"
return tmpl.format(prefix=prefix, repo=repo, branch=branch)
elif '.git.lookaside.' in msg['topic']:
prefix = "http://pkgs.fedoraproject.org/lookaside/pkgs"
name = msg['msg']['name']
md5sum = msg['msg']['md5sum']
filename = msg['msg']['filename']
tmpl = "{prefix}/{name}/{filename}/{md5sum}/{filename}"
return tmpl.format(prefix=prefix, name=name,
md5sum=md5sum, filename=filename)
def usernames(self, msg, **config):
if 'agent' in msg['msg']:
return set([msg['msg']['agent']])
else:
return set([msg['msg']['commit']['username']])
def packages(self, msg, **config):
if 'git.receive' in msg['topic']:
try:
# Newer fedmsg
return set([msg['msg']['commit']['repo']])
except KeyError:
# Legacy support
return set(['.'.join(msg['topic'].split('.')[5:-1])])
if 'git.branch' in msg['topic']:
try:
return set([msg['msg']['name']])
except KeyError:
return set(['.'.join(msg['topic'].split('.')[5:-1])])
elif '.git.pkgdb2branch.complete' in msg['topic']:
return set(msg['msg']['unbranchedPackages'] +
msg['msg']['branchedPackages'])
elif '.git.lookaside.' in msg['topic']:
return set([msg['msg']['name']])
return set()
def objects(self, msg, **config):
if 'git.receive' in msg['topic']:
try:
repo = msg['msg']['commit']['repo']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
return set([
repo + '/' + filename for filename in
msg['msg']['commit']['stats']['files']
])
elif '.git.branch' in msg['topic']:
try:
repo = msg['msg']['name']
except KeyError:
repo = '.'.join(msg['topic'].split('.')[5:-1])
return set([repo + '/__git__'])
elif '.git.pkgdb2branch.complete' in msg['topic']:
return set([
p + '/__git__' for p in
msg['msg']['unbranchedPackages'] +
msg['msg']['branchedPackages']
])
elif '.git.lookaside.' in msg['topic']:
return set([msg['msg']['name'] + '/' + msg['msg']['filename']])
return set()
|
Python
| 0
|
@@ -2947,33 +2947,32 @@
'.git.lookaside
-.
' in msg%5B'topic'
@@ -5352,33 +5352,32 @@
'.git.lookaside
-.
' in msg%5B'topic'
@@ -6676,33 +6676,32 @@
'.git.lookaside
-.
' in msg%5B'topic'
@@ -7665,17 +7665,16 @@
ookaside
-.
' in msg
|
17cbd0f3c9d9d5d3ef2caa7d15ada64266206c94
|
Add watt to mysensors switch attributes (#13370)
|
homeassistant/components/switch/mysensors.py
|
homeassistant/components/switch/mysensors.py
|
"""
Support for MySensors switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.mysensors/
"""
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components import mysensors
from homeassistant.components.switch import DOMAIN, SwitchDevice
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON
ATTR_IR_CODE = 'V_IR_SEND'
SERVICE_SEND_IR_CODE = 'mysensors_send_ir_code'
SEND_IR_CODE_SERVICE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_IR_CODE): cv.string,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the mysensors platform for switches."""
device_class_map = {
'S_DOOR': MySensorsSwitch,
'S_MOTION': MySensorsSwitch,
'S_SMOKE': MySensorsSwitch,
'S_LIGHT': MySensorsSwitch,
'S_LOCK': MySensorsSwitch,
'S_IR': MySensorsIRSwitch,
'S_BINARY': MySensorsSwitch,
'S_SPRINKLER': MySensorsSwitch,
'S_WATER_LEAK': MySensorsSwitch,
'S_SOUND': MySensorsSwitch,
'S_VIBRATION': MySensorsSwitch,
'S_MOISTURE': MySensorsSwitch,
'S_WATER_QUALITY': MySensorsSwitch,
}
mysensors.setup_mysensors_platform(
hass, DOMAIN, discovery_info, device_class_map,
add_devices=add_devices)
def send_ir_code_service(service):
"""Set IR code as device state attribute."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
ir_code = service.data.get(ATTR_IR_CODE)
devices = mysensors.get_mysensors_devices(hass, DOMAIN)
if entity_ids:
_devices = [device for device in devices.values()
if isinstance(device, MySensorsIRSwitch) and
device.entity_id in entity_ids]
else:
_devices = [device for device in devices.values()
if isinstance(device, MySensorsIRSwitch)]
kwargs = {ATTR_IR_CODE: ir_code}
for device in _devices:
device.turn_on(**kwargs)
hass.services.register(DOMAIN, SERVICE_SEND_IR_CODE,
send_ir_code_service,
schema=SEND_IR_CODE_SERVICE_SCHEMA)
class MySensorsSwitch(mysensors.MySensorsEntity, SwitchDevice):
"""Representation of the value of a MySensors Switch child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def is_on(self):
"""Return True if switch is on."""
return self._values.get(self.value_type) == STATE_ON
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, 1)
if self.gateway.optimistic:
# optimistically assume that switch has changed state
self._values[self.value_type] = STATE_ON
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, 0)
if self.gateway.optimistic:
# optimistically assume that switch has changed state
self._values[self.value_type] = STATE_OFF
self.schedule_update_ha_state()
class MySensorsIRSwitch(MySensorsSwitch):
"""IR switch child class to MySensorsSwitch."""
def __init__(self, *args):
"""Set up instance attributes."""
super().__init__(*args)
self._ir_code = None
@property
def is_on(self):
"""Return True if switch is on."""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_LIGHT) == STATE_ON
def turn_on(self, **kwargs):
"""Turn the IR switch on."""
set_req = self.gateway.const.SetReq
if ATTR_IR_CODE in kwargs:
self._ir_code = kwargs[ATTR_IR_CODE]
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, self._ir_code)
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_LIGHT, 1)
if self.gateway.optimistic:
# optimistically assume that switch has changed state
self._values[self.value_type] = self._ir_code
self._values[set_req.V_LIGHT] = STATE_ON
self.schedule_update_ha_state()
# turn off switch after switch was turned on
self.turn_off()
def turn_off(self, **kwargs):
"""Turn the IR switch off."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_LIGHT, 0)
if self.gateway.optimistic:
# optimistically assume that switch has changed state
self._values[set_req.V_LIGHT] = STATE_OFF
self.schedule_update_ha_state()
def update(self):
"""Update the controller with the latest value from a sensor."""
super().update()
self._ir_code = self._values.get(self.value_type)
|
Python
| 0
|
@@ -2597,16 +2597,205 @@
mistic%0A%0A
+ @property%0A def current_power_w(self):%0A %22%22%22Return the current power usage in W.%22%22%22%0A set_req = self.gateway.const.SetReq%0A return self._values.get(set_req.V_WATT)%0A%0A
@pro
|
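The added property surfaces the node's V_WATT reading through Home Assistant's standard current_power_w attribute, pulling it from the same _values cache the other properties use and returning None until the sensor has reported. A stripped-down sketch of the pattern (the SetReq constants and the _values dict are assumed to behave as in the component):

class SwitchLike(object):
    # Illustrative only: mimics how the MySensors switch caches child values.
    def __init__(self, values, set_req):
        self._values = values          # {value_type: last reported value}
        self._set_req = set_req        # enum-like object exposing V_WATT etc.

    @property
    def current_power_w(self):
        # Current power usage in W, or None if the node never reported it.
        return self._values.get(self._set_req.V_WATT)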
a7bc9b1111630db55a2bbcd96a30145389937d20
|
fix a pull
|
homeassistant/components/thermostat/zwave.py
|
homeassistant/components/thermostat/zwave.py
|
"""
ZWave Thermostat.
"""
# Because we do not compile openzwave on CI
# pylint: disable=import-error
from homeassistant.components.thermostat import DOMAIN
from homeassistant.components.thermostat import ThermostatDevice
from homeassistant.components.zwave import (
ATTR_NODE_ID, ATTR_VALUE_ID, NETWORK,
ZWaveDeviceEntity)
from homeassistant.const import TEMP_FAHRENHEIT
CONF_NAME = 'name'
DEFAULT_NAME = 'ZWave Thermostat'
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the ZWave thermostats."""
if discovery_info is None or NETWORK is None:
return
node = NETWORK.nodes[discovery_info[ATTR_NODE_ID]]
value = node.values[discovery_info[ATTR_VALUE_ID]]
value.set_change_verified(False)
add_devices([ZWaveThermostat(value)])
# pylint: disable=too-many-arguments
class ZWaveThermostat(ZWaveDeviceEntity, ThermostatDevice):
"""Represents a HeatControl thermostat."""
def __init__(self, value):
from openzwave.network import ZWaveNetwork
from pydispatch import dispatcher
from homeassistant.helpers.temperature import convert
ZWaveDeviceEntity.__init__(self, value, DOMAIN)
self._node = value.node
self._target_temperature = round(convert(71,
TEMP_FAHRENHEIT, self.hass.config.temperature_unit))
self._current_temperature = round(convert(90,
TEMP_FAHRENHEIT, self.hass.config.temperature_unit))
self._current_operation = "Idle"
self._current_operation_state = "Idle"
self.update_properties()
# register listener
dispatcher.connect(
self.value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED)
def value_changed(self, value):
"""Called when a value has changed on the network."""
if self._value.node == value.node:
self.update_properties()
self.update_ha_state()
def update_properties(self):
""" Callback on data change for the registered node/value pair. """
# set point
for _, value in self._node.get_values_for_command_class(0x43).items():
if int(value.data) != 0:
self._target_temperature = value.data
# Operation
for _, value in self._node.get_values_for_command_class(0x40).items():
self._current_operation = value.data
# Current Temp
for _, value in self._node.get_values_for_command_class(0x31).items():
if int(value.data) != 0:
self._current_temperature = value.data
# COMMAND_CLASS_THERMOSTAT_OPERATING_STATE
for _, value in self._node.get_values_for_command_class(0x42).items():
self._current_operation_state = value.data
@property
def should_poll(self):
"""No polling on ZWave"""
return False
@property
def is_fan_on(self):
return self._current_operation_state != 'Idle'
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self.hass.config.temperature_unit
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def operation(self):
return self._current_operation
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
def set_temperature(self, temperature):
"""Set new target temperature."""
# set point
for _, value in self._node.get_values_for_command_class(0x43).items():
if int(value.data) != 0:
value.data = temperature
|
Python
| 0.0001
|
@@ -1483,22 +1483,26 @@
ation =
-%22Idle%22
+STATE_IDLE
%0A
@@ -1538,14 +1538,18 @@
e =
-%22Idle%22
+STATE_IDLE
%0A
|
996a037cb90afa29305c61d8662fc9e3b5d508e5
|
Add name to mixer
|
mopidy_alsamixer/mixer.py
|
mopidy_alsamixer/mixer.py
|
from __future__ import unicode_literals
import logging
import sys
import alsaaudio
from mopidy import mixer
import pykka
logger = logging.getLogger(__name__)
class AlsaMixer(pykka.ThreadingActor, mixer.Mixer):
def __init__(self, config):
super(AlsaMixer, self).__init__()
self.config = config
self.card = self.config['alsamixer']['card']
self.control = self.config['alsamixer']['control']
known_cards = alsaaudio.cards()
if self.card >= len(known_cards):
logger.error(
'Could not find ALSA soundcard with index %d. '
'Known soundcards include: %s',
self.card, ', '.join(
'%d (%s)' % (i, name)
for i, name in enumerate(known_cards)))
sys.exit(1)
known_controls = alsaaudio.mixers()
if self.control not in known_controls:
logger.error(
'Could not find ALSA mixer control %s. '
'Known mixers include: %s',
self.control, ', '.join(known_controls))
sys.exit(1)
@property
def _mixer(self):
# The mixer must be recreated every time it is used to be able to
# observe volume/mute changes done by other applications.
# TODO Use card, device, control from config
return alsaaudio.Mixer(control=self.control, cardindex=self.card)
def get_volume(self):
channels = self._mixer.getvolume()
if not channels:
return None
elif channels.count(channels[0]) == len(channels):
return int(channels[0])
else:
# Not all channels have the same volume
return None
def set_volume(self, volume):
self._mixer.setvolume(volume)
return True
def get_mute(self):
channels_muted = self._mixer.getmute()
if all(channels_muted):
return True
elif not any(channels_muted):
return False
else:
# Not all channels have the same mute state
return None
def set_mute(self, muted):
self._mixer.setmute(int(muted))
return True
|
Python
| 0.000002
|
@@ -212,16 +212,40 @@
ixer):%0A%0A
+ name = 'alsamixer'%0A%0A
def
|
fceda4761545eb3838cdbd3bda51531be577a417
|
remove artist
|
mopidy_youtube/backend.py
|
mopidy_youtube/backend.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import string
import unicodedata
from multiprocessing.pool import ThreadPool
from urlparse import parse_qs, urlparse
from mopidy import backend
from mopidy.models import Artist, Album, SearchResult, Track
import pafy
import pykka
import requests
from mopidy_youtube import logger
yt_api_endpoint = 'https://www.googleapis.com/youtube/v3/'
session = requests.Session()
video_uri_prefix = 'youtube:video'
search_uri = 'youtube:search'
def resolve_track(track, stream=False):
logger.debug("Resolving YouTube for track '%s'", track)
if hasattr(track, 'uri'):
return resolve_url(track.comment, stream)
else:
return resolve_url(track.split('.')[-1], stream)
def safe_url(uri):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
safe_uri = unicodedata.normalize(
'NFKD',
unicode(uri)
).encode('ASCII', 'ignore')
return re.sub(
r'\s+',
' ',
''.join(c for c in safe_uri if c in valid_chars)
).strip()
def resolve_url(url, stream=False):
try:
video = pafy.new(url)
if not stream:
uri = '%s/%s.%s' % (
video_uri_prefix, safe_url(video.title), video.videoid)
else:
uri = video.getbestaudio()
if not uri: # get video url
uri = video.getbest()
logger.debug('%s - %s %s %s %s' % (
video.title, video.author, uri.bitrate, uri.mediatype, uri.extension))
uri = uri.url
if not uri:
return
except Exception as e:
# Video is private or doesn't exist
logger.info(e)
return
images = []
if video.bigthumb is not None:
images.append(video.bigthumb)
if video.bigthumbhd is not None:
images.append(video.bigthumbhd)
track = Track(
name=video.title,
comment=video.videoid,
length=video.length * 1000,
album=Album(
name='YouTube',
images=images
),
artists=[
Artist(
name=video.author
)
],
uri=uri
)
return track
def search_youtube(q, youtube_api_key):
query = {
'part': 'id',
'maxResults': 15,
'type': 'video',
'q': q,
'key': youtube_api_key
}
result = session.get(yt_api_endpoint + 'search', params=query)
data = result.json()
resolve_pool = ThreadPool(processes=16)
if 'items' in data:
playlist = [item['id']['videoId'] for item in data['items']]
else:
playlist = []
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
def resolve_playlist(url, youtube_api_key):
resolve_pool = ThreadPool(processes=16)
logger.info("Resolving YouTube-Playlist '%s'", url)
playlist = []
page = 'first'
while page:
params = {
'playlistId': url,
'maxResults': 50,
'part': 'contentDetails',
'key': youtube_api_key
}
if page and page != "first":
logger.debug("Get YouTube-Playlist '%s' page %s", url, page)
params['pageToken'] = page
result = session.get(yt_api_endpoint + 'playlistItems', params=params)
data = result.json()
page = data.get('nextPageToken')
for item in data["items"]:
video_id = item['contentDetails']['videoId']
playlist.append(video_id)
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
class YouTubeBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(YouTubeBackend, self).__init__()
self.config = config
self.library = YouTubeLibraryProvider(backend=self)
self.playback = YouTubePlaybackProvider(audio=audio, backend=self)
self.library.youtube_api_key = config['youtube']['youtube_api_key']
self.uri_schemes = ['youtube', 'yt']
class YouTubeLibraryProvider(backend.LibraryProvider):
def lookup(self, track):
if 'yt:' in track:
track = track.replace('yt:', '')
if 'youtube.com' in track:
url = urlparse(track)
req = parse_qs(url.query)
if 'list' in req:
return resolve_playlist(
req.get('list')[0],
self.youtube_api_key
)
else:
return [item for item in [resolve_url(track)] if item]
else:
return [item for item in [resolve_track(track)] if item]
def search(self, query=None, uris=None, exact=False):
# TODO Support exact search
if not query:
return
if 'uri' in query:
search_query = ''.join(query['uri'])
url = urlparse(search_query)
if 'youtube.com' in url.netloc:
req = parse_qs(url.query)
if 'list' in req:
return SearchResult(
uri=search_uri,
tracks=resolve_playlist(
req.get('list')[0],
self.youtube_api_key
)
)
else:
logger.info(
"Resolving YouTube for track '%s'", search_query)
return SearchResult(
uri=search_uri,
tracks=[t for t in [resolve_url(search_query)] if t]
)
else:
search_query = ' '.join(query.values()[0])
logger.info("Searching YouTube for query '%s'", search_query)
return SearchResult(
uri=search_uri,
tracks=search_youtube(
search_query,
self.youtube_api_key
)
)
class YouTubePlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track = resolve_track(uri, True)
if track is not None:
return track.uri
else:
return None
|
Python
| 0
|
@@ -243,16 +243,8 @@
port
- Artist,
Alb
@@ -1442,19 +1442,16 @@
- %25s %25s
-%25s
%25s' %25 (%0A
@@ -1482,22 +1482,8 @@
tle,
- video.author,
uri
@@ -2067,105 +2067,8 @@
),%0A
- artists=%5B%0A Artist(%0A name=video.author%0A )%0A %5D,%0A
|
3a5dfcce22ef2e1da6c2ee680a6bf37228a3be2c
|
change view file for pantry
|
flask-api/api/blueprints/pantry/views.py
|
flask-api/api/blueprints/pantry/views.py
|
from flask import Blueprint, request, make_response, jsonify, g
from flask.views import MethodView
from flask_restful import Api, Resource, url_for
from api import bcrypt, db
from api.models.user import User
from api.models.ingredient import Ingredient, PantryIngredient
from api.decorators import is_logged_in
pantry_blueprint = Blueprint('pantry', __name__)
pantry_api = Api(pantry_blueprint)
class IngredientsResource(Resource):
"""
Resources for managing ingredients in the User's pantry
"""
decorators = [is_logged_in]
def delete(self):
""" Remove an ingredient from the pantry """
pass
def patch(self):
""" Change the amount of an ingredient in the pantry """
try:
# Check if user exists
user = User.query.get(g.user_id)
if not user:
responseObject = {
'status': 'fail',
'message': 'User does not exist.'
}
return make_response(jsonify(responseObject), 202)
patch_data = request.get_json()
for entry in patch_data.get("ingredients"):
# Check if ingredient exists
ingredient = Ingredient.query.filter(Ingredient.name == entry.get('ingredient_name')).first()
if not ingredient:
responseObject = {
'status': 'fail',
'message': 'Ingredient does not exist.'
}
db.session.remove()
return make_response(jsonify(responseObject), 202)
p_i = PantryIngredient.query.filter( (PantryIngredient.user_id == g.user_id) & (PantryIngredient.ingredient_id == ingredient.id) ).first()
if p_i:
p_i.value = entry['value']
else:
pantry_ingredient = PantryIngredient(user = user, ingredient = ingredient, value=entry['value'])
db.session.add(pantry_ingredient)
db.session.commit()
responseObject = {
'status': 'success',
'message': 'Ingredients added to pantry.'
}
return make_response(jsonify(responseObject), 201)
except Exception as e:
print(e)
def get(self):
""" Get a list all of the ingredients, with the ones in the pantry marked """
try:
user = User.query.filter(User.id == g.user_id)
if user:
pantry_ingredients = PantryIngredient.query.filter(PantryIngredient.user_id == g.user_id).all()
ingredientsObject = []
for i in pantry_ingredients:
ingredient = Ingredient.query.filter(Ingredient.id == i.ingredient_id).first()
ingredientsObject.append({'name': ingredient.name, 'type': ingredient.measurement.value, 'value': i.value, "category": i.category})
responseObject = {
'status': 'success',
'data': {
'ingredients': ingredientsObject
}
}
return make_response(jsonify(responseObject), 200)
else:
responseObject = {
'status': 'fail',
'message': 'User %s not found' % user_id
}
return make_response(jsonify(responseObject), 404)
except Exception as e:
print(e)
responseObject = {
'status': 'fail',
'message': '%s is not a valid user id' % user_id
}
return make_response(jsonify(responseObject), 400)
# recipe_api.add_resource(DetailsResource, '/api/recipe/<string:recipe_id>')
pantry_api.add_resource(IngredientsResource, '/api/user/pantry')
|
Python
| 0
|
@@ -537,24 +537,25 @@
ed_in%5D%0A%0A
+%0A
def
delete(s
@@ -550,14 +550,13 @@
def
-delete
+patch
(sel
@@ -575,14 +575,37 @@
%22%22%22
-Remove
+%0A Change the amount of
an
@@ -615,20 +615,18 @@
redient
-from
+in
the pan
@@ -629,19 +629,16 @@
pantry
-%22%22%22
%0A
@@ -642,70 +642,237 @@
-pass%0A%0A def patch(self):%0A %22%22%22 Change the amount of
+Works for existing ingredients or new ingredients%0A Checks for valid User and Ingredients%0A - returns status 'fail' otherwise%0A - does not change database if there is a failure for the user or
an
+y of the
ing
@@ -882,75 +882,183 @@
ient
- in the pantry %22%22%22%0A%0A try:%0A # Check if user exists
+s%0A Reads JSON from HTTP request, contiaining info about the ingredients to be changed%0A Commits changes to the database using SQL-Alchemy%0A %22%22%22%0A try:
%0A
@@ -1381,28 +1381,16 @@
_json()%0A
-
%0A
@@ -1441,54 +1441,8 @@
s%22):
-%0A%0A # Check if ingredient exists
%0A
@@ -1879,18 +1879,16 @@
p
-_i
= Pantr
@@ -2007,32 +2007,49 @@
t.id) ).first()%0A
+ %0A
@@ -2297,16 +2297,17 @@
dient)%0A%0A
+%0A
@@ -2330,17 +2330,16 @@
ommit()%0A
-%0A
@@ -2462,32 +2462,33 @@
'%0A %7D%0A
+%0A
retu
@@ -2534,16 +2534,18 @@
), 201)%0A
+%0A%0A
|
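The diff above removes the delete() stub and turns patch() into the documented entry point; its core behaviour is an upsert of pantry amounts. A self-contained sketch of that upsert (a plain dict stands in for the PantryIngredient table used by the real view):

def upsert_pantry_amount(pantry, ingredient_name, value):
    # update an existing entry or create a new one, as patch() does via SQLAlchemy
    pantry[ingredient_name] = value
    return pantry

assert upsert_pantry_amount({'salt': 1}, 'salt', 3) == {'salt': 3}
assert upsert_pantry_amount({}, 'flour', 2) == {'flour': 2}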
affe8fc9ab1f24aab0463898c84bd7fdaab3e0c1
|
Make sure urlrewrite plugin only affects tasks it is configured in. fix #2812
|
flexget/plugins/urlrewrite_urlrewrite.py
|
flexget/plugins/urlrewrite_urlrewrite.py
|
from __future__ import unicode_literals, division, absolute_import
import re
import logging
from flexget import plugin
from flexget.event import event
from flexget.plugins.plugin_urlrewriting import UrlRewritingError
log = logging.getLogger('urlrewrite')
class UrlRewrite(object):
"""
Generic configurable urlrewriter.
Example::
urlrewrite:
demonoid:
regexp: http://www\.demonoid\.com/files/details/
format: http://www.demonoid.com/files/download/HTTP/
"""
resolves = {}
# built-in resolves
# resolves = yaml.safe_load("""
# tvsubtitles:
# match: http://www.tvsubtitles.net/subtitle-
# replace: http://www.tvsubtitles.net/download-
# """
# )
schema = {
'type': 'object',
'additionalProperties': {
'type': 'object',
'properties': {
'regexp': {'type': 'string', 'format': 'regex'},
'format': {'type': 'string'}
},
'required': ['regexp', 'format'],
'additionalProperties': False
}
}
def on_task_start(self, task, config):
for name, rewrite_config in config.iteritems():
match = re.compile(rewrite_config['regexp'])
format = rewrite_config['format']
self.resolves[name] = {'regexp_compiled': match, 'format': format, 'regexp': rewrite_config['regexp']}
log.debug('Added rewrite %s' % name)
def url_rewritable(self, task, entry):
log.trace('running url_rewritable')
log.trace(self.resolves)
for name, config in self.resolves.iteritems():
regexp = config['regexp_compiled']
log.trace('testing %s' % config['regexp'])
if regexp.search(entry['url']):
return True
return False
def url_rewrite(self, task, entry):
for name, config in self.resolves.iteritems():
regexp = config['regexp_compiled']
format = config['format']
if regexp.search(entry['url']):
log.debug('Regexp resolving %s with %s' % (entry['url'], name))
# run the regexp
entry['url'] = regexp.sub(format, entry['url'])
if regexp.match(entry['url']):
entry.fail('urlrewriting')
raise UrlRewritingError('Regexp %s result should NOT continue to match!' % name)
return
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewrite, 'urlrewrite', groups=['urlrewriter'], api_ver=2)
|
Python
| 0
|
@@ -530,207 +530,8 @@
%7B%7D%0A%0A
- # built-in resolves%0A%0A# resolves = yaml.safe_load(%22%22%22%0A# tvsubtitles:%0A# match: http://www.tvsubtitles.net/subtitle-%0A# replace: http://www.tvsubtitles.net/download-%0A# %22%22%22%0A# )%0A%0A
@@ -541,16 +541,16 @@
ema = %7B%0A
+
@@ -932,16 +932,65 @@
onfig):%0A
+ resolves = self.resolves%5Btask.name%5D = %7B%7D%0A
@@ -1152,21 +1152,16 @@
-self.
resolves
@@ -1453,32 +1453,43 @@
in self.resolves
+%5Btask.name%5D
.iteritems():%0A
@@ -1714,32 +1714,32 @@
, task, entry):%0A
-
for name
@@ -1763,16 +1763,27 @@
resolves
+%5Btask.name%5D
.iterite
|
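The fix stores the compiled rules per task name, so a urlrewrite block configured in one task no longer rewrites entries of other tasks. Self-contained sketch of that scoping (class and method signatures simplified from the plugin above):

import re

class UrlRewriteSketch(object):
    def __init__(self):
        self.resolves = {}  # task name -> {rule name: (compiled regexp, format)}

    def on_task_start(self, task_name, config):
        resolves = self.resolves[task_name] = {}
        for name, rewrite in config.items():
            resolves[name] = (re.compile(rewrite['regexp']), rewrite['format'])

    def url_rewrite(self, task_name, url):
        for regexp, fmt in self.resolves.get(task_name, {}).values():
            if regexp.search(url):
                return regexp.sub(fmt, url)
        return url

rw = UrlRewriteSketch()
rw.on_task_start('tv', {'demonoid': {
    'regexp': r'http://www\.demonoid\.com/files/details/',
    'format': 'http://www.demonoid.com/files/download/HTTP/'}})
print(rw.url_rewrite('tv', 'http://www.demonoid.com/files/details/12345'))      # rewritten
print(rw.url_rewrite('movies', 'http://www.demonoid.com/files/details/12345'))  # untouched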
b6b721bd4072fc28a613fd2d4491cdc8e29b8fe5
|
Fix xref counts in release notes
|
rnacentral/portal/management/commands/ftp_exporters/ftp_base.py
|
rnacentral/portal/management/commands/ftp_exporters/ftp_base.py
|
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import logging
import re
import subprocess
import time
import psycopg2
from django.conf import settings
from django.contrib.humanize.templatetags.humanize import intcomma
from portal.models import Rna, Database, Xref
class FtpBase(object):
"""
Base class for FTP export helper classes.
"""
def __init__(self, destination='', test=False):
"""
Set common variables.
"""
self.destination = destination
self.test = test # boolean indicating whether to export all data or the first `self.entries`.
self.test_entries = 100 # number of entries to process when --test=True
self.examples = 5 # number of entries to write to the example files
self.cursor = None # database cursor
self.filenames = {} # defined in each class
self.filehandles = {} # holds all open filehandles
self.subfolders = { # names of subfolders
'coordinates': 'genome_coordinates',
'md5': 'md5',
'sequences': 'sequences',
'trackhub': os.path.join('genome_coordinates', 'track_hub'),
'xrefs': 'id_mapping',
}
logging.basicConfig(level='INFO')
self.logger = logging.getLogger(self.__class__.__name__)
#########################
# Files and directories #
#########################
def get_output_filename(self, filename, parent_dir=''):
"""
Get full path to the file `filename`
located either in `self.destination` or `parent_dir`.
"""
if parent_dir:
return os.path.join(parent_dir, filename)
else:
return os.path.join(self.destination, filename)
def get_filenames_and_filehandles(self, names, destination=''):
"""
Get all required filenames and filehandles.
names = {
'file_nickname': 'file_full_name',
'readme': 'readme.txt',
}
destination = 'path/to/output/files' # optional
"""
# reset the dictionaries
self.filehandles = {}
self.filenames = {}
# use self.destination by default
if not destination:
destination = self.destination
for key, value in names.iteritems():
value = self.get_output_filename(value, parent_dir=destination)
self.filenames[key] = value
self.filehandles[key] = open(value, 'w')
def make_subdirectory(self, parent_dir, child_dir):
"""
Create a subdirectory child_dir in directory parent_dir.
"""
new_folder = os.path.join(parent_dir, child_dir)
if not os.path.exists(new_folder):
os.mkdir(new_folder)
return new_folder
def clean_up(self):
"""
* close all filehandles
* gzip and delete all files except for examples and readme
"""
for filename, filepath in self.filenames.iteritems():
self.filehandles[filename].close()
if 'example' not in filename and 'readme' not in filename:
self.gzip_file(filepath)
os.remove(filepath)
def gzip_file(self, filename):
"""
Compress a given file using gzip, return the compressed file name.
"""
gzipped_filename = '%s.gz' % filename
cmd = 'gzip < %s > %s' % (filename, gzipped_filename)
self.logger.info('Compressing file %s' % filename)
status = subprocess.call(cmd, shell=True)
if status == 0:
self.logger.info('File compressed, new file %s' % gzipped_filename)
return gzipped_filename
else:
self.logger.info('Compressing failed, no file created')
return ''
def log_database_error(self, pg_exception):
"""
Log Postgres error message.
"""
self.logger.critical('Postgres: %s' % pg_exception.diag.message_primary)
##################
# Data retrieval #
##################
def get_xrefs_with_genomic_coordinates(self, taxid):
"""
Get RNA sequences with genomic coordinates.
"""
xrefs = Xref.objects.select_related('accession__coordinates').\
filter(db__project_id__isnull=True).\
filter(taxid=taxid).\
filter(deleted='N').\
filter(accession__coordinates__chromosome__isnull=False).\
values_list('accession', flat=True).\
distinct()
return xrefs
########
# Misc #
########
def format_docstring(self, text):
"""
Prepare docstring to be saved in a text file.
"""
text = re.sub(r'^\s+', '', text)
text = re.sub(r'\n +', '\n', text)
return text
def create_genomic_readme(self):
"""
===================================================================
RNAcentral Genomic Coordinates Data
===================================================================
This directory contains genomic coordinates for a subset of RNAcentral ids
where such mapping is available.
* Bed
Format description:
http://www.ensembl.org/info/website/upload/bed.html
http://genome.ucsc.edu/FAQ/FAQformat.html
* Gff2
Format description:
http://www.sanger.ac.uk/resources/software/gff/spec.html
* Gff3
Format description:
http://www.sequenceontology.org/gff3.shtml
* track_hub/
UCSC-style track hub description:
https://genome.ucsc.edu/goldenPath/help/hgTrackHubHelp.html
Track hub folder structure:
- genomes.txt [list of annotated genomes]
- hub.txt [track hub description]
- hg38 [human GRCh38 assembly]
-- rnacentral.BigBed [bigBed binary data file]
-- rnacentral.html []
-- trackDb.txt [track description]
"""
text = self.create_genomic_readme.__doc__
text = self.format_docstring(text)
f = open(self.get_output_filename('readme.txt', parent_dir=self.subdirectory), 'w')
f.write(text)
f.close()
def create_release_notes_file(self):
"""
===================================================================
RNAcentral Release {release_date}
===================================================================
RNAcentral is an online resource for organising data
about non-protein coding RNA genes.
This release consists of {sequence_count} unique RNA sequences
with {xrefs_count} cross-references to {database_count} Expert Databases.
The release data are stored in subdirectories in this folder. Large data files
are compressed with Gzip. Small uncompressed example files are also provided.
Each folder contains a readme file with data description.
RNAcentral is available online at http://rnacentral.org.
For more ways of downloading the data go to http://rnacentral.org/downloads.
"""
text = self.create_release_notes_file.__doc__
text = self.format_docstring(text)
release_date = time.strftime("%d/%m/%Y")
sequence_count = intcomma(Rna.objects.count())
xrefs_count = intcomma(Xref.objects.count())
database_count = intcomma(Database.objects.count())
text = text.format(release_date=release_date,
sequence_count=sequence_count,
database_count=database_count,
xrefs_count=xrefs_count)
f = open(self.get_output_filename('release_notes_template.txt'), 'w')
f.write(text)
f.close()
|
Python
| 0
|
@@ -7977,32 +7977,52 @@
ma(Xref.objects.
+filter(deleted='N').
count())%0A
|
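The one-line diff makes the release notes count only live cross-references: Xref.objects.filter(deleted='N').count() instead of Xref.objects.count(). A tiny illustration of the difference, with plain data standing in for the ORM:

xrefs = [{'deleted': 'N'}, {'deleted': 'Y'}, {'deleted': 'N'}]

total_count = len(xrefs)                                     # old behaviour
active_count = sum(1 for x in xrefs if x['deleted'] == 'N')  # new behaviour

assert (total_count, active_count) == (3, 2)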
27699aa0e7a2fb96dab2900eba6b0b8c7e24909b
|
remove leftover print statement
|
celery/bin/celeryd_detach.py
|
celery/bin/celeryd_detach.py
|
import os
import sys
from optparse import OptionParser, BadOptionError, make_option as Option
from celery import __version__
from celery.platforms import create_daemon_context
OPTION_LIST = (
Option('-f', '--logfile', default=None,
action="store", dest="logfile",
help="Path to the logfile"),
Option('--pidfile', default="celeryd.pid",
action="store", dest="pidfile",
help="Path to the pidfile."),
Option('--uid', default=None,
action="store", dest="uid",
help="Effective user id to run as when detached."),
Option('--gid', default=None,
action="store", dest="gid",
help="Effective group id to run as when detached."),
Option('--umask', default=0,
action="store", type="int", dest="umask",
help="Umask of the process when detached."),
Option('--workdir', default=None,
action="store", dest="working_directory",
help="Directory to change to when detached."),
Option('--chroot', default=None,
action="store", dest="chroot_directory",
help="Change root directory to this path when detached."),
)
class detached(object):
def __init__(self, path, argv, logfile=None, pidfile=None, uid=None,
gid=None, umask=0, working_directory=None, chroot_directory=None):
self.path = path
self.argv = argv
self.logfile = logfile
self.pidfile = pidfile
self.uid = uid
self.gid = gid
self.umask = umask
self.working_directory = working_directory
self.chroot_directory = chroot_directory
def start(self):
context, on_stop = create_daemon_context(
logfile=self.logfile,
pidfile=self.pidfile,
uid=self.uid,
gid=self.gid,
umask=self.umask,
working_directory=self.working_directory,
chroot_directory=self.chroot_directory)
context.open()
try:
os.execv(self.path, [self.path] + self.argv)
finally:
on_stop()
class PartialOptionParser(OptionParser):
def __init__(self, *args, **kwargs):
self.leftovers = []
OptionParser.__init__(self, *args, **kwargs)
def _process_long_opt(self, rargs, values):
arg = rargs.pop(0)
if "=" in arg:
opt, next_arg = arg.split("=", 1)
rargs.insert(0, next_arg)
had_explicit_value = True
else:
opt = arg
had_explicit_value = False
try:
opt = self._match_long_opt(opt)
option = self._long_opt.get(opt)
except BadOptionError:
option = None
if option:
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
if nargs == 1:
self.error(_("%s option requires an argument") % opt)
else:
self.error(_("%s option requires %d arguments")
% (opt, nargs))
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
elif had_explicit_value:
self.error(_("%s option does not take a value") % opt)
else:
value = None
option.process(opt, value, values, self)
else:
self.leftovers.append(arg)
def _process_short_opts(self, rargs, values):
arg = rargs[0]
try:
OptionParser._process_short_opts(self, rargs, values)
except BadOptionError:
self.leftovers.append(arg)
if rargs and not rargs[0][0] == "-":
self.leftovers.append(rargs.pop(0))
class detached_celeryd(object):
option_list = OPTION_LIST
usage = "%%prog [options] [celeryd options]"
version = __version__
description = ("Detaches Celery worker nodes. See `celeryd --help` "
"for the list of supported worker arguments.")
command = sys.executable
execv_path = sys.executable
execv_argv = ["-m", "celery.bin.celeryd"]
def Parser(self, prog_name):
return PartialOptionParser(prog=prog_name,
option_list=self.option_list,
usage=self.usage,
description=self.description,
version=self.version)
def parse_options(self, prog_name, argv):
parser = self.Parser(prog_name)
options, values = parser.parse_args(argv)
if options.logfile:
parser.leftovers.append("--logfile=%s" % (options.logfile, ))
if options.pidfile:
parser.leftovers.append("--pidfile=%s" % (options.pidfile, ))
print("LEFTOVERS: %r" % (parser.leftovers, ))
return options, values, parser.leftovers
def execute_from_commandline(self, argv=None):
if argv is None:
argv = sys.argv
prog_name = os.path.basename(argv[0])
options, values, leftovers = self.parse_options(prog_name, argv[1:])
detached(path=self.execv_path,
argv=self.execv_argv + leftovers,
**vars(options)).start()
def main():
detached_celeryd().execute_from_commandline()
if __name__ == "__main__":
main()
|
Python
| 0.027089
|
@@ -5160,62 +5160,8 @@
))%0A
- print(%22LEFTOVERS: %25r%22 %25 (parser.leftovers, ))%0A
|
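The diff simply drops the leftover debug print from parse_options. Reconstructed from the code above (a sketch, not an exact checkout of the new file), the method then reads:

    def parse_options(self, prog_name, argv):
        parser = self.Parser(prog_name)
        options, values = parser.parse_args(argv)
        if options.logfile:
            parser.leftovers.append("--logfile=%s" % (options.logfile, ))
        if options.pidfile:
            parser.leftovers.append("--pidfile=%s" % (options.pidfile, ))
        return options, values, parser.leftovers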
7dc26b39ea988d46669a11a6f6e10d648e4b9470
|
support for token response in authorization code
|
keystoneclient/v3/contrib/oauth2/authorization_codes.py
|
keystoneclient/v3/contrib/oauth2/authorization_codes.py
|
# Copyright (C) 2014 Universidad Politecnica de Madrid
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import urllib
import six
from six.moves.urllib import parse as urlparse
from keystoneclient import base
from keystoneclient.v3.contrib.oauth2 import utils
class AuthorizationCode(base.Resource):
""" TODO(garcianavalon)
"""
pass
class AuthorizationCodeManager(base.CrudManager):
"""Manager class for manipulating identity OAuth authorization codes."""
resource_class = AuthorizationCode
collection_key = 'authorization_codes'
key = 'authorization_code'
base_url = utils.OAUTH2_PATH
def authorize(self, consumer, scopes, redirect=False):
"""Authorize a Consumer for certain scopes, getting an authorization code.
The way the provider (Keystone) will return the code is in the header, as an
HTTP redirection:
'Location': 'https://foo.com/welcome_back?code=somerandomstring&state=xyz'
Utilize Identity API operation:
POST /OS-OAUTH2/authorize/
:param user: the user granting authorization
:param consumer: the client that will be authorized, and
will exchange the authorization code for an access token.
:param scopes: a list of scopes. They are provided by the consumer
in the authorization request
:param redirect: The Keystone OAuth2 extension returns an HTTP 302 to
comply with RFC 6749 but in general we dont want the redirect to happen
if we are using the keystoneclient.
"""
endpoint = self.base_url + '/authorize'
body = {
'user_auth': {
'client_id':base.getid(consumer),
'scopes':scopes
}
}
response, body = self.client.post(endpoint, body=body, redirect=redirect)
redirect_uri = response.headers.get('Location')
parsed = urlparse.urlparse(redirect_uri)
query = dict(urlparse.parse_qsl(parsed.query))
authorization_code = {
'redirect_uri':redirect_uri,
'code': query['code'],
'state': query['state']
}
return self.resource_class(self, authorization_code)
def request_authorization(self, consumer, redirect_uri, scope, response_type='code', state=None):
""" Send the consumer credentials to the OAuth2 provider.
The user then will be asked to authorize the client for the requested scopes. In
the OAuth2 flow this happens when the client(consumer) redirects the resource
owner(user) through his user agent to the authorization server(provider).
Therefore, this call is done by the user but with the data provided by the
consumer.
Utilize Identity API operation:
GET /OS-OAUTH2/authorize/?client_id=&redirect_uri=&response_type=code&state=
:param consumer: the consumer asking for authorization
:param redirect_uri: The url the user will be redirected to. It must be
            registered in the server associated with the requesting consumer.
:param scope: list of strings with the requested scopes from the ones
defined by the provider.
:param state: Optional, a string for consumer use.
"""
# Transform the array with the requested scopes into a list of
# space-delimited, case-sensitive strings as specified in RFC 6749
# http://tools.ietf.org/html/rfc6749#section-3.3
scope_string = ' '.join(scope)
# NOTE(garcianavalon) we use a list of tuples to ensure param order
# in the query string to be able to mock it during testing.
credentials = [
('response_type', response_type),
('client_id', base.getid(consumer)),
('redirect_uri', redirect_uri),
('scope', scope_string),
('state', state)
]
query = urllib.urlencode(credentials)
endpoint = self.base_url + '/authorize?%s' %query
response, body = self.client.get(endpoint)
# TODO(garcianavalon) figure out the return. Do we need a separated manager?
return json.loads(response.content)
|
Python
| 0
|
@@ -2599,29 +2599,79 @@
'
+state': query%5B'state'%5D%0A %7D%0A
code
-':
+ =
query
-%5B
+.get(
'code'
-%5D
,
+ None)
%0A
@@ -2679,45 +2679,170 @@
- 'state': query%5B'state'%5D%0A %7D
+if code:%0A authorization_code%5B'code'%5D = code%0A%0A token = query.get('token', None)%0A if token:%0A authorization_code%5B'token'%5D = token
%0A%0A
|
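Besides keeping 'state', the change treats 'code' as optional and also picks up a 'token' query parameter from the redirect. Self-contained sketch of the new parsing (stdlib urllib.parse is used here instead of six.moves; the example URL is the one from the docstring above):

from urllib.parse import urlparse, parse_qsl

def parse_authorization_redirect(redirect_uri):
    query = dict(parse_qsl(urlparse(redirect_uri).query))
    result = {'redirect_uri': redirect_uri, 'state': query['state']}
    code = query.get('code', None)
    if code:
        result['code'] = code
    token = query.get('token', None)
    if token:
        result['token'] = token
    return result

print(parse_authorization_redirect(
    'https://foo.com/welcome_back?code=somerandomstring&state=xyz'))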
bb6ba761e6b7d8ff60662b36f18afdbf2c93bd13
|
change link message text to include baby switch
|
snappybouncer/tasks.py
|
snappybouncer/tasks.py
|
from celery import task
from celery.utils.log import get_task_logger
import requests
import json
import urllib
from go_http.send import HttpApiSender
from go_http.contacts import ContactsApiClient
from besnappy import SnappyApiSender
from django.conf import settings
from snappybouncer.models import Ticket
logger = get_task_logger(__name__)
@task()
def send_helpdesk_response(ticket):
# Make a session to Vumi
sender = HttpApiSender(
account_key=settings.VUMI_GO_ACCOUNT_KEY,
conversation_key=settings.VUMI_GO_CONVERSATION_KEY,
conversation_token=settings.VUMI_GO_ACCOUNT_TOKEN
)
# Send message
response = sender.send_text(ticket.msisdn, ticket.response)
# TODO: Log outbound send metric
return response
def jembi_format_date(date):
return date.strftime("%Y%m%d%H%M%S")
def build_jembi_helpdesk_json(ticket):
json_template = {
"encdate": jembi_format_date(ticket.created_at),
"repdate": jembi_format_date(ticket.updated_at),
"mha": 1,
"swt": 2, # 1 ussd, 2 sms
"cmsisdn": ticket.msisdn,
"dmsisdn": ticket.msisdn,
"faccode": str(ticket.faccode),
"data": {
"question": ticket.message,
"answer": ticket.response
},
"class": ticket.tag,
"type": 7, # 7 helpdesk
"op": str(ticket.operator)
}
return json_template
@task()
def send_helpdesk_response_jembi(ticket):
data = build_jembi_helpdesk_json(ticket)
api_url = ("%s/helpdesk" % settings.JEMBI_BASE_URL)
headers = {
'Content-Type': 'application/json'
}
result = requests.post(api_url, headers=headers, data=json.dumps(data),
auth=(settings.JEMBI_USERNAME,
settings.JEMBI_PASSWORD),
verify=False)
return result.text
@task()
def create_snappy_ticket(ticket):
# Make a session to Snappy
snappy_api = SnappyApiSender(
api_key=settings.SNAPPY_API_KEY,
api_url=settings.SNAPPY_BASE_URL
)
# Send message
subject = "Support for %s" % (ticket.msisdn)
snappy_ticket = snappy_api.create_note(
mailbox_id=settings.SNAPPY_MAILBOX_ID,
subject=subject,
message=ticket.message,
to_addr=None,
from_addr=[{"name": ticket.msisdn, "address": settings.SNAPPY_EMAIL}]
)
ticket.support_nonce = snappy_ticket
ticket.save()
update_snappy_ticket_with_extras.delay(snappy_api, ticket.support_nonce,
ticket.contact_key, subject)
# TODO: Log ticket created metric
return True
@task()
def update_snappy_ticket_with_extras(snappy_api, nonce, contact_key, subject):
# Gets more extras from Vumi and creates a private note with them
contacts_api = ContactsApiClient(auth_token=settings.VUMI_GO_API_TOKEN)
contact = contacts_api.get_contact(contact_key)
extra_info = ""
for extra in settings.SNAPPY_EXTRAS:
# Add available contact extras
if extra in contact["extra"]:
extra_info += extra + ": " + contact["extra"][extra] + "\n"
# Add opt-out link
optout_url = settings.SITE_DOMAIN_URL + \
"/controlinterface/subscription/?msisdn=" + \
urllib.quote_plus(contact["msisdn"])
extra_info += "Opt this user out: " + optout_url + "\n"
# Send private note
snappy_api.create_note(
mailbox_id=settings.SNAPPY_MAILBOX_ID,
subject=subject,
message=extra_info,
to_addr=[{
"name": "Internal Information",
"address": settings.SNAPPY_EMAIL}],
ticket_id=nonce,
scope="private",
staff_id=settings.SNAPPY_STAFF_ID
)
return True
def extract_tag(tags):
"""
    Takes a list of tags and extracts the first hashtagged item
in the list, returning it as a string without the hashtag.
eg. ["@person", "#coffee", "#payment"] -> "coffee"
"""
for tag in tags:
if tag[0] == "#":
return tag[1::]
return None
def extract_operator(tags, operators):
"""
Takes a list of tags and a dict of operator names mapped to their
numbers and returns the operator number of the operator name in
the list of tags.
eg. ["@barry", "#question"] -> barry's operator number
"""
for tag in tags:
if tag[0] == "@":
return operators[tag[1::]]
return None
@task()
def backfill_ticket(ticket_id, operators):
"""
Looks up the Ticket's operator number and first tag and saves it
to the ticket, then fires a follow-up task that saves the faccode
to the ticket if available.
"""
# Make a session to Snappy
snappy_api = SnappyApiSender(
api_key=settings.SNAPPY_API_KEY,
api_url=settings.SNAPPY_BASE_URL
)
# Get the ticket object
ticket = Ticket.objects.get(id=ticket_id)
# Look up the ticket on Snappy (get request)
response = snappy_api._api_request(
'GET', 'ticket/%s/' % ticket.support_id).json()
# Save the operator & tag to the Ticket
ticket.tag = extract_tag(response["tags"])
ticket.operator = extract_operator(response["tags"], operators)
ticket.save()
# Fire off a task to look up the facility_code on the contact
backfill_ticket_faccode.delay(ticket_id)
return "Ticket %s backfilled" % ticket.support_id
def get_ticket_faccode(contact_key):
"""
Looks up and returns a contact's clinic code extra if they have one.
"""
contacts_api = ContactsApiClient(auth_token=settings.VUMI_GO_API_TOKEN)
contact = contacts_api.get_contact(contact_key)
if "clinic_code" in contact["extra"]:
return contact["extra"]["clinic_code"]
return None
@task()
def backfill_ticket_faccode(ticket_id):
"""
Looks up a Ticket contact's clinic code and stores it in the ticket
"""
# Get the ticket object
ticket = Ticket.objects.get(id=ticket_id)
# Get and save the faccode
ticket.faccode = get_ticket_faccode(ticket.contact_key)
ticket.save()
return "Ticket %s faccode backfilled" % ticket.support_id
|
Python
| 0
|
@@ -3359,12 +3359,29 @@
ser
-out:
+Out or Switch to Baby
%22 +
|
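The diff only rewords the opt-out label in the private note. Sketch of the new text (urllib.parse.quote_plus stands in for the Python 2 urllib.quote_plus used above; domain and msisdn are placeholders):

from urllib.parse import quote_plus

def optout_line(site_domain_url, msisdn):
    optout_url = (site_domain_url +
                  "/controlinterface/subscription/?msisdn=" +
                  quote_plus(msisdn))
    return "Opt this user Out or Switch to Baby: " + optout_url + "\n"

print(optout_line("https://example.org", "+27820001234"))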
d4ef5e2cb956d7ac7b28497cdc849f7c2bc85712
|
add radio by default
|
shop_catalog/settings.py
|
shop_catalog/settings.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
SLUG_FIELD_HELP_TEXT = _(
'Can only contain the letters a-z, A-Z, digits, minus and underscores, '
'and can\'t start with a digit.')
PRODUCT_CHANGE_FORM_TEMPLATE = (
'admin/shop_catalog/product_change_form.html')
ATTRIBUTE_TEMPLATE_CHOICES = ()
HAS_CATEGORIES = getattr(settings, 'SHOP_CATALOG_HAS_CATEGORIES', True)
HAS_BRANDS = getattr(settings, 'SHOP_CATALOG_HAS_BRANDS', True)
HAS_MANUFACTURERS = getattr(settings, 'SHOP_CATALOG_HAS_MANUFACTURERS', True)
|
Python
| 0.000001
|
@@ -405,17 +405,120 @@
OICES =
-(
+getattr(%0A settings, 'SHOP_CATALOG_ATTRIBUTE_TEMPLATE_CHOICES', (%0A ('radio', _('Radio')),%0A )
)%0A%0AHAS_C
|
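The diff turns ATTRIBUTE_TEMPLATE_CHOICES into a getattr lookup with a 'radio' choice as the default. Self-contained sketch of the new default (a bare object stands in for django.conf.settings and gettext for ugettext_lazy):

from gettext import gettext as _

class _Settings(object):
    pass  # stand-in for django.conf.settings

settings = _Settings()

ATTRIBUTE_TEMPLATE_CHOICES = getattr(
    settings, 'SHOP_CATALOG_ATTRIBUTE_TEMPLATE_CHOICES', (
        ('radio', _('Radio')),
    )
)

assert ATTRIBUTE_TEMPLATE_CHOICES == (('radio', 'Radio'),)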
aa84a3b3a0b936ab29da5b0a8b3590841e787991
|
add properties
|
biothings_explorer/dispatcher.py
|
biothings_explorer/dispatcher.py
|
# -*- coding: utf-8 -*-
"""
biothings_explorer.dispatcher
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains code that biothings_explorer use to communicate to and receive from APIs. It serves as a glue between "apicall" module and "api_output_parser" module.
"""
from collections import defaultdict
from .registry import Registry
from .apicall import BioThingsCaller
from .api_output_parser import OutputParser
class Dispatcher():
def __init__(self, edges, values, batch_mode=False):
self.edges = edges
self.registry = Registry().registry
self.batch_mode = batch_mode
self.values = self.preprocess_input_values(values)
self.caller = BioThingsCaller(batch_mode=batch_mode)
def preprocess_input_values(self, values):
if not self.batch_mode:
return values
else:
if type(values) == str:
return values
elif type(values) == list:
return ','.join(values)
else:
raise ValueError('{} should be str or list'.format(values))
def fetch_schema_mapping_file(self, api):
"""Fetch schema mapping file from the registry"""
return self.registry[api]['mapping']
def subset_mapping_file(self, edge, mapping_file):
"""Only maintain a subset of mapping file based on edge label"""
return {k:v for (k,v) in mapping_file.items() if k in ["@context", "@type", edge["label"]]}
def dispatch(self):
"""send request to and parse response from API"""
results = defaultdict(list)
for _edge in self.edges.values():
mapping = self.fetch_schema_mapping_file(_edge['api'])
subset_mapping = self.subset_mapping_file(_edge, mapping)
response = self.caller.call_api(_edge['api'],
_edge['input_field'],
_edge['output_field'],
self.values)
_res = OutputParser(response, subset_mapping,
_edge['label'],
self.batch_mode,
_edge['api']).parse()
if not self.batch_mode:
results[_edge['label']] += _res
else:
results[_edge['label']].append(_res)
return dict(results)
|
Python
| 0.000001
|
@@ -495,24 +495,25 @@
self.
+_
edges = edge
@@ -563,32 +563,33 @@
ry%0A self.
+_
batch_mode = bat
@@ -605,24 +605,25 @@
self.
+_
values = sel
@@ -726,50 +726,739 @@
-def preprocess_input_values(self, values):
+@property%0A def batch_mode(self):%0A return self._batch_mode%0A%0A @batch_mode.setter%0A def batch_mode(self, value):%0A self._batch_mode = value%0A self._values = self.preprocess_input_values(values)%0A%0A @property%0A def values(self):%0A return self._values%0A%0A @values.setter%0A def values(self, value):%0A self._values = self.preprocess_input_values(value)%0A%0A @property%0A def edges(self):%0A return self._edges%0A%0A @edges.setter%0A def values(self, value):%0A self._edges = value%0A%0A def preprocess_input_values(self, values):%0A %22%22%22Preprocess the input values%0A%0A If batch_mode is set to be True, convert input values into a string%0A separated by ',' %0A %22%22%22
%0A
@@ -1466,32 +1466,33 @@
if not self.
+_
batch_mode:%0A
@@ -2290,16 +2290,17 @@
in self.
+_
edges.va
@@ -2684,16 +2684,17 @@
self.
+_
values)%0A
@@ -2832,24 +2832,25 @@
self.
+_
batch_mode,%0A
@@ -2927,16 +2927,17 @@
ot self.
+_
batch_mo
|
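The diff moves edges/values/batch_mode behind underscore attributes and exposes them as properties, so values are re-preprocessed on assignment. Self-contained sketch of that interface (names trimmed; the real class also wires up the edges and the API caller):

class DispatcherSketch(object):
    def __init__(self, values, batch_mode=False):
        self._batch_mode = batch_mode
        self._values = self.preprocess_input_values(values)

    @property
    def values(self):
        return self._values

    @values.setter
    def values(self, value):
        self._values = self.preprocess_input_values(value)

    def preprocess_input_values(self, values):
        # in batch mode the API expects a single comma-separated string
        if not self._batch_mode:
            return values
        if isinstance(values, str):
            return values
        if isinstance(values, list):
            return ','.join(values)
        raise ValueError('{} should be str or list'.format(values))

d = DispatcherSketch(['a', 'b'], batch_mode=True)
assert d.values == 'a,b'
d.values = ['x', 'y', 'z']
assert d.values == 'x,y,z'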
0b635ffb77acb362c34769a72dfd6d0063c32f38
|
Handle range of success codes
|
chargehound/api_requestor.py
|
chargehound/api_requestor.py
|
from __future__ import unicode_literals
import chargehound
import requests
from chargehound.error import create_chargehound_error
from chargehound.version import VERSION
class APIRequestor(object):
def parse_response(self, response):
payload = response.json()
if response.status_code == 200:
return payload
else:
raise create_chargehound_error(payload)
def handle_callback(self, callback):
def handle_response(response, **kwargs):
parsed = self.parse_response(response)
callback(parsed)
return handle_response
def get_url(self, path):
return 'https://' + chargehound.host + chargehound.base_path + path
def make_request(self, method, path, params=None, data=None,
callback=None):
headers = {
'accept': 'application/json',
'user-agent': 'Chargehound/v1 PythonBindings/%s' % VERSION
}
auth = (chargehound.api_key, '')
if callback:
hooks = dict(response=self.handle_callback(callback))
else:
hooks = None
if method == 'get':
return self.parse_response(requests.get(self.get_url(path),
auth=auth,
params=params,
headers=headers,
hooks=hooks))
elif method == 'post':
return self.parse_response(requests.post(self.get_url(path),
auth=auth,
json=data,
headers=headers,
hooks=hooks))
def request(self, method, path, params=None, data=None, callback=None):
if callback is None:
return self.make_request(method, path, params, data)
else:
return self.make_request(method, path, params, data, callback)
|
Python
| 0
|
@@ -305,12 +305,11 @@
ode
-== 2
+%3C 4
00:%0A
|
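The diff widens the success test from an exact 200 to anything below 400. Minimal illustration:

def is_success(status_code):
    return status_code < 400   # was: status_code == 200

assert is_success(200) and is_success(201) and is_success(302)
assert not is_success(404) and not is_success(500)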
41537854b93137b9455c194645778d05e94ec33c
|
Fix error when deleting directories.
|
ide/projects.py
|
ide/projects.py
|
import errno
import os
import shutil
WORKSPACE_DIR = os.path.expanduser('~/mclab-ide-projects')
def get_all_projects():
mkdir_p(WORKSPACE_DIR)
return map(Project, os.listdir(WORKSPACE_DIR))
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST or not os.path.isdir(path):
raise
class Project(object):
def __init__(self, name):
self.name = name
self.root = os.path.join(WORKSPACE_DIR, self.name)
def exists(self):
return os.path.exists(self.root)
def create(self):
mkdir_p(self.root)
self.write_file('ide_entry_point.m', '''
function ide_entry_point()
% This function is used as an entry point for profiling runs, which
% provide the data powering features such as jump-to-definition and
% find callers. You should fill it in with code that exercises as
% much of your project as possible.
end'''[1:])
def delete(self):
shutil.rmtree(self.root)
def files(self):
for dirpath, _, paths in os.walk(self.root):
for path in paths:
if not path.startswith('.'):
yield os.path.join(dirpath, path)[len(self.root) + 1:]
def path(self, file):
return os.path.join(self.root, file)
def read_file(self, file):
with open(self.path(file)) as f:
return f.read()
def write_file(self, file, contents):
path = self.path(file)
mkdir_p(os.path.dirname(path))
with open(path, 'w') as f:
f.write(contents)
def delete_file(self, file):
os.remove(self.path(file))
def rename_file(self, src, dest):
src, dest = self.path(src), self.path(dest)
mkdir_p(os.path.dirname(dest))
shutil.move(src, dest)
|
Python
| 0
|
@@ -1622,18 +1622,15 @@
-os.remove(
+path =
self
@@ -1640,16 +1640,121 @@
th(file)
+%0A if os.path.isdir(path):%0A shutil.rmtree(path)%0A else:%0A os.remove(path
)%0A%0A d
|
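The fix makes delete_file handle directories as well as plain files. Self-contained sketch of the new logic, exercised on a throwaway temp directory:

import os
import shutil
import tempfile

def delete_path(path):
    if os.path.isdir(path):
        shutil.rmtree(path)
    else:
        os.remove(path)

d = tempfile.mkdtemp()
f = os.path.join(d, 'x.m')
open(f, 'w').close()
delete_path(f)   # removes the file
delete_path(d)   # removes the now-empty directory
assert not os.path.exists(d)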
62569c736148aae34c9bafbfceb1033b88871daf
|
use https for ipinfo.io
|
check.py
|
check.py
|
import json
import urllib2
import datetime
import mysqlhack #pylint: disable = unused-import
from traceback import format_exc as print_traceback
from com.ziclix.python.sql import zxJDBC
from secrets import *
from helpers import *
# receive info based on the user's IP. information provided by ipinfo.io
def ip_info(player):
if player.isOnline():
return json.load(urllib2.urlopen("http://ipinfo.io%s/json" % str(player.getAddress().getAddress())))
else:
return {}
# receive first join date based on the player data (may not be accurate)
def get_first_join(player):
first_join = int(player.getFirstPlayed())
dt = datetime.datetime.fromtimestamp(first_join/1000.0)
return dt.strftime("%Y-%m-%d %H:%M")
# receive last seen date based on the player data
def get_last_seen(player):
last_seen = int(player.getLastPlayed())
dt = datetime.datetime.fromtimestamp(last_seen/1000.0)
return dt.strftime("%Y-%m-%d %H:%M")
# receive link and email from website
def get_website_data(player):
conn = zxJDBC.connect(mysql_database, mysql_user, mysql_pass, "com.mysql.jdbc.Driver")
curs = conn.cursor()
uuid = str(uid(player)).replace("-", "")
curs.execute("SELECT DISTINCT `id`, `email` FROM users WHERE `uuid` = ? LIMIT 1", (uuid,))
results = curs.fetchall()
curs.close()
conn.close()
return ("http://redstoner.com/users/%s" % results[0][0], results[0][1]) if results else (None, None)
def get_all_names(player):
uuid = str(uid(player)).replace("-", "")
names = json.load(urllib2.urlopen("https://api.mojang.com/user/profiles/%s/names" % uuid))
# [ {"name": "some_name"}, {"name": "other_name"} ]
return ", ".join([name["name"] for name in names])
# combines data
def get_all_data(sender, player):
data = ip_info(player)
msg(sender, "")
try:
msg(sender, "&7 -- Data provided by Redstoner")
msg(sender, "&6> UUID: &e%s" % str(uid(player)))
msg(sender, "&6> First joined: &7(y-m-d h:m:s) &e%s" % get_first_join(player))
msg(sender, "&6> Last seen: &7(y-m-d h:m:s) &e%s" % get_last_seen(player))
website = get_website_data(player)
msg(sender, "&6> Website account: &e%s" % website[0])
msg(sender, "&6> email: &e%s" % website[1])
msg(sender, "&7 -- Data provided by ipinfo.io")
msg(sender, "&6> Country: &e%s" % str(data.get("country")))
msg(sender, "&6> Network: &e%s" % str(data.get("org")))
msg(sender, "&7 -- Data provided by Mojang")
msg(sender, "&6> All ingame names used so far: &e%s" % get_all_names(player))
except:
# can throw exceptions such as timeouts when Mojang API is down
warn(print_traceback())
msg(sender, "&cSorry, something went wrong while fetching data")
@hook.command("check", description="Displays useful stuff about a user", usage="/check <player>")
def on_hook_command(sender, command, label, args):
if sender.hasPermission("utils.check"):
if not checkargs(sender, args, 1, 1):
return True
plugin_header(sender, "Check")
msg(sender, "&7Please notice that the data may not be fully accurate!")
player = server.getOfflinePlayer(args[0]) if len(args) > 0 else None
get_all_data(sender, player)
else:
msg(sender, "&4You don't have the required permissions to execute this command!")
return True
|
Python
| 0
|
@@ -391,16 +391,17 @@
en(%22http
+s
://ipinf
|
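The diff switches the ipinfo.io lookup to HTTPS. Tiny sketch of the resulting URL construction (the placeholder address mimics the leading-slash form produced by Java's InetAddress in the Jython code above):

def ipinfo_url(address):
    return "https://ipinfo.io%s/json" % address  # was http://

assert ipinfo_url("/8.8.8.8") == "https://ipinfo.io/8.8.8.8/json"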
6ef5db75b8b5798e12baec25f5e1d6d26a8c29fa
|
fix conflict
|
blitzortung/dataimport/strike.py
|
blitzortung/dataimport/strike.py
|
# -*- coding: utf8 -*-
"""
Copyright 2014-2016 Andreas Würl
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import logging
import os
import time
from injector import singleton, inject
from .base import HttpFileTransport, BlitzortungDataPath, BlitzortungDataPathGenerator
from .. import builder
logger = logging.getLogger(__name__)
@singleton
class StrikesBlitzortungDataProvider(object):
@inject
def __init__(self, data_transport: HttpFileTransport, data_url: BlitzortungDataPath,
url_path_generator: BlitzortungDataPathGenerator, strike_builder: builder.Strike):
self.data_transport = data_transport
self.data_url = data_url
self.url_path_generator = url_path_generator
self.strike_builder = strike_builder
def get_strikes_since(self, latest_strike=None, region=1):
latest_strike = latest_strike if latest_strike else \
(datetime.datetime.utcnow() - datetime.timedelta(hours=6)).replace(tzinfo=datetime.timezone.utc)
logger.debug("import strikes since %s" % latest_strike)
for url_path in self.url_path_generator.get_paths(latest_strike):
strike_count = 0
start_time = time.time()
<<<<<<< Updated upstream
target_url = self.data_url.build_path(os.path.join('Protected', 'Strikes', url_path), region=region)
=======
target_url = self.data_url.build_path(os.path.join('Protected', 'Strikes_{region}', url_path), region=region)
>>>>>>> Stashed changes
for strike_line in self.data_transport.read_lines(target_url):
try:
strike = self.strike_builder.from_line(strike_line).build()
except builder.BuilderError as e:
logger.warn("%s: %s (%s)" % (e.__class__, e.args, strike_line))
continue
except Exception as e:
logger.error("%s: %s (%s)" % (e.__class__, e.args, strike_line))
raise e
if strike.timestamp.is_valid and strike.timestamp > latest_strike:
strike_count += 1
yield strike
end_time = time.time()
logger.debug("imported %d strikes for region %d in %.2fs from %s",
strike_count,
region, end_time - start_time, target_url)
|
Python
| 0.031708
|
@@ -1739,154 +1739,8 @@
e()%0A
-%3C%3C%3C%3C%3C%3C%3C Updated upstream%0A target_url = self.data_url.build_path(os.path.join('Protected', 'Strikes', url_path), region=region)%0A=======%0A
@@ -1861,32 +1861,8 @@
on)%0A
-%3E%3E%3E%3E%3E%3E%3E Stashed changes%0A
|
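The conflict is resolved in favour of the region-specific 'Strikes_{region}' directory. Sketch of the surviving path layout (the real code passes region=region to data_url.build_path; the log-file name here is a placeholder):

import os

def strikes_path(url_path, region):
    return os.path.join('Protected', 'Strikes_{region}', url_path).format(region=region)

print(strikes_path('sample.log', 1))   # e.g. Protected/Strikes_1/sample.log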
30089f005f62e84367aa6affd5acbd4920f8086a
|
fix installation on Arch
|
cfg/search/__main__.py
|
cfg/search/__main__.py
|
from pathlib import Path
from libdotfiles import HOME_DIR, PKG_DIR, packages, util
FZF_DIR = HOME_DIR / ".fzf"
if util.distro_name() == "arch":
packages.try_install("fzf") # super opener
packages.try_install(
"silver-searcher-git"
) # super grep (vim-fzf dependency)
packages.try_install("ripgrep") # super grep (shell)
elif util.distro_name() == "linuxmint":
packages.try_install("silversearcher-ag")
if not packages.has_installed("ripgrep"):
util.run_verbose(
[
"curl",
"-LO",
"https://github.com/BurntSushi/ripgrep/releases/download/11.0.2/ripgrep_11.0.2_amd64.deb",
]
)
util.run_verbose(["sudo", "dpkg", "-i", "ripgrep_11.0.2_amd64.deb"])
util.run_verbose(
[
"git",
"clone",
"--depth",
"1",
"https://github.com/junegunn/fzf.git",
FZF_DIR,
]
)
util.run_verbose(
[
FZF_DIR / "install",
"--key-bindings",
"--completion",
"--no-update-rc",
]
)
util.create_symlink(PKG_DIR / "agignore", HOME_DIR / ".agignore")
|
Python
| 0
|
@@ -217,33 +217,28 @@
all(
-%0A %22
+%22the_
silver
--
+_
searcher
-git
@@ -237,39 +237,14 @@
cher
--git%22%0A ) # super grep (vim-
+%22) #
fzf
@@ -253,17 +253,16 @@
pendency
-)
%0A pac
|
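Reconstructed from the diff, the Arch branch now installs the correctly named the_silver_searcher package instead of silver-searcher-git (sketch; packages and util come from libdotfiles as in the file above):

if util.distro_name() == "arch":
    packages.try_install("fzf")                  # super opener
    packages.try_install("the_silver_searcher")  # fzf dependency
    packages.try_install("ripgrep")              # super grep (shell)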
a2efba8d942171249b3ed2f28497d84f81cbbb06
|
Add lint test and format generated code (#4114)
|
java-language/google-cloud-language/synth.py
|
java-language/google-cloud-language/synth.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
gapic = gcp.GAPICGenerator()
common_templates = gcp.CommonTemplates()
for version in ["v1", "v1beta2"]:
library = gapic.java_library(
service='language',
version=version,
config_path=f'/google/cloud/language/artman_language_{version}.yaml',
artman_output_name='')
s.copy(library / f'gapic-google-cloud-language-{version}/src', 'src')
s.copy(library / f'grpc-google-cloud-language-{version}/src', f'../../google-api-grpc/grpc-google-cloud-language-{version}/src')
s.copy(library / f'proto-google-cloud-language-{version}/src', f'../../google-api-grpc/proto-google-cloud-language-{version}/src')
|
Python
| 0
|
@@ -696,112 +696,226 @@
gcp%0A
-%0Agapic = gcp.GAPICGenerator()%0Acommon_templates = gcp.CommonTemplates()%0A%0Afor version in %5B%22v1%22, %22v1beta2%22%5D
+import synthtool.languages.java as java%0A%0Agapic = gcp.GAPICGenerator()%0A%0Aservice = 'language'%0Aversions = %5B'v1', 'v1beta2'%5D%0Aconfig_pattern = '/google/cloud/language/artman_language_%7Bversion%7D.yaml'%0A%0Afor version in versions
:%0A
@@ -962,26 +962,23 @@
service=
-'language'
+service
,%0A
@@ -1020,64 +1020,46 @@
ath=
-f'/google/cloud/language/artman_language_%7Bversion%7D.yaml'
+config_pattern.format(version=version)
,%0A
@@ -1126,32 +1126,33 @@
oogle-cloud-
-language
+%7Bservice%7D
-%7Bversion%7D/s
@@ -1200,32 +1200,33 @@
oogle-cloud-
-language
+%7Bservice%7D
-%7Bversion%7D/s
@@ -1268,32 +1268,33 @@
oogle-cloud-
-language
+%7Bservice%7D
-%7Bversion%7D/s
@@ -1336,32 +1336,33 @@
oogle-cloud-
-language
+%7Bservice%7D
-%7Bversion%7D/s
@@ -1409,24 +1409,235 @@
e-cloud-
-language
+%7Bservice%7D-%7Bversion%7D/src')%0A%0A java.format_code('./src')%0A java.format_code(f'../../google-api-grpc/grpc-google-cloud-%7Bservice%7D-%7Bversion%7D/src')%0A java.format_code(f'../../google-api-grpc/proto-google-cloud-%7Bservice%7D
-%7Bversio
|
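Reconstructed from the diff: the service name and config path are parameterised and each generated source tree is run through java.format_code (sketch of the new loop; gapic and s come from the synthtool setup above):

import synthtool.languages.java as java

service = 'language'
versions = ['v1', 'v1beta2']
config_pattern = '/google/cloud/language/artman_language_{version}.yaml'

for version in versions:
    library = gapic.java_library(
        service=service,
        version=version,
        config_path=config_pattern.format(version=version),
        artman_output_name='')

    s.copy(library / f'gapic-google-cloud-{service}-{version}/src', 'src')
    s.copy(library / f'grpc-google-cloud-{service}-{version}/src',
           f'../../google-api-grpc/grpc-google-cloud-{service}-{version}/src')
    s.copy(library / f'proto-google-cloud-{service}-{version}/src',
           f'../../google-api-grpc/proto-google-cloud-{service}-{version}/src')

    java.format_code('./src')
    java.format_code(f'../../google-api-grpc/grpc-google-cloud-{service}-{version}/src')
    java.format_code(f'../../google-api-grpc/proto-google-cloud-{service}-{version}/src')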
d8c1dfc97b525728cd2e70070a578d7a216bc55b
|
remove unnecessary emailbackend override
|
imager/tests.py
|
imager/tests.py
|
import factory
from django.test import Client
from django.test import TestCase
from django.core import mail
from django.test.utils import override_settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.files.uploadedfile import SimpleUploadedFile
from django.contrib.auth.models import User
from imager_images.models import Photo
import os
import glob
THE_FILE = SimpleUploadedFile('test.png', 'a photo')
PASSWORD = 'test_password'
class TestEmailBackend(BaseEmailBackend):
def send_messages(self, messages):
mail.outbox.extend(messages)
return len(messages)
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
django_get_or_create = ('username', )
username = 'test_username'
password = factory.PostGenerationMethodCall('set_password', PASSWORD)
class ImageFactory(factory.django.DjangoModelFactory):
class Meta:
model = Photo
user = UserFactory()
image = THE_FILE
file_size = 1000000
published = 'pvt'
class LoggedOutTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_logged_out_home(self):
response = self.client.get('/')
self.assertTemplateUsed(response, template_name='home.html')
self.assertIn('Welcome to Imgr! Sign up to share images and be awesome!',
response.content)
def test_logged_out_home_links(self):
response = self.client.get('/')
self.assertIn('<a href="/">', response.content)
self.assertIn('<a href="/accounts/login/">', response.content)
self.assertIn('<a href="/accounts/register/">', response.content)
def test_logged_out_login_page(self):
response = self.client.get('/accounts/login/')
self.assertIn('<a href="/accounts/register/">Register</a>!',
response.content)
self.assertIn('<input type="submit" value="Log in" />',
response.content)
def test_logged_out_registration_page(self):
response = self.client.get('/accounts/register/')
self.assertIn('<h1>Register Here</h1>',
response.content)
def test_logged_out_profile(self):
response = self.client.get('/profile/')
self.assertEqual(response.items()[3][1],
'http://testserver/accounts/login/?next=/profile/')
def test_logged_out_stream(self):
response = self.client.get('/stream/')
self.assertEqual(response.items()[3][1],
'http://testserver/accounts/login/?next=/stream/')
def test_logged_out_library(self):
response = self.client.get('/library/')
self.assertEqual(response.items()[3][1],
'http://testserver/accounts/login/?next=/library/')
class LoggedInTestCase(TestCase):
def setUp(self):
self.client = Client()
self.username = 'test_username'
UserFactory(username=self.username)
self.client.login(username=self.username, password=PASSWORD)
self.new_user = 'username'
self.new_password = 'password'
self.new_email = 'user@test.com'
def tearDown(self):
for file in glob.glob("media/imager_user/test*"):
os.remove(file)
def test_login_redirect_success(self):
UserFactory()
response = self.client.post('/accounts/login/',
{'username': 'test_username',
'password': PASSWORD})
self.assertRedirects(response, 'profile/')
    def test_login_redirect_failure(self):
UserFactory()
response = self.client.post('/accounts/login/',
{'username': 'wrong',
'password': 'wrong'})
self.assertIn('Please enter a correct username and password',
response.content)
def test_logged_in_home(self):
response = self.client.get('/')
self.assertTemplateUsed(response, template_name='home.html')
self.assertIn('Welcome back {}! Continue being awesome!'.format(self.username),
response.content)
def test_logged_in_home_links(self):
response = self.client.get('/')
self.assertIn('<a href="/">', response.content)
self.assertIn('<a href="/profile/">', response.content)
self.assertIn('<a href="/stream/">', response.content)
self.assertIn('<a href="/library/">', response.content)
self.assertIn('<a href="/accounts/logout/?next=/">', response.content)
def test_logged_in_home_no_public_photos(self):
ImageFactory()
response = self.client.get('/')
self.assertTemplateUsed(response, template_name='home.html')
self.assertIn('imager_images/Space_Needle002.jpg',
response.content)
def test_logged_in_home_public_photos(self):
ImageFactory(published='pub')
response = self.client.get('/')
self.assertTemplateUsed(response, template_name='home.html')
self.assertIn('imager_images/test',
response.content)
def test_logged_in_profile(self):
response = self.client.get('/profile/')
self.assertTemplateUsed(response, template_name='profile.html')
self.assertIn("{}'s Profile".format(self.username),
response.content)
def test_logged_in_stream(self):
response = self.client.get('/stream/')
self.assertTemplateUsed(response, template_name='stream.html')
self.assertIn("{}'s Stream".format(self.username),
response.content)
def test_logged_in_library(self):
response = self.client.get('/library/')
self.assertTemplateUsed(response, template_name='library.html')
self.assertIn("{}'s Library".format(self.username),
response.content)
@override_settings(EMAIL_BACKEND='imager.tests.TestEmailBackend')
class RegistrationTest(TestCase):
def setUp(self):
self.client = Client()
self.new_user = 'username'
self.new_password = 'password'
self.new_email = 'user@test.com'
def registration(self):
return self.client.post('/accounts/register/',
{'username': self.new_user,
'password1': self.new_password,
'password2': self.new_password,
'email': self.new_email})
def login_after_registration(self):
return self.client.post('/accounts/login/',
{'username': self.new_user,
'password': self.new_password})
def test_registration_success(self):
response = self.registration()
self.assertRedirects(response, '/accounts/register/complete/')
response = self.client.get('/accounts/register/complete/')
self.assertIn('Registration Complete', response.content)
def test_registration_user_exists(self):
self.registration()
self.assertTrue(User.objects.get(username='username'))
def test_registration_in_active(self):
response = self.registration()
response = self.login_after_registration()
self.assertIn('This account is inactive.', response.content)
def test_registration_activate(self):
response = self.registration()
self.client.get(mail.outbox[0].body.lstrip('http://testserver'))
response = self.login_after_registration()
self.assertIn('This account is inactive.', response.content)
|
Python
| 0.000035
|
@@ -464,157 +464,8 @@
'%0A%0A%0A
-class TestEmailBackend(BaseEmailBackend):%0A def send_messages(self, messages):%0A mail.outbox.extend(messages)%0A return len(messages)%0A%0A%0A
clas
@@ -5818,74 +5818,8 @@
)%0A%0A%0A
-@override_settings(EMAIL_BACKEND='imager.tests.TestEmailBackend')%0A
clas
|
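With the custom backend and @override_settings gone, the tests lean on Django's default test behaviour: the test runner swaps in the locmem email backend, so sent mail still lands in django.core.mail.outbox. Minimal sketch (runs under a configured Django test setup; addresses are placeholders):

from django.core import mail
from django.test import TestCase

class RegistrationMailSketch(TestCase):
    def test_outbox_collects_mail(self):
        mail.send_mail('subject', 'body', 'from@example.com', ['to@example.com'])
        self.assertEqual(len(mail.outbox), 1)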
743b2593113a19382e60c3968897c871d98e20a8
|
add alternate shorthand arguments, simplify shell execution
|
alfred.py
|
alfred.py
|
#!/usr/bin/python
import argparse, subprocess, re, sys
VERSION="0.1";
parser = argparse.ArgumentParser(description='''Execute a test''',)
parser.add_argument('--file', type=str, required=True, help="Filename of php file (ex 'script.php')")
parser.add_argument('--query', type=str, required=True, help="Value to replace {query} with")
parser.add_argument('--version', action='version', version=VERSION, help="Return version of script")
args = parser.parse_args()
if not re.search(r"\.php$", args.file, re.IGNORECASE):
print 'Common batman, you need a file extension.'
sys.exit(1)
p = subprocess.Popen('cat %s | sed -e "s/{query}/%s/" > .tmp && $EXT .tmp && rm .tmp;' % (args.file, args.query), shell=True)
if not args.background:
out, err = p.communicate()
if out: print "stdout=", out
if err: print "stderr=", err
|
Python
| 0.000005
|
@@ -153,16 +153,22 @@
rgument(
+'-f',
'--file'
@@ -168,17 +168,16 @@
--file',
-
type=st
@@ -261,16 +261,22 @@
rgument(
+'-q',
'--query
@@ -277,17 +277,16 @@
-query',
-
type=st
@@ -373,17 +373,16 @@
ersion',
-
action=
@@ -597,16 +597,22 @@
it(1)%0A%0Ap
+rocess
= subpr
@@ -730,128 +730,86 @@
True
-)%0Aif not args.background:%0A out, err = p.communicate()%0A%0A if out: print %22stdout=%22, out%0A if err: print %22stderr=%22, err%0A
+, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)%0Aprint process.communicate()%5B0%5D
|
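The diff adds -f/-q shorthands and replaces the background/communicate branch with a single piped call whose merged output is printed. Self-contained sketch (the echo command stands in for the sed/php pipeline; the argument values are illustrative):

import argparse
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', required=True)
parser.add_argument('-q', '--query', required=True)
args = parser.parse_args(['-f', 'script.php', '-q', 'value'])

process = subprocess.Popen('echo %s' % args.query, shell=True,
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
print(process.communicate()[0])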
643b00ee74e8c056bfd0e05856c624800562f969
|
Include tinymt in windows build
|
build.py
|
build.py
|
import sys
import os
import platform
sys.path.insert(0, 'input')
import ninja_syntax
plat = platform.system()
bit64 = platform.architecture()[0] == '64bit'
joinp = os.path.join
if(bit64):
fmoddir = 'x86_64'
else:
fmoddir = 'x86'
cflags = '-Iinc -Wall -Wextra -Werror -g'
ldflags = ''
if plat == 'Windows':
cflags += ' -Iinput/glfw/include -Iinput/libpng/include -Iinput/zlib/include -Iinput/glew/include -Iinput/fmod/win/inc '
ldflags += ' -Linput/glfw/lib-mingw -Linput/glew/lib -Linput/libpng/lib -Linput/fmod/win/lib input/glfw/lib-mingw/glfw3dll.a -lglu32 -lopengl32 -lglew32 -lfmod -lpng input/zlib/lib/zdll.lib -mwindows'
elif plat == 'Darwin':
cflags += ' -mmacosx-version-min=10.6 -isystem input/fmod/inc -Iinput/glfw/include -Iinput/glew/include -Iinput/libpng -Iinput/TinyMT'
ldflags += ' -mmacosx-version-min=10.6 -Linput/fmod/lib -Linput/glfw/build/src -Linput/libpng -Linput/zlib -Linput/glew/lib -framework OpenGL -framework Cocoa -framework IOKit -framework ForceFeedback -framework Carbon -framework CoreAudio -framework CoreVideo -framework AudioUnit -lpng -lfmod -lglfw3 -lGLEW -lz input/TinyMT/tinymt/tinymt64.o'
else:
cflags += ' -isystem input/fmod/api/lowlevel/inc -Iinput/glfw/include -Iinput/TinyMT'
ldflags += ' -Wl,-rpath=.,--enable-new-dtags -Linput/fmod/api/lowlevel/lib/%s -Linput/glfw/build/src -lglfw3 -lGLU -lGL -lGLEW -lm -lfmod -lX11 -lXxf86vm -lpthread -lXrandr -lXi -lpng input/TinyMT/tinymt/tinymt64.o' % fmoddir
def rules(n, cflags, ldflags):
n.variable('cflags', cflags)
n.variable('ldflags', ldflags)
n.newline()
n.rule('cxx',
command='gcc -MMD -MF $out.d $cflags -c $in -o $out',
depfile='$out.d',
description='CXX $out')
n.rule('static',
command='ar rcs $out $in')
if plat == 'Darwin':
n.rule('link',
command='gcc $in $libs $ldflags -o $out && install_name_tool -change @rpath/libfmod.dylib @executable_path/libfmod.dylib $out && install_name_tool -change /usr/lib/libGLEW.1.13.0.dylib @executable_path/libGLEW.1.13.0.dylib $out',
description='LINK $out')
else:
n.rule('link',
command='gcc $in $libs $ldflags -o $out',
description='LINK $out')
n.rule('cp',
command='cp $in $out',
description='COPY $in $out')
n.newline()
def walk_src(n, path, objdir):
obj = []
for (dirpath, dirnames, filenames) in os.walk(path):
for f in filenames:
_, ext = os.path.splitext(f)
if ext == '.c':
s = os.path.relpath(joinp(dirpath, f), path)
o = s.replace('.c', '.o')
obj += n.build(joinp(objdir, o), 'cxx', joinp(path, s))
n.newline()
return obj
def walk_data(n, data_in, data_out, validext=['png','ogg']):
data = []
for (dirpath, dirnames, filenames) in os.walk(data_in):
for f in filenames:
ext = f[-3:]
valid = False
for e in validext:
if ext == e:
valid = True
if not valid:
continue
s = os.path.relpath(joinp(dirpath, f), data_in)
data += n.build(joinp(data_out, s), 'cp', joinp(data_in, s))
n.newline()
return data
def copy_libs(n, inputs, outdir):
targets = []
if plat == 'Windows':
targets += n.build(joinp(outdir, 'fmod.dll'), 'cp', joinp(inputs, 'fmod', 'win', 'lib', 'fmod.dll'))
targets += n.build(joinp(outdir, 'glew32.dll'), 'cp', joinp(inputs, 'glew', 'lib', 'glew32.dll'))
targets += n.build(joinp(outdir, 'glfw3.dll'), 'cp', joinp(inputs, 'glfw', 'lib-mingw', 'glfw3.dll'))
targets += n.build(joinp(outdir, 'libpng3.dll'), 'cp', joinp(inputs, 'libpng', 'bin', 'libpng3.dll'))
targets += n.build(joinp(outdir, 'zlib1.dll'), 'cp', joinp(inputs, 'zlib', 'zlib1.dll'))
elif plat == 'Darwin':
targets += n.build(joinp(outdir, 'libfmod.dylib'), 'cp', joinp(inputs, 'fmod', 'lib', 'libfmod.dylib'))
targets += n.build(joinp(outdir, 'libGLEW.1.13.0.dylib'), 'cp', joinp(inputs, 'glew', 'lib', 'libGLEW.1.13.0.dylib'))
else:
targets += n.build(joinp(outdir, 'libfmod.so.5'), 'cp', joinp(inputs, 'fmod', 'api', 'lowlevel', 'lib', fmoddir, 'libfmod.so'))
n.newline()
return targets
def main():
target = 'whitgl.a'
srcdir = 'src'
examplesrcdir = 'example'
inputdir = 'input'
builddir = 'build'
exampledir = joinp(builddir, 'example')
objdir = joinp(builddir, 'obj')
libdir = joinp(builddir, 'lib')
data_in = joinp(examplesrcdir, 'data')
data_out = joinp(exampledir, 'data')
BUILD_FILENAME = 'build.ninja'
buildfile = open(BUILD_FILENAME, 'w')
n = ninja_syntax.Writer(buildfile)
rules(n, cflags, ldflags)
# Library
obj = walk_src(n, srcdir, objdir)
staticlib = n.build(joinp(libdir, target), 'static', obj)
targets = []
targets += staticlib
n.newline()
# Example
obj = walk_src(n, examplesrcdir, objdir)
targets += n.build(joinp(exampledir, 'example'), 'link', obj+staticlib)
n.newline()
data = walk_data(n, data_in, data_out)
targets += n.build('data', 'phony', data)
n.newline()
targets += copy_libs(n, inputdir, exampledir)
n.build('all', 'phony', targets)
n.default('all')
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -428,16 +428,31 @@
win/inc
+-Iinput/TinyMT
'%0A ldfl
@@ -646,16 +646,47 @@
mwindows
+ input/TinyMT/tinymt/tinymt64.o
'%0Aelif p
|
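
The hunks above add the TinyMT include path and the prebuilt tinymt64.o object to the Windows cflags/ldflags, matching what the Darwin and Linux branches already use. Note that the diff column stores hunk bodies with percent-encoded control characters (%0A for newlines, %22 for double quotes, and so on). A minimal sketch for expanding them when reading these rows, assuming the encoding is plain URL percent-encoding:

from urllib.parse import unquote

def decode_hunk(hunk_body):
    """Expand %XX escapes such as %0A (newline) in a stored hunk body."""
    return unquote(hunk_body)

print(decode_hunk(" input/TinyMT/tinymt/tinymt64.o'%0Aelif p"))
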
2ea2958516b32626a2513ea2ae939915ec8332df
|
Save the metadata at the end of the transfer
|
onitu/api/dealer.py
|
onitu/api/dealer.py
|
import time
from threading import Thread, Event
from multiprocessing.pool import ThreadPool
import zmq
import redis
from logbook import Logger
from .metadata import Metadata
class Dealer(Thread):
"""Receive and reply to orders from the Referee.
All the requests are handled in a thread-pool.
"""
def __init__(self, plug):
super(Dealer, self).__init__()
self.plug = plug
self.name = plug.name
self.session = plug.session
self.logger = Logger("{} - Dealer".format(self.name))
self.context = zmq.Context.instance()
self.in_progress = {}
self.pool = ThreadPool()
def run(self):
self.logger.info("Started")
while True:
try:
_, event = self.session.blpop(
'drivers:{}:events'.format(self.name)
)
driver, fid = event.split(':')
except redis.ConnectionError:
exit()
# Remove the newer events for this file
self.session.lrem('drivers:{}:events'.format(self.name), event)
self.get_file(fid, driver)
def stop_transfer(self, fid):
if fid in self.in_progress:
worker, result = self.in_progress[fid]
worker.stop.set()
result.wait()
return True
return False
def resume_transfers(self):
"""Resume transfers after a crash. Called in
:meth:`.Plug.listen`.
"""
transfers = self.session.smembers(
'drivers:{}:transfers'.format(self.name)
)
for fid in transfers:
transfer = self.session.hgetall(
'drivers:{}:transfers:{}'.format(self.name, fid)
)
if not transfer:
self.session.srem(
'drivers:{}:transfers'.format(self.name),
fid
)
continue
driver = transfer['from']
offset = int(transfer['offset'])
self.get_file(fid, driver, offset=offset, restart=True)
def get_file(self, fid, *args, **kwargs):
self.stop_transfer(fid)
worker = Worker(self, fid, *args, **kwargs)
result = self.pool.apply_async(worker)
self.in_progress[fid] = (worker, result)
class Worker(object):
def __init__(self, dealer, fid, driver, offset=0, restart=False):
super(Worker, self).__init__()
self.stop = Event()
self.dealer = dealer
self.logger = dealer.logger
self.session = dealer.session
self.driver = driver
self.fid = fid
self.offset = offset
self.restart = restart
self.chunk_size = self.dealer.plug.options.get(
'chunk_size', 1 << 20 # 1MB
)
def __call__(self):
self.metadata = Metadata.get_by_id(self.dealer.plug, self.fid)
self.filename = self.metadata.filename
self.start_transfer()
success = self.get_file()
self.end_transfer(success)
def get_dealer(self):
dealer = self.dealer.context.socket(zmq.DEALER)
self.logger.debug("Waiting for ROUTER port for {}", self.driver)
while True:
port = self.session.hget('ports', self.driver)
if port:
self.logger.debug("Got ROUTER port for {}", self.driver)
dealer.connect('tcp://localhost:{}'.format(port))
self.logger.debug("Connected")
return dealer
time.sleep(0.1)
def call(self, handler_name, *args, **kwargs):
"""Call a handler if it has been registered by the driver
"""
handler = self.dealer.plug._handlers.get(handler_name)
if handler:
return handler(*args, **kwargs)
def start_transfer(self):
if self.restart:
self.call('restart_upload', self.metadata, self.offset)
self.logger.info(
"Restarting transfer of '{}' from {}",
self.filename, self.driver
)
else:
self.session.sadd(
'drivers:{}:transfers'.format(self.dealer.name),
self.fid
)
self.session.hmset(
'drivers:{}:transfers:{}'.format(self.dealer.name, self.fid),
{'from': self.driver, 'offset': self.offset}
)
self.call('start_upload', self.metadata)
self.logger.info(
"Starting to get '{}' from {}", self.filename, self.driver
)
def get_file(self):
dealer = self.get_dealer()
while self.offset < self.metadata.size:
if self.stop.is_set():
return False
self.logger.debug("Asking {} for a new chunk", self.driver)
dealer.send_multipart((
self.filename.encode(),
str(self.offset).encode(),
str(self.chunk_size).encode()
))
chunk = dealer.recv()
self.logger.debug(
"Received chunk of size {} from {} for '{}'",
len(chunk), self.driver, self.filename
)
if not chunk or len(chunk) == 0:
return False
self.call('upload_chunk', self.filename, self.offset, chunk)
self.offset = self.session.hincrby(
'drivers:{}:transfers:{}'.format(self.dealer.name, self.fid),
'offset', len(chunk)
)
return True
def end_transfer(self, success):
if self.stop.is_set():
success = False
handler = 'end_upload' if success else 'abort_upload'
self.call(handler, self.metadata)
self.session.delete(
'drivers:{}:transfers:{}'.format(self.dealer.name, self.fid)
)
self.session.srem(
'drivers:{}:transfers'.format(self.dealer.name),
self.fid
)
if success:
self.metadata.uptodate.append(self.dealer.name)
self.logger.info(
"Transfer of '{}' from {} successful",
self.filename, self.driver
)
else:
self.logger.info(
"Aborting transfer of '{}' from {}",
self.filename, self.driver
)
|
Python
| 0.000003
|
@@ -6067,24 +6067,58 @@
dealer.name)
+%0A self.metadata.write()
%0A%0A
|
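
The added self.metadata.write() call makes the Dealer persist the metadata once the transfer finishes, rather than only updating the uptodate list in memory. A minimal sketch of the idea with a stand-in store (this is not Onitu's real Metadata or Redis session; the names here are illustrative):

class FakeMetadata(object):
    """Stand-in for the plug's Metadata: mutate state, then persist with write()."""
    def __init__(self, store, fid):
        self.store = store
        self.fid = fid
        self.uptodate = []

    def write(self):
        self.store[self.fid] = {'uptodate': list(self.uptodate)}

def end_transfer(metadata, driver_name, success):
    if success:
        metadata.uptodate.append(driver_name)
        metadata.write()  # persist the change, as the hunk above does
    return metadata.store

print(end_transfer(FakeMetadata({}, 'fid-1'), 'driver-A', True))
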
031e99c3545de7e4e80ddb62a2603ec46e39ffab
|
Update db.py
|
app/db.py
|
app/db.py
|
# -*- coding: utf-8 -*-
import pymysql.cursors
import logging
import logging.config
logging.config.fileConfig('logging.conf')
db_logger = logging.getLogger()
config = {
'host':'172.17.0.2',
'port':3306,
'user':'root',
'password':'123456',
'db':'test',
'charset':'utf8',
}
connection = pymysql.connect(**config)
def get_token(appid, card, isTrade):
# type_token = token_type(markettype)
try:
connection.ping()
except Exception as e:
connection = pymysql.connect(**config)
finally:
pass
if isTrade is True:
token_type = 'tradetoken'
else:
token_type = 'accesstoken'
try:
with connection.cursor() as cursor:
sql = 'select %s from account_token ' % token_type
sql += 'where card = %s and appid = %s'
cursor.execute(sql, (card, appid))
result = cursor.fetchone()
connection.commit()
if result is not None and result[0] is not None:
db_logger.info('%s SUCCESS' % token_type)
return result[0]
else:
db_logger.info('%s FAIL' % token_type)
return None
except Exception as e:
connection.rollback()
db_logger.error('get FAIL ,following as:%s' % str(e), exc_info = True)
finally:
# pass
connection.close()
def save_update_token(account, appid, market, token, card, isTrade, card_desc = None):
# type_token = token_type(markettype)
try:
connection.ping()
except Exception as e:
connection = pymysql.connect(**config)
finally:
pass
if isTrade is True:
token_type = 'tradetoken'
else:
token_type = 'accesstoken'
try:
with connection.cursor() as cursor:
operate = ''
sql = 'select id from account_token where card = %s and appid = %s'
cursor.execute(sql, (card, appid))
result = cursor.fetchone()
if result is None:
operate = 'insert'
sql1 = 'insert into account_token(account, %s, card, appid, market, card_infor)' % token_type
sql1 += ' values(%s, %s, %s, %s, %s, %s)'
cursor.execute(sql1, (account, token, card, appid, market, card_desc))
else:
operate = 'update'
sql = 'update account_token set %s =' % token_type
sql += '%s where card = %s and appid = %s'
cursor.execute(sql, (token, card, appid))
db_logger.info('%s SUCCESS' % operate)
connection.commit()
return 'success'
except Exception as e:
connection.rollback()
db_logger.error('%s FAIL,following as:%s' % (operate, str(e)), exc_info = True)
return 'failure'
finally:
connection.close()
def delete_tokens(account, appid):
global operate
try:
connection.ping()
except Exception as e:
connection = pymysql.connect(**config)
finally:
pass
try:
with connection.cursor() as cursor:
operate = 'delete'
sql = 'delete from account_token where account = %s and appid = %s'
cursor.execute(sql, (account, appid))
db_logger.info('%s SUCCESS' % operate)
connection.commit()
return 'success'
except Exception as e:
connection.rollback()
db_logger.error('%s FAIL,following as:%s' % (operate, str(e)), exc_info = True)
return 'failure'
finally:
connection.close()
|
Python
| 0.000001
|
@@ -377,48 +377,8 @@
):%0D%0A
-%09# type_token = token_type(markettype)%0D%0A
%09try
@@ -1147,18 +1147,8 @@
y:%0D%0A
-%09%09# pass%0D%0A
%09%09co
@@ -1261,48 +1261,8 @@
):%0D%0A
-%09# type_token = token_type(markettype)%0D%0A
%09try
|
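
A side note on the ping/reconnect blocks above: because connection is assigned inside the except clause, Python compiles it as a local name in each function, so the module-level connection is shadowed instead of being refreshed. A sketch of a safer helper, assuming pymysql (whose Connection.ping(reconnect=True) reconnects in place) and the same config dict; it needs a reachable MySQL server to actually run:

import pymysql

config = {
    'host': '172.17.0.2',
    'port': 3306,
    'user': 'root',
    'password': '123456',
    'db': 'test',
    'charset': 'utf8',
}

connection = pymysql.connect(**config)

def get_connection():
    """Return a usable connection, reconnecting if the server dropped it."""
    global connection                 # rebind the module-level name, not a local
    try:
        connection.ping(reconnect=True)
    except Exception:
        connection = pymysql.connect(**config)
    return connection
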
41124f4e8c071e611d6bf5fbf94a5473b97b8a07
|
Add track.html generator with a bookmark link into the build script
|
build.py
|
build.py
|
# Python 3
import fileinput
import glob
import shutil
import sys
import os
EXEC_UGLIFYJS = "uglifyjs --bare-returns --compress --mangle --mangle-props --reserve-domprops --reserved-file reserve.txt --screw-ie8 --output \"{1}\" \"{0}\""
EXEC_CLOSURECOMPILER = "java -jar lib/closure-compiler-v20160911.jar --js \"{0}\" --js_output_file \"{1}\""
EXEC_YUI = "java -jar lib/yuicompressor-2.4.8.jar --charset utf-8 --line-break 160 --type css -o \"{1}\" \"{0}\""
USE_UGLIFYJS = shutil.which("uglifyjs") != None and not "--closure" in sys.argv and not "--nominify" in sys.argv
USE_JAVA = shutil.which("java") != None and not "--nominify" in sys.argv
def combine_files(input_pattern, output_file):
with fileinput.input(sorted(glob.glob(input_pattern))) as stream:
for line in stream:
output_file.write(line)
def build_tracker():
output_file = "bld/track.js"
output_file_tmp = "bld/track.tmp.js"
input_pattern = "src/tracker/*.js"
with open(output_file, "w") as out:
if not USE_UGLIFYJS:
out.write("(function(){\n")
combine_files(input_pattern, out)
if not USE_UGLIFYJS:
out.write("})()")
if USE_UGLIFYJS:
os.system(EXEC_UGLIFYJS.format(output_file, output_file_tmp))
elif USE_JAVA:
os.system(EXEC_CLOSURECOMPILER.format(output_file, output_file_tmp))
else:
return
with open(output_file, "w") as out:
out.write("javascript:(function(){")
with open(output_file_tmp, "r") as minified:
out.write(minified.read().replace("\n", " ").replace("\r", ""))
out.write("})()")
os.remove(output_file_tmp)
def build_renderer():
output_file = "bld/render.html"
input_html = "src/renderer/index.html"
input_css_pattern = "src/renderer/*.css"
tmp_css_file_combined = "bld/render.tmp.css"
tmp_css_file_minified = "bld/render.min.css"
with open(tmp_css_file_combined, "w") as out:
combine_files(input_css_pattern, out)
if USE_JAVA:
os.system(EXEC_YUI.format(tmp_css_file_combined, tmp_css_file_minified))
else:
shutil.copyfile(tmp_css_file_combined, tmp_css_file_minified)
os.remove(tmp_css_file_combined)
input_js_pattern = "src/renderer/*.js"
tmp_js_file_combined = "bld/render.tmp.js"
tmp_js_file_minified = "bld/render.min.js"
with open(tmp_js_file_combined, "w") as out:
combine_files(input_js_pattern, out)
if USE_UGLIFYJS:
os.system(EXEC_UGLIFYJS.format(tmp_js_file_combined, tmp_js_file_minified))
elif USE_JAVA:
os.system(EXEC_CLOSURECOMPILER.format(tmp_js_file_combined, tmp_js_file_minified))
else:
shutil.copyfile(tmp_js_file_combined, tmp_js_file_minified)
os.remove(tmp_js_file_combined)
tokens = {
"/*{js}*/": tmp_js_file_minified,
"/*{css}*/": tmp_css_file_minified
}
with open(output_file, "w") as out:
with open(input_html, "r") as fin:
for line in fin:
token = None
for token in (token for token in tokens if token in line):
with open(tokens[token], "r") as token_file:
embedded = token_file.read()
out.write(embedded)
os.remove(tokens[token])
if token is None:
out.write(line)
os.makedirs("bld", exist_ok = True)
print("Building tracker...")
build_tracker()
print("Building renderer...")
build_renderer()
|
Python
| 0
|
@@ -840,32 +840,36 @@
):%0A output_file
+_raw
= %22bld/track.js
@@ -870,16 +870,61 @@
ack.js%22%0A
+ output_file_bookmark = %22bld/track.html%22%0A %0A
output
@@ -1009,32 +1009,36 @@
open(output_file
+_raw
, %22w%22) as out:%0A
@@ -1252,32 +1252,36 @@
rmat(output_file
+_raw
, output_file_tm
@@ -1354,16 +1354,20 @@
put_file
+_raw
, output
@@ -1415,32 +1415,36 @@
open(output_file
+_raw
, %22w%22) as out:%0A
@@ -1668,16 +1668,321 @@
le_tmp)%0A
+ %0A with open(output_file_bookmark, %22w%22) as out:%0A out.write(%22%3Ca href='%22)%0A %0A with open(output_file_raw, %22r%22) as raw:%0A out.write(raw.read().replace(%22&%22, %22&%22).replace('%22', %22"%22).replace(%22'%22, %22'%22).replace(%22%3C%22, %22<%22).replace(%22%3E%22, %22>%22))%0A %0A out.write(%22'%3EAdd Bookmark%3C/a%3E%22)%0A
%0A%0Adef bu
|
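
The new track.html step wraps the minified javascript: URL in an <a href='...'> link and escapes &, quotes, < and > by hand. The standard library can do the same escaping in one call; a small sketch (the function name is illustrative, not part of the build script):

import html

def bookmark_link(javascript_url, label="Add Bookmark"):
    # html.escape with quote=True converts &, <, >, " and ' to entities
    return "<a href='{}'>{}</a>".format(html.escape(javascript_url, quote=True), label)

print(bookmark_link("javascript:(function(){alert('hi')})()"))
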
5390e8c9d999845c997da431894e2fa5ae32b6ce
|
Handle KeyboardInterrupt exception during delete confirmation loop
|
clink.py
|
clink.py
|
#! /usr/bin/env python
# Clink - the CLI URL collector
# Author: Nicholay Nascimento
import pyperclip, argparse, os, time
parser = argparse.ArgumentParser(
description="A simple bookmark manager for the command-line",
epilog="")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("-a", "--add", help="add a bookmark", metavar="title", nargs='?',
const=time.strftime("%F", time.gmtime()))
exgroup.add_argument("-c", "--copy", help="copy url to clipboard", metavar="id")
exgroup.add_argument("-d", "--delete", help="delete a bookmark by id", metavar="id")
exgroup.add_argument("-f", "--find", help="search for bookmarks containing substring", metavar="string")
exgroup.add_argument("-l", "--list", action="store_true", help="list all the bookmarks")
args = parser.parse_args()
links_txt_name = "links.txt" #Store bookmarks in this file
def addLink(title, url):
"""Saves a new bookmark"""
bookmarks = linksParser()
bookmark_id = len(bookmarks)
new_entry = {'id' : bookmark_id,
'title' : title,
'url' : url
}
bookmarks.append(new_entry)
writeLinks(bookmarks)
print "added bookmark %s: " % (new_entry['id'] + 1) + url
return
def writeLinks(bookmarks):
"""Handles the actual writing of the bookmarks to the text file"""
nl = "\n"
string = ""
last = len(bookmarks)
for i in range(last):
if i + 1 == last: nl = ""
string += bookmarks[i]['title'] + "\n"
string += bookmarks[i]['url'] + nl
string += nl
links_txt = open(links_txt_name, 'w')
links_txt.write(string)
links_txt.close()
return
def copyLink(bookmark_id):
"""Copies a bookmark URL to the clipboard"""
bookmarks = linksParser()
bookmark_id = int(bookmark_id)-1
try: url = bookmarks[bookmark_id]['url']
except IndexError:
print "bookmark doesn't exist"
quit()
pyperclip.copy(url)
print "copied to clipboard: " + url
return
def delLink(bookmark_id):
"""Deletes a bookmark and saves the changes to file"""
bookmarks = linksParser()
bookmark_id = int(bookmark_id)-1
choice = ""
try: title = bookmarks[bookmark_id]['title']
except IndexError:
print "bookmark doesn't exist"
quit()
listLinks([bookmarks[bookmark_id]])
    while choice.lower() not in ("y", "n"):
choice = raw_input("really delete this bookmark? y/n ")
if choice.lower() == 'y':
del bookmarks[bookmark_id]
writeLinks(bookmarks)
print "deleted '%s'" % title
break
elif choice.lower() == 'n':
print("abort")
break
return
def findLink(search_string):
"""Searches for bookmarks containing search_string, returns matches"""
bookmarks = linksParser()
matches = []
result = -1
for i in range(len(bookmarks)):
already_found_in_vals = False #Reset flag
for key in bookmarks[i]:
if not key == 'id': #Skip searching the id
string = bookmarks[i][key].lower()
result = string.find(search_string.lower())
if result >= 0 and not already_found_in_vals:
matches.append(bookmarks[i])
already_found_in_vals = True
if matches:
return listLinks(matches)
else:
return None
def listLinks(bookmarks):
"""Prints the bookmarks and returns total amount of them"""
if bookmarks:
total_bookmarks = len(bookmarks)
for i in range(total_bookmarks):
print "%s: " % bookmarks[i]['id'] + bookmarks[i]['title']
print " " + bookmarks[i]['url'] + "\n"
if total_bookmarks > 1: s = "s" #This one's for the Grammar Nazis
else: s = ""
return "%s bookmark%s shown" % (total_bookmarks, s)
else:
return None
def get_raw_links(filename):
raw_links = []
links_txt = open(filename, 'r')
templist = links_txt.read().splitlines()
links_txt.close()
for i in range(len(templist)):
if templist[i]: #Grow a new list, skipping
raw_links.append(templist[i]) #those pesky empty strings
return raw_links
def linksParser():
"""Prepares bookmarks data for use by other functions"""
raw_links = get_raw_links(links_txt_name)
bookmarks = []
tempdict = {}
count = 1
bookmark_id = 1
for i in range(len(raw_links)):
if count == 1:
tempdict['id'] = bookmark_id
tempdict['title'] = raw_links[i]
bookmark_id += 1
else:
tempdict['url'] = raw_links[i]
bookmarks.append(tempdict)
tempdict = {}
count = 0
count += 1
return bookmarks
if not os.path.exists(links_txt_name):
try:
links_txt = open(links_txt_name, 'w') #Create the file if it doesn't exist
except IOError:
print "error: couldn't create or access '%s'" % links_txt_name
quit()
finally:
links_txt.close()
if args.add:
turl = pyperclip.paste()
if turl: addLink(args.add, turl)
else: print "the clipboard is empty"
elif args.copy:
if args.copy.isdigit(): copyLink(args.copy)
else: print "not a valid id"
elif args.delete:
if args.delete.isdigit(): delLink(args.delete)
else: print "not a valid id"
elif args.find:
listresults = findLink(args.find)
if listresults: print listresults
else: print "no matches found"
elif args.list:
listresults = listLinks(linksParser())
if listresults: print listresults
else: print "you don't have any bookmarks"
quit()
|
Python
| 0
|
@@ -2483,16 +2483,21 @@
%0A
+ try:
choice
@@ -2545,24 +2545,76 @@
y/n %22)%0A
-
+%09except KeyboardInterrupt:%0A%09 print %22%5Cnabort%22%0A%09 break%0A%09
if choic
|
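
The hunk wraps the confirmation prompt in try/except KeyboardInterrupt so that Ctrl-C during the delete loop aborts cleanly instead of printing a traceback. A Python 3 sketch of the same pattern (input replaces the script's Python 2 raw_input):

def confirm_delete(prompt="really delete this bookmark? y/n "):
    while True:
        try:
            choice = input(prompt).strip().lower()
        except KeyboardInterrupt:
            print("\nabort")
            return False
        if choice == "y":
            return True
        if choice == "n":
            print("abort")
            return False
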
1a28b026ae586bfac43c55a6d92d801c0bd0d139
|
use name as name
|
adhocracy4/filters/widgets.py
|
adhocracy4/filters/widgets.py
|
from itertools import chain
import django_filters
from django.db.models.fields import BLANK_CHOICE_DASH
from django.forms import TextInput
from django.forms.widgets import flatatt
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
class DropdownLinkWidget(django_filters.widgets.LinkWidget):
label = None
right = False
template = 'adhocracy4/widgets/dropdown_link.html'
def get_option_label(self, value, choices=()):
option_label = BLANK_CHOICE_DASH[0][1]
for v, label in chain(self.choices, choices):
if str(v) == value:
option_label = label
break
if option_label == BLANK_CHOICE_DASH[0][1]:
option_label = _('All')
return option_label
def render(self, name, value, attrs=None, choices=()):
all_choices = list(chain(self.choices, choices))
if len(all_choices) <= 1:
return ''
if value is None:
value = all_choices[0][0]
_id = attrs.pop('id')
final_attrs = flatatt(self.build_attrs(attrs))
value_label = self.get_option_label(value, choices=choices)
options = super().render(name, value, attrs={
'class': 'dropdown-menu',
'aria-labelledby': _id,
}, choices=choices)
return render_to_string(self.template, {
'options': options,
'id': _id,
'attrs': final_attrs,
'value_label': value_label,
'label': self.label,
'right': self.right,
})
class TextInputWidget(TextInput):
label = None
right = False
template = 'adhocracy4/widgets/text_input.html'
def value_from_datadict(self, data, files, name):
value = super().value_from_datadict(data, files, name)
self.data = data
return value
def render(self, name, value, attrs=None):
if not hasattr(self, 'data'):
self.data = {}
if value is None:
value = ''
_id = attrs.pop('id')
return render_to_string(self.template, {
'id': _id,
'value_label': value,
'name': self.label.lower(),
'label': self.label,
'right': self.right,
'url_par': self.data
})
|
Python
| 0.999127
|
@@ -2210,26 +2210,12 @@
e':
-self.label.lower()
+name
,%0A
|
a216e92177f6daf7857e600e3fac9a4775ab0a21
|
Make sure that profile actually has an email, before trying to associate users by it
|
linked_accounts/views.py
|
linked_accounts/views.py
|
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.utils import simplejson as json
from django.utils.crypto import salted_hmac
from django.views.decorators.csrf import csrf_exempt
import django.contrib.auth as auth
from django.contrib.auth.models import User
from linked_accounts.forms import RegisterForm
from linked_accounts.handlers import AuthHandler
from linked_accounts.models import LinkedAccount
from linked_accounts.signals import login_successful
from oauth_flow.handlers import get_handler
LINKED_ACCOUNTS_ID_SESSION = getattr(
settings,
'LINKED_ACCOUNTS_ID_SESSION',
'_linked_acccount_id'
)
LINKED_ACCOUNTS_NEXT_KEY = getattr(
settings,
'LINKED_ACCOUNTS_NEXT_KEY',
'oauth_next'
)
LINKED_ACCOUNTS_ALLOW_REGISTRATION = getattr(
settings,
'LINKED_ACCOUNTS_ALLOW_REGISTRATION',
True
)
LINKED_ACCOUNTS_AUTO_REGISTRATION = getattr(
settings,
'LINKED_ACCOUNTS_AUTO_REGISTRATION',
True
)
LINKED_ACCOUNTS_EMAIL_ASSOCIATION = getattr(
settings,
'LINKED_ACCOUNTS_EMAIL_ASSOCIATION',
False
)
LINKED_ACCOUNTS_ALLOW_LOGIN = getattr(
settings,
'LINKED_ACCOUNTS_ALLOW_LOGIN',
True
)
def permute_name(name_string, num):
num_str=str(num)
max_len=29-len(num_str)
return ''.join([name_string[0:max_len], '_', num_str])
class AuthCallback(object):
def __call__(self, request, access, token, api=False):
self.access = access
self.request = request
self.token = token
self.api = api
profile = None
user = None
try:
user = request.user
except User.DoesNotExist:
pass
if user and user.is_authenticated():
profile = self.link_profile_to_user()
else:
profile = auth.authenticate(service=access.SERVICE, token=token)
if profile.user:
if LINKED_ACCOUNTS_ALLOW_LOGIN:
self.login(profile)
elif LINKED_ACCOUNTS_ALLOW_REGISTRATION:
return self.create_user(profile)
else:
return self.registration_closed()
return self.success(profile)
def success(self, profile):
login_successful.send(sender=LinkedAccount, profile=profile)
if self.api:
result = {}
if profile and profile.user:
user_id = profile.user.id
result['user_id'] = user_id
signature = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
result['hash'] = signature
return HttpResponse(
json.dumps(result),
mimetype="application/json"
)
return redirect(self.get_next_url())
def get_next_url(self):
return self.request.session.get(
LINKED_ACCOUNTS_NEXT_KEY,
settings.LOGIN_REDIRECT_URL
)
def create_user(self, profile):
if LINKED_ACCOUNTS_EMAIL_ASSOCIATION:
users = list(User.objects.filter(email=profile.email))
if users and len(users) == 1:
profile.user = users[0]
profile.save()
if LINKED_ACCOUNTS_ALLOW_LOGIN:
self.login(profile)
return self.success(profile)
if LINKED_ACCOUNTS_AUTO_REGISTRATION:
#no match, create a new user - but there may be duplicate user names.
nickname = profile.username
username=nickname
user=None
try:
i=0
while True:
User.objects.get(username=username)
username=permute_name(nickname, i)
i+=1
except User.DoesNotExist:
#available name!
user=User.objects.create_user(username, profile.email or '')
profile.user = user
profile.save()
if LINKED_ACCOUNTS_ALLOW_LOGIN:
self.login(profile)
return self.success(profile)
else:
self.request.session[LINKED_ACCOUNTS_ID_SESSION] = profile.id
return redirect(
reverse('linked_accounts_register') + "?next=%s" % self.get_next_url()
)
def login(self, profile):
profile.user.backend = "linked_accounts.backends.LinkedAccountsBackend"
auth.login(self.request, profile.user)
def registration_closed(self):
return redirect('linked_accounts_registration_closed')
def link_profile_to_user(self):
profile = AuthHandler.get_handler(self.access.SERVICE).get_profile(self.token)
if not profile.user:
profile.user = self.request.user
profile.save()
return profile
def authentication_complete(request, access, token):
callback = AuthCallback()
return callback(request, access, token)
def login(request, service=None, template_name="linked_accounts/login.html"):
next_url = request.REQUEST.get('next', settings.LOGIN_REDIRECT_URL)
request.session[LINKED_ACCOUNTS_NEXT_KEY] = next_url
if service:
oauth_handler = get_handler(
service,
request=request,
redirect=reverse('linked_accounts_complete', args=[service])
)
return redirect(oauth_handler.auth_url())
return render(request, template_name, {
'next': next_url,
'service': service,
})
@csrf_exempt
def auth_complete(request, service=None):
oauth_handler = get_handler(
service,
request=request,
redirect=reverse('linked_accounts_complete', args=[service])
)
api = False
if request.method == 'POST':
data = json.loads(request.raw_post_data)
access_token = data['token']
api = True
else:
access_token = oauth_handler.auth_complete()
callback = AuthCallback()
return callback(request, oauth_handler, access_token, api=api)
def registration_closed(request, template_name="linked_accounts/registration_closed.html"):
return render(request, template_name)
def register(request, form_class=RegisterForm, template_name="linked_accounts/registration.html"):
if not LINKED_ACCOUNTS_ALLOW_REGISTRATION:
return redirect('linked_accounts_registration_closed')
next_url = request.REQUEST.get('next', settings.LOGIN_REDIRECT_URL)
try:
profile_id = request.session[LINKED_ACCOUNTS_ID_SESSION]
profile = LinkedAccount.objects.get(id=profile_id)
except (KeyError, LinkedAccount.DoesNotExist):
return redirect(next_url)
initial_data = {
'username': profile.username
}
email = profile.email
if email:
initial_data['email'] = email
if request.method == "POST":
form = form_class(request.POST)
if form.is_valid():
user = form.save(profile)
user.backend = "linked_accounts.backends.LinkedAccountsBackend"
auth.login(request, user)
return redirect(next_url)
else:
form = form_class(initial=initial_data)
return render(
request,
template_name,
{'form': form, 'profile': profile, 'next': next_url}
)
|
Python
| 0
|
@@ -3092,24 +3092,42 @@
_ASSOCIATION
+ and profile.email
:%0A
|
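
The one-line change guards the e-mail association path with "and profile.email", so a profile with an empty e-mail can no longer be matched against local users that also have a blank address. A stdlib-only sketch of the same check (plain objects stand in for the Django queryset):

from collections import namedtuple

User = namedtuple("User", "username email")

def user_for_email(profile_email, users):
    """Associate only for a non-empty e-mail that matches exactly one user."""
    if not profile_email:                      # the new guard
        return None
    matches = [u for u in users if u.email == profile_email]
    return matches[0] if len(matches) == 1 else None

users = [User("alice", "alice@example.com"), User("bob", "")]
print(user_for_email("", users))               # None, instead of matching bob
print(user_for_email("alice@example.com", users))
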
f74aaf00050d0bbe14fe921efefcd04bd17f4174
|
add intervals
|
bam2vcf_GATK_wolves.py
|
bam2vcf_GATK_wolves.py
|
#!/usr/bin/env python
#import argparse
#from glob import glob
#-s test_samples.txt
#-b /mnt/lfs2/hend6746/devils/reference/sarHar1.fa
#-i /mnt/home/hend6746/scratch/wolves/bed_files/baits_canfam3.1_sorted_merged.bed
from os.path import join as jp
from os.path import abspath
import os
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-s', "--samples", help=" samples.txt file with sample ID.", required=True)
parser.add_argument('-b', "--bwaindex", help="Path to bwa index file.", required=True)
parser.add_argument('-i', "--intervals", help="file to chromosome intervals", required=True)
args = parser.parse_args()
VERBOSE=False
#Function definitions:
def log(txt, out):
if VERBOSE:
print(txt)
out.write(txt+'\n')
out.flush()
## Read in samples and put them in a list:
samples = []
for l in open(args.samples):
if len(l) > 1:
samples.append(l.split('/')[-1].replace('.bam', '').strip())
print samples
# Setup folders and paths variables:
bamFolder = abspath('02-Mapped')
variantFolder = abspath('03-Calls')
PBS_scripts = abspath('GATK_PBS_scripts')
bwaIndex = abspath(args.bwaindex)
gatkCall = 'java -jar /opt/modules/biology/gatk/3.5/bin/GenomeAnalysisTK.jar -R %s' % bwaIndex
intervalPath = abspath(args.intervals)
os.system('mkdir -p %s' % bamFolder)
os.system('mkdir -p %s' % variantFolder)
os.system('mkdir -p %s' % PBS_scripts)
##### Run pipeline ###
for sample in samples:
print "Processing", sample, "....."
# Set up files:
logFile =''.join([jp(PBS_scripts, sample), '_GATK.log'])
logCommands = open(''.join([jp(PBS_scripts, sample), '_GATK_commands.sh']), 'w')
#Setup for qsub
log('#!/bin/bash', logCommands)
log('#PBS -N %s_GATK' % sample, logCommands)
log('#PBS -j oe', logCommands)
log('#PBS -o %s_GATK_job.log' % sample, logCommands)
log('#PBS -m abe', logCommands)
log('#PBS -M shendri4@gmail.com', logCommands)
log('#PBS -q reg', logCommands)
log('#PBS -l mem=100gb', logCommands)
log(". /usr/modules/init/bash", logCommands)
log("module load python/2.7.10", logCommands)
log("module load grc", logCommands)
###########Per-Sample Variant Calling
#HaplotypeCaller on each sample BAM file
#(if a sample's data is spread over more than one BAM, then pass them all in together) to create single-sample gVCFs
#not recommended for somatic (cancer) variant discovery. For that purpose, use MuTect2 instead
cmd = ' '.join([gatkCall, ' -T HaplotypeCaller ', ' -I ' + jp(bamFolder, sample) + '.bam',
' --emitRefConfidence GVCF', ' -o ' + jp(variantFolder, sample) + '.raw.snps.indels.g.vcf',
' --intervals ' intervalPath, ' --interval_padding 1000', ' --interval_set_rule INTERSECTION'
'>>', logFile, '2>&1'])
log(cmd, logCommands)
logCommands.close()
|
Python
| 0.999983
|
@@ -2695,17 +2695,18 @@
rvalPath
-,
+ +
' --int
@@ -2724,17 +2724,18 @@
ng 1000'
-,
+ +
' --int
@@ -2762,16 +2762,17 @@
SECTION'
+,
%0A '%3E%3E
|
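
The hunks repair a missing "+" between ' --intervals ' and intervalPath (and add one after ' --interval_padding 1000'); a string literal followed directly by a variable is a SyntaxError, since implicit concatenation only joins adjacent literals. A sketch of a less error-prone way to assemble those arguments (the helper name and defaults are illustrative):

def interval_args(interval_path, padding=1000, set_rule="INTERSECTION"):
    parts = [
        "--intervals", interval_path,
        "--interval_padding", str(padding),
        "--interval_set_rule", set_rule,
    ]
    return " ".join(parts)

print(interval_args("baits_canfam3.1_sorted_merged.bed"))
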
1d3651e2556a2dce8a2482ea1e3b56800fa81029
|
Use ChoiceSet.values() for access to raw values
|
netbox/utilities/utils.py
|
netbox/utilities/utils.py
|
import datetime
import json
from collections import OrderedDict
from django.core.serializers import serialize
from django.db.models import Count, OuterRef, Subquery
from jinja2 import Environment
from dcim.choices import CableLengthUnitChoices
from extras.utils import is_taggable
def csv_format(data):
"""
Encapsulate any data which contains a comma within double quotes.
"""
csv = []
for value in data:
# Represent None or False with empty string
if value is None or value is False:
csv.append('')
continue
# Convert dates to ISO format
if isinstance(value, (datetime.date, datetime.datetime)):
value = value.isoformat()
# Force conversion to string first so we can check for any commas
if not isinstance(value, str):
value = '{}'.format(value)
# Double-quote the value if it contains a comma
if ',' in value or '\n' in value:
csv.append('"{}"'.format(value))
else:
csv.append('{}'.format(value))
return ','.join(csv)
def foreground_color(bg_color):
"""
Return the ideal foreground color (black or white) for a given background color in hexadecimal RGB format.
"""
bg_color = bg_color.strip('#')
r, g, b = [int(bg_color[c:c + 2], 16) for c in (0, 2, 4)]
if r * 0.299 + g * 0.587 + b * 0.114 > 186:
return '000000'
else:
return 'ffffff'
def dynamic_import(name):
"""
Dynamically import a class from an absolute path string
"""
components = name.split('.')
mod = __import__(components[0])
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def model_names_to_filter_dict(names):
"""
Accept a list of content types in the format ['<app>.<model>', '<app>.<model>', ...] and return a dictionary
suitable for QuerySet filtering.
"""
# TODO: This should match on the app_label as well as the model name to avoid potential duplicate names
return {
'model__in': [model.split('.')[1] for model in names],
}
def get_subquery(model, field):
"""
Return a Subquery suitable for annotating a child object count.
"""
subquery = Subquery(
model.objects.filter(
**{field: OuterRef('pk')}
).order_by().values(
field
).annotate(
c=Count('*')
).values('c')
)
return subquery
def serialize_object(obj, extra=None):
"""
Return a generic JSON representation of an object using Django's built-in serializer. (This is used for things like
change logging, not the REST API.) Optionally include a dictionary to supplement the object data.
"""
json_str = serialize('json', [obj])
data = json.loads(json_str)[0]['fields']
# Include any custom fields
if hasattr(obj, 'get_custom_fields'):
data['custom_fields'] = {
field: str(value) for field, value in obj.cf.items()
}
# Include any tags
if is_taggable(obj):
data['tags'] = [tag.name for tag in obj.tags.all()]
# Append any extra data
if extra is not None:
data.update(extra)
return data
def dict_to_filter_params(d, prefix=''):
"""
Translate a dictionary of attributes to a nested set of parameters suitable for QuerySet filtering. For example:
{
"name": "Foo",
"rack": {
"facility_id": "R101"
}
}
Becomes:
{
"name": "Foo",
"rack__facility_id": "R101"
}
And can be employed as filter parameters:
Device.objects.filter(**dict_to_filter(attrs_dict))
"""
params = {}
for key, val in d.items():
k = prefix + key
if isinstance(val, dict):
params.update(dict_to_filter_params(val, k + '__'))
else:
params[k] = val
return params
def deepmerge(original, new):
"""
Deep merge two dictionaries (new into original) and return a new dict
"""
merged = OrderedDict(original)
for key, val in new.items():
if key in original and isinstance(original[key], dict) and isinstance(val, dict):
merged[key] = deepmerge(original[key], val)
else:
merged[key] = val
return merged
def to_meters(length, unit):
"""
Convert the given length to meters.
"""
length = int(length)
if length < 0:
raise ValueError("Length must be a positive integer")
valid_units = [u[0] for u in CableLengthUnitChoices]
if unit not in valid_units:
raise ValueError(
"Unknown unit {}. Must be one of the following: {}".format(unit, ', '.join(valid_units))
)
if unit == CableLengthUnitChoices.UNIT_METER:
return length
if unit == CableLengthUnitChoices.UNIT_CENTIMETER:
return length / 100
if unit == CableLengthUnitChoices.UNIT_FOOT:
return length * 0.3048
if unit == CableLengthUnitChoices.UNIT_INCH:
return length * 0.3048 * 12
raise ValueError("Unknown unit {}. Must be 'm', 'cm', 'ft', or 'in'.".format(unit))
def render_jinja2(template_code, context):
"""
Render a Jinja2 template with the provided context. Return the rendered content.
"""
return Environment().from_string(source=template_code).render(**context)
def prepare_cloned_fields(instance):
"""
Compile an object's `clone_fields` list into a string of URL query parameters. Tags are automatically cloned where
applicable.
"""
params = {}
for field_name in getattr(instance, 'clone_fields', []):
field = instance._meta.get_field(field_name)
field_value = field.value_from_object(instance)
# Swap out False with URL-friendly value
if field_value is False:
field_value = ''
# Omit empty values
if field_value not in (None, ''):
params[field_name] = field_value
# Copy tags
if is_taggable(instance):
params['tags'] = ','.join([t.name for t in instance.tags.all()])
# Concatenate parameters into a URL query string
param_string = '&'.join(
['{}={}'.format(k, v) for k, v in params.items()]
)
return param_string
|
Python
| 0
|
@@ -4563,23 +4563,8 @@
ts =
- %5Bu%5B0%5D for u in
Cab
@@ -4582,17 +4582,25 @@
tChoices
-%5D
+.values()
%0A if
|
9c720cf806364e4eaf40da24691bc224a9288485
|
Clean combo script
|
combo.py
|
combo.py
|
from flask import Flask
from libmproxy import filt
import proxyswitch as pswitch
# app = Flask('proxapp')
# @app.route('/')
# def hello_world():
# return 'Hello World!'
# @app.route('/foo')
# def foo():
# return 'foo'
def enable():
settings = ('127.0.0.1', '8080')
pswitch.enable(*settings)
def disable():
pswitch.disable()
def start(context, argv):
context.log(">>> start")
enable()
context.filter = filt.parse("~d ustwo.com")
# context.app_registry.add(app, "proxapp", 80)
def request(context, flow):
if flow.match(context.filter):
context.log(">>> request")
def done(context):
disable()
context.log(">>> done")
|
Python
| 0
|
@@ -1,28 +1,4 @@
-from flask import Flask%0A
from
@@ -55,156 +55,8 @@
ch%0A%0A
-# app = Flask('proxapp')%0A%0A# @app.route('/')%0A# def hello_world():%0A# return 'Hello World!'%0A%0A# @app.route('/foo')%0A# def foo():%0A# return 'foo'%0A%0A
def
@@ -291,59 +291,8 @@
om%22)
-%0A # context.app_registry.add(app, %22proxapp%22, 80)
%0A%0Ade
|
7204f13be70f92749ca88497d11a51fedcd65553
|
Move back to `default_player_roles`.
|
meltingpot/python/utils/substrates/substrate_factory.py
|
meltingpot/python/utils/substrates/substrate_factory.py
|
# Copyright 2022 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Substrate factory."""
from collections.abc import Collection, Mapping, Sequence, Set
from typing import Callable
import dm_env
from meltingpot.python.utils.substrates import builder
from meltingpot.python.utils.substrates import substrate
class SubstrateFactory:
"""Factory for building specific substrates."""
def __init__(
self,
*,
lab2d_settings_builder: Callable[[Sequence[str]], builder.Settings],
individual_observations: Collection[str],
global_observations: Collection[str],
action_table: Sequence[Mapping[str, int]],
timestep_spec: dm_env.TimeStep,
action_spec: dm_env.specs.DiscreteArray,
valid_roles: Collection[str],
scenario_player_roles: Collection[Sequence[str]],
) -> None:
"""Initializes the factory.
Args:
lab2d_settings_builder: callable that takes a sequence of player roles and
returns the lab2d settings for the substrate.
individual_observations: names of the player-specific observations to make
available to each player.
global_observations: names of the dmlab2d observations to make available
to all players.
action_table: the possible actions. action_table[i] defines the dmlab2d
action that will be forwarded to the wrapped dmlab2d environment for the
discrete Melting Pot action i.
timestep_spec: spec of timestep sent to a single player.
action_spec: spec of action expected from a single player.
valid_roles: player roles the substrate supports.
scenario_player_roles: player roles vectors that are used in scenarios
involving this substrate.
"""
self._lab2d_settings_builder = lab2d_settings_builder
self._individual_observations = frozenset(individual_observations)
self._global_observations = frozenset(global_observations)
self._action_table = tuple(dict(row) for row in action_table)
self._timestep_spec = timestep_spec
self._action_spec = action_spec
self._valid_roles = frozenset(valid_roles)
self._scenario_player_roles = frozenset({
tuple(roles) for roles in scenario_player_roles
})
def valid_roles(self) -> Set[str]:
"""Returns the roles the substrate supports."""
return self._valid_roles
def scenario_player_roles(self) -> Set[Sequence[str]]:
"""Returns the player roles used by scenarios."""
return self._scenario_player_roles
def timestep_spec(self) -> dm_env.TimeStep:
"""Returns spec of timestep sent to a single player."""
return self._timestep_spec
def action_spec(self) -> dm_env.specs.DiscreteArray:
"""Returns spec of action expected from a single player."""
return self._action_spec
def build(self, roles: Sequence[str]) -> substrate.Substrate:
"""Builds the substrate.
Args:
roles: the role each player will take.
Returns:
The constructed substrate.
"""
return substrate.build_substrate(
lab2d_settings=self._lab2d_settings_builder(roles),
individual_observations=self._individual_observations,
global_observations=self._global_observations,
action_table=self._action_table)
|
Python
| 0.000002
|
@@ -1286,32 +1286,31 @@
str%5D,%0A
-scenario
+default
_player_role
@@ -1312,27 +1312,16 @@
_roles:
-Collection%5B
Sequence
@@ -1325,17 +1325,16 @@
nce%5Bstr%5D
-%5D
,%0A ) -%3E
@@ -2124,24 +2124,23 @@
.%0A
-scenario
+default
_player_
@@ -2141,24 +2141,36 @@
layer_roles:
+ the default
player role
@@ -2181,69 +2181,49 @@
ctor
-s
that
-are used in scenarios%0A involving this substrate
+should be used%0A for training
.%0A
@@ -2615,32 +2615,31 @@
)%0A self._
-scenario
+default
_player_role
@@ -2646,62 +2646,21 @@
s =
-frozenset(%7B%0A tuple(roles) for roles in scenario
+tuple(default
_pla
@@ -2672,14 +2672,8 @@
oles
-%0A %7D
)%0A%0A
@@ -2796,24 +2796,23 @@
%0A%0A def
-scenario
+default
_player_
@@ -2826,20 +2826,16 @@
elf) -%3E
-Set%5B
Sequence
@@ -2839,17 +2839,16 @@
nce%5Bstr%5D
-%5D
:%0A %22%22
@@ -2912,24 +2912,23 @@
n self._
-scenario
+default
_player_
|
e9151b46cdfd120daf5ecae00a551f23e85373da
|
Update comments to reflect config migration
|
beetsplug/mpdupdate.py
|
beetsplug/mpdupdate.py
|
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Updates an MPD index whenever the library is changed.
Put something like the following in your .beetsconfig to configure:
[mpdupdate]
host = localhost
port = 6600
password = seekrit
"""
from __future__ import print_function
from beets.plugins import BeetsPlugin
import socket
from beets import config
# Global variable so that mpdupdate can detect database changes and run only
# once before beets exits.
database_changed = False
# No need to introduce a dependency on an MPD library for such a
# simple use case. Here's a simple socket abstraction to make things
# easier.
class BufferedSocket(object):
"""Socket abstraction that allows reading by line."""
def __init__(self, sep='\n'):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.buf = ''
self.sep = sep
def connect(self, host, port):
self.sock.connect((host, port))
def readline(self):
while self.sep not in self.buf:
data = self.sock.recv(1024)
if not data:
break
self.buf += data
if '\n' in self.buf:
res, self.buf = self.buf.split(self.sep, 1)
return res + self.sep
else:
return ''
def send(self, data):
self.sock.send(data)
def close(self):
self.sock.close()
def update_mpd(host='localhost', port=6600, password=None):
"""Sends the "update" command to the MPD server indicated,
possibly authenticating with a password first.
"""
print('Updating MPD database...')
s = BufferedSocket()
s.connect(host, port)
resp = s.readline()
if 'OK MPD' not in resp:
print('MPD connection failed:', repr(resp))
return
if password:
s.send('password "%s"\n' % password)
resp = s.readline()
if 'OK' not in resp:
print('Authentication failed:', repr(resp))
s.send('close\n')
s.close()
return
s.send('update\n')
resp = s.readline()
if 'updating_db' not in resp:
print('Update failed:', repr(resp))
s.send('close\n')
s.close()
print('... updated.')
class MPDUpdatePlugin(BeetsPlugin):
def __init__(self):
super(MPDUpdatePlugin, self).__init__()
self.config.add({
'host': u'localhost',
'port': 6600,
'password': u'',
})
@MPDUpdatePlugin.listen('database_change')
def handle_change(lib=None):
global database_changed
database_changed = True
@MPDUpdatePlugin.listen('cli_exit')
def update(lib=None):
if database_changed:
update_mpd(
config['mpdupdate']['host'].get(unicode),
config['mpdupdate']['port'].get(int),
config['mpdupdate']['password'].get(unicode),
)
|
Python
| 0
|
@@ -743,20 +743,19 @@
our
-.beets
config
+.yaml
to
@@ -769,17 +769,16 @@
re:%0A
-%5B
mpdupdat
@@ -782,18 +782,22 @@
date
-%5D%0A
+:%0A
host
= l
@@ -792,18 +792,17 @@
host
- =
+:
localho
@@ -808,16 +808,20 @@
ost%0A
+
port
= 6
@@ -820,16 +820,19 @@
port
- =
+:
6600%0A
+
@@ -839,18 +839,17 @@
password
- =
+:
seekrit
|
d69346bf912f45bca00edf6abea9a4f05f9f7d73
|
bionetplot.py should be executable
|
client/plot/bionetplot.py
|
client/plot/bionetplot.py
|
#!/usr/bin/python
from twisted.internet import reactor, interfaces, protocol
from twisted.protocols import basic
from twisted.web import server, resource
from twisted.web.static import File
from twisted.web.server import Site
from twisted.web.resource import Resource
from twisted_bionet_client import *
from bionet import *
from bionetplot_callback import *
import optparse
class DataServer(resource.Resource):
isLeaf = True
def render_GET(self, request):
session = request.getSession()
# existing session
if (session in sessions) and (sessions[session]['resource'] == request.args['resource']) and (sessions[session]['timespan'] == request.args['timespan']):
retval = "[ "
#print "Old session found." #debugging
for name in sessions[session]['bionet-resources']:
retval += "{\n ";
#print "Looking at resource: %s" % name
retval += "label: '%s',\n data: [" % name
u = bionet_resources[name]
if (None != u) and ('sessions' in u) and (session in u['sessions']):
#print "Resource user_data:", u
for d in u['sessions'][session]:
retval += '[%s, %s], ' % (d[0], d[1])
u['sessions'][session] = []
retval += "]\n"
retval += "},\n"
#print "u after loop: ", u
#print retval
retval += " ]"
return "%s" % retval
else: # new session!
if ('resource' not in request.args) or ('timespan' not in request.args):
return "<html>No subscription.</html>"
resource_list = [] #create the list of resources associated with this session
#create the session
sessions[session] = { 'resource' : request.args['resource'],
'timespan' : request.args['timespan'],
'bionet-resources' : resource_list }
#subscribe to all the resources requested in the HTTP request
for r in sessions[session]['resource']:
#print "Subscribing to %(resource)s" % { 'resource' : r }
bionet_subscribe_datapoints_by_name(r)
return "{}"
class Datapoints(resource.Resource):
isLeaf = True
def render_GET(self, request):
session = request.getSession()
# existing session
if (session in sessions) and (sessions[session]['resource'] == request.args['resource']) and (sessions[session]['timespan'] == request.args['timespan']):
retval = "[ "
#print "Old session found." #debugging
for name in sessions[session]['bionet-resources']:
retval += "{\n ";
#print "Looking at resource: %s" % name
retval += "label: '%s',\n data: [" % name
u = bionet_resources[name]
if (None != u) and ('datapoints' in u):
#print "Resource user_data:", u
u['sessions'][session] = []
for d in u['datapoints']:
retval += '[%s, %s], ' % (d[0], d[1])
retval += "]\n"
retval += "},\n"
#print "u after loop: ", u
#print retval
retval += " ]"
return "%s" % retval
else: # new session!
if ('resource' not in request.args) or ('timespan' not in request.args):
return "<html>No subscription.</html>"
resource_list = [] #create the list of resources associated with this session
#create the session
sessions[session] = { 'resource' : request.args['resource'],
'timespan' : request.args['timespan'],
'bionet-resources' : resource_list }
#subscribe to all the resources requested in the HTTP request
for r in sessions[session]['resource']:
#print "Subscribing to %(resource)s" % { 'resource' : r }
bionet_subscribe_datapoints_by_name(r)
return "{}"
def main():
# parse options
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="Webserver port.",
metavar="<port>",
default=8080)
(options, args) = parser.parse_args()
twisted_client = Client()
#register Bionet callbacks
pybionet_register_callback_new_hab(cb_new_hab)
pybionet_register_callback_lost_hab(cb_lost_hab);
pybionet_register_callback_new_node(cb_new_node);
pybionet_register_callback_lost_node(cb_lost_node);
pybionet_register_callback_datapoint(cb_datapoint);
data = DataServer()
full = Datapoints()
root = Resource()
root.putChild('plot', File("plot.html"))
root.putChild('flot', File("flot"))
root.putChild('data', data)
root.putChild('full', full)
factory = Site(root)
reactor.listenTCP(options.port, factory)
reactor.addReader(twisted_client)
reactor.run()
if __name__ == "__main__":
main()
|
Python
| 0.999999
| |
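
This commit changes only the file mode (the diff body is empty): the script gains the executable bit so its #!/usr/bin/python shebang can be used directly. The shell equivalent is chmod +x client/plot/bionetplot.py; a small Python sketch of the same operation:

import os
import stat

def make_executable(path):
    """Add the user/group/other execute bits to an existing file."""
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
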
250e166f02642e9d3c41f3854b6e9e12c560daba
|
update script for battle
|
battle.py
|
battle.py
|
# Welcome to Battleship
# http://www.codecademy.com/courses/python-beginner-en-4XuFm/0/1?curriculum_id=4f89dab3d788890003000096
""" In this project you will build a simplified, one-player version of the
classic board game Battleship! In this version of the game, there will be a
single ship hidden in a random location on a 5x5 grid. The player will have 10
guesses to try to sink the ship. """
from random import randint
board = []
for i in range(5):
board.append(["O"]*5)
def print_board(board):
for row in board:
print " ".join(row)
print_board(board)
def random_row(board):
return randint(0, len(board) - 1)
def random_col(board):
return randint(0, len(board) - 1)
ship_row = random_row(board)
ship_col = random_col(board)
guess_row = int(raw_input("Guess Row: "))
guess_col = int(raw_input("Guess Col: "))
print "Battle ship is in row %s, col %s" % (ship_row, ship_col)
if guess_row == ship_row and guess_col == ship_col:
print "Congratulations! You sank my battleship"
|
Python
| 0
|
@@ -755,16 +755,115 @@
(board)%0A
+print %22Battle ship is in row %25s, col %25s%22 %25 (ship_row, ship_col)%0A%0Adef check_guess(row, col, turn):%0A%09
guess_ro
@@ -896,16 +896,17 @@
ow: %22))%0A
+%09
guess_co
@@ -944,133 +944,495 @@
))%0A%0A
-print %22B
+%09if guess_row == ship_row and guess_col == ship_col:%0A%09%09print %22Congratulations! You sank my b
attle
-
ship
- is in row %25s, col %25s%22 %25 (ship_row, ship_col)%0A%0Aif guess_row == ship_row and guess_col == ship_col
+%22%0A%09%09return True%0A%09else:%0A%09%09print %22You missed my battleship!%22%0A%09%09if guess_row not in range(5) or %5C%0A%09%09 guess_col not in range(5):%0A%09%09%09print %22Oops, that's not even in the ocean.%22%0A%09%09elif board%5Bguess_row%5D%5Bguess_col%5D == %22X%22:%0A%09%09%09print %22You guessed that one already.%22%0A%09%09else:%0A%09%09%09if guess_row in range(5) and guess_col in range(5):%0A%09%09%09%09board%5Bguess_row%5D%5Bguess_col%5D = %22X%22%0A%09%09if turn == 3
:%0A
+%09%09
%09print %22
Cong
@@ -1431,51 +1431,139 @@
nt %22
-Congratulations! You sank my battleship%22%0A%0A%0A
+Game Over%22%0A%09%09return False%0A%0Afor turn in range(4):%0A%09print%0A%09print %22%25s%22 %25 (turn + 1)%0A%09if check_guess(ship_row, ship_col, turn):%0A%09%09break
%0A%0A%0A%0A
|
c2673bc4ab244e55d876ce6eb643a3cab37c6573
|
Use Django's now()
|
src/edurepo/resources/management/commands/validate_resources.py
|
src/edurepo/resources/management/commands/validate_resources.py
|
from datetime import datetime, timedelta
from optparse import make_option
import sys
from bs4 import BeautifulSoup
from django.core.management.base import BaseCommand
from django.utils.timezone import utc
import requests
from resources.models import Resource, ResourceVerification
def now():
return datetime.utcnow().replace(tzinfo=utc)
def get_content_type(debug, rsp):
"""Return content-type in lower case with anything else (e.g., charset)
stripped off. Return empty string if no content-type is available."""
ct_hdr = rsp.headers['Content-Type']
if not ct_hdr:
return ''
fields = ct_hdr.split(';')
if not fields:
return ''
return fields[0].strip().lower()
def handle_request_error(url, verification):
if not verification:
verification = ResourceVerification(url=url)
verification.last_failure = now()
verification.full_clean()
verification.save()
resources = Resource.objects.filter(url=url)
print 'Affected learning objectives:'
for r in resources:
print r.objective
print ''
def create_or_update_verification(debug, url, verification):
if debug:
print url
# www.livescience.com does a permanent redirect to a mobile site
# when using the default urllib2 user-agent string. (I haven't
# checked that again after switching from urllib2 to requests.)
headers = {"User-Agent": "Mozilla/5.0 (edurepo link validity checker"}
try:
response = requests.get(url, headers=headers)
except requests.exceptions.ConnectionError:
print 'Failed now: ' + url
print sys.exc_info()
handle_request_error(url, verification)
return
# temporary hack for https://github.com/kennethreitz/requests/issues/2192
except requests.packages.urllib3.exceptions.ProtocolError:
print 'Failed now: ' + url
print sys.exc_info()
handle_request_error(url, verification)
return
except Exception:
print 'Failed now: ' + url
print sys.exc_info()
handle_request_error(url, verification)
return
if response.status_code != 200:
print 'Failed now with HTTP error code %s: %s' % (response.status_code, url)
if response.status_code != 404:
print response.text
handle_request_error(url, verification)
return
ct = get_content_type(debug, response)
if debug:
print response.status_code
print response.headers['Content-Type'] + ' => ' + ct
if ct == 'text/html':
contents = response.text
try:
soup = BeautifulSoup(contents)
except Exception:
print 'Failed to parse: ' + url
print sys.exc_info()
soup = None
if soup and soup.title:
title = soup.title.string
title = title.strip()
title = title.replace('\r', '')
if title.count('\n') > 1:
# assume the worst and set the title to the text up through the 1st \n
title = title.split('\n')[0]
else:
title = title.replace('\n', ' ')
if debug:
print ' ', title.encode("utf8")
else:
title = ''
if debug:
print " (no title)"
else:
title = ''
if not verification:
verification = ResourceVerification(url=url)
verification.last_success = now()
verification.document_title = title
verification.content_type = ct
verification.full_clean()
verification.save()
def verify_all_resources(debug):
resources = Resource.objects.all()
for resource in resources:
try:
verification = ResourceVerification.objects.get(url=resource.url)
if debug:
print verification
except ResourceVerification.DoesNotExist:
create_or_update_verification(debug, resource.url, None)
def re_verify(debug, oldest_valid_success):
verifications = ResourceVerification.objects.all()
for verification in verifications:
if verification.last_success is None:
if debug:
print "never worked: " + verification.url
create_or_update_verification(debug, verification.url, verification)
elif verification.last_failure and verification.last_failure > verification.last_success:
if debug:
print "most recently failed: " + verification.url
create_or_update_verification(debug, verification.url, verification)
elif verification.last_success < oldest_valid_success:
if debug:
print "not tested in a while: " + verification.url
create_or_update_verification(debug, verification.url, verification)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'--debug',
action='store_true',
dest='debug',
default=False,
help='Show debug messages',
),
)
def handle(self, *args, **options):
# First, ensure that all resources have a verification record.
# Only newly-added resources won't have one.
verify_all_resources(options['debug'])
# Re-verify as necessary
max_success_age = timedelta(days=12)
re_verify(options['debug'], now() - max_success_age)
|
Python
| 0
|
@@ -195,19 +195,19 @@
import
-utc
+now
%0Aimport
@@ -282,70 +282,8 @@
n%0A%0A%0A
-def now():%0A return datetime.utcnow().replace(tzinfo=utc)%0A%0A%0A
def
|
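
The diff removes the hand-rolled now() helper and imports Django's now from django.utils.timezone instead; with USE_TZ enabled both return an aware UTC datetime. For comparison, a stdlib-only sketch of what the helper was computing:

from datetime import datetime, timezone

def aware_utc_now():
    # roughly what django.utils.timezone.now() returns when USE_TZ = True
    return datetime.now(timezone.utc)

print(aware_utc_now().isoformat())
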
03708a87b65ed4695a54cb588b6dc5298a14edcb
|
Add some checks
|
smartreact/smartreact.py
|
smartreact/smartreact.py
|
import copy
import discord
from redbot.core import Config, commands, checks
from redbot.core.utils.chat_formatting import pagify
BaseCog = getattr(commands, "Cog", object)
class SmartReact(BaseCog):
"""Create automatic reactions when trigger words are typed in chat"""
default_guild_settings = {
"reactions": {}
}
def __init__(self, bot):
self.bot = bot
self.conf = Config.get_conf(self, identifier=964952632)
self.conf.register_guild(
**self.default_guild_settings
)
@commands.guild_only()
@commands.command(name="addreact")
async def addreact(self, ctx, word, emoji):
"""Add an auto reaction to a word"""
guild = ctx.message.guild
message = ctx.message
emoji = self.fix_custom_emoji(emoji)
await self.create_smart_reaction(guild, word, emoji, message)
@commands.guild_only()
@commands.command(name="delreact")
async def delreact(self, ctx, word, emoji):
"""Delete an auto reaction to a word"""
guild = ctx.message.guild
message = ctx.message
emoji = self.fix_custom_emoji(emoji)
await self.remove_smart_reaction(guild, word, emoji, message)
def fix_custom_emoji(self, emoji):
if emoji[:2] != "<:":
return emoji
for guild in self.bot.guilds:
for e in guild.emojis:
if str(e.id) == emoji.split(':')[2][:-1]:
return e
return None
@commands.guild_only()
@commands.command(name="listreact")
async def listreact(self, ctx):
"""List reactions for this server"""
emojis = await self.conf.guild(ctx.guild).reactions()
msg = f"Smart Reactions for {ctx.guild.name}:\n"
for emoji in emojis:
for command in emojis[emoji]:
msg += f"{emoji}: {command}\n"
for page in pagify(msg, delims=["\n"]):
            await ctx.send(page)
async def create_smart_reaction(self, guild, word, emoji, message):
try:
# Use the reaction to see if it's valid
await message.add_reaction(emoji)
emoji = str(emoji)
reactions = await self.conf.guild(guild).reactions()
if emoji in reactions:
if word.lower() in reactions[emoji]:
await message.channel.send("This smart reaction already exists.")
return
reactions[emoji].append(word.lower())
else:
reactions[emoji] = [word.lower()]
await self.conf.guild(guild).reactions.set(reactions)
await message.channel.send("Successfully added this reaction.")
except (discord.errors.HTTPException, discord.errors.InvalidArgument):
await message.channel.send("That's not an emoji I recognize. "
"(might be custom!)")
async def remove_smart_reaction(self, guild, word, emoji, message):
try:
# Use the reaction to see if it's valid
await message.add_reaction(emoji)
emoji = str(emoji)
reactions = await self.conf.guild(guild).reactions()
if emoji in reactions:
if word.lower() in reactions[emoji]:
reactions[emoji].remove(word.lower())
await self.conf.guild(guild).reactions.set(reactions)
await message.channel.send("Removed this smart reaction.")
else:
await message.channel.send("That emoji is not used as a reaction "
"for that word.")
else:
await message.channel.send("There are no smart reactions which use "
"this emoji.")
except (discord.errors.HTTPException, discord.errors.InvalidArgument):
await message.channel.send("That's not an emoji I recognize. "
"(might be custom!)")
# Thanks irdumb#1229 for the help making this "more Pythonic"
async def on_message(self, message):
if message.author == self.bot.user:
return
guild = message.guild
reacts = copy.deepcopy(await self.conf.guild(guild).reactions())
if reacts is None:
return
words = message.content.lower().split()
for emoji in reacts:
if set(w.lower() for w in reacts[emoji]).intersection(words):
emoji = self.fix_custom_emoji(emoji)
try:
await message.add_reaction(emoji)
except discord.errors.Forbidden:
pass
except discord.errors.InvalidArgument:
pass
|
Python
| 0
|
@@ -535,16 +535,67 @@
)%0A%0A
+ @checks.mod_or_permissions(administrator=True)%0A
@com
@@ -921,24 +921,75 @@
, message)%0A%0A
+ @checks.mod_or_permissions(administrator=True)%0A
@command
@@ -1593,16 +1593,67 @@
n None%0A%0A
+ @checks.mod_or_permissions(administrator=True)%0A
@com
|
3566e9a4d59779c1fca5cfa3031d03a1bb5a72ba
|
remove process executor option
|
cheeseprism/wsgiapp.py
|
cheeseprism/wsgiapp.py
|
from .jenv import EnvFactory
from cheeseprism.auth import BasicAuthenticationPolicy
from cheeseprism.resources import App
from functools import partial
from pyramid.config import Configurator
from pyramid.session import UnencryptedCookieSessionFactoryConfig
from pyramid.settings import asbool
import futures
import logging
import multiprocessing
import os
import signal
logger = logging.getLogger(__name__)
def main(global_config, **settings):
settings = dict(global_config, **settings)
settings.setdefault('jinja2.i18n.domain', 'CheesePrism')
session_factory = UnencryptedCookieSessionFactoryConfig('cheeseprism')
config = Configurator(root_factory=App, settings=settings,
session_factory=session_factory,
authentication_policy=\
BasicAuthenticationPolicy(BasicAuthenticationPolicy.noop_check))
setup_workers(config.registry)
config.add_translation_dirs('locale/')
config.include('.request')
config.include('.views')
config.include('.index')
tempspec = settings.get('cheeseprism.index_templates', '')
config.registry['cp.index_templates'] = EnvFactory.from_str(tempspec)
if asbool(settings.get('cheeseprism.pipcache_mirror', False)):
config.include('.sync.pip')
if asbool(settings.get('cheeseprism.auto_sync', False)):
config.include('.sync.auto')
return config.make_wsgi_app()
def sig_handler(executor, sig, frame, wait=True):
logger.warn("Signal %d recieved: wait: %s", sig, wait)
executor.shutdown(wait)
logger.info("Executor shutdown complete")
def ping_proc(i):
pid = os.getpid()
logger.debug("worker %s up: %s", i, pid)
return pid
def setup_workers(registry, handler=sig_handler):
"""
ATT: Sensitive voodoo. Workers must be setup before any other
threads are launched. Workers must be initialized before signals
are registered.
"""
settings = registry.settings
registry['cp.executor_type'] = executor_type =\
settings.get('cheeseprism.futures', 'thread')
executor = executor_type != 'process' and futures.ThreadPoolExecutor \
or futures.ProcessPoolExecutor
workers = int(settings.get('cheeseprism.futures.workers', 0))
if executor_type == 'process' and workers <= 0:
workers = multiprocessing.cpu_count() + 1
else:
workers = workers <= 0 and 10 or workers
logging.info("PID %s using %s executor with %s workers", os.getpid(), executor_type, workers)
executor = registry['cp.executor'] = executor(workers)
# -- This initializes our processes/threads
workers = [str(pid) for pid in executor.map(ping_proc, range(workers))]
logger.info("workers: %s", " ".join(workers))
# -- Register signals after init so to not have an echo effect
for sig in (signal.SIGHUP, signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
signal.signal(sig, partial(sig_handler, executor))
|
Python
| 0.000002
|
@@ -119,38 +119,8 @@
App%0A
-from functools import partial%0A
from
@@ -298,47 +298,11 @@
ort
-multiprocessing%0Aimport os%0Aimport signal
+os%0A
%0A%0A%0Al
@@ -1380,503 +1380,162 @@
def
-sig_handler(executor, sig, frame, wait=True):%0A logger.warn(%22Signal %25d recieved: wait: %25s%22, sig, wait)%0A executor.shutdown(wait)%0A logger.info(%22Executor shutdown complete%22)%0A%0A%0Adef ping_proc(i):%0A pid = os.getpid()%0A logger.debug(%22worker %25s up: %25s%22, i, pid)%0A return pid%0A%0A%0Adef setup_workers(registry, handler=sig_handler):%0A %22%22%22%0A ATT: Sensitive voodoo. Workers must be setup before any other%0A threads are launched. Workers must be initialized before signals%0A are registered.
+ping_proc(i):%0A pid = os.getpid()%0A logger.debug(%22worker %25s up: %25s%22, i, pid)%0A return pid%0A%0A%0Adef setup_workers(registry):%0A %22%22%22%0A now thread only
%0A
@@ -1611,67 +1611,8 @@
'%5D =
- executor_type =%5C%0A settings.get('cheeseprism.futures',
'th
@@ -1616,17 +1616,16 @@
'thread'
-)
%0A%0A ex
@@ -1637,104 +1637,35 @@
r =
-executor_type != 'process' and futures.ThreadPoolExecutor %5C%0A or futures.ProcessPoolExecutor
+futures.ThreadPoolExecutor
%0A%0A
@@ -1728,172 +1728,12 @@
s',
-0
+5
))%0A
- if executor_type == 'process' and workers %3C= 0:%0A workers = multiprocessing.cpu_count() + 1%0A else:%0A workers = workers %3C= 0 and 10 or workers
%0A%0A
@@ -1752,23 +1752,23 @@
fo(%22
-PID %25s using %25s
+Starting thread
exe
@@ -1778,11 +1778,9 @@
or w
-ith
+/
%25s
@@ -1792,36 +1792,8 @@
rs%22,
- os.getpid(), executor_type,
wor
@@ -2037,209 +2037,5 @@
))%0A%0A
- # -- Register signals after init so to not have an echo effect%0A for sig in (signal.SIGHUP, signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):%0A signal.signal(sig, partial(sig_handler, executor))
%0A
|
8eb938086a77a11cb2df8c83f872b9daa519f858
|
fix python3 compat issues in rename.
|
bento/compat/rename.py
|
bento/compat/rename.py
|
import os.path
import os
import random
def rename(src, dst):
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
def tempname(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
return fn
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
|
Python
| 0
|
@@ -31,16 +31,29 @@
t random
+%0Aimport errno
%0A%0Adef re
@@ -194,13 +194,8 @@
rror
-, err
:%0A
@@ -616,18 +616,17 @@
IOError
-,
+(
(errno.E
@@ -669,16 +669,17 @@
found%22)
+)
%0A%0A
|
66847c06102731668c65d2728a707eadbd554437
|
Fix for the fix.
|
cherrypy/_cpchecker.py
|
cherrypy/_cpchecker.py
|
import os
import warnings
import cherrypy
class Checker(object):
global_config_contained_paths = False
def __call__(self):
oldformatwarning = warnings.formatwarning
warnings.formatwarning = self.formatwarning
try:
for name in dir(self):
if name.startswith("check_"):
method = getattr(self, name)
if method and callable(method):
method()
finally:
warnings.formatwarning = oldformatwarning
def formatwarning(self, message, category, filename, lineno):
"""Function to format a warning."""
return "CherryPy Checker:\n%s\n\n" % message
def check_skipped_app_config(self):
for sn, app in cherrypy.tree.apps.iteritems():
if not app.config:
msg = "The Application mounted at %r has an empty config." % sn
if self.global_config_contained_paths:
msg += (" It looks like the config you passed to "
"cherrypy.config.update() contains application-"
"specific sections. You must explicitly pass "
"application config via "
"cherrypy.tree.mount(..., config=app_config)")
warnings.warn(msg)
return
def check_static_paths(self):
# Use the dummy Request object in the main thread.
request = cherrypy.request
for sn, app in cherrypy.tree.apps.iteritems():
request.app = app
for section in app.config:
# get_resource will populate request.config
request.get_resource(section + "/dummy.html")
conf = request.config.get
if conf("tools.staticdir.on", False):
msg = ""
root = conf("tools.staticdir.root")
dir = conf("tools.staticdir.dir")
if dir is None:
msg = "tools.staticdir.dir is not set."
else:
fulldir = ""
if os.path.isabs(dir):
fulldir = dir
if root:
msg = "dir is an absolute path, even though a root is provided."
testdir = os.path.join(root, dir[1:])
if os.path.exists(testdir):
msg += ("\nIf you meant to serve the filesystem folder at %r, "
"remove the leading slash from dir." % testdir)
else:
if not root:
msg = "dir is a relative path and no root provided."
else:
fulldir = os.path.join(root, dir)
if not os.path.isabs(fulldir):
msg = "%r is not an absolute path." % fulldir
if fulldir and not os.path.exists(fulldir):
if msg:
msg += "\n"
msg += "%r (root + dir) is not an existing filesystem path." % fulldir
if msg:
warnings.warn("%s\nsection: [%s]\nroot: %r\ndir: %r"
% (msg, section, root, dir))
obsolete = {
'server.default_content_type': 'tools.response_headers.headers',
'log_access_file': 'log.access_file',
'log_config_options': None,
'log_file': 'log.error_file',
'log_file_not_found': None,
'log_request_headers': 'tools.log_headers.on',
'log_to_screen': 'log.screen',
'show_tracebacks': 'request.show_tracebacks',
'throw_errors': 'request.throw_errors',
'profiler.on': 'cherrypy.tree.mount(profiler.make_app(cherrypy.Application(Root())))',
}
deprecated = {}
def _compat(self, config):
"""Process config and warn on each obsolete or deprecated entry."""
for section, conf in config.iteritems():
if isinstance(conf, dict):
for k, v in conf.iteritems():
if k in self.obsolete:
warnings.warn("%r is obsolete. Use %r instead.\n"
"section: [%s]" % (k, self.obsolete[k], section))
elif k in self.deprecated:
warnings.warn("%r is deprecated. Use %r instead.\n"
"section: [%s]" % (k, self.deprecated[k], section))
else:
if section in self.obsolete:
warnings.warn("%r is obsolete. Use %r instead."
% (section, self.obsolete[conf]))
elif section in self.deprecated:
warnings.warn("%r is deprecated. Use %r instead."
% (section, self.decprecated[conf]))
def check_compatibility(self):
"""Process config and warn on each obsolete or deprecated entry."""
self._compat(cherrypy.config)
for sn, app in cherrypy.tree.apps.iteritems():
self._compat(app.config)
|
Python
| 0.000002
|
@@ -5019,20 +5019,23 @@
bsolete%5B
-c
+secti
on
-f
%5D))%0A
@@ -5212,20 +5212,23 @@
recated%5B
-c
+secti
on
-f
%5D))%0A
|
18392d9b4ee6a62eae815d846f21299e652ae56b
|
Add missing import.
|
mysite/search/management/commands/search_daily_tasks.py
|
mysite/search/management/commands/search_daily_tasks.py
|
from django.core.management.base import BaseCommand
import django.conf
django.conf.settings.CELERY_ALWAYS_EAGER = True
import mysite.search.tasks
import mysite.customs.bugtrackers.roundup_general
import mysite.customs.bugtrackers
import mysite.search.tasks.launchpad_tasks
import mysite.customs.miro
import mysite.customs.bugtrackers.trac
import mysite.customs.bugtrackers.gnome_love
import mysite.customs.bugtrackers.fedora_fitfinish
import mysite.customs.bugtrackers.mozilla
import mysite.customs.bugtrackers.wikimedia
import mysite.customs.bugtrackers.kde
### All this code runs synchronously once a day.
### For now, we can crawl all the bug trackers in serial.
### One day, though, we will crawl so many bug trackers that it will take
### too long.
### I suggest that, at that point, we fork a number of worker processes, and
### turn these lists into a queue or something. That should be easy with the
### multiprocessing module. Then we can do N at once.
### We could do something smart with statistics, detecting the average
### refresh time within the bug tracker of a bug, then polling at some
### approximation of that rate. (But, really, why bother?)
### Since most of the time we're waiting on the network, we could also use
### Twisted or Stackless Python or something. That would be super cool, too.
### If somone can show me how to migrate this codebase to it, that'd be neat.
### I'm unlikely to look into it myself, though.
### (If at some point we start indexing absolutely every bug in free and open
### source software, then the above ideas will actually become meaningful.)
### -- New Age Asheesh, 2010-05-31.
class Command(BaseCommand):
help = "Call this once a day to make sure we run Bug search-related nightly jobs."
def update_bugzilla_trackers(self):
bugzilla_trackers = {
'Miro':
mysite.customs.miro.grab_miro_bugs,
'KDE junior jobs':
mysite.customs.bugtrackers.kde.grab,
'Wikimedia easy bugs':
mysite.customs.bugtrackers.wikimedia.grab,
'GNOME Love':
mysite.customs.bugtrackers.gnome_love.grab,
'Mozilla "good first bug"s':
mysite.customs.bugtrackers.mozilla.grab,
# FIXME
# Really, the Bugzilla import code should be reworked to be as
# clean and tidy as the Mercurial/Python/Roundup stuff, with
# an abstract class with a .update() method.
# Then simple sub-classes can handle each of the different projects'
# details.
# What the heck is up with the Fedora code being
# special-cased like this? Well, I'll clean it up another day,
# so long as it seems to work right now.
'Fedora "fit and finish" new bugs':
mysite.search.tasks.bugzilla_instances.LearnAboutNewFedoraFitAndFinishBugs.apply,
'Fedora "fit and finish" refreshing old bugs':
mysite.search.tasks.bugzilla_instances.RefreshAllFedoraFitAndFinishBugs.apply,
}
for bugzilla_tracker in bugzilla_trackers:
logging.info("Refreshing bugs from %s." % bugzilla_tracker)
callable = bugzilla_trackers[bugzilla_tracker]
def find_and_update_enabled_roundup_trackers(self):
enabled_roundup_trackers = []
### First, the "find" step
for thing_name in dir(mysite.customs.bugtrackers.roundup_general):
thing = getattr(mysite.customs.bugtrackers.roundup_general,
thing_name)
if hasattr(thing, 'enabled'):
if getattr(thing, 'enabled'):
enabled_roundup_trackers.append(thing)
### Okay, now update!
for thing in enabled_roundup_trackers:
thing.update()
def handle(self, *args, **options):
# Make celery always eager, baby
# A bunch of classes whose .run() we want to .delay()
dot_run_these = [
# Twisted
mysite.search.tasks.trac_instances.LearnAboutNewEasyTwistedBugs,
mysite.search.tasks.trac_instances.RefreshAllTwistedEasyBugs,
]
for thing in dot_run_these:
thing().run()
# Just functions
run_these = [
# various projects hosted on Launchpad
mysite.search.tasks.launchpad_tasks.refresh_bugs_from_all_indexed_launchpad_projects,
mysite.search.tasks.launchpad_tasks.refresh_all_launchpad_bugs,
# sweet sweet sugar
mysite.search.tasks.trac_instances.learn_about_new_sugar_easy_bugs,
mysite.search.tasks.trac_instances.refresh_all_sugar_easy_bugs,
# miro, whee
mysite.customs.miro.grab_miro_bugs,
# KDE
mysite.customs.bugtrackers.kde.grab,
# Wikimedia
mysite.customs.bugtrackers.wikimedia.grab,
# GNOME is in Love
mysite.customs.bugtrackers.gnome_love.grab,
# Mozira
mysite.customs.bugtrackers.mozilla.grab,
# Fedora:
mysite.search.tasks.bugzilla_instances.LearnAboutNewFedoraFitAndFinishBugs.apply,
mysite.search.tasks.bugzilla_instances.RefreshAllFedoraFitAndFinishBugs.apply,
]
for callable in run_these:
logging.info("About to run %s" % callable)
callable()
# And for Roundup bug trackers, use our special handling
self.find_and_update_enabled_roundup_trackers()
# And for Bugzilla bug trackers, use our special handling!
self.update_bugzilla_trackers()
|
Python
| 0
|
@@ -1,12 +1,28 @@
+import logging%0A%0A
from django.
|
b61a45c13ffa82356f896f0914d2f28dabea7a7f
|
Include credentials for heat calling self
|
heat/engine/clients/os/heat_plugin.py
|
heat/engine/clients/os/heat_plugin.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import client as hc
from heatclient import exc
from heat.engine.clients import client_plugin
class HeatClientPlugin(client_plugin.ClientPlugin):
exceptions_module = exc
def _create(self):
args = {
'auth_url': self.context.auth_url,
'token': self.auth_token,
'username': None,
'password': None,
'ca_file': self._get_client_option('heat', 'ca_file'),
'cert_file': self._get_client_option('heat', 'cert_file'),
'key_file': self._get_client_option('heat', 'key_file'),
'insecure': self._get_client_option('heat', 'insecure')
}
endpoint = self.get_heat_url()
return hc.Client('1', endpoint, **args)
def is_not_found(self, ex):
return isinstance(ex, exc.HTTPNotFound)
def is_over_limit(self, ex):
return isinstance(ex, exc.HTTPOverLimit)
def get_heat_url(self):
heat_url = self._get_client_option('heat', 'url')
if heat_url:
tenant_id = self.context.tenant_id
heat_url = heat_url % {'tenant_id': tenant_id}
else:
endpoint_type = self._get_client_option('heat', 'endpoint_type')
heat_url = self.url_for(service_type='orchestration',
endpoint_type=endpoint_type)
return heat_url
|
Python
| 0.000114
|
@@ -1272,16 +1272,413 @@
t_url()%0A
+ if self._get_client_option('heat', 'url'):%0A # assume that the heat API URL is manually configured because%0A # it is not in the keystone catalog, so include the credentials%0A # for the standalone auth_password middleware%0A args%5B'username'%5D = self.context.username%0A args%5B'password'%5D = self.context.password%0A del(args%5B'token'%5D)%0A%0A
|
3a956cd2c435dba256b97d1380e0f9bd7598ed0d
|
Fix syntax error in daily search script.
|
mysite/search/management/commands/search_daily_tasks.py
|
mysite/search/management/commands/search_daily_tasks.py
|
from django.core.management.base import BaseCommand
import django.conf
django.conf.settings.CELERY_ALWAYS_EAGER = True
import mysite.search.tasks
import mysite.customs.bugtrackers.roundup_general
import mysite.customs.bugtrackers
import mysite.search.tasks.launchpad_tasks
import mysite.customs.miro
import mysite.customs.bugtrackers.trac
import mysite.customs.bugtrackers.gnome_love
import mysite.customs.bugtrackers.fedora_fitfinish
import mysite.customs.bugtrackers.mozilla
import mysite.customs.bugtrackers.wikimedia
import mysite.customs.bugtrackers.kde
### All this code runs synchronously once a day.
### For now, we can crawl all the bug trackers in serial.
### One day, though, we will crawl so many bug trackers that it will take
### too long.
### I suggest that, at that point, we fork a number of worker processes, and
### turn these lists into a queue or something. That should be easy with the
### multiprocessing module. Then we can do N at once.
### We could do something smart with statistics, detecting the average
### refresh time within the bug tracker of a bug, then polling at some
### approximation of that rate. (But, really, why bother?)
### Since most of the time we're waiting on the network, we could also use
### Twisted or Stackless Python or something. That would be super cool, too.
### If somone can show me how to migrate this codebase to it, that'd be neat.
### I'm unlikely to look into it myself, though.
### (If at some point we start indexing absolutely every bug in free and open
### source software, then the above ideas will actually become meaningful.)
### -- New Age Asheesh, 2010-05-31.
class Command(BaseCommand):
help = "Call this once a day to make sure we run Bug search-related nightly jobs."
def update_bugzilla_trackers(self):
bugzilla_trackers = {
'Miro':
mysite.customs.miro.grab_miro_bugs,
'KDE junior jobs':
mysite.customs.bugtrackers.kde.grab,
'Wikimedia easy bugs':
mysite.customs.bugtrackers.wikimedia.grab,
'GNOME Love':
mysite.customs.bugtrackers.gnome_love.grab,
'Mozilla "good first bug"s':
mysite.customs.bugtrackers.mozilla.grab,
# FIXME
# Really, the Bugzilla import code should be reworked to be as
# clean and tidy as the Mercurial/Python/Roundup stuff, with
# an abstract class with a .update() method.
# Then simple sub-classes can handle each of the different projects'
# details.
# What the heck is up with the Fedora code being
# special-cased like this? Well, I'll clean it up another day,
# so long as it seems to work right now.
'Fedora "fit and finish" new bugs':
mysite.search.tasks.bugzilla_instances.LearnAboutNewFedoraFitAndFinishBugs.apply,
'Fedora "fit and finish" refreshing old bugs':
mysite.search.tasks.bugzilla_instances.RefreshAllFedoraFitAndFinishBugs.apply,
}
for bugzilla_tracker in bugzilla_trackers:
logging.info("Refreshing bugs from %s." % bugzilla_tracker)
callable = bugzilla_trackers[bugzilla_tracker]
def find_and_update_enabled_roundup_trackers(self):
enabled_roundup_trackers = []
### First, the "find" step
for thing_name in dir(mysite.customs.bugtrackers.roundup_general):
thing = getattr(mysite.customs.bugtrackers.roundup_general,
thing_name)
if hasattr(thing, 'enabled'):
if getattr(thing, 'enabled'):
enabled_roundup_trackers.append(thing)
### Okay, now update!
for thing in enabled_roundup_trackers:
thing.update()
def handle(self, *args, **options):
# Make celery always eager, baby
# A bunch of classes whose .run() we want to .delay()
dot_run_these = [
# Twisted
mysite.search.tasks.trac_instances.LearnAboutNewEasyTwistedBugs,
mysite.search.tasks.trac_instances.RefreshAllTwistedEasyBugs,
]
for thing in dot_run_these:
thing().run()
# Just functions
run_these = [
# various projects hosted on Launchpad
mysite.search.tasks.launchpad_tasks.refresh_bugs_from_all_indexed_launchpad_projects,
mysite.search.tasks.launchpad_tasks.refresh_all_launchpad_bugs,
# sweet sweet sugar
mysite.search.tasks.trac_instances.learn_about_new_sugar_easy_bugs,
mysite.search.tasks.trac_instances.refresh_all_sugar_easy_bugs,
# miro, whee
mysite.customs.miro.grab_miro_bugs,
# KDE
mysite.customs.bugtrackers.kde.grab,
# Wikimedia
mysite.customs.bugtrackers.wikimedia.grab,
# GNOME is in Love
mysite.customs.bugtrackers.gnome_love.grab,
# Mozira
mysite.customs.bugtrackers.mozilla.grab,
# Fedora:
mysite.search.tasks.bugzilla_instances.LearnAboutNewFedoraFitAndFinishBugs.apply,
mysite.search.tasks.bugzilla_instances.RefreshAllFedoraFitAndFinishBugs.apply,
]
for callable in run_these:
logging.info("About to run %s" % callable")
callable()
# And for Roundup bug trackers, use our special handling
self.find_and_update_enabled_roundup_trackers()
# And for Bugzilla bug trackers, use our special handling!
self.update_bugzilla_trackers()
|
Python
| 0
|
@@ -5421,17 +5421,16 @@
callable
-%22
)%0A
|
c4108fec4f879b1f98f8cb0ad7e7959bd8a3557d
|
Move yi imports at top-level (#29001)
|
homeassistant/components/yi/camera.py
|
homeassistant/components/yi/camera.py
|
"""Support for Xiaomi Cameras (HiSilicon Hi3518e V200)."""
import asyncio
import logging
import voluptuous as vol
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PATH,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
from homeassistant.exceptions import PlatformNotReady
_LOGGER = logging.getLogger(__name__)
DEFAULT_BRAND = "YI Home Camera"
DEFAULT_PASSWORD = ""
DEFAULT_PATH = "/tmp/sd/record"
DEFAULT_PORT = 21
DEFAULT_USERNAME = "root"
DEFAULT_ARGUMENTS = "-pred 1"
CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a Yi Camera."""
async_add_entities([YiCamera(hass, config)], True)
class YiCamera(Camera):
"""Define an implementation of a Yi Camera."""
def __init__(self, hass, config):
"""Initialize."""
super().__init__()
self._extra_arguments = config.get(CONF_FFMPEG_ARGUMENTS)
self._last_image = None
self._last_url = None
self._manager = hass.data[DATA_FFMPEG]
self._name = config[CONF_NAME]
self._is_on = True
self.host = config[CONF_HOST]
self.port = config[CONF_PORT]
self.path = config[CONF_PATH]
self.user = config[CONF_USERNAME]
self.passwd = config[CONF_PASSWORD]
@property
def brand(self):
"""Camera brand."""
return DEFAULT_BRAND
@property
def is_on(self):
"""Determine whether the camera is on."""
return self._is_on
@property
def name(self):
"""Return the name of this camera."""
return self._name
async def _get_latest_video_url(self):
"""Retrieve the latest video file from the customized Yi FTP server."""
from aioftp import Client, StatusCodeError
ftp = Client()
try:
await ftp.connect(self.host)
await ftp.login(self.user, self.passwd)
except (ConnectionRefusedError, StatusCodeError) as err:
raise PlatformNotReady(err)
try:
await ftp.change_directory(self.path)
dirs = []
for path, attrs in await ftp.list():
if attrs["type"] == "dir" and "." not in str(path):
dirs.append(path)
latest_dir = dirs[-1]
await ftp.change_directory(latest_dir)
videos = []
for path, _ in await ftp.list():
videos.append(path)
if not videos:
_LOGGER.info('Video folder "%s" empty; delaying', latest_dir)
return None
await ftp.quit()
self._is_on = True
return "ftp://{0}:{1}@{2}:{3}{4}/{5}/{6}".format(
self.user,
self.passwd,
self.host,
self.port,
self.path,
latest_dir,
videos[-1],
)
except (ConnectionRefusedError, StatusCodeError) as err:
_LOGGER.error("Error while fetching video: %s", err)
self._is_on = False
return None
async def async_camera_image(self):
"""Return a still image response from the camera."""
from haffmpeg.tools import ImageFrame, IMAGE_JPEG
url = await self._get_latest_video_url()
if url and url != self._last_url:
ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
self._last_image = await asyncio.shield(
ffmpeg.get_image(
url, output_format=IMAGE_JPEG, extra_cmd=self._extra_arguments
),
loop=self.hass.loop,
)
self._last_url = url
return self._last_image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
from haffmpeg.camera import CameraMjpeg
if not self._is_on:
return
stream = CameraMjpeg(self._manager.binary, loop=self.hass.loop)
await stream.open_camera(self._last_url, extra_cmd=self._extra_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._manager.ffmpeg_stream_content_type,
)
finally:
await stream.close()
|
Python
| 0.000454
|
@@ -83,16 +83,149 @@
ogging%0A%0A
+from aioftp import Client, StatusCodeError%0Afrom haffmpeg.camera import CameraMjpeg%0Afrom haffmpeg.tools import IMAGE_JPEG, ImageFrame%0A
import v
@@ -289,16 +289,8 @@
port
- Camera,
PLA
@@ -301,16 +301,24 @@
M_SCHEMA
+, Camera
%0Afrom ho
@@ -434,23 +434,8 @@
ME,%0A
- CONF_PATH,%0A
@@ -449,16 +449,31 @@
SSWORD,%0A
+ CONF_PATH,%0A
CONF
@@ -500,16 +500,70 @@
NAME,%0A)%0A
+from homeassistant.exceptions import PlatformNotReady%0A
from hom
@@ -691,62 +691,8 @@
ream
-%0Afrom homeassistant.exceptions import PlatformNotReady
%0A%0A_L
@@ -2638,60 +2638,8 @@
%22%22%22%0A
- from aioftp import Client, StatusCodeError%0A%0A
@@ -4055,67 +4055,8 @@
%22%22%22%0A
- from haffmpeg.tools import ImageFrame, IMAGE_JPEG%0A%0A
@@ -4645,57 +4645,8 @@
%22%22%22%0A
- from haffmpeg.camera import CameraMjpeg%0A%0A
|
ce797b963d7219f131548f32509ce4379c8d055a
|
disable append slash
|
snippit/settings/base.py
|
snippit/settings/base.py
|
"""
Django settings for snippit project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
APPS = os.path.join(BASE_DIR, 'apps')
sys.path.insert(1, APPS)
sys.path.insert(2, BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '56#z0uc5v%p-60az6s4pm3wxajt5u9*cfe4m12v+6&iqvzpfxi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Application definition
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
)
THIRD_PARTY_APPS = (
'django_extensions',
'rest_framework',
'rest_framework.authtoken'
)
LOCAL_APPS = (
'account',
'api',
'auth',
'snippet',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
FIXTURE_DIRS = (
os.path.join(APPS, '/account/fixtures/'),
os.path.join(APPS, '/snippet/fixtures/')
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'snippit.urls'
WSGI_APPLICATION = 'snippit.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates')
)
# END TEMPLATE CONFIGURATION
AUTH_USER_MODEL = 'account.User'
ALLOWED_HOSTS = ["*"]
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_DIR, 'media'))
MEDIA_URL = '/media/'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.path.normpath(os.path.join(BASE_DIR, 'static'))
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib
# /staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
# Rest Framework Config http://django-rest-framework.org/#installation
REST_FRAMEWORK = {
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication'
),
# Custom Exception Handler
'EXCEPTION_HANDLER': 'api.exceptions.custom_exception_handler',
'TEST_REQUEST_RENDERER_CLASSES': (
'rest_framework.renderers.MultiPartRenderer',
'rest_framework.renderers.JSONRenderer',
),
# Pagination settings
'PAGINATE_BY': 10,
'PAGINATE_BY_PARAM': 'page_size',
'MAX_PAGINATE_BY': 100,
}
# Api Token Expire: 15 days
API_TOKEN_TTL = 15
|
Python
| 0.000003
|
@@ -2330,16 +2330,40 @@
%5B%22*%22%5D%0A%0A
+%0AAPPEND_SLASH = False%0A%0A%0A
########
|
d1e4be6cc43968253d980cbb8484cb7e43203590
|
add the team code of FC Bayern München
|
soccer/data/teamnames.py
|
soccer/data/teamnames.py
|
__all__ = ['TEAM_NAMES']
TEAM_NAMES = {
'EFFZEH': 1,
'TSG': 2,
'B04': 3,
'BVB': 4,
'S04': 6,
'HSV': 7,
'H96': 8,
'BSC': 9,
'VFB': 10,
'WOB': 11,
'SVW': 12,
'FCK': 13,
'FCN': 14,
'M05': 15,
'FCA': 16,
'SCF': 17,
'BMG': 18,
'SGE': 19,
'FCP': 20,
'GRE': 21,
'AUE': 22,
'FOR': 24,
'1860': 26,
'FCU': 28,
'FCI': 31,
'KAR': 32,
'EBS': 33,
'VFL': 36,
'HEI': 44,
'SVS': 46,
'DAR': 55,
'AFC': 57,
'AVFC': 58,
'EFC': 62,
'LFC': 64,
'MCFC': 65,
'MUFC': 66,
'NUFC': 67,
'NCFC': 68,
'QPR': 69,
'SCFC': 70,
'SUN': 71,
'SWA': 72,
'THFC': 73,
'WBA': 74,
'BIL': 77,
'ATM': 78,
'OSA': 79,
'ESP': 80,
'FCB': 81,
'FCG': 82,
'GCF': 83,
'MAL': 84,
'MAD': 86,
'VALL': 87,
'BET': 90,
'RSS': 92,
'VCF': 94,
'VAL': 95,
'SPO': 96,
'ACM': 98,
'FIO': 99,
'ROM': 100,
'Atalanta': 102,
'CFC': 107,
'Int': 108,
'JUVE': 109,
'LAZ': 110,
'SSC': 113,
'PAL': 114,
'ALA': 263,
'UDA': 267,
'LPA': 275,
'EIB': 278,
'CFE': 285,
'CCF': 295,
'HUL': 322,
'BFC': 328,
'BCFC': 332,
'LCFC': 338,
'SFC': 340,
'DCFC': 342,
'SWFC': 345,
'Watfordfc': 346,
'ITFC': 349,
'NFFC': 351,
'CRY': 354,
'SUSFC': 356,
'RUFC': 385,
'HTAFC': 394,
'BHAFC': 397,
'EMP': 445,
'SASS': 471,
'SLB': 495,
'SCP': 498,
'TOU': 511,
'SMC': 514,
'MAR': 516,
'OSC': 521,
'NIC': 522,
'OLY': 523,
'PSG': 524,
'LOR': 525,
'BOR': 526,
'ETI': 527,
'REN': 529,
'SCB': 536,
'GUI': 538,
'NAN': 543,
'FCM': 545,
'RCL': 546,
'REI': 547,
'MON': 548,
'VIG': 558,
'SEV': 559,
'LAC': 560,
'WHU': 563,
'FCT': 586,
'CES': 591,
'PSV': 674,
'RBL': 721,
'LEG': 745,
'CSK': 751,
'LUD': 753,
'DIN': 755,
'GER': 759,
'POR': 765,
'SVK': 768,
'ENG': 770,
'FRA': 773,
'ITA': 784,
'SUI': 788,
'UKR': 790,
'SWE': 792,
'POL': 794,
'CZE': 798,
'CRO': 799,
'TUR': 803,
'BEL': 805,
'IRL': 806,
'RUS': 808,
'ROU': 811,
'AUT': 816,
'HUN': 827,
'NIR': 829,
'WAL': 833,
'DYK': 842,
'AFCB': 1044,
'ALB': 1065,
'ISL': 1066
}
|
Python
| 0.99997
|
@@ -93,16 +93,31 @@
VB': 4,%0A
+ 'FCBM': 5,%0A
'S04
|
4a2d16d02cb53ce4c4003247cbd0ed7e137dcb35
|
Add frame Fatal & reduce keepalive interval to 30s
|
cqsdk.py
|
cqsdk.py
|
#!/usr/bin/env python3
import re
import socket
import socketserver
import sys
import threading
import time
import traceback
from base64 import b64encode, b64decode
from collections import namedtuple
ClientHello = namedtuple("ClientHello", ("port"))
ServerHello = namedtuple("ServerHello", ())
RcvdPrivateMessage = namedtuple("RcvdPrivateMessage", ("qq", "text"))
SendPrivateMessage = namedtuple("SendPrivateMessage", ("qq", "text"))
RcvdGroupMessage = namedtuple("RcvdGroupMessage", ("group", "qq", "text"))
SendGroupMessage = namedtuple("SendGroupMessage", ("group", "text"))
RcvdDiscussMessage = namedtuple("RcvdDiscussMessage",
("discuss", "qq", "text"))
SendDiscussMessage = namedtuple("SendDiscussMessage",
("discuss", "text"))
GroupMemberDecrease = namedtuple("GroupMemberDecrease",
("group", "qq", "operatedQQ"))
GroupMemberIncrease = namedtuple("GroupMemberIncrease",
("group", "qq", "operatedQQ"))
GroupBan = namedtuple("GroupBan", ("group", "qq", "duration"))
FrameType = namedtuple("FrameType", ("prefix", "rcvd", "send"))
FRAME_TYPES = (
FrameType("ClientHello", (), ClientHello),
FrameType("ServerHello", ServerHello, ()),
FrameType("PrivateMessage", RcvdPrivateMessage, SendPrivateMessage),
FrameType("GroupMessage", RcvdGroupMessage, SendGroupMessage),
FrameType("DiscussMessage", RcvdDiscussMessage, SendDiscussMessage),
FrameType("GroupMemberDecrease", GroupMemberDecrease, ()),
FrameType("GroupMemberIncrease", GroupMemberIncrease, ()),
FrameType("GroupBan", (), GroupBan),
)
RE_CQ_SPECIAL = re.compile(r'\[CQ:\w+(,.+?)?\]')
class CQAt:
PATTERN = re.compile(r'\[CQ:at,qq=(\d+?)\]')
def __init__(self, qq):
self.qq = qq
def __str__(self):
return "[CQ:at,qq={}]".format(self.qq)
class CQImage:
PATTERN = re.compile(r'\[CQ:image,file=(.+?)\]')
def __init__(self, file):
self.file = file
def __str__(self):
return "[CQ:image,file={}]".format(self.file)
def load_frame(data):
if isinstance(data, str):
parts = data.split()
elif isinstance(data, list):
parts = data
else:
raise TypeError()
frame = None
(prefix, *payload) = parts
for type_ in FRAME_TYPES:
if prefix == type_.prefix:
frame = type_.rcvd(*payload)
# decode text
if isinstance(frame,
(RcvdPrivateMessage, RcvdGroupMessage, RcvdDiscussMessage)):
payload[-1] = b64decode(payload[-1]).decode('gbk')
frame = type(frame)(*payload)
return frame
def dump_frame(frame):
if not isinstance(frame, (tuple, list)):
raise TypeError()
# Cast all payload fields to string
payload = list(map(lambda x: str(x), frame))
# encode text
if isinstance(frame,
(SendPrivateMessage, SendGroupMessage, SendDiscussMessage)):
payload[-1] = b64encode(payload[-1].encode('gbk')).decode()
data = None
for type_ in FRAME_TYPES:
if isinstance(frame, type_.send):
data = " ".join((type_.prefix, *payload))
return data
class FrameListener():
def __init__(self, handler, frame_type):
self.handler = handler
self.frame_type = frame_type
class APIRequestHandler(socketserver.BaseRequestHandler):
def handle(self):
data = self.request[0].decode()
parts = data.split()
try:
message = load_frame(parts)
except:
message = None
if message is None:
print("Unknown message", parts, file=sys.stderr)
return
for listener in self.server.listeners:
try:
if (isinstance(message, listener.frame_type) and
listener.handler(message)):
break
except:
traceback.print_exc()
class APIServer(socketserver.UDPServer):
listeners = []
class CQBot():
def __init__(self, server_port, client_port=0, online=True, debug=False):
self.listeners = []
self.remote_addr = ("127.0.0.1", server_port)
self.client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.local_addr = ("127.0.0.1", client_port)
self.server = APIServer(self.local_addr, APIRequestHandler)
# Online mode
# True: Retrive message from socket API server
# False: Send message only
self.online = online
# Debug Mode
# True: print message instead of sending.
self.debug = debug
def __del__(self):
self.client.close()
self.server.shutdown()
self.server.server_close()
def start(self):
if not self.online:
return
self.server.listeners = self.listeners
threaded_server = threading.Thread(
target=self.server.serve_forever,
daemon=True)
threaded_server.start()
threaded_keepalive = threading.Thread(
target=self.server_keepalive,
daemon=True)
threaded_keepalive.start()
def server_keepalive(self):
while True:
host, port = self.server.server_address
self.send(ClientHello(port))
time.sleep(120)
def listener(self, frame_type):
def decorator(handler):
self.listeners.append(FrameListener(handler, frame_type))
return decorator
def send(self, message):
if self.debug:
print(message)
return
data = dump_frame(message).encode()
self.client.sendto(data, self.remote_addr)
if __name__ == '__main__':
try:
qqbot = CQBot(11235)
@qqbot.listener((RcvdPrivateMessage, ))
def log(message):
print(message)
qqbot.start()
print("QQBot is running...")
input()
except KeyboardInterrupt:
pass
|
Python
| 0
|
@@ -1101,16 +1101,55 @@
ion%22))%0A%0A
+Fatal = namedtuple(%22Fatal%22, (%22text%22))%0A%0A
FrameTyp
@@ -1204,16 +1204,16 @@
send%22))%0A
-
FRAME_TY
@@ -1391,75 +1391,8 @@
e),%0A
- FrameType(%22GroupMessage%22, RcvdGroupMessage, SendGroupMessage),%0A
@@ -1452,32 +1452,99 @@
iscussMessage),%0A
+ FrameType(%22GroupMessage%22, RcvdGroupMessage, SendGroupMessage),%0A
FrameType(%22G
@@ -1694,16 +1694,51 @@
upBan),%0A
+ FrameType(%22Fatal%22, (), Fatal),%0A
)%0A%0ARE_CQ
@@ -2531,32 +2531,34 @@
sinstance(frame,
+ (
%0A
@@ -2554,23 +2554,16 @@
- (
RcvdPriv
@@ -2957,16 +2957,18 @@
e(frame,
+ (
%0A
@@ -2972,23 +2972,16 @@
- (
SendPriv
@@ -3020,32 +3020,39 @@
ndDiscussMessage
+, Fatal
)):%0A payl
@@ -5407,18 +5407,17 @@
e.sleep(
-12
+3
0)%0A%0A
|
0a34e0a96df5ce270b5733656234876e0f1dd451
|
Fix bug with autoremove.
|
pykmer/file.py
|
pykmer/file.py
|
"""
This module provides simple parsers for the FASTA and FASTQ sequence
data formats.
The FASTQ parser is not strictly conformant since it assumes the input
to be in a line oriented form (which is usually true).
"""
__docformat__ = 'restructuredtext'
import os
import subprocess
import uuid
def readFasta(file):
"""
Read textual input from the file object `file`, which is assumed to
be in FASTA format. Yields the sequence of (name, sequence) tuples.
"""
nm = None
seq = []
for l in file:
l = l.strip()
if len(l) and l[0] == '>':
if nm is not None:
yield (nm, ''.join(seq))
nm = l[1:].strip()
seq = []
else:
seq.append(l)
if nm is not None:
yield (nm, ''.join(seq))
def readFastq(file):
"""
Read textual input from the file object `file`, which is assumed
to be in line-oriented FASTQ format (not full multi-line FASTQ).
Yields the sequence of (name, sequence, label, quality) tuples.
"""
grp = []
for l in file:
l = l.strip()
grp.append(l)
if len(grp) == 4:
yield tuple(grp)
grp = []
if grp == 4:
yield tuple(grp)
def readFastqBlock(file, n=1024):
"""
Read textual input from the file object `file`, which is assumed
to be in line-oriented FASTQ format (not full multi-line FASTQ).
Yields a block of sequences at a time.
"""
grps = []
grp = [None, None, None, None]
i = 0
for l in file:
l = l.strip()
grp[i] = l
i += 1
if i == 4:
grps.append(grp)
if len(grps) == n:
yield grps
grps = []
grp = [None, None, None, None]
i = 0
if i == 4:
grps.append(grp)
if len(grps) > 0:
yield grps
def openFile(fn, mode='r'):
"""
Open a file "cleverly".
If the file name ends with ".gz" or ".bz2", it is compressed
or uncompressed on the fly (according to the mode).
In read mode (mode='r'), the filename '-' is interpreted as stdin.
In write mode (mode='w'), the filename '-' is interpreted as stdout.
"""
if mode == 'r':
if fn == "-":
return sys.stdin
if fn.endswith(".gz"):
p = subprocess.Popen(['gunzip', '-c', fn],
stdout=subprocess.PIPE)
return p.stdout
if fn.endswith(".bz2"):
p = subprocess.Popen(['bunzip2', '-c', fn],
stdout=subprocess.PIPE)
if mode == 'w':
if fn == "-":
return sys.stdout
if fn.endswith(".gz"):
p = subprocess.Popen(['gzip', '-9', '-', '>', fn],
stdin=subprocess.PIPE,
shell=True)
return p.stdin
if fn.endswith(".bz2"):
p = subprocess.Popen(['bzip2', '-9', '-', '>', fn],
stdout=subprocess.PIPE,
shell=True)
return p.stdin
return open(fn, mode)
_tmpfiles = []
class _AutoRemover:
def __init__(self):
_tmpfiles.append(set([]))
def __enter__(self):
return None
def __exit__(self, _t, _v, _tb):
assert len(_tmpfiles) > 0
fns = _tmpfiles.pop()
for fn in fns:
os.remove(fn)
def autoremove():
"""
Enable auto-removal of temporary files.
Use in a with statement:
with autoremove():
my_code()
"""
return _AutoRemover()
def tmpfile(suffix = ''):
"""
Create a unique filename for temporary storage. If auto-remove
is enabled (see `autoremove`), the file will be removed when
the __exit__() method of the autoremove object is invoked (i.e.
by exiting a with block).
"""
fn = os.getenv('TMPDIR', '/tmp') + '/' + str(uuid.uuid4()) + suffix
if len(_tmpfiles):
_tmpfiles[-1].add(fn)
return fn
|
Python
| 0
|
@@ -258,16 +258,31 @@
port os%0A
+import os.path%0A
import s
@@ -3422,16 +3422,55 @@
in fns:%0A
+ if os.path.isfile(fn):%0A
|
05c210f1a5f83ebbea2319f48ba58eb054b32ce2
|
fix indent
|
q_out_panel.py
|
q_out_panel.py
|
from . import chain
#show_q_output
class QOutPanelCommand(chain.ChainCommand):
def do(self, edit, input=None):
panel = self.view.window().get_output_panel("q")
syntax_file = "Packages/q KDB/syntax/q_output.tmLanguage"
try:
sublime.load_binary_resource(syntax_file)
except Exception:
continue
panel.set_syntax_file(syntax_file)
panel.settings().set("word_wrap", False)
panel.set_read_only(False)
panel.insert(edit, panel.size(), input)
panel.set_read_only(True)
self.view.window().run_command("show_panel", {"panel": "output.q"})
return '' #return something so that the chain will continue
|
Python
| 0.00006
|
@@ -223,61 +223,37 @@
e%22%0A%09
- try:%0A%09 sublime.load_binary_resourc
+%09try:%0A%09%09%09panel.set_syntax_fil
e(sy
@@ -264,24 +264,17 @@
_file)%0A%09
-
+%09
except E
@@ -288,54 +288,48 @@
n:%0A%09
- continue%0A%0A%09%09panel.set_
+%09%09print(%22Unable to load
syntax
-_
+
file
-(
+: %22,
synt
@@ -337,16 +337,17 @@
x_file)%0A
+%0A
%09%09panel.
|
95fd70cfe1f4ca972d1e31e431dad87b9bab84fc
|
FIX imports
|
addons/infrastructure_x/database.py
|
addons/infrastructure_x/database.py
|
# -*- coding: utf-8 -*-
from openerp import osv, models, fields, api, _
from openerp.exceptions import except_orm, Warning, RedirectWarning
import openerplib
import xmlrpclib
from dateutil.relativedelta import relativedelta
from datetime import datetime
# from fabric.api import local, settings, abort, run, cd, env, sudo
# import os
class database(models.Model):
""""""
# TODO agregar campos calculados
# Cantidad de usuarios
# Modulos instalados
# Ultimo acceso
_inherit = 'infrastructure.database'
_sql_constraints = [
('name_uniq', 'unique(name, server_id)',
'Database Name Must be Unique per server'),
]
server_id = fields.Many2one('infrastructure.server', string='Server', related='instance_id.environment_id.server_id', store=True, readonly=True,)
protected_db = fields.Boolean(string='Protected DB?', related='database_type_id.protect_db', store=True, readonly=True,)
color = fields.Integer(string='Color', related='database_type_id.color', store=True, readonly=True,)
deactivation_date = fields.Date(string='Deactivation Date', compute='get_deact_date', store=True, readonly=False,)
@api.one
def unlink(self):
if self.state not in ('draft', 'cancel'):
raise Warning(_('You cannot delete a database which is not draft or cancelled.'))
return super(database, self).unlink()
@api.onchange('database_type_id')
def onchange_database_type_id(self):
if self.database_type_id:
self.name = self.database_type_id.prefix + '_'
self.db_back_up_policy_ids = self.database_type_id.db_back_up_policy_ids
@api.one
@api.depends('database_type_id','issue_date')
def get_deact_date(self):
deactivation_date = False
if self.issue_date and self.database_type_id.auto_deactivation_days:
deactivation_date = (datetime.strptime(self.issue_date, '%Y-%m-%d') + relativedelta(days=self.database_type_id.auto_deactivation_days))
self.deactivation_date = deactivation_date
@api.one
def get_sock(self):
base_url = self.instance_id.main_hostname # base_url = self.instance_id.environment_id.server_id.main_hostname
server_port = 80 # server_port = self.instance_id.xml_rpc_port
rpc_db_url = 'http://%s:%d/xmlrpc/db' % (base_url, server_port)
return xmlrpclib.ServerProxy(rpc_db_url)
@api.one
def create_db(self):
sock = self.get_sock()[0]
new_db_name = self.name
demo = self.demo_data
user_password = 'admin'
lang = False # lang = 'en_US'
try:
sock.create_database(self.instance_id.admin_pass, new_db_name, demo, lang, user_password)
except:
raise Warning(_('Unable to create Database.'))
self.signal_workflow('sgn_to_active')
return True
@api.one
def drop_db(self):
sock = self.get_sock()[0]
try:
sock.drop(self.instance_id.admin_pass, self.name)
except:
raise Warning(_('Unable to drop Database. If you are working in an instance with "workers" then you can try restarting service.'))
self.signal_workflow('sgn_cancel')
@api.one
def dump_db(self):
raise Warning(_('Not Implemented yet'))
# TODO arreglar esto para que devuelva el archivo y lo descargue
sock = self.get_sock()[0]
try:
return sock.dump(self.instance_id.admin_pass, self.name)
except:
raise Warning(_('Unable to dump Database. If you are working in an instance with "workers" then you can try restarting service.'))
@api.one
def kill_db_connection(self):
self.server_id.get_env()
psql_command = "/SELECT pg_terminate_backend(pg_stat_activity.procpid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '"
psql_command += self.name + " AND procpid <> pg_backend_pid();"
sudo('psql -U postgres -c ' + psql_command)
@api.one
def upload_mail_server_config(self):
# TODO implementar esta funcion
raise Warning(_('Not Implemented yet'))
@api.one
def config_catchall(self):
# TODO implementar esta funcion
raise Warning(_('Not Implemented yet'))
@api.one
def apply_attachment_type(self):
# TODO implementar esta funcion
raise Warning(_('Not Implemented yet'))
# TODO implementar cambio de usuario de postgres al duplicar una bd o de manera manual.
# Al parecer da error por el parametro que se alamcena database.uuid
# Para eso podemos ver todo el docigo que esta en db.py, sobre todo esta parte:
# registry = openerp.modules.registry.RegistryManager.new(db)
# with registry.cursor() as cr:
# if copy:
# # if it's a copy of a database, force generation of a new dbuuid
# registry['ir.config_parameter'].init(cr, force=True)
# if filestore_path:
# filestore_dest = registry['ir.attachment']._filestore(cr, SUPERUSER_ID)
# shutil.move(filestore_path, filestore_dest)
# if openerp.tools.config['unaccent']:
# try:
# with cr.savepoint():
# cr.execute("CREATE EXTENSION unaccent")
# except psycopg2.Error:
# pass
@api.one
def duplicate_db(self, new_database_name):
new_db = self.copy({'name': new_database_name})
sock = self.get_sock()[0]
try:
sock.duplicate_database(self.instance_id.admin_pass, self.name, new_database_name)
except:
raise Warning(_('Unable to duplicate Database. If you are working in an instance with "workers" then you can try restarting service.'))
new_db.signal_workflow('sgn_to_active')
# TODO retornar accion de ventana a la bd creada
@api.one
def back_up_now_db(self):
# TODO implementar esto
raise Warning(_('Not Implemented yet'))
# def connect_to_openerp(self, cr, uid, inst_id, parameters, context=None):
# param = parameters
# base_url = param[inst_id]['base_url']
# server_port = int(param[inst_id]['server_port'])
# admin_name = param[inst_id]['admin_name']
# admin_pass = param[inst_id]['admin_pass']
# database = param[inst_id]['database']
# #domain = database + '.' + param[inst_id]['base_url']
# domain = base_url
# connection = openerplib.get_connection(hostname=domain, database=database, \
# login=admin_name, password=admin_pass, port=server_port)
# return connection
|
Python
| 0.000001
|
@@ -247,18 +247,16 @@
atetime%0A
-#
from fab
@@ -313,18 +313,16 @@
v, sudo%0A
-#
import o
|
468544c29071e07dc0a8923d8924b5ec43f529e4
|
Fix incorrect hash calculation for files larger than 4MB
|
qiniu/utils.py
|
qiniu/utils.py
|
# -*- coding: utf-8 -*-
from hashlib import sha1
from base64 import urlsafe_b64encode, urlsafe_b64decode
from .config import _BLOCK_SIZE
from .compat import b, s
try:
import zlib
binascii = zlib
except ImportError:
zlib = None
import binascii
def urlsafe_base64_encode(data):
"""urlsafe的base64编码:
对提供的数据进行urlsafe的base64编码。规格参考:
http://developer.qiniu.com/docs/v6/api/overview/appendix.html#urlsafe-base64
Args:
data: 待编码的数据,一般为字符串
Returns:
编码后的字符串
"""
ret = urlsafe_b64encode(b(data))
return s(ret)
def urlsafe_base64_decode(data):
"""urlsafe的base64解码:
对提供的urlsafe的base64编码的数据进行解码
Args:
data: 待解码的数据,一般为字符串
Returns:
解码后的字符串。
"""
ret = urlsafe_b64decode(s(data))
return ret
def file_crc32(filePath):
"""计算文件的crc32检验码:
Args:
filePath: 待计算校验码的文件路径
Returns:
文件内容的crc32校验码。
"""
crc = 0
with open(filePath, 'rb') as f:
for block in _file_iter(f, _BLOCK_SIZE):
crc = binascii.crc32(block, crc) & 0xFFFFFFFF
return crc
def crc32(data):
"""计算输入流的crc32检验码:
Args:
data: 待计算校验码的字符流
Returns:
输入流的crc32校验码。
"""
return binascii.crc32(b(data)) & 0xffffffff
def _file_iter(input_stream, size, offset=0):
"""读取输入流:
Args:
input_stream: 待读取文件的二进制流
size: 二进制流的大小
Raises:
IOError: 文件流读取失败
"""
input_stream.seek(offset)
d = input_stream.read(size)
while d:
yield d
d = input_stream.read(size)
def _sha1(data):
"""单块计算hash:
Args:
data: 待计算hash的数据
Returns:
输入数据计算的hash值
"""
h = sha1()
h.update(data)
return h.digest()
def etag_stream(input_stream):
"""计算输入流的etag:
etag规格参考 http://developer.qiniu.com/docs/v6/api/overview/appendix.html#qiniu-etag
Args:
input_stream: 待计算etag的二进制流
Returns:
输入流的etag值
"""
array = [_sha1(block) for block in _file_iter(input_stream, _BLOCK_SIZE)]
if len(array) == 1:
data = array[0]
prefix = b('\x16')
else:
sha1_str = b('').join(array)
data = _sha1(sha1_str)
prefix = b('\x96')
return urlsafe_base64_encode(prefix + data)
def etag(filePath):
"""计算文件的etag:
Args:
filePath: 待计算etag的文件路径
Returns:
输入文件的etag值
"""
with open(filePath, 'rb') as f:
return etag_stream(f)
def entry(bucket, key):
"""计算七牛API中的数据格式:
entry规格参考 http://developer.qiniu.com/docs/v6/api/reference/data-formats.html
Args:
bucket: 待操作的空间名
key: 待操作的文件名
Returns:
符合七牛API规格的数据格式
"""
if key is None:
return urlsafe_base64_encode('{0}'.format(bucket))
else:
return urlsafe_base64_encode('{0}:{1}'.format(bucket, key))
|
Python
| 0.000001
|
@@ -2117,16 +2117,14 @@
= b
-(
'%5Cx16'
-)
%0A
@@ -2220,16 +2220,14 @@
= b
-(
'%5Cx96'
-)
%0A
|
593f51e50d0e492862d98045f4ba2770a4ff341d
|
Remove unneeded imports
|
src/ggrc_basic_permissions/contributed_roles.py
|
src/ggrc_basic_permissions/contributed_roles.py
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
from ggrc.extensions import get_extension_modules
from ggrc.models import Program, Audit
from .roles import (
Auditor, AuditorProgramReader, AuditorReader, ObjectEditor,
ProgramAuditEditor, ProgramAuditOwner, ProgramAuditReader,
ProgramBasicReader, ProgramCreator, ProgramEditor, ProgramMappingEditor,
ProgramOwner, ProgramReader, Reader, gGRC_Admin,
)
DECLARED_ROLE = "CODE DECLARED ROLE"
def contribute_role_permissions(permissions, additional_permissions):
for action, resource_permissions in additional_permissions.items():
permissions.setdefault(action, list())
for resource_permission in resource_permissions:
permissions[action].append(resource_permission)
return permissions
def get_declared_role(rolename, resolved_roles={}):
if rolename in resolved_roles:
return resolved_roles[rolename]
declarations = lookup_declarations()
if rolename in declarations:
role_definition = declarations[rolename]
role_contributions = lookup_contributions(rolename)
contribute_role_permissions(role_definition.permissions, role_contributions)
resolved_roles[rolename] = role_definition
return role_definition
return None
def lookup_declarations(declarations={}):
if len(declarations) == 0:
extension_modules = get_extension_modules()
for extension_module in extension_modules:
ext_declarations = getattr(extension_module, "ROLE_DECLARATIONS", None)
if ext_declarations:
declarations.update(ext_declarations.roles())
if len(declarations) == 0:
declarations[None] = None
if None in declarations:
return {}
else:
return declarations
def lookup_contributions(rolename):
extension_modules = get_extension_modules()
contributions = {}
for extension_module in extension_modules:
ext_contributions = getattr(extension_module, "ROLE_CONTRIBUTIONS", None)
if ext_contributions:
ext_role_contributions = ext_contributions.contributions_for(rolename)
contribute_role_permissions(contributions, ext_role_contributions)
return contributions;
def lookup_role_implications(rolename, context_implication):
extension_modules = get_extension_modules()
role_implications = []
for extension_module in extension_modules:
ext_implications = getattr(extension_module, "ROLE_IMPLICATIONS", None)
if ext_implications:
role_implications.extend(
ext_implications.implications_for(rolename, context_implication))
return role_implications
class RoleDeclarations(object):
"""
A RoleDeclarations object provides the names of roles declared by this
extension.
A role declaration is an object with 3 properties: scope, description, and
permissions. Scope and descriptions are strings, permissions MUST be a
dict.
"""
def roles(self):
return {}
class RoleContributions(object):
"""
A RoleContributions object provides role definition dictionaries by name.
"""
def contributions_for(self, rolename):
"""
    Look up a method on self for the role name; the method's return value is
    the contribution.
"""
contributions = getattr(self.__class__, 'contributions', {})
if rolename in contributions:
return contributions[rolename]
method = getattr(self.__class__, rolename, None)
if method:
return method(self)
return {}
class RoleImplications(object):
def implications_for(self, rolename, context_implication):
"""
Return a list of rolenames implied for the given rolename, or an empty
list.
"""
return []
class BasicRoleDeclarations(RoleDeclarations):
def roles(self):
return {
'AuditorReader': AuditorReader,
'Reader': Reader,
'ProgramCreator': ProgramCreator,
'ObjectEditor': ObjectEditor,
'ProgramBasicReader': ProgramBasicReader,
'ProgramMappingEditor': ProgramMappingEditor,
'ProgramOwner': ProgramOwner,
'ProgramEditor': ProgramEditor,
'ProgramReader': ProgramReader,
'AuditorProgramReader': AuditorProgramReader,
'ProgramAuditOwner': ProgramAuditOwner,
'ProgramAuditEditor': ProgramAuditEditor,
'ProgramAuditReader': ProgramAuditReader,
'Auditor': Auditor,
}
class BasicRoleImplications(RoleImplications):
# (Source Context Type, Context Type)
# -> Source Role -> Implied Role for Context
implications = {
('Program', 'Audit'): {
'ProgramOwner': ['ProgramAuditOwner'],
'ProgramEditor': ['ProgramAuditEditor'],
'ProgramReader': ['ProgramAuditReader'],
},
('Audit', 'Program'): {
'Auditor': ['AuditorProgramReader'],
},
('Audit', None): {
'Auditor': ['AuditorReader'],
},
(None, None): {
'ProgramCreator': ['ObjectEditor'],
},
(None, 'Program'): {
'ProgramCreator': ['ProgramMappingEditor'],
'ObjectEditor': ['ProgramMappingEditor'],
'Reader': ['ProgramReader'],
},
}
def implications_for(self, rolename, context_implication):
'''Given a role assignment in context return the implied role assignments
in src_context.
'''
src_context_scope = context_implication.source_context_scope
context_scope = context_implication.context_scope
result = self.implications.get((src_context_scope, context_scope), {})\
.get(rolename, list())
return result
|
Python
| 0.000004
|
@@ -289,47 +289,8 @@
les%0A
-from ggrc.models import Program, Audit%0A
from
|
62398ce1fde0402a9c4b77ff018e47716ba1fdd3
|
allow restricting the refresh_useractions command by user
|
tndata_backend/goals/management/commands/refresh_useractions.py
|
tndata_backend/goals/management/commands/refresh_useractions.py
|
import logging
from django.core.management.base import BaseCommand
from goals.models import CustomAction, UserAction
logger = logging.getLogger("loggly_logs")
class Command(BaseCommand):
help = 'Updates the next_trigger_date field for stale UserActions and CustomActions.'
def handle(self, *args, **options):
count = 0
for ua in UserAction.objects.stale():
count += 1
ua.save(update_triggers=True) # fields get refreshed on save.
msg = "Refreshed Trigger Date for {0} UserActions".format(count)
logger.error(msg)
self.stderr.write(msg)
count = 0
for ca in CustomAction.objects.stale():
count += 1
ca.save() # fields get refreshed on save.
msg = "Refreshed Trigger Date for {0} CustomActions".format(count)
logger.error(msg)
self.stderr.write(msg)
|
Python
| 0.000001
|
@@ -27,45 +27,138 @@
o.co
-re.management.base import BaseCommand
+ntrib.auth import get_user_model%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.db.models import Q%0A
%0Afro
@@ -368,16 +368,883 @@
ions.'%0A%0A
+ def add_arguments(self, parser):%0A parser.add_argument(%0A '--user',%0A action='store',%0A dest='user',%0A default=None,%0A help=(%22Restrict this command to the given User. %22%0A %22Accepts a username, email, or id%22)%0A )%0A%0A def _user_kwargs(self, user):%0A User = get_user_model()%0A%0A kwargs = %7B%7D # Keyword arguments that get passed to our action querysets.%0A if user:%0A try:%0A if user.isnumeric():%0A criteria = Q(id=user)%0A else:%0A criteria = (Q(username=user) %7C Q(email=user))%0A kwargs%5B'user'%5D = User.objects.get(criteria)%0A except User.DoesNotExist:%0A msg = %22Could not find user: %7B0%7D%22.format(user)%0A raise CommandError(msg)%0A return kwargs%0A%0A
def
@@ -1285,32 +1285,85 @@
count = 0%0A
+ kwargs = self._user_kwargs(options%5B'user'%5D)%0A%0A
for ua i
@@ -1381,32 +1381,40 @@
n.objects.stale(
+**kwargs
):%0A c
@@ -1693,16 +1693,24 @@
s.stale(
+**kwargs
):%0A
|
e16e0676b466bce1da42ed389f0c7cb27496c9b4
|
add in original column values for better reporting
|
delta.py
|
delta.py
|
import MySQLdb
import csv
import os
import datetime
from numbers import Number
from decimal import *
from collections import deque
import itertools
import csv
# if we are in debug mode, make sure we are verbose with ssh logging too
env = {}
env["dbuser"] = "<user>"
env["dbpass"] = "<pass>"
env["dbhost"] = "<host>"
def db_connect():
return MySQLdb.connect(host=env["dbhost"], port=3306, user=env["dbuser"], passwd=env["dbpass"])
def calc_per_change(a,b):
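	# Percentage change from a to b rounded to 2 decimal places; returns 0 when
	# either value is non-numeric or a is not positive (avoids division by zero).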
getcontext().prec = 2
return round(((((b-a)/Decimal(a))*100) if (isinstance(a, Number) and isinstance(b,Number) and a > 0) else 0),2)
def get_modded_rows(sql,degrees):
#db work to get data
db = db_connect()
c = db.cursor()
c.execute(sql)
#get column names
col_names = [i[0] for i in c.description]
#setup the list of queues to hold tracked data
degreeQueues = []
for degree in degrees:
degreeQueues.append(deque([],degree))
#first get the full dataset
currentRow = c.fetchone()
headerDone = False
with open('outFile.csv', 'w') as csvfile:
outcsv = csv.writer(csvfile, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)
while currentRow is not None:
#get only numbers in the current row
merged = zip(col_names, currentRow)
#only get the numbers, can't divide strings! Also make parallel lists of labels and values
currentRow = [x for x in merged if isinstance(x[1],Number)]
filteredRow = map(lambda x : x[1], currentRow)
filteredNames = map(lambda x : x[0], currentRow)
#now, for each degree we want to calc
for q in degreeQueues:
#first check if we have enough data to do this calc for this degree, we know that once the deque is full we can start processing
if q.maxlen == len(q):
#This is a hack, need to reorder this code to allow header generation earlier
if headerDone == False:
outcsv.writerow(map(lambda x : "{}_{}".format(x,len(q)),filteredNames))
headerDone = True
#loop through each entry and add to the output file
#so this code does
# 1. Extracts all previous rows in this deque horizontally - (zip(*q))
# 2. and then sums them vertically - map(sum, zip(*q))
# 3. then extracts the sum next to the current row - zip(map(sum, zip(*q)), filteredRow)
					# 4. next to getting the number we are on for reference - enumerate(
# 5. It then loops through each one where we will calculate the change
outcsv.writerow([calc_per_change(a/len(q),b) for a,b in zip(map(sum, zip(*q)), filteredRow)])
#This is for debugging only
#for i, (a,b) in enumerate(zip(map(sum, zip(*q)), filteredRow)):
# print i, "sumed: ", a, "averaged: ", a/len(q), "current row:", b, "Percent change: ", calc_per_change(a/len(q),b), "column name: ", filteredNames[i], "degree: ", len(q)
#add the row we just processed to the historical list for processing on next iteration
q.append(filteredRow)
#get the next row
currentRow = c.fetchone()
#cleanup time
c.close()
db.close()
def process(sql, degreeNums):
degs = []
for deg in degreeNums:
try:
degs.append(int(deg))
except:
print "Gotta give me all ints for the degrees friend, that other stuff.. not gonna cut it"
return
get_modded_rows(sql, degs)
process('select * from atmosphere.ops_weekly_health_report order by start_of_week asc limit 10000', ["3"])
|
Python
| 0.000001
|
@@ -141,16 +141,44 @@
ertools%0A
+from itertools import chain%0A
import c
@@ -982,16 +982,17 @@
hone()%0A%0A
+%0A
%09headerD
@@ -1875,16 +1875,46 @@
riterow(
+list(chain(*zip(filteredNames,
map(lamb
@@ -1960,16 +1960,19 @@
dNames))
+)))
%0A%09%09%09%09%09%09h
@@ -2460,32 +2460,48 @@
ge%0A%09%09%09%09%09
-outcsv.writerow(
+r = list(chain(*zip(filteredRow,
%5Bcalc_pe
@@ -2569,16 +2569,42 @@
edRow)%5D)
+))%0A%09%09%09%09%09outcsv.writerow(r)
%0A%0A%09%09%09%09%09#
@@ -2705,15 +2705,15 @@
)):%0A
+%09
%09%09%09%09%09#
-%09
prin
|
5b6587cbe03ff79a29a400fb1f9b29d889b4edc5
|
Make executable
|
appid.py
|
appid.py
|
# Find a Steam appid given its name
import json
import os.path
import sys
from fuzzywuzzy import process, fuzz # ImportError? pip install 'fuzzywuzzy[speedup]'
CACHE_FILE = os.path.abspath(__file__ + "/../appid.json")
try:
with open(CACHE_FILE) as f:
appids = json.load(f)
except FileNotFoundError:
import requests # ImportError? pip install requests
print("Downloading Steam appid list...")
r = requests.get("https://api.steampowered.com/ISteamApps/GetAppList/v0001/")
r.raise_for_status()
data = r.json()
appids = {app["name"]: app["appid"] for app in data["applist"]["apps"]["app"]}
with open(CACHE_FILE, "w") as f:
json.dump(appids, f)
print("Downloaded and cached.")
if len(sys.argv) == 1:
print("TODO: Use os.getcwd()")
sys.exit(0)
appnames = list(appids)
def shortest_token_set_ratio(query, choice):
"""Like fuzz.token_set_ratio, but breaks ties by choosing the shortest"""
return fuzz.token_set_ratio(query, choice) * 1000 + 1000 - len(choice)
def show_matches(target):
for name, score in process.extract(target, appnames, limit=10, scorer=shortest_token_set_ratio):
print("\t[%3d%% - %7s] %s" % (score//1000, appids[name], name))
# for arg in sys.argv[1:]: show_matches(arg) # Allow multiple args
show_matches(" ".join(sys.argv[1:])) # Allow unquoted multi-word names
|
Python
| 0.004691
|
@@ -1,8 +1,31 @@
+#!/usr/bin/env python3%0A
# Find a
|
c2dc0ba54279ac555e6c76f8846f873cbd2c238f
|
Add Object (Asset) fields and supporting types from API docs
|
asset.py
|
asset.py
|
import httplib2
from urlparse import urljoin, urlparse, urlunparse
from datetime import datetime
import re
from typepad.remote import RemoteObject, BASE_URL
from typepad import fields
class Link(RemoteObject):
fields = {
'rel': fields.Something(),
'href': fields.Something(),
'type': fields.Something(),
'width': fields.Something(),
'height': fields.Something(),
'duration': fields.Something(),
}
class TypedList(RemoteObject):
@classmethod
def get(cls, url, http=None, startIndex=None, maxResults=None, **kwargs):
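        # Append the optional paging parameters (start-index / max-results)
        # to the URL's query string before delegating to the base class get().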
queryopts = {'start-index': startIndex, 'max-results': maxResults}
query = '&'.join(['%s=%d' % (k, v) for k, v in queryopts.iteritems() if v is not None])
if query:
parts = list(urlparse(url))
if parts[4]:
parts[4] += '&' + query
else:
parts[4] = query
url = urlunparse(parts)
return super(TypedList, cls).get(url, http=http, **kwargs)
def List(entryClass):
class SpecificTypedList(TypedList):
fields = {
'total-results': fields.Something(),
'start-index': fields.Something(),
'links': fields.List(fields.Object(Link)),
'entries': fields.List(fields.Object(entryClass)),
}
return SpecificTypedList
class User(RemoteObject):
fields = {
# documented fields
'id': fields.Something(),
'displayName': fields.Something(),
'profileAlias': fields.Something(),
'aboutMe': fields.Something(),
'interests': fields.List(fields.Something()),
'urls': fields.List(fields.Something()),
'accounts': fields.List(fields.Something()),
'links': fields.List(fields.Something()),
'object-type': fields.Something(),
# astropad extras
'email': fields.Something(),
'userpic': fields.Something(),
'uri': fields.Something(),
}
def relationships(self, rel='followers', **kwargs):
url = '%susers/%s/relationships/@%s.json' % (BASE_URL, self.userid, rel)
return List(entryClass=UserRelationship).get(url, **kwargs)
@property
def userid(self):
# yes, this is stupid, but damn it, I need this for urls
# tag:typepad.com,2003:user-50
return self.id.split('-', 1)[1]
@property
def permalink(self):
## TODO link to typepad profile?
return self.uri
class UserRelationship(RemoteObject):
fields = {
#'status': fields.Something(),
'source': fields.Object(User),
'target': fields.Object(User),
}
class Object(RemoteObject):
fields = {
'id': fields.Something(),
#'control': fields.Object(Control),
'title': fields.Something(),
'content': fields.Something(),
'link': fields.Something(),
'published': fields.Datetime(),
'updated': fields.Datetime(),
'authors': fields.List(fields.Object(User)),
}
@property
def assetid(self):
# yes, this is stupid, but damn it, I need this for urls
# tag:typepad.com,2003:asset-1794
return self.id.split('-', 1)[1]
@property
def author(self):
try:
return self.authors[0]
except IndexError:
return None
def comments(self, **kwargs):
assert self._id
url = re.sub(r'\.json$', '/comments.json', self._id)
return List(entryClass=Object).get(url, **kwargs)
class Event(RemoteObject):
fields = {
'id': fields.Something(),
'verbs': fields.List(fields.Something()),
# TODO: vary these based on verb content? oh boy
'actor': fields.Object(User),
'object': fields.Object(Object),
}
class Group(RemoteObject):
fields = {
'id': fields.Something(),
'displayName': fields.Something(),
'urls': fields.List(fields.Something()),
'links': fields.List(fields.Something()),
'object-type': fields.List(fields.Something()),
}
def users(self, **kwargs):
assert self._id
userurl = re.sub(r'\.json$', '/users.json', self._id)
return List(entryClass=User).get(userurl, **kwargs)
def assets(self, **kwargs):
assert self._id
asseturl = re.sub(r'\.json$', '/assets.json', self._id)
return List(entryClass=User).get(asseturl, **kwargs)
def events(self, **kwargs):
assert self._id
eventurl = re.sub(r'\.json$', '/events.json', self._id)
return List(entryClass=Event).get(eventurl, **kwargs)
def comments(self, **kwargs):
assert self._id
commenturl = re.sub(r'\.json$', '/comments.json', self._id)
return List(entryClass=Event).get(commenturl, **kwargs)
|
Python
| 0
|
@@ -2760,22 +2760,33 @@
%0A%0Aclass
-Object
+PublicationStatus
(RemoteO
@@ -2821,83 +2821,361 @@
'
-id': fields.Something(),%0A #'control': fields.Object(Control
+published': fields.Something(),%0A 'spam': fields.Something(),%0A %7D%0A%0Aclass ObjectRef(RemoteObject):%0A fields = %7B%0A 'ref': fields.Something(),%0A 'href': fields.Something(),%0A 'type': fields.Something(),%0A %7D%0A%0Aclass Object(RemoteObject):%0A fields = %7B%0A # documented fields%0A 'id': fields.Something(
),%0A
@@ -3189,16 +3189,19 @@
'title':
+
fie
@@ -3230,17 +3230,106 @@
'
-content':
+published': fields.Datetime(),%0A 'updated': fields.Datetime(),%0A 'summary':
f
@@ -3356,20 +3356,23 @@
'
-link
+content
':
@@ -3399,36 +3399,82 @@
-'published
+# TODO: categories should be Tags?%0A 'categories
':
+
fields.
Datetime
@@ -3469,59 +3469,280 @@
lds.
-Datetime(),%0A 'updated': fields.Datetime(),
+List(fields.Something()),%0A 'object-types': field.List(fields.Something()),%0A 'status': fields.Object(PublicationStatus),%0A 'links' fields.List(fields.Something()),%0A 'in-reply-to': fields.Object(AssetRef),%0A%0A # astropad extras
%0A
@@ -3756,16 +3756,19 @@
uthors':
+
field
|
d2d090383d93e89bd8ce07d533715612cf472152
|
Support lists of nodes in astpp
|
astpp.py
|
astpp.py
|
"""
A pretty-printing dump function for the ast module. The code was copied from
the ast.dump function and modified slightly to pretty-print.
Alex Leone (acleone ~AT~ gmail.com), 2010-01-30
"""
from ast import *
def dump(node, annotate_fields=True, include_attributes=False, indent=' '):
"""
Return a formatted dump of the tree in *node*. This is mainly useful for
debugging purposes. The returned string will show the names and the values
for fields. This makes the code impossible to evaluate, so if evaluation is
wanted *annotate_fields* must be set to False. Attributes such as line
numbers and column offsets are not dumped by default. If this is wanted,
*include_attributes* can be set to True.
"""
def _format(node, level=0):
if isinstance(node, AST):
fields = [(a, _format(b, level)) for a, b in iter_fields(node)]
if include_attributes and node._attributes:
fields.extend([(a, _format(getattr(node, a), level))
for a in node._attributes])
return ''.join([
node.__class__.__name__,
'(',
', '.join(('%s=%s' % field for field in fields)
if annotate_fields else
(b for a, b in fields)),
')'])
elif isinstance(node, list):
lines = ['[']
lines.extend((indent * (level + 2) + _format(x, level + 2) + ','
for x in node))
if len(lines) > 1:
lines.append(indent * (level + 1) + ']')
else:
lines[-1] += ']'
return '\n'.join(lines)
return repr(node)
if not isinstance(node, AST):
raise TypeError('expected AST, got %r' % node.__class__.__name__)
return _format(node)
if __name__ == '__main__':
import sys
for filename in sys.argv[1:]:
print('=' * 50)
print('AST tree for', filename)
print('=' * 50)
f = open(filename, 'r')
fstr = f.read()
f.close()
print(dump(parse(fstr, filename=filename), include_attributes=True))
print()
|
Python
| 0
|
@@ -1728,16 +1728,98 @@
r(node)%0A
+ if isinstance(node, list):%0A return '%5Cn'.join(_format(n) for n in node)%0A
if n
|
6b718b0fbfac2c7206a50a451a443079aad362ac
|
Make ESC navigate previous controllers
|
subiquity/controllers/__init__.py
|
subiquity/controllers/__init__.py
|
# Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import urwid
import urwid.curses_display
from subiquity.routes import Routes
from subiquity.palette import STYLES, STYLES_MONO
log = logging.getLogger('subiquity.controller')
class BaseControllerError(Exception):
""" Basecontroller exception """
pass
class BaseController:
def __init__(self, ui, opts):
self.ui = ui
self.opts = opts
def next_controller(self, *args, **kwds):
controller = Routes.next()
controller(self).show(*args, **kwds)
def prev_controller(self, *args, **kwds):
controller = Routes.prev()
controller(self).show(*args, **kwds)
def current_controller(self, *args, **kwds):
controller = Routes.current()
return controller(self)
def redraw_screen(self):
if hasattr(self, 'loop'):
try:
self.loop.draw_screen()
except AssertionError as e:
log.critical("Redraw screen error: {}".format(e))
def set_alarm_in(self, interval, cb):
self.loop.set_alarm_in(interval, cb)
return
def update(self, *args, **kwds):
""" Update loop """
pass
def exit(self):
raise urwid.ExitMainLoop()
def header_hotkeys(self, key):
if key in ['q', 'Q', 'ctrl c']:
self.exit()
def set_body(self, w):
self.ui.set_body(w)
self.redraw_screen()
def set_header(self, title=None, excerpt=None):
self.ui.set_header(title, excerpt)
self.redraw_screen()
def set_footer(self, message):
self.ui.set_footer(message)
self.redraw_screen()
def run(self):
if not hasattr(self, 'loop'):
palette = STYLES
additional_opts = {
'screen': urwid.raw_display.Screen(),
'unhandled_input': self.header_hotkeys,
'handle_mouse': False
}
if self.opts.run_on_serial:
palette = STYLES_MONO
additional_opts['screen'] = urwid.curses_display.Screen()
else:
additional_opts['screen'].set_terminal_properties(colors=256)
additional_opts['screen'].reset_default_terminal_palette()
self.loop = urwid.MainLoop(
self.ui, palette, **additional_opts)
try:
self.set_alarm_in(0.05, self.begin)
self.loop.run()
except:
log.exception("Exception in controller.run():")
raise
def begin(self, *args, **kwargs):
""" Initializes the first controller for installation """
Routes.reset()
initial_controller = Routes.first()
initial_controller(self).show()
|
Python
| 0
|
@@ -1941,16 +1941,187 @@
, key):%0A
+ if key in %5B'esc'%5D and Routes.current_route_idx != 0:%0A Routes.prev()%0A current_route = Routes.current()%0A current_route(self).show()%0A
|
a1996022dd288b5a986cd07b2694f5af514296e4
|
Delete unnecessary annotation in examples/bucket_policy.py
|
examples/bucket_policy.py
|
examples/bucket_policy.py
|
import os
import oss2
import json
# The following code demonstrates how to use the bucket_policy-related APIs.
# See the official documentation for the detailed policy syntax rules.
# First initialize the AccessKeyId, AccessKeySecret and Endpoint.
# They can be read from environment variables, or replace placeholders such as "<你的AccessKeyId>" with real values.
#
# Taking the Hangzhou region as an example, the Endpoint can be:
#   http://oss-cn-hangzhou.aliyuncs.com
#   https://oss-cn-hangzhou.aliyuncs.com
# for access over HTTP and HTTPS respectively.
access_key_id = os.getenv('OSS_TEST_ACCESS_KEY_ID', '<你的AccessKeyId>')
access_key_secret = os.getenv('OSS_TEST_ACCESS_KEY_SECRET', '<你的AccessKeySecret>')
bucket_name = os.getenv('OSS_TEST_BUCKET', '<你的Bucket>')
endpoint = os.getenv('OSS_TEST_ENDPOINT', '<你的访问域名>')
# Make sure all of the parameters above have been filled in correctly
for param in (access_key_id, access_key_secret, bucket_name, endpoint):
assert '<' not in param, '请设置参数:' + param
# Create a Bucket object; all Object-related APIs are invoked through it
bucket = oss2.Bucket(oss2.Auth(access_key_id, access_key_secret), endpoint, bucket_name)
# Build the policy_text
policy=dict()
policy["Version"] = "1"
policy["Statement"] = []
statement = dict()
statement["Action"] = ["oss:PutObject"]
statement["Effect"] = "Allow"
statement["Resource"] = ["acs:oss:*:*:*/*"]
policy["Statement"].append(statement)
policy_text = json.dumps(policy)
# Put policy_text
print("Put policy text : ", policy_text)
bucket.put_bucket_policy(policy_text)
# Get bucket Policy
result = bucket.get_bucket_policy()
policy_json = json.loads(result.policy)
print("Get policy text: ", policy_json)
# Verify the returned policy
assert len(policy["Statement"]) == len(policy_json["Statement"])
assert policy["Version"] == policy_json["Version"]
policy_resource = policy["Statement"][0]["Resource"][0]
policy_json_resource = policy_json["Statement"][0]["Resource"][0]
assert policy_resource == policy_json_resource
# Delete the policy
result = bucket.delete_bucket_policy()
assert int(result.status)//100 == 2
|
Python
| 0.000001
|
@@ -296,29 +296,8 @@
com%0A
-# %E5%88%86%E5%88%AB%E4%BB%A5HTTP%E3%80%81HTTPS%E5%8D%8F%E8%AE%AE%E8%AE%BF%E9%97%AE%E3%80%82%0A
acce
|
268d23ebd987a30312694eb95322b1dad332c291
|
Update sender.py
|
adslproxy/sender.py
|
adslproxy/sender.py
|
import re
import time
import requests
from requests.exceptions import ConnectionError, ReadTimeout
from adslproxy.db import RedisClient
from adslproxy.config import *
import platform
if platform.python_version().startswith('2.'):
import commands as subprocess
elif platform.python_version().startswith('3.'):
import subprocess
else:
raise ValueError('python version must be 2 or 3')
class Sender():
def __init__(self):
self.proxy = None
self.headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3100.0 Safari/537.36'}
def get_ip(self, ifname=ADSL_IFNAME):
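        # Extract the ADSL interface's IPv4 address from `ifconfig` output.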
(status, output) = subprocess.getstatusoutput('ifconfig')
if status == 0:
pattern = re.compile(ifname + '.*?inet.*?(\d+\.\d+\.\d+\.\d+).*?netmask', re.S)
result = re.search(pattern, output)
if result:
ip = result.group(1)
return ip
def test_proxy(self, proxy):
try:
response = requests.get(TEST_URL, proxies={
'http': 'http://' + proxy,
'https': 'https://' + proxy
}, timeout=TEST_TIMEOUT,headers=self.headers)
if proxy != self.proxy:
print('new proxy',proxy)
if response.status_code == 200:
self.proxy = proxy
return True
elif proxy == self.proxy:
return False
except (ConnectionError, ReadTimeout):
return False
def remove_proxy(self):
self.redis = RedisClient()
self.redis.remove(CLIENT_NAME)
print('Successfully Removed Proxy')
def set_proxy(self, proxy):
self.redis = RedisClient()
if self.redis.set(CLIENT_NAME, proxy):
print('Successfully Set Proxy', proxy)
def adsl(self):
while True:
print('ADSL Start, Remove Proxy, Please wait')
(status, output) = subprocess.getstatusoutput(ADSL_BASH)
if status == 0:
print('ADSL Successfully')
ip = self.get_ip()
if ip:
print('New IP', ip)
print('Testing Proxy, Please Wait')
proxy = '{ip}:{port}'.format(ip=ip, port=PROXY_PORT)
if self.test_proxy(proxy):
print('Valid Proxy')
self.set_proxy(proxy)
print('Sleeping',ADSL_CYCLE + 10)
time.sleep(ADSL_CYCLE)
self.remove_proxy()
time.sleep(15)
else:
print('Invalid Proxy')
else:
print('Get IP Failed, Re Dialing')
time.sleep(ADSL_ERROR_CYCLE)
else:
print('ADSL Failed, Please Check')
time.sleep(ADSL_ERROR_CYCLE)
def run():
sender = Sender()
sender.adsl()
if __name__ == '__main__':
run()
|
Python
| 0.000001
|
@@ -1063,209 +1063,8 @@
ry:%0A
- response = requests.get(TEST_URL, proxies=%7B%0A 'http': 'http://' + proxy,%0A 'https': 'https://' + proxy%0A %7D, timeout=TEST_TIMEOUT,headers=self.headers)%0A
@@ -1140,60 +1140,8 @@
xy)%0A
- if response.status_code == 200:%0A
@@ -1171,20 +1171,16 @@
= proxy%0A
-
@@ -1288,39 +1288,8 @@
cept
- (ConnectionError, ReadTimeout)
:%0A
|
b3b753880a0a4e46c8e436f87d752f348503abb2
|
Comment out problematic line for now
|
examples/custom_plugin.py
|
examples/custom_plugin.py
|
"""
Defining a Custom Plugin
========================
Test the custom plugin demoed on the `Pythonic Perambulations
<http://jakevdp.github.io/blog/2014/01/10/d3-plugins-truly-interactive/>`_
blog. Hover over the points to see the associated sinusoid.
Use the toolbar buttons at the bottom-right of the plot to enable zooming
and panning, and to reset the view.
"""
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import mpld3
from mpld3 import plugins, utils
class LinkedView(plugins.PluginBase):
"""A simple plugin showing how multiple axes can be linked"""
JAVASCRIPT = """
mpld3.register_plugin("linkedview", LinkedViewPlugin);
LinkedViewPlugin.prototype = Object.create(mpld3.Plugin.prototype);
LinkedViewPlugin.prototype.constructor = LinkedViewPlugin;
LinkedViewPlugin.prototype.requiredProps = ["idpts", "idline", "data"];
LinkedViewPlugin.prototype.defaultProps = {}
function LinkedViewPlugin(fig, props){
mpld3.Plugin.call(this, fig, props);
};
LinkedViewPlugin.prototype.draw = function(){
var pts = mpld3.get_element(this.props.idpts);
var line = mpld3.get_element(this.props.idline);
var data = this.props.data;
function mouseover(d, i){
line.data = data[i];
line.elements().transition()
.attr("d", line.datafunc(line.data))
.style("stroke", this.style.fill);
}
pts.elements().on("mouseover", mouseover);
};
"""
def __init__(self, points, line, linedata):
if isinstance(points, matplotlib.lines.Line2D):
suffix = "pts"
else:
suffix = None
self.dict_ = {"type": "linkedview",
"idpts": utils.get_id(points, suffix),
"idline": utils.get_id(line),
"data": linedata}
fig, ax = plt.subplots(2)
# scatter periods and amplitudes
np.random.seed(0)
P = 0.2 + np.random.random(size=20)
A = np.random.random(size=20)
x = np.linspace(0, 10, 100)
data = np.array([[x, Ai * np.sin(x / Pi)]
for (Ai, Pi) in zip(A, P)])
points = ax[1].scatter(P, A, c=P + A,
s=200, alpha=0.5)
ax[1].set_xlabel('Period')
ax[1].set_ylabel('Amplitude')
# create the line object
lines = ax[0].plot(x, 0 * x, '-w', lw=3, alpha=0.5)
ax[0].set_ylim(-1, 1)
ax[0].set_title("Hover over points to see lines")
# transpose line data and add plugin
linedata = data.transpose(0, 2, 1).tolist()
plugins.connect(fig, LinkedView(points, lines[0], linedata))
mpld3.show()
|
Python
| 0
|
@@ -1417,16 +1417,53 @@
%7D%0A
+ // TODO: (@vladh) Fix this.%0A //
pts.ele
|
3665b8859f72ec416682857ab22f7e29fc30f0df
|
Add field on cached alignments to store more information
|
alignment/models.py
|
alignment/models.py
|
from django.db import models
# Create your models here.
class AlignmentConsensus(models.Model):
slug = models.SlugField(max_length=100, unique=True)
alignment = models.BinaryField()
|
Python
| 0
|
@@ -184,8 +184,103 @@
yField()
+%0A gn_consensus = models.BinaryField(blank=True) # Store conservation calculation for each GN
|
b1587da729c87ec39c38e0915cdf61bb3cc5f87c
|
remove unused imports in main
|
web3/main.py
|
web3/main.py
|
from __future__ import absolute_import
import warnings
from eth_utils import (
apply_to_return_value,
add_0x_prefix,
decode_hex,
encode_hex,
from_wei,
is_address,
is_checksum_address,
keccak,
remove_0x_prefix,
to_checksum_address,
to_wei,
)
from web3.admin import Admin
from web3.eth import Eth
from web3.iban import Iban
from web3.miner import Miner
from web3.net import Net
from web3.personal import Personal
from web3.shh import Shh
from web3.testing import Testing
from web3.txpool import TxPool
from web3.version import Version
from web3.providers.ipc import (
IPCProvider,
)
from web3.providers.rpc import (
HTTPProvider,
RPCProvider,
KeepAliveRPCProvider,
)
from web3.providers.tester import (
TestRPCProvider,
EthereumTesterProvider,
)
from web3.manager import (
RequestManager,
)
from web3.utils.decorators import (
deprecated_for,
)
from web3.utils.encoding import (
hex_encode_abi_type,
to_bytes,
to_int,
to_hex,
to_text,
)
def get_default_modules():
return {
"eth": Eth,
"shh": Shh,
"net": Net,
"personal": Personal,
"version": Version,
"txpool": TxPool,
"miner": Miner,
"admin": Admin,
"testing": Testing,
}
class Web3(object):
# Providers
HTTPProvider = HTTPProvider
RPCProvider = RPCProvider
KeepAliveRPCProvider = KeepAliveRPCProvider
IPCProvider = IPCProvider
TestRPCProvider = TestRPCProvider
EthereumTesterProvider = EthereumTesterProvider
# Managers
RequestManager = RequestManager
# Iban
Iban = Iban
# Encoding and Decoding
toBytes = staticmethod(to_bytes)
toInt = staticmethod(to_int)
toHex = staticmethod(to_hex)
toText = staticmethod(to_text)
# Currency Utility
toWei = staticmethod(to_wei)
fromWei = staticmethod(from_wei)
# Address Utility
isAddress = staticmethod(is_address)
isChecksumAddress = staticmethod(is_checksum_address)
toChecksumAddress = staticmethod(to_checksum_address)
def __init__(self, providers, middlewares=None, modules=None):
self.manager = RequestManager(self, providers, middlewares)
if modules is None:
modules = get_default_modules()
for module_name, module_class in modules.items():
module_class.attach(self, module_name)
@property
def middleware_stack(self):
return self.manager.middleware_stack
@property
def providers(self):
return self.manager.providers
def setProviders(self, providers):
self.manager.setProvider(providers)
@deprecated_for("the `manager` attribute")
def setManager(self, manager):
self.manager = manager
@property
@deprecated_for("`providers`, which is now a list")
def currentProvider(self):
return self.manager.providers[0]
@staticmethod
@apply_to_return_value(encode_hex)
def sha3(primitive=None, text=None, hexstr=None):
if isinstance(primitive, (bytes, int, type(None))):
input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)
return keccak(input_bytes)
raise TypeError(
"You called sha3 with first arg %r and keywords %r. You must call it with one of "
"these approaches: sha3(text='txt'), sha3(hexstr='0x747874'), "
"sha3(b'\\x74\\x78\\x74'), or sha3(0x747874)." % (
primitive,
{'text': text, 'hexstr': hexstr}
)
)
@classmethod
def soliditySha3(cls, abi_types, values):
"""
Executes sha3 (keccak256) exactly as Solidity does.
Takes list of abi_types as inputs -- `[uint24, int8[], bool]`
and list of corresponding values -- `[20, [-1, 5, 0], True]`
"""
if len(abi_types) != len(values):
raise ValueError(
"Length mismatch between provided abi types and values. Got "
"{0} types and {1} values.".format(len(abi_types), len(values))
)
hex_string = add_0x_prefix(''.join(
remove_0x_prefix(hex_encode_abi_type(abi_type, value))
for abi_type, value
in zip(abi_types, values)
))
return cls.sha3(hexstr=hex_string)
def isConnected(self):
for provider in self.providers:
if provider.isConnected():
return True
else:
return False
|
Python
| 0
|
@@ -37,25 +37,8 @@
rt%0A%0A
-import warnings%0A%0A
from
@@ -107,24 +107,8 @@
ix,%0A
- decode_hex,%0A
|
61e9b3db58c124cf41ede9fc9a3ad9c01e5bff81
|
add select related query for group social links
|
sociallinks/templatetags/sociallink_tags.py
|
sociallinks/templatetags/sociallink_tags.py
|
# -*- coding: utf-8 -*-
from django import template
from django.contrib.contenttypes.models import ContentType
from sociallinks.models import SocialLink, SocialLinkGroup
register = template.Library()
@register.assignment_tag
def obj_social_links(obj):
"""return list of social links for obj. Obj is instance of any model
registred in project
Usage:
{% obj_social_links user as user_links %}
{% for link in user_links %}
<a href="{{ link.link }}" class="{{ link.link_type.css_class }}">
{{ link.link_type.name }}
</a>
{% endfor %}
"""
content_type = ContentType.objects.get_for_model(obj.__class__)
return SocialLink.objects.filter(
content_type=content_type,
object_pk=obj.pk).select_related('link_type')
@register.assignment_tag
def group_social_links(slug):
"""return list of social links for slug
sociallinks.models.SocialLinkGroup.slug"""
group = SocialLinkGroup.objects.get(slug=slug)
return SocialLink.objects.filter(
link_group=group).select_related('link_type')
|
Python
| 0
|
@@ -1056,30 +1056,28 @@
select_related('link_type')%0A
-%0A%0A
|
1b9f944d9123a765068ea02296dcb38014183a91
|
add applogic.addShortcut convenience method
|
software/ddapp/src/python/ddapp/applogic.py
|
software/ddapp/src/python/ddapp/applogic.py
|
import os
import time
import math
import ddapp.vtkAll as vtk
import PythonQt
from PythonQt import QtCore
from PythonQt import QtGui
from ddapp import getDRCBaseDir as getDRCBase
from ddapp import botspy
_mainWindow = None
_defaultRenderView = None
def getMainWindow():
return _mainWindow
def quit():
QtGui.QApplication.instance().quit()
def getViewManager():
return getMainWindow().viewManager()
def getDRCView():
return _defaultRenderView or getMainWindow().viewManager().findView('DRC View')
def getSpreadsheetView():
return getMainWindow().viewManager().findView('Spreadsheet View')
def getCurrentView():
return _defaultRenderView or getMainWindow().viewManager().currentView()
def getCurrentRenderView():
view = getCurrentView()
if hasattr(view, 'camera'):
return view
def getOutputConsole():
return getMainWindow().outputConsole()
def getPythonConsole():
return PythonQt.dd._pythonManager.consoleWidget()
def showPythonConsole():
getPythonConsole().show()
def addWidgetToDock(widget, dockArea=QtCore.Qt.RightDockWidgetArea, action=None):
dock = QtGui.QDockWidget()
dock.setWidget(widget)
dock.setWindowTitle(widget.windowTitle)
getMainWindow().addDockWidget(dockArea, dock)
if action is None:
getMainWindow().addWidgetToViewMenu(dock)
else:
getMainWindow().addWidgetToViewMenu(dock, action)
return dock
def resetCamera(viewDirection=None, view=None):
view = view or getCurrentRenderView()
assert(view)
if viewDirection is not None:
camera = view.camera()
camera.SetPosition([0, 0, 0])
camera.SetFocalPoint(viewDirection)
camera.SetViewUp([0,0,1])
view.resetCamera()
view.render()
def setBackgroundColor(color, color2=None, view=None):
view = view or getCurrentRenderView()
assert(view)
if color2 is None:
color2 = color
ren = view.backgroundRenderer()
ren.SetBackground(color)
ren.SetBackground2(color2)
def displaySnoptInfo(info):
getMainWindow().statusBar().showMessage('Info: %d' % info)
def toggleStereoRender():
view = getCurrentRenderView()
assert(view)
renderWindow = view.renderWindow()
renderWindow.SetStereoRender(not renderWindow.GetStereoRender())
view.render()
def toggleCameraTerrainMode(view = None):
view = view or getCurrentRenderView()
assert(view)
iren = view.renderWindow().GetInteractor()
if isinstance(iren.GetInteractorStyle(), vtk.vtkInteractorStyleTerrain):
iren.SetInteractorStyle(vtk.vtkInteractorStyleTrackballCamera())
else:
iren.SetInteractorStyle(vtk.vtkInteractorStyleTerrain())
view.camera().SetViewUp(0,0,1)
view.render()
updateToggleTerrainAction(view)
def getToolBarActions():
return getActionsDict(getMainWindow().toolBarActions())
def getToolsMenuActions():
return getActionsDict(getMainWindow().toolsMenu().actions())
def getActionsDict(actions):
actionsDict = {}
for action in actions:
if action.name:
actionsDict[action.name] = action
return actionsDict
def updateToggleTerrainAction(view):
if not getMainWindow():
return
isTerrainMode = False
if hasattr(view, 'renderWindow'):
isTerrainMode = isinstance(view.renderWindow().GetInteractor().GetInteractorStyle(), vtk.vtkInteractorStyleTerrain)
getToolBarActions()['ActionToggleCameraTerrainMode'].checked = isTerrainMode
def onCurrentViewChanged(previousView, currentView):
updateToggleTerrainAction(currentView)
def addToolbarMacro(name, func):
toolbar = getMainWindow().macrosToolBar()
action = toolbar.addAction(name)
action.connect('triggered()', func)
def setupActions():
botApyAction = getToolsMenuActions()['ActionBotSpy']
botApyAction.connect(botApyAction, 'triggered()', botspy.startBotSpy)
def showErrorMessage(message, title='Error'):
QtGui.QMessageBox.warning(getMainWindow(), title, message)
def showInfoMessage(message, title='Info'):
QtGui.QMessageBox.information(getMainWindow(), title, message)
def startup(globals):
global _mainWindow
_mainWindow = globals['_mainWindow']
if 'DRC_BASE' not in os.environ:
showErrorMessage('DRC_BASE environment variable is not set')
return
if not os.path.isdir(getDRCBase()):
showErrorMessage('DRC_BASE directory does not exist: ' + getDRCBase())
return
_mainWindow.connect('resetCamera()', resetCamera)
_mainWindow.connect('toggleStereoRender()', toggleStereoRender)
_mainWindow.connect('toggleCameraTerrainMode()', toggleCameraTerrainMode)
setupActions()
vm = getViewManager()
vm.connect('currentViewChanged(ddViewBase*, ddViewBase*)', onCurrentViewChanged);
|
Python
| 0.000001
|
@@ -3743,24 +3743,204 @@
)', func)%0A%0A%0A
+def addShortcut(widget, keySequence, func):%0A shortcut = QtGui.QShortcut(QtGui.QKeySequence(keySequence), widget)%0A shortcut.connect('activated()', func)%0A return shortcut%0A%0A%0A
def setupAct
|
55efbaddf22b009175645dea9aba593740a7b4c9
|
Add message to queue if handcheck is not finished.
|
websocket.py
|
websocket.py
|
import uuid
import re
import hashlib
import base64
from twisted.internet.protocol import Protocol as BaseProtocol
handshake = '\
HTTP/1.1 101 Web Socket Protocol Handshake\r\n\
Upgrade: WebSocket\r\n\
Connection: Upgrade\r\n\
Sec-WebSocket-Accept: %s\r\n\r\n\
'
class WebSocketError(Exception): pass
class FrameError(Exception): pass
class Frame(object):
def __init__(self, buf):
self.buf = buf
self.msg = ""
self.mask = 0
self.key = ""
self.len = 0
self.fin = 0
self.payload = 0
self.opcode = 0
self.frame_length = 0
def _frameHeader(self):
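        # Parse the frame header (RFC 6455): FIN bit, opcode, 7-bit payload
        # length (with 16/64-bit extended forms) and the 4-byte masking key.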
buf = self.buf
if len(buf) < 2:
            raise FrameError("Incomplete Frame: HEADER DATA")
self.fin = ord(buf[0]) >> 7
self.opcode = ord(buf[0]) & 0b1111
self.payload = ord(buf[1]) & 0b1111111
buf = buf[2:]
if self.payload < 126:
self.len = self.payload
self.frame_length = 6 + self.len
if self.frame_length > len(self.buf):
                raise FrameError("Incomplete Frame: FRAME DATA")
if len(buf) < 4:
                raise FrameError("Incomplete Frame: KEY DATA")
self.key = buf[:4]
buf = buf[4:4+len(buf)+1]
elif self.payload == 126:
if len(buf) < 6:
                raise FrameError("Incomplete Frame: KEY DATA")
for k,i in [(0,1),(1,0)]:
self.len += (ord(buf[k]) * 1 << (8*i))
self.frame_length = 8 + self.len
if self.frame_length > len(self.buf):
                raise FrameError("Incomplete Frame: FRAME DATA")
buf = buf[2:]
self.key = buf[:4]
buf = buf[4:4+len(buf)+1]
else:
if len(buf) < 10:
                raise FrameError("Incomplete Frame: KEY DATA")
for k,i in [(0,7),(1,6),(2,5),(3,4),(4,3),(5,2),(6,1),(7,0)]:
self.len += (ord(buf[k]) * 1 << (8*i))
self.frame_length = 14 + self.len
if self.frame_length > len(self.buf):
                raise FrameError("Incomplete Frame: FRAME DATA")
buf = buf[8:]
self.key = buf[:4]
buf = buf[4:4+len(buf)+1]
self.msg = buf
def getMsg(self):
self._frameHeader()
decoded_msg = ""
for i in xrange(self.len):
c = ord(self.msg[i]) ^ ord(self.key[i % 4])
decoded_msg += str(chr(c))
return decoded_msg
def getFrameLenght(self):
return self.frame_length
@staticmethod
def buildMsg(buf):
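        # Build an unmasked text frame: 0x81 (FIN + text opcode), then the
        # payload length in 7-, 16- or 64-bit form, then the payload itself.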
c_buf = buf
msg = ""
#first byte
o = (1 << 7) + 1
msg += str(chr(o))
#second byte
buf_len = len(buf)
if buf_len < 126:
o = buf_len
msg += str(chr(o))
msg += buf
return msg
if buf_len <= ((1 << 16) - 1):
msg += str(chr(126))
for i in range(1,3):
o = (buf_len >> (16 - (8*i))) & (2**8 - 1)
msg += str(chr(o))
msg += buf
return msg
if buf_len <= ((1 << 64) - 1):
msg += str(chr(127))
for i in range(1,9):
o = (buf_len >> (64 - (8*i))) & (2**8 - 1)
msg += str(chr(o))
msg += buf
return msg
class Protocol(BaseProtocol, object):
def __init__(self, users):
self.bufferIn = ""
self.users = users
self.id = str(uuid.uuid4())
self.users[self.id] = self
self.websocket_ready = False
self.commands = []
def sendHandcheck(self):
buf = self.bufferIn
pos = buf.find("\r\n\r\n")
if pos == -1:
return
cmd = buf[:pos+5]
self.bufferIn = buf[pos+4:]
key = re.search("Sec-WebSocket-Key:\s*(\S+)\s*", cmd)
key = key.group(1)
self.key = key
key = key+'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
key = base64.b64encode(hashlib.sha1(key).digest())
self.transport.write(handshake % key)
self.websocket_ready = True
def dataReceived(self, data):
self.bufferIn += data
if not self.websocket_ready:
self.sendHandcheck()
self.onConnect()
else:
try:
f = Frame(self.bufferIn)
msg = f.getMsg()
except FrameError, e:
pass
else:
f_len = f.getFrameLenght()
self.bufferIn = self.bufferIn[f_len:]
self.onMessage(msg)
def connectionLost(self, *args, **kwargs):
_id = self.id
if self.id in self.users:
del self.users[self.id]
self.onDisconnect()
def loseConnection(self):
_id = self.id
if self.id in self.users:
del self.users[self.id]
self.onDisconnect()
self.transport.loseConnection()
def abortConnection(self):
_id = self.id
if self.id in self.users:
del self.users[self.id]
self.onDisconnect()
self.transport.abortConnection()
def onConnect(self):
pass
def onDisconnect(self):
pass
def onMessage(self, msg):
pass
def sendMessage(self, msg):
if not self.websocket_ready:
return
self.commands.append(msg)
for cmd in self.commands:
self.transport.write(Frame.buildMsg(cmd))
self.commands = []
|
Python
| 0
|
@@ -4591,24 +4591,54 @@
self, msg):%0A
+ self.commands.append(msg)%0A
if not s
@@ -4675,38 +4675,8 @@
urn%0A
- self.commands.append(msg)%0A
|
709a7139b4f3acaace53e79c7ca1adafd8f24027
|
Use tempfile to save modifications
|
basic.py
|
basic.py
|
"""Run a basic simulation"""
import os
import hoomd
import molecule
import numpy as np
import pandas
import TimeDep
from hoomd import md
import gsd.hoomd
from StepSize import generate_steps
def run_npt(snapshot, temp, steps, **kwargs):
"""Initialise a hoomd simulation"""
with hoomd.context.initialize(kwargs.get('init_args', '')):
system = hoomd.init.read_snapshot(snapshot)
md.update.enforce2d()
mol = kwargs.get('mol', molecule.Trimer())
mol.initialize(create=False)
md.integrate.mode_standard(kwargs.get('dt', 0.005))
md.integrate.npt(
group=hoomd.group.rigid_center(),
kT=temp,
tau=kwargs.get('tau', 1.),
P=kwargs.get('press', 13.5),
tauP=kwargs.get('tauP', 1.)
)
dynamics = TimeDep.TimeDep2dRigid(snapshot, 0)
for curr_step in generate_steps(steps):
hoomd.run_upto(curr_step)
dynamics.append(system.take_snapshot(all=True), curr_step)
return dynamics.get_all_data()
def read_snapshot(fname, rand=False):
"""Read a hoomd snapshot from a hoomd gsd file
Args:
fname (string): Filename of GSD file to read in
Returns:
class:`hoomd.data.Snapshot`: Hoomd snapshot
"""
with gsd.hoomd.open(fname) as trj:
snapshot = trj.read_frame(0)
if rand:
snapshot.particles.angmom
nbodies = snapshot.particles.body.max() + 1
np.random.shuffle(snapshot.particles.velocity[:nbodies])
np.random.shuffle(snapshot.particles.angmom[:nbodies])
return snapshot
def main(directory, temp, steps, iterations=2):
"""Main function to run stuff"""
init_file = directory + "/Trimer-{press}-{temp}.gsd".format(
press=13.50, temp=temp)
for iteration in range(iterations):
dynamics = run_npt(read_snapshot(init_file, rand=True), temp, steps)
with pandas.HDFStore(os.path.splitext(init_file)[0]+'.hdf5') as store:
store['dyn{i}'.format(i=iteration)] = dynamics.get_all_data()
if __name__ == '__main__':
main(".", 1.30, 1000, 20)
|
Python
| 0
|
@@ -41,19 +41,56 @@
%0Aimport
-hoo
+tempfile%0Aimport hoomd%0Afrom hoomd import
md%0Aimpor
@@ -148,37 +148,16 @@
TimeDep%0A
-from hoomd import md%0A
import g
@@ -386,24 +386,19 @@
it.read_
-snapshot
+gsd
(snapsho
@@ -398,16 +398,29 @@
snapshot
+, time_step=0
)%0A
@@ -857,23 +857,45 @@
dRigid(s
-napshot
+ystem.take_snapshot(all=True)
, 0)%0A
@@ -1416,46 +1416,8 @@
nd:%0A
- snapshot.particles.angmom%0A
@@ -1616,23 +1616,168 @@
-return snapshot
+tmp = tempfile.NamedTemporaryFile(delete=False)%0A with gsd.hoomd.open(tmp.name, 'wb') as tfile:%0A tfile.append(snapshot)%0A return tmp.name
%0A%0A%0Ad
@@ -1907,15 +1907,23 @@
ress
+:.2f
%7D-%7Btemp
+:.2f
%7D.gs
@@ -2161,16 +2161,16 @@
store:%0A
+
@@ -2219,31 +2219,16 @@
dynamics
-.get_all_data()
%0A%0Aif __n
|
44f3c42c5eec3c0208dfdf688c80b1aba19ce097
|
Fix unit test when running via `tox` in a VsCode bash shell on Windows.
|
smugcli/terminal_size.py
|
smugcli/terminal_size.py
|
#!/usr/bin/env python
# Source: https://gist.github.com/jtriley/1108174
import os
import shlex
import struct
import platform
import subprocess
def get_terminal_size():
""" getTerminalSize()
- get width and height of console
- works on linux,os x,windows,cygwin(windows)
originally retrieved from:
http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
"""
current_os = platform.system()
tuple_xy = None
if current_os == 'Windows':
tuple_xy = _get_terminal_size_windows()
if tuple_xy is None:
tuple_xy = _get_terminal_size_tput()
# needed for window's python in cygwin's xterm!
if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'):
tuple_xy = _get_terminal_size_linux()
if tuple_xy is None:
tuple_xy = (80, 25) # default value
return tuple_xy
def _get_terminal_size_windows():
try:
from ctypes import windll, create_string_buffer
# stdin handle is -10
# stdout handle is -11
# stderr handle is -12
h = windll.kernel32.GetStdHandle(-12)
csbi = create_string_buffer(22)
res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
if res:
(bufx, bufy, curx, cury, wattr,
left, top, right, bottom,
maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
sizex = right - left + 1
sizey = bottom - top + 1
return sizex, sizey
except:
pass
def _get_terminal_size_tput():
# get terminal width
# src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window
try:
cols = int(subprocess.check_call(shlex.split('tput cols')))
rows = int(subprocess.check_call(shlex.split('tput lines')))
return (cols, rows)
except:
pass
def _get_terminal_size_linux():
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
cr = struct.unpack('hh',
fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
return cr
except:
pass
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
try:
cr = (os.environ['LINES'], os.environ['COLUMNS'])
except:
return None
return int(cr[1]), int(cr[0])
if __name__ == "__main__":
sizex, sizey = get_terminal_size()
print('width =', sizex, 'height =', sizey)
|
Python
| 0
|
@@ -821,32 +821,64 @@
tuple_xy is None
+ or any(not i for i in tuple_xy)
:%0A tuple_
|
5d8e6e47964d80f380db27acd120136a43e80550
|
Fix tool description in argparse help
|
aimpoint_mon/make_web_page.py
|
aimpoint_mon/make_web_page.py
|
#!/usr/bin/env python
import os
import argparse
import json
from pathlib import Path
from jinja2 import Template
import pyyaks.logger
def get_opt():
parser = argparse.ArgumentParser(description='Get aimpoint drift data '
'from aspect solution files')
parser.add_argument("--data-root",
default=".",
help="Root directory for asol and index files")
return parser.parse_args()
# Options
opt = get_opt()
# Set up logging
loglevel = pyyaks.logger.INFO
logger = pyyaks.logger.get_logger(name='make_web_page', level=loglevel,
format="%(asctime)s %(message)s")
def main():
# Files
index_template_file = Path(__file__).parent / 'data' / 'index_template.html'
index_file = os.path.join(opt.data_root, 'index.html')
info_file = os.path.join(opt.data_root, 'info.json')
# Jinja template context
logger.info('Loading info file {}'.format(info_file))
context = json.load(open(info_file, 'r'))
template = Template(open(index_template_file).read())
context['static'] = True
html = template.render(**context)
logger.info('Writing index file {}'.format(index_file))
with open(index_file, 'w') as fh:
fh.write(html)
if __name__ == '__main__':
main()
|
Python
| 0.000038
|
@@ -200,11 +200,12 @@
on='
-Get
+Make
aim
@@ -214,85 +214,24 @@
int
-drift data '%0A 'from aspect solution files
+monitor web page
')%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.