| commit (stringlengths, 40-40) | subject (stringlengths, 1-3.25k) | old_file (stringlengths, 4-311) | new_file (stringlengths, 4-311) | old_contents (stringlengths, 0-26.3k) | lang (stringclasses, 3 values) | proba (float64, 0-1) | diff (stringlengths, 0-7.82k) |
|---|---|---|---|---|---|---|---|
d61a5eaebcaa12ddaec6161a347cbfa847c2a961
|
remove debug information
|
uctrl/load_balancer.py
|
uctrl/load_balancer.py
|
#!/usr/bin/env python
import random


# Not supposed to be something dynamic, for now.
class Load_Balancer(object):
    def __init__(self):
        # Edge output ports in order of preference
        # It will be calculated in the load_balance method
        self.edge_out_ports = {}

    def lb_policy(self, edge_core):
        raise NotImplementedError

    def lb_action(self, edge):
        raise NotImplementedError


class Dummy_LBalancer(Load_Balancer):
    def __init__(self):
        super(Dummy_LBalancer, self).__init__()

    def lb_policy(self, edge_core):
        for edge in edge_core:
            self.edge_out_ports.setdefault(edge, {})
            core = random.choice([x for x in edge_core[edge]])
            self.edge_out_ports[edge] = (core, edge_core[edge][core])

    def lb_action(self, edge):
        return self.edge_out_ports[edge]


class IP_LBalancer(Load_Balancer):
    def __init__(self, config):
        super(IP_LBalancer, self).__init__()
        self.id_matcher = {}

    def init(self, cores, match_bytes):
        # fill bytearray with match
        # string like '00000011' or
        # int from 0 to 255
        arr_bytes = bytearray(len(match_bytes))
        for i in range(len(match_bytes)):
            val = match_bytes[i]
            if isinstance(val, str):
                arr_bytes[i] = int(val, 2)
            elif isinstance(val, int):
                arr_bytes[i] = val
        #debug print
        #for i in range(len(arr_bytes)):
        #    print arr_bytes[i]
        # set for every match
        # [set([0]), set([0]), set([0]), set([0, 32, 64, 96])]
        # match_bytes [0, 0, 0, "01100000"]
        setarray = []
        for i in range(len(arr_bytes)):
            se = set([])
            for j in range(0, 255):
                se.add(arr_bytes[i] & j)
            setarray.append(se)
        #debug print
        #for i in se:
        #    print i
        print ("setarray: %s ") % setarray
        ##todo
        # idea to build matches from the different bytes
        allset = set([])
        for index, value in enumerate(setarray):
            print(index, value)
            if len(value) > 1:
                setlist = list(value)
                for set_elem in setlist:
                    var = (index+1-len(setlist))*256
                    if (var == 0):
                        var = 1
                    allset.add(set_elem * var)
        #debug print
        print ("allset: %s ") % allset
        # link every core to a match
        # works only for the 4. byte
        for core in cores:
            set_elem = setarray[3]  # fix atm look only at byte 4
            elem = set_elem.pop()
            self.id_matcher.update({core: elem})
        #debug print
        print ("id_matcher: %s ") % self.id_matcher
        #debug print
        for key, value in self.id_matcher.iteritems():
            print ("key[%s] = %s " % (key, value))
        print ("id_matcher[48]: %s ") % self.id_matcher[48]

    def get_ip_match(self, match_id):
        METADATA_MASK = 0xffffffff
        metadata = [match_id, METADATA_MASK]
        if match_id in self.id_matcher:
            #return decimal mask
            mask = self.id_matcher[match_id]
            ipv4_src = mask
            #todo build match
            match = {"eth_type": ETH_TYPE_IP, "ipv4_src": ipv4_src}
            return match, metadata

    def get_flow_mod(self):
        return self.flow_mods

    def lb_policy(self, edge_core):
        for edge in edge_core:
            self.edge_out_ports.setdefault(edge, {})
            core = random.choice([x for x in edge_core[edge]])
            self.edge_out_ports[edge] = (core, edge_core[edge][core])

    def lb_action(self, edge):
        return self.edge_out_ports[edge]
|
Python
| 0.000004
|
@@ -2906,32 +2906,33 @@
value))%0A
+#
print (%22id_match
|
8e536e4911ab18a5ac6e2e018fa041425a57a14b
|
Update serializers.py
|
website/serializers.py
|
website/serializers.py
|
from website.models import Issue, User, UserProfile, Points, Domain
from rest_framework import routers, serializers, viewsets, filters
import django_filters


class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model = User
        fields = ('id', 'username')


class IssueSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)

    class Meta:
        model = Issue
        fields = '__all__'


class IssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'description', 'user__id')


class UserIssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('user__username', 'user__id')


class UserProfileSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)

    class Meta:
        model = UserProfile
        fields = '__all__'


class UserProfileViewSet(viewsets.ModelViewSet):
    serializer_class = UserProfileSerializer
    queryset = UserProfile.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('id', 'user__id', 'user__username')


class DomainSerializer(serializers.ModelSerializer):
    class Meta:
        model = Domain
        fields = '__all__'


class DomainViewSet(viewsets.ModelViewSet):
    serializer_class = DomainSerializer
    queryset = Domain.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'name')


router = routers.DefaultRouter()
router.register(r'issues', IssueViewSet, basename="issues")
router.register(r'userissues', UserIssueViewSet, basename="userissues")
router.register(r'profile', UserProfileViewSet, basename="profile")
router.register(r'domain', DomainViewSet, basename="domain")
|
Python
| 0
|
@@ -645,32 +645,80 @@
on', 'user__id')
+%0A http_method_names = %5B'get', 'post', 'head'%5D
%0A%0Aclass UserIssu
@@ -920,16 +920,64 @@
er__id')
+%0A http_method_names = %5B'get', 'post', 'head'%5D
%0A%0Aclass
@@ -1370,32 +1370,80 @@
user__username')
+%0A http_method_names = %5B'get', 'post', 'head'%5D
%0A%0Aclass DomainSe
@@ -1707,32 +1707,32 @@
.SearchFilter,)%0A
-
search_field
@@ -1751,16 +1751,64 @@
'name')%0A
+ http_method_names = %5B'get', 'post', 'head'%5D%0A
%0A%0Arouter
|
97a80b758908760692a468269a2f1cbf209247d8
|
use default browser
|
config/lighthouse/cmd.py
|
config/lighthouse/cmd.py
|
#!/usr/bin/python2.7
import sys
import random
from time import sleep
import logging
from google import pygoogle
from multiprocessing import Process, Value, Manager, Array
from ctypes import c_char, c_char_p
import subprocess
import json

MAX_OUTPUT = 100 * 1024
resultStr = Array(c_char, MAX_OUTPUT);


def clear_output():
    resultStr.value = json.dumps([])


def sanitize_output(string):
    string = string.replace("{", "\{")
    string = string.replace("}", "\}")
    string = string.replace("|", "\|")
    string = string.replace("\n", " ")
    return string


def create_result(title, action):
    return "{" + title + " |" + action + " }"


def append_output(title, action):
    title = sanitize_output(title)
    action = sanitize_output(action)
    results = json.loads(resultStr.value)
    if len(results) < 2:
        results.append(create_result(title, action))
    else:  # ignore the bottom two default options
        results.insert(-2, create_result(title, action))
    resultStr.value = json.dumps(results)


def prepend_output(title, action):
    title = sanitize_output(title)
    action = sanitize_output(action)
    results = json.loads(resultStr.value)
    results = [create_result(title, action)] + results
    resultStr.value = json.dumps(results)


def update_output():
    results = json.loads(resultStr.value)
    print "".join(results)
    sys.stdout.flush()


google_thr = None

def google(query):
    sleep(.5)  # so we aren't querying EVERYTHING we type
    g = pygoogle(userInput, log_level=logging.CRITICAL)
    g.pages = 1
    out = g.get_urls()
    if (len(out) >= 1):
        append_output(out[0], "firefox " + out[0])
        update_output()


find_thr = None

def find(query):
    sleep(.5)  # Don't be too aggressive...
    find_out = str(subprocess.check_output(["find", "/home", "-name", query]))
    find_array = find_out.split("\n")[:-1]
    if (len(find_array) == 0): return
    for i in xrange(min(5, len(find_array))):
        append_output(str(find_array[i]), "urxvt -e bash -c 'if [[ $(file "+find_array[i]+" | grep text) != \"\" ]]; then vim "+find_array[i]+"; else cd $(dirname "+find_array[i]+"); bash; fi;'");
    update_output()


def get_process_output(process, formatting, action):
    process_out = str(subprocess.check_output(process))
    if "%s" in formatting:
        out_str = formatting % (process_out)
    else:
        out_str = formatting
    if "%s" in action:
        out_action = action % (process_out)
    else:
        out_action = action
    return (out_str, out_action)


special = {
    "chrom": (lambda x: ("did you mean firefox?", "firefox")),
    "fire": (lambda x: ("firefox", "firefox")),
    "vi": (lambda x: ("vim", "urxvt -e vim")),
    "bat": (lambda x: get_process_output("acpi", "%s", ""))
};

while 1:
    userInput = sys.stdin.readline()
    userInput = userInput[:-1]

    # Clear results
    clear_output()

    # Kill previous worker threads
    if google_thr != None:
        google_thr.terminate()
    if find_thr != None:
        find_thr.terminate()

    # We don't handle empty strings
    if userInput == '':
        update_output()
        continue

    # Could be a command...
    append_output("execute '"+userInput+"'", userInput);
    # Could be bash...
    append_output("run '"+userInput+"' in a shell", "urxvt -e bash -c '"+userInput+" && bash'");

    # Scan for keywords
    for keyword in special:
        if userInput[0:len(keyword)] == keyword:
            out = special[keyword](userInput)
            if out != None:
                prepend_output(*out);

    # Is this python?
    try:
        out = eval(userInput)
        if (type(out) != str and str(out)[0] == '<'):
            pass  # We don't want gibberish type stuff
        else:
            prepend_output("python: "+str(out), "urxvt -e python2.7 -i -c 'print "+userInput+"'")
    except Exception as e:
        pass

    # Spawn worker threads
    google_thr = Process(target=google, args=(userInput,))
    google_thr.start()
    find_thr = Process(target=find, args=(userInput,))
    find_thr.start()
    update_output()
|
Python
| 0
|
@@ -1561,23 +1561,24 @@
ut%5B0%5D, %22
-firefox
+xdg-open
%22 + out
|
583a649cb959dacf31777a4d4736e06fbea7a9b1
|
Kill main process first
|
runner.py
|
runner.py
|
#!/usr/bin/env python

import os
import sys
import yaml
import shlex
import signal
import psutil
import subprocess


def run_command(cmd, logfile='/dev/null'):
    so = file(logfile, 'a+')
    p = subprocess.Popen(cmd, shell=False, universal_newlines=True, stdout=so)
    ret_code = p.wait()
    so.flush()
    return ret_code


def fork_command(cmd, logfile='/dev/null'):
    try:
        pid = os.fork()
        if pid > 0:
            return
    except OSError, e:
        sys.exit(1)

    os.setsid()

    try:
        pid = os.fork()
        if pid > 0:
            sys.exit(0)
    except OSError, e:
        sys.exit(1)

    si = file('/dev/null', 'r')
    so = file(logfile, 'a+')
    se = file(logfile, 'a+', 0)
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    subprocess.call(cmd)
    os._exit(os.EX_OK)


def kill_group(proc, signal):
    for child in proc.get_children():
        kill_group(child, signal)
    proc.send_signal(signal)


def which(cmd):
    for dir in os.environ['PATH'].split(':'):
        exe = os.path.join(dir, cmd)
        if os.path.exists(exe):
            return exe


def get_process_path(cmd):
    exe = which(cmd)
    data = file(exe).read(100).split('\n')[0]
    if data.startswith('#!'):
        shebang = data.split('#!')[1]
        if shebang.startswith('/usr/bin/env'):
            app = os.path.realpath(which(shebang.split()[1]))
            return app + ' ' + exe
        else:
            return shebang + ' ' + exe
    else:
        return exe


def print_usage():
    print "Usage: %s start|stop|status|list <service1> ..." % sys.argv[0]
    print "Example:"
    print " %s start web" % sys.argv[0]


def main():
    run_dir = os.getcwd()
    run_file = os.path.join(run_dir, 'Runfile')
    run_log = os.path.join(run_dir, 'run.log')

    if not os.access(run_file, os.R_OK):
        print "Nothing to do"
        return -1

    try:
        operation = sys.argv[1]
        services = sys.argv[2:]
    except IndexError:
        print_usage()
        return -1

    if operation not in ('start', 'stop', 'list', 'status'):
        print_usage()
        return -1

    rules = yaml.load(file(run_file))
    if 'all' in services:
        services = rules.keys()

    if operation == 'list':
        for name in rules:
            print name
        return 0
    elif operation in ('start', 'stop'):
        unknown = set(services) - set(rules.keys())
        if len(unknown):
            print "Unknown service(s): %s" % ', '.join(unknown)
            return -1
    elif operation == 'status':
        services = rules.keys()

    rule_commands = []
    for service in services:
        if isinstance(rules[service], str):
            rules[service] = [rules[service]]
        for i, cmd in enumerate(rules[service]):
            cmd = shlex.split(cmd)
            cmd = shlex.split(get_process_path(cmd[0])) + cmd[1:]
            rules[service][i] = cmd
            rule_commands.append(cmd)

    running_processes = []
    running_commands = []
    for process in psutil.process_iter():
        try:
            cwd = os.path.abspath(process.getcwd())
        except:
            continue
        cmd = process.cmdline
        if not len(cmd):
            continue
        cmd[0] = process.exe
        if cwd == run_dir:
            if cmd in rule_commands:
                running_processes.append(process)
                running_commands.append(cmd)

    if operation == 'status':
        for cmd in running_commands:
            for service in rules:
                if cmd in rules[service]:
                    print service
    elif operation == 'stop':
        for proc in running_processes:
            kill_group(proc, signal.SIGKILL)
    elif operation == 'start':
        for cmd in rule_commands:
            if cmd not in running_commands:
                fork_command(cmd, run_log)


if __name__ == '__main__':
    main()
|
Python
| 0.000001
|
@@ -16,16 +16,25 @@
python%0A%0A
+try:%0A
import o
@@ -35,16 +35,20 @@
port os%0A
+
import s
@@ -50,16 +50,20 @@
ort sys%0A
+
import y
@@ -66,16 +66,20 @@
rt yaml%0A
+
import s
@@ -83,16 +83,20 @@
t shlex%0A
+
import s
@@ -101,16 +101,20 @@
signal%0A
+
import p
@@ -119,16 +119,20 @@
psutil%0A
+
import s
@@ -141,16 +141,138 @@
process%0A
+except ImportError:%0A print %22Missing Python module(s)%22%0A print %22 pip install psutil%22%0A print %22 pip install pyyml%22%0A
%0A%0Adef ru
@@ -1061,16 +1061,17 @@
EX_OK)%0A%0A
+%0A
def kill
@@ -1100,20 +1100,18 @@
-for
child
- in
+ren =
pro
@@ -1130,65 +1130,91 @@
en()
-:
%0A
- kill_group(child, signal)%0A proc.send_signal(
+proc.send_signal(signal)%0A for child in children:%0A kill_group(child,
sign
|
936d16449ae8e40435258f79bbb14f4d47c96f02
|
Fix bug in neonlogger using stdout for stderr.
|
src/opencmiss/neon/core/neonlogger.py
|
src/opencmiss/neon/core/neonlogger.py
|
'''
Copyright 2015 University of Auckland

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import sys
import logging

from PySide import QtCore

from opencmiss.zinc.logger import Logger

ENABLE_STD_STREAM_CAPTURE = False


class CustomStream(QtCore.QObject):
    _stdout = None
    _stderr = None
    _saved_stdout = None
    messageWritten = QtCore.Signal([str, str])

    def flush(self):
        pass

    def fileno(self):
        return -1

    def write(self, msg, level="INFORMATION"):
        if (not self.signalsBlocked()):
            self.messageWritten.emit(msg, level)

    @staticmethod
    def stdout():
        if (not CustomStream._stdout):
            CustomStream._stdout = CustomStream()
            CustomStream._saved_stdout = sys.stdout
            sys.stdout = CustomStream._stdout if ENABLE_STD_STREAM_CAPTURE else sys.stdout
        return CustomStream._stdout

    @staticmethod
    def stderr():
        if (not CustomStream._stderr):
            CustomStream._stderr = CustomStream()
            sys.stderr = CustomStream._stdout if ENABLE_STD_STREAM_CAPTURE else sys.stderr
        return CustomStream._stderr


class LogsToWidgetHandler(logging.Handler):
    def __init__(self):
        logging.Handler.__init__(self)

    def emit(self, record):
        levelString = record.levelname
        record = self.format(record)
        if record:
            CustomStream.stdout().write('%s\n' % record, levelString)


def setup_custom_logger(name):
    formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
    handler = LogsToWidgetHandler()
    handler.setFormatter(formatter)

    neonLogger = logging.getLogger(name)
    neonLogger.setLevel(logging.DEBUG)
    neonLogger.addHandler(handler)
    return neonLogger


class NeonLogger(object):
    _logger = None
    _zincLogger = None
    _loggerNotifier = None

    @staticmethod
    def getLogger():
        if (not NeonLogger._logger):
            NeonLogger._logger = setup_custom_logger("Neon")
        return NeonLogger._logger

    @staticmethod
    def writeErrorMessage(string):
        NeonLogger.getLogger().error(string)

    @staticmethod
    def writeWarningMessage(string):
        NeonLogger.getLogger().warning(string)

    @staticmethod
    def writeInformationMessage(string):
        NeonLogger.getLogger().info(string)

    @staticmethod
    def loggerCallback(event):
        if event.getChangeFlags() == Logger.CHANGE_FLAG_NEW_MESSAGE:
            text = event.getMessageText()
            if event.getMessageType() == Logger.MESSAGE_TYPE_ERROR:
                NeonLogger.writeErrorMessage(text)
            elif event.getMessageType() == Logger.MESSAGE_TYPE_WARNING:
                NeonLogger.writeWarningMessage(text)
            elif event.getMessageType() == Logger.MESSAGE_TYPE_INFORMATION:
                NeonLogger.writeInformationMessage(text)

    @staticmethod
    def setZincContext(zincContext):
        if NeonLogger._loggerNotifier:
            NeonLogger._loggerNotifier.clearCallback()
        NeonLogger._zincLogger = zincContext.getLogger()
        NeonLogger._loggerNotifier = NeonLogger._zincLogger.createLoggernotifier()
        NeonLogger._loggerNotifier.setCallback(NeonLogger.loggerCallback)
|
Python
| 0
|
@@ -723,12 +723,11 @@
E =
-Fals
+Tru
e%0A%0A%0A
@@ -804,33 +804,8 @@
one%0A
- _saved_stdout = None%0A
@@ -1190,60 +1190,8 @@
m()%0A
- CustomStream._saved_stdout = sys.stdout%0A
@@ -1473,35 +1473,35 @@
ustomStream._std
-out
+err
if ENABLE_STD_S
|
6f0c05ee4743528550dd083d9290b5be0074ff0e
|
Add commands args to runner and improve docs in it
|
runner.py
|
runner.py
|
import sys

from vsut.unit import CSVFormatter, TableFormatter

if __name__ == "__main__":
    for i in range(1, len(sys.argv)):
        try:
            modName = sys.argv[i].split(".")[0:-1]
            modName = ".".join(modName)
            className = sys.argv[i].split(".")[-1]
            module = __import__(modName, fromlist=[className])
            className = getattr(module, className)

            unit = className()
            unit.run()

            formatter = TableFormatter(unit)
            print(formatter.format())
        except ImportError as e:
            #TODO: Handle this import error.
            print(e)
|
Python
| 0
|
@@ -1,12 +1,28 @@
+import argparse%0A
import sys%0Af
@@ -70,16 +70,22 @@
ormatter
+, Unit
%0A%0Aif __n
@@ -113,88 +113,426 @@
-for i in range(1, len(sys.argv)):%0A try:%0A modName = sys.argv%5Bi%5D
+parser = argparse.ArgumentParser(description=%22Runs unit tests.%22)%0A parser.add_argument('units', metavar='Unit', type=str, nargs='+')%0A parser.add_argument(%0A '--format', help=%22Default: table; Decides whether to use table or csv for output.%22, required=False)%0A args = vars(parser.parse_args())%0A%0A for unit in args%5B%22units%22%5D:%0A try:%0A # Get the name of the module.%0A modName = unit
.spl
@@ -582,24 +582,65 @@
n(modName)%0A%0A
+ # Get the name of the class.%0A
@@ -655,19 +655,12 @@
e =
-sys.argv%5Bi%5D
+unit
.spl
@@ -670,16 +670,50 @@
%22.%22)%5B-1%5D
+%0A%0A # Import the module.
%0A
@@ -781,25 +781,58 @@
-className
+# Create unit and run it.%0A unit
= getat
@@ -848,25 +848,26 @@
, className)
-%0A
+()
%0A
@@ -875,47 +875,175 @@
unit
- = className()%0A unit.run()%0A%0A
+.run()%0A%0A # Format the results and output them.%0A if args%5B%22format%22%5D == %22csv%22:%0A formatter = CSVFormatter(unit)%0A else:%0A
@@ -1114,17 +1114,8 @@
tter
-.format()
)%0A
@@ -1127,16 +1127,17 @@
except
+(
ImportEr
@@ -1139,16 +1139,33 @@
ortError
+, AttributeError)
as e:%0A
@@ -1179,58 +1179,61 @@
-#TODO: Handle this import error.%0A print(e
+print(%22%5BError%5D Could not import unit: %7B0%7D%22.format(unit)
)%0A
|
553cd68fb5d54be6ecbf3ca93c6d6c6be75afdb5
|
Add EveLinkCache to evelink.appengine
|
evelink/appengine/__init__.py
|
evelink/appengine/__init__.py
|
from evelink.appengine.api import AppEngineAPI
from evelink.appengine.api import AppEngineCache
from evelink.appengine.api import AppEngineDatastoreCache
from evelink.appengine import account
from evelink.appengine import char
from evelink.appengine import corp
from evelink.appengine import eve
from evelink.appengine import map
from evelink.appengine import server
__all__ = [
    "AppEngineAPI",
    "AppEngineCache",
    "AppEngineDatastoreCache",
    "account",
    "char",
    "corp",
    "eve",
    "map",
    "server",
]
|
Python
| 0.000001
|
@@ -143,24 +143,71 @@
astoreCache%0A
+from evelink.appengine.api import EveLinkCache%0A
from evelink
@@ -487,16 +487,34 @@
Cache%22,%0A
+ %22EveLinkCache%22,%0A
%22accou
|
68c4f723f5eea2802209862d323825f33a445154
|
Fix url id to pk.
|
eventex/subscriptions/urls.py
|
eventex/subscriptions/urls.py
|
from django.urls import path
import eventex.subscriptions.views as s
app_name = 'subscriptions'
urlpatterns = [
    path('', s.new, name='new'),
    path('<int:id>/', s.detail, name='detail'),
    path('json/donut/', s.paid_list_json, name='paid_list_json'),
    path('json/column/', s.paid_column_json, name='paid_column_json'),
    path('graphic/', s.graphic, name='graphic'),
]
|
Python
| 0.000001
|
@@ -157,18 +157,18 @@
h('%3Cint:
-id
+pk
%3E/', s.d
|
1bde40bebaa05a35e20dcbac7af91274d1c6e4da
|
test for .add_hashvalues was added
|
src/test_minhashsketch.py
|
src/test_minhashsketch.py
|
import pytest

import random
import array
from collections import Counter

from mashingpumpkins._murmurhash3 import hasharray
from mashingpumpkins.minhashsketch import (MaxHashNgramSketch, MaxHashNgramCountSketch, FrozenHashNgramSketch)


def _test_MaxHashNgramSketch(sequence, nsize):
    # set the hashing function, size of ngrams, max size for the minhash sketch
    hashfun = hasharray
    maxsize = 10
    mhs = MaxHashNgramSketch(nsize, maxsize, hashfun)
    assert mhs.maxsize == maxsize
    assert mhs.nsize == nsize

    # add the sequence
    mhs.add(sequence)
    # check that all ngrams/kmers visited when adding sequence
    assert mhs.nvisited == (len(sequence)-nsize+1)

    # check that the minhash sketch is full
    assert len(mhs) == maxsize
    assert len(mhs._heap) == maxsize
    assert len(mhs._heapset) == maxsize
    assert len(tuple(mhs)) == maxsize

    # extract all ngrams/kmers of length nsize in the test sequence
    allhash = list()
    hbuffer = array.array('Q', [0, ])
    for i in range(0, len(sequence)-nsize+1):
        ngram = sequence[i:(i+nsize)]
        hashfun(ngram, nsize, hbuffer)
        allhash.append((hbuffer[0], ngram))

    # slice the 10 biggest out
    allhash.sort(reverse=True)
    maxhash = set(allhash[:maxsize])
    # check that the slice above matches the content of the maxhash sketch
    assert len(maxhash ^ mhs._heapset) == 0

    #FIXME: add test for .add_hashvalues
    #FIXME: add test for .update


def test_MaxHashNgramSketch_longer_than_buffer():
    # random (DNA) sequence
    random.seed(123)
    sequence = b''.join(random.choice((b'A',b'T',b'G',b'C')) for x in range(250))
    nsize = 21
    _test_MaxHashNgramSketch(sequence, nsize)


def test_MaxHashNgramSketch_shorter_than_buffer():
    # random (DNA) sequence
    random.seed(123)
    sequence = b''.join(random.choice((b'A',b'T',b'G',b'C')) for x in range(50))
    nsize = 21
    _test_MaxHashNgramSketch(sequence, nsize)


def test_MaxHashNgramSketch_add_hashvalue():
    # random (DNA) sequence
    random.seed(123)
    sequence = b''.join(random.choice((b'A',b'T',b'G',b'C')) for x in range(50))
    nsize = 21
    _test_MaxHashNgramSketch(sequence, nsize)

    hashfun = hasharray
    nsize = 21
    maxsize = 10

    mhs_a = MaxHashNgramSketch(nsize, maxsize, hashfun)
    mhs_a.add(sequence)

    mhs_b = MaxHashNgramSketch(nsize, maxsize, hashfun)
    hbuffer = array.array('Q', [0, ])
    seq_hash = list()
    for i in range(0, len(sequence)-nsize):
        ngram = sequence[i:(i+nsize)]
        hashfun(ngram, nsize, hbuffer)
        seq_hash.append((ngram, hbuffer[0]))
    mhs_b.add_hashvalues(x[1] for x in seq_hash)

    assert mhs_b.nvisited == 0  # !!! nvisited it not updated
    assert len(mhs_b) == maxsize
    assert len(mhs_b._heap) == maxsize
    assert len(mhs_b._heapset) == maxsize
    assert len(tuple(mhs_b)) == maxsize

    assert len(set(x[0] for x in mhs_a) ^ set(x[0] for x in mhs_b)) == 0

    #FIXME: add test for .add_hashvalues
    #FIXME: add test for .update


def test_MaxHashNgramCountSketch():
    random.seed(123)
    sequence = b''.join(random.choice((b'A',b'T',b'G',b'C')) for x in range(50))
    hashfun = hasharray
    nsize = 2
    maxsize = 10
    mhs = MaxHashNgramCountSketch(nsize, maxsize, hashfun)
    assert mhs.maxsize == maxsize
    assert mhs.nsize == nsize

    mhs.add(sequence)
    assert mhs.nvisited == (50-nsize+1)

    allcounthash = Counter()
    hbuffer = array.array('Q', [0,])
    for i in range(0, len(sequence)-nsize+1):
        ngram = sequence[i:(i+nsize)]
        hashfun(ngram, nsize, hbuffer)
        allcounthash[(hbuffer[0], ngram)] += 1
    maxhash = sorted(allcounthash.keys(), reverse=True)[:maxsize]
    assert len(set(maxhash) ^ mhs._heapset) == 0
    for elt, value in mhs._count.items():
        assert allcounthash[elt] == value

    #FIXME: look the tests below
    #assert len(mhs) == maxsize
    #assert len(mhs._heap) == maxsize
    #assert len(mhs._heapset) == maxsize
    #assert len(mhs._count) == maxsize
    #assert len(tuple(mhs)) == maxsize

    #FIXME: add test for .add_hashvalues
    #FIXME: add test for .update


def test_FrozenHashNgramSketch():
    nsize = 2
    maxsize = 5
    sketch = set((1,2,3,4,5))
    nvisited = len(sketch)
    mhs = FrozenHashNgramSketch(sketch, nsize, maxsize = maxsize, nvisited=nvisited)
    assert mhs.maxsize == maxsize
    assert mhs.nsize == nsize
    assert mhs.nvisited == nvisited
    assert len(mhs) == maxsize
    assert len(mhs._sketch) == maxsize

    mhs = FrozenHashNgramSketch(sketch, nsize)
    assert mhs.maxsize == maxsize
    assert mhs.nsize == nsize
    assert mhs.nvisited == nvisited
    assert len(mhs) == maxsize
    assert len(mhs._sketch) == maxsize

    assert mhs.jaccard(mhs) == 1
    sketch = set((1,2,3,6,7))
    mhs_b = FrozenHashNgramSketch(sketch, nsize, maxsize = maxsize, nvisited=len(sketch))
    assert mhs.jaccard(mhs_b) == 3/7

    with pytest.raises(ValueError):
        mhs = FrozenHashNgramSketch(sketch, nsize, maxsize = len(sketch)-1)

    with pytest.raises(ValueError):
        mhs = FrozenHashNgramSketch(sketch, nsize, nvisited = len(sketch)-1)
|
Python
| 0
|
@@ -1396,49 +1396,8 @@
%0A
- #FIXME: add test for .add_hashvalues%0A
@@ -1954,16 +1954,17 @@
ashvalue
+s
():%0A
@@ -2926,49 +2926,8 @@
%0A
- #FIXME: add test for .add_hashvalues%0A
|
127e5ae02932af67c6157939cff6ab388c89c677
|
convert process_attr to a parameter in contructor so extending the class is not needed
|
scrapy/trunk/scrapy/contrib_exp/link/__init__.py
|
scrapy/trunk/scrapy/contrib_exp/link/__init__.py
|
from HTMLParser import HTMLParser

from scrapy.link import Link
from scrapy.utils.python import unique as unique_list
from scrapy.utils.url import safe_url_string, urljoin_rfc as urljoin


class LinkExtractor(HTMLParser):
    """LinkExtractor are used to extract links from web pages. They are
    instantiated and later "applied" to a Response using the extract_links
    method which must receive a Response object and return a list of Link objects
    containing the (absolute) urls to follow, and the links texts.

    This is the base LinkExtractor class that provides enough basic
    functionality for extracting links to follow, but you could override this
    class or create a new one if you need some additional functionality. The
    only requisite is that the new (or overrided) class must provide a
    extract_links method that receives a Response and returns a list of Link objects.

    This LinkExtractor always returns percent-encoded URLs, using the detected encoding
    from the response.

    The constructor arguments are:

    * tag (string or function)
      * a tag name which is used to search for links (defaults to "a")
      * a function which receives a tag name and returns whether to scan it
    * attr (string or function)
      * an attribute name which is used to search for links (defaults to "href")
      * a function which receives an attribute name and returns whether to scan it
    * unique - if True the same urls won't be extracted twice, otherwise the
      same urls will be extracted multiple times (with potentially different link texts)
    """

    def __init__(self, tag="a", attr="href", unique=False):
        HTMLParser.__init__(self)
        self.scan_tag = tag if callable(tag) else lambda t: t == tag
        self.scan_attr = attr if callable(attr) else lambda a: a == attr
        self.unique = unique

    def _extract_links(self, response_text, response_url, response_encoding):
        self.reset()
        self.feed(response_text)
        self.close()

        links = unique_list(self.links, key=lambda link: link.url) if self.unique else self.links

        ret = []
        base_url = self.base_url if self.base_url else response_url
        for link in links:
            link.url = urljoin(base_url, link.url)
            link.url = safe_url_string(link.url, response_encoding)
            link.text = link.text.decode(response_encoding)
            ret.append(link)

        return ret

    def extract_links(self, response):
        # wrapper needed to allow to work directly with text
        return self._extract_links(response.body, response.url,
                                   response.encoding)

    def reset(self):
        HTMLParser.reset(self)
        self.base_url = None
        self.current_link = None
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == 'base':
            self.base_url = dict(attrs).get('href')
        if self.scan_tag(tag):
            for attr, value in attrs:
                if self.scan_attr(attr):
                    url = self.process_attr(value)
                    link = Link(url=url)
                    self.links.append(link)
                    self.current_link = link

    def handle_endtag(self, tag):
        self.current_link = None

    def handle_data(self, data):
        if self.current_link and not self.current_link.text:
            self.current_link.text = data.strip()

    def process_attr(self, value):
        """Hook to process the value of the attribute before asigning
        it to the link"""
        return value

    def matches(self, url):
        """This extractor matches with any url, since
        it doesn't contain any patterns"""
        return True
|
Python
| 0.000002
|
@@ -1411,24 +1411,121 @@
to scan it%0A
+ * process (funtion)%0A * a function wich receives the attribute value before assigning it%0A
* unique
@@ -1589,16 +1589,16 @@
ise the%0A
-
sa
@@ -1731,16 +1731,30 @@
=%22href%22,
+ process=None,
unique=
@@ -1865,16 +1865,16 @@
== tag%0A
-
@@ -1938,16 +1938,90 @@
== attr%0A
+ self.process_attr = process if callable(process) else lambda v: v%0A
@@ -3626,161 +3626,8 @@
()%0A%0A
- def process_attr(self, value):%0A %22%22%22Hook to process the value of the attribute before asigning%0A it to the link%22%22%22%0A return value%0A%0A
|
20404db72b04dee0bc578404bdb97225c50430d2
|
Add data_source to default file test data
|
webapp/apps/test_assets/utils.py
|
webapp/apps/test_assets/utils.py
|
import json
import os
import sys
import ast

from ..taxbrain.compute import MockCompute
from ..taxbrain.models import OutputUrl
from ..taxbrain.forms import TaxBrainForm
from ..dynamic import views
from ..taxbrain import views

from django.core.files.uploadedfile import SimpleUploadedFile

NUM_BUDGET_YEARS = int(os.environ.get("NUM_BUDGET_YEARS", "10"))


def get_dropq_compute_from_module(module_import_path, attr='dropq_compute',
                                  MockComputeObj=MockCompute, **mc_args):
    """
    mocks dropq compute object from specified module

    returns: mocked dropq compute object
    """
    module_views = sys.modules[module_import_path]
    setattr(module_views, attr, MockComputeObj(**mc_args))
    return getattr(module_views, attr)


def do_micro_sim(client, data, tb_dropq_compute=None, dyn_dropq_compute=None,
                 compute_count=None, post_url='/taxbrain/'):
    """
    do the proper sequence of HTTP calls to run a microsim

    tb_dropq_compute: mocked taxbrain dropq_compute object; set to default
        config if None
    dyn_dropq_compute: mocked dynamic dropq_compute object; set to default
        config if None
    compute_count: number of jobs submitted; only checked in quick_calc tests
    post_url: url to post data; is also set to /taxbrain/file/ for file_input
        tests

    returns: response object, taxbrain mock dropq compute object,
        dynamic dropq compute object, primary key for model run
    """
    # Monkey patch to mock out running of compute jobs
    if tb_dropq_compute is None:
        tb_dropq_compute = get_dropq_compute_from_module(
            'webapp.apps.taxbrain.views',
            num_times_to_wait=0
        )
    if dyn_dropq_compute is None:
        dyn_dropq_compute = get_dropq_compute_from_module(
            'webapp.apps.dynamic.views',
            num_times_to_wait=1
        )

    response = client.post(post_url, data)
    # Check that redirect happens
    assert response.status_code == 302
    idx = response.url[:-1].rfind('/')
    assert response.url[:idx].endswith("taxbrain")

    # Check for good response
    response2 = client.get(response.url)
    # TODO: check compute count once NUM_BUDGET_YEARS env variable issue is
    # resolved
    assert response2.status_code == 200
    if compute_count is not None:
        assert tb_dropq_compute.count == compute_count

    # return response
    return {"response": response,
            "tb_dropq_compute": tb_dropq_compute,
            "dyn_dropq_compute": dyn_dropq_compute,
            "pk": response.url[idx+1:-1]}


def check_posted_params(mock_compute, params_to_check, start_year,
                        use_puf_not_cps=True):
    """
    Make sure posted params match expected results

    user_mods: parameters that are actually passed to taxcalc
    params_to_check: gives truth value for parameters that we want to check
        (formatted as taxcalc dict style reform)
    """
    last_posted = mock_compute.last_posted
    user_mods = json.loads(last_posted["user_mods"])
    assert last_posted["first_budget_year"] == int(start_year)
    assert last_posted["use_puf_not_cps"] == use_puf_not_cps
    for year in params_to_check:
        for param in params_to_check[year]:
            act = user_mods["policy"][str(year)][param]
            exp = params_to_check[year][param]
            # more extensive assertion statement
            # catches: [['true', '2']] == [['true', '2']]
            # as well as [['true', '2']] == [['1', '2.0']]
            if exp == act:
                continue
            try:
                assert ast.literal_eval(exp) == ast.literal_eval(act)
            except ValueError:
                assert exp == act


def get_post_data(start_year, _ID_BenefitSurtax_Switches=True, quick_calc=False):
    """
    Convenience function for posting GUI data
    """
    data = {u'has_errors': [u'False'],
            u'start_year': unicode(start_year),
            'data_source': 'PUF',
            'csrfmiddlewaretoken': 'abc123'}
    if _ID_BenefitSurtax_Switches:
        switches = {u'ID_BenefitSurtax_Switch_0': [u'True'],
                    u'ID_BenefitSurtax_Switch_1': [u'True'],
                    u'ID_BenefitSurtax_Switch_2': [u'True'],
                    u'ID_BenefitSurtax_Switch_3': [u'True'],
                    u'ID_BenefitSurtax_Switch_4': [u'True'],
                    u'ID_BenefitSurtax_Switch_5': [u'True'],
                    u'ID_BenefitSurtax_Switch_6': [u'True']}
        data.update(switches)
    if quick_calc:
        data['quick_calc'] = 'Quick Calculation!'
    return data


def get_file_post_data(start_year, reform_text, assumptions_text=None, quick_calc=False):
    """
    Convenience function for posting file input data
    """
    tc_file = SimpleUploadedFile("test_reform.json", reform_text)
    data = {u'docfile': tc_file,
            u'has_errors': [u'False'],
            u'start_year': unicode(start_year),
            u'quick_calc': quick_calc,
            'csrfmiddlewaretoken': 'abc123'}
    if assumptions_text is not None:
        tc_file2 = SimpleUploadedFile("test_assumptions.json",
                                      assumptions_text)
        data['assumpfile'] = tc_file2
    return data


def get_taxbrain_model(_fields, first_year=2017,
                       quick_calc=False, taxcalc_vers="0.13.0",
                       webapp_vers="1.2.0", exp_comp_datetime="2017-10-10",
                       Form=TaxBrainForm, UrlModel=OutputUrl,
                       use_puf_not_cps=True):
    fields = _fields.copy()
    fields.pop('_state', None)
    fields.pop('creation_date', None)
    fields.pop('id', None)
    fields = stringify_fields(fields)
    personal_inputs = Form(first_year, use_puf_not_cps, fields)
    if not personal_inputs.is_valid():
        print(personal_inputs.errors)
    model = personal_inputs.save(commit=False)
    model.set_fields()
    model.save()
    model.job_ids = ['1', '2', '3']
    model.json_text = None
    model.first_year = first_year
    model.quick_calc = quick_calc
    model.save()

    unique_url = UrlModel()
    unique_url.taxcalc_vers = taxcalc_vers
    unique_url.webapp_vers = webapp_vers
    unique_url.unique_inputs = model
    unique_url.model_pk = model.pk
    unique_url.exp_comp_datetime = exp_comp_datetime
    unique_url.save()
    return unique_url


def stringify_fields(fields):
    for key in fields:
        if isinstance(fields[key], list):
            fields[key] = ','.join(map(str, fields[key]))
        else:
            fields[key] = str(fields[key])
    return fields
|
Python
| 0
|
@@ -4997,32 +4997,66 @@
de(start_year),%0A
+ 'data_source': 'PUF',%0A
u'qu
|
ca625e22cb397905f859c826c6507b3977665a51
|
Fix import
|
examples/cifar10_ror.py
|
examples/cifar10_ror.py
|
'''
Trains a Residual-of-Residual Network (WRN-40-2) model on the CIFAR-10 Dataset.

Gets a 94.53% accuracy score after 150 epochs.
'''
import numpy as np
import sklearn.metrics as metrics

import keras.callbacks as callbacks
import keras.utils.np_utils as kutils
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
from keras_contrib.applications.ror import ResidualOfResidual

batch_size = 64
nb_epoch = 150
img_rows, img_cols = 32, 32

(trainX, trainY), (testX, testY) = cifar10.load_data()

trainX = trainX.astype('float32')
testX = testX.astype('float32')

trainX /= 255
testX /= 255

tempY = testY

trainY = kutils.to_categorical(trainY)
testY = kutils.to_categorical(testY)

generator = ImageDataGenerator(rotation_range=15,
                               width_shift_range=5. / 32,
                               height_shift_range=5. / 32)

generator.fit(trainX, seed=0)

model = ResidualOfResidual(depth=40, width=2, dropout_rate=0.0, weights='None')

optimizer = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])
print("Finished compiling")

model.fit_generator(generator.flow(trainX, trainY, batch_size=batch_size), samples_per_epoch=len(trainX),
                    nb_epoch=nb_epoch,
                    callbacks=[callbacks.ModelCheckpoint("weights/RoR-WRN-40-2-Weights.h5", monitor="val_acc",
                                                         save_best_only=True, save_weights_only=True)],
                    validation_data=(testX, testY),
                    nb_val_samples=testX.shape[0], verbose=2)

scores = model.evaluate(testX, testY, batch_size)
print("Test loss : ", scores[0])
print("Test accuracy : ", scores[1])
|
Python
| 0
|
@@ -419,12 +419,8 @@
ions
-.ror
imp
|
cab81ff75f8fdd6650132927d9201bd0923c8354
|
Fix error messages.
|
src/oidc_fed/__init__.py
|
src/oidc_fed/__init__.py
|
"""
oidc-fed
~~~~~~~~~~~~~~~~
Example implementation of model for OpenID Connect federations.
:copyright: (c) 2016 by Umeå University.
:license: APACHE 2.0, see LICENSE for more details.
"""
import json
import uuid
from Crypto.PublicKey import RSA
from jwkest import JWKESTException
from jwkest.jwk import RSAKey, keyrep
from jwkest.jws import JWS
from oic.utils.keyio import KeyJar, KeyBundle
class OIDCFederationError(Exception):
pass
class OIDCFederationEntity(object):
def __init__(self, name, root_key, software_statements, federation_keys, signed_jwks_uri):
# type: (str, Key, Sequence[str], Sequence[Key], str) -> None
"""
:param name: URI identifying the entity
:param root_key: root signing key for this entity
:param software_statements: all software statements isssued by federations for this entity
:param federation_keys: public keys from all federations this entity is part of
:param signed_jwks_uri: URL endpoint where the signed JWKS is published
"""
verify_signing_key(root_key)
self.root_key = root_key
self.name = name
self.software_statements = [self._verify(ss, federation_keys) for ss in software_statements]
self.federation_keys = federation_keys
self.signed_jwks_uri = signed_jwks_uri
self.intermediate_key = None
self.jwks = None
self.rotate_intermediate_key()
self.rotate_jwks()
@property
def signed_intermediate_key(self):
# type: () -> str
"""
:return: JWS containing the intermediate key
"""
return self._sign(self.intermediate_key.serialize(private=False), self.root_key)
@property
def signed_jwks(self):
# type: () -> str
"""
:return: JWS containing the JWKS
"""
return self._sign(self.jwks.export_jwks(), self.intermediate_key)
@property
def software_statements_jws(self):
# type: () -> List[str]
"""
:return: all the entity's software statements as JWS
"""
return [ss.jwt.pack() for ss in self.software_statements]
def rotate_intermediate_key(self):
# type: () -> None
"""Replace the current intermediate key with a fresh one."""
self.intermediate_key = RSAKey(key=RSA.generate(1024), use="sig", alg="RS256",
kid=self._create_kid())
def rotate_jwks(self):
# type: () -> None
"""Replace the current JWKS with a fresh one."""
self.jwks = KeyJar()
kb = KeyBundle(keyusage=["enc", "sig"])
kb.append(RSAKey(key=RSA.generate(1024), kid=self._create_kid()))
self.jwks.add_kb("", kb)
def _create_kid(self):
# type () -> str
"""
Create a scope (by the entity's name) key id.
:return: a new key id
"""
return "{}/{}".format(self.name, uuid.uuid4())
def _sign(self, data, key):
# type: (Mapping[str, Union[str, Sequence[str]]], Key) -> str
"""
Create a JWS containing the data, signed with key.
:param data: data to sign
:param key: key to use for signature
:return: JWS containing the data
"""
return JWS(json.dumps(data), alg=key.alg).sign_compact(keys=[key])
def _verify(self, jws, keys):
# type: (str, Sequence[Key]) -> Dict[str, Union[str, Lists[str]]]
"""
Verify signature of JWS.
:param jws: JWS to verify signature of
:param keys: possible keys to verify the signature with
:return: payload of the JWS
"""
unpacked = JWS()
unpacked.verify_compact(jws, keys=keys)
return unpacked
def _verify_signature_chain(self, software_statements, signing_key):
# type: (Sequence[str], str) -> Tuple[str, Key]
"""
Verify the signature chain: signature of software statement (containing root key) and
signature of a signing key (in the form of a JWS).
:param software_statements: all software statements from the provider
:param signing_key: the entity's intermediate signing key
:return:
"""
software_statement = self._verify_software_statements(software_statements)
root_key = keyrep(software_statement.msg["root_key"])
signing_key = self._verify_signing_key(signing_key, root_key)
return software_statement, signing_key
def _verify_signing_key(self, signing_key, verification_key):
# type: (str, Key) -> Key
"""
Verify the signature of an intermediate signing key.
:param signing_key: JWS containing the providers intermediate key
:param verification_key: key to verify the signature with
:raise OIDCFederationError: if the signature could not be verified
:return: key contained in the JWS
"""
try:
signing_key = self._verify(signing_key, keys=[verification_key]).msg
except JWKESTException as e:
raise OIDCFederationError("The provider's signing key could not be verified.")
return keyrep(signing_key)
def _verify_software_statements(self, software_statements):
# type: (Sequence[str]) -> Dict[str, Union[str, List[str]]]
"""
Find and verify the signature of the first software statement issued by a common federation.
:param software_statements: all software statements the entity presented in the
metadata
:raise OIDCFederationError: if no software statement has been issued by a common federation
:return: payload of the first software statement issued by a common federation
"""
for jws in software_statements:
try:
return self._verify(jws, self.federation_keys)
except JWKESTException as e:
pass
raise OIDCFederationError(
"No software statement from provider issued by common federation.")
def verify_signing_key(signing_key):
if not signing_key.alg:
raise OIDCFederationError("Specified signing key must have 'alg' set.")
if not signing_key.kid:
raise OIDCFederationError("Specified signing key must have 'kid' set.")
if signing_key.use != "sig":
raise OIDCFederationError("Specified signing key must have 'use=sig'.")
|
Python
| 0.000001
|
@@ -5111,24 +5111,22 @@
or(%22The
-provider
+entity
's signi
@@ -5962,33 +5962,16 @@
onError(
-%0A
%22No soft
@@ -5989,22 +5989,8 @@
ent
-from provider
issu
@@ -5995,21 +5995,20 @@
sued by
-commo
+know
n federa
|
d458fb855df77dfb553ee3e95a8201f58aba169e
|
Increment version number
|
clippercard/__init__.py
|
clippercard/__init__.py
|
"""
Copyright (c) 2012-2017 (https://github.com/clippercard/clippercard-python)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import clippercard.client as client
Session = client.ClipperCardWebSession
__version__ = '0.4.0'
|
Python
| 0.000021
|
@@ -1197,11 +1197,11 @@
= '0.4.
-0
+1
'%0A
|
279903d44198ed302a07d1ebff584e2f4b1bd9e5
|
Fix the PostReservation Reservation creation
|
server/src/weblab/user_processing/Reservation.py
|
server/src/weblab/user_processing/Reservation.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#

import weblab.user_processing.coordinator.WebLabSchedulingStatus as WSS

import weblab.exceptions.user_processing.UserProcessingExceptions as UserProcessingExceptions


class Reservation(object):
    WAITING = "Reservation::waiting"
    WAITING_CONFIRMATION = "Reservation::waiting_confirmation"
    WAITING_INSTANCES = "Reservation::waiting_instances"
    CONFIRMED = "Reservation::confirmed"
    POST_RESERVATION = "Reservation::post_reservation"

    def __init__(self, status):
        """ __init__(status)

        status is Reservation.WAITING, Reservation.CONFIRMED, etc.
        """
        super(Reservation, self).__init__()
        self.status = status

    def __repr__(self):
        return self.status

    @staticmethod
    def translate_reservation(status):
        if status.status == WSS.WebLabSchedulingStatus.WAITING:
            reservation = WaitingReservation(status.position)
        elif status.status == WSS.WebLabSchedulingStatus.WAITING_CONFIRMATION:
            reservation = WaitingConfirmationReservation()
        elif status.status == WSS.WebLabSchedulingStatus.RESERVED:
            reservation = ConfirmedReservation(
                status.time,
                status.initial_configuration
            )
        elif status.status == WSS.WebLabSchedulingStatus.WAITING_INSTANCES:  # TODO: test me
            reservation = WaitingInstances(
                status.position
            )
        elif status.status == WSS.WebLabSchedulingStatus.POST_RESERVATION:  # TODO: test me
            reservation = PostReservationReservation(
                status.end_data
            )
        else:
            raise UserProcessingExceptions.InvalidReservationStatusException(
                "Invalid reservation status.status: '%s'. Only '%s' and '%s' expected" % (
                    status.status,
                    WSS.WebLabSchedulingStatus.WAITING,
                    WSS.WebLabSchedulingStatus.RESERVED
                )
            )
        return reservation

    @staticmethod
    def translate_reservation_from_data(status_text, position = None, time = None, initial_configuration = None, end_data = None):
        if status_text == Reservation.WAITING:
            reservation = WaitingReservation(position)
        elif status_text == Reservation.WAITING_CONFIRMATION:
            reservation = WaitingConfirmationReservation()
        elif status_text == Reservation.WAITING_INSTANCES:
            reservation = WaitingInstances(position)
        elif status_text == Reservation.CONFIRMED:
            reservation = ConfirmedReservation(time, initial_configuration)
        elif status_text == Reservation.POST_RESERVATION:
            reservation = PostReservationReservation(end_data)
        else:
            raise UserProcessingExceptions.InvalidReservationStatusException("Invalid reservation status_text: '%s'." % ( status_text ) )
        return reservation


class WaitingReservation(Reservation):
    def __init__(self, position):
        super(WaitingReservation, self).__init__(Reservation.WAITING)
        self.position = position

    def __repr__(self):
        return "WaitingReservation(position = %r)" % self.position


class ConfirmedReservation(Reservation):
    def __init__(self, time, initial_configuration):
        super(ConfirmedReservation, self).__init__(Reservation.CONFIRMED)
        self.time = time
        self.initial_configuration = initial_configuration

    def __repr__(self):
        return "ConfirmedReservation(time = %r, initial_configuration = %r)" % (self.time, self.initial_configuration)


class WaitingConfirmationReservation(Reservation):
    def __init__(self):
        super(WaitingConfirmationReservation, self).__init__(Reservation.WAITING_CONFIRMATION)

    def __repr__(self):
        return "WaitingConfirmationReservation()"


class WaitingInstances(Reservation):
    def __init__(self, position):
        super(WaitingInstances, self).__init__(Reservation.WAITING_INSTANCES)
        self.position = position

    def __repr__(self):
        return "WaitingInstances(position = %r)" % self.position


class PostReservationReservation(Reservation):
    def __init__(self, finished, initial_data, end_data):
        super(PostReservationReservation, self).__init__(Reservation.POST_RESERVATION)
        self.finished = finished
        self.initial_data = initial_data
        self.end_data = end_data

    def __repr__(self):
        return "PostReservationReservation(finished = %r, initial_data = %r, end_data = %r)" % (self.finished, self.initial_data, self.end_data)
|
Python
| 0.000007
|
@@ -1975,32 +1975,110 @@
ionReservation(%0A
+ status.finished,%0A status.initial_data,%0A
|
6e663d4010f9a79d2816a212e504773a1745a8e6
|
Fix project name!
|
src/txkube/__init__.py
|
src/txkube/__init__.py
|
# Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
A Kubernetes client.
"""
__all__ = [
    "version",
    "IKubernetesClient",
    "network_client", "memory_client",
]
from incremental import Version
from ._metadata import version_tuple as _version_tuple
version = Version("pykube", *_version_tuple)
from ._interface import IKubernetes, IKubernetesClient
from ._network import network_kubernetes
from ._memory import memory_kubernetes
|
Python
| 0
|
@@ -293,18 +293,18 @@
ersion(%22
-py
+tx
kube%22, *
|
171b951c773739b3a485bf4e4b3c2bd93950a174
|
Make tests pass on Python 3.
|
user_agents/parsers.py
|
user_agents/parsers.py
|
from collections import namedtuple

from ua_parser import user_agent_parser


MOBILE_DEVICE_FAMILIES = (
    'iPhone',
    'iPod',
    'Generic Smartphone',
    'Generic Feature Phone',
)

MOBILE_OS_FAMILIES = (
    'Windows Phone',
    'Windows Phone OS',  # Earlier versions of ua-parser returns Windows Phone OS
    'Symbian OS',
)

TABLET_DEVICE_FAMILIES = (
    'iPad',
    'BlackBerry Playbook',
    'Blackberry Playbook',  # Earlier versions of ua-parser returns "Blackberry" (caps)
    'Kindle',
    'Kindle Fire',
)

TOUCH_CAPABLE_OS_FAMILIES = (
    'iOS',
    'Android',
    'Windows Phone',
    'Windows Phone OS',
    'Windows RT',
)

TOUCH_CAPABLE_DEVICE_FAMILIES = (
    'BlackBerry Playbook',
    'Blackberry Playbook',
    'Kindle Fire',
)


def parse_version(major=None, minor=None, patch=None, patch_minor=None):
    # Returns version number tuple, attributes will be integer if they're numbers
    if major is not None and isinstance(major, basestring):
        major = int(major) if major.isdigit() else major
    if minor is not None and isinstance(minor, basestring):
        minor = int(minor) if minor.isdigit() else minor
    if patch is not None and isinstance(patch, basestring):
        patch = int(patch) if patch.isdigit() else patch
    if patch_minor is not None and isinstance(patch_minor, basestring):
        patch_minor = int(patch_minor) if patch_minor.isdigit() else patch_minor

    if patch_minor:
        return (major, minor, patch, patch_minor)
    elif patch:
        return (major, minor, patch)
    elif minor:
        return (major, minor)
    elif major:
        return (major,)
    else:
        return tuple()


Browser = namedtuple('Browser', ['family', 'version', 'version_string'])


def parse_browser(family, major=None, minor=None, patch=None, patch_minor=None):
    # Returns a browser object
    version = parse_version(major, minor, patch)
    version_string = '.'.join([str(v) for v in version])
    return Browser(family, version, version_string)


OperatingSystem = namedtuple('OperatingSystem', ['family', 'version', 'version_string'])


def parse_operating_system(family, major=None, minor=None, patch=None, patch_minor=None):
    version = parse_version(major, minor, patch)
    version_string = '.'.join([str(v) for v in version])
    return OperatingSystem(family, version, version_string)


Device = namedtuple('Device', ['family'])


def parse_device(family):
    return Device(family)


class UserAgent(object):
    def __init__(self, user_agent_string):
        ua_dict = user_agent_parser.Parse(user_agent_string)
        self.ua_string = user_agent_string
        self.os = parse_operating_system(**ua_dict['os'])
        self.browser = parse_browser(**ua_dict['user_agent'])
        self.device = parse_device(**ua_dict['device'])

    def _is_android_tablet(self):
        # Newer Android tablets don't have "Mobile" in their user agent string,
        # older ones like Galaxy Tab still have "Mobile" though they're not
        if 'Mobile Safari' not in self.ua_string:
            return True
        if 'SCH-' in self.ua_string:
            return True
        return False

    def _is_blackberry_touch_capable_device(self):
        # A helper to determine whether a BB phone has touch capabilities
        # Blackberry Bold Touch series begins with 99XX
        if 'Blackberry 99' in self.device.family:
            return True
        if 'Blackberry 95' in self.device.family:  # BB Storm devices
            return True
        if 'Blackberry 95' in self.device.family:  # BB Torch devices
            return True
        return False

    @property
    def is_tablet(self):
        if self.device.family in TABLET_DEVICE_FAMILIES:
            return True
        if (self.os.family == 'Android' and self._is_android_tablet()):
            return True
        if self.os.family == 'Windows RT':
            return True
        return False

    @property
    def is_mobile(self):
        # First check for mobile device families
        if self.device.family in MOBILE_DEVICE_FAMILIES:
            return True
        # Device is considered Mobile OS is Android and not tablet
        # This is not fool proof but would have to suffice for now
        if self.os.family == 'Android' and not self.is_tablet:
            return True
        if self.os.family == 'BlackBerry OS' and self.device.family != 'Blackberry Playbook':
            return True
        if self.os.family in MOBILE_OS_FAMILIES:
            return True
        # TODO: remove after https://github.com/tobie/ua-parser/issues/126 is closed
        if 'J2ME' in self.ua_string or 'MIDP' in self.ua_string:
            return True
        return False

    @property
    def is_touch_capable(self):
        # TODO: detect touch capable Nokia devices
        if self.os.family in TOUCH_CAPABLE_OS_FAMILIES:
            return True
        if self.device.family in TOUCH_CAPABLE_DEVICE_FAMILIES:
            return True
        if self.os.family == 'Windows 8' and 'Touch' in self.ua_string:
            return True
        if 'BlackBerry' in self.os.family and self._is_blackberry_touch_capable_device():
            return True
        return False

    @property
    def is_pc(self):
        # Returns True for "PC" devices (Windows, Mac and Linux)
        if 'Windows NT' in self.ua_string:
            return True
        # TODO: remove after https://github.com/tobie/ua-parser/issues/127 is closed
        if self.os.family == 'Mac OS X' and 'Silk' not in self.ua_string:
            return True
        if 'Linux' in self.ua_string and 'X11' in self.ua_string:
            return True
        return False

    @property
    def is_bot(self):
        return True if self.device.family == 'Spider' else False


def parse(user_agent_string):
    return UserAgent(user_agent_string)
|
Python
| 0.000001
|
@@ -1,12 +1,23 @@
+import sys%0A
from collect
@@ -82,16 +82,148 @@
arser%0A%0A%0A
+PY2 = sys.version_info%5B0%5D == 2%0APY3 = sys.version_info%5B0%5D == 3%0A%0Aif PY3:%0A string_types = str%0Aelse:%0A string_types = basestring%0A%0A%0A
MOBILE_D
@@ -621,14 +621,31 @@
ry%22
-(caps)
+instead of %22BlackBerry%22
%0A
@@ -1108,34 +1108,36 @@
ance(major,
-base
string
+_types
):%0A m
@@ -1227,34 +1227,36 @@
ance(minor,
-base
string
+_types
):%0A m
@@ -1350,26 +1350,28 @@
(patch,
-base
string
+_types
):%0A
@@ -1481,26 +1481,28 @@
_minor,
-base
string
+_types
):%0A
|
15faef8beb415211a04fd6dca976158343d8f77f
|
add abc to guid, fixed issues
|
user_profile/models.py
|
user_profile/models.py
|
from django.db import models
from django.contrib.auth.models import User
import uuid

# Create your models here.

# using the guid model
from framework.models import GUIDModel


class Profile(GUIDModel):
    author = models.ForeignKey(User)
    display_name = models.CharField(max_length=55)

    # guid
    guid = models.CharField(max_length=55, default=None)

    def as_dict(self):
        return {
            "id": self.guid,
            # TODO implement host
            "host": "",
            "displayname": self.display_name,
            "url": self.host + "/author/" + self.guid
        }
|
Python
| 0.000231
|
@@ -289,77 +289,8 @@
5)%0A%0A
- # guid%0A guid = models.CharField(max_length=55, default=None)%0A%0A
|
127ad982617c2376c9378d1ef7e50b716a077428
|
Replace imp with __import__
|
dm_root.py
|
dm_root.py
|
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The Device Model root, allowing specific platforms to populate it."""

__author__ = 'dgentry@google.com (Denton Gentry)'

import imp
import sys
import google3
import dm.catawampus
import dm.management_server
import tr.core
import traceroute


def _RecursiveImport(name):
  split = name.split('.')
  last = split.pop()
  if split:
    path = _RecursiveImport('.'.join(split)).__path__
  else:
    path = sys.path
  fileobj, path, description = imp.find_module(last, path)
  return imp.load_module(name, fileobj, path, description)


class DeviceModelRoot(tr.core.Exporter):
  """A class to hold the device models."""

  def __init__(self, loop, platform):
    tr.core.Exporter.__init__(self)
    if platform:
      self.device = _RecursiveImport('platform.%s.device' % platform)
      (params, objects) = self.device.PlatformInit(name=platform,
                                                   device_model_root=self)
    else:
      (params, objects) = (list(), list())
    self.TraceRoute = traceroute.TraceRoute(loop)
    objects.append('TraceRoute')
    self.X_CATAWAMPUS_ORG_CATAWAMPUS = dm.catawampus.CatawampusDm()
    objects.append('X_CATAWAMPUS-ORG_CATAWAMPUS')
    self.Export(params=params, objects=objects)

  def get_platform_config(self):
    """Return the platform_config.py object for this platform."""
    return self.device.PlatformConfig()

  def add_management_server(self, mgmt):
    # tr-181 Device.ManagementServer
    try:
      ms181 = self.GetExport('Device')
      ms181.ManagementServer = dm.management_server.ManagementServer181(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-181 for this platform
    # tr-98 InternetGatewayDevice.ManagementServer
    try:
      ms98 = self.GetExport('InternetGatewayDevice')
      ms98.ManagementServer = dm.management_server.ManagementServer98(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-98 for this platform
|
Python
| 0.000617
|
@@ -317,30 +317,8 @@
)'%0A%0A
-import imp%0Aimport sys%0A
impo
@@ -328,16 +328,16 @@
google3%0A
+
import d
@@ -446,263 +446,45 @@
:%0A
-split = name.split('.')%0A last = split.pop()%0A if split:%0A path = _RecursiveImport('.'.join(split)).__path__%0A else:%0A path = sys.path%0A fileobj, path, description = imp.find_module(last, path)%0A return imp.load_module(name, fileobj, path, description
+return __import__(name, fromlist=%5B''%5D
)%0A%0A%0A
|
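The diff above collapses the recursive imp-based loader into a single __import__ call. A minimal standalone sketch of the replacement (the dotted module name 'a.b.c' is illustrative):

def recursive_import(name):
    # fromlist=[''] makes __import__ return the leaf module
    # ('a.b.c') rather than the top-level package ('a'),
    # matching what imp.find_module/load_module produced before.
    # (module name is illustrative)
    return __import__(name, fromlist=[''])

On modern Python the stdlib equivalent would be importlib.import_module(name).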
8233f9c312955d56dff2fc80aed71dae6af910be
|
Check for None repos, in case of bad configuration file
|
do/main.py
|
do/main.py
|
# -*- coding: utf-8 -*-
""" DO!
I can do things thanks to Python, YAML configurations and Docker
NOTE: the command check does nothing
"""
from do.project import project_configuration, apply_variables
from do.gitter import clone, upstream
from do.builds import find_and_build
from do.utils.logs import get_logger
log = get_logger(__name__)
class Application(object):
def __init__(self, args):
self.action = args.get('command')
if self.action is None:
raise AttributeError("Misconfiguration")
else:
# print(f"\n********************\tDO: {self.action}")
print("\n********************\tDO: %s" % self.action)
self.blueprint = args.get('blueprint')
self.current_args = args
self.run()
def read_specs(self):
""" Read project configuration """
self.specs = project_configuration()
self.vars = self.specs \
.get('variables', {}) \
.get('python', {})
self.frontend = self.vars \
.get('frontend', {}) \
.get('enable', False)
log.very_verbose("Frontend is %s" % self.frontend)
def git_submodules(self):
""" Check and/or clone git projects """
initialize = self.action == 'init'
repos = self.vars.get('repos')
core = repos.pop('rapydo')
upstream(
url=core.get('online_url'),
path=core.get('path'),
do=initialize
)
myvars = {'frontend': self.frontend}
for _, repo in repos.items():
# substitute $$ values
repo = apply_variables(repo, myvars)
if repo.pop('if', False):
clone(**repo, do=initialize)
raise NotImplementedError("TO FINISH")
def builds(self):
""" Look up for builds depending on templates """
# FIXME: move here the logic
# TODO: pass the check/init option
find_and_build(
bp=self.blueprint,
frontend=self.frontend,
build=self.current_args.get('force_build'),
)
def run(self):
func = getattr(self, self.action, None)
if func is None:
# log.critical_exit(f"Command not yet implemented: {self.action}")
log.critical_exit("Command not yet implemented: %s" % self.action)
self.read_specs()
self.git_submodules()
self.builds()
# Do what you're supposed to
func()
def check(self):
raise AttributeError("Not completed yet")
def init(self):
raise AttributeError("Not completed yet")
|
Python
| 0
|
@@ -1316,16 +1316,144 @@
repos')%0A
+ if repos is None:%0A raise AttributeError(%0A %22Invalid configuration: repos section is missing%22)%0A%0A
|
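The fix guards a dict lookup that yields None when the YAML configuration omits the section; without it, the later repos.pop('rapydo') raises on None. The pattern in isolation (key names taken from the diff):

def get_repos(variables):
    # dict.get returns None for a missing key unless given a default
    repos = variables.get('repos')
    if repos is None:
        raise AttributeError(
            "Invalid configuration: repos section is missing")
    return repos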
e784227ae5da242d474bc02209289e1dabd2d3a2
|
Test Spectral Reconstruction on Sin Wave
|
utils/spectral_test.py
|
utils/spectral_test.py
|
# Lint as: python3
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import spectral
class SpectralTest(tf.test.TestCase):
def test_waveform_to_spectogram_shape(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256, 2), spectogram.shape)
def test_waveform_to_magnitude_shape(self):
waveform = np.random.normal(size=(2**14,))
magnitude = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
self.assertEqual((128, 256), magnitude.shape)
def test_waveform_to_spectogram_return(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.spectogram_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform_hat[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
def test_waveform_to_magnitude_return(self):
waveform = np.random.normal(size=(2**14,))
spectogram = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
waveform_hat = spectral.magnitude_2_waveform(spectogram, frame_length=512, frame_step=128)
# Account for extra samples from reverse transform
waveform_hat = waveform_hat[0:len(waveform)]
self.assertAllClose(waveform, waveform_hat)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
|
Python
| 0
|
@@ -873,34 +873,43 @@
np.
-random.normal(size=(
+sin(np.linspace(0, 4 * np.pi,
2**14
-,
))%0A
@@ -1365,34 +1365,43 @@
np.
-random.normal(size=(
+sin(np.linspace(0, 4 * np.pi,
2**14
-,
))%0A
|
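The fixture switches from white noise to a sine: a deterministic, band-limited signal gives the inverse transforms (especially the magnitude-only path) a realistic chance of reconstructing the input, which random noise does not. The replacement signal on its own:

import numpy as np

# 2**14 samples spanning 0 to 4*pi, i.e. two full periods;
# sizes match the test's frame_length=512, frame_step=128 setup
waveform = np.sin(np.linspace(0, 4 * np.pi, 2 ** 14))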
05939b0b797780ac1d265c8415f72f1ca44be53d
|
Modify return tag search data with tag_name
|
coco/dashboard/views.py
|
coco/dashboard/views.py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from posts.models import Post, Tag
@login_required
def index(request):
context = {'posts': Post.objects.all()}
return render(request, 'dashboard/index.html', context)
@login_required
def tagged_posts(request, tag_name=""):
context = {'posts': Post.objects.filter(tags__name=tag_name)}
return render(request, 'dashboard/search_result.html', context)
|
Python
| 0.000001
|
@@ -352,32 +352,66 @@
%0A context = %7B
+%0A 'tag': tag_name,%0A
'posts': Post.ob
@@ -443,16 +443,21 @@
ag_name)
+%0A
%7D%0A re
|
a82aab3199f264716af49f627f621597d70c9544
|
Fix width when using multiple cascaded devices
|
examples/matrix_demo.py
|
examples/matrix_demo.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Richard Hull and contributors
# See LICENSE.rst for details.
import re
import time
import argparse
from luma.led_matrix.device import max7219
from luma.core.serial import spi, noop
from luma.core.render import canvas
from luma.core.virtual import viewport
from luma.core.legacy import text, show_message
from luma.core.legacy.font import proportional, CP437_FONT, TINY_FONT, SINCLAIR_FONT
def demo(n, block_orientation):
# create matrix device
serial = spi(port=0, device=0, gpio=noop())
device = max7219(serial, cascaded=n or 1, block_orientation=block_orientation)
print("Created device")
# start demo
msg = "MAX7219 LED Matrix Demo"
print(msg)
show_message(device, msg, fill="white", font=proportional(CP437_FONT))
time.sleep(1)
print("Vertical scrolling")
words = [
"Victor", "Echo", "Romeo", "Tango", "India", "Charlie", "Alpha",
"Lima", " ", "Sierra", "Charlie", "Romeo", "Oscar", "Lima", "Lima",
"India", "November", "Golf", " "
]
virtual = viewport(device, width=64, height=len(words) * 8)
with canvas(virtual) as draw:
for i, word in enumerate(words):
text(draw, (0, i * 8), text=word, fill="white", font=proportional(CP437_FONT))
for i in range(virtual.height - device.height):
virtual.set_position((0, i))
time.sleep(0.05)
msg = "Brightness"
print(msg)
show_message(device, msg, fill="white")
time.sleep(1)
with canvas(device) as draw:
text(draw, (0, 0), text="A", fill="white")
time.sleep(1)
for _ in range(5):
for intensity in range(16):
device.contrast(intensity * 16)
time.sleep(0.1)
device.contrast(0x80)
time.sleep(1)
msg = "Alternative font!"
print(msg)
show_message(device, msg, fill="white", font=SINCLAIR_FONT)
time.sleep(1)
msg = "Proportional font - characters are squeezed together!"
print(msg)
show_message(device, msg, fill="white", font=proportional(SINCLAIR_FONT))
# http://www.squaregear.net/fonts/tiny.shtml
time.sleep(1)
msg = "Tiny is, I believe, the smallest possible font \
(in pixel size). It stands at a lofty four pixels \
tall (five if you count descenders), yet it still \
contains all the printable ASCII characters."
msg = re.sub(" +", " ", msg)
print(msg)
show_message(device, msg, fill="white", font=proportional(TINY_FONT))
time.sleep(1)
msg = "CP437 Characters"
print(msg)
show_message(device, msg)
time.sleep(1)
for x in range(256):
with canvas(device) as draw:
text(draw, (0, 0), text=chr(x), fill="white")
time.sleep(0.1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='matrix_demo arguments',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--cascaded', '-n', type=int, default=1, help='Number of cascaded MAX7219 LED matrices')
parser.add_argument('--block-orientation', type=str, default='horizontal', choices=['horizontal', 'vertical'], help='Corrects block orientation when wired vertically')
args = parser.parse_args()
try:
demo(args.cascaded, args.block_orientation)
except KeyboardInterrupt:
pass
|
Python
| 0.000002
|
@@ -1118,10 +1118,20 @@
dth=
-64
+device.width
, he
|
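The fix derives the scroll area from the device instead of hard-coding 64, which is only correct for an eight-block chain. A sketch using the same luma.core call the example already imports (the helper name is illustrative):

from luma.core.virtual import viewport

def make_scroll_area(device, n_words):
    # device.width is 8 px per cascaded max7219 block, so a
    # 4-block chain gives 32, an 8-block chain 64, and so on
    # (helper name is illustrative)
    return viewport(device, width=device.width, height=n_words * 8)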
d0de5476580b466d7b13cfc7668c267e62cb15f0
|
create 32 bit integer var, not 64 (to allow test with NETCDF4_CLASSIC)
|
examples/mpi_example.py
|
examples/mpi_example.py
|
# to run: mpirun -np 4 python mpi_example.py
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset
rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run)
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info(),format='NETCDF4_CLASSIC')
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
v = nc.createVariable('var', np.int, 'dim')
v[rank] = rank
# switch to collective mode, rewrite the data.
v.set_collective(True)
v[rank] = rank
nc.close()
# reopen the file read-only, check the data
nc = Dataset('parallel_test.nc', parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
assert rank==nc['var'][rank]
nc.close()
# reopen the file in append mode, modify the data on the last rank.
nc = Dataset('parallel_test.nc', 'a',parallel=True, comm=MPI.COMM_WORLD,
info=MPI.Info())
if rank == 3: v[rank] = 2*rank
nc.close()
# reopen the file read-only again, check the data.
# leave out the comm and info kwargs to check that the defaults
# (MPI_COMM_WORLD and MPI_INFO_NULL) work.
nc = Dataset('parallel_test.nc', parallel=True)
if rank == 3:
assert 2*rank==nc['var'][rank]
else:
assert rank==nc['var'][rank]
nc.close()
|
Python
| 0
|
@@ -504,16 +504,18 @@
, np.int
+32
, 'dim')
|
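NETCDF4_CLASSIC has no 64-bit integer type, while np.int resolves to the platform (usually 64-bit) integer, so the variable must be created as np.int32 to stay inside the classic data model. A minimal serial sketch (file name is illustrative, no MPI):

import numpy as np
from netCDF4 import Dataset

nc = Dataset('classic_test.nc', 'w', format='NETCDF4_CLASSIC')  # file name illustrative
nc.createDimension('dim', 4)
v = nc.createVariable('var', np.int32, 'dim')  # np.int would fail here
v[:] = np.arange(4, dtype=np.int32)
nc.close()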
a53fae5b42e9b33774650e017967b865552870e9
|
tag v0.7.4
|
unihan_tabular/__about__.py
|
unihan_tabular/__about__.py
|
__title__ = 'unihan-tabular'
__package_name__ = 'unihan_tabular'
__description__ = 'Export UNIHAN to Python, Data Package, CSV, JSON and YAML'
__version__ = '0.7.3'
__author__ = 'Tony Narlock'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2017 Tony Narlock'
|
Python
| 0.000001
|
@@ -155,17 +155,17 @@
= '0.7.
-3
+4
'%0A__auth
|
4420892ad3e8c1797753e7893772e53785efb570
|
add logfile handling
|
updatebot/cmdline/simple.py
|
updatebot/cmdline/simple.py
|
#
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import os
from updatebot import log
from updatebot import config
def validatePlatform(platform, configDir):
validPlatforms = os.listdir(configDir)
if platform not in validPlatforms:
print ('Invalid platform %s... Please select from the following '
'available platforms %s' % (platform, ', '.join(validPlatforms)))
return False
return True
def usage(argv):
print 'usage: %s <platform name>' % argv[0]
return 1
def main(argv, workerFunc, configDir='/etc/mirrorball', enableLogging=True):
if enableLogging:
log.addRootLogger()
if len(argv) != 2:
return usage(argv)
platform = argv[1]
if not validatePlatform(platform, configDir):
return 1
cfg = config.UpdateBotConfig()
cfg.read(os.path.join(configDir, platform, 'updatebotrc'))
rc = workerFunc(cfg)
return rc
|
Python
| 0.000001
|
@@ -1007,16 +1007,26 @@
rm name%3E
+ %5Blogfile%5D
' %25 argv
@@ -1131,51 +1131,86 @@
if
+l
en
-ableLogging:%0A log.addRootLogger()%0A
+(argv) %3C 2 or len(argv) %3E 3:%0A return usage(argv)%0A%0A logFile = None
%0A
@@ -1227,12 +1227,12 @@
gv)
-!= 2
+== 3
:%0A
@@ -1237,33 +1237,98 @@
-return usage(argv
+logFile = argv%5B2%5D%0A%0A if enableLogging:%0A log.addRootLogger(logFile=logFile
)%0A%0A p
@@ -1413,16 +1413,17 @@
turn 1%0A%0A
+%0A
cfg
|
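The diff turns the command line into "prog <platform name> [logfile]" and threads the optional logfile into the root logger. The argument handling in isolation (helper name is illustrative):

def parse_args(argv):
    # usage: prog <platform name> [logfile]   (helper name illustrative)
    if len(argv) < 2 or len(argv) > 3:
        raise SystemExit('usage: %s <platform name> [logfile]' % argv[0])
    log_file = argv[2] if len(argv) == 3 else None
    return argv[1], log_file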
4ffa2483021b360eb7460bfcf1d845712806390b
|
Move motor ports because our retail EV3 brick's port A doesn't work.
|
app/ev3.py
|
app/ev3.py
|
# See http://ev3dev-lang.readthedocs.org/projects/python-ev3dev/en/stable/index.html
# for API details -- specific Sensor/Motor docs http://www.ev3dev.org/docs/
from time import sleep
from ev3dev.auto import *
# Connect two large motors on output ports A and C
lmotor, rmotor = [LargeMotor(address) for address in (OUTPUT_A, OUTPUT_C)]
moving = False
# Connect medium motors on output port B for the camera
cmotor = MediumMotor(OUTPUT_B)
camera_pos = 0
cmotor.reset()
# Check that the motors are actually connected
assert lmotor.connected
assert rmotor.connected
color_sensor = ColorSensor()
touch_sensor = TouchSensor()
gyro = GyroSensor()
def _start():
'''
Start the motors using run_direct() so we can just vary speed
'''
global moving
if not moving:
for motor in (lmotor, rmotor):
motor.duty_cycle_sp = 0
motor.run_direct()
moving = True
def forward(speed=50):
'''
Move the robot forward
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=speed
def backward(speed=50):
'''
Reverse
'''
_start()
for motor in (lmotor, rmotor):
motor.duty_cycle_sp=-speed
def stop():
'''
Stop the robot
'''
global moving
for motor in (lmotor, rmotor):
motor.stop()
moving = False
def turn_right(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=0
def turn_left(speed=40):
'''
Turn while moving forward
'''
_start()
lmotor.duty_cycle_sp=0
rmotor.duty_cycle_sp=speed
def spin_right(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=speed
rmotor.duty_cycle_sp=-speed
def spin_left(speed=50):
'''
Turn on the spot
'''
_start()
lmotor.duty_cycle_sp=-speed
rmotor.duty_cycle_sp=speed
def speak():
Sound.speak('Excuse me!')
def color():
return color_sensor.value()
def touch():
return touch_sensor.value() == 1
def direction():
return gyro.value() % 360 # degrees, but needs a 0 value
def camera_left():
global camera_pos
camera_pos -= 25
camera_pos = max(-150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_right():
global camera_pos
camera_pos += 25
camera_pos = min(150, camera_pos)
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
def camera_forward():
global camera_pos
camera_pos = 0
cmotor.run_to_abs_pos(speed_regulation_enabled='on', speed_sp=100, position_sp=camera_pos)
|
Python
| 0
|
@@ -316,17 +316,17 @@
(OUTPUT_
-A
+B
, OUTPUT
@@ -326,17 +326,17 @@
OUTPUT_
-C
+D
)%5D%0Amovin
@@ -430,17 +430,17 @@
(OUTPUT_
-B
+C
)%0Acamera
|
98cb673b358671211a0aa7fed0725dbb732200d0
|
Fix edge cases due to artworkUrl100 being missing
|
coverpy/coverpy.py
|
coverpy/coverpy.py
|
import os
import requests
from . import exceptions
class Result:
""" Parse an API result into an object format. """
def __init__(self, item):
""" Call the list parser. """
self.parse(item)
def parse(self, item):
""" Parse the given list into self variables. """
self.artworkThumb = item['artworkUrl100']
self.artist = item['artistName']
self.album = item['collectionName']
self.url = item['url']
# Take some measures to detect whether it is a song or album
if 'kind' in item:
self.type = item['kind'].lower()
elif 'wrapperType' in item:
if item['wrapperType'].lower() == 'track':
self.type = 'song'
elif item['wrapperType'].lower() == 'collection':
self.type = 'album'
elif 'collectionType' in item:
self.type = 'album'
else:
# Assuming edge case of the API
self.type = 'unknown'
if self.type == 'song':
self.name = item['trackName']
elif self.type == 'album':
self.name = item['collectionName']
else:
self.name = 'unknown'
def artwork(self, size = 625):
""" Return the artwork to the thumb URL contained. """
# Replace the size because the API doesn't hand out full-res links. It only gives 60x60 and 100x100.
# However, I found a way to circumvent it.
return self.artworkThumb.replace('100x100bb', "%sx%s" % (size, size))
class CoverPy:
def __init__(self):
""" Initialize CoverPy. Set a base_url. """
self.base_url = "https://itunes.apple.com/search/"
def _get(self, payload, override = False, entities = False):
""" Get a payload using the base_url. General purpose GET interface """
if override:
data = requests.get("%s%s" % (self.base_url, override))
else:
payload['entity'] = "musicArtist,musicTrack,album,mix,song"
payload['media'] = 'music'
data = requests.get(self.base_url, params = payload)
if data.status_code != 200:
raise requests.HTTPError
else:
return data
def _search(self, term, limit = 1):
""" Expose a friendlier internal API for executing searches """
payload = {
'term': term,
'limit': limit
}
req = self._get(payload)
return req
def get_cover(self, term, limit = 1, debug = False):
""" Get an album cover, return a Result object """
search = self._search(term, limit)
parsed = search.json()
if parsed['resultCount'] == 0:
raise exceptions.NoResultsException
result = parsed['results'][0]
result['url'] = search.url
return Result(result)
|
Python
| 0
|
@@ -263,24 +263,32 @@
iables. %22%22%22%0A
+%09%09try:%0A%09
%09%09self.artwo
@@ -319,16 +319,695 @@
rl100'%5D%0A
+%09%09except KeyError as e:%0A%09%09%09# A vital parameter is missing, and magic on our end can't get us out of this error case situation.%0A%09%09%09# Therefore, we try to save the user from issues (mostly KeyErrors), and stop them from using the public API.%0A%09%09%09# Just return a NoResultsException, because the data is corrupt on the API's end,%0A%09%09%09# and the library can't use the results.%0A%0A%09%09%09# This gets many edge cases in which the API had issues dealing with.%0A%09%09%09raise exceptions.NoResultsException%0A%0A%09%09%0A%09%09# The above should prevent most keyerrors, this one is just guarding. However, if something fails here,%0A%09%09# I can't do much because the API sometimes skips on things and this is _not vital._%0A
%09%09self.a
|
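The fix converts a KeyError on the one indispensable API field into the library's NoResultsException, so callers see a defined error instead of a raw KeyError. The shape of the guard, with a stand-in exception class:

class NoResultsException(Exception):
    # stand-in for coverpy's exceptions.NoResultsException
    pass

def require_artwork(item):
    try:
        return item['artworkUrl100']
    except KeyError:
        # partial/corrupt API payload: surface it as "no results"
        raise NoResultsException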
7f0b530db953698e6e923366be6d0d98033e4afb
|
add description
|
prontopull.py
|
prontopull.py
|
# -*- coding: utf-8 -*-
from urllib2 import Request, urlopen
import json
from pandas.io.json import json_normalize
import time
#from datetime import datetime
url = "https://secure.prontocycleshare.com/data/stations.json"
request = Request(url)
response = urlopen(request)
data = json.loads(response.read())
df=json_normalize(data['stations'])
timestring = time.strftime("%Y%m%d-%H%M%S")
SAVE_PATH = "../Desktop/pronto/pronto%s.csv" %timestring
df.to_csv(SAVE_PATH, sep = ",")
|
Python
| 0.000004
|
@@ -17,16 +17,104 @@
f-8 -*-%0A
+'''%0APulls data from pronto cycle share. Combine with cron job to%0Aget data over time%0A'''
%0Afrom ur
@@ -212,39 +212,8 @@
time
-%0A#from datetime import datetime
%0A%0Aur
|
d016e9f2620688bc1059977a12df638393c3fff1
|
Bump version
|
lintreview/__init__.py
|
lintreview/__init__.py
|
__version__ = '2.1.1'
|
Python
| 0
|
@@ -12,11 +12,11 @@
= '2.1.
-1
+2
'%0A
|
c86e7107d2f9d8079b0010ac100f627f1c34d127
|
Update ipc_lista1.2.py
|
lista1/ipc_lista1.2.py
|
lista1/ipc_lista1.2.py
|
#ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Write a program that asks for a number and then shows the message "The number entered was [number]".
number = input("Enter a number: ")
print "The number entered was ", number
|
Python
| 0.000001
|
@@ -77,17 +77,16 @@
%0A#%0A#%0A#%0A%0A
-%0A
#Fa%C3%A7a um
|
85432b9509744eadc47c73a21b49f9ea93172c78
|
Update ipc_lista1.8.py
|
lista1/ipc_lista1.8.py
|
lista1/ipc_lista1.8.py
|
#ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615
|
Python
| 0
|
@@ -61,9 +61,11 @@
o - 1615
+31
%0A
|
694eb7367f6f9dcd78f42a0a4bcd0b674451d250
|
improve log message #11
|
livereload/compiler.py
|
livereload/compiler.py
|
#!/usr/bin/python
"""livereload.compiler
Provides a set of compilers for web developers.
Available compilers now:
+ less
+ uglifyjs
+ slimmer
"""
import os
import functools
import logging
from subprocess import Popen, PIPE
def make_folder(dest):
folder = os.path.split(dest)[0]
if not folder:
return
if os.path.isdir(folder):
return
try:
os.makedirs(folder)
except:
pass
def _get_http_file(url, build_dir='build/assets'):
import hashlib
key = hashlib.md5(url).hexdigest()
filename = os.path.join(os.getcwd(), build_dir, key)
if os.path.exists(filename):
return filename
make_folder(filename)
import urllib
print('Downloading: %s' % url)
urllib.urlretrieve(url, filename)
return filename
class BaseCompiler(object):
"""BaseCompiler
BaseCompiler defines the basic syntax of a Compiler.
>>> c = BaseCompiler('a')
>>> c.write('b') #: write compiled code to 'b'
>>> c.append('c') #: append compiled code to 'c'
"""
def __init__(self, path):
self.filetype = os.path.splitext(path)[1]
if path.startswith('http://') or path.startswith('https://'):
path = _get_http_file(path)
self.path = path
def _get_code(self):
f = open(self.path)
code = f.read()
f.close()
return code
def write(self, output):
"""write code to output"""
logging.info('write %s' % output)
make_folder(output)
f = open(output, 'w')
f.write(self._get_code())
f.close()
def append(self, output):
"""append code to output"""
logging.info('append %s' % output)
make_folder(output)
f = open(output, 'a')
f.write(self._get_code())
f.close()
class _CommandCompiler(BaseCompiler):
command = ''
def _get_code(self):
cmd = self.command.split()
cmd.append(self.path)
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if stderr:
raise Exception(stderr)
#: stdout is bytes, decode for python3
return stdout.decode()
class LessCompiler(_CommandCompiler):
command = 'lessc --compress'
def lessc(path, output, mode='w'):
def _compile(path, output, mode):
c = LessCompiler(path)
if mode == 'a':
c.append(output)
return
c.write(output)
return
return functools.partial(_compile, path, output, mode)
class UglifyJSCompiler(_CommandCompiler):
command = 'uglifyjs -nc'
def uglifyjs(path, output, mode='w'):
def _compile(path, output, mode):
c = UglifyJSCompiler(path)
if mode == 'a':
c.append(output)
return
c.write(output)
return
return functools.partial(_compile, path, output, mode)
class SlimmerCompiler(BaseCompiler):
def _get_code(self):
import slimmer
f = open(self.path)
code = f.read()
f.close()
if self.filetype == '.css':
return slimmer.css_slimmer(code)
if self.filetype == '.js':
return slimmer.js_slimmer(code)
if self.filetype == '.html':
return slimmer.xhtml_slimmer(code)
return code
def slimmer(path, output, mode='w'):
def _compile(path, output, mode):
c = SlimmerCompiler(path)
if mode == 'a':
c.append(output)
return
c.write(output)
return
return functools.partial(_compile, path, output, mode)
class RstCompiler(_CommandCompiler):
command = 'rst2html.py'
def rstc(path, output, mode='w'):
def _compile(path, output, mode):
c = RstCompiler(path)
if mode == 'a':
c.append(output)
else:
c.write(output)
return functools.partial(_compile, path, output, mode)
|
Python
| 0.000022
|
@@ -1541,32 +1541,31 @@
w')%0A
-f.write(
+code =
self._get_co
@@ -1560,32 +1560,74 @@
self._get_code()
+%0A if code:%0A f.write(code
)%0A f.clos
@@ -1995,24 +1995,41 @@
self.path)%0A%0A
+ try:%0A
p =
@@ -2065,16 +2065,169 @@
r=PIPE)%0A
+ except OSError as e:%0A logging.error(e)%0A logging.error(%22it maybe you haven't installed %25s%22, cmd%5B0%5D)%0A return None%0A
@@ -2294,31 +2294,53 @@
-raise Exception(stderr)
+logging.error(stderr)%0A return None
%0A
|
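Instead of raising on compiler stderr, the diff logs the problem and returns None, and it also catches the OSError that Popen raises when the binary is not installed. A compact sketch of that subprocess handling (helper name is illustrative):

import logging
from subprocess import Popen, PIPE

def run_tool(command, path):
    # helper name is illustrative; mirrors _CommandCompiler._get_code
    cmd = command.split() + [path]
    try:
        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
    except OSError as e:
        # raised when cmd[0] (e.g. lessc, uglifyjs) is not installed
        logging.error(e)
        logging.error("maybe you haven't installed %s", cmd[0])
        return None
    stdout, stderr = p.communicate()
    if stderr:
        logging.error(stderr)
        return None
    return stdout.decode()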
9cad93eb5f04e9f455cec679089d8c8787ce3b04
|
Enable appsembler reporting settings
|
lms/envs/appsembler.py
|
lms/envs/appsembler.py
|
import os
import json
from path import path
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
CONFIG_ROOT = path('/edx/app/edxapp/') #don't hardcode this in the future
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
with open(CONFIG_ROOT / CONFIG_PREFIX + 'env.json') as env_file:
ENV_TOKENS = json.load(env_file)
APPSEMBLER_FEATURES = ENV_TOKENS.get('APPSEMBLER_FEATURES', {})
# search APPSEMBLER_FEATURES first, env variables second, fallback to None
GOOGLE_TAG_MANAGER_ID = APPSEMBLER_FEATURES.get('GOOGLE_TAG_MANAGER_ID', os.environ.get('GOOGLE_TAG_MANAGER_ID', None))
INTERCOM_APP_ID = APPSEMBLER_FEATURES.get('INTERCOM_APP_ID', os.environ.get('INTERCOM_APP_ID', ''))
INTERCOM_API_KEY = APPSEMBLER_FEATURES.get('INTERCOM_API_KEY', os.environ.get('INTERCOM_API_KEY', ''))
INTERCOM_USER_EMAIL = APPSEMBLER_FEATURES.get('INTERCOM_USER_EMAIL', os.environ.get('INTERCOM_USER_EMAIL', ''))
|
Python
| 0.000474
|
@@ -908,16 +908,247 @@
ER_EMAIL', ''))%0A
+%0A%0Aif APPSEMBLER_FEATURES.get('ENABLE_APPSEMBLER_REPORTING', False):%0A from appsembler_reporting.settings import APPSEMBLER_REPORTING%0A%0A APPSEMBLER_REPORTING.update(APPSEMBLER_FEATURES.get(%0A 'APPSEMBLER_REPORTING', %7B%7D ))%0A
|
4315d028f114ae1005f57d33df964be05b2fb8a6
|
use bin/penchy_test_job instead of running it directly
|
docs/commented_sample_job.py
|
docs/commented_sample_job.py
|
# A job description has two parts: part 1 introduces the involved elements and
# part 2 joins them in a job
# part 1: introduce the elements
# setup job environment
from penchy.jobs import *
# define a node
node = NodeConfiguration(
# that is the localhost
'localhost',
# ssh port is 22
22,
# the executing user is the current one
os.environ['USER'],
# we execute in /tmp
'/tmp',
# all jvm are specified relative to /usr/bin
'/usr/bin')
# define a jvm with relative path java
jvm = jvms.JVM('java')
# you can also specify an absolute path:
# jvm = jvms.JVM('/usr/java')
# fuse jvm and node
jconfig = makeJVMNodeConfiguration(jvm, node,
# and give it a decorative name (optional)
name="Simple Example!")
# setup a workload
w = workloads.ScalaBench('dummy')
# and add it the the jvms that should execute it
jvm.workload = w
# setup filter, used in flows
f1 = filters.DacapoHarness()
f2 = filters.Print()
# part 2: form elements to a job
job = Job(
# setup the JVMNodeConfigurations that are included, can be a single one or
# a list of configurations
configurations=jconfig,
# specify the flow of data on clients
client_flow=[
# flow from Scalabench workload to DacapoHarness filter
Edge(w, f1,
# and match filter inputs to workload outputs (here with same name)
[('stderr', 'stderr'),
('exit_code', 'exit_code')]),
# flow from ScalaBench workload to Print filter
Edge(w, f2,
# and feed the stderr and exit_code outputs, prefixed with 'workload_', to the filter
[('stderr', 'workload_stderr'),
('exit_code', 'workload_exit_code')]),
# feed whole output of DacapoHarness filter to print filter (with the name of the output)
Edge(f1, f2)
],
# there is no flow on the server side
server_flow=[],
# jvms will be run twice
invocations = 2
)
# a nice trick: check the job for plausibility if run as ``python <jobname>``
if __name__ == '__main__':
job.check()
|
Python
| 0
|
@@ -215,16 +215,75 @@
port *%0A%0A
+# import the configuration file (if needed)%0Aimport config%0A%0A
# define
@@ -2091,126 +2091,4 @@
2%0A)%0A
-%0A# a nice trick: check the job for plausibility if run as %60%60python %3Cjobname%3E%60%60%0Aif __name__ == '__main__':%0A job.check()%0A
|
927de70d3212c5106846b6f6f6333b93eceacea5
|
add python script
|
pub-python.py
|
pub-python.py
|
# coding=utf8
import paramiko
import datetime
HOSTS = [
{
'HOST':'hive1_host',
'PORT':9092,
'USER':'root'
},
{
'HOST':'hive2_host',
'PORT':9092,
'USER':'root'
}
]
class SSH():
def __init__(self):
self.client = None
def connect(self,host,port=22,username='root',password=None):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(host, port, username=username, password=password, timeout=10)
return self
def exe(self,cmd,isprint=True):
if not cmd:
return
stdin, stdout, stderr = self.client.exec_command(cmd)
if isprint:
for std in stdout.readlines():
print std,
return stdin, stdout, stderr
def close(self):
if self.client:
self.client.close()
def showText(s, typ):
if typ == 'RED':
return redText(s)
elif typ == 'GREEN':
return greenText(s)
elif typ == 'YELLOW':
return yellowText(s)
else:
return s
def redText(s):
return "\033[1;31m%s\033[0m" % s
def greenText(s):
return "\033[1;32m%s\033[0m" % s
def yellowText(s):
return "\033[1;33m%s\033[0m" % s
def main():
for item in HOSTS:
ssh = SSH().connect(item['HOST'],item['PORT'],username=item['USER'])
##back
base = '/root/mpush/mpush-jar-with-dependency.tar.gz'
to = '/root/mpush/back/mpush-jar-with-dependency.tar.gz.'+datetime.datetime.now().strftime('%Y%m%d%H%M%S')
ssh.exe('cp %s %s '%(base,to))
ssh.close()
if __name__ == "__main__":
main()
|
Python
| 0.000245
|
@@ -39,16 +39,33 @@
datetime
+%0Aimport telnetlib
%0A%0AHOSTS
@@ -240,16 +240,44 @@
%7D%0A%5D%0A%0A
+BASEPATH = '/root/mpush'%0A%0A%0A%0A
class SS
@@ -972,17 +972,16 @@
ose()%0A%0A%0A
-%0A
def show
@@ -1458,16 +1458,17 @@
USER'%5D)%0A
+%0A
@@ -1473,16 +1473,18 @@
##back
+up
%0A
@@ -1491,28 +1491,26 @@
base =
-'/root/mpush
+BASEPATH+'
/mpush-j
@@ -1549,28 +1549,26 @@
to =
-'/root/mpush
+BASEPATH+'
/back/mp
@@ -1688,16 +1688,274 @@
se,to))%0A
+ print greenText('backup mpush ok')%0A%0A ##telnet remove zk info%0A #ssh.exe('telent 127.0.0.1 4001')%0A #ssh.exe('')%0A%0A ## kill process%0A ssh.exe('ps aux%7Cgrep mpush-cs.jar')%0A%0A ## start process%0A # ssh.exe('')%0A%0A
|
9f699f66c1ff14d884157cee358793d715b1e702
|
delete print
|
tests/test_apiserver.py
|
tests/test_apiserver.py
|
# -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <git@40huo.cn>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
# After the tests finish, manually kill the API server and the scan process:
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')
# kill -9 $(ps aux|grep cobra.py|awk '{print $2}')
# The first test run after starting the server may hang
import requests
from cobra.api import start
import json
start(host="127.0.0.1", port=5000, debug=True)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": "https://github.com/wufeifei/grw.git",
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "Add scan job successfully" in re.content
assert "scan_id" in re.content
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"scan_id": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
print re.content
assert "1001" in re.content
assert "msg" in re.content
assert "scan_id" in re.content
assert "status" in re.content
assert "report" in re.content
|
Python
| 0.000195
|
@@ -1294,29 +1294,8 @@
rs)%0A
- print re.content%0A
|
17d79c5ec4584ea2f1f8b7fe52b157b3988bb7fc
|
test gap score
|
tests/test_gap_score.py
|
tests/test_gap_score.py
|
"""
Using gap score to determine optimal cluster number
"""
import unittest
from unittest import TestCase
from flaky import flaky
import numpy as np
import scipy
from uncurl import gap_score
class GapScoreTest(TestCase):
def setUp(self):
pass
def test_gap_score(self):
data_mat = scipy.io.loadmat('data/10x_pooled_400.mat')
data = data_mat['data']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
# just test that the score is in a very broad range
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 20)
def test_gap_score_2(self):
data_mat = scipy.io.loadmat('data/GSE60361_dat.mat')
data = data_mat['Dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 30)
@flaky(max_runs=3)
def test_gap_score_3(self):
data_mat = scipy.io.loadmat('data/SCDE_test.mat')
data = data_mat['dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k < 10)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000004
|
@@ -1236,32 +1236,50 @@
reproc_data(data
+, gene_subset=True
)%0A max_k,
|
e3479cd460fb4e3a66db396f2bb1d59aa870adb3
|
Remove unused variable
|
tests/test_generator.py
|
tests/test_generator.py
|
"""Validate Generator class."""
import pytest
from gogoutils.generator import Generator, GeneratorError
PROJECTS = {
'repo1': {
'project': 'gogoair',
'repo': 'test',
'env': 'dev',
},
'repo2': {
'project': 'gogoair',
'repo': 'test',
'env': 'stage',
},
'repo3': {
'project': 'project1',
'repo': 'repo1',
'env': 'env1',
},
'repo4': {
'project': 'gogoair.test',
'repo': 'unknown',
'env': 'stage',
},
}
ERROR_PROJECTS = {
'repo1': {
'project': 'gogoair',
'repo': 'test',
},
'repo2': {
'project': 'gogoair',
'env': 'stage',
},
'repo3': {
'repo': 'repo1',
'env': 'env1',
},
'repo4': {
'env': 'unknown',
},
}
def test_default_env():
"""Validate defaults."""
project = {
'project': 'gogoair',
'repo': 'no_env',
}
g = Generator(
project['project'],
project['repo'],
)
dns_elb = '{0}.{1}.dev.example.com'.format(
project['repo'],
project['project'],
)
assert dns_elb == g.dns()['elb']
def test_empty_params():
"""Validate empty."""
for project in ERROR_PROJECTS:
args = []
for key in ['project', 'repo', 'env']:
try:
value = ERROR_PROJECTS[project][key]
except KeyError:
value = None
args.append(value)
with pytest.raises(GeneratorError):
g = Generator(*args)
def test_camel_cases():
"""Validate Application name is lowercase."""
app_name = 'Testgogoair'
g = Generator('gogoair', 'Test')
assert app_name.lower() == g.app_name()
def test_valid_camel_cases():
"""Validate case sensitivity for Git repository names."""
repo_name = 'gogoair/Test-config'
g = Generator('gogoair', 'Test-config')
uri_dict = g.gitlab()
assert repo_name == uri_dict['main']
def test_generate_dns():
"""Validate generated DNS values."""
for project in PROJECTS:
g = Generator(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
PROJECTS[project]['env'],
)
dns = '{0}.{1}.{2}.example.com'.format(
PROJECTS[project]['repo'],
PROJECTS[project]['project'],
PROJECTS[project]['env'],
)
instance = '{0}{1}-xx.{2}.example.com'.format(
PROJECTS[project]['repo'],
PROJECTS[project]['project'],
PROJECTS[project]['env'],
)
assert dns == g.dns()['elb']
assert instance == g.dns()['instance']
def test_generate_app():
"""Validate generated App values."""
for project in PROJECTS:
g = Generator(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
PROJECTS[project]['env'],
)
app = '{0}{1}'.format(
PROJECTS[project]['repo'],
PROJECTS[project]['project'],
)
assert app == g.app_name()
def test_generate_archaius():
"""Validate generated Archiaus values."""
options = {}
for repo in PROJECTS.values():
g = Generator(
repo['project'],
repo['repo'],
repo['env'],
)
options['s3'] = 'archaius-{0}/{1}/{2}{1}/'.format(
repo['env'],
repo['project'],
repo['repo'],
)
options['bucket'] = 'archaius-{0}'.format(repo['env'])
options['path'] = '{0}/{1}{0}'.format(repo['project'], repo['repo'])
archaius = g.archaius()
for option in archaius:
assert options[option] == archaius[option]
def test_generate_iam():
"""Validate generated IAM values."""
for project in PROJECTS:
g = Generator(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
PROJECTS[project]['env'],
)
iam_base = '{0}_{1}'.format(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
)
iam_user = iam_base
iam_group = PROJECTS[project]['project']
iam_role = '{0}_role'.format(iam_base)
iam_policy = '{0}_policy'.format(iam_base)
iam_profile = '{0}_profile'.format(iam_base)
assert iam_user == g.iam()['user']
assert iam_group == g.iam()['group']
assert iam_role == g.iam()['role']
assert iam_policy == g.iam()['policy']
assert iam_profile == g.iam()['profile']
def test_generate_jenkins():
"""Validate generated Jenkins values."""
for project in PROJECTS:
g = Generator(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
PROJECTS[project]['env'],
)
job_name = '{0}_{1}'.format(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
)
assert job_name == g.jenkins()['name']
def test_generate_gitlab():
"""Validate generated GitLab values."""
for project in PROJECTS:
g = Generator(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
PROJECTS[project]['env'],
)
git = '{0}/{1}'.format(
PROJECTS[project]['project'],
PROJECTS[project]['repo'],
)
git_main = git
git_qe = '{0}-qa'.format(git)
git_config = '{0}-config'.format(git)
assert git_main == g.gitlab()['main']
assert git_qe == g.gitlab()['qe']
assert git_config == g.gitlab()['config']
def test_generate_app_property():
"""Validate deprecated property."""
g = Generator('project', 'repo')
assert g.app == 'repoproject'
|
Python
| 0.000347
|
@@ -1538,28 +1538,24 @@
%0A
- g =
Generator(*
|
b6cb3b5239aa5cb6e18d97a3a0e827052b6873db
|
add ability to print args too
|
pygam/core.py
|
pygam/core.py
|
"""
Core classes
"""
from __future__ import absolute_import
import numpy as np
from pygam.utils import round_to_n_decimal_places
def nice_repr(name, param_kvs, line_width=30, line_offset=5, decimals=3):
"""
tool to do a nice repr of a class.
Parameters
----------
name : str
class name
param_kvs : dict
dict containing class parameters names as keys,
and the corresponding values as values
line_width : int
desired maximum line width.
default: 30
line_offset : int
desired offset for new lines
default: 5
decimals : int
number of decimal places to keep for float values
default: 3
Returns
-------
out : str
nicely formatted repr of class instance
"""
if len(param_kvs) == 0:
# if the object has no params it's easy
return '{}()'.format(name)
# sort keys and values
ks = list(param_kvs.keys())
vs = list(param_kvs.values())
idxs = np.argsort(ks)
param_kvs = [(ks[i],vs[i]) for i in idxs]
param_kvs = param_kvs[::-1]
out = ''
current_line = name + '('
while len(param_kvs) > 0:
k, v = param_kvs.pop()
if issubclass(v.__class__, (float, np.ndarray)):
# round the floats first
v = round_to_n_decimal_places(v, n=decimals)
param = '{}={},'.format(k, str(v))
else:
param = '{}={},'.format(k, repr(v))
if len(current_line + param) <= line_width:
current_line += param
else:
out += current_line + '\n'
current_line = ' '*line_offset + param
if len(current_line) < line_width and len(param_kvs) > 0:
current_line += ' '
out += current_line[:-1] # remove trailing comma
out += ')'
return out
class Core(object):
def __init__(self, name=None, line_width=70, line_offset=3):
"""
creates an instance of the Core class
comes loaded with useful methods
Parameters
----------
name : str, default: None
line_width : int, default: 70
number of characters to print on a line
line_offset : int, default: 3
number of characters to indent after the first line
Returns
-------
self
"""
self._name = name
self._line_width = line_width
self._line_offset = line_offset
self._exclude = []
def __str__(self):
"""__str__ method"""
if self._name is None:
return self.__repr__()
return self._name
def __repr__(self):
"""__repr__ method"""
name = self.__class__.__name__
return nice_repr(name, self.get_params(),
line_width=self._line_width,
line_offset=self._line_offset,
decimals=4)
def get_params(self, deep=False):
"""
returns a dict of all of the object's user-facing parameters
Parameters
----------
deep : boolean, default: False
when True, also gets non-user-facing parameters
Returns
-------
dict
"""
if deep is True:
return self.__dict__
return dict([(k,v) for k,v in list(self.__dict__.items()) \
if (k[0] != '_') \
and (k[-1] != '_') and (k not in self._exclude)])
def set_params(self, deep=False, force=False, **parameters):
"""
sets an object's parameters
Parameters
----------
deep : boolean, default: False
when True, also sets non-user-facing parameters
force : boolean, default: False
when True, also sets parameters that the object does not already
have
**parameters : parameters to set
Returns
-------
self
"""
param_names = self.get_params(deep=deep).keys()
for parameter, value in parameters.items():
if (parameter in param_names) or force:
setattr(self, parameter, value)
return self
|
Python
| 0
|
@@ -197,16 +197,27 @@
cimals=3
+, args=None
):%0A %22
@@ -800,20 +800,20 @@
%0A if
-len(
+not
param_kv
@@ -817,14 +817,22 @@
_kvs
-) == 0
+ and not args
:%0A
@@ -1079,16 +1079,104 @@
idxs%5D%0A%0A
+ if args is not None:%0A param_kvs = %5B(None, arg) for arg in args%5D + param_kvs%0A%0A
para
@@ -1454,16 +1454,126 @@
cimals)%0A
+ v = str(v)%0A else:%0A v = repr(v)%0A%0A if k is None:%0A # handle args%0A
@@ -1579,35 +1579,32 @@
param = '%7B%7D
-=%7B%7D
,'.format(k, str
@@ -1601,17 +1601,9 @@
mat(
-k, str(v)
+v
)%0A
@@ -1657,16 +1657,11 @@
(k,
-repr(
v)
-)
+%0A
%0A
@@ -3105,16 +3105,27 @@
cimals=4
+, args=None
)%0A%0A d
|
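The diff teaches nice_repr to render positional args (carried with key None) ahead of the keyword params. The core of that formatting rule, reduced to a few lines (helper name and example values are illustrative):

def format_params(param_kvs, args=None):
    # positional args become (None, value) pairs so the loop can
    # emit 'value,' instead of 'key=value,' for them
    # (helper name and example values are illustrative)
    if args is not None:
        param_kvs = [(None, arg) for arg in args] + list(param_kvs)
    parts = []
    for k, v in param_kvs:
        parts.append(repr(v) if k is None else '{}={}'.format(k, repr(v)))
    return ', '.join(parts)

For example, format_params([('lam', 0.6)], args=['spline_term']) yields "'spline_term', lam=0.6".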
3ce54da38119987c2e23089cca3e14a1664cd0c9
|
remove dots at the end of description
|
python2nix.py
|
python2nix.py
|
#!/usr/bin/env python2.7
import sys
import requests
import pip_deps
PACKAGE = """\
{name_only} = pythonPackages.buildPythonPackage rec {{
name = "{name}";
propagatedBuildInputs = [ {inputs} ];
src = fetchurl {{
url = "{url}";
md5 = "{md5}";
}};
meta = with stdenv.lib; {{
description = "{description}";
homepage = {homepage};
license = {license};
}};
}};
"""
LICENSE_MAP = {
'http://www.opensource.org/licenses/mit-license.php': 'licenses.mit',
'MIT': 'licenses.mit',
'PSF': 'licenses.psfl'
}
_missing = object()
def guess_license(info):
l = info['info']['license']
license = LICENSE_MAP.get(l, _missing)
if license is _missing:
sys.stderr.write('WARNING: unknown license (please update LICENSE_MAP): ' + l + '\n')
return 'unknown'
return license
_pip_dependency_cache = {}
def pip_dump_dependencies(name): # memoized version
if name in _pip_dependency_cache:
return _pip_dependency_cache[name]
ret = pip_deps.pip_dump_dependencies(name)
_pip_dependency_cache[name] = ret
return ret
def build_inputs(name):
reqs, vsns = pip_dump_dependencies(name)
def vsn(name):
vsn = vsns.get(name)
if not vsn:
name = name.replace('_', '-') # pypi workaround ?
vsn = vsns.get(name)
if vsn:
vsn = "_" + vsn
return vsn or ''
return [name + vsn(name) for name, specs in reqs[name]]
def package_to_info(package):
url = "https://pypi.python.org/pypi/{}/json".format(package)
r = requests.get(url)
return r.json()
def info_to_expr(info):
name_only = info['info']['name']
version = info['info']['version']
name = name_only + "-" + version
inputs = ' '.join(build_inputs(name_only))
url = None
md5 = None
for url_item in info['urls']:
url_ext = url_item['url']
if url_ext.endswith('zip') or url_ext.endswith('tar.gz'):
url = url_item['url']
md5 = url_item['md5_digest']
break
if url is None:
raise Exception('No download url found :-(')
description = info['info']['description'].split('\n')[0]
homepage = info['info']['home_page']
license = guess_license(info)
return PACKAGE.format(**locals())
if __name__ == '__main__':
print info_to_expr(package_to_info(sys.argv[1]))
|
Python
| 0.001447
|
@@ -2193,16 +2193,28 @@
'%5Cn')%5B0%5D
+.rstrip('.')
%0A hom
|
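The one-line fix trims the trailing period from the first description line, per Nix packaging style. Note that rstrip('.') removes every trailing dot, so an ellipsis is collapsed as well:

def short_description(description):
    # first line only, without a trailing '.' (or '...')
    # (helper name is illustrative)
    return description.split('\n')[0].rstrip('.')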
3f0ab3d63ad0a602b3332b9c83c742caae47289a
|
Fix test for invalid queue class
|
tests/test_lib_queue.py
|
tests/test_lib_queue.py
|
"""
This file contains the tests for the job queue modules.
In particular, this tests
lib/queue/*.py
"""
from huey import RedisHuey
import mock
from privacyidea.app import create_app
from privacyidea.config import TestingConfig
from privacyidea.lib.error import ServerError
from privacyidea.lib.queue import job, JOB_COLLECTOR, JobCollector, get_job_queue, wrap_job, has_job_queue
from privacyidea.lib.queues.huey_queue import HueyQueue
from privacyidea.lib.queues.base import QueueError
from .base import OverrideConfigTestCase, MyTestCase
class TestSender(object):
""" defined in order to be able to mock the ``send_mail`` function in tests """
def send_mail(*args, **kwargs):
pass
SENDER = TestSender()
@job("test.my_add")
def my_add(a, b):
return a + b
@job("test.my_send_mail")
def my_send_mail(message):
SENDER.send_mail(message)
return 1337
class NoQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = ""
def test_01_no_job_queue(self):
self.assertFalse(has_job_queue())
with self.assertRaises(ServerError):
get_job_queue()
def test_02_collector(self):
self.assertIsInstance(JOB_COLLECTOR, JobCollector)
self.assertDictContainsSubset({
"test.my_add": (my_add, (), {}),
"test.my_send_mail": (my_send_mail, (), {})
}, JOB_COLLECTOR.jobs)
class InvalidQueueTestCase(MyTestCase):
def test_01_create_app_fails(self):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "obviously.invalid"
with mock.patch.dict("privacyidea.config.config", {"testing": Config}):
with self.assertRaises(ImportError):
create_app("testing", "")
class HueyQueueTestCase(OverrideConfigTestCase):
class Config(TestingConfig):
PI_JOB_QUEUE_CLASS = "privacyidea.lib.queues.huey_queue.HueyQueue"
PI_JOB_QUEUE_NAME = "myqueuename"
PI_JOB_QUEUE_ALWAYS_EAGER = True # avoid redis server for testing
def test_01_app_job_queue(self):
queue = get_job_queue()
self.assertIsInstance(queue, HueyQueue)
self.assertEqual(queue.options, {"name": "myqueuename", "always_eager": True})
self.assertTrue({"test.my_add", "test.my_send_mail"}.issubset(set(queue.jobs)))
self.assertIsInstance(queue.huey, RedisHuey)
self.assertEqual(queue.huey.name, "myqueuename")
self.assertFalse(queue.huey.store_none)
with self.assertRaises(QueueError):
queue.register_job("test.my_add", lambda x: x)
def test_03_enqueue_jobs(self):
queue = get_job_queue()
queue.enqueue("test.my_add", (3, 4), {}) # No result is stored
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
queue.enqueue("test.my_send_mail", ("hi",), {})
mock_mail.assert_called_once_with("hi")
with self.assertRaises(QueueError):
queue.enqueue("test.unknown", ("hi",), {})
def test_04_wrap_jobs(self):
wrapped = wrap_job("test.my_send_mail", True)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = wrapped("hi")
mock_mail.assert_called_once_with("hi")
self.assertTrue(result)
with mock.patch.object(SENDER, 'send_mail') as mock_mail:
result = my_send_mail("hi")
mock_mail.assert_called_once_with("hi")
self.assertEqual(result, 1337)
|
Python
| 0.000001
|
@@ -1483,13 +1483,18 @@
app_
-fails
+graciously
(sel
@@ -1684,86 +1684,70 @@
-with self.assertRaises(ImportError):%0A create_app(%22testing%22, %22%22)
+app = create_app(%22testing%22, %22%22) # we do not throw an exception
%0A%0A%0Ac
|
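The test now asserts that create_app tolerates an unimportable PI_JOB_QUEUE_CLASS instead of raising ImportError at startup. As a generic sketch of that idea (not privacyidea's actual implementation), a loader that degrades to "no queue":

import importlib
import logging

def load_queue_class(dotted_path):
    # generic pattern, not the project's real code
    try:
        module_name, class_name = dotted_path.rsplit('.', 1)
        return getattr(importlib.import_module(module_name), class_name)
    except (ImportError, AttributeError, ValueError) as e:
        # e.g. "obviously.invalid": log and continue without a queue
        logging.warning('job queue %s not usable: %s', dotted_path, e)
        return None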
04f7b8aa85bf2bb2c16eb246ee7c9d7ae5fc8cff
|
check contents
|
tests/test_roundtrip.py
|
tests/test_roundtrip.py
|
import json
import bitjws
def test_encode_decode():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key)
header, payload = bitjws.validate_deserialize(ser)
rawheader, rawpayload = ser.rsplit('.', 1)[0].split('.')
origheader = bitjws.base64url_decode(rawheader.encode('utf8'))
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
assert header == json.loads(origheader.decode('utf8'))
assert payload == json.loads(origpayload.decode('utf8'))
def test_audience():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key, requrl='https://example.com/api/login')
header, payload = bitjws.validate_deserialize(
ser, requrl='https://example.com/api/login')
assert header is not None
assert payload is not None
|
Python
| 0
|
@@ -389,49 +389,352 @@
ader
- == json.loads(origheader.decode('utf8'))
+%5B'typ'%5D == 'JWT'%0A assert header%5B'alg'%5D == 'CUSTOM-BITCOIN-SIGN'%0A assert header%5B'kid'%5D == bitjws.pubkey_to_addr(key.pubkey.serialize())%0A assert len(header) == 3%0A assert header == json.loads(origheader.decode('utf8'))%0A%0A assert isinstance(payload.get('exp', ''), float)%0A assert payload%5B'aud'%5D is None%0A assert len(payload) == 2
%0A
@@ -836,32 +836,79 @@
s.PrivateKey()%0A%0A
+ audience = 'https://example.com/api/login'%0A
ser = bitjws
@@ -939,39 +939,16 @@
url=
-'https://example.com/api/login'
+audience
)%0A
@@ -999,17 +999,8 @@
ize(
-%0A
ser,
@@ -1011,39 +1011,16 @@
url=
-'https://example.com/api/login'
+audience
)%0A
@@ -1066,20 +1066,58 @@
payload is not None%0A
+ assert payload%5B'aud'%5D == audience%0A
|
3059e2cf76e2e7bfb90c6c03afc5ee372294de94
|
use with_setup instead of setUp/tearDown
|
tests/test_spotifile.py
|
tests/test_spotifile.py
|
import unittest
import os
from subprocess import check_call
from sh import ls
mountpoint = '/tmp/spotifile_test_mount'
class SpotifileTestClass(unittest.TestCase):
@classmethod
def setUpClass(cls):
if not os.path.exists(mountpoint):
os.mkdir(mountpoint)
@classmethod
def tearDownClass(cls):
if os.path.exists(mountpoint):
os.rmdir(mountpoint)
def setUp(self):
check_call(['./spotifile', mountpoint])
def tearDown(self):
check_call(['fusermount', '-u', mountpoint])
def test_ls(self):
assert 'connection' in ls(mountpoint)
|
Python
| 0
|
@@ -1,29 +1,61 @@
-import unittest%0Aimport os
+from nose import with_setup%0Aimport os%0Afrom os import path
%0Afro
@@ -102,16 +102,21 @@
mport ls
+, cat
%0A%0A%0Amount
@@ -155,98 +155,33 @@
nt'%0A
-class SpotifileTestClass(unittest.TestCase):%0A%09@classmethod%0A%09def setUpClass(cls
+%0Adef fs_mount(
):%0A
-%09
%09if not
os.p
@@ -176,19 +176,16 @@
%09if not
-os.
path.exi
@@ -203,17 +203,16 @@
int):%0A%09%09
-%09
os.mkdir
@@ -228,56 +228,118 @@
nt)%0A
-%0A%09@classmethod%0A%09def tearDownClass(cls):%0A%09
+%09check_call(%5B'./spotifile', mountpoint%5D)%0A%0Adef fs_unmount():%0A%09check_call(%5B'fusermount', '-u', mountpoint%5D)%0A
%09if
-os.
path
@@ -361,17 +361,16 @@
int):%0A%09%09
-%09
os.rmdir
@@ -387,55 +387,84 @@
t)%0A%0A
-%09def setUp(self):%0A%09%09check_call(%5B'./spotifile',
+@with_setup(fs_mount, fs_unmount)%0Adef test_ls():%0A%09assert 'connection' in ls(
moun
@@ -473,102 +473,72 @@
oint
-%5D
)%0A%0A
-%09def tearDown(self):%0A%09%09check_call(%5B'fuser
+@with_setup(fs_
mount
-'
,
-'-u', mountpoint%5D)%0A%0A%09def test_ls(self
+fs_unmount)%0Adef test_cat_connection(
):%0A
-%09
%09ass
@@ -546,34 +546,60 @@
rt '
-connection' in ls(mountpoint)
+logged in' in cat(path.join(mountpoint, 'connection'))%0A
%0A
|
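The rewrite drops the unittest class in favour of nose's function-level fixtures, where with_setup attaches mount/unmount around each test function. The decorator pattern in isolation (names are illustrative):

from nose import with_setup

state = []

def setup_func():
    state.append('ready')   # runs before the decorated test

def teardown_func():
    del state[:]            # runs after it, when setup succeeded

@with_setup(setup_func, teardown_func)
def test_state():
    # names are illustrative; mirrors the fs_mount/fs_unmount pair above
    assert state == ['ready']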
c74faacfc91c8925ced63abda00e7e097903e0f7
|
Remove stray print statements.
|
tests/test_table_xls.py
|
tests/test_table_xls.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateexcel
class TestXLS(agate.AgateTestCase):
def setUp(self):
self.rows = (
(1, 'a', True, '11/4/2015', '11/4/2015 12:22 PM'),
(2, u'👍', False, '11/5/2015', '11/4/2015 12:45 PM'),
(None, 'b', None, None, None)
)
self.column_names = [
'number', 'text', 'boolean', 'date', 'datetime'
]
self.column_types = [
agate.Number(), agate.Text(), agate.Boolean(),
agate.Date(), agate.DateTime()
]
self.table = agate.Table(self.rows, self.column_names, self.column_types)
def test_from_xls(self):
table = agate.Table.from_xls('examples/test.xls')
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_file_like(self):
with open('examples/test.xls', 'rb') as f:
table = agate.Table.from_xls(f)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_name(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 'data')
self.table.print_table()
table.print_table()
print('here')
print(self.table.rows[2])
print(table.rows[2])
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_sheet_index(self):
table = agate.Table.from_xls('examples/test_sheets.xls', 1)
self.assertColumnNames(table, self.column_names)
self.assertColumnTypes(table, [agate.Number, agate.Text, agate.Boolean, agate.Date, agate.DateTime])
self.assertRows(table, [r.values() for r in self.table.rows])
def test_zeros(self):
table = agate.Table.from_xls('examples/test_zeros.xls')
self.assertColumnNames(table, ['ordinal', 'binary', 'all_zero'])
self.assertColumnTypes(table, [agate.Number, agate.Number, agate.Number])
self.assertRows(table, [
[0, 0, 0],
[1, 1, 0],
[2, 1, 0]
])
|
Python
| 0.000022
|
@@ -1534,158 +1534,8 @@
')%0A%0A
- self.table.print_table()%0A table.print_table()%0A%0A print('here')%0A%0A print(self.table.rows%5B2%5D)%0A print(table.rows%5B2%5D)%0A%0A%0A
|
eb64546b08bf93c0e1ffc5be8db413a2ebbca621
|
correct vasp util unit test
|
tests/test_vasp_util.py
|
tests/test_vasp_util.py
|
import pytest
import os
import sys
import numpy as np
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.vasp.outputs import Vasprun
from flare.struc import Structure, get_unique_species
from flare.dft_interface.vasp_util import parse_dft_forces, run_dft, \
edit_dft_input_positions, dft_input_to_structure, \
parse_dft_forces_and_energy, md_trajectory_from_vasprun, \
check_vasprun, run_dft_par
from pytest import raises
pytestmark = pytest.mark.filterwarnings("ignore::UserWarning", \
"ignore::pymatgen.io.vasp.outputs.UnconvergedVASPWarning")
def cleanup_vasp_run():
os.system('rm POSCAR')
os.system('rm vasprun.xml')
def test_check_vasprun():
fname = './test_files/test_vasprun.xml'
vr = Vasprun(fname)
assert type(check_vasprun(fname)) == Vasprun
assert type(check_vasprun(vr)) == Vasprun
with raises(ValueError):
check_vasprun(0)
# ------------------------------------------------------
# test otf helper functions
# ------------------------------------------------------
@pytest.mark.parametrize("poscar",
[
'./test_files/test_POSCAR'
]
)
def test_structure_parsing(poscar):
structure = dft_input_to_structure(poscar)
pmg_struct = Poscar.from_file(poscar).structure
assert len(structure.species_labels) == len(pmg_struct)
assert (structure.cell == pmg_struct.lattice.matrix).all()
for i, spec in enumerate(structure.species_labels):
assert spec == pmg_struct[i].specie.symbol
assert np.isclose(structure.positions, pmg_struct.cart_coords).all()
@pytest.mark.parametrize("poscar",
[
'./test_files/test_POSCAR'
]
)
def test_input_to_structure(poscar):
assert isinstance(dft_input_to_structure(poscar), Structure)
@pytest.mark.parametrize('cmd, poscar',
[
('python ./test_files/dummy_vasp.py',
'./test_files/test_POSCAR')
]
)
def test_vasp_calling(cmd, poscar):
cleanup_vasp_run()
structure = dft_input_to_structure(poscar)
forces1 = run_dft('.', cmd, structure=structure, en=False)
forces2, energy2 = run_dft('.', cmd, structure=structure, en=True)
forces3 = parse_dft_forces('./test_files/test_vasprun.xml')
forces4, energy4 = parse_dft_forces_and_energy(
'./test_files/test_vasprun.xml')
vr_step = Vasprun('./test_files/test_vasprun.xml').ionic_steps[-1]
ref_forces = vr_step['forces']
ref_energy = vr_step['electronic_steps'][-1]['e_0_energy']
assert len(forces1) == len(ref_forces)
assert len(forces2) == len(ref_forces)
assert len(forces3) == len(ref_forces)
assert len(forces4) == len(ref_forces)
for i in range(structure.nat):
assert np.isclose(forces1[i], ref_forces[i]).all()
assert np.isclose(forces2[i], ref_forces[i]).all()
assert np.isclose(forces3[i], ref_forces[i]).all()
assert np.isclose(forces4[i], ref_forces[i]).all()
assert energy2 == ref_energy
assert energy4 == ref_energy
cleanup_vasp_run()
@pytest.mark.parametrize('cmd, poscar',
[
('python ./test_files/dummy_vasp.py test_fail',
'./test_files/test_POSCAR')
]
)
def test_vasp_calling_fail(cmd, poscar):
structure = dft_input_to_structure(poscar)
with raises(FileNotFoundError):
_ = run_dft('.', cmd, structure=structure, en=False)
def test_vasp_input_edit():
os.system('cp test_files/test_POSCAR ./POSCAR')
structure = dft_input_to_structure('./test_files/test_POSCAR')
structure.vec1 += np.random.randn(3)
structure.positions[0] += np.random.randn(3)
new_file = edit_dft_input_positions('./POSCAR', structure=structure)
final_structure = dft_input_to_structure(new_file)
assert np.isclose(final_structure.vec1, structure.vec1, atol=1e-4).all()
assert np.isclose(final_structure.positions[0],
structure.positions[0], atol=1e-4).all()
os.system('rm ./POSCAR')
os.system('rm ./POSCAR.bak')
@pytest.mark.skipif(not os.environ.get('VASP_COMMAND',
False), reason='VASP_COMMAND not found '
'in environment: Please install VASP '
' and set the VASP_COMMAND env. '
'variable to point to cp2k.popt')
def test_run_dft_par():
os.system('cp test_files/test_POSCAR ./POSCAR')
test_structure = dft_input_to_structure('./POSCAR')
for dft_command in [None]:
with raises(FileNotFoundError):
run_dft_par('POSCAR',test_structure,dft_command=dft_command,
n_cpus=2)
call_string = "echo 'testing_call'"
forces = run_dft_par('POSCAR', test_structure, dft_command=call_string,
n_cpus=1, serial_prefix=' ', dft_out='test_files/test_vasprun.xml',
screen_out='TEST_CALL_OUT')
with open("TEST_CALL_OUT", 'r') as f:
assert 'testing_call' in f.readline()
os.system('rm ./TEST_CALL_OUT')
assert isinstance(forces, np.ndarray)
# ------------------------------------------------------
# test static helper functions
# ------------------------------------------------------
def test_md_trajectory():
structures = md_trajectory_from_vasprun('test_files/test_vasprun.xml')
assert len(structures) == 2
for struct in structures:
assert struct.forces.shape == (6, 3)
assert struct.energy is not None
assert struct.stress.shape == (3, 3)
structures = md_trajectory_from_vasprun('test_files/test_vasprun.xml',
ionic_step_skips=2)
assert len(structures) == 1
|
Python
| 0.000001
|
@@ -3918,49 +3918,8 @@
')%0A%0A
- structure.vec1 += np.random.randn(3)%0A
|
7fb5b04bb4054f60cefc79efabcef07979628285
|
add directory encoding test in test_conf
|
tests/unit/test_conf.py
|
tests/unit/test_conf.py
|
import os
from twisted.trial import unittest
from lbrynet import conf
class SettingsTest(unittest.TestCase):
def setUp(self):
os.environ['LBRY_TEST'] = 'test_string'
def tearDown(self):
del os.environ['LBRY_TEST']
@staticmethod
def get_mock_config_instance():
settings = {'test': (str, '')}
env = conf.Env(**settings)
return conf.Config({}, settings, environment=env)
def test_envvar_is_read(self):
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)
def test_invalid_setting_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertRaises(AssertionError, settings.set, 'invalid_name', 123)
def test_invalid_data_type_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertIsNone(settings.set('test', 123))
self.assertRaises(AssertionError, settings.set, 'test', 123, ('fake_data_type',))
def test_setting_precedence(self):
settings = self.get_mock_config_instance()
settings.set('test', 'cli_test_string', data_types=(conf.TYPE_CLI,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'this_should_not_take_precedence', data_types=(conf.TYPE_ENV,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
self.assertEqual('runtime_takes_precedence', settings['test'])
|
Python
| 0.000001
|
@@ -39,17 +39,16 @@
nittest%0A
-%0A
from lbr
@@ -2135,28 +2135,452 @@
cedence', settings%5B'test'%5D)%0A
+%0A def test_data_dir(self):%0A # check if these directories are returned as string and not unicode%0A # otherwise there will be problems when calling os.path.join on%0A # unicode directory names with string file names%0A self.assertEqual(str, type(conf.default_download_directory))%0A self.assertEqual(str, type(conf.default_data_dir))%0A self.assertEqual(str, type(conf.default_lbryum_dir))%0A
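
The added test_data_dir guards against Python 2's str/unicode split: joining a unicode directory name with a non-ASCII byte-string file name forces an implicit ASCII decode. A minimal sketch of that failure mode under Python 2 (the paths are illustrative):

import os

data_dir = u'/home/user/lbry'        # unicode directory name
file_name = 'caf\xc3\xa9.blob'       # non-ASCII byte-string file name

try:
    # posixpath.join concatenates, triggering an implicit ASCII decode
    path = os.path.join(data_dir, file_name)
except UnicodeDecodeError as exc:
    print('mixing unicode dirs and byte file names fails: %s' % exc)
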
|
c81393a8de27595f61cffc09fa6fa8352bb54b9c
|
Return a random set of factors
|
palindrome-products/palindrome_products.py
|
palindrome-products/palindrome_products.py
|
from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = pals[value]
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
|
Python
| 0.999999
|
@@ -1,8 +1,22 @@
+import random%0A
from col
@@ -576,16 +576,35 @@
ctors =
+random.choice(list(
pals%5Bval
@@ -606,16 +606,18 @@
s%5Bvalue%5D
+))
%0A ret
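
The list() wrapper in this diff is load-bearing: random.choice indexes into its argument, and sets are not subscriptable. A standalone sketch:

import random

factor_pairs = {(11, 11), (1, 121)}        # a set of factor tuples, as in pals[value]
pair = random.choice(list(factor_pairs))   # random.choice(factor_pairs) would raise TypeError
print(pair)
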
|
a1a261a88667c3066fd9e11e7af4673c1fca1b44
|
Add tags! Task name and owner to start.
|
teuthology/run_tasks.py
|
teuthology/run_tasks.py
|
import sys
import logging
from teuthology.sentry import get_client as get_sentry_client
from .config import config as teuth_config
log = logging.getLogger(__name__)
def run_one_task(taskname, **kwargs):
submod = taskname
subtask = 'task'
if '.' in taskname:
(submod, subtask) = taskname.rsplit('.', 1)
parent = __import__('teuthology.task', globals(), locals(), [submod], 0)
mod = getattr(parent, submod)
fn = getattr(mod, subtask)
return fn(**kwargs)
def run_tasks(tasks, ctx):
stack = []
try:
for taskdict in tasks:
try:
((taskname, config),) = taskdict.iteritems()
except ValueError:
raise RuntimeError('Invalid task definition: %s' % taskdict)
log.info('Running task %s...', taskname)
manager = run_one_task(taskname, ctx=ctx, config=config)
if hasattr(manager, '__enter__'):
manager.__enter__()
stack.append(manager)
except Exception, e:
ctx.summary['success'] = False
if 'failure_reason' not in ctx.summary:
ctx.summary['failure_reason'] = str(e)
log.exception('Saw exception from tasks.')
sentry = get_sentry_client()
if sentry:
exc_id = sentry.get_ident(sentry.captureException())
event_url = "{server}/search?q={id}".format(
server=teuth_config.sentry_server, id=exc_id)
log.exception(" Sentry event: %s" % event_url)
sentry_url_list = ctx.summary.get('sentry_events', [])
sentry_url_list.append(event_url)
ctx.summary['sentry_events'] = sentry_url_list
if ctx.config.get('interactive-on-error'):
from .task import interactive
log.warning('Saw failure, going into interactive mode...')
interactive.task(ctx=ctx, config=None)
finally:
try:
exc_info = sys.exc_info()
while stack:
manager = stack.pop()
log.debug('Unwinding manager %s', manager)
try:
suppress = manager.__exit__(*exc_info)
except Exception, e:
ctx.summary['success'] = False
if 'failure_reason' not in ctx.summary:
ctx.summary['failure_reason'] = str(e)
log.exception('Manager failed: %s', manager)
if exc_info == (None, None, None):
# if first failure is in an __exit__, we don't
# have exc_info set yet
exc_info = sys.exc_info()
if ctx.config.get('interactive-on-error'):
from .task import interactive
log.warning('Saw failure, going into interactive mode...')
interactive.task(ctx=ctx, config=None)
else:
if suppress:
sys.exc_clear()
exc_info = (None, None, None)
if exc_info != (None, None, None):
log.debug('Exception was not quenched, exiting: %s: %s', exc_info[0].__name__, exc_info[1])
raise SystemExit(1)
finally:
# be careful about cyclic references
del exc_info
|
Python
| 0
|
@@ -1262,24 +1262,129 @@
if sentry:%0A
+ tags = %7B%0A 'task': taskname,%0A 'owner': ctx.owner,%0A %7D%0A
@@ -1433,16 +1433,25 @@
ception(
+tags=tags
))%0A
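
A distilled sketch of the tagging pattern, assuming a raven-style client whose captureException accepts a tags keyword (as the client returned by get_sentry_client appears to):

def report_failure(sentry, taskname, owner):
    tags = {
        'task': taskname,   # which task was running when it failed
        'owner': owner,     # who scheduled the run
    }
    try:
        raise RuntimeError('simulated task failure')
    except RuntimeError:
        # the client reads sys.exc_info() internally
        return sentry.captureException(tags=tags)
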
|
45c1446779cbce050573264101b1afe3d7fe42b4
|
Update BaseSearchCommand
|
elasticsearch_django/management/commands/__init__.py
|
elasticsearch_django/management/commands/__init__.py
|
# -*- coding: utf-8 -*-
"""Base command for search-related management commands."""
import logging
from django.core.management.base import BaseCommand
from elasticsearch.exceptions import TransportError
logger = logging.getLogger(__name__)
class BaseSearchCommand(BaseCommand):
"""Base class for commands that interact with the search index."""
description = "Base search command."
def add_arguments(self, parser):
"""Add default base options of --noinput and indexes."""
parser.add_argument(
'--noinput',
action='store_false',
dest='interactive',
default=True,
            help='Do not display user prompts - may affect data.'
)
parser.add_argument(
'indexes',
nargs='*',
help="Names of indexes on which to run the command."
)
def do_index_command(self, index, interactive):
"""Run a command against a named index."""
raise NotImplementedError()
def handle(self, *args, **options):
"""Run do_index_command on each specified index and log the output."""
for index in options.pop('indexes'):
data = {}
try:
print "calling do_index_command", index, options
data = self.do_index_command(index, **options)
except TransportError as ex:
logger.warn("ElasticSearch threw an error: %s", ex)
data = {
"index": index,
"status": ex.status_code,
"reason": ex.info['error']['reason']
}
finally:
logger.info(data)
|
Python
| 0.000001
|
@@ -1211,73 +1211,8 @@
ry:%0A
- print %22calling do_index_command%22, index, options%0A
|
7fe12dadbaba23a41664e49a003c328f3d82f67c
|
bump version and adjust copyright year
|
s3scan.py
|
s3scan.py
|
"""
:copyright: (c) 2012 by Mike Taylor
:license: BSD, see LICENSE for more details.
Usage:
python s3scan.py [-k <api_key>] [-s <api_secret>] [-f <format>]
Options:
-k | --key S3 Access Key
-s | --secret S3 Access Secret
-f | --format Output format
[Optional]
Where the output format can be either 'text' or 'csv'
If no parameters are given, the configuration file 's3scan.cfg' will
be loaded and should contain:
[aws]
access_key = YOUR_KEY_HERE
access_secret = YOUR_SECRETS_HERE
format = text
"""
VERSION = (0, 1, 0, '')
__author__ = 'bear (Mike Taylor)'
__contact__ = 'bear@bear.im'
__copyright__ = 'Copyright 2013, Mike Taylor'
__license__ = 'BSD 2-Clause'
__site__ = 'https://github.com/bear/s3scan'
__version__ = u'.'.join(map(str, VERSION[0:3])) + u''.join(VERSION[3:])
import os
import ConfigParser
from optparse import OptionParser
from xml.etree import cElementTree as ET
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
def getConfig():
parser = OptionParser()
parser.add_option('-k', '--key', dest='api_key', default='', help='S3 Access Key')
parser.add_option('-s', '--secret', dest='api_secret', default='', help='S3 Access Secret')
parser.add_option('-f', '--format', dest='format', default='text', help='Output format: text, csv')
options, args = parser.parse_args()
api_key = options.api_key
api_secret = options.api_secret
format = options.format
if os.path.isfile('s3scan.cfg'):
config = ConfigParser.SafeConfigParser({ 'access_key': api_key,
'access_secret': api_secret,
'format': format,
})
config.read('s3scan.cfg')
api_key = config.get('aws', 'access_key')
api_secret = config.get('aws', 'access_secret')
format = config.get('aws', 'format').lower()
return (api_key, api_secret, format)
def discoverBuckets(api_key, api_secret):
c = S3Connection(api_key, api_secret, calling_format=OrdinaryCallingFormat())
rs = c.get_all_buckets()
buckets = {}
maxName = 0
for b in rs:
bucketName = b.name
buckets[bucketName] = {}
if len(bucketName) > maxName:
maxName = len(bucketName)
acp = b.get_acl()
xml = acp.acl.to_xml()
xRoot = ET.fromstring(xml)
for grant in xRoot:
grantee = { 'id': None, 'name': None, 'permission': [] }
for item in grant:
if item.tag == 'Grantee':
for element in item:
if element.tag == 'ID':
grantee['id'] = element.text
elif element.tag == 'DisplayName':
grantee['name'] = element.text
elif element.tag == 'URI':
grantee['id'] = element.text
grantee['name'] = element.text.split('/')[-1]
elif item.tag == 'Permission':
grantee['permission'].append(item.text)
if grantee['name'] not in buckets[bucketName]:
buckets[bucketName][grantee['name']] = []
buckets[bucketName][grantee['name']].append((grantee['id'], grantee['permission']))
return buckets, maxName
def csvFormat(bucket):
reads = []
writes = []
reads_acp = []
writes_acp = []
for grantee in bucket:
for grantee_id, permission in bucket[grantee]:
if 'READ' in permission:
reads.append(grantee)
if 'WRITE' in permission:
writes.append(grantee)
if 'READ_ACP' in permission:
reads_acp.append(grantee)
if 'WRITE_ACP' in permission:
writes_acp.append(grantee)
l = [key,
';'.join(writes),
';'.join(reads),
';'.join(writes_acp),
';'.join(reads_acp),
]
return ','.join(l)
def textFormat(bucket, maxName):
reads = []
writes = []
reads_acp = []
writes_acp = []
for grantee in bucket:
for grantee_id, permission in bucket[grantee]:
if 'READ' in permission:
reads.append(grantee)
if 'WRITE' in permission:
writes.append(grantee)
if 'READ_ACP' in permission:
reads_acp.append(grantee)
if 'WRITE_ACP' in permission:
writes_acp.append(grantee)
s = '{0:>{1}} --'.format(key, maxName)
t = '\n' + ' '*(maxName + 4)
if len(writes) > 0:
s += ' Write: %s;' % ','.join(writes)
if len(reads) > 0:
s += ' Read: %s;' % ','.join(reads)
if len(writes_acp) > 0:
s += t + 'ACP Write: %s' % ','.join(writes_acp)
if len(reads_acp) > 0:
s += t + 'ACP Read: %s' % ','.join(reads_acp)
return s
if __name__ == '__main__':
api_key, api_secret, format = getConfig()
buckets, maxName = discoverBuckets(api_key, api_secret)
for key in buckets:
bucket = buckets[key]
if format == 'csv':
print csvFormat(bucket)
else:
print textFormat(bucket, maxName)
|
Python
| 0
|
@@ -17,16 +17,21 @@
(c) 2012
+-2015
by Mike
@@ -571,17 +571,17 @@
(0, 1,
-0
+1
, '')%0A%0A_
@@ -677,17 +677,22 @@
ight 201
-3
+2-2015
, Mike T
|
f7e6d1a39837458a18c977e4766d831f89aa870d
|
Correct connection returning behavior
|
tilequeue/postgresql.py
|
tilequeue/postgresql.py
|
from itertools import chain
from itertools import cycle
import psycopg2
import threading
class RoundRobinConnectionFactory(object):
def __init__(self, conn_info, hosts):
self.conn_info = conn_info
self.hosts_gen = iter(cycle(hosts))
def __call__(self, ignored_dsn):
host = self.hosts_gen.next()
conn_info = dict(self.conn_info, host=host)
conn = psycopg2.connect(**conn_info)
return conn
class NoPoolingConnectionPool(object):
# This adheres to the connection pool interface, but generates a
# new connection with every request
def __init__(self, minconn, maxconn, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def getconn(self, key=None):
return psycopg2.connect(*self.args, **self.kwargs)
def putconn(self, conn, key=None, close=False):
# this pool implementation always closes connections
try:
conn.close()
except:
pass
def closeall(self):
pass
class ThreadedConnectionPool(object):
    # Custom version of a threaded connection pool. This is a simpler
    # implementation than the psycopg2 threaded connection pool. In
# particular, this pool attempts to be much safer when returning
# connections back to the pool.
def __init__(self, minconn, maxconn, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.maxconn = maxconn
self.lock = threading.Lock()
self.not_in_use = {}
self.in_use = {}
for i in xrange(minconn):
conn = self._make_conn()
self.not_in_use[id(conn)] = conn
def _make_conn(self):
return psycopg2.connect(*self.args, **self.kwargs)
def getconn(self, key=None):
assert key is None, 'Keys not supported'
with self.lock:
if self.not_in_use:
conn_id, conn = self.not_in_use.popitem()
self.in_use[conn_id] = conn
return conn
else:
if len(self.in_use) == self.maxconn:
raise RuntimeError(
'Maximum number of connections created: %d' %
self.maxconn)
conn = self._make_conn()
self.in_use[id(conn)] = conn
return conn
def putconn(self, conn, key=None, close=False):
assert key is None, 'Keys not supported'
with self.lock:
conn_id = id(conn)
if conn_id not in self.in_use:
raise ValueError('Connection not part of pool')
del self.in_use[conn_id]
if close:
try:
conn.close()
except:
pass
if not conn.closed:
self.not_in_use[conn_id] = conn
def closeall(self):
with self.lock:
for conn_id, conn in chain(self.not_in_use.iteritems(),
self.in_use.iteritems()):
try:
conn.close()
except:
pass
self.not_in_use.clear()
self.in_use.clear()
class HostAffinityConnectionPool(object):
# conn_info is expected to have all connection information except
# for the host. For each host, n_conn_per_host connections will be
# made to it. If a connection to a particular host fails, a new
# one will be created to ensure that all connections are balanced.
def __init__(self, hosts, n_conn_per_host, conn_info):
self.hosts = hosts
self.conn_info = conn_info
self.n_conn_per_host = n_conn_per_host
self.lock = threading.Lock()
self.conns_by_host = {}
for host in hosts:
conn_info_with_host = dict(conn_info, host=host)
for i in range(n_conn_per_host):
conn = self._make_conn(conn_info_with_host)
self.conns_by_host.setdefault(host, []).append(conn)
self.host_conns_in_use = set()
self.host_conns_not_in_use = set(hosts)
def _make_conn(self, conn_info):
return psycopg2.connect(**conn_info)
def get_conns_for_host(self, host):
with self.lock:
assert host in self.conns_by_host, 'Unknown host: %s' % host
assert host in self.host_conns_not_in_use, \
'Connections already in use for host: %s' % host
conns = self.conns_by_host[host]
if len(conns) < self.n_conn_per_host:
# we are short some connections
# create the connections that we expect to have available
conn_info_with_host = dict(self.conn_info, host=host)
for i in range(self.n_conn_per_host - len(conns)):
conn = self._make_conn(conn_info_with_host)
conns.append(conn)
self.host_conns_in_use.add(host)
self.host_conns_not_in_use.remove(host)
return conns
def put_conns_for_host(self, host):
with self.lock:
assert host in self.conns_by_host, 'Unknown host: %s' % host
assert host in self.host_conns_in_use, \
'Connections not in use for host: %s' % host
# check if any connections have been closed
# those will need to be recreated before being returned to
# the rotation
conns_to_return = []
conns = self.conns_by_host.pop(host)
try:
conn_info_with_host = dict(self.conn_info, host=host)
for conn in conns:
if conn.closed:
try:
conn = self._make_conn(conn_info_with_host)
except:
print 'Error creating new connection to host: %s' % \
host
                            # When connections are fetched for this
# host again, new ones will attempt to be
# created at that point
else:
conns_to_return.append(conn)
finally:
# always add whatever connections we have available back
# and restore the host connection accounting
self.conns_by_host[host] = conns_to_return
self.host_conns_in_use.remove(host)
self.host_conns_not_in_use.add(host)
def closeall(self):
with self.lock:
for host in self.hosts:
conns = self.conns_by_host[host]
for conn in conns:
try:
conn.close()
except:
pass
self.conns_by_host.clear()
self.host_conns_in_use.clear()
self.host_conns_not_in_use.clear()
self.hosts = []
|
Python
| 0.000001
|
@@ -6157,38 +6157,37 @@
-else:%0A
+ continue%0A
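
Stripped to its control flow, the fix skips a connection only when its replacement cannot be created; healthy and freshly recreated connections both fall through to the append. Here make_conn stands in for the psycopg2 factory:

def collect_live_conns(conns, make_conn):
    conns_to_return = []
    for conn in conns:
        if conn.closed:
            try:
                conn = make_conn()
            except Exception:
                # replacement failed; a new connection will be
                # attempted on the next checkout for this host
                continue
        conns_to_return.append(conn)
    return conns_to_return
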
|
15a32b91b36c9deba5a4fc1d8c843a5e044b62c3
|
remove unnecessary comments and print statements
|
tdp_core/mapping_table.py
|
tdp_core/mapping_table.py
|
import logging
from . import db
import itertools
_log = logging.getLogger(__name__)
class SQLMappingTable(object):
def __init__(self, mapping, engine):
self.from_idtype = mapping.from_idtype
self.to_idtype = mapping.to_idtype
self._engine = engine
self._query = mapping.query
self._integer_ids = mapping.integer_ids
def __call__(self, ids):
# ensure strings
print(type(ids))
# ids = [i.decode('utf-8') for i in ids if not isinstance(i, int)]
ids = [i for i in ids]
if self._integer_ids: # convert to integer ids
ids = [int(i) for i in ids]
with db.session(self._engine) as session:
mapped = session.execute(self._query, ids=ids)
# handle multi mappings
data = sorted(mapped, key=lambda x: x['f'])
grouped = {k: [r['t'] for r in g] for k, g in itertools.groupby(data, lambda x: x['f'])}
return [grouped.get(id, []) for id in ids]
def _discover_mappings():
for k, connector in db.configs.connectors.items():
if not connector.mappings:
continue
engine = db.configs.engine(k)
for mapping in connector.mappings:
_log.info('registering %s to %s', mapping.from_idtype, mapping.to_idtype)
yield SQLMappingTable(mapping, engine)
class SQLMappingProvider(object):
def __init__(self):
self._mappings = list(_discover_mappings())
def __iter__(self):
return iter(((f.from_idtype, f.to_idtype, f) for f in self._mappings))
def create():
return SQLMappingProvider()
|
Python
| 0
|
@@ -386,127 +386,8 @@
ings
-%0A print(type(ids))%0A # ids = %5Bi.decode('utf-8') for i in ids if not isinstance(i, int)%5D%0A ids = %5Bi for i in ids%5D
%0A%0A
|
0b608d60ecad22dbecdd066fe0930b697b6eee9a
|
Add collection to tile dirname
|
tilezilla/cli/ingest.py
|
tilezilla/cli/ingest.py
|
# -*- coding: utf-8 -*-
""" CLI to process imagery products to tiles and index in database
"""
import logging
import os
import click
import rasterio
import six
from . import options
from .cliutils import config_to_resources, Echoer
from .. import products, db
from .._util import decompress_to, include_bands
from ..errors import FillValueException
from ..geoutils import reproject_as_needed, reproject_bounds
from ..stores import STORAGE_TYPES
logger = logging.getLogger('tilezilla')
echoer = Echoer(message_indent=0)
def ingest_source(config, source, overwrite=False):
""" Ingest (tile and ingest) a source
"""
spec, storage_name, database, datacube = config_to_resources(config)
# TODO: config file should describe the basename of tile directories
# TODO: document choice of where these variables come from (Tile)
# TODO: parametrize zfill
tile_root = os.path.join(config['store']['root'],
config['store']['tile_dirpattern'])
with decompress_to(source) as tmpdir:
# Find product and get dataset database resource
product = products.registry.sniff_product_type(tmpdir)
dataset = db.DatasetResource(database, datacube, product.description)
collection_name = product.description
# Subset bands
desired_bands = _include_bands_from_config(config, product.bands)
bbox = reproject_bounds(product.bounds, 'EPSG:4326', spec.crs)
# Find tiles for product & IDs of these tiles in database
tiles = list(spec.bounds_to_tile(bbox))
tiles_id = [
datacube.ensure_tile(
collection_name, tile.horizontal, tile.vertical)
for tile in tiles
]
tiles_product_query = [
database.get_product_by_name(tile_id, product.timeseries_id)
for tile_id in tiles_id
]
for band in desired_bands:
with reproject_as_needed(band.src, spec) as src:
band.src = src
echoer.item('Tiling: {}'.format(band.long_name))
for tile, tile_id, tile_prod_query in zip(tiles,
tiles_id,
tiles_product_query):
# Check if exists
if tile_prod_query:
_band_names = [b.standard_name for b in
tile_prod_query.bands]
if band.standard_name in _band_names and not overwrite:
echoer.item('Already tiled -- skipping')
continue
# Setup dataset store
path = tile.str_format(tile_root)
store_cls = STORAGE_TYPES[config['store']['name']]
store = store_cls(path, tile,
meta_options=config['store']['co'])
# Save and record path
try:
dst_path = store.store_variable(product, band,
overwrite=overwrite)
except FillValueException:
# TODO: skip tile but complain
continue
band.path = dst_path
echoer.item(' saved to: {}'.format(dst_path))
# Update index with new product/band entry
if tile_prod_query:
                        # TODO: we need to handle deleting/transferring existing
# bands over maybe overwriting product
band_id = dataset.update_band(tile_prod_query.id, band)
else:
product_id = dataset.ensure_product(tile_id, product)
band_id = dataset.ensure_band(product_id, band)
# TODO: delete file if index went bad
# Copy over metadata files
for md_name in product.metadata_files:
store.store_file(product,
str(product.metadata_files[md_name]))
def _include_bands_from_config(config, bands):
""" Return a list of :class:`Band`s specified in tilezilla configuration
Args:
config (dict): `tilezilla` configuration
bands (list[Band]): List of bands to filter
Returns:
list[Bands]: Included bands
"""
# TODO: move elsewhere
product_filter = (config.get('products', {}).copy()
.get('include_filter', {}))
include_regex = product_filter.pop('regex', False)
return include_bands(bands, product_filter, regex=include_regex)
@click.command(short_help='Ingest known products into tile dataset format')
@click.option('--overwrite', is_flag=True,
help='Overwriting existing tiled data')
@options.arg_sources
@options.pass_config
@click.pass_context
def ingest(ctx, config, sources, overwrite=False):
for source in sources:
echoer.info('Working on: {}'.format(source))
ingest_source(config, source, overwrite=overwrite)
|
Python
| 0
|
@@ -696,300 +696,9 @@
ig)%0A
- # TODO: config file should describe the basename of tile directories%0A # TODO: document choice of where these variables come from (Tile)%0A # TODO: parametrize zfill%0A tile_root = os.path.join(config%5B'store'%5D%5B'root'%5D,%0A config%5B'store'%5D%5B'tile_dirpattern'%5D)
%0A
+
%0A
@@ -934,16 +934,16 @@
iption)%0A
-
@@ -976,24 +976,25 @@
description%0A
+%0A
# Su
@@ -1577,16 +1577,197 @@
%5D%0A%0A
+ tile_root = os.path.join(config%5B'store'%5D%5B'root'%5D,%0A product.description,%0A config%5B'store'%5D%5B'tile_dirpattern'%5D)%0A
|
1ce24bd04f4b217e560707bd699bbeb6fe14fe09
|
username should be case insensitive
|
timed/authentication.py
|
timed/authentication.py
|
import base64
import functools
import hashlib
import requests
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_bytes
from mozilla_django_oidc.auth import LOGGER, OIDCAuthenticationBackend
class TimedOIDCAuthenticationBackend(OIDCAuthenticationBackend):
def get_introspection(self, access_token, id_token, payload):
"""Return user details dictionary."""
basic = base64.b64encode(
f"{settings.OIDC_RP_INTROSPECT_CLIENT_ID}:{settings.OIDC_RP_INTROSPECT_CLIENT_SECRET}".encode(
"utf-8"
)
).decode()
headers = {
"Authorization": f"Basic {basic}",
"Content-Type": "application/x-www-form-urlencoded",
}
response = requests.post(
settings.OIDC_OP_INTROSPECT_ENDPOINT,
verify=settings.OIDC_VERIFY_SSL,
headers=headers,
data={"token": access_token},
)
response.raise_for_status()
return response.json()
def get_userinfo_or_introspection(self, access_token):
try:
claims = self.cached_request(
self.get_userinfo, access_token, "auth.userinfo"
)
except requests.HTTPError as e:
if not (
e.response.status_code in [401, 403] and settings.OIDC_CHECK_INTROSPECT
):
raise e
            # check introspection if userinfo fails (confidential client)
claims = self.cached_request(
self.get_introspection, access_token, "auth.introspection"
)
if "client_id" not in claims:
raise SuspiciousOperation("client_id not present in introspection")
return claims
def get_or_create_user(self, access_token, id_token, payload):
"""Verify claims and return user, otherwise raise an Exception."""
claims = self.get_userinfo_or_introspection(access_token)
users = self.filter_users_by_claims(claims)
if len(users) == 1:
user = users.get()
self.update_user_from_claims(user, claims)
return user
elif settings.OIDC_CREATE_USER:
return self.create_user(claims)
else:
LOGGER.debug(
"Login failed: No user with username %s found, and "
"OIDC_CREATE_USER is False",
self.get_username(claims),
)
return None
def update_user_from_claims(self, user, claims):
user.email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
user.first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
user.last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
user.save()
def filter_users_by_claims(self, claims):
username = self.get_username(claims)
return self.UserModel.objects.filter(username=username)
def cached_request(self, method, token, cache_prefix):
token_hash = hashlib.sha256(force_bytes(token)).hexdigest()
func = functools.partial(method, token, None, None)
return cache.get_or_set(
f"{cache_prefix}.{token_hash}",
func,
timeout=settings.OIDC_BEARER_TOKEN_REVALIDATION_TIME,
)
def create_user(self, claims):
"""Return object for a newly created user account."""
username = self.get_username(claims)
email = claims.get(settings.OIDC_EMAIL_CLAIM, "")
first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "")
last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "")
return self.UserModel.objects.create(
username=username, email=email, first_name=first_name, last_name=last_name
)
def get_username(self, claims):
try:
return claims[settings.OIDC_USERNAME_CLAIM]
except KeyError:
raise SuspiciousOperation("Couldn't find username claim")
|
Python
| 0.999949
|
@@ -2967,32 +2967,40 @@
.filter(username
+__iexact
=username)%0A%0A
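
The __iexact lookup makes the username match case-insensitive at the database level. A minimal sketch against Django's stock auth model (the project above uses its own UserModel, and this requires a configured Django project):

from django.contrib.auth.models import User

def find_users(username):
    # matches 'Alice', 'alice' and 'ALICE' alike
    return User.objects.filter(username__iexact=username)
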
|
207c5d9b41933a4e08c88c026c4d9e8cba3a1d3a
|
Display the text if the regexp does not match
|
weboob/tools/browser2/filters.py
|
weboob/tools/browser2/filters.py
|
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from decimal import Decimal
import re
_NO_DEFAULT = object()
class _Filter(object):
_creation_counter = 0
def __init__(self):
self._creation_counter = _Filter._creation_counter
_Filter._creation_counter += 1
class Filter(_Filter):
"""
Class used to filter on a HTML element given as call parameter to return
matching elements.
Filters can be chained, so the parameter supplied to constructor can be
either a xpath selector string, or an other filter called before.
>>> from lxml.html import etree
>>> f = CleanDecimal(CleanText('//p'))
>>> f(etree.fromstring('<html><body><p>blah: <span>229,90</span></p></body></html>'))
Decimal('229.90')
"""
def __init__(self, selector=None):
super(Filter, self).__init__()
self.selector = selector
def __call__(self, item):
if isinstance(self.selector, basestring):
value = item.xpath(self.selector)
elif callable(self.selector):
value = self.selector(item)
else:
value = self.selector
return self.filter(value)
def filter(self, value):
"""
        This method has to be overridden by child classes.
"""
return value
class Env(_Filter):
"""
Filter to get environment value of the item.
It is used for example to get page parameters, or when there is a parse()
method on ItemElement.
"""
def __init__(self, name):
super(Env, self).__init__()
self.name = name
def __call__(self, item):
return item.env[self.name]
class TableCell(_Filter):
"""
    Used with TableElement, it gets the cell value from its name.
For example:
class table(TableElement):
head_xpath = '//table/thead/th'
item_xpath = '//table/tbody/tr'
col_date = u'Date'
col_label = [u'Name', u'Label']
class item(ItemElement):
klass = Object
obj_date = Date(TableCell('date'))
obj_label = CleanText(TableCell('label'))
"""
def __init__(self, *names, **kwargs):
super(TableCell, self).__init__()
self.names = names
self.default = kwargs.pop('default', _NO_DEFAULT)
def __call__(self, item):
for name in self.names:
idx = item.parent.get_colnum(name)
if idx is not None:
return item.xpath('./td[%s]' % (idx + 1))
if self.default is not _NO_DEFAULT:
return self.default
raise KeyError('Unable to find column %s' % ' or '.join(self.names))
class CleanText(Filter):
"""
Get a cleaned text from an element.
It replaces all tabs and multiple spaces to one space and strip the result
string.
"""
def filter(self, txt):
if isinstance(txt, (tuple,list)):
txt = ' '.join(map(self.clean, txt))
return self.clean(txt)
@classmethod
def clean(self, txt):
if not isinstance(txt, basestring):
txt = [t.strip() for t in txt.itertext()]
txt = u' '.join(txt) # 'foo bar'
txt = re.sub(u'[\s\xa0\t]+', u' ', txt) # 'foo bar'
return txt.strip()
class CleanDecimal(CleanText):
"""
Get a cleaned Decimal value from an element.
"""
def filter(self, text):
text = super(CleanDecimal, self).filter(text)
text = text.replace('.','').replace(',','.')
return Decimal(re.sub(u'[^\d\-\.]', '', text))
class Link(Filter):
"""
Get the link uri of an element.
If the <a> tag is not found, an exception IndexError is raised.
"""
def filter(self, el):
return el[0].attrib.get('href', '')
class Attr(_Filter):
"""
Get the attribute of object.
"""
def __init__(self, name):
super(Attr, self).__init__()
self.name = name
def __call__(self, item):
return item.use_selector(getattr(item, 'obj_%s' % self.name))
class CleanChars(Filter):
"""
Remove chars.
"""
def __init__(self, selector, symbols):
super(CleanChars, self).__init__(selector)
self.symbols = symbols
def filter(self, txt):
if isinstance(txt, (tuple,list)):
txt = ' '.join([t.strip() for t in txt.itertext()])
for symbol in self.symbols:
txt = txt.replace(symbol, '')
return txt
class Regexp(Filter):
"""
Apply a regex.
>>> from lxml.html import etree
>>> f = Regexp(CleanText('//p'), r'Date: (\d+)/(\d+)/(\d+)', '\\3-\\2-\\1')
>>> f(etree.fromstring('<html><body><p>Date: <span>13/08/1988</span></p></body></html>'))
u'1988-08-13'
"""
def __init__(self, selector, pattern, template=None, flags=0, default=_NO_DEFAULT):
super(Regexp, self).__init__(selector)
self.pattern = pattern
self.regex = re.compile(pattern, flags)
self.template = template
self.default = default
def filter(self, txt):
if isinstance(txt, (tuple,list)):
txt = ' '.join([t.strip() for t in txt.itertext()])
mobj = self.regex.search(txt)
if not mobj:
if self.default is not _NO_DEFAULT:
return self.default
else:
raise KeyError('Unable to match %s' % self.pattern)
if self.template is None:
return next(g for g in mobj.groups() if g is not None)
else:
return mobj.expand(self.template)
|
Python
| 0.999995
|
@@ -6109,22 +6109,29 @@
o match
+%25s in
%25s' %25
+(
self.pat
@@ -6134,16 +6134,22 @@
.pattern
+, txt)
)%0A%0A
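
The Regexp filter's search-then-expand behaviour, with the improved error message that now reports both the pattern and the text it failed to match, reduces to:

import re

regex = re.compile(r'Date: (\d+)/(\d+)/(\d+)')
txt = 'Date: 13/08/1988'
mobj = regex.search(txt)
if not mobj:
    raise KeyError('Unable to match %s in %s' % (regex.pattern, txt))
print(mobj.expand(r'\3-\2-\1'))  # -> 1988-08-13
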
|
f14c5c9e4a3c7d196421ce3d60ec64fdee4749dd
|
make arguments consistent
|
src/redditquery/parse.py
|
src/redditquery/parse.py
|
#!/usr/bin/python3
import os
import argparse
def parser():
"""Parses arguments from comman line using argparse.
Parameters"""
# default directory for reddit files
default_directory = os.path.join(os.getcwd(), "data")
parser = argparse.ArgumentParser()
# obligatory
parser.add_argument("mode", type = int, help = "execution mode: 1 build index, 2: query using existing index, 3 build and query")
# conditionally obligatory
parser.add_argument("-f", "--first", type = str, help = "first year/month")
parser.add_argument("-l", "--last", type = str, help = "last year/month")
# optional with defaults
parser.add_argument("--dir", "-d", type = str, nargs = "?", default = default_directory, help = "directory for data storage")
parser.add_argument("--num", "-n", type = int, nargs = "?", default = 10, help = "number of results per query")
parser.add_argument("--cores", "-c", type = int, nargs = "?", default = 1, help = "number of cores to use")
parser.add_argument("--minfreq", "-m", type = int, nargs = "?", default = 5, help = "minimum term frequency")
parser.add_argument("--progress", "-p", action = "store_true", help = "report progress")
return parser
|
Python
| 0.999824
|
@@ -480,20 +480,20 @@
t(%22-
-f%22, %22-
-first
+%22, %22-f
%22, t
@@ -560,19 +560,19 @@
t(%22-
-l%22, %22--last
+-last%22, %22-l
%22, t
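
After the change every option follows the same long-name-first convention; argparse accepts the flags in either order (dest is taken from the first long option), so this is purely about consistency:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--first', '-f', type=str, help='first year/month')
parser.add_argument('--last', '-l', type=str, help='last year/month')
args = parser.parse_args(['--first', '2015-01', '-l', '2015-06'])
print('%s %s' % (args.first, args.last))
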
|
312acba0b6a1efb4a0b4d62c5ac2844608c7f203
|
Add basic message scoring algorithm
|
consequences/facebook.py
|
consequences/facebook.py
|
import requests
import sys
import config
import indicoio
import itertools
from pymongo import MongoClient
from collections import namedtuple
sys.path.append('../')
access_token = ''
indicoio.config.api_key = config.INDICO_API_KEY
# Connects to mongo and returns a MongoClient
def connect_to_mongo():
host = config.MONGO_HOST
user = config.MONGO_USER
password = config.MONGO_PASS
db = config.MONGO_DB
connection_url = "mongodb://" + user + ":" + password + "@" + host + "/" + db + "?authSource=admin"
client = MongoClient(connection_url)
return client[db]
# Get facebook access token
def get_access_token(phone):
db = connect_to_mongo()
access_token_dict = db.users.find_one({'phone_number': phone}, {'consequences.facebook.access_token': 1, '_id': 0})
access_token = access_token_dict['consequences']['facebook']['access_token']
print access_token
return access_token
# Get last_date used
def get_last_date(phone):
db = connect_to_mongo()
last_date_dict = db.users.find_one({'phone_number': phone}, {'consequences.facebook.last_date': 1, '_id': 0})
if 'last_date' in last_date_dict['consequences']['facebook']:
last_date = last_date_dict['consequences']['facebook']['last_date']
else:
last_date = 0
    print 'Last date: ' + str(last_date)
return last_date
# Update the last_date used for getting a user's post
def update_last_date(phone, date):
return None
Post = namedtuple('Post', ['id', 'message', 'likes', 'score'])
def score_post(likes, sentiment, tags, personalities):
base_score = 1
return base_score
def choose_post(response_json):
posts = []
posts_messages = []
data = response_json['data']
for post in data.iteritems():
if 'message' not in post:
continue
likes = 0
if 'likes' in post:
likes = len(post['likes'])
posts.append(Post(post['id'], post['message'], likes, None))
posts_messages.append(post['message'])
apis = ['sentiment_hq', 'text_tags', 'personality']
analyses = indicoio.analyze_text(posts_messages, apis=apis)
for post, analysis in itertools.izip(posts, analyses):
likes = post.likes
sentiment = analysis['sentiment_hq']
tags = analysis['text_tags']
personalities = analysis['personality']
post.score = score_post(likes, sentiment, tags, personalities)
def get_score(post):
return post.score
posts.sort(key=get_score)
return posts[0]
# Get earliest unused facebook post
def get_old_post(phone):
# global access_token
print "GETTING ACCESS TOKEN FROM DB"
access_token = get_access_token(phone)
print access_token
print "GOT ACCESS TOKEN"
last_date = get_last_date(phone)
if last_date is None:
print "No last date found, resetting to 0"
last_date = 0
fields = 'id,created_time,updated_time,message,likes'
until = '1453587145'
filter = 'app_2915120374'
limit = '500'
params = {'fields': fields,
'until': until,
'filter': filter,
'limit': limit,
'access_token': access_token}
print "MAKING REQUEST"
response_json = requests.get('https://graph.facebook.com/v2.5/me/posts', params=params).json()
print response_json
return choose_post(response_json)
# Share post
def share_post_using_id(post_id):
pass
def main():
get_access_token("9145632336")
if __name__ == '__main__':
main()
|
Python
| 0.00034
|
@@ -1598,23 +1598,1098 @@
= 1%0A
+%0A
-return base
+sentiment_score = 1 / sentiment%0A%0A likes_score = 0.1 * likes%0A%0A bonus_tags = %7B%0A 'anime': 1,%0A 'art': 1,%0A 'atheism': 1,%0A 'business': 1,%0A 'consipiracy': 1,%0A 'drugs': 1,%0A 'music': 1,%0A 'personal': 1,%0A 'psychology': 1,%0A 'relationships': 1,%0A 'romance': 1,%0A 'ultimate': 1,%0A 'writing': 1,%0A %7D%0A%0A bonus_tag_score = 0%0A tag_threshold = 0.2%0A%0A for tag, multiplier in bonus_tags.iteritems():%0A tag_score = personalities%5Btag%5D%0A%0A if tag_score %3E= tag_threshold:%0A bonus_tag_score += tag_score * float(multiplier)%0A%0A # low agreeableness, low conscientiousness, high extraversion%0A agreeableness = 1 / personalities%5B'agreeableness'%5D%0A conscientiousness = 1 / personalities%5B'agreeableness'%5D%0A extraversion = 1 * personalities%5B'extraversion'%5D%0A%0A final_score = (base_score + sentiment_score + likes_score +%0A bonus_tag_score + agreeableness + conscientiousness + extraversion)%0A%0A return final
_sco
|
60202e6a4b51fb68045ee1df859c0827f5b770e4
|
debug info
|
src/zeit/content/article/edit/body.py
|
src/zeit/content/article/edit/body.py
|
# Copyright (c) 2010 gocept gmbh & co. kg
# See also LICENSE.txt
import gocept.lxml.interfaces
import grokcore.component
import lxml.objectify
import uuid
import z3c.traverser.interfaces
import zeit.content.article.edit.interfaces
import zeit.content.article.interfaces
import zeit.edit.container
import zope.publisher.interfaces
editable_body_name = 'editable-body'
class EditableBody(zeit.edit.container.Base,
grokcore.component.MultiAdapter):
grokcore.component.implements(
zeit.content.article.edit.interfaces.IEditableBody)
grokcore.component.provides(
zeit.content.article.edit.interfaces.IEditableBody)
grokcore.component.adapts(zeit.content.article.interfaces.IArticle,
gocept.lxml.interfaces.IObjectified)
__name__ = editable_body_name
_find_item = lxml.etree.XPath(
'.//*[@cms:__name__ = $name]',
namespaces=dict(
cms='http://namespaces.zeit.de/CMS/cp'))
def _set_default_key(self, xml_node):
key = xml_node.get('{http://namespaces.zeit.de/CMS/cp}__name__')
if not key:
key = str(uuid.uuid4())
xml_node.set('{http://namespaces.zeit.de/CMS/cp}__name__',
key)
self._p_changed = True
return key
def _get_keys(self, xml_node):
# XXX this is much too simple and needs work. and tests.
result = []
for didx, division in enumerate(
xml_node.xpath('division[@type="page"]'), start=1):
key = self._set_default_key(division)
if didx > 1:
# Skip the first division as it isn't editable
result.append(key)
for child in division.iterchildren():
result.append(self._set_default_key(child))
return result
def _get_element_type(self, xml_node):
return xml_node.tag
def _add(self, item):
# Add to last division instead of self.xml
name = item.__name__
if name:
if name in self:
raise zope.container.interfaces.DuplicateIDError(name)
else:
name = str(uuid.uuid4())
item.__name__ = name
self.xml.division[:][-1].append(item.xml)
return name
def _delete(self, key):
item = self[key]
if zeit.content.article.edit.interfaces.IDivision.providedBy(item):
            # Move contained elements to previous division
prev = item.xml.getprevious()
for child in item.xml.iterchildren():
prev.append(child)
item.xml.getparent().remove(item.xml)
return item
@grokcore.component.adapter(zeit.content.article.interfaces.IArticle)
@grokcore.component.implementer(
zeit.content.article.edit.interfaces.IEditableBody)
def get_editable_body(article):
return zope.component.queryMultiAdapter(
(article,
zope.security.proxy.removeSecurityProxy(article.xml['body'])),
zeit.content.article.edit.interfaces.IEditableBody)
class BodyTraverser(object):
zope.interface.implements(z3c.traverser.interfaces.IPluggableTraverser)
def __init__(self, context, request):
self.context = context
self.request = request
def publishTraverse(self, request, name):
if name == editable_body_name:
body = zeit.content.article.edit.interfaces.IEditableBody(
self.context, None)
if body is not None:
return body
raise zope.publisher.interfaces.NotFound(self.context, name, request)
|
Python
| 0.000001
|
@@ -2312,16 +2312,52 @@
, key):%0A
+ __traceback_info__ = (key,)%0A
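
__traceback_info__ is a zope.exceptions convention: any local with that name is rendered into formatted tracebacks, so the failing key becomes visible without a debugger. In miniature:

def delete(container, key):
    __traceback_info__ = (key,)   # picked up by zope.exceptions formatters
    return container.pop(key)     # a KeyError traceback now names the key
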
|
2a8b560b1abcca6dd93e86a893fc6b2d8b224dd5
|
break definition of url_map and dispatcher out of __init__
|
werkzeug_dispatch/application.py
|
werkzeug_dispatch/application.py
|
# -*- coding: utf-8 -*-
"""
werkzeug_dispatch.application
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 by Ben Mather.
:license: BSD, see LICENSE for more details.
"""
try:
from functools import singledispatch
except:
from singledispatch import singledispatch
from werkzeug import Request
from werkzeug.routing import Map
from werkzeug.local import Local, LocalManager
from werkzeug_dispatch import Dispatcher
class Application(object):
""" helper class for creating a wsgi application from a url map and list of
bindings.
`url_map`
werkzeug `Map` object that maps from urls to names
`dispatcher`
object to map from endpoint names to handler functions
"""
#: Constructor applied to each wsgi environment to create the request
#: object to be passed to the handler
request_class = Request
def __init__(self, url_map=None, dispatcher=None):
"""
:param url_map:
a werkzeug `Map` object`
:param dispatcher:
a `Dispatcher` object
"""
if url_map is None:
url_map = Map()
self.url_map = url_map
if dispatcher is None:
dispatcher = Dispatcher()
self.dispatcher = dispatcher
# reference to the bottom of a stack of wsgi middleware wrapping
# :method:`_dispatch_request`. Invoked by :method:`__call__`.
# Essentially the real wsgi application.
self._stack = self._dispatch_request
# TODO provide a way of adding request specific variables. Need to be
# able to register name, `(Application, wsgi_env) -> value` pairs
# Alternatively get rid of this entirely as it's a massive hack
self._local = Local()
self._wsgi_env = self._local('wsgi_env')
self._map_adapter = self._local('map_adapter')
local_manager = LocalManager([self._local])
self.add_middleware(local_manager.make_middleware)
# singledispatch is only used to provide a mapping between exception
# types and handlers. Handlers do not actually take the exception as
# the first argument but are instead called with the application, the
# request, and then the exception.
# TODO provide a sane default for normal and werzeug HTTPExceptions
self._exception_handler = singledispatch(None)
def add_routes(self, *routes):
for route in routes:
self.url_map.add(route)
def add_views(self, *views):
for view in views:
self.dispatcher.add(view)
def add_middleware(self, middleware, *args, **kwargs):
""" Wrap the application in a layer of wsgi middleware.
:param middleware:
a function which takes a wsgi application as it's first argument
and returns a new wsgi application. Any other args or kwargs are
passed after.
"""
self._stack = middleware(self._stack, *args, **kwargs)
def add_exception_handler(self, exception_class, handler):
""" Bind a function to render exceptions of the given class and all
sub classes.
Exception handlers take three arguments:
* a reference to the application
* a request object
* the exception to be rendered
"""
self._exception_handler.register(exception_class, handler)
def _bind(self, wsgi_env):
self._local.wsgi_env = wsgi_env
self._local.map_adapter = self.url_map.bind_to_environ(wsgi_env)
def _dispatch_request(self, wsgi_env, start_response):
self._bind(wsgi_env)
request = self._request_class(wsgi_env)
def call_view(name, kwargs):
endpoint = self.dispatcher.lookup(
name,
method=wsgi_env.get('REQUEST_METHOD'),
accept=wsgi_env.get('HTTP_ACCEPT'),
accept_charset=wsgi_env.get('HTTP_ACCEPT_CHARSET'),
accept_language=wsgi_env.get('HTTP_ACCEPT_LANGUAGE'))
return endpoint(self, request, **kwargs)
try:
response = self._map_adapter.dispatch(call_view)
except BaseException as e:
handler = self._exception_handler.dispatch(type(e))
if handler is None:
raise
handler(self, request, e)
return response(wsgi_env, start_response)
def __call__(self, wsgi_env, start_response):
return self._stack(wsgi_env, start_response)
def url_for(self, *args, **kwargs):
""" construct the url corresponding to an endpoint name and parameters
Unfortunately will only work if the application has been bound to a
        wsgi request. If it is not, there is generally not enough
        information to construct full urls. TODO.
"""
self._map_adapter(*args, **kwargs)
|
Python
| 0
|
@@ -389,16 +389,59 @@
lManager
+%0Afrom werkzeug.utils import cached_property
%0A%0Afrom w
@@ -916,216 +916,31 @@
-def __init__(self, url_map=None, dispatcher=None):%0A %22%22%22%0A :param url_map:%0A a werkzeug %60Map%60 object%60%0A%0A :param dispatcher:%0A a %60Dispatcher%60 object%0A%0A %22%22%22%0A i
+@cached_property%0A de
f ur
@@ -944,24 +944,22 @@
url_map
- is None
+(self)
:%0A
@@ -964,69 +964,49 @@
- url_map =
+return
Map()%0A
-
+%0A
- self.url_map = url_map%0A%0A
+@cached_property%0A
- i
+de
f di
@@ -1013,24 +1013,22 @@
spatcher
- is None
+(self)
:%0A
@@ -1033,75 +1033,52 @@
- dispatcher = Dispatcher()%0A self.dispatcher = dispatcher%0A
+return Dispatcher()%0A%0A def __init__(self):
%0A
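
The refactor replaces eager __init__ wiring with lazy defaults via werkzeug's cached_property, which runs the getter on first access and then stores the result on the instance. A minimal illustration (the class is a placeholder):

from werkzeug.utils import cached_property

class App(object):
    @cached_property
    def url_map(self):
        return {}   # built once, on first access

app = App()
assert app.url_map is app.url_map   # same object every time
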
|
deeb9a1cc773e7af4c539d3f451ab927ecea29ed
|
Check for uploader
|
whippersnapper/whippersnapper.py
|
whippersnapper/whippersnapper.py
|
#!/usr/bin/env python
import logging
import os
import subprocess
import sys
import time
import yaml
import screenshotter
import uploader
class WhipperSnapper(object):
"""
Implements all screenshot-related logic.
"""
def __init__(self):
if len(sys.argv) != 2:
self.usage()
sys.exit(1)
config_filepath = sys.argv[1]
self.config = self.load_config(config_filepath)
self.log_file = self.init_logging()
self.screenshotter = screenshotter.Screenshotter(self.config)
if not self.config.get('skip_upload'):
self.uploader = uploader.Uploader(self.config)
def main(self):
"""
Runs through the full screenshot process.
"""
print """
Whippersnapper is running. To view its log file:
tail -f %s
To quit, press ^C (ctrl-C).""" % (self.log_file)
while True:
targets = self.screenshotter.take_screenshots()
self.uploader.upload_screenshots(targets)
# TODO Image delete code probably doesn't belong here
if (self.config.get('delete_local_images')):
[os.remove(path.get('local_filepath')) for path in targets]
time.sleep(self.config.get('time_between_screenshots'))
def init_logging(self):
"""
Create a log file, and attach a basic logger to it.
"""
log_file = os.path.abspath(os.path.expanduser(self.config.get('log_file')))
# Create the log file if it does not yet exist
with open(log_file, 'a+'):
pass
logging.basicConfig(filename=log_file,
format='%(levelname)s:%(asctime)s %(message)s',
level=logging.INFO)
return log_file
def load_config(self, config_filepath):
"""
Load configuration from config.yaml.
Many options have defaults; use these unless they are overwritten in
config.yaml. This file includes the urls, css selectors and slugs for
the targets to screenshot.
"""
log_file = os.path.abspath(os.path.expanduser(os.path.dirname(
os.path.abspath(__file__)) + '/../whippersnapper.log'))
config = {
'skip_upload': False,
'aws_bucket': '',
'aws_subpath': '',
'aws_access_key': None,
'aws_secret_key': None,
'log_file': log_file,
'delete_local_images': False,
'time_between_screenshots': 60,
'override_css_file': None,
'page_load_delay': 2,
'wait_for_js_signal': False,
'failure_timeout': 30,
}
required = (
'targets',
'local_image_directory',
)
raw_config = None
with open(config_filepath) as f:
raw_config = yaml.load(f)
for option_name, option_value in raw_config.iteritems():
config[option_name] = option_value
for option in required:
try:
config[option] = raw_config[option]
except KeyError:
raise RuntimeError('Config is missing required attribute: %s'
% option)
return config
def usage(self):
config_template_file = os.path.abspath(os.path.expanduser(
os.path.dirname(os.path.abspath(__file__))
+ '/config_templates/config.yaml.template'))
"""
Print usage information.
"""
print """
USAGE: whippersnapper CONFIG_FILEPATH
To see an example config file:
cat %s
""" % config_template_file
def launch_new_instance():
"""
Launch an instance of Whippersnapper.
"""
try:
s = WhipperSnapper()
s.main()
except KeyboardInterrupt:
# Print a blank line
print
if __name__ == '__main__':
launch_new_instance()
|
Python
| 0
|
@@ -955,16 +955,62 @@
shots()%0A
+ if hasattr(self, 'uploader'):%0A
@@ -1055,16 +1055,20 @@
argets)%0A
+
@@ -1137,16 +1137,20 @@
+
+
if (self
@@ -1186,16 +1186,20 @@
ges')):%0A
+
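
The guard works because skip_upload leaves self.uploader unset rather than None, so hasattr is the natural test. In isolation:

class Snapper(object):
    def __init__(self, skip_upload):
        if not skip_upload:
            self.uploader = object()   # stand-in for uploader.Uploader

    def run(self, targets):
        if hasattr(self, 'uploader'):  # attribute exists only when uploading
            print('uploading %d targets' % len(targets))

Snapper(skip_upload=True).run([])      # upload step is skipped silently
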
|
2797797497f4f5ad606764815b334321732bef3b
|
Rename fibonacci() to fibonacci_recur()
|
alg_fibonacci.py
|
alg_fibonacci.py
|
"""Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import print_function
def fibonacci(n):
"""Get nth number of Fibonacci series by recursion."""
if n == 0:
return 0
elif n == 1 or n == 2:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
def main():
import time
n = 13
print('{}th number of Fibonacci series: {}'
.format(n, fibonacci(n)))
if __name__ == '__main__':
main()
|
Python
| 0.999999
|
@@ -134,23 +134,93 @@
ort
-print_function%0A
+absolute_import%0Afrom __future__ import print_function%0Afrom __future__ import division
%0A%0Ade
@@ -230,16 +230,22 @@
ibonacci
+_recur
(n):%0A
@@ -313,12 +313,12 @@
f n
-== 0
+%3C= 1
:%0A
@@ -334,53 +334,9 @@
urn
-0%0A elif n == 1 or n == 2:%0A return 1
+n
%0A
@@ -362,24 +362,30 @@
rn fibonacci
+_recur
(n - 1) + fi
@@ -391,16 +391,22 @@
ibonacci
+_recur
(n - 2)%0A
@@ -446,16 +446,50 @@
n = 13%0A
+ %0A start_time = time.time()%0A
prin
@@ -522,16 +522,29 @@
i series
+ by recursion
: %7B%7D'%0A
@@ -575,14 +575,74 @@
acci
-(n)))%0A
+_recur(n)))%0A print('Time: %7B%7D'.format(time.time() - start_time))
%0A%0Aif
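
fibonacci_recur is exponential in n; the timing the diff adds makes that visible. A memoised variant (not part of the commit) is the usual next step:

def fibonacci_memo(n, cache={0: 0, 1: 1}):
    # the mutable default acts as a shared memo table
    if n not in cache:
        cache[n] = fibonacci_memo(n - 1) + fibonacci_memo(n - 2)
    return cache[n]

print(fibonacci_memo(13))   # 233
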
|
b5099530084deb61fcb9e60314579dd0daed79b5
|
maven_jar compatibility to rules_closure#java_import_external
|
tools/bzl/maven_jar.bzl
|
tools/bzl/maven_jar.bzl
|
GERRIT = "GERRIT:"
GERRIT_API = "GERRIT_API:"
MAVEN_CENTRAL = "MAVEN_CENTRAL:"
MAVEN_LOCAL = "MAVEN_LOCAL:"
def _maven_release(ctx, parts):
"""induce jar and url name from maven coordinates."""
if len(parts) not in [3, 4]:
fail('%s:\nexpected id="groupId:artifactId:version[:classifier]"'
% ctx.attr.artifact)
if len(parts) == 4:
group, artifact, version, classifier = parts
file_version = version + '-' + classifier
else:
group, artifact, version = parts
file_version = version
jar = artifact.lower() + '-' + file_version
url = '/'.join([
ctx.attr.repository,
group.replace('.', '/'),
artifact,
version,
artifact + '-' + file_version])
return jar, url
# Creates a struct containing the different parts of an artifact's FQN
def _create_coordinates(fully_qualified_name):
parts = fully_qualified_name.split(":")
packaging = None
classifier = None
if len(parts) == 3:
group_id, artifact_id, version = parts
elif len(parts) == 4:
group_id, artifact_id, version, packaging = parts
elif len(parts) == 5:
group_id, artifact_id, version, packaging, classifier = parts
else:
fail("Invalid fully qualified name for artifact: %s" % fully_qualified_name)
return struct(
fully_qualified_name = fully_qualified_name,
group_id = group_id,
artifact_id = artifact_id,
packaging = packaging,
classifier = classifier,
version = version,
)
def _format_deps(attr, deps):
formatted_deps = ""
if deps:
if len(deps) == 1:
formatted_deps += "%s = [\'%s\']," % (attr, deps[0])
else:
formatted_deps += "%s = [\n" % attr
for dep in deps:
formatted_deps += " \'%s\',\n" % dep
formatted_deps += " ],"
return formatted_deps
def _generate_build_file(ctx, binjar, srcjar):
srcjar_attr = ""
if srcjar:
srcjar_attr = 'srcjar = "%s",' % srcjar
contents = """
# DO NOT EDIT: automatically generated BUILD file for maven_jar rule {rule_name}
package(default_visibility = ['//visibility:public'])
java_import(
name = 'jar',
jars = ['{binjar}'],
{srcjar_attr}
{deps}
{exports}
)
java_import(
name = 'neverlink',
jars = ['{binjar}'],
neverlink = 1,
{deps}
{exports}
)
\n""".format(srcjar_attr = srcjar_attr,
rule_name = ctx.name,
binjar = binjar,
deps = _format_deps("deps", ctx.attr.deps),
exports = _format_deps("exports", ctx.attr.exports))
if srcjar:
contents += """
java_import(
name = 'src',
jars = ['{srcjar}'],
)
""".format(srcjar = srcjar)
ctx.file('%s/BUILD' % ctx.path("jar"), contents, False)
def _maven_jar_impl(ctx):
"""rule to download a Maven archive."""
coordinates = _create_coordinates(ctx.attr.artifact)
name = ctx.name
sha1 = ctx.attr.sha1
parts = ctx.attr.artifact.split(':')
# TODO(davido): Only releases for now, implement handling snapshots
jar, url = _maven_release(ctx, parts)
binjar = jar + '.jar'
binjar_path = ctx.path('/'.join(['jar', binjar]))
binurl = url + '.jar'
python = ctx.which("python")
script = ctx.path(ctx.attr._download_script)
args = [python, script, "-o", binjar_path, "-u", binurl]
if ctx.attr.sha1:
args.extend(["-v", sha1])
if ctx.attr.unsign:
args.append('--unsign')
for x in ctx.attr.exclude:
args.extend(['-x', x])
out = ctx.execute(args)
if out.return_code:
fail("failed %s: %s" % (' '.join(args), out.stderr))
srcjar = None
if ctx.attr.src_sha1 or ctx.attr.attach_source:
srcjar = jar + '-src.jar'
srcurl = url + '-sources.jar'
srcjar_path = ctx.path('jar/' + srcjar)
args = [python, script, "-o", srcjar_path, "-u", srcurl]
if ctx.attr.src_sha1:
args.extend(['-v', ctx.attr.src_sha1])
out = ctx.execute(args)
if out.return_code:
fail("failed %s: %s" % (args, out.stderr))
_generate_build_file(ctx, binjar, srcjar)
maven_jar = repository_rule(
attrs = {
"artifact": attr.string(mandatory = True),
"sha1": attr.string(),
"src_sha1": attr.string(),
"_download_script": attr.label(default = Label("//tools:download_file.py")),
"repository": attr.string(default = MAVEN_CENTRAL),
"attach_source": attr.bool(default = True),
"unsign": attr.bool(default = False),
"deps": attr.string_list(),
"exports": attr.string_list(),
"exclude": attr.string_list(),
},
local = True,
implementation = _maven_jar_impl,
)
|
Python
| 0.999998
|
@@ -1803,32 +1803,33 @@
erate_build_file
+s
(ctx, binjar, sr
@@ -1835,16 +1835,112 @@
rcjar):%0A
+ header = %22# DO NOT EDIT: automatically generated BUILD file for maven_jar rule %25s%22 %25 ctx.name%0A
srcjar
@@ -2028,87 +2028,15 @@
%22%22%22%0A
-# DO NOT EDIT: automatically generated BUILD file for maven_jar rule %7Brule_name
+%7Bheader
%7D%0Apa
@@ -2353,32 +2353,25 @@
- rule_name = ctx.name
+header = header
,%0A
-
@@ -2396,17 +2396,16 @@
binjar,%0A
-
@@ -2453,17 +2453,16 @@
.deps),%0A
-
@@ -2697,16 +2697,324 @@
False)%0A%0A
+ # Compatibility layer for java_import_external from rules_closure%0A contents = %22%22%22%0A%7Bheader%7D%0Apackage(default_visibility = %5B'//visibility:public'%5D)%0A%0Aalias(%0A name = %22%7Brule_name%7D%22,%0A actual = %22@%7Brule_name%7D//jar%22,%0A)%0A%5Cn%22%22%22.format(rule_name = ctx.name, header = header)%0A ctx.file(%22BUILD%22, contents, False)%0A%0A
def _mav
@@ -4254,16 +4254,17 @@
ild_file
+s
(ctx, bi
|
0d7dc04a4e0c31924e64f8e2b8ed9da25e2b64ce
|
Fix PEP8 issues
|
wikidataeditor/wikidataeditor.py
|
wikidataeditor/wikidataeditor.py
|
# encoding=utf8
# @author Dan Michael O. Heggø <danmichaelo@gmail.com>
__ver__ = '0.0.1'
import requests
import logging
import time
import re
import json
from item import Item
logger = logging.getLogger('wikidataeditor')
class Repo:
def __init__(self, user_agent,
api_url='https://www.wikidata.org/w/api.php'):
self.session = requests.Session()
self.session.headers.update({'User-Agent': user_agent})
self.api_url = api_url
# Respect https://www.mediawiki.org/wiki/Maxlag
lps = r'Waiting for [^ ]*: (?P<lag>[0-9.]+) seconds? lagged'
self.lagpattern = re.compile(lps)
@property
def user_agent(self):
return self.session.headers.get('User-Agent')
def raw_api_call(self, args):
while True:
url = self.api_url
args['format'] = 'json'
args['maxlag'] = 5
# print args
# for k, v in args.iteritems():
# if type(v) == unicode:
# args[k] = v.encode('utf-8')
# else:
# args[k] = v
# data = urllib.urlencode(args)
logger.debug(args)
response = self.session.post(url, data=args)
response = json.loads(response.text)
logger.debug(response)
if 'error' not in response:
return response
code = response['error'].pop('code', 'Unknown')
info = response['error'].pop('info', '')
if code == 'maxlag':
lag = self.lagpattern.search(info)
if lag:
logger.warn('Pausing due to database lag: %s', info)
time.sleep(int(lag.group('lag')))
continue
logger.error("Unknown API error: %s\n%s\nResponse:\n%s",
info,
json.dumps(args, indent="\t"),
json.dumps(response, indent="\t"))
return response
# sys.exit(1)
def login(self, user, pwd):
args = {
'action': 'login',
'lgname': user,
'lgpassword': pwd
}
response = self.raw_api_call(args)
if response['login']['result'] == 'NeedToken':
args['lgtoken'] = response['login']['token']
response = self.raw_api_call(args)
return (response['login']['result'] == 'Success')
def item(self, entity):
return Item(self, entity)
def pageinfo(self, entity):
args = {
'action': 'query',
'prop': 'info',
'intoken': 'edit',
'titles': entity
}
return self.raw_api_call(args)
def get_entities(self, site, page):
args = {
'action': 'wbgetentities',
'sites': site,
'titles': page
}
return self.raw_api_call(args)
def add_entity(self, site, lang, title):
args = {
'new': 'item',
'data': {
'sitelinks': {site: {'site': site, 'title': title}},
'labels': {lang: {'language': lang, 'value': title}}
}
}
logger.info(' Adding entity for %s:%s', site, title)
time.sleep(3)
return self.edit_entity(**args)
def edit_entity(self, data={}, site=None, title=None, new=None,
summary=None):
response = self.pageinfo('DUMMY')
itm = response['query']['pages'].items()[0][1]
edittoken = itm['edittoken']
args = {
'action': 'wbeditentity',
'bot': 1,
'data': json.dumps(data),
'token': edittoken
}
if site:
args['site'] = site
if title:
args['title'] = title
if new:
args['new'] = new
if summary:
args['summary'] = summary
response = self.raw_api_call(args)
return response
|
Python
| 0.000001
|
@@ -68,27 +68,8 @@
om%3E%0A
-__ver__ = '0.0.1'%0A%0A
impo
@@ -129,17 +129,16 @@
rt json%0A
-%0A
from ite
@@ -152,16 +152,35 @@
t Item%0A%0A
+__ver__ = '0.0.1'%0A%0A
logger =
|
ec1d0b5673ef0eca398715eb1f48f1a99f427cca
|
Format detect_targets.py
|
tools/detect_targets.py
|
tools/detect_targets.py
|
#! /usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
TEST BUILD & RUN
"""
import sys
import os
import json
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from tools.options import get_default_options_parser
# Check: Extra modules which are required by core test suite
from tools.utils import check_required_modules
check_required_modules(['prettytable'])
# Imports related to mbed build api
from tools.build_api import mcu_toolchain_matrix
from tools.test_api import get_autodetected_MUTS_list
if __name__ == '__main__':
try:
# Parse Options
parser = get_default_options_parser()
parser.add_argument("-S", "--supported-toolchains",
action="store_true",
dest="supported_toolchains",
default=False,
help="Displays supported matrix of targets and toolchains")
parser.add_argument('-f', '--filter',
dest='general_filter_regex',
default=None,
help='Filter targets')
parser.add_argument("-v", "--verbose",
action="store_true",
dest="verbose",
default=False,
help="Verbose diagnostic output")
options = parser.parse_args()
# Only prints matrix of supported toolchains
if options.supported_toolchains:
print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
exit(0)
# If auto_detect attribute is present, we assume other auto-detection
# parameters like 'toolchains_filter' are also set.
MUTs = get_autodetected_MUTS_list()
count = 0
for mut in MUTs.values():
print ""
print "[mbed] Detected %s, port %s, mounted %s" % (mut['mcu'], mut['port'], mut['disk'])
print "[mbed] Supported toolchains for %s" % mut['mcu']
print mcu_toolchain_matrix(platform_filter=r'^'+mut['mcu']+'$')
count += 1
if count == 0:
print "[mbed] No mbed targets where detected on your system."
except KeyboardInterrupt, e:
print "\n[CTRL+c] exit"
except Exception,e:
import traceback
traceback.print_exc(file=sys.stdout)
print "[ERROR] %s" % str(e)
sys.exit(1)
|
Python
| 0.000002
|
@@ -595,27 +595,8 @@
se.%0A
-%0A%0ATEST BUILD & RUN%0A
%22%22%22%0A
@@ -619,20 +619,8 @@
t os
-%0Aimport json
%0A%0ARO
@@ -713,17 +713,16 @@
ROOT)%0A%0A
-%0A
from too
@@ -1061,34 +1061,41 @@
t%0A%0A%0A
-if __name__ == '__main__':
+def main():%0A %22%22%22Entry Point%22%22%22
%0A
@@ -1249,32 +1249,34 @@
+
action=%22store_tr
@@ -1272,32 +1272,34 @@
n=%22store_true%22,%0A
+
@@ -1355,32 +1355,34 @@
+
default=False,%0A
@@ -1402,24 +1402,26 @@
+
+
help=%22Displa
@@ -1442,16 +1442,47 @@
atrix of
+%22%0A %22
targets
@@ -1568,24 +1568,26 @@
+
+
dest='genera
@@ -1621,32 +1621,34 @@
+
default=None,%0A
@@ -1645,16 +1645,18 @@
t=None,%0A
+
@@ -1736,32 +1736,34 @@
%22, %22--verbose%22,%0A
+
@@ -1815,24 +1815,26 @@
+
dest=%22verbos
@@ -1855,32 +1855,34 @@
+
default=False,%0A
@@ -1872,32 +1872,34 @@
default=False,%0A
+
@@ -2107,32 +2107,49 @@
oolchain_matrix(
+%0A
platform_filter=
@@ -2345,19 +2345,19 @@
-MUT
+mut
s = get_
@@ -2419,19 +2419,19 @@
mut in
-MUT
+mut
s.values
@@ -2516,16 +2516,34 @@
ed %25s%22 %25
+ %5C%0A
(mut%5B'm
@@ -2741,24 +2741,16 @@
nt += 1%0A
-
%0A
@@ -2868,19 +2868,16 @@
nterrupt
-, e
:%0A
@@ -2926,10 +2926,15 @@
tion
-,e
+ as exc
:%0A
@@ -3035,16 +3035,18 @@
%25 str(e
+xc
)%0A
@@ -3059,8 +3059,47 @@
exit(1)%0A
+%0Aif __name__ == '__main__':%0A main()%0A
|
d484d4e1782a5552e10f91ff25b6acd6d36b07ae
|
Rename variables; exit if not supported
|
wonderful_bing/wonderful_bing.py
|
wonderful_bing/wonderful_bing.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
wonderful_bing
~~~~~~~~~~~~~~
Wonderful_bing is a small and simple program that helps you download
pictures from Bing and set as wallpaper. My first python program :)
:copyright: (c) 2014 by lord63.
:license: MIT, see LICENSE for more details.
"""
__title__ = "wonderful_bing"
__version__ = "0.5.0"
__author__ = "lord63"
__license__ = "MIT"
__copyright__ = "Copyright 2014 lord63"
import re
import time
import os
import sys
from os import path
import argparse
import subprocess
from commands import getoutput
import requests
class WonderfulBing(object):
def __init__(self, config):
# Get all the information we need from this url, see issue#7
self.url = "http://www.bing.com/HPImageArchive.aspx?format=js&idx=0&n=\
1&nc=1409879295618&pid=hp"
information = requests.get(self.url).json()["images"][0]
self.copyright = information["copyright"]
self.picture_url = information["url"]
if not self.picture_url.startswith('http'):
self.picture_url = 'http://www.bing.com' + self.picture_url
self.directory = path.abspath(config['directory']) + '/'
def show_notify(self):
"""show the notify to get to know the picture story"""
title = "Today's Picture Story"
story_content = re.match(
".+(?=\(\xa9)", self.copyright).group().encode('utf-8')
notify_icon = path.dirname(path.realpath(__file__)) + '/img/icon.png'
safe_story_content = story_content.replace('"', '\"')
subprocess.Popen(["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, title, safe_story_content])
def get_picture_name(self):
"""get a nice picture name from the download url"""
match = re.search(
"(?<=/az/hprichbg/rb/).+?(?=_)", self.picture_url)
picture_name = match.group() + '.jpg'
return picture_name
def set_wallpaper(self, picture_path):
# We use this command to make it work when using cron, see #3
de = self.detect_de()
if de == 'gnome':
subprocess.Popen(
"DISPLAY=:0 GSETTINGS_BACKEND=dconf /usr/bin/gsettings set \
org.gnome.desktop.background picture-uri file://{0}".format(
picture_path), shell=True)
elif de == 'xfce':
subprocess.Popen(
"DISPLAY=:0 xfconf-query -c xfce4-desktop -p \
/backdrop/screen0/monitor0/image-path -s {0}".format(
picture_path), shell=True)
else:
pass
def download_and_set(self):
picture_name = self.get_picture_name()
picture_path = self.directory + picture_name
if path.exists(picture_path):
print "You have downloaded the picture before."
print "Have a look at it --> {0}".format(picture_path)
sys.exit()
        # Sleep for two seconds, otherwise the newly set wallpaper
        # will be set back by the system when your system boots up
# if you have added this script to autostart.
time.sleep(2)
# Set stream to true to get the raw content
request = requests.get(self.picture_url, stream=True)
with open(picture_path, "wb") as f:
for chunk in request.iter_content(1024):
f.write(chunk)
print "Successfully download the picture to --> {0}.".format(
picture_path)
self.set_wallpaper(picture_path)
print "Successfully set the picture as the wallpaper. :)"
self.show_notify()
def detect_de(self):
de = 'generic'
if os.environ.get('KDE_FULL_SESSION') == 'true':
de = 'kde'
elif os.environ.get('GNOME_DESKTOP_SESSION_ID'):
de = 'gnome'
else:
try:
info = getoutput('xprop -root')
if 'XFCE_DESKTOP_WINDOW' in info:
de = 'xfce'
except (OSError, RuntimeError):
pass
return de
def main():
parser = argparse.ArgumentParser(
prog='wonderful_bing',
description="Wonderful_bing's configuration")
parser.add_argument('-V', '--version', action='version',
version='%(prog)s {0}'.format(__version__))
parser.add_argument(
'-d', dest='directory',
help="set the directory to save Bing's imgs, end with '/'")
config = vars(parser.parse_args())
if not config['directory']:
sys.exit("Set the directory to save Bing's imgs first.\n"
"For more information, use --help.")
if not path.exists(config['directory']):
sys.exit('No such directory :(')
bing = WonderfulBing(config)
try:
bing.download_and_set()
except requests.exceptions.ConnectionError:
print "ConnectionError,check your network please."
print "Will try again after 5 minutes."
time.sleep(300)
bing.download_and_set()
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -2101,41 +2101,92 @@
de
- = self.detect_de()%0A if de
+sktop_environment = self.detect_desktop_environment()%0A if desktop_environment
==
@@ -2440,16 +2440,33 @@
elif de
+sktop_environment
== 'xfc
@@ -2578,22 +2578,16 @@
-
/backdro
@@ -2705,20 +2705,279 @@
-pass
+sys.exit(%0A %22Currently we don't support your desktop_environment: %7B%7D %5Cn%22%0A %22Please file an issue or make a pull request :) %5Cn%22%0A %22https://github.com/lord63/wonderful_bing%22.format(%0A desktop_environment))
%0A%0A de
@@ -3996,16 +3996,33 @@
etect_de
+sktop_environment
(self):%0A
@@ -4027,24 +4027,41 @@
:%0A de
+sktop_environment
= 'generic'
@@ -4128,24 +4128,41 @@
de
+sktop_environment
= 'kde'%0A
@@ -4225,24 +4225,41 @@
de
+sktop_environment
= 'gnome'%0A
@@ -4408,16 +4408,33 @@
de
+sktop_environment
= 'xfce
@@ -4517,16 +4517,33 @@
eturn de
+sktop_environment
%0A%0A%0Adef m
|
985dc40970ca236238b481d70dd94098c2c44167
|
Wrap all zmq messages in json
|
admin/web.py
|
admin/web.py
|
import os
import os.path
import sys
import tornado.auth
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.httpserver
import tornado.options as options
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options
import sockjs.tornado
import admin.uimodules
import zmq
import pymongo
import bson.json_util as json_util
import numpy as np
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from panoptes.utils import config, database, messaging, logger
define("port", default=8888, help="port", type=int)
define("db", default="panoptes", help="Name of the Mongo DB to use")
define("collection", default="admin", help="Name of the Mongo Collection to use")
define("debug", default=False, help="debug mode")
@logger.has_logger
@config.has_config
class Application(tornado.web.Application):
""" The main Application entry for our PANOPTES admin interface """
def __init__(self):
db = database.PanMongo()
# Setup up our communication socket to listen to Observatory broker
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect("tcp://localhost:5559")
AdminRouter = sockjs.tornado.SockJSRouter(
MessagingConnection,
'/messaging_conn',
user_settings=dict(db=db, socket=self.socket),
)
handlers = [
(r"/", MainHandler),
(r"/sensors", SensorHandler),
(r"/login", LoginHandler),
(r"/logout", LogoutHandler),
] + AdminRouter.urls
settings = dict(
cookie_secret="PANOPTES_SUPER_DOOPER_SECRET",
login_url="/login",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
db=db,
debug=options.debug,
site_title="PANOPTES",
ui_modules=admin.uimodules,
)
super(Application, self).__init__(handlers, **settings)
@config.has_config
class BaseHandler(tornado.web.RequestHandler):
"""
BaseHandler is inherited by all Handlers and is responsible for any
global operations. Provides the `db` property and the `get_current_user`
"""
@property
def db(self):
""" Simple property to access the DB easier """
return self.settings['db']
def get_current_user(self):
"""
Looks for a cookie that shows we have been logged in. If cookie
is found, attempt to look up user info in the database
"""
# Get email from cookie
email = tornado.escape.to_unicode(self.get_secure_cookie("email"))
if not email:
return None
# Look up user data
user_data = self.db.admin.find_one({'username': email})
if user_data is None:
return None
return user_data
class MainHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
user_data = self.current_user
webcams = self.config.get('webcams')
self.render("main.html", user_data=user_data, webcams=webcams)
class SensorHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
self.render("sensor_status.html")
@logger.has_logger
class MessagingConnection(sockjs.tornado.SockJSConnection):
""" Handler for the messaging connection.
This is the connection between the administrative web interface and
the running `Panoptes` instance.
Implemented with sockjs for websockets or long polling.
"""
def __init__(self, session):
""" """
self.session = session
self.logger.info('Setting up websocket mount control')
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect("tcp://localhost:5559")
self.connections = set()
self.cb_delay = 1e9 # ms
self.db = pymongo.MongoClient().panoptes
def on_open(self, info):
""" Action to be performed when a client first connects
We set up a periodic callback to the `_send_stats` method, effectively
updating the stats on the web site after every delay period
"""
self.logger.info('Setting up websocket mount control for user')
self.connections.add(self)
self.send("New connection to mount established")
self.loop = tornado.ioloop.PeriodicCallback(self._send_stats, self.cb_delay)
self.loop.start()
def on_message(self, message):
""" A message received from the client
The client will be passing commands to our `Panoptes` instance, which
are captured here and processed. Uses the REQ socket to communicate
with Observatory broker
Args:
message(str): Message received from client (web). This is
command that is then passed on to the broker, which is a
`Panoptes` instance.
"""
# Send message back to client as confirmation
# self.send("Message Received: {}".format(message))
self.logger.info("Message Received: {}".format(message))
# Send message to Mount
self.socket.send_string(message)
# Get response
response = self.socket.recv()
# Send the response back to the web admins
self.send(response)
def on_close(self):
""" Actions to be performed when web admin client leaves """
self.connections.remove(self)
self.loop.stop()
def _send_stats(self):
""" Sends the current environment stats to the web admin client
Called periodically from the `on_open` method, this simply grabs the
current stats from the mongo db, serializes them to json, and then sends
to the client.
"""
data_raw = self.db.sensors.find_one({'status': 'current', 'type': 'environment'})
data = json_util.dumps(data_raw.get('data'))
self.send(data)
class LoginHandler(BaseHandler):
"""
Login and authenticate the user and perform any actions for startup
"""
def get(self):
self.render("login.html")
def post(self):
email = tornado.escape.to_unicode(self.get_argument("email"))
self.set_secure_cookie("email", email)
self.redirect("/")
class LogoutHandler(BaseHandler):
"""
Operations run when the user logs out.
"""
def get(self):
self.clear_cookie("email")
self.redirect("/")
if __name__ == '__main__':
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
|
Python
| 0.999999
|
@@ -4016,17 +4016,17 @@
lay = 1e
-9
+3
# ms%0A%0A
@@ -5374,16 +5374,20 @@
+raw_
response
@@ -5405,24 +5405,155 @@
ket.recv()%0A%0A
+ response = json_util.dumps(%7B%0A 'type': 'mount',%0A 'message': raw_response.decode('ascii'),%0A %7D)%0A%0A
# Se
@@ -6191,16 +6191,76 @@
l.dumps(
+%7B%0A 'type': 'environment',%0A 'message':
data_raw
@@ -6271,16 +6271,27 @@
('data')
+,%0A %7D
)%0A
|
228c726baa9742f8ed99b543443c5ff96729e054
|
Move bool test above int test, because bool is a subclass of int
|
ext/dcos-installer/dcos_installer/config/__init__.py
|
ext/dcos-installer/dcos_installer/config/__init__.py
|
"""
Configuration loader for dcosgen
Set all configuration for a given run with a simple map
my_opts = {
'config_dir': '/tmp'
}
c = DcosConfig()
print(c)
"""
import json
import logging
import os
import yaml
from dcos_installer.util import CONFIG_PATH, SSH_KEY_PATH, IP_DETECT_PATH
log = logging.getLogger(__name__)
class DCOSConfig(dict):
"""
Return the site configuration object for dcosgen library
"""
def __init__(self, overrides={}, config_path=CONFIG_PATH, write_default_config=True):
defaults = """
---
# The name of your DC/OS cluster. Visible in the DC/OS user interface.
cluster_name: 'DC/OS'
master_discovery: static
exhibitor_storage_backend: 'static'
resolvers:
- 8.8.8.8
- 8.8.4.4
ssh_port: 22
process_timeout: 10000
bootstrap_url: file:///opt/dcos_install_tmp
"""
self.write_default_config = write_default_config
self.defaults = yaml.load(defaults)
self.config_path = config_path
self.overrides = overrides
self._build()
log.debug("Configuration:")
for k, v in self.items():
log.debug("%s: %s", k, v)
def get_hidden_config(self):
self.hidden_config = {
'ip_detect_filename': IP_DETECT_PATH,
'ssh_key_path': SSH_KEY_PATH,
}
def get_external_config(self):
self.external_config = {
'ssh_key': self._try_loading_from_disk(SSH_KEY_PATH),
'ip_detect_script': self._try_loading_from_disk(IP_DETECT_PATH)
}
def _try_loading_from_disk(self, path):
if os.path.isfile(path):
with open(path, 'r') as f:
return f.read()
else:
return None
def _build(self):
"""Build takes the default configuration, overrides this with
        the config on disk, and overrides that with configuration POSTed
to the backend"""
# Create defaults
for key, value in self.defaults.items():
self[key] = value
# Add user-land configuration
user_config = self.get_config_from_disk()
if user_config:
for k, v in user_config.items():
self[k] = v
# Override with POST data
self._add_overrides()
def _add_overrides(self):
if self.overrides is not None and len(self.overrides) > 0:
for key, value in self.overrides.items():
if value is None:
log.warning("Adding new configuration %s: %s", key, value)
self[key] = value
elif key in self:
log.warning("Overriding %s: %s -> %s", key, self[key], value)
self[key] = value
else:
log.warning("Adding new value %s: %s", key, value)
self[key] = value
def get_config_from_disk(self):
if os.path.isfile(self.config_path):
log.debug("Loading YAML configuration: %s", self.config_path)
with open(self.config_path, 'r') as data:
configuration = yaml.load(data)
else:
if self.write_default_config:
log.error(
"Configuration file not found, %s. Writing new one with all defaults.",
self.config_path)
self.write()
configuration = yaml.load(open(self.config_path))
else:
log.error("Configuration file not found: %s", self.config_path)
return {}
return configuration
def write(self):
"""Write the configuration to disk, removing keys that are not permitted to be
used by end-users"""
if self.config_path:
self._remove_unrequired_config_keys()
data = open(self.config_path, 'w')
data.write(yaml.dump(self._unbind_configuration(), default_flow_style=False, explicit_start=True))
data.close()
else:
log.error("Must pass config_path=/path/to/file to execute .write().")
def print_to_screen(self):
print(yaml.dump(self._unbind_configuration(), default_flow_style=False, explicit_start=True))
def _unbind_configuration(self):
"""Unbinds the methods and class variables from the DCOSConfig
object and returns a simple dictionary.
"""
dictionary = {}
for k, v in self.items():
dictionary[k] = v
return dictionary
def stringify_configuration(self):
"""Create a stringified version of the complete installer configuration
to send to gen.generate()"""
gen_config = {}
for key, value in self.items():
if isinstance(value, list):
log.debug("Caught list for genconf configuration, transforming to JSON string: %s", value)
value = json.dumps(value)
elif isinstance(value, int):
log.debug("Caught int for genconf configuration, transforming to string: %s", value)
value = str(value)
elif isinstance(value, bool):
if value:
value = 'true'
else:
value = 'false'
gen_config[key] = value
log.debug('Stringified configuration: \n{}'.format(gen_config))
return gen_config
def _remove_unrequired_config_keys(self):
"""Remove the configuration we do not want
in the config file.
:param config: The config dictionary
:type config: dict | {}
"""
do_not_write = [
'ssh_key',
'ssh_key_path',
'ip_detect_path',
'ip_detect_script'
]
for key in do_not_write:
if key in self:
del self[key]
|
Python
| 0.998967
|
@@ -4905,19 +4905,20 @@
(value,
-int
+bool
):%0A
@@ -4932,92 +4932,17 @@
-log.debug(%22Caught int for genconf configuration, transforming to string: %25s%22,
+if
value
-)
+:
%0A
@@ -4954,24 +4954,28 @@
+
value =
str(valu
@@ -4970,19 +4970,19 @@
e =
-str(value)%0A
+'true'%0A
@@ -4995,34 +4995,10 @@
el
-if isinstance(value, bool)
+se
:%0A
@@ -5015,79 +5015,171 @@
-if
+
value
-:%0A value = 'true'%0A else:%0A
+ = 'false'%0A%0A elif isinstance(value, int):%0A log.debug(%22Caught int for genconf configuration, transforming to string: %25s%22, value)%0A
@@ -5190,39 +5190,42 @@
value =
-'false'
+str(value)
%0A%0A ge
|
ebbcce590483a5970268db0c59bae0cec81648ad
|
Add example commands for the User Preferences api
|
storyboard/api/v1/user_preferences.py
|
storyboard/api/v1/user_preferences.py
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from pecan import abort
from pecan import request
from pecan import rest
from pecan.secure import secure
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from storyboard.api.auth import authorization_checks as checks
from storyboard.api.v1 import validations
from storyboard.common import decorators
import storyboard.db.api.users as user_api
from storyboard.openstack.common.gettextutils import _ # noqa
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class UserPreferencesController(rest.RestController):
validation_post_schema = validations.USER_PREFERENCES_POST_SCHEMA
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int)
def get_all(self, user_id):
"""Return all preferences for the current user.
:param user_id: An ID of the user.
"""
if request.current_user_id != user_id:
abort(403, _("You can't read preferences of other users."))
return
return user_api.user_get_preferences(user_id)
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int,
body=wtypes.DictType(wtypes.text, wtypes.text))
def post(self, user_id, body):
"""Allow a user to update their preferences. Note that a user must
explicitly set a preference value to Null/None to have it deleted.
:param user_id: The ID of the user whose preferences we're updating.
:param body: A dictionary of preference values.
"""
if request.current_user_id != user_id:
abort(403, _("You can't change preferences of other users."))
return user_api.user_update_preferences(user_id, body)
|
Python
| 0.000002
|
@@ -1481,16 +1481,158 @@
user.%0A%0A
+ Example::%0A%0A curl https://my.example.org/api/v1/users/21/preferences %5C%5C%0A -H 'Authorization: Bearer MY_ACCESS_TOKEN'%0A%0A
@@ -2269,16 +2269,288 @@
leted.%0A%0A
+ Example::%0A%0A curl https://my.example.org/api/v1/users/21/preferences %5C%5C%0A -H 'Authorization: Bearer MY_ACCESS_TOKEN' %5C%5C%0A -H 'Content-Type: application/json;charset=UTF-8' %5C%5C%0A --data-binary '%7B%22display_events_tags_added%22:%22false%22%7D'%0A%0A
|
839ff975b9d3cf29acd9c921e1b7c3722290d98a
|
Use np.nan_to_num instead of 'if x == 0' in _xlog2x
|
antropy/utils.py
|
antropy/utils.py
|
"""Helper functions"""
import numpy as np
from numba import jit
from math import log, floor
all = ['_embed', '_linear_regression', '_log_n', '_xlog2x']
def _embed(x, order=3, delay=1):
"""Time-delay embedding.
Parameters
----------
x : 1d-array
Time series, of shape (n_times)
order : int
Embedding dimension (order).
delay : int
Delay.
Returns
-------
embedded : ndarray
Embedded time-series, of shape (n_times - (order - 1) * delay, order)
"""
N = len(x)
if order * delay > N:
raise ValueError("Error: order * delay should be lower than x.size")
if delay < 1:
raise ValueError("Delay has to be at least 1.")
if order < 2:
raise ValueError("Order has to be at least 2.")
Y = np.zeros((order, N - (order - 1) * delay))
for i in range(order):
Y[i] = x[(i * delay):(i * delay + Y.shape[1])]
return Y.T
@jit('UniTuple(float64, 2)(float64[:], float64[:])', nopython=True)
def _linear_regression(x, y):
"""Fast linear regression using Numba.
Parameters
----------
x, y : ndarray, shape (n_times,)
Variables
Returns
-------
slope : float
Slope of 1D least-square regression.
intercept : float
Intercept
"""
n_times = x.size
sx2 = 0
sx = 0
sy = 0
sxy = 0
for j in range(n_times):
sx2 += x[j] ** 2
sx += x[j]
sxy += x[j] * y[j]
sy += y[j]
den = n_times * sx2 - (sx ** 2)
num = n_times * sxy - sx * sy
slope = num / den
intercept = np.mean(y) - slope * np.mean(x)
return slope, intercept
@jit('i8[:](f8, f8, f8)', nopython=True)
def _log_n(min_n, max_n, factor):
"""
Creates a list of integer values by successively multiplying a minimum
value min_n by a factor > 1 until a maximum value max_n is reached.
Used for detrended fluctuation analysis (DFA).
Function taken from the nolds python package
(https://github.com/CSchoel/nolds) by Christopher Scholzel.
Parameters
----------
min_n (float):
minimum value (must be < max_n)
max_n (float):
maximum value (must be > min_n)
factor (float):
factor used to increase min_n (must be > 1)
Returns
-------
list of integers:
min_n, min_n * factor, min_n * factor^2, ... min_n * factor^i < max_n
without duplicates
"""
max_i = int(floor(log(1.0 * max_n / min_n) / log(factor)))
ns = [min_n]
for i in range(max_i + 1):
n = int(floor(min_n * (factor ** i)))
if n > ns[-1]:
ns.append(n)
return np.array(ns, dtype=np.int64)
@np.vectorize
def _xlog2x(x):
"""Returns x log2 x if x is positive, 0 if x == 0, and np.nan
otherwise. This handles the case when the power spectrum density
takes any zero value.
"""
return 0.0 if x == 0 else x * np.log2(x)
|
Python
| 0.000628
|
@@ -2678,22 +2678,8 @@
)%0A%0A%0A
-@np.vectorize%0A
def
@@ -2874,27 +2874,22 @@
urn
-0.0 if x == 0 else
+np.nan_to_num(
x *
@@ -2898,9 +2898,19 @@
.log2(x)
+, nan=0.0)
%0A
|
f0ab4ecbc2e385dd69d644b6f8e4e41cdaa48423
|
Add note.
|
software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py
|
software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py
|
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from uuid import uuid1
from random import choice
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
class MonitorDB:
def __init__(self):
self.store = {}
def __setitem__(self, id, data):
self.store[id] = data
class GridMonitorService:
"""This monitor service acts as an intermediary for handling db and object
related functionality, and can be used to continually add more utilities
that are related to the single entity, but that shouldn't be stored
directly on it."""
def __init__(self, grid):
self.data = MonitorDB()
self.grid = grid
def check_status(self):
for id, light in self.grid.lights.iteritems():
print('Light #{} is currently: {} @ x:{} y:{} z:{}'.format(
id, light.status(), *light.coords))
class Monitor:
def on(self):
self.on = True
def off(self):
self.off = False
def status(self):
return 'ON' if self.on else 'OFF'
class LightMonitor(Monitor):
def __init__(self, coords):
self.coords = coords
# For fun
self.on = choice([True, False])
class LightGrid:
def __init__(self):
self.lights = {}
def __setitem__(self, id, coords):
self.lights[id] = LightMonitor(coords)
if DEBUG:
with Section('GRASP pure fabrication pattern'):
grid = LightGrid()
gridmon = GridMonitorService(grid)
for _ in xrange(10):
grid[uuid1()] = (rr(0, 1000), rr(0, 1000), rr(0, 1000))
gridmon.check_status()
|
Python
| 0
|
@@ -761,16 +761,143 @@
y on it.
+%0A%0A It can be though of as a service-like layer of indirection:%0A%0A entity %3C------%3E entity_service %3C------%3E data-store%0A%0A
%22%22%22%0A%0A
|
705e7f1d68e4fb6bf37db623869a2c6d623dd9ae
|
use a pytest fixture for the CommandManager related tests
|
sunpy/tests/database/test_commands.py
|
sunpy/tests/database/test_commands.py
|
from __future__ import absolute_import
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import pytest
from sunpy.database.commands import AddEntry, RemoveEntry, EditEntry,\
NoSuchEntryError, CommandManager
from sunpy.database.tables import DatabaseEntry
@pytest.fixture
def session():
# always create an in-memory database with its own new table in each test
engine = create_engine('sqlite:///:memory:')
Session = sessionmaker()
DatabaseEntry.metadata.create_all(bind=engine)
return Session(bind=engine)
def test_add_entry(session):
assert not session.new
entry = DatabaseEntry()
AddEntry(session, entry)()
assert len(session.new) == 1
assert entry.id is None
session.commit()
assert not session.new
assert entry.id == 1
def test_add_entry_undo(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
cmd.undo()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_add_entry_undo_precommit(session):
entry = DatabaseEntry()
cmd = AddEntry(session, entry)
cmd()
cmd.undo()
session.commit()
assert session.query(DatabaseEntry).count() == 0
def test_edit_entry_invalid(session):
with pytest.raises(ValueError):
EditEntry(DatabaseEntry())
def test_edit_entry(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
assert entry.id == 1
EditEntry(entry, id=42)()
assert entry.id == 42
def test_edit_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
session.commit()
cmd = EditEntry(entry, id=42)
cmd()
session.commit()
assert entry.id == 42
cmd.undo()
session.commit()
assert entry.id == 1
def test_remove_existing_entry(session):
entry = DatabaseEntry()
session.add(entry)
assert session.query(DatabaseEntry).count() == 1
assert entry.id == 1
RemoveEntry(session, entry)()
assert entry in session.deleted
assert session.query(DatabaseEntry).count() == 0
def test_remove_nonexisting_entry(session):
with pytest.raises(NoSuchEntryError):
RemoveEntry(session, DatabaseEntry())()
def test_remove_entry_undo(session):
entry = DatabaseEntry()
session.add(entry)
cmd = RemoveEntry(session, entry)
session.commit()
cmd()
assert session.query(DatabaseEntry).count() == 0
cmd.undo()
assert session.query(DatabaseEntry).count() == 1
def test_redo_stack_empty_after_call(session):
manager = CommandManager()
manager.do(AddEntry(session, DatabaseEntry()))
manager.do(AddEntry(session, DatabaseEntry()))
assert len(manager.undo_commands) == 2
session.commit()
manager.undo(2)
assert not manager.undo_commands
assert len(manager.redo_commands) == 2
manager.do(AddEntry(session, DatabaseEntry()))
assert not manager.redo_commands
|
Python
| 0
|
@@ -551,24 +551,93 @@
d=engine)%0A%0A%0A
+@pytest.fixture%0Adef command_manager():%0A return CommandManager()%0A%0A%0A
def test_add
@@ -2690,46 +2690,40 @@
sion
-):%0A manager = C
+, c
ommand
-M
+_m
anager
-(
)
+:
%0A
+command_
mana
@@ -2765,24 +2765,32 @@
try()))%0A
+command_
manager.do(A
@@ -2831,32 +2831,40 @@
%0A assert len(
+command_
manager.undo_com
@@ -2896,24 +2896,32 @@
ommit()%0A
+command_
manager.undo
@@ -2931,32 +2931,40 @@
%0A assert not
+command_
manager.undo_com
@@ -2984,16 +2984,24 @@
ert len(
+command_
manager.
@@ -3024,16 +3024,24 @@
= 2%0A
+command_
manager.
@@ -3094,16 +3094,24 @@
ert not
+command_
manager.
|
5883b249fa1ade49ad2e57dd8b39f79a4e5c5ed8
|
Better handling of {{{None}}}, {{{True}}} and {{{False}}} in the HDF wrapper.
|
trac/web/clearsilver.py
|
trac/web/clearsilver.py
|
# -*- coding: iso8859-1 -*-
#
# Copyright (C) 2005 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de>
#
# Trac is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Trac is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# Author: Christopher Lenz <cmlenz@gmx.de>
from trac.util import enum, TracError
class HDFWrapper:
"""
Convenience layer on top of the low-level ClearSilver python bindings
for HDF manipulation. This class makes the HDF look and behave more
like a standard Python dict.
>>> hdf = HDFWrapper()
>>> hdf['trac.url'] = 'http://projects.edgewall.com/trac/'
>>> hdf['trac.version'] = '1.0'
>>> print hdf
trac {
url = http://projects.edgewall.com/trac/
version = 1.0
}
HDFWrapper can also assign Python lists and dicts to HDF nodes,
automatically expanding them into the corresponding HDF structure.
A dictionary is mapped to a HDF node with named children:
>>> hdf = HDFWrapper()
>>> hdf['item'] = {'name': 'An item', 'value': '0'}
>>> print hdf
item {
name = An item
value = 0
}
A sequence is mapped to a HDF node with children whose names are
the indexes of the elements:
>>> hdf = HDFWrapper()
>>> hdf['items'] = ['Item 1', 'Item 2']
>>> print hdf
items {
0 = Item 1
1 = Item 2
}
Simple values can also be easily retrieved using the same syntax.
>>> hdf = HDFWrapper()
>>> hdf['time'] = 42
>>> hdf['time']
'42'
>>> hdf['name'] = 'Foo'
>>> hdf['name']
'Foo'
An attempt to retrieve a value that hasn't been set will raise a KeyError,
just like a standard dictionary:
>>> hdf['undef']
Traceback (most recent call last):
...
KeyError: 'undef'
    It may be preferable to return a default value if the given key does not exist.
It will return 'None' when the specified key is not present:
>>> hdf.get('time')
'42'
>>> hdf.get('undef')
A second argument may be passed to specify the default return value:
>>> hdf.get('time', 'Undefined Key')
'42'
>>> hdf.get('undef', 'Undefined Key')
'Undefined Key'
The 'in' and 'not in' operators can be used to test whether the HDF contains
a value with a given name.
>>> 'name' in hdf
True
>>> 'undef' in hdf
False
has_key() performs the same function:
>>> hdf.has_key('name')
True
>>> hdf.has_key('undef')
False
"""
hdf = None
def __init__(self, loadpaths=[]):
"""
Creates a new HDF dataset.
The loadpaths parameter can be used to specify a sequence of paths under
which ClearSilver will search for template files:
>>> hdf = HDFWrapper(loadpaths=['/etc/templates',
... '/home/john/templates'])
>>> print hdf
hdf {
loadpaths {
0 = /etc/templates
1 = /home/john/templates
}
}
"""
try:
import neo_cgi
# The following line is needed so that ClearSilver can be loaded when
# we are being run in multiple interpreters under mod_python
neo_cgi.update()
import neo_util
self.hdf = neo_util.HDF()
except ImportError, e:
raise TracError, "ClearSilver not installed (%s)" % e
self['hdf.loadpaths'] = loadpaths
def __getattr__(self, name):
# For backwards compatibility, expose the interface of the underlying HDF
# object
return getattr(self.hdf, name)
def __contains__(self, name):
return self.hdf.getObj(str(name)) != None
has_key = __contains__
def get(self, name, default=None):
value = self.hdf.getValue(str(name), '<<NONE>>')
if value == '<<NONE>>':
return default
return value
def __getitem__(self, name):
value = self.get(name, None)
if value == None:
raise KeyError, name
return value
def __setitem__(self, name, value):
def add_value(prefix, value):
if isinstance(value, (str, unicode)):
self.hdf.setValue(prefix, value)
elif isinstance(value, dict):
for k in value.keys():
add_value('%s.%s' % (prefix, k), value[k])
else:
if hasattr(value, '__iter__'):
for idx, item in enum(value):
add_value('%s.%d' % (prefix, idx), item)
else:
self.hdf.setValue(prefix, str(value))
add_value(name, value)
def __str__(self):
from StringIO import StringIO
buf = StringIO()
def hdf_tree_walk(node, prefix=''):
while node:
name = node.name() or ''
buf.write('%s%s' % (prefix, name))
value = node.value()
if value:
if value.find('\n') == -1:
buf.write(' = %s' % value)
else:
buf.write(' = << EOM\n%s\nEOM' % value)
if node.child():
buf.write(' {\n')
hdf_tree_walk(node.child(), prefix + ' ')
buf.write('%s}\n' % prefix)
else:
buf.write('\n')
node = node.next()
hdf_tree_walk(self.hdf.child())
return buf.getvalue().strip()
def parse(self, string):
"""
Parses the given string as template text, and returns a neo_cs.CS object.
"""
import neo_cs
cs = neo_cs.CS(self.hdf)
cs.parseStr(string)
return cs
def render(self, template):
"""
Renders the HDF using the given template.
        The template parameter can be either an already parsed neo_cs.CS
object, or a string. In the latter case it is interpreted as name of the
template file.
"""
if isinstance(template, (str, unicode)):
filename = template
import neo_cs
template = neo_cs.CS(self.hdf)
template.parseFile(filename)
return template.render()
def _test():
import doctest
doctest.testmod()
if __name__ == '__main__':
_test()
|
Python
| 0.999992
|
@@ -4708,32 +4708,184 @@
%0A if
+value == None or value == False:%0A return%0A elif value == True:%0A self.hdf.setValue(prefix, '1')%0A elif
isinstance(value
|
db31ec971f092c9b9a6171b7de727ce47739ec6a
|
Fix an issue where the lldbinit parsing logic throws an out of range error when the lldbinit contained content after the tulsi block.
|
src/TulsiGenerator/Scripts/bootstrap_lldbinit.py
|
src/TulsiGenerator/Scripts/bootstrap_lldbinit.py
|
#!/usr/bin/python
# Copyright 2018 The Tulsi Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bootstraps the presence and setup of ~/.lldbinit-tulsiproj."""
import os
import shutil
import StringIO
import sys
TULSI_LLDBINIT_FILE = os.path.expanduser('~/.lldbinit-tulsiproj')
CHANGE_NEEDED = 0
NO_CHANGE = 1
NOT_FOUND = 2
class BootstrapLLDBInit(object):
"""Bootstrap Xcode's preferred lldbinit for Bazel debugging."""
def _ExtractLLDBInitContent(self, lldbinit_path, source_string,
add_source_string):
"""Extracts non-Tulsi content in a given lldbinit if needed.
Args:
lldbinit_path: Absolute path to the lldbinit we are writing to.
source_string: String that we wish to write or remove from the lldbinit.
add_source_string: Boolean indicating whether we intend to write or remove
the source string.
Returns:
(int, [string]): A tuple featuring the status code along with the list
of strings representing content to write to lldbinit
that does not account for the Tulsi-generated strings.
Status code will be 0 if Tulsi-generated strings are
not all there. Status code will be 1 if we intend to
write Tulsi strings and all strings were accounted for.
Alternatively, if we intend to remove the Tulsi strings,
the status code will be 1 if none of the strings were
found. Status code will be 2 if the lldbinit file could
not be found.
"""
if not os.path.isfile(lldbinit_path):
return (NOT_FOUND, [])
content = []
with open(lldbinit_path) as f:
ignoring = False
# Split on the newline. This works as long as the last string isn't
# suffixed with \n.
source_lines = source_string.split('\n')
source_idx = 0
# If the last line was suffixed with \n, last elements would be length
# minus 2, accounting for the extra \n.
source_last = len(source_lines) - 1
for line in f:
# For each line found matching source_string, increment the iterator
# and do not append that line to the list.
if source_lines[source_idx] in line:
# If we intend to write the source string and all lines were found,
# return an error code with empty content.
if add_source_string and source_idx == source_last:
return (NO_CHANGE, [])
# Increment for each matching line found.
source_idx += 1
ignoring = True
continue
if ignoring:
# If the last line was found...
if source_lines[source_last] in line:
# Stop ignoring lines and continue appending to content.
ignoring = False
continue
# If the line could not be found within source_string, append to the
# content array.
content.append(line)
# If we intend to remove the source string and none of the lines to remove
# were found, return an error code with empty content.
if not add_source_string and source_idx == 0:
return (NO_CHANGE, [])
return (CHANGE_NEEDED, content)
def _LinkTulsiLLDBInit(self, add_source_string):
"""Adds or removes a reference to ~/.lldbinit-tulsiproj to the primary lldbinit file.
Xcode 8+ executes the contents of the first available lldbinit on startup.
To help work around this, an external reference to ~/.lldbinit-tulsiproj is
added to that lldbinit. This causes Xcode's lldb-rpc-server to load the
possibly modified contents between Debug runs of any given app. Note that
this only happens after a Debug session terminates; the cache is only fully
invalidated after Xcode is relaunched.
Args:
add_source_string: Boolean indicating whether we intend to write or remove
the source string.
"""
# ~/.lldbinit-Xcode is the only lldbinit file that Xcode will read if it is
# present, therefore it has priority.
lldbinit_path = os.path.expanduser('~/.lldbinit-Xcode')
if not os.path.isfile(lldbinit_path):
# If ~/.lldbinit-Xcode does not exist, write the reference to
# ~/.lldbinit-tulsiproj to ~/.lldbinit, the second lldbinit file that
# Xcode will attempt to read if ~/.lldbinit-Xcode isn't present.
lldbinit_path = os.path.expanduser('~/.lldbinit')
# String that we plan to inject or remove from this lldbinit.
source_string = ('# <TULSI> LLDB bridge [:\n'
'# This was autogenerated by Tulsi in order to modify '
'LLDB source-maps at build time.\n'
'command source %s\n' % TULSI_LLDBINIT_FILE +
'# ]: <TULSI> LLDB bridge')
# Retrieve the contents of lldbinit if applicable along with a return code.
return_code, content = self._ExtractLLDBInitContent(lldbinit_path,
source_string,
add_source_string)
out = StringIO.StringIO()
if add_source_string:
if return_code == CHANGE_NEEDED:
# Print the existing contents of this ~/.lldbinit without any malformed
# tulsi lldbinit block, and add the correct tulsi lldbinit block to the
# end of it.
for line in content:
out.write(line)
elif return_code == NO_CHANGE:
# If we should ignore the contents of this lldbinit, and it has the
# association with ~/.lldbinit-tulsiproj that we want, do not modify it.
return
# Add a newline after the source_string for protection from other elements
# within the lldbinit file.
out.write(source_string + '\n')
else:
if return_code != CHANGE_NEEDED:
# The source string was not found in the lldbinit so do not modify it.
return
# Print the existing contents of this ~/.lldbinit without the tulsi
# lldbinit block.
for line in content:
out.write(line)
out.seek(0, os.SEEK_END)
if out.tell() == 0:
# The file did not contain any content other than the source string so
# remove the file altogether.
os.remove(lldbinit_path)
return
with open(lldbinit_path, 'w') as outfile:
out.seek(0)
# Negative length to make copyfileobj write the whole file at once.
shutil.copyfileobj(out, outfile, -1)
def __init__(self, do_inject_link=True):
self._LinkTulsiLLDBInit(do_inject_link)
if __name__ == '__main__':
BootstrapLLDBInit()
sys.exit(0)
|
Python
| 0.000012
|
@@ -2793,32 +2793,62 @@
ist.%0A if
+source_idx %3C= source_last and
source_lines%5Bsou
@@ -3198,35 +3198,16 @@
g = True
-%0A continue
%0A%0A
|
aa278487b4e65da413a217729b852a9c08a090cf
|
create function headers and change request structure
|
pagarme/resources/handler_request.py
|
pagarme/resources/handler_request.py
|
import requests
import json
TEMPORARY_COMPANY = 'https://api.pagar.me/1/companies/temporary'
def validate_response(pagarme_response):
if pagarme_response.status_code == 200:
return pagarme_response.json()
else:
return error(pagarme_response.json())
def create_temporary_company():
company = requests.post(TEMPORARY_COMPANY)
valid_company = validate_response(company)
return valid_company
KEYS = {}
def authentication_key(api_key=None):
global KEYS
if api_key is None:
company = create_temporary_company()
api_key = company['api_key']['test']
encryption_key = company['encryption_key']['test']
KEYS['api_key'] = api_key
KEYS['encryption_key'] = encryption_key
return KEYS
else:
KEYS['api_key'] = api_key
return KEYS
def post(end_point, data={}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.post(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def get(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.get(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def put(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.put(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def delete(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.delete(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def error(data):
erros = data['errors']
return erros
|
Python
| 0
|
@@ -13,19 +13,8 @@
sts%0A
-import json
%0A%0ATE
@@ -891,59 +891,8 @@
y'%5D%0A
- headers = %7B'content-type': 'application/json'%7D%0A
@@ -935,37 +935,25 @@
_point,
-data=json.dumps(
+json=
data
-)
, header
@@ -953,32 +953,34 @@
headers=headers
+()
)%0A return val
@@ -1087,59 +1087,8 @@
y'%5D%0A
- headers = %7B'content-type': 'application/json'%7D%0A
@@ -1130,37 +1130,25 @@
_point,
-data=json.dumps(
+json=
data
-)
, header
@@ -1148,32 +1148,34 @@
headers=headers
+()
)%0A return val
@@ -1282,59 +1282,8 @@
y'%5D%0A
- headers = %7B'content-type': 'application/json'%7D%0A
@@ -1325,37 +1325,25 @@
_point,
-data=json.dumps(
+json=
data
-)
, header
@@ -1343,32 +1343,34 @@
headers=headers
+()
)%0A return val
@@ -1480,59 +1480,8 @@
y'%5D%0A
- headers = %7B'content-type': 'application/json'%7D%0A
@@ -1530,29 +1530,17 @@
nt,
-data=json.dumps(
+json=
data
-)
, he
@@ -1552,16 +1552,18 @@
=headers
+()
)%0A re
@@ -1666,8 +1666,97 @@
n erros%0A
+%0A%0Adef headers():%0A _headers = %7B'content-type': 'application/json'%7D%0A return _headers%0A
|
99641c5052a944e887fb786983e5575301c604a2
|
Replace _face_distance with more efficient calculation (#19)
|
face_recognition/api.py
|
face_recognition/api.py
|
# -*- coding: utf-8 -*-
import scipy.misc
import dlib
import numpy as np
try:
import face_recognition_models
except:
print("Please install `face_recognition_models` with this command before using `face_recognition`:")
print()
print("pip install git+https://github.com/ageitgey/face_recognition_models")
quit()
face_detector = dlib.get_frontal_face_detector()
predictor_model = face_recognition_models.pose_predictor_model_location()
pose_predictor = dlib.shape_predictor(predictor_model)
face_recognition_model = face_recognition_models.face_recognition_model_location()
face_encoder = dlib.face_recognition_model_v1(face_recognition_model)
def _rect_to_css(rect):
"""
Convert a dlib 'rect' object to a plain tuple in (top, right, bottom, left) order
:param rect: a dlib 'rect' object
:return: a plain tuple representation of the rect in (top, right, bottom, left) order
"""
return rect.top(), rect.right(), rect.bottom(), rect.left()
def _css_to_rect(css):
"""
Convert a tuple in (top, right, bottom, left) order to a dlib `rect` object
:param css: plain tuple representation of the rect in (top, right, bottom, left) order
:return: a dlib `rect` object
"""
return dlib.rectangle(css[3], css[0], css[1], css[2])
def _trim_css_to_bounds(css, image_shape):
"""
Make sure a tuple in (top, right, bottom, left) order is within the bounds of the image.
:param css: plain tuple representation of the rect in (top, right, bottom, left) order
:param image_shape: numpy shape of the image array
:return: a trimmed plain tuple representation of the rect in (top, right, bottom, left) order
"""
return max(css[0], 0), min(css[1], image_shape[1]), min(css[2], image_shape[0]), max(css[3], 0)
def _face_distance(faces, face_to_compare):
"""
    Given a list of face encodings, compare them to a known face encoding and get a euclidean distance
for each comparison face.
:param faces: List of face encodings to compare
:param face_to_compare: A face encoding to compare against
:return: A list with the distance for each face in the same order as the 'faces' array
"""
return np.array([np.linalg.norm(face - face_to_compare) for face in faces])
def load_image_file(filename, mode='RGB'):
"""
Loads an image file (.jpg, .png, etc) into a numpy array
:param filename: image file to load
:param mode: format to convert the image to. Only 'RGB' (8-bit RGB, 3 channels) and 'L' (black and white) are supported.
:return: image contents as numpy array
"""
return scipy.misc.imread(filename, mode=mode)
def _raw_face_locations(img, number_of_times_to_upsample=1):
"""
Returns an array of bounding boxes of human faces in a image
:param img: An image (as a numpy array)
:param number_of_times_to_upsample: How many times to upsample the image looking for faces. Higher numbers find smaller faces.
:return: A list of dlib 'rect' objects of found face locations
"""
return face_detector(img, number_of_times_to_upsample)
def face_locations(img, number_of_times_to_upsample=1):
"""
Returns an array of bounding boxes of human faces in a image
:param img: An image (as a numpy array)
:param number_of_times_to_upsample: How many times to upsample the image looking for faces. Higher numbers find smaller faces.
:return: A list of tuples of found face locations in css (top, right, bottom, left) order
"""
return [_trim_css_to_bounds(_rect_to_css(face), img.shape) for face in _raw_face_locations(img, number_of_times_to_upsample)]
def _raw_face_landmarks(face_image, face_locations=None):
if face_locations is None:
face_locations = _raw_face_locations(face_image)
else:
face_locations = [_css_to_rect(face_location) for face_location in face_locations]
return [pose_predictor(face_image, face_location) for face_location in face_locations]
def face_landmarks(face_image, face_locations=None):
"""
Given an image, returns a dict of face feature locations (eyes, nose, etc) for each face in the image
:param face_image: image to search
:param face_locations: Optionally provide a list of face locations to check.
:return: A list of dicts of face feature locations (eyes, nose, etc)
"""
landmarks = _raw_face_landmarks(face_image, face_locations)
landmarks_as_tuples = [[(p.x, p.y) for p in landmark.parts()] for landmark in landmarks]
# For a definition of each point index, see https://cdn-images-1.medium.com/max/1600/1*AbEg31EgkbXSQehuNJBlWg.png
return [{
"chin": points[0:14],
"left_eyebrow": points[17:22],
"right_eyebrow": points[22:27],
"nose_bridge": points[27:31],
"nose_tip": points[31:36],
"left_eye": points[36:42],
"right_eye": points[42:48],
"top_lip": points[48:55] + [points[64]] + [points[63]] + [points[62]] + [points[61]] + [points[60]],
"bottom_lip": points[54:60] + [points[48]] + [points[60]] + [points[67]] + [points[66]] + [points[65]] + [points[64]]
} for points in landmarks_as_tuples]
def face_encodings(face_image, known_face_locations=None, num_jitters=1):
"""
Given an image, return the 128-dimension face encoding for each face in the image.
:param face_image: The image that contains one or more faces
:param known_face_locations: Optional - the bounding boxes of each face if you already know them.
:param num_jitters: How many times to re-sample the face when calculating encoding. Higher is more accurate, but slower (i.e. 100 is 100x slower)
    :return: A list of 128-dimensional face encodings (one for each face in the image)
"""
raw_landmarks = _raw_face_landmarks(face_image, known_face_locations)
return [np.array(face_encoder.compute_face_descriptor(face_image, raw_landmark_set, num_jitters)) for raw_landmark_set in raw_landmarks]
def compare_faces(known_face_encodings, face_encoding_to_check, tolerance=0.6):
"""
Compare a list of face encodings against a candidate encoding to see if they match.
:param known_face_encodings: A list of known face encodings
:param face_encoding_to_check: A single face encoding to compare against the list
:param tolerance: How much distance between faces to consider it a match. Lower is more strict. 0.6 is typical best performance.
:return: A list of True/False values indicating which known_face_encodings match the face encoding to check
"""
return list(_face_distance(known_face_encodings, face_encoding_to_check) <= tolerance)
|
Python
| 0.005834
|
@@ -2204,18 +2204,8 @@
urn
-np.array(%5B
np.l
@@ -2219,16 +2219,17 @@
orm(face
+s
- face_
@@ -2242,28 +2242,16 @@
pare
-) for face in faces%5D
+, axis=1
)%0A%0A%0A
|
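
Taken together, the functions in the file above form a small pipeline: load an image, locate faces, encode each into a 128-dimension vector, and compare encodings. A minimal usage sketch, assuming the module is importable as face_recognition and that each image file (the paths here are made up) contains at least one face:

import face_recognition

# Load a reference image and a probe image (hypothetical paths).
known_image = face_recognition.load_image_file("person_a.jpg")
unknown_image = face_recognition.load_image_file("snapshot.jpg")

# Encode the first detected face in each image.
known_encoding = face_recognition.face_encodings(known_image)[0]
unknown_encoding = face_recognition.face_encodings(unknown_image)[0]

# compare_faces returns one boolean per known encoding (default tolerance 0.6).
matches = face_recognition.compare_faces([known_encoding], unknown_encoding)
print(matches[0])
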
5c05b933d88f3aa819660755360923fa10f8fcf3
|
Allow passing extra environment files when scaling nodes
|
tripleo_common/scale.py
|
tripleo_common/scale.py
|
# Copyright 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import shutil
from heatclient.common import template_utils
from tripleo_common import libutils
from tuskarclient.common import utils as tuskarutils
LOG = logging.getLogger(__name__)
TEMPLATE_NAME = 'overcloud-without-mergepy.yaml'
REGISTRY_NAME = "overcloud-resource-registry-puppet.yaml"
class ScaleManager(object):
def __init__(self, heatclient, stack_id, tuskarclient=None, plan_id=None,
tht_dir=None):
self.tuskarclient = tuskarclient
self.heatclient = heatclient
self.stack_id = stack_id
self.tht_dir = tht_dir
if self.tuskarclient:
self.plan = tuskarutils.find_resource(self.tuskarclient.plans,
plan_id)
def scaleup(self, role, num):
LOG.debug('updating role %s count to %d', role, num)
param_name = '{0}::count'.format(role)
param = next(x for x in self.plan.parameters if
x['name'] == param_name)
if num < int(param['value']):
raise ValueError("Role %s has already %s nodes, can't set lower "
"value" % (role, param['value']))
self.plan = self.tuskarclient.plans.patch(
self.plan.uuid,
[{'name': param_name, 'value': str(num)}])
self._update_stack()
def scaledown(self, instances):
resources = self.heatclient.resources.list(self.stack_id,
nested_depth=5)
resources_by_role = {}
instance_list = list(instances)
for res in resources:
try:
instance_list.remove(res.physical_resource_id)
except ValueError:
continue
stack_name, stack_id = next(
x['href'] for x in res.links if
x['rel'] == 'stack').rsplit('/', 2)[1:]
            # get the resource to remove from the resource group (it is the
            # parent resource of the nova server)
role_resource = next(x for x in resources if
x.physical_resource_id == stack_id)
# get tuskar role name from resource_type,
# resource_type is in format like "Tuskar::Compute-1"
role = role_resource.resource_type.rsplit('::', 1)[-1]
if role not in resources_by_role:
resources_by_role[role] = []
resources_by_role[role].append(role_resource)
if instance_list:
raise ValueError(
"Couldn't find following instances in stack %s: %s" %
(self.stack_id, ','.join(instance_list)))
# decrease count for each role (or resource group) and set removal
# policy for each resource group
if self.tuskarclient:
stack_params = self._get_removal_params_from_plan(
resources_by_role)
else:
stack_params = self._get_removal_params_from_heat(
resources_by_role)
self._update_stack(parameters=stack_params)
def _update_stack(self, parameters={}):
if self.tuskarclient:
self.tht_dir = libutils.save_templates(
self.tuskarclient.plans.templates(self.plan.uuid))
tpl_name = 'plan.yaml'
env_name = 'environment.yaml'
else:
tpl_name = TEMPLATE_NAME
env_name = REGISTRY_NAME
try:
tpl_files, template = template_utils.get_template_contents(
template_file=os.path.join(self.tht_dir, tpl_name))
env_files, env = (
template_utils.process_multiple_environments_and_files(
env_paths=[os.path.join(self.tht_dir, env_name)]))
fields = {
'existing': True,
'stack_id': self.stack_id,
'template': template,
'files': dict(list(tpl_files.items()) +
list(env_files.items())),
'environment': env,
'parameters': parameters
}
LOG.debug('stack update params: %s', fields)
self.heatclient.stacks.update(**fields)
finally:
if self.tuskarclient:
if LOG.isEnabledFor(logging.DEBUG):
LOG.debug("Tuskar templates saved in %s", self.tht_dir)
else:
shutil.rmtree(self.tht_dir)
def _get_removal_params_from_plan(self, resources_by_role):
patch_params = []
stack_params = {}
for role, role_resources in resources_by_role.items():
param_name = "{0}::count".format(role)
old_count = next(x['value'] for x in self.plan.parameters if
x['name'] == param_name)
count = max(int(old_count) - len(role_resources), 0)
patch_params.append({'name': param_name, 'value': str(count)})
# add instance resource names into removal_policies
# so heat knows which instances should be removed
removal_param = "{0}::removal_policies".format(role)
stack_params[removal_param] = [{
'resource_list': [r.resource_name for r in role_resources]
}]
LOG.debug('updating plan %s: %s', self.plan.uuid, patch_params)
self.plan = self.tuskarclient.plans.patch(self.plan.uuid, patch_params)
return stack_params
def _get_removal_params_from_heat(self, resources_by_role):
stack_params = {}
stack = self.heatclient.stacks.get(self.stack_id)
for role, role_resources in resources_by_role.items():
param_name = "{0}Count".format(role)
old_count = next(v for k, v in stack.parameters.iteritems() if
k == param_name)
count = max(int(old_count) - len(role_resources), 0)
stack_params[param_name] = str(count)
# add instance resource names into removal_policies
# so heat knows which instances should be removed
removal_param = "{0}RemovalPolicies".format(role)
stack_params[removal_param] = [{
'resource_list': [r.resource_name for r in role_resources]
}]
return stack_params
|
Python
| 0.000007
|
@@ -1047,16 +1047,40 @@
dir=None
+, environment_files=None
):%0A
@@ -1216,16 +1216,67 @@
tht_dir%0A
+ self.environment_files = environment_files%0A
@@ -4251,16 +4251,175 @@
_name))%0A
+ env_paths = %5Bos.path.join(self.tht_dir, env_name)%5D%0A if self.environment_files:%0A env_paths.extend(self.environment_files)%0A
@@ -4441,16 +4441,16 @@
env = (%0A
-
@@ -4547,46 +4547,17 @@
ths=
-%5Bos.path.join(self.tht_dir, env_name)%5D
+env_paths
))%0A
|
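
The change above threads an optional environment_files list through ScaleManager and extends env_paths with it before the stack update, so extra Heat environments are merged after the default one. A hedged sketch of the resulting call site; the heatclient object, stack id, and file paths are placeholders:

# heatclient is assumed to be an already-authenticated Heat client.
manager = ScaleManager(
    heatclient=heatclient,
    stack_id='overcloud',
    tht_dir='/usr/share/openstack-tripleo-heat-templates',
    environment_files=['network-env.yaml', 'storage-env.yaml'])
# Later environments can override parameters from earlier ones,
# which is why the extras are appended after the default env_name.
manager.scaledown(instances=['nova-server-uuid'])
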
6e257cef321023934477d95903d0b75c218c9264
|
Remove extra `max_length` from ticket `background`.
|
api_v3/models.py
|
api_v3/models.py
|
from django.db import models
from django.conf import settings
from activity.models import Action # noqa
from core.countries import COUNTRIES
from accounts.models import Profile # noqa
from ticket.constants import REQUESTER_TYPES, TICKET_STATUS, TICKET_TYPES
class Responder(models.Model):
"""Intermediate model for ticket responders (M2M)."""
user = models.ForeignKey(settings.AUTH_USER_MODEL, db_index=True)
ticket = models.ForeignKey(
'Ticket', related_name='responders', db_index=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ('user', 'ticket')
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user responder objects.
Either related to the tickets he created or he is subscribed to.
"""
return (queryset or cls.objects).filter(
# Allow own responder objects
models.Q(user=user) |
# Allow related to user responder tickets
models.Q(ticket__users=user) |
# Allow related to user created tickets
models.Q(ticket__requester=user)
)
class Ticket(models.Model):
"""Ticket model."""
STATUSES = TICKET_STATUS
users = models.ManyToManyField(
settings.AUTH_USER_MODEL, through=Responder, db_index=True)
requester = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='requested_tickets',
db_index=True)
kind = models.CharField(
blank=False, max_length=70, choices=TICKET_TYPES,
default=TICKET_TYPES[-1][0], db_index=True)
request_type = models.CharField(
blank=False, max_length=70, choices=REQUESTER_TYPES,
default=REQUESTER_TYPES[0][0], db_index=True)
status = models.CharField(
max_length=70, choices=TICKET_STATUS,
default=TICKET_STATUS[0][0], db_index=True)
sensitive = models.BooleanField(default=False)
whysensitive = models.CharField(max_length=150, null=True, blank=True)
deadline_at = models.DateTimeField(null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
sent_notifications_at = models.DateTimeField(null=True)
# Other ticket type fields, also common to all other types
background = models.TextField(blank=False, max_length=1000)
# Person ticket type fields
first_name = models.CharField(max_length=512, null=True, blank=True)
last_name = models.CharField(max_length=512, null=True, blank=True)
born_at = models.DateTimeField(null=True)
connections = models.TextField(max_length=1000, null=True, blank=True)
sources = models.TextField(max_length=1000, null=True, blank=True)
business_activities = models.TextField(
null=True, max_length=1000, blank=True)
initial_information = models.TextField(
max_length=1000, null=True, blank=True)
# Company ticket type fields
company_name = models.CharField(max_length=512, null=True, blank=True)
country = models.CharField(
max_length=100, choices=COUNTRIES, null=True, db_index=True, blank=True)
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user tickets.
Either the ones he created or he is subscribed to.
"""
return (queryset or cls.objects).filter(
# Allow ticket authors
models.Q(requester=user) |
# Allow ticket responders
models.Q(users=user)
).distinct()
class Attachment(models.Model):
"""Ticket attachment model."""
ticket = models.ForeignKey(
Ticket, blank=False, related_name='attachments', db_index=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=False, db_index=True)
upload = models.FileField(upload_to='attachments/%Y/%m/%d', max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user accessible attachments.
Either the ones he created or he has access to through the tickets.
"""
return (queryset or cls.objects).filter(
# Let ticket authors and responders see ticket attachments
models.Q(ticket__in=Ticket.filter_by_user(user)) |
# Let attachment authors see own attachments
models.Q(user=user)
)
class Comment(models.Model):
"""Ticket comment model."""
ticket = models.ForeignKey(
Ticket, blank=False, related_name='comments', db_index=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=False, db_index=True)
body = models.TextField(blank=False)
created_at = models.DateTimeField(auto_now_add=True)
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user accessible comments.
Either the ones he created or he has access to through the tickets.
"""
return (queryset or cls.objects).filter(
# Let ticket authors and responders see ticket attachments
models.Q(ticket__in=Ticket.filter_by_user(user)) |
# Let attachment authors
models.Q(user=user)
)
|
Python
| 0.000011
|
@@ -2348,33 +2348,16 @@
nk=False
-, max_length=1000
)%0A%0A #
|
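
Context for the one-line removal above: on a Django TextField, max_length is reflected only in auto-generated form widgets and is not enforced at the model or database level, so the argument on background was a misleading no-op. If a hard limit were still wanted, a validator would make it explicit — a sketch, not part of this commit:

from django.core.validators import MaxLengthValidator
from django.db import models

class Sketch(models.Model):
    # Checked during model/form validation, unlike TextField(max_length=...),
    # which only sizes the rendered widget.
    background = models.TextField(validators=[MaxLengthValidator(1000)])
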
4acce9768d0dee9df75dc28806149bc4a7663d24
|
fix #433
|
datajoint/jobs.py
|
datajoint/jobs.py
|
from .hash import key_hash
import os
import pymysql
from .base_relation import BaseRelation
from . import DataJointError
ERROR_MESSAGE_LENGTH = 2047
TRUNCATION_APPENDIX = '...truncated'
class JobTable(BaseRelation):
"""
A base relation with no definition. Allows reserving jobs
"""
def __init__(self, arg, database=None):
if isinstance(arg, JobTable):
super().__init__(arg)
# copy constructor
self.database = arg.database
self._connection = arg._connection
self._definition = arg._definition
self._user = arg._user
return
super().__init__()
self.database = database
self._connection = arg
self._definition = """ # job reservation table for `{database}`
table_name :varchar(255) # className of the table
key_hash :char(32) # key hash
---
status :enum('reserved','error','ignore') # if tuple is missing, the job is available
key=null :blob # structure containing the key
error_message="" :varchar({error_message_length}) # error message returned if failed
error_stack=null :blob # error stack if failed
user="" :varchar(255) # database user
host="" :varchar(255) # system hostname
pid=0 :int unsigned # system process id
connection_id = 0 : bigint unsigned # connection_id()
timestamp=CURRENT_TIMESTAMP :timestamp # automatic timestamp
""".format(database=database, error_message_length=ERROR_MESSAGE_LENGTH)
if not self.is_declared:
self.declare()
self._user = self.connection.get_user()
@property
def definition(self):
return self._definition
@property
def table_name(self):
return '~jobs'
def delete(self):
"""bypass interactive prompts and dependencies"""
self.delete_quick()
def drop(self):
"""bypass interactive prompts and dependencies"""
self.drop_quick()
def reserve(self, table_name, key):
"""
Reserve a job for computation. When a job is reserved, the job table contains an entry for the
job key, identified by its hash. When jobs are completed, the entry is removed.
:param table_name: `database`.`table_name`
:param key: the dict of the job's primary key
        :return: True if the job was reserved successfully; False if the job is already taken
"""
job = dict(
table_name=table_name,
key_hash=key_hash(key),
status='reserved',
host=os.uname().nodename,
pid=os.getpid(),
connection_id=self.connection.connection_id,
key=key,
user=self._user)
try:
self.insert1(job, ignore_extra_fields=True)
except (pymysql.err.IntegrityError, DataJointError):
return False
return True
def complete(self, table_name, key):
"""
Log a completed job. When a job is completed, its reservation entry is deleted.
:param table_name: `database`.`table_name`
:param key: the dict of the job's primary key
"""
job_key = dict(table_name=table_name, key_hash=key_hash(key))
(self & job_key).delete_quick()
def error(self, table_name, key, error_message, error_stack=None):
"""
Log an error message. The job reservation is replaced with an error entry.
if an error occurs, leave an entry describing the problem
:param table_name: `database`.`table_name`
:param key: the dict of the job's primary key
:param error_message: string error message
:param error_stack: stack trace
"""
if len(error_message) > ERROR_MESSAGE_LENGTH:
error_message = error_message[:ERROR_MESSAGE_LENGTH-len(TRUNCATION_APPENDIX)] + TRUNCATION_APPENDIX
job_key = dict(table_name=table_name, key_hash=key_hash(key))
self.insert1(
dict(job_key,
status="error",
host=os.uname().nodename,
pid=os.getpid(),
connection_id=self.connection.connection_id,
user=self._user,
key=key,
error_message=error_message,
error_stack=error_stack),
replace=True, ignore_extra_fields=True)
|
Python
| 0.000001
|
@@ -1,12 +1,41 @@
+from _decimal import Decimal%0A
from .hash i
@@ -49,16 +49,16 @@
ey_hash%0A
-
import o
@@ -2060,32 +2060,199 @@
f.drop_quick()%0A%0A
+ @staticmethod%0A def packable_or_none(key):%0A for v in key.values():%0A if isinstance(v, Decimal):%0A return None%0A return key%0A%0A
def reserve(
@@ -2936,35 +2936,58 @@
key=
+self.packable_or_none(
key
+)
,%0A us
@@ -4499,19 +4499,42 @@
key=
+self.packable_or_none(
key
+)
,%0A
|
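
The fix above adds packable_or_none, which returns None (so the key blob is stored as NULL) whenever any key value is a Decimal, since such values could not be packed. The same check in isolation, using the public decimal module rather than the _decimal import seen in the diff:

from decimal import Decimal

def packable_or_none(key):
    """Return key unchanged, or None if any value is a Decimal."""
    for v in key.values():
        if isinstance(v, Decimal):
            return None
    return key

print(packable_or_none({'subject_id': 1}))           # {'subject_id': 1}
print(packable_or_none({'amount': Decimal('1.5')}))  # None
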
cfae8d0430f3e3be1a67ff81dd7e3d8d74e8a7a8
|
Remove spaces.
|
partner_create_by_vat/res_partner.py
|
partner_create_by_vat/res_partner.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Romanian accounting localization for OpenERP V7
# @author - Fekete Mihai, Tatár Attila <atta@nvm.ro>
# Copyright (C) 2011-2013 TOTAL PC SYSTEMS (http://www.www.erpsystems.ro).
# Copyright (C) 2013 Tatár Attila
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import time
from string import maketrans
import requests
from stdnum.eu.vat import check_vies
from lxml import html
from openerp import models, fields, api, _
from openerp.exceptions import Warning
CEDILLATRANS = maketrans(u'\u015f\u0163\u015e\u0162'.encode(
'utf8'), u'\u0219\u021b\u0218\u021a'.encode('utf8'))
def getMfinante(cod):
headers = {
"User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)",
"Content-Type": "multipart/form-data;"
}
params = {'cod': cod}
res = requests.get(
'http://www.mfinante.ro/infocodfiscal.html',
params=params,
headers=headers
)
res.raise_for_status()
htm = html.fromstring(res.text)
    # there are 2 tables; the first one is the important one
table = htm.xpath("//div[@id='main']//center/table")[0]
result = dict()
for tr in table.iterchildren():
key = ' '.join([x.strip() for x in tr.getchildren()[
0].text_content().split('\n') if x.strip() != ''])
val = ' '.join([x.strip() for x in tr.getchildren()[
1].text_content().split('\n') if x.strip() != ''])
result[key] = val.encode('utf8').translate(CEDILLATRANS).decode('utf8')
return result
class res_partner(models.Model):
_name = "res.partner"
_inherit = "res.partner"
name = fields.Char('Name', required=True, select=True, default=' ')
@api.one
def button_get_partner_data(self):
def _check_vat_ro(vat):
return bool(len(part.name.strip()) > 2 and
part.name.strip().upper()[:2] == 'RO' and
part.name.strip()[2:].isdigit())
part = self[0]
vat = part.vat
if vat:
self.write({'vat': part.vat.upper().replace(" ","")})
elif part.name and len(part.name.strip())>2 and part.name.strip().upper()[:2]=='RO' and part.name.strip()[2:].isdigit():
self.write( {'vat': part.name.upper().replace(" ","")})
if not part.vat and part.name:
try:
vat_country, vat_number = self._split_vat(part.name.upper().replace(" ",""))
valid = self.vies_vat_check(vat_country, vat_number)
if valid:
self.write( {'vat': part.name.upper().replace(" ","")})
except:
raise Warning(_("No VAT number found"))
vat_country, vat_number = self._split_vat(part.vat)
if part.vat_subjected:
self.write({'vat_subjected': False})
if vat_number and vat_country:
self.write({
'is_company': True,
'country_id': self.env['res.country'].search(
[('code', 'ilike', vat_country)])[0].id
})
if vat_country == 'ro':
nrc_key = 'Numar de inmatriculare la Registrul Comertului:'
tva_key = 'Taxa pe valoarea adaugata (data luarii in evidenta):'
try:
result = getMfinante(vat_number)
name = nrc = adresa = tel = fax = False
zip1 = vat_s = state = False
if 'Denumire platitor:' in result.keys():
name = result['Denumire platitor:'].upper()
if 'Adresa:' in result.keys():
adresa = result['Adresa:'].title() or ''
if nrc_key in result.keys():
nrc = result[nrc_key].replace(' ', '')
if nrc == '-/-/-':
nrc = ''
if 'Codul postal:' in result.keys():
zip1 = result['Codul postal:'] or ''
if 'Judetul:' in result.keys():
jud = result['Judetul:'].title() or ''
if jud.lower().startswith('municip'):
jud = ' '.join(jud.split(' ')[1:])
if jud != '':
state = self.env['res.country.state'].search(
[('name', 'ilike', jud)])
if state:
state = state[0].id
if 'Telefon:' in result.keys():
tel = result['Telefon:'].replace('.', '') or ''
if 'Fax:' in result.keys():
fax = result['Fax:'].replace('.', '') or ''
if tva_key in result.keys():
vat_s = bool(
result[tva_key] != 'NU')
self.write({
'name': name or '',
'nrc': nrc or '',
'street': adresa or '',
'phone': tel or '',
'fax': fax or '',
'zip': zip1 or '',
'vat_subjected': vat_s or False,
'state_id': state,
})
except:
res = requests.get(
'http://openapi.ro/api/companies/%s.json' % vat_number)
if res.status_code == 200:
res = res.json()
state = False
if res['state']:
state = self.env['res.country.state'].search(
[('name', '=', res['state'].title())])
if state:
state = state[0].id
self.write({
'name': res['name'].upper(),
'nrc': res['registration_id'] and res['registration_id'].upper() or '',
'street': res['address'].title(),
'city': res['city'].title(),
'phone': res['phone'] and res['phone'] or '',
'fax': res['fax'] and res['fax'] or '',
'zip': res['zip'] and res['zip'] or '',
'vat_subjected': bool(res['vat'] == '1'),
'state_id': state,
})
else:
try:
result = check_vies(part.vat)
if result.name and result.name != '---':
self.write({
'name': unicode(result.name).upper(),
'is_company': True,
'vat_subjected': True
})
if (not part.street and
result.address and
result.address != '---'):
self.write({
'street': result.address.decode('utf-8').title()
})
self.write({'vat_subjected': result.valid})
except:
self.write({
'vat_subjected': self.vies_vat_check(vat_country, vat_number)
})
|
Python
| 0.000771
|
@@ -3539,33 +3539,8 @@
vat)
- %0A
%0A%0A
|
1431a54d77ebded23b0842b219dc011ac080c471
|
update django 1.3 deprecated context processor & template loader
|
findingaids/settings.py
|
findingaids/settings.py
|
from os import path
import os
os.environ['CELERY_LOADER'] = 'django'
# use a differently-named default queue to keep separate from other projects using celery
CELERY_DEFAULT_QUEUE = 'findingaids'
#Logger Setup
#Add custom logging level to allow us to tun off logging via tha config file
import logging
logging.NOLOG = 60
logging.addLevelName(logging.NOLOG, "NOLOG")
# Get the directory of this file for relative dir paths.
# Django sets too many absolute paths.
BASE_DIR = path.dirname(path.abspath(__file__))
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'fa.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = path.join(BASE_DIR, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'ha=7$wd7wq7n)8!#h&qn_%0*rul!ez*h-xm#v)l$wg&5nkjk%7'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.http.ConditionalGetMiddleware',
)
ROOT_URLCONF = 'findingaids.urls'
TEMPLATE_DIRS = [
path.join(BASE_DIR, 'templates'),
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
]
# also look for templates in virtualenv
import os
if 'VIRTUAL_ENV' in os.environ:
genlib_path = os.path.join(os.environ['VIRTUAL_ENV'], 'themes', 'genlib')
TEMPLATE_DIRS.append(genlib_path)
TEMPLATE_CONTEXT_PROCESSORS = (
#django default context processors
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.contrib.messages.context_processors.messages",
# additional context processors
"django.core.context_processors.request", # always include request in render context
"findingaids.fa.context_processors.searchform", # search form on every page
"findingaids.fa.context_processors.version", # software version on every page
)
# Enable additional backends.
# Enable this for LDAP and see ReadMe for install dependencies.
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',
'eullocal.django.emory_ldap.backends.EmoryLDAPBackend')
AUTH_PROFILE_MODULE = 'emory_ldap.EmoryLDAPUserProfile'
LOGIN_URL="/admin/accounts/login/"
LOGIN_REDIRECT_URL="/admin/"
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
# NOTE: using memory cache for now for simplicity
CACHE_BACKEND = 'locmem://'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'djcelery',
#'eulcore', # https://svn.library.emory.edu/svn/python-eulcore/
'eullocal.django.emory_ldap',
'eullocal.django.taskresult',
'eullocal.django.util',
'eulexistdb',
'eulxml',
'findingaids.fa',
'findingaids.fa_admin',
'findingaids.content',
)
EXTENSION_DIRS = (
#path.join(BASE_DIR, '../external/django-modules'),
)
EXISTDB_INDEX_CONFIGFILE = path.join(BASE_DIR, "exist_index.xconf")
import sys
try:
sys.path.extend(EXTENSION_DIRS)
except NameError:
pass # EXTENSION_DIRS not defined. This is OK; we just won't use it.
del sys
try:
from localsettings import *
except ImportError:
import sys
print >>sys.stderr, 'No local settings. Trying to start, but if ' + \
'stuff blows up, try copying localsettings-sample.py to ' + \
'localsettings.py and setting appropriately for your environment.'
pass
TEST_RUNNER = 'eulexistdb.testutil.ExistDBTextTestSuiteRunner'
try:
# use xmlrunner if it's installed; default runner otherwise. download
# it from http://github.com/danielfm/unittest-xml-reporting/ to output
# test results in JUnit-compatible XML.
import xmlrunner
TEST_RUNNER = 'eulexistdb.testutil.ExistDBXmlTestSuiteRunner'
TEST_OUTPUT_DIR='test-results'
except ImportError:
pass
|
Python
| 0
|
@@ -196,181 +196,8 @@
'%0A%0A%0A
-#Logger Setup%0A#Add custom logging level to allow us to tun off logging via tha config file%0Aimport logging%0Alogging.NOLOG = 60%0Alogging.addLevelName(logging.NOLOG, %22NOLOG%22)%0A%0A%0A%0A
# Ge
@@ -474,36 +474,22 @@
esystem.
-l
+L
oad
-_template_sourc
e
+r
',%0A '
@@ -2506,298 +2506,8 @@
7'%0A%0A
-# List of callables that know how to import templates from various sources.%0ATEMPLATE_LOADERS = (%0A 'django.template.loaders.filesystem.load_template_source',%0A 'django.template.loaders.app_directories.load_template_source',%0A# 'django.template.loaders.eggs.load_template_source',%0A)%0A%0A
MIDD
@@ -3437,34 +3437,42 @@
s%0A %22django.co
-re
+ntrib.auth
.context_process
|
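
For reference, the deprecations this commit chases are renames introduced around Django 1.2-1.4; the mapping below is a convenience summary rather than project code:

DJANGO_RENAMES = {
    'django.template.loaders.filesystem.load_template_source':
        'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.load_template_source':
        'django.template.loaders.app_directories.Loader',
    'django.core.context_processors.auth':
        'django.contrib.auth.context_processors.auth',
}
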
ca076bbd397edd87fd1a26ee119ac29622868f03
|
Fix test
|
paystackapi/tests/test_bulkcharge.py
|
paystackapi/tests/test_bulkcharge.py
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.bulkcharge import BulkCharge
class TestBulkCharge(BaseTestCase):
@httpretty.activate
def test_initiate_bulk_charge(self):
""" Method for testing the initiation of a bulk charge"""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/bulkcharge"),
            content_type='application/json',
body='{"status": true, "message": "Charges have been queued"}',
status=200,
)
response = BulkCharge.initiate_bulk_charge(
bulkcharge=[
{"authorization": "AUTH_n95vpedf", "amount": 2500},
{"authorization": "AUTH_ljdt4e4j", "amount": 1500}
]
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_bulk_charge(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge"),
content_type='application/json',
body = '{"status": true, "message": "Bulk charges retrieved",}',
status=200,
)
response = BulkCharge.list()
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_orj0ttn8vtp80hx/charges"),
content_type='text/json',
body = '{"status": true, "message": "Bulk charges retrieved",}',
status=200,
)
response = BulkCharge.fetch_bulk_batch(
id_or_code= "BCH_orj0ttn8vtp80hx",
)
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_charges_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/bulkcharge/BCH_180tl7oq7cayggh/charges"),
content_type= 'text/json',
body='{"status": true, "message": "Bulk charge items retrieved",}',
status=200,
)
response = BulkCharge.fetch_charges_batch(
id_or_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_pause_bulk_batch(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/pause/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{""status": true, "message": "Bulk charge batch has been paused""}',
status=201,
)
response = BulkCharge.pause_bulk_batch(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_resume_batch_charges(self):
""" """
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("bulkcharge/resume/BCH_orj0ttn8vtp80hx"),
content_type='text/json',
body='{"status": true, "message": "Bulk charge batch has been resumed"}',
status=201,
)
response = BulkCharge.resume_bulk_charge(
batch_code="BCH_orj0ttn8vtp80hx"
)
self.assertTrue(response['status'])
|
Python
| 0.000004
|
@@ -872,28 +872,16 @@
est_list
-_bulk_charge
(self):%0A
@@ -1057,33 +1057,32 @@
body =
-
'%7B%22status%22: true
@@ -1110,33 +1110,32 @@
arges retrieved%22
-,
%7D',%0A
@@ -1442,24 +1442,16 @@
8vtp80hx
-/charges
%22),%0A
@@ -1500,19 +1500,17 @@
body
- =
+=
'%7B%22statu
@@ -1546,33 +1546,32 @@
arges retrieved%22
-,
%7D',%0A
@@ -1664,17 +1664,16 @@
or_code=
-
%22BCH_orj
@@ -1685,17 +1685,16 @@
vtp80hx%22
-,
%0A
@@ -1933,23 +1933,23 @@
BCH_
-180tl7oq7cayggh
+orj0ttn8vtp80hx
/cha
@@ -2070,17 +2070,16 @@
trieved%22
-,
%7D',%0A
@@ -2527,17 +2527,16 @@
body='%7B%22
-%22
status%22:
@@ -2588,17 +2588,16 @@
paused%22
-%22
%7D',%0A
|
65837af3a7af766af695eb2a934b5bb7a451bfcb
|
Remove mongodb from google_set
|
perfkitbenchmarker/benchmark_sets.py
|
perfkitbenchmarker/benchmark_sets.py
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark set specific functions and definitions."""
from perfkitbenchmarker import benchmarks
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import flags
from perfkitbenchmarker import windows_benchmarks
FLAGS = flags.FLAGS
MESSAGE = 'message'
BENCHMARK_LIST = 'benchmark_list'
STANDARD_SET = 'standard_set'
BENCHMARK_SETS = {
STANDARD_SET: {
MESSAGE: ('The standard_set is a community agreed upon set of '
'benchmarks to measure Cloud performance.'),
BENCHMARK_LIST: [
'aerospike', 'cassandra_stress', 'object_storage_service',
'cluster_boot', 'copy_throughput', 'coremark', 'fio',
'hadoop_terasort', 'hpcc', 'iperf', 'mesh_network', 'mongodb_ycsb',
'netperf', 'ping', 'redis', 'speccpu2006', 'block_storage_workload',
'sysbench_oltp', 'unixbench']
},
'arm_set': {
MESSAGE: 'ARM benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'broadcom_set': {
MESSAGE: 'Broadcom benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'canonical_set': {
MESSAGE: 'Canonical benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'centurylinkcloud_set': {
MESSAGE: 'This benchmark set is supported on CenturyLink Cloud.',
BENCHMARK_LIST: ['hpcc', 'unixbench', 'sysbench_oltp', 'mongodb_ycsb',
'mesh_network', 'ping', 'iperf', 'redis',
'cassandra_stress', 'copy_throughput']
},
'cisco_set': {
MESSAGE: 'Cisco benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'cloudharmony_set': {
MESSAGE: 'CloudHarmony benchmark set.',
BENCHMARK_LIST: ['speccpu2006', 'unixbench']
},
'cloudspectator_set': {
MESSAGE: 'CloudSpectator benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'ecocloud_epfl_set': {
MESSAGE: 'EcoCloud/EPFL benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'google_set': {
MESSAGE: ('This benchmark set is maintained by Google Cloud Platform '
'Performance Team.'),
BENCHMARK_LIST: [STANDARD_SET, 'oldisim', 'mongodb_ycsb']
},
'intel_set': {
MESSAGE: 'Intel benchmark set.',
BENCHMARK_LIST: ['fio', 'iperf', 'unixbench', 'hpcc',
'cluster_boot', 'redis', 'cassandra_stress',
'object_storage_service', 'sysbench_oltp']
},
'mellanox_set': {
MESSAGE: 'Mellanox benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'microsoft_set': {
MESSAGE: 'Microsoft benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'qualcomm_technologies_set': {
MESSAGE: 'Qualcomm Technologies, Inc. benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'rackspace_set': {
MESSAGE: 'Rackspace benchmark set.',
BENCHMARK_LIST: ['aerospike', 'cassandra_stress', 'cluster_boot',
'copy_throughput', 'fio', 'hpcc', 'iperf',
'mesh_network', 'mongodb_ycsb', 'netperf', 'ping',
'redis', 'block_storage_workload', 'sysbench_oltp',
'unixbench', 'oldisim', 'silo']
},
'red_hat_set': {
MESSAGE: 'Red Hat benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'tradeworx_set': {
MESSAGE: 'Tradeworx Inc. benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'thesys_technologies_set': {
MESSAGE: 'Thesys Technologies LLC. benchmark set.',
BENCHMARK_LIST: [STANDARD_SET]
},
'stanford_set': {
MESSAGE: 'Stanford University benchmark set.',
BENCHMARK_LIST: [STANDARD_SET, 'oldisim']
},
'mit_set': {
MESSAGE: 'Massachusetts Institute of Technology benchmark set.',
BENCHMARK_LIST: [STANDARD_SET, 'silo']
}
}
def GetBenchmarksFromFlags():
"""Returns a list of benchmarks to run based on the benchmarks flag.
If no benchmarks (or sets) are specified, this will return the standard set.
If multiple sets or mixes of sets and benchmarks are specified, this will
return the union of all sets and individual benchmarks.
"""
benchmark_names = set()
for benchmark in FLAGS.benchmarks:
if benchmark in BENCHMARK_SETS:
benchmark_names |= set(BENCHMARK_SETS[benchmark][BENCHMARK_LIST])
else:
benchmark_names.add(benchmark)
# Expand recursive sets
expanded = set()
did_expansion = True
while did_expansion:
did_expansion = False
for benchmark_name in benchmark_names:
if (benchmark_name in BENCHMARK_SETS):
did_expansion = True
benchmark_names.remove(benchmark_name)
if (benchmark_name not in expanded):
expanded.add(benchmark_name)
benchmark_names |= set(BENCHMARK_SETS[
benchmark_name][BENCHMARK_LIST])
break
if FLAGS.os_type == benchmark_spec.WINDOWS:
valid_benchmarks = windows_benchmarks.VALID_BENCHMARKS
else:
valid_benchmarks = benchmarks.VALID_BENCHMARKS
# create a list of modules to return
benchmark_module_list = []
for benchmark_name in benchmark_names:
if benchmark_name in valid_benchmarks:
benchmark_module_list.append(valid_benchmarks[benchmark_name])
else:
raise ValueError('Benchmark "%s" not valid on os_type "%s"' %
(benchmark_name, FLAGS.os_type))
return benchmark_module_list
|
Python
| 0.000002
|
@@ -2796,32 +2796,16 @@
oldisim'
-, 'mongodb_ycsb'
%5D%0A %7D,
|
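
The while loop in GetBenchmarksFromFlags restarts iteration every time it inlines a set, which is what lets sets reference other sets (google_set pulling in STANDARD_SET, for example) without infinite recursion. A toy version of the same expansion, independent of the module above:

def expand(names, sets):
    """Replace set names with their members until a fixed point."""
    names, seen = set(names), set()
    changed = True
    while changed:
        changed = False
        for name in list(names):
            if name in sets:
                names.remove(name)
                if name not in seen:   # guard against cyclic set references
                    seen.add(name)
                    names |= set(sets[name])
                changed = True
                break
    return names

toy = {'standard_set': ['fio', 'iperf'],
       'google_set': ['standard_set', 'oldisim']}
print(sorted(expand(['google_set'], toy)))  # ['fio', 'iperf', 'oldisim']
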
c838bee36ac1e68afd5f00630b98f806289f89c8
|
Update fetch_metrics.py
|
perfmetrics/scripts/fetch_metrics.py
|
perfmetrics/scripts/fetch_metrics.py
|
"""Executes fio_metrics.py and vm_metrics.py by passing appropriate arguments.
"""
import socket
import sys
import time
from fio import fio_metrics
from vm_metrics import vm_metrics
from gsheet import gsheet
INSTANCE = socket.gethostname()
PERIOD = 120
# Google sheet worksheets
FIO_WORKSHEET_NAME = 'fio_metrics'
VM_WORKSHEET_NAME = 'vm_metrics'
if __name__ == '__main__':
argv = sys.argv
if len(argv) != 2:
raise TypeError('Incorrect number of arguments.\n'
'Usage: '
'python3 fetch_metrics.py <fio output json filepath>')
fio_metrics_obj = fio_metrics.FioMetrics()
print('Getting fio metrics...')
temp = fio_metrics_obj.get_metrics(argv[1], FIO_WORKSHEET_NAME)
print('Waiting for 250 seconds for metrics to be updated on VM...')
# It takes up to 240 seconds for sampled data to be visible on the VM metrics graph
# So, waiting for 250 seconds to ensure the returned metrics are not empty
time.sleep(250)
vm_metrics_obj = vm_metrics.VmMetrics()
vm_metrics_data = []
# Getting VM metrics for every job
for ind, job in enumerate(temp):
start_time_sec = job[fio_metrics.consts.START_TIME]
end_time_sec = job[fio_metrics.consts.END_TIME]
rw = job[fio_metrics.consts.PARAMS][fio_metrics.consts.RW]
print(f'Getting VM metrics for job at index {ind+1}...')
metrics_data = vm_metrics_obj.fetch_metrics(start_time_sec, end_time_sec, INSTANCE, PERIOD, rw)
for row in metrics_data:
vm_metrics_data.append(row)
gsheet.write_to_google_sheet(VM_WORKSHEET_NAME, vm_metrics_data)
|
Python
| 0.000001
|
@@ -240,16 +240,20 @@
)%0APERIOD
+_SEC
= 120%0A%0A
@@ -1435,16 +1435,20 @@
, PERIOD
+_SEC
, rw)%0A
|
597ea6bd20c9c1dbca46891d8c2aa12c625da555
|
Fix unit tests
|
Tests/ConsoleWorkerTest.py
|
Tests/ConsoleWorkerTest.py
|
import TankTests
import os
import unittest
from Tank.ConsoleWorker import ConsoleTank
from Tests.TankTests import FakeOptions
from Tank.Plugins.DataUploader import DataUploaderPlugin
from Tests.DataUploaderTest import FakeAPICLient
from Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
from Tests.ConsoleOnlinePluginTest import FakeConsoleMarkup
import time
import logging
class ConsoleWorkerTestCase(TankTests.TankTestCase):
def setUp(self):
self.foo = ConsoleTank(FakeOptions(), None)
self.foo.set_baseconfigs_dir('full')
def tearDown(self):
del self.foo
self.foo = None
def test_perform(self):
self.foo.configure()
uploader = self.foo.core.get_plugin_of_type(DataUploaderPlugin)
uploader.api_client = FakeAPICLient()
uploader.api_client.get_results.append('[{"closed":"", "name": "test task"}]')
uploader.api_client.get_results.append('[{"success":1}]')
uploader.api_client.post_results.append('[{"job":' + str(time.time()) + '}]')
for n in range(1, 120):
uploader.api_client.post_results.append('[{"success":1}]')
console = self.foo.core.get_plugin_of_type(ConsoleOnlinePlugin)
console.console_markup = FakeConsoleMarkup()
if self.foo.perform_test() != 0:
raise RuntimeError()
def test_option_override(self):
options = FakeOptions()
options.config = ["config/old-style.conf"]
options.option = ["owner.address=overridden"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
res = self.foo.core.get_option("owner", "address")
logging.debug(res)
self.assertEquals("overridden", res)
def test_option_old_convert(self):
options = FakeOptions()
options.config = ["data/old_to_migrate.conf"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000005
|
@@ -1,47 +1,4 @@
-import TankTests%0Aimport os%0Aimport unittest%0A
from
@@ -46,41 +46,60 @@
om T
-ests.TankTests import FakeOptions
+ank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
%0Afro
@@ -167,139 +167,126 @@
sts.
-DataUploaderTest import FakeAPICLient%0Afrom Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin%0Afrom Tests.ConsoleOnlinePlugin
+ConsoleOnlinePluginTest import FakeConsoleMarkup%0Afrom Tests.DataUploaderTest import FakeAPICLient%0Afrom Tests.Tank
Test
+s
imp
@@ -293,29 +293,55 @@
ort Fake
-ConsoleMarkup
+Options%0Aimport TankTests%0Aimport logging
%0Aimport
@@ -352,23 +352,24 @@
%0Aimport
-logging
+unittest
%0A%0A%0Aclass
@@ -430,32 +430,88 @@
ef setUp(self):%0A
+ opts = FakeOptions()%0A opts.no_rc = False%0A
self.foo
@@ -525,29 +525,20 @@
oleTank(
-FakeOptions()
+opts
, None)%0A
|
bdbb364c182920e1274d401e6a114267cdd9d0bb
|
Version 0.11.0
|
flask_dance/__init__.py
|
flask_dance/__init__.py
|
# coding=utf-8
from __future__ import unicode_literals
from .consumer import OAuth1ConsumerBlueprint, OAuth2ConsumerBlueprint
__version__ = "0.10.1"
|
Python
| 0.000001
|
@@ -139,13 +139,13 @@
_ = %220.1
-0.1
+1.0
%22%0A
|
8a4d259df272a65f95bacf233dc8654c68f5f54f
|
add identity coordinate mapping to ToUint8 and ToFloat32 augmentors (#339)
|
tensorpack/dataflow/imgaug/convert.py
|
tensorpack/dataflow/imgaug/convert.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: convert.py
from .base import ImageAugmentor
from .meta import MapImage
import numpy as np
import cv2
__all__ = ['ColorSpace', 'Grayscale', 'ToUint8', 'ToFloat32']
class ColorSpace(ImageAugmentor):
""" Convert into another colorspace. """
def __init__(self, mode, keepdims=True):
"""
Args:
mode: opencv colorspace conversion code (e.g., `cv2.COLOR_BGR2HSV`)
keepdims (bool): keep the dimension of image unchanged if opencv
changes it.
"""
self._init(locals())
def _augment(self, img, _):
transf = cv2.cvtColor(img, self.mode)
if self.keepdims:
if len(transf.shape) is not len(img.shape):
transf = transf[..., None]
return transf
class Grayscale(ColorSpace):
""" Convert image to grayscale. """
def __init__(self, keepdims=True, rgb=False):
"""
Args:
keepdims (bool): return image of shape [H, W, 1] instead of [H, W]
rgb (bool): interpret input as RGB instead of the default BGR
"""
mode = cv2.COLOR_RGB2GRAY if rgb else cv2.COLOR_BGR2GRAY
super(Grayscale, self).__init__(mode, keepdims)
class ToUint8(MapImage):
""" Convert image to uint8. Useful to reduce communication overhead. """
def __init__(self):
super(ToUint8, self).__init__(lambda x: np.clip(x, 0, 255).astype(np.uint8))
class ToFloat32(MapImage):
""" Convert image to float32, may increase quality of the augmentor. """
def __init__(self):
super(ToFloat32, self).__init__(lambda x: x.astype(np.float32))
|
Python
| 0
|
@@ -1463,16 +1463,29 @@
p.uint8)
+, lambda x: x
)%0A%0A%0Aclas
@@ -1678,10 +1678,23 @@
float32)
+, lambda x: x
)%0A
|
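
The second argument added to MapImage above is, judging by the shape of the diff, a coordinate-mapping function: a dtype conversion moves no pixels, so keypoint coordinates should pass through untouched. A sketch of the pattern, with the MapImage(img_func, coord_func) signature assumed from the diff:

identity = lambda coords: coords  # coordinates unchanged by value-only augmentors

class ToFloat32(MapImage):
    """ Convert image to float32; geometry is untouched. """
    def __init__(self):
        super(ToFloat32, self).__init__(
            lambda img: img.astype(np.float32),  # pixel transform
            identity)                            # identity coordinate mapping
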
1e7a6b0fbbdb57053d3510b67c95c5d7e2fb6b81
|
Enable to display accuracy graph
|
floppy/report_widget.py
|
floppy/report_widget.py
|
from floppy.train_configuration import TrainParamServer
from PyQt5.QtWidgets import QWidget
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QPainter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QPoint
class ReportWidget(QWidget):
def __init__(self, *args, **kwargs):
super(ReportWidget, self).__init__(height=200, *args, **kwargs)
self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
''')
try:
loss_image = TrainParamServer()['WorkDir'] + "result/loss.png"
except KeyError:
loss_image = "result/loss.png"
self.pixmap = QPixmap(loss_image)
self.resize(200, 200)
def paintEvent(self, event):
self.adjustSize()
size = self.size()
painter = QPainter(self)
point = QPoint(0, 0)
scaled_pix = self.pixmap.scaled(size, Qt.KeepAspectRatio,
transformMode=Qt.SmoothTransformation)
        # start painting the label from the upper-left corner
point.setX((size.width() - scaled_pix.width()) / 2)
point.setY((size.height() - scaled_pix.height()) / 2)
painter.drawPixmap(point, scaled_pix)
|
Python
| 0.000001
|
@@ -86,16 +86,55 @@
QWidget%0A
+from PyQt5.QtWidgets import QTabWidget%0A
from PyQ
@@ -272,16 +272,19 @@
Widget(Q
+Tab
Widget):
@@ -377,17 +377,17 @@
height=2
-0
+1
0, *args
@@ -654,109 +654,825 @@
elf.
-pixmap = QPixmap(loss_image)%0A self.resize(200, 200)%0A%0A def paintEvent(self, event):%0A
+addTab(GraphWidget(loss_image, parent=self), 'Loss')%0A try:%0A acc_image = TrainParamServer()%5B'WorkDir'%5D + %22result/accuracy.png%22%0A except KeyError:%0A acc_image = %22result/accuracy.png%22%0A self.addTab(GraphWidget(acc_image, parent=self), 'Accuracy')%0A self.resize(200, 200)%0A%0A%0Aclass GraphWidget(QWidget):%0A%0A def __init__(self, image_file, *args, **kwargs):%0A super(GraphWidget, self).__init__(height=200)%0A self.setStyleSheet('''ReportWidget%7Bbackground: rgb(55,55,55)%7D%0A ''')%0A self.pixmap = None%0A self.image_file = image_file%0A%0A def paintEvent(self, event):%0A if 'Class' not in TrainParamServer()%5B'TrainMode'%5D:%0A if 'accuracy' in self.image_file:%0A return%0A self.pixmap = QPixmap(self.image_file)%0A #
self
|
ec82c7d7181803f577adb1a697ed53fbc42476ca
|
add goliad health check
|
plugins/bongo/check-goliad-health.py
|
plugins/bongo/check-goliad-health.py
|
#!/usr/bin/env python
from optparse import OptionParser
import socket
import sys
import httplib
import json
PASS = 0
WARNING = 1
FAIL = 2
def get_bongo_host(server, app):
try:
con = httplib.HTTPConnection(server, timeout=45)
con.request("GET","/v2/apps/" + app)
data = con.getresponse()
if data.status >= 300:
print "get_bongo_host: Recieved non-2xx response= %s" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
host = json_data['app']['tasks'][0]['host']
port = json_data['app']['tasks'][0]['ports'][0]
con.close()
return host, port
except Exception, e:
print "%s Exception caught in get_bongo_host" % (e)
sys.exit(FAIL)
def get_status(host, group):
try:
con = httplib.HTTPConnection(host,timeout=45)
con.request("GET","/v1/health/betty/" + group)
data = con.getresponse()
if data.status >= 300:
print "Recieved non-2xx response= %s in get_status" % (data.status)
sys.exit(FAIL)
json_data = json.loads(data.read())
con.close()
if json_data['status'] == 2:
print "%s" % (json_data['msg'])
sys.exit(FAIL)
elif json_data['status'] == 1:
print "%s" % (json_data['msg'])
sys.exit(WARNING)
else:
print " `%s` is fine" %group
sys.exit(PASS)
except Exception, e:
print "%s Exception caught in get_status" % (e)
sys.exit(FAIL)
if __name__=="__main__":
parser = OptionParser()
parser.add_option("-s", dest="server", action="store", default="localhost:8080", help="Marathon Cluster address with port no")
parser.add_option("-a", dest="app", action="store", default="bongo.useast.prod", help="App Id to retrieve the slave address")
parser.add_option("-c", dest="group", action="store", default="betty.useast.prod", help="Name of betty Consumer Group")
(options, args) = parser.parse_args()
host, port = get_bongo_host(options.server, options.app)
if "useast" in host:
host = host.rsplit("prd",1)
consul_host = "%snode.us-east-1.consul:%s" % (host[0], port)
else:
consul_host = "%s:%s" % (host, port)
get_status(consul_host, options.group)
|
Python
| 0
|
@@ -888,21 +888,22 @@
/health/
-betty
+goliad
/%22 + gro
@@ -1931,21 +1931,22 @@
efault=%22
-betty
+goliad
.useast.
@@ -1970,13 +1970,14 @@
of
-betty
+goliad
Con
|
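
One subtlety in the __main__ block above: for useast hosts the hostname is split on 'prd' and rebuilt as a Consul DNS name. A toy walkthrough of that rewrite (the hostname is invented):

host, port = 'bongo-prd-1a', 3000
parts = host.rsplit('prd', 1)          # ['bongo-', '-1a']
consul_host = '%snode.us-east-1.consul:%s' % (parts[0], port)
print(consul_host)                     # bongo-node.us-east-1.consul:3000
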
335abb7a4ddeabf9175b522d9336b94b7e32acc0
|
Fix incorrect FAIL data.
|
test/broker/01-connect-anon-denied.py
|
test/broker/01-connect-anon-denied.py
|
#!/usr/bin/python
# Test whether an anonymous connection is correctly denied.
import subprocess
import socket
import time
from struct import *
rc = 1
keepalive = 10
connect_packet = pack('!BBH6sBBHH17s', 16, 12+2+17,6,"MQIsdp",3,2,keepalive,17,"connect-anon-test")
connack_packet = pack('!BBBB', 32, 2, 0, 5);
broker = subprocess.Popen(['../../src/mosquitto', '-c', '01-connect-anon-denied.conf'], stderr=subprocess.PIPE)
try:
time.sleep(0.1)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1888))
sock.send(connect_packet)
connack_recvd = sock.recv(256)
sock.close()
if connack_recvd != connack_packet:
(cmd, rl, resv, rc) = unpack('!BBBB', connack_recvd)
print("FAIL: Expected 32,2,0,0 got " + str(cmd) + "," + str(rl) + "," + str(resv) + "," + str(rc))
else:
rc = 0
finally:
broker.terminate()
exit(rc)
|
Python
| 0.002419
|
@@ -733,17 +733,17 @@
32,2,0,
-0
+5
got %22 +
|
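
On the corrected expectation above: pack('!BBBB', 32, 2, 0, 5) is the MQTT v3.1 CONNACK a broker should return when it refuses an anonymous connection — byte 0x20 (CONNACK type), remaining length 2, flags 0, and return code 5, "connection refused: not authorized". Decoding it back:

from struct import unpack

connack = b'\x20\x02\x00\x05'  # same bytes as pack('!BBBB', 32, 2, 0, 5)
cmd, remaining_len, flags, return_code = unpack('!BBBB', connack)
assert cmd == 0x20        # CONNACK packet type
assert return_code == 5   # 5 = not authorized
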
19b6207f6ec2cefa28e79fb10639d1d1f5602d2c
|
clean up test
|
app/app/tests.py
|
app/app/tests.py
|
import unittest
import transaction
import os
import app
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from pyramid import testing
from .models import DBSession
DEFAULT_WAIT = 5
SCREEN_DUMP_LOCATION = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'screendumps'
)
class TestMyViewSuccessCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_passing_view(self):
pass
class TestMyViewFailureCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_failing_view(self):
pass
class FunctionalTest(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(DEFAULT_WAIT)
def tearDown(self):
DBSession.remove()
testing.tearDown()
self.browser.quit()
class HomePageTest(FunctionalTest):
def map_move(self, key_move, repeat=1, sleep_time=.5):
"""Move the map with a repeat and sleep"""
map_ = self.browser.find_element_by_id("map")
key_moves = {
'zoom_in': self.browser.find_element_by_class_name("leaflet-control-zoom-in").click(),
'zoom_out': self.browser.find_element_by_class_name("leaflet-control-zoom-out").click(),
'arrow_down': map_.send_keys(Keys.ARROW_DOWN),
'arrow_right': map_.send_keys(Keys.ARROW_RIGHT),
'arrow_left': map_.send_keys(Keys.ARROW_LEFT),
'arrow_up': map_.send_keys(Keys.ARROW_UP),
}
for _ in range(repeat):
key_moves[key_move]
sleep(sleep_time)
def test_home_page_loads(self):
#Billy sees the landsat.club homepage and rejoices.
self.browser.get('localhost:8000')
self.map_move('zoom_in', repeat=5)
self.map_move('arrow_right', repeat=5, sleep_time=.75)
#zoom_in.click()
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.5)
#zoom_in.click()
#sleep(.75)
#zoom_in.click()
#sleep(5)
#self.browser.find_element_by_class_name('leaflet-control-mapbox-geocoder-toggle').click()
#self.browser.find_element_by_xpath('//*[@id="map"]/div[2]/div[1]/div[2]/div[2]/form/input').send_keys('10010', Keys.RETURN)
#
#
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#sleep(.75)
#zoom_out.click()
#
#sleep(600)
#
|
Python
| 0.000001
|
@@ -1363,16 +1363,17 @@
om_in':
+'
self.bro
@@ -1430,32 +1430,33 @@
oom-in%22).click()
+'
,%0A 'z
@@ -1465,16 +1465,17 @@
m_out':
+'
self.bro
@@ -1541,16 +1541,17 @@
.click()
+'
,%0A
@@ -1570,16 +1570,17 @@
_down':
+'
map_.sen
@@ -1602,16 +1602,17 @@
OW_DOWN)
+'
,%0A
@@ -1628,24 +1628,25 @@
row_right':
+'
map_.send_ke
@@ -1665,16 +1665,17 @@
W_RIGHT)
+'
,%0A
@@ -1694,16 +1694,17 @@
_left':
+'
map_.sen
@@ -1726,16 +1726,17 @@
OW_LEFT)
+'
,%0A
@@ -1753,16 +1753,17 @@
ow_up':
+'
map_.sen
@@ -1783,16 +1783,17 @@
RROW_UP)
+'
,%0A
@@ -1829,16 +1829,112 @@
epeat):%0A
+ %0A import pdb; pdb.set_trace()%0A %0A %0A %0A
@@ -2156,18 +2156,19 @@
e('zoom_
-in
+out
', repea
@@ -2170,16 +2170,30 @@
repeat=5
+, sleep_time=1
)%0A
|
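
The real trouble the diff above dances around: every value in key_moves is a call expression, so all the moves execute once while the dict is being built, not when one is looked up. Quoting the values as strings (as the diff does) stops the eager calls but turns the lookup into a no-op; storing callables would be the conventional fix — a sketch with placeholder element handles:

# zoom_button and map_el stand in for the elements looked up in map_move.
key_moves = {
    'zoom_in': lambda: zoom_button.click(),
    'arrow_up': lambda: map_el.send_keys(Keys.ARROW_UP),
}
for _ in range(repeat):
    key_moves[key_move]()  # invoked here, once per repeat
    sleep(sleep_time)
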
e7b53fcaaaed7f59534775e2e35f75a562354e79
|
Add a UTC timezone to the TIMESTAMPs retrieved from MySQL, in addition to the UNIX epochs we synthesise.
|
app/dbqueries.py
|
app/dbqueries.py
|
from datetime import datetime, tzinfo, timedelta
from MySQLdb import escape_string
from common.dbconn import ResultSet, ResultSetSlice
class UTC(tzinfo):
"""
A timezone representing Coordinated Universal Time (UTC).
"""
def utcoffset(self, dt):
"""
UTC is always 0 offset from UTC.
"""
return timedelta(0)
def dst(self, dt):
"""
UTC never has a Daylight Savings Time offset.
"""
return timedelta(0)
def tzname(self, dt):
return "UTC"
class DBQueries(object):
"""
Holds a set of canned database queries.
"""
UPDATE_PROC_SUFFIX = '_update'
QUERY_SHOW_TABLES = 'SHOW TABLES;'
METADATA_TABLE = 'metadata'
METADATA_LAST_UPDATE_COLUMN = 'last_update'
METADATA_TABLE_NAME_COLUMN = 'table_name'
@classmethod
def _get_tables_comments(cls, dbconn, dbname, table_names):
"""
Return an iterator over the SQL92 table comments for the given tables.
"""
# In this query, schema and table names are literals, so can be parameters
query = "SELECT table_comment FROM information_schema.tables WHERE table_schema=%s AND table_name IN (" + ",".join([ '%s' ] * len(table_names)) + ");"
parameters = [ dbname ]
parameters.extend(table_names)
cursor = dbconn.execute(query, False, parameters)
return ResultSetSlice(cursor, 0)
@classmethod
def _get_table_comment(cls, dbconn, dbname, table_name):
"""
Obtain a single table's SQL92 table comment.
"""
comments = cls._get_tables_comments(dbconn, dbname, [ table_name ])
return iter(comments).next()
@classmethod
def _get_table_lastupdates(cls, dbconn, table_names):
"""
Return an iterator over the last update times for the given tables.
This is looked for in an optional table named 'metadata'.
FIXME: Remove this knowledge about the underlying schema.
"""
# In this query, table names are literals, so can be parameters
query = "SELECT " + escape_string(cls.METADATA_LAST_UPDATE_COLUMN) + " FROM " + escape_string(cls.METADATA_TABLE) + " WHERE " + escape_string(cls.METADATA_TABLE_NAME_COLUMN) + " IN (" + ",".join([ '%s' ] * len(table_names)) + ");"
cursor = dbconn.execute(query, False, table_names)
return ResultSetSlice(cursor, 0)
@classmethod
def _get_table_lastupdate(cls, dbconn, table_name):
"""
Obtain a single table's last update time.
"""
rows = cls._get_table_lastupdates(dbconn, [ table_name ])
try:
row = iter(rows).next()
except StopIteration:
"""
Despite the name, utcfromtimestamp returns a 'naive'
datetime lacking any timezone, UTC or otherwise.
"""
row = datetime.utcfromtimestamp(0).replace(tzinfo = UTC())
return row
@classmethod
def _get_table_list(cls, dbconn):
"""
Return an iterator over names of available tables.
"""
query = cls.QUERY_SHOW_TABLES
cursor = dbconn.execute(query, False)
return ResultSetSlice(cursor, 0)
@classmethod
def _make_update_proc_name(cls, table_name):
# FIXME: Remove this knowledge about the underlying schema.
return table_name + cls.UPDATE_PROC_SUFFIX
@classmethod
def _update_table(cls, dbconn, table_name):
"""
Call a stored procedure to update the given-named table.
"""
try:
# Stored procedure names cannot be parameters, so must be escaped
cursor = dbconn.callproc(escape_string(cls._make_update_proc_name(table_name)), True)
cursor.fetchall()
except:
# Can't refresh the report. Degrade gracefully by serving old data.
pass
@classmethod
def _filter_table(cls, dbconn, table_name, filter_args):
"""
Return an iterator over the records in a resultset
selecting all columns from the given-named table.
The filter_args are ANDed together then used as a WHERE criterion.
"""
DBQueries._update_table(dbconn, table_name)
# Table names cannot be parameters, so must be escaped
query = 'SELECT * FROM ' + escape_string(table_name)
parameters = []
if filter_args:
query += ' WHERE '
criteria = []
for (key, val) in filter_args.items():
                # Column names cannot be parameters, so must be escaped
criteria.append(escape_string(key) + "=%s")
# Filter values can be parameters
parameters.append(val[0])
query += ' AND '.join(criteria)
query += ';'
cursor = dbconn.execute(query, True, parameters)
return ResultSet(cursor)
|
Python
| 0
|
@@ -2898,16 +2898,35 @@
stamp(0)
+%0A return row
.replace
@@ -2941,35 +2941,16 @@
= UTC())
-%0A return row
%0A%0A @c
|
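
The diff above moves .replace(tzinfo = UTC()) onto the shared return, so timestamps read from MySQL gain the UTC tzinfo as well as the synthesised epoch — both start out naive. A quick illustration, reusing the UTC class from the top of this file:

from datetime import datetime, timedelta

epoch = datetime.utcfromtimestamp(0)   # naive: tzinfo is None
aware = epoch.replace(tzinfo=UTC())    # aware: safe to compare with other
                                       # UTC-aware datetimes
assert epoch.tzinfo is None
assert aware.utcoffset() == timedelta(0)
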
3e5641470afd3c1aea6273c47bc6fb9c326a8d79
|
Fix srv.bin to work -- listens on 0.0.0.0
|
bin/srv.py
|
bin/srv.py
|
#!/usr/bin/env python
# vim: set encoding=utf-8
"""
Main server program.
"""
from __future__ import print_function
from gevent.wsgi import WSGIServer
from gevent.monkey import patch_all
patch_all()
# pylint: disable=wrong-import-position,wrong-import-order
import sys
import logging
import os
import requests
import jinja2
from flask import Flask, request, send_from_directory, redirect, Response
MYDIR = os.path.abspath(os.path.join(__file__, '..', '..'))
sys.path.append("%s/lib/" % MYDIR)
from globals import FILE_QUERIES_LOG, LOG_FILE, TEMPLATES, STATIC, MALFORMED_RESPONSE_HTML_PAGE
from limits import Limits
from cheat_wrapper import cheat_wrapper
from post import process_post_request
from options import parse_args
from stateful_queries import save_query, last_query
# pylint: disable=wrong-import-position,wrong-import-order
if not os.path.exists(os.path.dirname(LOG_FILE)):
os.makedirs(os.path.dirname(LOG_FILE))
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s %(message)s')
app = Flask(__name__) # pylint: disable=invalid-name
app.jinja_loader = jinja2.ChoiceLoader([
app.jinja_loader,
jinja2.FileSystemLoader(TEMPLATES),
])
LIMITS = Limits()
def is_html_needed(user_agent):
"""
    Based on `user_agent`, return whether the client needs HTML or ANSI
"""
plaintext_clients = ['curl', 'wget', 'fetch', 'httpie', 'lwp-request', 'python-requests']
return all([x not in user_agent for x in plaintext_clients])
def is_result_a_script(query):
return query in [':cht.sh']
@app.route('/files/<path:path>')
def send_static(path):
"""
Return static file `path`.
Can be served by the HTTP frontend.
"""
return send_from_directory(STATIC, path)
@app.route('/favicon.ico')
def send_favicon():
"""
Return static file `favicon.ico`.
Can be served by the HTTP frontend.
"""
return send_from_directory(STATIC, 'favicon.ico')
@app.route('/malformed-response.html')
def send_malformed():
"""
Return static file `malformed-response.html`.
Can be served by the HTTP frontend.
"""
return send_from_directory(STATIC, 'malformed-response.html')
def log_query(ip_addr, found, topic, user_agent):
"""
Log processed query and some internal data
"""
log_entry = "%s %s %s %s" % (ip_addr, found, topic, user_agent)
with open(FILE_QUERIES_LOG, 'a') as my_file:
my_file.write(log_entry.encode('utf-8')+"\n")
def get_request_ip(req):
"""
Extract IP address from `request`
"""
if req.headers.getlist("X-Forwarded-For"):
ip_addr = req.headers.getlist("X-Forwarded-For")[0]
if ip_addr.startswith('::ffff:'):
ip_addr = ip_addr[7:]
else:
ip_addr = req.remote_addr
return ip_addr
def _proxy(*args, **kwargs):
# print "method=", request.method,
# print "url=", request.url.replace('/:shell-x/', ':3000/')
# print "headers=", {key: value for (key, value) in request.headers if key != 'Host'}
# print "data=", request.get_data()
# print "cookies=", request.cookies
# print "allow_redirects=", False
url_before, url_after = request.url.split('/:shell-x/', 1)
url = url_before + ':3000/'
if 'q' in request.args:
url_after = '?' + "&".join("arg=%s" % x for x in request.args['q'].split())
url += url_after
print(url)
print(request.get_data())
resp = requests.request(
method=request.method,
url=url,
headers={key: value for (key, value) in request.headers if key != 'Host'},
data=request.get_data(),
cookies=request.cookies,
allow_redirects=False)
excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
headers = [(name, value) for (name, value) in resp.raw.headers.items()
if name.lower() not in excluded_headers]
response = Response(resp.content, resp.status_code, headers)
return response
@app.route("/", methods=['GET', 'POST'])
@app.route("/<path:topic>", methods=["GET", "POST"])
def answer(topic=None):
"""
    Main rendering function; it processes incoming queries.
Depending on user agent it returns output in HTML or ANSI format.
Incoming data:
request.args
request.headers
request.remote_addr
request.referrer
request.query_string
"""
user_agent = request.headers.get('User-Agent', '').lower()
html_needed = is_html_needed(user_agent)
options = parse_args(request.args)
if topic in ['apple-touch-icon-precomposed.png', 'apple-touch-icon.png', 'apple-touch-icon-120x120-precomposed.png']:
return ''
request_id = request.cookies.get('id')
if topic is not None and topic.lstrip('/') == ':last':
if request_id:
topic = last_query(request_id)
else:
return "ERROR: you have to set id for your requests to use /:last\n"
else:
if request_id:
save_query(request_id, topic)
if request.method == 'POST':
process_post_request(request, html_needed)
if html_needed:
return redirect("/")
return "OK\n"
if 'topic' in request.args:
return redirect("/%s" % request.args.get('topic'))
if topic is None:
topic = ":firstpage"
if topic.startswith(':shell-x/'):
return _proxy()
#return requests.get('http://127.0.0.1:3000'+topic[8:]).text
ip_address = get_request_ip(request)
if '+' in topic:
not_allowed = LIMITS.check_ip(ip_address)
if not_allowed:
return "429 %s\n" % not_allowed, 429
html_is_needed = is_html_needed(user_agent) and not is_result_a_script(topic)
result, found = cheat_wrapper(topic, request_options=options, html=html_is_needed)
if 'Please come back in several hours' in result and html_is_needed:
return MALFORMED_RESPONSE_HTML_PAGE
log_query(ip_address, found, topic, user_agent)
if html_is_needed:
return result
return Response(result, mimetype='text/plain')
SRV = WSGIServer(("", 8002), app) # log=None)
SRV.serve_forever()
|
Python
| 0
|
@@ -122,16 +122,18 @@
gevent.
+py
wsgi imp
@@ -6321,16 +6321,23 @@
erver((%22
+0.0.0.0
%22, 8002)
|
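
The diff above makes two changes: WSGIServer is imported from gevent.pywsgi (the old gevent.wsgi module was deprecated and later removed), and the listener binds to "0.0.0.0" so connections from other hosts are accepted. A standalone sketch of the fixed pattern with a trivial WSGI handler; the port and handler are illustrative, not the project's code.

from gevent.pywsgi import WSGIServer  # gevent.wsgi is gone in modern gevent

def app(environ, start_response):
    # Trivial WSGI application standing in for the real Flask app.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello\n"]

# "" and "0.0.0.0" both bind all interfaces; the fix just makes that explicit.
WSGIServer(("0.0.0.0", 8002), app).serve_forever()
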
9de0a05d28c83742224c0e708e80b8add198a8a8
|
Add user data export for comments
|
froide/comments/apps.py
|
froide/comments/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class CommentConfig(AppConfig):
name = 'froide.comments'
verbose_name = _('Comments')
def ready(self):
from froide.account import account_canceled
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import FroideComment
if user is None:
return
FroideComment.objects.filter(user=user).update(
user_name='',
user_email='',
user_url=''
)
|
Python
| 0
|
@@ -1,20 +1,33 @@
+import json%0A%0A
from django.apps imp
@@ -265,16 +265,67 @@
canceled
+%0A from froide.account.export import registry
%0A%0A
@@ -364,16 +364,60 @@
l_user)%0A
+ registry.register(export_user_data)%0A
%0A%0Adef ca
@@ -624,24 +624,24 @@
r_email='',%0A
-
user
@@ -654,8 +654,545 @@
'%0A )%0A
+%0A%0Adef export_user_data(user):%0A from .models import FroideComment%0A%0A comments = FroideComment.objects.filter(user=user)%0A if not comments:%0A return%0A yield ('comments.json', json.dumps(%5B%0A %7B%0A 'submit_date': (%0A c.submit_date.isoformat() if c.submit_date else None%0A ),%0A 'comment': c.comment,%0A 'is_public': c.is_public,%0A 'is_removed': c.is_removed,%0A 'url': c.get_absolute_url(),%0A %7D%0A for c in comments%5D).encode('utf-8')%0A )%0A
|
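
The export_user_data function added by this diff is a generator yielding (filename, bytes) pairs, registered alongside the cancel handler. Below is a sketch of how a consumer might collect such exporters into a single archive; the list-based registry and stand-in exporter are hypothetical, not froide's actual export API.

import io
import json
import zipfile

def comments_exporter(user):
    # Stand-in exporter mirroring the (filename, bytes) shape from the diff.
    yield ("comments.json", json.dumps([{"comment": "hello"}]).encode("utf-8"))

registry = [comments_exporter]  # hypothetical registry: a plain list here

def build_export(user):
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as archive:
        for exporter in registry:
            for filename, payload in exporter(user):
                archive.writestr(filename, payload)
    return buf.getvalue()

print(len(build_export(user=None)) > 0)  # the stand-in ignores `user`
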
4a201a37318d5eea1e50e0619580a23f37e2e7da
|
Fix path for boringssl
|
libwebsockets.gyp
|
libwebsockets.gyp
|
{
'targets': [
{
'target_name': 'libwebsockets',
'type': 'static_library',
'standalone_static_library': 1,
'sources': [
'lib/base64-decode.c',
'lib/handshake.c',
'lib/libwebsockets.c',
'lib/service.c',
'lib/pollfd.c',
'lib/output.c',
'lib/parsers.c',
'lib/context.c',
'lib/sha-1.c',
'lib/alloc.c',
'lib/header.c',
'lib/client.c',
'lib/client-handshake.c',
'lib/client-parser.c',
'lib/ssl.c',
'lib/server.c',
'lib/server-handshake.c',
'lib/extension.c',
'lib/extension-deflate-frame.c',
'lib/extension-deflate-stream.c',
],
'dependencies': [
'<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
'<(peeracle_webrtc_root)/chromium/src/third_party/boringssl/boringssl.gyp:boringssl'
],
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
'direct_dependent_settings': {
'include_dirs': [
'config/<(OS)/<(target_arch)',
],
},
'conditions': [
['OS == "win"', {
'sources': [
'lib/lws-plat-win.c',
],
'msvs_settings': {
'VCCLCompilerTool': {
'WarnAsError': 'true',
'DisableSpecificWarnings': ['4018']
}
}
}, {
'sources': [
'lib/lws-plat-unix.c',
],
}],
],
},
],
}
|
Python
| 0.000013
|
@@ -824,21 +824,8 @@
ot)/
-chromium/src/
thir
|
eed4faf3bfe670421e7dc9c3065adbfceef0d2b6
|
fix test for heapify
|
linear_heapify.py
|
linear_heapify.py
|
# Building a heap in O(n) time and O(1) additional space. Inspired by https://www.youtube.com/watch?v=MiyLo8adrWw
def heapify(a):
for i in range(len(a) // 2, -1, -1):
parent = i
while True:
candidates = [parent, 2 * parent + 1, 2 * parent + 2]
candidates = [e for e in candidates if e < len(a)]
largest = max(candidates, key=lambda e: a[e])
if largest == parent:
break
else:
a[parent], a[largest], parent = a[largest], a[parent], largest
# Test
arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
print(heapify(arr))
|
Python
| 0.000005
|
@@ -634,14 +634,8 @@
19%5D%0A
-print(
heap
@@ -642,10 +642,20 @@
ify(arr)
+%0Aprint(arr
)%0A
|
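
The diff above fixes the test because heapify mutates its argument in place and returns None, so print(heapify(arr)) printed "None". A self-contained check of the heap property after the in-place call, repeating the record's heapify so the snippet runs on its own; the verification helper is an illustrative addition.

def heapify(a):
    # Sift each internal node down, starting from the last parent.
    for i in range(len(a) // 2, -1, -1):
        parent = i
        while True:
            candidates = [parent, 2 * parent + 1, 2 * parent + 2]
            candidates = [e for e in candidates if e < len(a)]
            largest = max(candidates, key=lambda e: a[e])
            if largest == parent:
                break
            a[parent], a[largest], parent = a[largest], a[parent], largest

def is_max_heap(a):
    # Every parent must be >= both of its children.
    return all(a[i] >= a[c]
               for i in range(len(a))
               for c in (2 * i + 1, 2 * i + 2) if c < len(a))

arr = list(range(20))
heapify(arr)                   # in place; returns None
print(arr, is_max_heap(arr))   # a permutation of 0..19 satisfying the heap property
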
b113cf82004b608b371d1a249801340f57195587
|
add __str__.
|
linguist/cache.py
|
linguist/cache.py
|
# -*- coding: utf-8 -*-
class CachedTranslation(object):
def __init__(self, **kwargs):
from .models import Translation
self.instances = ['instance', 'translation']
self.fields = Translation._meta.get_all_field_names()
self.fields.remove('id')
attrs = self.fields + self.instances
for attr in attrs:
setattr(self, attr, None)
self.__dict__.update(**kwargs)
self.is_new = True
if self.instance is not None:
self.identifier = self.instance.linguist_identifier
self.object_id = self.instance.pk
if self.translation is not None:
self.is_new = bool(self.translation.pk is None)
for attr in ('language', 'field_name', 'field_value'):
setattr(self, attr, getattr(self.translation, attr))
@property
def attrs(self):
"""
Returns Translation attributes to pass as kwargs for creating or updating objects.
"""
return dict((k, getattr(self, k)) for k in self.fields)
@property
def lookup(self):
"""
Returns Translation lookup to use for filter method.
"""
lookup = {'identifier': self.identifier,
'object_id': self.object_id}
if self.language is not None:
lookup['language'] = self.language
return lookup
@classmethod
def from_object(cls, obj):
"""
Updates values from the given object.
"""
from .models import Translation
fields = Translation._meta.get_all_field_names()
fields.remove('id')
return cls(**dict((field, getattr(obj, field)) for field in fields))
|
Python
| 0.000011
|
@@ -17,17 +17,108 @@
f-8 -*-%0A
-%0A
+from django.utils.encoding import python_2_unicode_compatible%0A%0A%0A@python_2_unicode_compatible
%0Aclass C
@@ -1800,8 +1800,178 @@
ields))%0A
+%0A def __str__(self):%0A return '%25s:%25s:%25s:%25s' %25 (%0A self.identifier,%0A self.object_id,%0A self.field_name,%0A self.language)%0A
|
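
The decorator added in this diff, django.utils.encoding.python_2_unicode_compatible, lets a class define its text form once in __str__; on Python 2 it renames that method to __unicode__ and synthesizes a bytes-returning __str__. A toy illustration, assuming a Django version that still ships the decorator (it was removed in Django 3.0); the class below is hypothetical, not linguist's.

from django.utils.encoding import python_2_unicode_compatible

@python_2_unicode_compatible
class CachedTag(object):  # hypothetical class for demonstration
    def __init__(self, identifier, language):
        self.identifier = identifier
        self.language = language

    def __str__(self):
        # Single definition of the text form, adapted per Python version.
        return '%s:%s' % (self.identifier, self.language)

print(CachedTag('news', 'fr'))  # -> news:fr
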
2466ca9839aaf1b5cfe98312c015a2defea71971
|
to 0.1.0
|
loris/__init__.py
|
loris/__init__.py
|
# __init__.py
__version__ = '0.1.0dev'
|
Python
| 0.999999
|
@@ -31,9 +31,6 @@
.1.0
-dev
'%0A
|
fc059403c95dd3180f9e51f0d48be80a84504301
|
Add reader from documentation for csv files
|
mailbox_filter.py
|
mailbox_filter.py
|
#!/usr/bin/python
# Arguments:
# <sample_mbox.csv> <filter_out.csv> <phrases.csv> <file_to_write_occurances_of_phrases.csv> (by default "phrase_occurances.csv")
import pandas as pd
import sys
import re
from collections import defaultdict
def search_for_phrases_in(questions, phrases):
phrases_count = defaultdict(lambda: 0)
for client_email, client_questions in questions.iteritems():
for question, amount in client_questions.items():
for phrase in phrases:
for m in re.finditer(phrase, question):
phrases_count[phrase] += amount
return phrases_count
def search_for_questions(data, filter_out_emails, start_questions = ["How ", "Why ", "What ", "Where "]):
questions = defaultdict(lambda: dict())
for index in range(data.shape[0]):
body = str(data.iloc[index].values[-1])
for email in filter_out_emails:
email = str(email).strip()
if email[0:2] == "*@":
email = email[1:]
if len(email) > len(body):
break
for m in re.finditer(email, body):
body = body[:m.start()]
break
message_from = data.iloc[index].values[1]
for m in re.finditer("<.+@.+>", message_from):
message_sender = message_from[m.start() + 1: m.end() - 1]
break
for start_question in start_questions:
for m in re.finditer(start_question + ".+\?", str(body)):
questions[message_sender][body[m.start():m.end()]] = 1
with open("Question.csv", "wb") as question_out:
for key, value in questions.iteritems():
question_out.write(key + "\r\n")
for k in value.keys():
question_out.write(k + "\r\n")
return questions
def write_questions(questions):
with open("FAQ.csv", "wb") as write_questions:
for client_mail, client_questions in questions.iteritems():
[write_questions.write(question + "\r\n") for question in client_questions.keys()]
return None
def filter_data_by(filter_out_emails, data):
for email in filter_out_emails:
if email[0:2] == "*@":
email = str(email[1:]).strip()
data = data[~data["from"].str.contains(email)]
return data
def main():
if len(sys.argv) > 2:
data = pd.read_csv(sys.argv[1], sep = ",", header = None)
data.columns = ["subject", "from", "date", "body"]
with open(sys.argv[2], "r") as filter_file:
filter_out_emails = [line.strip() for line in filter_file]
data = filter_data_by(filter_out_emails, data)
questions = search_for_questions(data, filter_out_emails)
write_questions(questions)
if len(sys.argv) > 3:
phrases = defaultdict(lambda: 0)
with open(sys.argv[3], "r") as phrases_file:
phrases = [line.strip() for line in phrases_file]
phrases = search_for_phrases_in(questions, phrases)
phrases_frame = pd.DataFrame(dict(phrases).items(), columns = ["phrase", "occurance"])
file_name = "phrase_occurances.csv"
if len(sys.argv) > 4:
file_name = sys.argv[4]
phrases_frame.sort(["phrase", "occurance"], ascending = [1, 1], inplace = True)
phrases_frame.to_csv(file_name, sep=",", index = False, header = True)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -232,16 +232,27 @@
aultdict
+%0Aimport csv
%0A%0Adef se
@@ -2185,24 +2185,25 @@
.argv%5B2%5D, %22r
+b
%22) as filter
@@ -2209,16 +2209,69 @@
r_file:%0A
+%09%09%09reader = csv.reader(filter_file, delimiter = ',')%0A
%09%09%09filte
@@ -2282,32 +2282,35 @@
t_emails = %5Bline
+%5B0%5D
.strip() for lin
@@ -2314,27 +2314,22 @@
line in
-filter_file
+reader
%5D%0A%09%09%0A%09%09d
@@ -2552,16 +2552,17 @@
v%5B3%5D, %22r
+b
%22) as ph
@@ -2573,16 +2573,71 @@
s_file:%0A
+%09%09%09%09reader = csv.reader(phrases_file, delimiter = ',')%0A
%09%09%09%09phra
@@ -2647,16 +2647,19 @@
= %5Bline
+%5B0%5D
.strip()
@@ -2671,28 +2671,22 @@
line in
-phrases_file
+reader
%5D%0A%09%09%09phr
|
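
The diff above swaps naive per-line iteration for csv.reader, so quoted fields containing commas parse correctly, and reopens the files in binary mode as Python 2's csv module required. A small contrast on made-up data, written in Python 3 for brevity:

import csv
import io

sample = '"Smith, Jane" <jane@example.com>,weekly\nbob@example.com,daily\n'

# Naive: splitting on "," breaks on the quoted field containing a comma.
naive = [line.split(",")[0] for line in io.StringIO(sample)]

# csv.reader honours quoting, so each row's first field stays intact.
parsed = [row[0] for row in csv.reader(io.StringIO(sample))]

print(naive)   # ['"Smith', 'bob@example.com']
print(parsed)  # ['Smith, Jane <jane@example.com>', 'bob@example.com']
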
615fd3b0b7d67905cf4568971602c6dfd7c5eff3
|
Remove outdated code from `fs.sshfs.error_tools`
|
fs/sshfs/error_tools.py
|
fs/sshfs/error_tools.py
|
# coding: utf-8
"""Utils to work with `paramiko` errors.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import errno
import sys
import six
from .. import errors
class _ConvertSSHFSErrors(object):
"""Context manager to convert OSErrors in to FS Errors."""
FILE_ERRORS = {
64: errors.RemoteConnectionError, # ENONET
errno.ENOENT: errors.ResourceNotFound,
errno.EFAULT: errors.ResourceNotFound,
errno.ESRCH: errors.ResourceNotFound,
errno.ENOTEMPTY: errors.DirectoryNotEmpty,
errno.EEXIST: errors.FileExists,
183: errors.DirectoryExists,
#errno.ENOTDIR: errors.DirectoryExpected,
errno.ENOTDIR: errors.ResourceNotFound,
errno.EISDIR: errors.FileExpected,
errno.EINVAL: errors.FileExpected,
errno.ENOSPC: errors.InsufficientStorage,
errno.EPERM: errors.PermissionDenied,
errno.EACCES: errors.PermissionDenied,
errno.ENETDOWN: errors.RemoteConnectionError,
errno.ECONNRESET: errors.RemoteConnectionError,
errno.ENAMETOOLONG: errors.PathError,
errno.EOPNOTSUPP: errors.Unsupported,
errno.ENOSYS: errors.Unsupported,
}
#
DIR_ERRORS = FILE_ERRORS.copy()
DIR_ERRORS[errno.ENOTDIR] = errors.DirectoryExpected
DIR_ERRORS[errno.EEXIST] = errors.DirectoryExists
DIR_ERRORS[errno.EINVAL] = errors.DirectoryExpected
# if _WINDOWS_PLATFORM: # pragma: no cover
# DIR_ERRORS[13] = errors.DirectoryExpected
# DIR_ERRORS[267] = errors.DirectoryExpected
# FILE_ERRORS[13] = errors.FileExpected
def __init__(self, opname, path, directory=False):
self._opname = opname
self._path = path
self._directory = directory
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
ssh_errors = (
self.DIR_ERRORS
if self._directory
else self.FILE_ERRORS
)
if exc_type and isinstance(exc_value, EnvironmentError):
_errno = exc_value.errno
fserror = ssh_errors.get(_errno, errors.OperationFailed)
if _errno == errno.EACCES and sys.platform == "win32":
if getattr(exc_value, 'args', None) == 32: # pragma: no cover
fserror = errors.ResourceLocked
six.reraise(
fserror,
fserror(
self._path,
exc=exc_value
),
traceback
)
# Stops linter complaining about invalid class name
convert_sshfs_errors = _ConvertSSHFSErrors
|
Python
| 0.000002
|
@@ -1426,210 +1426,8 @@
ed%0A%0A
- # if _WINDOWS_PLATFORM: # pragma: no cover%0A # DIR_ERRORS%5B13%5D = errors.DirectoryExpected%0A # DIR_ERRORS%5B267%5D = errors.DirectoryExpected%0A # FILE_ERRORS%5B13%5D = errors.FileExpected%0A%0A
@@ -1974,206 +1974,8 @@
ed)%0A
- if _errno == errno.EACCES and sys.platform == %22win32%22:%0A if getattr(exc_value, 'args', None) == 32: # pragma: no cover%0A fserror = errors.ResourceLocked%0A
|
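
The class preserved by this diff is a context manager whose __exit__ translates EnvironmentError errno values into filesystem-specific exceptions. A stripped-down Python 3 sketch of the same pattern using built-in exception types; the two-entry mapping is illustrative, not fs's actual error taxonomy.

import errno

class convert_errors(object):
    # Illustrative errno -> exception mapping (the real class maps to fs.errors).
    ERRORS = {
        errno.ENOENT: FileNotFoundError,
        errno.EACCES: PermissionError,
    }

    def __init__(self, path):
        self._path = path

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type and isinstance(exc_value, EnvironmentError):
            fs_error = self.ERRORS.get(exc_value.errno, OSError)
            raise fs_error("%r failed for %s" % (exc_value, self._path))

try:
    with convert_errors("/no/such/path"):
        open("/no/such/path")
except FileNotFoundError as exc:
    print("translated:", exc)
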