commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
90597ba2e70ce73a7531a27f851bdbb27424d04f | Update nbtest.py | newB_in_progress/test/arj/nbtest.py | newB_in_progress/test/arj/nbtest.py | #.*.F means function #see line 28 adding to dict not working
#resolved just wrong debug
import re
operators = ['+','-']
conditionals = ['if','when'] #then replaces the : as in if x=3 :
with open('txt.nub','r+') as f:
filecontent = f.read()+' <EOF> '
values={} #global so as to be accessible from funcs
def parseF(textlist):
data=textlist
incr_var = 0
while incr_var < len(data) :
forwardIndex = incr_var + 1
backwardIndex = incr_var - 1
if forwardIndex >= len(data): #preventing out of bounds in array
forwardIndex = incr_var
cur_token = data[incr_var]
next_token = data[forwardIndex]
previous_token = data[backwardIndex]
index=data.index(cur_token)
if cur_token == 'ASSIGN':
values[previous_token]=next_token #adding to dict not working
elif cur_token == 'output':
if next_token in values :
print(values[next_token]) #printing the var by fetching the value
else :
print(next_token.replace('STRING','').replace('NUM','').strip())
incr_var+=1
def tokenF(load):
data = load #takes in a list
t_var=''
incr_var=0
#num = ['0','1','2','3','4','5','6','7','8','9'] not needed checked in isdigit()
while incr_var < len(data):
cur_char = data[incr_var]
index=data.index(cur_char) #get index of current char
pattern=r"'(.)*'" #regex for string
match= re.search(pattern,cur_char) #cur_char is not only one char but can also be 20 for example
if cur_char in conditionals :
data[index] = 'COND ' #
#cur_char.isdigit()==True: # or unicode.isNumeric()
elif cur_char in operators:
data[index] = 'OPER '+data[index]
elif cur_char == '=' and data[incr_var+1] != '=' and data[incr_var-1] != '=':
data[index] = 'ASSIGN'
elif cur_char == '=' and data[incr_var+1] == '=':
data[index] = 'EQUAL'
data.remove('=')
elif match is not None:
data[index] = 'STRING '+data[index]
incr_var+=1
return data
#print(values)
def splitF(feed):
raw = feed
rawChar = ['(',')','+','-','*','/','&','%','=',' ','\n',';','/*','*/','==']
formattedChar = [' ( ',' ) ',' + ',' - ',' * ',' / ',' & ',' % ',' = ',' ',' NEWLINE ',' ; ',' /* ',' */ ','dequal'] #replace with space
incr_var = 0
while incr_var < len(rawChar):
raw =''+raw.replace(rawChar[incr_var],formattedChar[incr_var])
incr_var +=1
#print(raw)
return raw.split()
print(splitF(filecontent)) #debug
print(tokenF(splitF(filecontent)))
print(values)
print(' ')
print(parseF(tokenF(splitF(filecontent)))) #real
| #.*.F means function #see line 28 adding to dict not working
import re
operators = ['+','-']
conditionals = ['if','when'] #then replaces the : as in if x=3 :
with open('txt.nub','r+') as f:
filecontent = f.read()+' <EOF> '
values={} #global so as to be accessible from funcs
def parseF(textlist):
data=textlist
incr_var = 0
while incr_var < len(data) :
forwardIndex = incr_var + 1
backwardIndex = incr_var - 1
if forwardIndex >= len(data): #preventing out of bounds in array
forwardIndex = incr_var
cur_token = data[incr_var]
next_token = data[forwardIndex]
previous_token = data[backwardIndex]
index=data.index(cur_token)
if cur_token == 'ASSIGN':
values[previous_token]=next_token #adding to dict not working
elif cur_token == 'output':
if next_token in values :
print(values[next_token]) #printing the var by fetching the value
else :
print(next_token.replace('STRING','').replace('NUM','').strip())
incr_var+=1
def tokenF(load):
data = load #takes in a list
t_var=''
incr_var=0
#num = ['0','1','2','3','4','5','6','7','8','9'] not needed checked in isdigit()
while incr_var < len(data):
cur_char = data[incr_var]
index=data.index(cur_char) #get index of current char
pattern=r"'(.)*'" #regex for string
match= re.search(pattern,cur_char) #cur_char is not only one char but can also be 20 for example
if cur_char in conditionals :
data[index] = 'COND ' #
#cur_char.isdigit()==True: # or unicode.isNumeric()
elif cur_char in operators:
data[index] = 'OPER '+data[index]
elif cur_char == '=' and data[incr_var+1] != '=' and data[incr_var-1] != '=':
data[index] = 'ASSIGN'
elif cur_char == '=' and data[incr_var+1] == '=':
data[index] = 'EQUAL'
data.remove('=')
elif match is not None:
data[index] = 'STRING '+data[index]
incr_var+=1
return data
#print(values)
def splitF(feed):
raw = feed
rawChar = ['(',')','+','-','*','/','&','%','=',' ','\n',';','/*','*/','==']
formattedChar = [' ( ',' ) ',' + ',' - ',' * ',' / ',' & ',' % ',' = ',' ',' NEWLINE ',' ; ',' /* ',' */ ','dequal'] #replace with space
incr_var = 0
while incr_var < len(rawChar):
raw =''+raw.replace(rawChar[incr_var],formattedChar[incr_var])
incr_var +=1
#print(raw)
return raw.split()
print(splitF(filecontent)) #debug
print(tokenF(splitF(filecontent)))
print(values)
print(' ')
print(parseF(tokenF(splitF(filecontent)))) #real
| Python | 0.000001 |
49ad9b8162a9113f3c4c69818553de2cb6bf66df | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/da541333433f74881d8f44947369756d40d5e7fe. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "da541333433f74881d8f44947369756d40d5e7fe"
TFRT_SHA256 = "df492c902908141405e88af81c4bb72580e3a5615bd91448b7c44a2c0d29009a"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "bdb99de6e7e5fcd5a7e55895bb1c658ea0336136"
TFRT_SHA256 = "a251c274cf0bbd805e221677cf4988c27156af54655b906eab11d9e3ee37d0b5"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0.000001 |
be0e9cf9a195f44a033bb8b3aeb13febf3cea9cf | Remove check in token credential (#14134) | src/azure-cli/azure/cli/command_modules/storage/oauth_token_util.py | src/azure-cli/azure/cli/command_modules/storage/oauth_token_util.py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import threading
from knack.log import get_logger
logger = get_logger(__name__)
class TokenUpdater(object):
"""
This class updates a given token_credential periodically using the provided callback function.
It shows one way of making sure the credential does not become expired.
"""
def __init__(self, token_credential, cli_ctx):
self.token_credential = token_credential
self.cli_ctx = cli_ctx
# the timer needs to be protected, as later on it is possible that one thread is setting a new timer and
# another thread is trying to cancel the timer
self.lock = threading.Lock()
self.timer_callback()
def timer_callback(self):
# call to get a new token and set a timer
from azure.cli.core._profile import Profile
from datetime import datetime
# should give back token that is valid for at least 5 mins
token = Profile(cli_ctx=self.cli_ctx).get_raw_token(
resource="https://storage.azure.com", subscription=self.cli_ctx.data['subscription_id'])[0][2]
try:
self.token_credential.token = token['accessToken']
expire = token['expiresOn']
seconds_left = (datetime.strptime(expire, "%Y-%m-%d %H:%M:%S.%f") - datetime.now()).seconds
except KeyError: # needed to deal with differing unserialized MSI token payload
self.token_credential.token = token['access_token']
expire = datetime.fromtimestamp(int(token['expires_on']))
seconds_left = (expire - datetime.now()).seconds
if seconds_left < 180:
logger.warning("Acquired token will expire on %s. Current time is %s.", expire, datetime.now())
with self.lock:
self.timer = threading.Timer(seconds_left - 180, self.timer_callback)
self.timer.daemon = True
self.timer.start()
def cancel(self):
# the timer needs to be canceled once the command has finished executing
# if not the timer will keep going
with self.lock:
self.timer.cancel()
| # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import threading
class TokenUpdater(object):
"""
This class updates a given token_credential periodically using the provided callback function.
It shows one way of making sure the credential does not become expired.
"""
def __init__(self, token_credential, cli_ctx):
self.token_credential = token_credential
self.cli_ctx = cli_ctx
# the timer needs to be protected, as later on it is possible that one thread is setting a new timer and
# another thread is trying to cancel the timer
self.lock = threading.Lock()
self.timer_callback()
def timer_callback(self):
# call to get a new token and set a timer
from azure.cli.core._profile import Profile
from datetime import datetime
# should give back token that is valid for at least 5 mins
token = Profile(cli_ctx=self.cli_ctx).get_raw_token(
resource="https://storage.azure.com", subscription=self.cli_ctx.data['subscription_id'])[0][2]
try:
self.token_credential.token = token['accessToken']
seconds_left = (datetime.strptime(token['expiresOn'], "%Y-%m-%d %H:%M:%S.%f") - datetime.now()).seconds
except KeyError: # needed to deal with differing unserialized MSI token payload
self.token_credential.token = token['access_token']
seconds_left = (datetime.fromtimestamp(int(token['expires_on'])) - datetime.now()).seconds
if seconds_left < 180:
# acquired token expires in less than 3 mins
raise Exception("Acquired a token expiring in less than 3 minutes")
with self.lock:
self.timer = threading.Timer(seconds_left - 180, self.timer_callback)
self.timer.daemon = True
self.timer.start()
def cancel(self):
# the timer needs to be canceled once the command has finished executing
# if not the timer will keep going
with self.lock:
self.timer.cancel()
| Python | 0 |
a8a0e24d9ee90676601a52c564eadb7ff264d5cd | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/356740e3a2bf884abd27b2ca362fe8108a7cd257. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "356740e3a2bf884abd27b2ca362fe8108a7cd257"
TFRT_SHA256 = "c5c806b5f5acb345eca8db4bc49053df60d0b368193f5b78346cf6acdc4bc3e8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "b570a1921c9e55ac53c8972bd2bfd37cd0eb510d"
TFRT_SHA256 = "01295fc2a90aa2d665890adbe8701e2ae2372028d3b8266cba38ceddccb42af6"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| Python | 0.000003 |
ce977d24d49b7e03b6db5b5590e8fc0ddf8e9127 | fix the deploy order in the daemon. closes #862 | fabfile/daemons.py | fabfile/daemons.py | #!/usr/bin/env python
from time import sleep, time
from fabric.api import execute, task, env
import app_config
import sys
import traceback
def safe_execute(*args, **kwargs):
"""
Wrap execute() so that all exceptions are caught and logged.
"""
try:
execute(*args, **kwargs)
except:
print "ERROR [timestamp: %d]: Here's the traceback" % time()
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
del tb
@task
def deploy():
"""
Harvest data and deploy slides indefinitely
"""
while True:
start = time()
safe_execute('ap.update')
safe_execute('data.load_updates', 'data/update.json')
safe_execute('liveblog.update')
safe_execute('deploy_bop')
safe_execute('deploy_big_boards')
safe_execute('deploy_slides')
duration = int(time() - start)
wait = app_config.DEPLOY_INTERVAL - duration
print "== Deploying slides ran in %ds, waiting %ds ==" % (duration, wait)
if wait < 0:
print "WARN: Deploying slides took %d seconds longer than %d" % (abs(wait), app_config.DEPLOY_INTERVAL)
wait = 0
sleep(wait)
| #!/usr/bin/env python
from time import sleep, time
from fabric.api import execute, task, env
import app_config
import sys
import traceback
def safe_execute(*args, **kwargs):
"""
Wrap execute() so that all exceptions are caught and logged.
"""
try:
execute(*args, **kwargs)
except:
print "ERROR [timestamp: %d]: Here's the traceback" % time()
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
del tb
@task
def deploy():
"""
Harvest data and deploy slides indefinitely
"""
while True:
start = time()
safe_execute('ap.update')
safe_execute('data.load_updates', 'data/update.json')
safe_execute('liveblog.update')
safe_execute('deploy_slides')
safe_execute('deploy_big_boards')
safe_execute('deploy_bop')
duration = int(time() - start)
wait = app_config.DEPLOY_INTERVAL - duration
print "== Deploying slides ran in %ds, waiting %ds ==" % (duration, wait)
if wait < 0:
print "WARN: Deploying slides took %d seconds longer than %d" % (abs(wait), app_config.DEPLOY_INTERVAL)
wait = 0
sleep(wait)
| Python | 0.000201 |
7e95e0b8adb4315c8f8a0c5aa8c6ccc588cbee18 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/0d8bae2de531db2e4e4efd3a4e168b39795458b9. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "0d8bae2de531db2e4e4efd3a4e168b39795458b9"
TFRT_SHA256 = "fa7cd1e72eec99562bf916e071222df2e72e90c67dcb14137ffbef07a4fcac5f"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "4b2fe81ea82e4c33783b5b62973fbe84dbc6f484"
TFRT_SHA256 = "f0e6e0fd3e5245d993cd4146d8245e130e724d0070401a25f730b02c7296d1c4"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0.000001 |
e2d066811a5e943600c170aba0cf797c104d1588 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/5f6e52142a3592d0cfa058dbfd140cad49ed451a. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "5f6e52142a3592d0cfa058dbfd140cad49ed451a"
TFRT_SHA256 = "8e1efbd7df0fdeb5186b178d7c8b90c33ba80cef54999e988097bd1ff0f4e8fe"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "736eeebfb56c6d0de138f4a29286140d8c26d927"
TFRT_SHA256 = "b584ee5ce5ecaadf289b0997987dfb5eec6cf3623f30b83028923cad20914e61"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0 |
9f47b0cf3d7e26339aaf51dce912db30cc50c587 | Debug values | promgen/prometheus.py | promgen/prometheus.py | import collections
import json
import logging
import subprocess
import tempfile
import requests
from django.conf import settings
from django.template.loader import render_to_string
from promgen import models
logger = logging.getLogger(__name__)
def check_rules(rules):
with tempfile.NamedTemporaryFile(mode='w', encoding='utf8') as fp:
logger.debug('Rendering to %s', fp.name)
fp.write(render_rules(rules))
fp.flush()
subprocess.check_call([
settings.PROMGEN['rule_writer']['promtool_path'],
'check-rules',
fp.name
])
def render_rules(rules=None):
if rules is None:
rules = models.Rule.objects.all()
return render_to_string('promgen/prometheus.rule', {'rules': rules})
def render_config(service=None, project=None):
data = []
for exporter in models.Exporter.objects.all():
if not exporter.project.farm:
continue
if service and exporter.project.service.name != service.name:
continue
if project and exporter.project.name != project.name:
continue
labels = {
'project': exporter.project.name,
'service': exporter.project.service.name,
'farm': exporter.project.farm.name,
'job': exporter.job,
}
if exporter.path:
labels['__metrics_path__'] = exporter.path
hosts = []
for host in models.Host.objects.filter(farm=exporter.project.farm):
hosts.append('{}:{}'.format(host.name, exporter.port))
data.append({
'labels': labels,
'targets': hosts,
})
return json.dumps(data, indent=2, sort_keys=True)
def write_config():
print(settings.PROMGEN)
print('write config')
with open(settings.PROMGEN['config_writer']['path'], 'w+', encoding='utf8') as fp:
fp.write(render_config())
print('send notification')
for target in settings.PROMGEN['config_writer'].get('notify', []):
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def write_rules():
with open(settings.PROMGEN['rule_writer']['rule_path'], 'w+', encoding='utf8') as fp:
fp.write(render_rules())
for target in settings.PROMGEN['rule_writer'].get('notify', []):
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def reload_prometheus():
target = '{}/-/reload'.format(settings.PROMGEN['prometheus']['url'])
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def import_config(config):
counters = collections.defaultdict(int)
for entry in config:
service, created = models.Service.objects.get_or_create(
name=entry['labels']['service'],
)
if created:
counters['Service'] += 1
farm, created = models.Farm.objects.get_or_create(
name=entry['labels']['farm'],
defaults={'source': 'pmc'}
)
if created:
counters['Farm'] += 1
project, created = models.Project.objects.get_or_create(
name=entry['labels']['project'],
service=service,
defaults={'farm': farm}
)
if created:
counters['Project'] += 1
if not project.farm:
project.farm = farm
project.save()
for target in entry['targets']:
target, port = target.split(':')
host, created = models.Host.objects.get_or_create(
name=target,
farm_id=farm.id,
)
if created:
counters['Host'] += 1
exporter, created = models.Exporter.objects.get_or_create(
job=entry['labels']['job'],
port=port,
project=project,
path=entry['labels'].get('__metrics_path__', '')
)
if created:
counters['Exporter'] += 1
return dict(counters)
| import collections
import json
import logging
import subprocess
import tempfile
import requests
from django.conf import settings
from django.template.loader import render_to_string
from promgen import models
logger = logging.getLogger(__name__)
def check_rules(rules):
with tempfile.NamedTemporaryFile(mode='w', encoding='utf8') as fp:
logger.debug('Rendering to %s', fp.name)
fp.write(render_rules(rules))
fp.flush()
subprocess.check_call([
settings.PROMGEN['rule_writer']['promtool_path'],
'check-rules',
fp.name
])
def render_rules(rules=None):
if rules is None:
rules = models.Rule.objects.all()
return render_to_string('promgen/prometheus.rule', {'rules': rules})
def render_config(service=None, project=None):
data = []
for exporter in models.Exporter.objects.all():
if not exporter.project.farm:
continue
if service and exporter.project.service.name != service.name:
continue
if project and exporter.project.name != project.name:
continue
labels = {
'project': exporter.project.name,
'service': exporter.project.service.name,
'farm': exporter.project.farm.name,
'job': exporter.job,
}
if exporter.path:
labels['__metrics_path__'] = exporter.path
hosts = []
for host in models.Host.objects.filter(farm=exporter.project.farm):
hosts.append('{}:{}'.format(host.name, exporter.port))
data.append({
'labels': labels,
'targets': hosts,
})
return json.dumps(data, indent=2, sort_keys=True)
def write_config():
with open(settings.PROMGEN['config_writer']['path'], 'w+', encoding='utf8') as fp:
fp.write(render_config())
for target in settings.PROMGEN['config_writer'].get('notify', []):
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def write_rules():
with open(settings.PROMGEN['rule_writer']['rule_path'], 'w+', encoding='utf8') as fp:
fp.write(render_rules())
for target in settings.PROMGEN['rule_writer'].get('notify', []):
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def reload_prometheus():
target = '{}/-/reload'.format(settings.PROMGEN['prometheus']['url'])
try:
requests.post(target).raise_for_status()
except Exception as e:
logger.error('%s while notifying %s', e, target)
def import_config(config):
counters = collections.defaultdict(int)
for entry in config:
service, created = models.Service.objects.get_or_create(
name=entry['labels']['service'],
)
if created:
counters['Service'] += 1
farm, created = models.Farm.objects.get_or_create(
name=entry['labels']['farm'],
defaults={'source': 'pmc'}
)
if created:
counters['Farm'] += 1
project, created = models.Project.objects.get_or_create(
name=entry['labels']['project'],
service=service,
defaults={'farm': farm}
)
if created:
counters['Project'] += 1
if not project.farm:
project.farm = farm
project.save()
for target in entry['targets']:
target, port = target.split(':')
host, created = models.Host.objects.get_or_create(
name=target,
farm_id=farm.id,
)
if created:
counters['Host'] += 1
exporter, created = models.Exporter.objects.get_or_create(
job=entry['labels']['job'],
port=port,
project=project,
path=entry['labels'].get('__metrics_path__', '')
)
if created:
counters['Exporter'] += 1
return dict(counters)
| Python | 0.000001 |
a70e0abdf409d770ddbb9faf3cc66c26fc03b076 | fix fbproject tests following new pystan version | tests/test_fbprophet.py | tests/test_fbprophet.py | import unittest
import numpy as np
import pandas as pd
from fbprophet import Prophet
class TestFbProphet(unittest.TestCase):
def test_fit(self):
train = pd.DataFrame({
'ds': np.array(['2012-05-18', '2012-05-20']),
'y': np.array([38.23, 21.25])
})
forecaster = Prophet(mcmc_samples=1)
forecaster.fit(train, control={'adapt_engaged': False})
| import unittest
import numpy as np
import pandas as pd
from fbprophet import Prophet
class TestFbProphet(unittest.TestCase):
def test_fit(self):
train = pd.DataFrame({
'ds': np.array(['2012-05-18', '2012-05-20']),
'y': np.array([38.23, 21.25])
})
forecaster = Prophet(mcmc_samples=1)
forecaster.fit(train)
| Python | 0 |
768d4fa324826d9c96a96be66ea4049a0eb120ed | Move public comment tags into this app from template_utils | comment_utils/templatetags/comment_utils.py | comment_utils/templatetags/comment_utils.py | """
Template tags designed to work with applications which use comment
moderation.
"""
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import get_model
from django.contrib.comments.models import Comment, FreeComment
from django.contrib.comments.templatetags import comments
from django.contrib.contenttypes.models import ContentType
class PublicCommentCountNode(comments.CommentCountNode):
def render(self, context):
from django.conf import settings
manager = self.free and FreeComment.objects or Comment.objects
if self.context_var_name is not None:
object_id = template.resolve_variable(self.context_var_name, context)
comment_count = manager.filter(object_id__exact=object_id,
content_type__app_label__exact=self.package,
content_type__model__exact=self.module,
site__id__exact=settings.SITE_ID,
is_public__exact=True).count()
context[self.var_name] = comment_count
return ''
class DoPublicCommentList(comments.DoGetCommentList):
"""
Retrieves comments for a particular object and stores them in a
context variable.
The difference between this tag and Django's built-in comment list
tags is that this tag will only return comments with
``is_public=True``. If your application uses any sort of comment
moderation which sets ``is_public=False``, you'll probably want to
use this tag, as it makes the template logic simpler by only
returning approved comments.
Syntax::
{% get_public_comment_list for [app_name].[model_name] [object_id] as [varname] %}
or::
{% get_public_free_comment_list for [app_name].[model_name] [object_id] as [varname] %}
When called as ``get_public_comment_list``, this tag retrieves
instances of ``Comment`` (comments which require
registration). When called as ``get_public_free_comment_list``,
this tag retrieves instances of ``FreeComment`` (comments which do
not require registration).
To retrieve comments in reverse order (e.g., newest comments
first), pass 'reversed' as an extra argument after ``varname``.
So, for example, to retrieve registered comments for a flatpage
with ``id`` 12, use like this::
{% get_public_comment_list for flatpages.flatpage 12 as comment_list %}
To retrieve unregistered comments for the same object::
{% get_public_free_comment_list for flatpages.flatpage 12 as comment_list %}
To retrieve in reverse order (newest comments first)::
{% get_public_free_comment_list for flatpages.flatpage 12 as comment_list reversed %}
"""
def __call__(self, parser, token):
bits = token.contents.split()
if len(bits) not in (6, 7):
raise template.TemplateSyntaxError("'%s' tag takes 5 or 6 arguments" % bits[0])
if bits[1] != 'for':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'for'" % bits[0])
try:
app_name, model_name = bits[2].split('.')
except ValueError:
raise template.TemplateSyntaxError("second argument to '%s' tag must be in the form 'app_name.model_name'" % bits[0])
model = get_model(app_name, model_name)
if model is None:
raise template.TemplateSyntaxError("'%s' tag got invalid model '%s.%s'" % (bits[0], app_name, model_name))
content_type = ContentType.objects.get_for_model(model)
var_name, object_id = None, None
if bits[3].isdigit():
object_id = bits[3]
try:
content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise template.TemplateSyntaxError("'%s' tag got reference to %s object with id %s, which doesn't exist" % (bits[0], content_type.name, object_id))
else:
var_name = bits[3]
if bits[4] != 'as':
raise template.TemplateSyntaxError("fourth argument to '%s' tag must be 'as'" % bits[0])
if len(bits) == 7:
if bits[6] != 'reversed':
raise template.TemplateSyntaxError("sixth argument to '%s' tag, if given, must be 'reversed'" % bits[0])
ordering = '-'
else:
ordering = ''
return comments.CommentListNode(app_name, model_name, var_name, object_id, bits[5], self.free, ordering, extra_kwargs={ 'is_public__exact': True })
class DoPublicCommentCount(comments.DoCommentCount):
"""
Retrieves the number of comments attached to a particular object
and stores them in a context variable.
The difference between this tag and Django's built-in comment
count tags is that this tag will only count comments with
``is_public=True``. If your application uses any sort of comment
moderation which sets ``is_public=False``, you'll probably want to
use this tag, as it gives an accurate count of the comments which
will be publicly displayed.
Syntax::
{% get_public_comment_count for [app_name].[model_name] [object_id] as [varname] %}
or::
{% get_public_free_comment_count for [app_name].[model_name] [object_id] as [varname] %}
Example::
{% get_public_comment_count for weblog.entry entry.id as comment_count %}
When called as ``get_public_comment_list``, this tag counts
instances of ``Comment`` (comments which require
registration). When called as ``get_public_free_comment_count``,
this tag counts instances of ``FreeComment`` (comments which do
not require registration).
"""
def __call__(self, parser, token):
bits = token.contents.split()
if len(bits) != 6:
raise template.TemplateSyntaxError("'%s' tag takes five arguments" % bits[0])
if bits[1] != 'for':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'for'" % bits[0])
try:
app_name, model_name = bits[2].split('.')
except ValueError:
raise template.TemplateSyntaxError("second argument to '%s tag must be in the format app_name.model_name'" % bits[0])
model = get_model(app_name, model_name)
if model is None:
raise template.TemplateSyntaxError("'%s' tag got invalid model '%s.%s'" % (app_name, model_name))
content_type = ContentType.objects.get_for_model(model)
var_name, object_id = None, None
if bits[3].isdigit():
object_id = bits[3]
try:
content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise template.TemplateSyntaxError("'%s' tag got reference to %s object with id %s, which doesn't exist" % (bits[0], content_type.name, object_id))
else:
var_name = bits[3]
if bits[4] != 'as':
raise template.TemplateSyntaxError("fourth argument to '%s' tag must be 'as'" % bits[0])
return PublicCommentCountNode(app_name, model_name, var_name, object_id, bits[5], self.free)
register = template.Library()
register.tag('get_public_comment_list', DoPublicCommentList(False))
register.tag('get_public_free_comment_list', DoPublicCommentList(True))
register.tag('get_public_comment_count', DoPublicCommentCount(False))
register.tag('get_public_free_comment_count', DoPublicCommentCount(True))
| Python | 0.000002 | |
7461c7b6b729c38194ebb5e88b33e7bcc73b4c9c | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/53604b1779bdbea70bed75fe1695b503e06be323. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "53604b1779bdbea70bed75fe1695b503e06be323"
TFRT_SHA256 = "b2ce14585f2707ec56b013323fde0ff10ddecdf608854dcf332c46244e0dbd20"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "1c915c952cea8e5d290d241b3a0178856a9ec35b"
TFRT_SHA256 = "97f8ad0010b924f8489ca04e8e5aa5aea4a69013293e6575137176a6a8d80168"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0.000002 |
0fe76a38aff965aca9f672b48ed4a4933ee10161 | add an argument taskid to EventLoopProgressReportWriter.write() | AlphaTwirl/EventReader/EventLoopProgressReportWriter.py | AlphaTwirl/EventReader/EventLoopProgressReportWriter.py | # Tai Sakuma <tai.sakuma@cern.ch>
from AlphaTwirl.ProgressBar import ProgressReport
##____________________________________________________________________________||
class EventLoopProgressReportWriter(object):
def write(self, taskid, component, event):
return ProgressReport(
name = component.name,
done = event.iEvent + 1,
total = event.nEvents,
taskid = taskid
)
##____________________________________________________________________________||
| # Tai Sakuma <tai.sakuma@cern.ch>
from AlphaTwirl.ProgressBar import ProgressReport
##____________________________________________________________________________||
class EventLoopProgressReportWriter(object):
def write(self, component, event):
return ProgressReport(name = component.name, done = event.iEvent + 1, total = event.nEvents)
##____________________________________________________________________________||
| Python | 0.000001 |
662cc443f7c32182aaef89e5b61e90797b7e3e58 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/81d27bd006f86cc3fd3d78a7193583ab9d18367a. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "81d27bd006f86cc3fd3d78a7193583ab9d18367a"
TFRT_SHA256 = "f7cafc8d2b512ff3be61dc5a3d8a3a5bcc3e749b213c1afa4909116b90710e2e"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "bd4c5dc54997aaffe6f37a802b106c3ac88f150f"
TFRT_SHA256 = "a3ee3c259c5d7ea631177a75195b35bbfb695d69ad70adf4b0830ee2d91a9625"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| Python | 0.000002 |
f0d76cae236cded0bfa6cc0f6486efb04daeb133 | convert latency to int before posting to cbmonitor | cbagent/collectors/secondary_latency.py | cbagent/collectors/secondary_latency.py | import os.path
from cbagent.collectors import Collector
class SecondaryLatencyStats(Collector):
COLLECTOR = "secondaryscan_latency"
def _get_secondaryscan_latency(self):
stats = {}
if os.path.isfile(self.secondary_statsfile):
with open(self.secondary_statsfile, 'rb') as fh:
next(fh).decode()
fh.seek(-400, 2)
last = fh.readlines()[-1].decode()
duration = last.split(',')[-1]
stats = {}
latency = duration.split(':')[1]
latency = latency.rstrip()
latency_key = duration.split(':')[0]
latency_key = latency_key.strip()
stats[latency_key] = int(latency)
return stats
def sample(self):
stats = self._get_secondaryscan_latency()
if stats:
self.update_metric_metadata(stats.keys())
self.store.append(stats, cluster=self.cluster, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
| import os.path
from cbagent.collectors import Collector
class SecondaryLatencyStats(Collector):
COLLECTOR = "secondaryscan_latency"
def _get_secondaryscan_latency(self):
stats = {}
if os.path.isfile(self.secondary_statsfile):
with open(self.secondary_statsfile, 'rb') as fh:
next(fh).decode()
fh.seek(-400, 2)
last = fh.readlines()[-1].decode()
duration = last.split(',')[-1]
stats = {}
latency = duration.split(':')[1]
latency = latency.rstrip()
latency_key = duration.split(':')[0]
latency_key = latency_key.strip()
stats[latency_key] = latency
return stats
def sample(self):
stats = self._get_secondaryscan_latency()
if stats:
self.update_metric_metadata(stats.keys())
self.store.append(stats, cluster=self.cluster, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
| Python | 0 |
69c9322827ed95ce845b49119bc58aa4f36d82bb | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/ecf8607212b519546828e3fcc66f68985597a622. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ecf8607212b519546828e3fcc66f68985597a622"
TFRT_SHA256 = "545c097a241ff80701e54d1e088762f27a7494980f01c08fee3ce3aeb4fd22cf"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "078534d79809852ea069d23bbacd2483ade18c11"
TFRT_SHA256 = "55905ff389c5294ac1ce4be5e3f0af2d171e6061aa886fb66d59e3636f03412b"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0.000002 |
66284e57accec5977d606fc91a0b28177b352eb4 | Add end-to-end integration testing for all compression types | test/test_producer.py | test/test_producer.py | import pytest
from kafka import KafkaConsumer, KafkaProducer
from test.conftest import version
from test.testutil import random_string
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
@pytest.mark.parametrize("compression", [None, 'gzip', 'snappy', 'lz4'])
def test_end_to_end(kafka_broker, compression):
# LZ4 requires 0.8.2
if compression == 'lz4' and version() < (0, 8, 2):
return
connect_str = 'localhost:' + str(kafka_broker.port)
producer = KafkaProducer(bootstrap_servers=connect_str,
max_block_ms=10000,
compression_type=compression,
value_serializer=str.encode)
consumer = KafkaConsumer(bootstrap_servers=connect_str,
group_id=None,
consumer_timeout_ms=10000,
auto_offset_reset='earliest',
value_deserializer=bytes.decode)
topic = random_string(5)
for i in range(1000):
producer.send(topic, 'msg %d' % i)
producer.flush()
producer.close()
consumer.subscribe([topic])
msgs = set()
for i in range(1000):
try:
msgs.add(next(consumer).value)
except StopIteration:
break
assert msgs == set(['msg %d' % i for i in range(1000)])
| import pytest
from kafka import KafkaConsumer, KafkaProducer
from test.conftest import version
from test.testutil import random_string
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_end_to_end(kafka_broker):
connect_str = 'localhost:' + str(kafka_broker.port)
producer = KafkaProducer(bootstrap_servers=connect_str,
max_block_ms=10000,
value_serializer=str.encode)
consumer = KafkaConsumer(bootstrap_servers=connect_str,
group_id=None,
consumer_timeout_ms=10000,
auto_offset_reset='earliest',
value_deserializer=bytes.decode)
topic = random_string(5)
for i in range(1000):
producer.send(topic, 'msg %d' % i)
producer.flush()
producer.close()
consumer.subscribe([topic])
msgs = set()
for i in range(1000):
try:
msgs.add(next(consumer).value)
except StopIteration:
break
assert msgs == set(['msg %d' % i for i in range(1000)])
| Python | 0 |
7c12b82cb410540dfa3b65150ce39924b5793bce | handle package.json exceptions | python_package_manager/utils/package_json.py | python_package_manager/utils/package_json.py | import os
import json
def get_dependencies():
package_file_path = os.path.join(os.getcwd(), 'package.json')
try:
with open(package_file_path, 'r') as infile:
package_dict = json.load(infile)
dependencies = package_dict.get("pythonDependencies", [])
dependencies_dev = package_dict.get("pythonDevDependencies", [])
except:
print("unable to read package.json")
return []
return dependencies
def write_dependencies(dependencies):
package_file_path = os.path.join(os.getcwd(), 'package.json')
try:
with open(package_file_path, 'r') as infile:
package_dict = json.load(infile)
package_dict["pythonDependencies"] = dependencies
except:
print("unable to read package.json")
return
try:
with open(package_file_path, 'w') as outfile:
json.dump(package_dict, outfile, indent=2)
except:
print("unable to write package.json")
return
| import os
import json
def get_dependencies():
package_file_path = os.path.join(os.getcwd(), 'package.json')
with open(package_file_path, 'r') as infile:
package_dict = json.load(infile)
dependencies = package_dict.get("pythonDependencies", [])
dependencies_dev = package_dict.get("pythonDevDependencies", [])
return dependencies
def write_dependencies(dependencies):
package_file_path = os.path.join(os.getcwd(), 'package.json')
with open(package_file_path, 'r') as infile:
package_dict = json.load(infile)
package_dict["pythonDependencies"] = dependencies
with open(package_file_path, 'w') as outfile:
json.dump(package_dict, outfile, indent=2)
| Python | 0.000003 |
b0dd18d4e4e18dafae9d93848f633afc396c91b4 | remove outdated/misguided meta __variables__, https://mail.python.org/pipermail/python-dev/2001-March/013328.html | fastly/__init__.py | fastly/__init__.py | from fastly import *
| """
"""
from fastly import *
__author__ = 'Tyler McMullen <tbmcmullen@gmail.com>'
__copyright__ = 'Copyright (c) 2012 Fastly Inc'
__license__ = 'BSD'
__version__ = '0.0.1'
__url__ = 'http://www.fastly.com/docs/fastly-py'
| Python | 0.000065 |
bc02e845f4a8b726f7474efa77753c7de6fe600b | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/300e7ac61cda0eb2ddb13b7f2ad850d80646adcd. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "300e7ac61cda0eb2ddb13b7f2ad850d80646adcd"
TFRT_SHA256 = "2b79ada8dbacd5de1b868121822ffde58564a1f8749c4f3d91f8f951e76c3fbc"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ed6f666ac14b939d7303607c950b88b7d5607c46"
TFRT_SHA256 = "b99fed746abe39cb0b072e773af53a4c7189056737fc0118ef3b013c187660c9"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| Python | 0.000001 |
42a147b0dcc24ea51207cca020d2bfc6fa7bde46 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/926650aa8e303d62814e45f709d16673501d96bc. | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "926650aa8e303d62814e45f709d16673501d96bc"
TFRT_SHA256 = "f178d137127c3a67962362f596b8015fdcdc58271e1e3d692eba47b09d31402a"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d50aae4b79fb4aa5a3c4dd280004313c7f1fda51"
TFRT_SHA256 = "3d02021cbd499d749eeb4e3e6bdcd47a67695bfc145827c5821548c3c6f1494c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| Python | 0 |
4cbbe7c3ab891a11492f368d780a1416d37358ff | Change the method of generating content of GUID element | feedzilla/syndication.py | feedzilla/syndication.py | # -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from django.contrib.syndication.views import Feed
from django.conf import settings
from feedzilla.models import Post
class PostFeed(Feed):
title_template = 'feedzilla/feed/post_title.html'
description_template = 'feedzilla/feed/post_description.html'
title = settings.FEEDZILLA_SITE_TITLE
description = settings.FEEDZILLA_SITE_DESCRIPTION
link = '/'
def items(self, obj):
return Post.active_objects.all()\
.order_by('-created')[:settings.FEEDZILLA_PAGE_SIZE]
#def item_title(self, item):
#return item.name
#def item_description(self, item):
#return item.description
def item_pubdate(self, item):
return item.created
def item_guid(self, item):
return item.link
| # -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from django.contrib.syndication.views import Feed
from django.conf import settings
from feedzilla.models import Post
class PostFeed(Feed):
title_template = 'feedzilla/feed/post_title.html'
description_template = 'feedzilla/feed/post_description.html'
title = settings.FEEDZILLA_SITE_TITLE
description = settings.FEEDZILLA_SITE_DESCRIPTION
link = '/'
def items(self, obj):
return Post.active_objects.all()\
.order_by('-created')[:settings.FEEDZILLA_PAGE_SIZE]
#def item_title(self, item):
#return item.name
#def item_description(self, item):
#return item.description
def item_pubdate(self, item):
return item.created
def item_guid(self, item):
return str(item.guid)
| Python | 0.000003 |
4e942272c00c943eb2402a94b86f2c5a0c778ac0 | update post_sync tests | tests/test_post_sync.py | tests/test_post_sync.py | import pytest
from user_sync.post_sync.manager import PostSyncData
@pytest.fixture
def example_user():
return {
'type': 'federatedID',
'username': 'user@example.com',
'domain': 'example.com',
'email': 'user@example.com',
'firstname': 'Example',
'lastname': 'User',
'groups': set(),
'country': 'US',
}
def test_add_umapi_user(example_user):
email_id = 'user@example.com'
post_sync_data = PostSyncData()
post_sync_data.update_umapi_data(None, email_id, [], [], **example_user)
assert post_sync_data.umapi_data[None][email_id] == example_user
def test_add_groups(example_user):
post_sync_data = PostSyncData()
email_id = 'user@example.com'
example_user['groups'] = {'group1', 'group2', 'group3'}
groups_add = ['group3', 'group4', 'group5']
post_sync_data.update_umapi_data(None, email_id, groups_add, [], **example_user)
assert post_sync_data.umapi_data[None][email_id]['groups'] == example_user['groups'] | set(groups_add)
def test_remove_groups(example_user):
post_sync_data = PostSyncData()
email_id = 'user@example.com'
example_user['groups'] = {'group1', 'group2', 'group3'}
groups_remove = ['group1', 'group2']
post_sync_data.update_umapi_data(None, email_id, [], groups_remove, **example_user)
assert post_sync_data.umapi_data[None][email_id]['groups'] == example_user['groups'] - set(groups_remove)
def test_add_remove_groups(example_user):
post_sync_data = PostSyncData()
email_id = 'user@example.com'
example_user['groups'] = {'group1', 'group2', 'group3', 'group4', 'group5'}
groups_add = ['group6']
groups_remove = ['group1', 'group2']
post_sync_data.update_umapi_data(None, email_id, groups_add, groups_remove, **example_user)
delta_groups = example_user['groups'] | set(groups_add)
delta_groups -= set(groups_remove)
assert post_sync_data.umapi_data[None][email_id]['groups'] == delta_groups
| import pytest
from user_sync.post_sync import manager
PostSyncManager = manager.PostSyncManager
@pytest.fixture
def example_user():
return {
'identity_type': 'federatedID',
'username': 'user@example.com',
'domain': 'example.com',
'email': 'user@example.com',
'firstname': 'Example',
'lastname': 'User',
'groups': [],
'country': 'US',
}
def test_add_umapi_user(example_user, monkeypatch):
with monkeypatch.context() as m:
m.setattr(manager, '_SYNC_DATA_STORE', {})
email_id = 'user@example.com'
PostSyncManager.update_sync_data(email_id, 'umapi_data', [], [], **example_user)
assert manager._SYNC_DATA_STORE[email_id.lower()]['umapi_data'] == example_user
def test_add_groups(example_user, monkeypatch):
with monkeypatch.context() as m:
m.setattr(manager, '_SYNC_DATA_STORE', {})
email_id = 'user@example.com'
example_user['groups'] = ['group1', 'group2', 'group3']
groups_add = ['group3', 'group4', 'group5']
PostSyncManager.update_sync_data(email_id, 'umapi_data', groups_add, [], **example_user)
assert sorted(manager._SYNC_DATA_STORE[email_id.lower()]['umapi_data']['groups']) == sorted(list(set(
example_user['groups']) | set(groups_add)))
def test_remove_groups(example_user, monkeypatch):
with monkeypatch.context() as m:
m.setattr(manager, '_SYNC_DATA_STORE', {})
email_id = 'user@example.com'
example_user['groups'] = ['group1', 'group2', 'group3']
groups_remove = ['group1', 'group2']
PostSyncManager.update_sync_data(email_id, 'umapi_data', [], groups_remove, **example_user)
assert sorted(manager._SYNC_DATA_STORE[email_id.lower()]['umapi_data']['groups']) == sorted(list(set(
example_user['groups']) - set(groups_remove)))
def test_add_remove_groups(example_user, monkeypatch):
with monkeypatch.context() as m:
m.setattr(manager, '_SYNC_DATA_STORE', {})
email_id = 'user@example.com'
example_user['groups'] = ['group1', 'group2', 'group3', 'group4', 'group5']
groups_add = ['group6']
groups_remove = ['group1', 'group2']
PostSyncManager.update_sync_data(email_id, 'umapi_data', groups_add, groups_remove, **example_user)
delta_groups = list(set(example_user['groups']) | set(groups_add))
delta_groups = list(set(delta_groups) - set(groups_remove))
assert sorted(manager._SYNC_DATA_STORE[email_id.lower()]['umapi_data']['groups']) == sorted(delta_groups)
| Python | 0 |
7744aca1edc6afd263ac386efa9a1e92a41c30aa | Add variable PY2 | file_metadata/_compat.py | file_metadata/_compat.py | # -*- coding: utf-8 -*-
"""
Provides utilities to handle the python2 and python3 differences.
"""
from __future__ import (division, absolute_import, unicode_literals,
print_function)
import json
import re
import subprocess
import sys
try: # Python 3
from urllib.request import urlopen # flake8: noqa (unused import)
except ImportError: # Python 2
from urllib2 import urlopen # flake8: noqa (unused import)
try: # pragma: no cover
JSONDecodeError = json.decoder.JSONDecodeError
except AttributeError: # pragma: no cover
JSONDecodeError = ValueError
PY2 = sys.version_info[0] == 2
def check_output(*popenargs, **kwargs):
"""
Run command with arguments and return its output.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the returncode and output attributes.
The arguments are the same as for the Popen constructor. Example::
>>> check_output(["echo", "hello world"]).strip()
'hello world'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use ``stderr=subprocess.STDOUT``::
>>> check_output(["non_existent_file"], stderr=subprocess.STDOUT)
Traceback (most recent call last):
...
OSError: [Errno 2] No such file or directory
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
exc = subprocess.CalledProcessError(retcode,
kwargs.get("args", popenargs[0]))
exc.output = output # output attrib not there in python2.6
raise exc
return output
def ffprobe_parser(output):
"""
Parse output from the older versions of avprode/ffprobe. The -of or
-print_format argument was added in versions 0.9+. This allows json
output. But in older versions like 0.8.17 which is used in ubuntu
precise, json output is not possible. In such cases, this function
can be used to parse the output.
:param output: The INI like syntax from ffprobe.
:return: The parsed dict.
"""
streams = re.findall('\[STREAM\](.*?)\[\/STREAM\]', output, re.S)
_format = re.findall('\[FORMAT\](.*?)\[\/FORMAT\]', output, re.S)
def parse_section(section):
section_dict = {}
for line in section.strip().splitlines():
key, val = line.strip().split("=", 1)
section_dict[key.strip()] = val.strip()
return section_dict
data = {}
if streams:
parsed_streams = [parse_section(stream) for stream in streams]
data['streams'] = parsed_streams
if _format:
parsed_format = parse_section(_format[0])
data['format'] = parsed_format
return data
| # -*- coding: utf-8 -*-
"""
Provides utilities to handle the python2 and python3 differences.
"""
from __future__ import (division, absolute_import, unicode_literals,
print_function)
import json
import re
import subprocess
try: # Python 3
from urllib.request import urlopen # flake8: noqa (unused import)
except ImportError: # Python 2
from urllib2 import urlopen # flake8: noqa (unused import)
try: # pragma: no cover
JSONDecodeError = json.decoder.JSONDecodeError
except AttributeError: # pragma: no cover
JSONDecodeError = ValueError
def check_output(*popenargs, **kwargs):
"""
Run command with arguments and return its output.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the returncode and output attributes.
The arguments are the same as for the Popen constructor. Example::
>>> check_output(["echo", "hello world"]).strip()
'hello world'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use ``stderr=subprocess.STDOUT``::
>>> check_output(["non_existent_file"], stderr=subprocess.STDOUT)
Traceback (most recent call last):
...
OSError: [Errno 2] No such file or directory
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
exc = subprocess.CalledProcessError(retcode,
kwargs.get("args", popenargs[0]))
exc.output = output # output attrib not there in python2.6
raise exc
return output
def ffprobe_parser(output):
"""
Parse output from the older versions of avprode/ffprobe. The -of or
-print_format argument was added in versions 0.9+. This allows json
output. But in older versions like 0.8.17 which is used in ubuntu
precise, json output is not possible. In such cases, this function
can be used to parse the output.
:param output: The INI like syntax from ffprobe.
:return: The parsed dict.
"""
streams = re.findall('\[STREAM\](.*?)\[\/STREAM\]', output, re.S)
_format = re.findall('\[FORMAT\](.*?)\[\/FORMAT\]', output, re.S)
def parse_section(section):
section_dict = {}
for line in section.strip().splitlines():
key, val = line.strip().split("=", 1)
section_dict[key.strip()] = val.strip()
return section_dict
data = {}
if streams:
parsed_streams = [parse_section(stream) for stream in streams]
data['streams'] = parsed_streams
if _format:
parsed_format = parse_section(_format[0])
data['format'] = parsed_format
return data
| Python | 0.000272 |
20dc4b6d80842579740ed91ebb848446a0cecdbf | fix test_settings | test_settings.py | test_settings.py | from settings import *
__import__('dev-settings', globals(), locals(), ['*'], -1)
ROOT_URLCONF = 'urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': rel('mirosubs.sqlite3'),
}
}
INSTALLED_APPS += ('django_nose', )
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.remove('mirosubs')
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' | from settings import *
ROOT_URLCONF = 'urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': rel('mirosubs.sqlite3'),
}
}
INSTALLED_APPS += ('django_nose', )
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.remove('mirosubs')
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' | Python | 0.000002 |
555981d288b1e3970e2cb9432db3e72f57ba48b4 | deal with zero args corner case and return correct type | finat/pyop2_interface.py | finat/pyop2_interface.py | try:
from pyop2.pyparloop import Kernel
except:
Kernel = None
from .interpreter import evaluate
def pyop2_kernel(kernel, kernel_args, interpreter=False):
"""Return a :class:`pyop2.Kernel` from the recipe and kernel data
provided.
:param kernel: The :class:`~.utils.Kernel` to map to PyOP2.
:param kernel_args: The ordered list of Pymbolic variables constituting
the kernel arguments, excluding the result of the recipe (the latter
should be prepended to the argument list).
:param interpreter: If set to ``True``, the kernel will be
evaluated using the FInAT interpreter instead of generating a
compiled kernel.
:result: The :class:`pyop2.Kernel`
"""
if Kernel is None:
raise ImportError("pyop2 was not imported. Is it installed?")
if kernel_args and \
set(kernel_args) != kernel.kernel_data.kernel_args:
raise ValueError("Incomplete value list")
if interpreter:
def kernel_function(*args):
context = {kernel_args: args[1:]}
args[0][:] = evaluate(kernel.recipe, context, kernel.kernel_data)
return Kernel(kernel_function)
else:
raise NotImplementedError
| try:
from pyop2.pyparloop import Kernel
except:
Kernel = None
from .interpreter import evaluate
def pyop2_kernel(kernel, kernel_args, interpreter=False):
"""Return a :class:`pyop2.Kernel` from the recipe and kernel data
provided.
:param kernel: The :class:`~.utils.Kernel` to map to PyOP2.
:param kernel_args: The ordered list of Pymbolic variables constituting
the kernel arguments, excluding the result of the recipe (the latter
should be prepended to the argument list).
:param interpreter: If set to ``True``, the kernel will be
evaluated using the FInAT interpreter instead of generating a
compiled kernel.
:result: The :class:`pyop2.Kernel`
"""
if Kernel is None:
raise ImportError("pyop2 was not imported. Is it installed?")
if set(kernel_args) != kernel.kernel_data.kernel_args:
raise ValueError("Incomplete value list")
if interpreter:
def kernel_function(*args):
context = {kernel_args: args[1:]}
args[0][:] = evaluate(kernel.recipe, context, kernel.kernel_data)
return (Kernel(kernel_function), kernel_args)
else:
raise NotImplementedError
| Python | 0 |
dfff8bf474663d4efc02fdd3344054e20a6385ca | Update test_smoothing.py | tests/test_smoothing.py | tests/test_smoothing.py | try:
from . import generic as g
except BaseException:
import generic as g
class SmoothTest(g.unittest.TestCase):
def test_smooth(self):
"""
Load a collada scene with pycollada.
"""
m = g.trimesh.creation.icosahedron()
m.vertices, m.faces = g.trimesh.remesh.subdivide_to_size(
m.vertices, m.faces, 0.1)
s = m.copy()
q = m.copy()
f = m.copy()
d = m.copy()
assert m.is_volume
# Equal Weights
lap = g.trimesh.smoothing.laplacian_calculation(
mesh=m, equal_weight=True)
g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, False, True, lap)
g.trimesh.smoothing.filter_laplacian(q, 0.5, 10, True, True, lap)
g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)
assert s.is_volume
assert q.is_volume
assert f.is_volume
assert d.is_volume
assert g.np.isclose(s.volume, m.volume, rtol=0.1)
assert g.np.isclose(q.volume, m.volume, rtol=0.1)
assert g.np.isclose(f.volume, m.volume, rtol=0.1)
assert g.np.isclose(d.volume, m.volume, rtol=0.1)
s = m.copy()
q = m.copy()
f = m.copy()
d = m.copy()
# umbrella Weights
lap = g.trimesh.smoothing.laplacian_calculation(m, equal_weight=False)
g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, False, True, lap)
g.trimesh.smoothing.filter_laplacian(q, 0.5, 10, True, True, lap)
g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)
assert s.is_volume
assert q.is_volume
assert f.is_volume
assert d.is_volume
assert g.np.isclose(s.volume, m.volume, rtol=0.1)
assert g.np.isclose(q.volume, m.volume, rtol=0.1)
assert g.np.isclose(f.volume, m.volume, rtol=0.1)
assert g.np.isclose(d.volume, m.volume, rtol=0.1)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
| try:
from . import generic as g
except BaseException:
import generic as g
class SmoothTest(g.unittest.TestCase):
def test_smooth(self):
"""
Load a collada scene with pycollada.
"""
m = g.trimesh.creation.icosahedron()
m.vertices, m.faces = g.trimesh.remesh.subdivide_to_size(
m.vertices, m.faces, 0.1)
s = m.copy()
f = m.copy()
d = m.copy()
assert m.is_volume
# Equal Weights
lap = g.trimesh.smoothing.laplacian_calculation(
mesh=m, equal_weight=True)
g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, lap)
g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)
assert s.is_volume
assert f.is_volume
assert d.is_volume
assert g.np.isclose(s.volume, m.volume, rtol=0.1)
assert g.np.isclose(f.volume, m.volume, rtol=0.1)
assert g.np.isclose(d.volume, m.volume, rtol=0.1)
s = m.copy()
f = m.copy()
d = m.copy()
# umbrella Weights
lap = g.trimesh.smoothing.laplacian_calculation(m, equal_weight=False)
g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, lap)
g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)
assert s.is_volume
assert f.is_volume
assert d.is_volume
assert g.np.isclose(s.volume, m.volume, rtol=0.1)
assert g.np.isclose(f.volume, m.volume, rtol=0.1)
assert g.np.isclose(d.volume, m.volume, rtol=0.1)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
| Python | 0.000001 |
3f85183da738e337d51a8523524eb992e2dd29bf | Reorder tests | tests/test_tictactoe.py | tests/test_tictactoe.py | import unittest
from games import TicTacToe
class TestTicTacToe(unittest.TestCase):
def setUp(self):
self.game = TicTacToe()
def test_copy(self):
self.game.make_moves([1, 3, 2])
clone = self.game.copy()
self.assertItemsEqual(self.game.legal_moves(), clone.legal_moves())
self.assertEqual(self.game.cur_player, clone.cur_player)
self.assertEqual(self.game, clone)
def test_cur_player_start(self):
self.assertEqual(self.game.cur_player, 0)
def test_cur_player_after_one_move(self):
self.game.make_move(3)
self.assertEqual(self.game.cur_player, 1)
def test_cur_player_after_two_moves(self):
self.game.make_move(3)
self.game.make_move(7)
self.assertEqual(self.game.cur_player, 0)
def test_is_not_over_at_start(self):
self.assertFalse(self.game.is_over())
def test_is_over_at_end_of_game(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertTrue(self.game.is_over())
def test_make_move_returns_self(self):
self.assertIs(self.game.make_move(1), self.game)
def test_make_moves(self):
self.game.make_moves([1, 2, 3])
actual = self.game.legal_moves()
expected = [4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_make_moves_returns_self(self):
actual = self.game.make_moves([1, 2, 3])
expected = self.game
self.assertEquals(actual, expected)
def test_legal_moves_start(self):
actual = self.game.legal_moves()
expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_after_one_move(self):
self.game.make_move(1)
actual = self.game.legal_moves()
expected = [2, 3, 4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_after_two_moves(self):
self.game.make_move(3)
self.game.make_move(7)
actual = self.game.legal_moves()
expected = [1, 2, 4, 5, 6, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_are_empty_when_is_over(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertTrue(len(self.game.legal_moves()) == 0)
def test_outcomes_win_first_player(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertItemsEqual(self.game.outcomes(), ['W', 'L'])
def test_outcomes_win_second_player(self):
self.game.make_moves([1, 4, 2, 5, 9, 6])
self.assertItemsEqual(self.game.outcomes(), ['L', 'W'])
def test_outcomes_draw(self):
self.game.make_moves([1, 3, 2, 4, 6, 5, 7, 8, 9])
self.assertItemsEqual(self.game.outcomes(), ['D', 'D'])
| import unittest
from games import TicTacToe
class TestTicTacToe(unittest.TestCase):
def setUp(self):
self.game = TicTacToe()
def test_cur_player_start(self):
self.assertEqual(self.game.cur_player, 0)
def test_cur_player_after_one_move(self):
self.game.make_move(3)
self.assertEqual(self.game.cur_player, 1)
def test_cur_player_after_two_moves(self):
self.game.make_move(3)
self.game.make_move(7)
self.assertEqual(self.game.cur_player, 0)
def test_is_not_over_at_start(self):
self.assertFalse(self.game.is_over())
def test_is_over_at_end_of_game(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertTrue(self.game.is_over())
def test_make_move_returns_self(self):
self.assertIs(self.game.make_move(1), self.game)
def test_make_moves(self):
self.game.make_moves([1, 2, 3])
actual = self.game.legal_moves()
expected = [4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_make_moves_returns_self(self):
actual = self.game.make_moves([1, 2, 3])
expected = self.game
self.assertEquals(actual, expected)
def test_legal_moves_start(self):
actual = self.game.legal_moves()
expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_after_one_move(self):
self.game.make_move(1)
actual = self.game.legal_moves()
expected = [2, 3, 4, 5, 6, 7, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_after_two_moves(self):
self.game.make_move(3)
self.game.make_move(7)
actual = self.game.legal_moves()
expected = [1, 2, 4, 5, 6, 8, 9]
self.assertItemsEqual(actual, expected)
def test_legal_moves_are_empty_when_is_over(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertTrue(len(self.game.legal_moves()) == 0)
def test_outcomes_win_first_player(self):
self.game.make_moves([1, 4, 2, 5, 3])
self.assertItemsEqual(self.game.outcomes(), ['W', 'L'])
def test_outcomes_win_second_player(self):
self.game.make_moves([1, 4, 2, 5, 9, 6])
self.assertItemsEqual(self.game.outcomes(), ['L', 'W'])
def test_outcomes_draw(self):
self.game.make_moves([1, 3, 2, 4, 6, 5, 7, 8, 9])
self.assertItemsEqual(self.game.outcomes(), ['D', 'D'])
def test_copy(self):
self.game.make_moves([1, 3, 2])
clone = self.game.copy()
self.assertItemsEqual(self.game.legal_moves(), clone.legal_moves())
self.assertEqual(self.game.cur_player, clone.cur_player)
self.assertEqual(self.game, clone)
| Python | 0 |
379822ab16229174071172792132cbb549c5f841 | celery task for requests | ekanalyzer.py | ekanalyzer.py | import os
from flask import Flask
from flask import render_template
from flask import request, redirect, url_for
from werkzeug import secure_filename
import hashlib
from pymongo import Connection
import dpkt
import sys
import socket
from celery import Celery
from requests import Request, Session
# FIXME: move to config.py
ALLOWED_EXTENSIONS = set(['pcap'])
def create_app():
return Flask("ekanalyzer")
app = create_app()
app.config.from_pyfile('config.py')
connection = Connection(app.config['MONGODB_SERVER'] , app.config['MONGODB_PORT'])
db = connection.ekanalyzer
app.debug = True
celery = Celery('ekanalyzer', broker=app.config['BROKER_URL'] )
@celery.task
def perform_results(hash):
try:
pcap = {'hash' : hash}
result = db.pcap.find(pcap)
if result.count() > 0:
return
else:
db.pcap.insert(pcap)
f = open(app.config['UPLOAD_FOLDER'] + hash)
pcap = dpkt.pcap.Reader(f)
for ts, buf in pcap:
eth = dpkt.ethernet.Ethernet(buf)
ip = eth.data
tcp = ip.data
# FIXME: assuming only http traffic on port 80
if tcp.dport == 80 and len(tcp.data) > 0:
http = dpkt.http.Request(tcp.data)
ipaddress = socket.inet_ntoa(ip.dst)
data = { 'ip' : ipaddress,
'uri' : http.uri,
'method' : http.method,
'data' : http.data,
'headers' : http.headers,
'hash': hash
}
db.requests.insert(data)
print "Data imported"
status = process_requests(hash)
except NameError as e:
print e
except :
print "Unexpected error:", sys.exc_info()
pass
def process_requests(hash):
request = { 'hash' : hash}
result = db.requests.find(request)
for r in result:
print process_request.delay(r['ip'], r['uri'], r['method'], r['headers'], r['data'])
@celery.task
def process_request(ip, uri, method, headers, data):
#FIXME: port 80
url = "http://{0}:80{1}".format(ip, uri)
s = Session()
req = Request(method, url,
data=data,
headers=headers
)
prepped = req.prepare()
resp = s.send(prepped)
return resp.status_code
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@app.route('/upload-ek/', methods=['POST'])
def upload_file():
file = request.files['pcap']
if file and allowed_file(file.filename):
hash = hashlib.sha256()
try:
for chunk in file.chunks():
hash.update(chunk)
finally:
file.seek(0)
hash_name = "%s" % (hash.hexdigest())
file.save(os.path.join(app.config['UPLOAD_FOLDER'], hash_name))
return redirect(url_for('launch', hash=hash_name))
@app.route('/launch/<hash>/')
def launch(hash):
perform_results.delay(hash)
return render_template('launch.html', hash=hash)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == "__main__":
app.run(debug=True)
| import os
from flask import Flask
from flask import render_template
from flask import request, redirect, url_for
from werkzeug import secure_filename
import hashlib
from pymongo import Connection
import dpkt
import sys
import socket
from celery import Celery
from requests import Request, Session
# FIXME: move to config.py
ALLOWED_EXTENSIONS = set(['pcap'])
def create_app():
return Flask("ekanalyzer")
app = create_app()
app.config.from_pyfile('config.py')
connection = Connection(app.config['MONGODB_SERVER'] , app.config['MONGODB_PORT'])
db = connection.ekanalyzer
app.debug = True
celery = Celery('ekanalyzer', broker=app.config['BROKER_URL'] )
@celery.task
def perform_results(hash):
try:
pcap = {'hash' : hash}
result = db.pcap.find(pcap)
if result.count() > 0:
return
else:
db.pcap.insert(pcap)
f = open(app.config['UPLOAD_FOLDER'] + hash)
pcap = dpkt.pcap.Reader(f)
for ts, buf in pcap:
eth = dpkt.ethernet.Ethernet(buf)
ip = eth.data
tcp = ip.data
# FIXME: assuming only http traffic on port 80
if tcp.dport == 80 and len(tcp.data) > 0:
http = dpkt.http.Request(tcp.data)
ipaddress = socket.inet_ntoa(ip.dst)
data = { 'ip' : ipaddress,
'uri' : http.uri,
'method' : http.method,
'data' : http.data,
'headers' : http.headers,
'hash': hash
}
db.requests.insert(data)
print "Data imported"
status = process_requests(hash)
except NameError as e:
print e
except :
print "Unexpected error:", sys.exc_info()
pass
def process_requests(hash):
request = { 'hash' : hash}
result = db.requests.find(request)
for r in result:
print process_request(r['ip'], r['uri'], r['method'], r['headers'], r['data'])
def process_request(ip, uri, method, headers, data):
#FIXME: port 80
url = "http://{0}:80{1}".format(ip, uri)
s = Session()
req = Request(method, url,
data=data,
headers=headers
)
prepped = req.prepare()
resp = s.send(prepped)
return resp.status_code
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@app.route('/upload-ek/', methods=['POST'])
def upload_file():
file = request.files['pcap']
if file and allowed_file(file.filename):
hash = hashlib.sha256()
try:
for chunk in file.chunks():
hash.update(chunk)
finally:
file.seek(0)
hash_name = "%s" % (hash.hexdigest())
file.save(os.path.join(app.config['UPLOAD_FOLDER'], hash_name))
return redirect(url_for('launch', hash=hash_name))
@app.route('/launch/<hash>/')
def launch(hash):
perform_results.delay(hash)
return render_template('launch.html', hash=hash)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == "__main__":
app.run(debug=True)
| Python | 0.999885 |
337f6d67bce331a26b44e65671c63f223f0c5ebc | add help | ckstyle/command/ConsoleCommandParser.py | ckstyle/command/ConsoleCommandParser.py | #/usr/bin/python
#encoding=utf-8
import sys
import os
import getopt
import string
from ckstyle.doCssCheck import checkFile, checkDir, checkDirRecursively
import CommandFileParser
def usage():
print '''
[Usage]
ckstyle -h / ckstyle --help
ckstyle
ckstyle file.css
ckstyle dir
ckstyle -r dir
ckstyle -p -r dir
ckstyle -c xxx.ini
ckstyle -c xxx.ini -r -p
[Example]
ckstyle -c xxx.ini -r -p -c xxx.ini --extension=.test.txt --include=all --exclude=none --errorLevel=2 dirpath
[Options]
-h / --help show help
-r check files in directory recursively
-p print check result to console(delete result files at the same time)
-c / --config specify the config file name(use "~/ckstyle.ini" as default)
--include specify rules(can be configed in .ini file)
--exclude specify exclude rules(can be configed in .ini file)
--extension specify check result file extension(use ".ckstyle.txt" as default)
--errorLevel specify error level(0-error, 1-warning, 2-log)
'''
def getDefaultConfigPath():
homedir = os.getenv('USERPROFILE') or os.getenv('HOME')
return os.path.realpath(os.path.join(homedir, 'ckstyle.ini'))
def getErrorLevel(value):
if value.strip() == '':
return None
try:
realValue = string.atoi(value)
errorLevel = realValue
if errorLevel > 2:
errorLevel = 2
elif errorLevel < 0:
errorLevel = 0
return errorLevel
except ValueError:
print '[error] --errorLevel option should be number\n'
return None
def getExtension(value):
if value.strip() == '':
return None
value = value.strip()
if not value.startswith('.'):
value = '.' + value
return value
def getValue(value):
if value.strip() == '':
return None
return value.strip()
def getConfigFile(value):
value = value.strip()
if value == '':
print '[error] no config file, ckstyle.ini path should be after -c.\n'
return None
if os.path.exists(value) and value.endswith('.ini'):
return value
else:
print '[error] %s does not exist, or is not a ".ini" file' % value
return None
def parseCmdArgs(config, opts, args, parser):
recur = False
printFlag = False
configFile = None
errorLevel = None
extension = None
include = None
exclude = None
for op, value in opts:
if op == "-r":
recur = True
elif op == '-p':
printFlag = True
elif op == '-c' or op == '-config':
configFile = getConfigFile(value)
elif op == "--help" or op == '-h':
usage()
sys.exit()
elif op == '--extension':
extension = getExtension(value)
elif op == '--errorLevel':
errorLevel = getErrorLevel(value)
elif op == '--include':
include = getValue(value)
elif op == '--exclude':
exclude = getValue(value)
if configFile is not None :
parser.load(configFile)
config = parser.args
if recur: config.recursive = True
if printFlag: config.printFlag = True
if errorLevel: config.errorLevel = errorLevel
if extension: config.extension = extension
if include: config.include = include
if exclude: config.exclude = exclude
return config
def handleCmdArgs():
try:
opts, args = getopt.getopt(sys.argv[1:], "hrpc:", ["help", "config=", "errorLevel=", "extension=", "include=", "exclude="])
except getopt.GetoptError, e:
print '[option error] %s ' % e.msg
return
configFile = getDefaultConfigPath()
parser = CommandFileParser.CommandFileParser(configFile)
config = parser.args
if len(args) == 0 and len(opts) == 0:
checkDir(os.getcwd(), config = config)
return
config = parseCmdArgs(config, opts, args, parser)
filePath = None
if len(args) == 0:
filePath = os.getcwd()
else:
filePath = args[0]
if not os.path.exists(filePath):
print '[error] %s not exist' % filePath
return
if filePath.endswith('.css'):
checkFile(filePath, config = config)
return
checkDir(filePath, config = config)
| import sys
import os
import getopt
import string
from ckstyle.doCssCheck import checkFile, checkDir, checkDirRecursively
import CommandFileParser
def usage():
print '''
[Usage]
ckstyle -h / ckstyle --help
ckstyle file.css
ckstyle dir
ckstyle -r dir
ckstyle -p file.css
ckstyle -p -r dir
ckstyle -c config_file_path
ckstyle -c config_file_path -r -p
'''
def getDefaultConfigPath():
homedir = os.getenv('USERPROFILE') or os.getenv('HOME')
return os.path.realpath(os.path.join(homedir, 'ckstyle.ini'))
def getErrorLevel(value):
if value.strip() == '':
return None
try:
realValue = string.atoi(value)
errorLevel = realValue
if errorLevel > 2:
errorLevel = 2
elif errorLevel < 0:
errorLevel = 0
return errorLevel
except ValueError:
print '[error] --errorLevel option should be number\n'
return None
def getExtension(value):
if value.strip() == '':
return None
value = value.strip()
if not value.startswith('.'):
value = '.' + value
return value
def getValue(value):
if value.strip() == '':
return None
return value.strip()
def getConfigFile(value):
value = value.strip()
if value == '':
print '[error] no config file, ckstyle.ini path should be after -c.\n'
return None
if os.path.exists(value) and value.endswith('.ini'):
return value
else:
print '[error] %s does not exist, or is not a ".ini" file' % value
return None
def parseCmdArgs(config, opts, args, parser):
recur = False
printFlag = False
configFile = None
errorLevel = None
extension = None
include = None
exclude = None
for op, value in opts:
if op == "-r":
recur = True
elif op == '-p':
printFlag = True
elif op == '-c' or op == '-config':
configFile = getConfigFile(value)
elif op == "--help" or op == '-h':
usage()
sys.exit()
elif op == '--extension':
extension = getExtension(value)
elif op == '--errorLevel':
errorLevel = getErrorLevel(value)
elif op == '--include':
include = getValue(value)
elif op == '--exclude':
exclude = getValue(value)
if configFile is not None :
parser.load(configFile)
config = parser.args
if recur: config.recursive = True
if printFlag: config.printFlag = True
if errorLevel: config.errorLevel = errorLevel
if extension: config.extension = extension
if include: config.include = include
if exclude: config.exclude = exclude
return config
def handleCmdArgs():
try:
opts, args = getopt.getopt(sys.argv[1:], "hrpc:", ["help", "config=", "errorLevel=", "extension=", "include=", "exclude="])
except getopt.GetoptError, e:
print '[option error] %s ' % e.msg
return
configFile = getDefaultConfigPath()
parser = CommandFileParser.CommandFileParser(configFile)
config = parser.args
if len(args) == 0 and len(opts) == 0:
checkDir(os.getcwd(), config = config)
return
config = parseCmdArgs(config, opts, args, parser)
filePath = None
if len(args) == 0:
filePath = os.getcwd()
else:
filePath = args[0]
if not os.path.exists(filePath):
print '[error] %s not exist' % filePath
return
if filePath.endswith('.css'):
checkFile(filePath, config = config)
return
checkDir(filePath, config = config)
| Python | 0 |
12a85a17194610f81c9ff0c73ea69f4adfc2b307 | remove old routine | floss/render/sanitize.py | floss/render/sanitize.py | import string
def sanitize_string_for_printing(s: str) -> str:
"""
Return sanitized string for printing to cli.
"""
sanitized_string = s.replace("\\\\", "\\") # print single backslashes
sanitized_string = "".join(c for c in sanitized_string if c in string.printable)
return sanitized_string | import string
def sanitize_string_for_printing(s: str) -> str:
"""
Return sanitized string for printing to cli.
"""
sanitized_string = s.replace("\\\\", "\\") # print single backslashes
sanitized_string = "".join(c for c in sanitized_string if c in string.printable)
return sanitized_string
def sanitize_string_for_script(s: str) -> str:
"""
Return sanitized string that is added to IDA script source.
"""
sanitized_string = sanitize_string_for_printing(s)
sanitized_string = sanitized_string.replace("\\", "\\\\")
sanitized_string = sanitized_string.replace('"', '\\"')
return sanitized_string
| Python | 0.000654 |
8379d56ac1be68c9c1d255893644813df8300ed8 | add verbose name | awesomepose/categories/models/category.py | awesomepose/categories/models/category.py | from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
class Category(MPTTModel):
name = models.CharField(max_length=50, unique=True)
parent = TreeForeignKey('self', null=True, blank=True, related_name="children", db_index=True)
class MPTTMeta:
order_insertion_by = ['name']
class Meta:
verbose_name = "카테고리"
verbose_name_plural = verbose_name
def __str__(self):
return self.name
| from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
class Category(models.Model):
name = models.CharField(max_length=50, unique=True)
parent = TreeForeignKey('self', null=True, blank=True, related_name="children", db_index=True)
class MPTTMeta:
order_insertion_by = ['name']
def __str__(self):
return self.name
| Python | 0.999996 |
bcd8d27194131e48d73d843bdae9930e6720130f | Update Vartype | dimod/vartypes.py | dimod/vartypes.py | """
Enumeration of valid variable types for binary quadratic models.
Examples:
This example shows easy access to different Vartypes, which are in the main
namespace.
>>> vartype = dimod.SPIN
>>> print(vartype)
Vartype.SPIN
>>> vartype = dimod.BINARY
>>> print(vartype)
Vartype.BINARY
>>> vartype = dimod.Vartype.SPIN
>>> print(vartype)
Vartype.SPIN
>>> isinstance(vartype, dimod.Vartype)
True
This example shows access by value or name.
>>> print(dimod.Vartype({0, 1}))
Vartype.BINARY
>>> print(dimod.Vartype['SPIN'])
Vartype.SPIN
This example uses the `.value` parameter to validate.
>>> sample = {'u': -1, 'v': 1}
>>> vartype = dimod.Vartype.SPIN
>>> all(val in vartype.value for val in sample.values())
True
"""
import enum
__all__ = ['Vartype', 'SPIN', 'BINARY']
class Vartype(enum.Enum):
"""An :py:class:`~enum.Enum` over the types of variables for the binary quadratic model.
Attributes:
SPIN (:class:`.Vartype`): Vartype for spin-valued models; variables of
the model are either -1 or 1.
BINARY (:class:`.Vartype`): Vartype for binary models; variables of the
model are either 0 or 1.
"""
SPIN = frozenset({-1, 1})
BINARY = frozenset({0, 1})
SPIN = Vartype.SPIN
BINARY = Vartype.BINARY
| """
Vartype is an enumeration of the valid types for variables in a binary quadratic models.
Examples:
>>> vartype = dimod.Vartype.SPIN
>>> print(vartype)
Vartype.SPIN
>>> isinstance(vartype, dimod.Vartype)
True
Access can also be by value or name.
>>> print(dimod.Vartype({0, 1}))
Vartype.BINARY
>>> print(dimod.Vartype['SPIN'])
Vartype.SPIN
To check correctness, use the `.value` parameter.
>>> sample = {'u': -1, 'v': 1}
>>> vartype = dimod.Vartype.SPIN
>>> all(val in vartype.value for val in sample.values())
True
The different Vartypes are also in the main namespace
for easy access.
>>> vartype = dimod.SPIN
>>> print(vartype)
Vartype.SPIN
>>> vartype = dimod.BINARY
>>> print(vartype)
Vartype.BINARY
"""
import enum
__all__ = ['Vartype', 'SPIN', 'BINARY']
class Vartype(enum.Enum):
"""An :py:class:`~enum.Enum` over the types of variables for the binary quadratic model.
Attributes:
SPIN (:class:`.Vartype`): The vartype for spin-valued models. That
is the variables of the model are either -1 or 1.
BINARY (:class:`.Vartype`): The vartype for binary models. That is
the variables of the model are either 0 or 1.
"""
SPIN = frozenset({-1, 1})
BINARY = frozenset({0, 1})
SPIN = Vartype.SPIN
BINARY = Vartype.BINARY
| Python | 0 |
8ed7b3e4367f5ed43a901fee048228c6e6aeeb8c | fix for edge case with invalid user logged in | backend/unpp_api/apps/management/views.py | backend/unpp_api/apps/management/views.py | from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.generics import CreateAPIView, ListAPIView, UpdateAPIView
from account.models import User
from agency.models import AgencyOffice
from agency.permissions import AgencyPermission
from common.pagination import SmallPagination
from common.permissions import HasUNPPPermission
from management.filters import AgencyUserFilter, PartnerUserFilter
from management.serializers import AgencyUserManagementSerializer, PartnerOfficeManagementSerializer, \
AgencyOfficeManagementSerializer, PartnerUserManagementSerializer
from partner.models import Partner
from partner.permissions import PartnerPermission
class UserViewSet(CreateAPIView, ListAPIView, UpdateAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
pagination_class = SmallPagination
filter_backends = (DjangoFilterBackend,)
@property
def filter_class(self):
if self.request.agency_member:
return AgencyUserFilter
elif self.request.partner_member:
return PartnerUserFilter
def get_serializer_class(self):
if self.request.agency_member:
return AgencyUserManagementSerializer
elif self.request.partner_member:
return PartnerUserManagementSerializer
def get_queryset(self):
queryset = User.objects.none()
if self.request.agency_member:
queryset = User.objects.filter(agency_members__office__agency=self.request.user.agency).distinct('id')
elif self.request.partner_member:
query = Q(partner_members__partner=self.request.partner_member.partner)
if self.request.partner_member.partner.is_hq:
query |= Q(partner_members__partner__hq=self.request.partner_member.partner)
queryset = User.objects.filter(query).distinct('id')
# We don't want user to edit own account
return queryset.exclude(id=self.request.user.id)
class OfficeListView(ListAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
def get_queryset(self):
if self.request.agency_member:
return AgencyOffice.objects.filter(agency=self.request.user.agency)
elif self.request.partner_member:
query = Q(id=self.request.partner_member.partner_id)
if self.request.partner_member.partner.is_hq:
query |= Q(hq=self.request.partner_member.partner)
return Partner.objects.filter(query)
return Partner.objects.none()
def get_serializer_class(self):
if self.request.agency_member:
return AgencyOfficeManagementSerializer
else:
return PartnerOfficeManagementSerializer
| from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.generics import CreateAPIView, ListAPIView, UpdateAPIView
from account.models import User
from agency.models import AgencyOffice
from agency.permissions import AgencyPermission
from common.pagination import SmallPagination
from common.permissions import HasUNPPPermission
from management.filters import AgencyUserFilter, PartnerUserFilter
from management.serializers import AgencyUserManagementSerializer, PartnerOfficeManagementSerializer, \
AgencyOfficeManagementSerializer, PartnerUserManagementSerializer
from partner.models import Partner
from partner.permissions import PartnerPermission
class UserViewSet(CreateAPIView, ListAPIView, UpdateAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
pagination_class = SmallPagination
filter_backends = (DjangoFilterBackend,)
@property
def filter_class(self):
if self.request.agency_member:
return AgencyUserFilter
elif self.request.partner_member:
return PartnerUserFilter
def get_serializer_class(self):
if self.request.agency_member:
return AgencyUserManagementSerializer
elif self.request.partner_member:
return PartnerUserManagementSerializer
def get_queryset(self):
if self.request.agency_member:
queryset = User.objects.filter(agency_members__office__agency=self.request.user.agency).distinct('id')
elif self.request.partner_member:
query = Q(partner_members__partner=self.request.partner_member.partner)
if self.request.partner_member.partner.is_hq:
query |= Q(partner_members__partner__hq=self.request.partner_member.partner)
queryset = User.objects.filter(query).distinct('id')
# We don't want user to edit own account
return queryset.exclude(id=self.request.user.id)
class OfficeListView(ListAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
def get_queryset(self):
if self.request.agency_member:
return AgencyOffice.objects.filter(agency=self.request.user.agency)
elif self.request.partner_member:
query = Q(id=self.request.partner_member.partner_id)
if self.request.partner_member.partner.is_hq:
query |= Q(hq=self.request.partner_member.partner)
return Partner.objects.filter(query)
def get_serializer_class(self):
if self.request.agency_member:
return AgencyOfficeManagementSerializer
elif self.request.partner_member:
return PartnerOfficeManagementSerializer
| Python | 0 |
0cb9b65fc0030922fea122a82451fef0d6d3653b | update version 1.0.0 | flyingpigeon/__init__.py | flyingpigeon/__init__.py | from .wsgi import application
from .demo import main
__version__ = "1.0.0"
| from .wsgi import application
from .demo import main
__version__ = "0.11.0"
| Python | 0.000001 |
cfc6083c58d151934403ccf55444b122fec46604 | Resolve here | takeyourmeds/utils/test.py | takeyourmeds/utils/test.py | from django.test import TestCase
from django.shortcuts import resolve_url
from django.contrib.auth import get_user_model
User = get_user_model()
class TestCase(TestCase):
def setUp(self):
self.user = self.create_user('testuser')
def assertStatusCode(self, status_code, fn, urlconf, *args, **kwargs):
if kwargs.pop('login', False):
user = kwargs.pop('user', self.user)
self.client.login(email=user.email, password='password')
response = fn(resolve_url(urlconf, *args, **kwargs))
self.assertEqual(
response.status_code,
status_code,
"Got HTTP %d but expected HTTP %d. Response:\n%s" % (
response.status_code,
status_code,
response,
)
)
return response
def assertGET(self, status_code, urlconf, *args, **kwargs):
return self.assertStatusCode(
status_code,
self.client.get,
urlconf,
*args,
**kwargs
)
def assertPOST(self, status_code, data, *args, **kwargs):
return self.assertStatusCode(
status_code, lambda x: self.client.post(x, data), *args, **kwargs
)
def assertRedirectsTo(self, response, urlconf, *args, **kwargs):
status_code = kwargs.pop('status_code', 302)
target_status_code = kwargs.pop('target_status_code', 200)
return self.assertRedirects(
response,
resolve_url(urlconf, *args, **kwargs),
status_code,
target_status_code,
)
def create_user(self, email):
return User.objects.create_user(email, 'password')
class SuperuserTestCase(TestCase):
def setUp(self):
super(SuperuserTestCase, self).setUp()
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
| from django.test import TestCase
from django.shortcuts import resolve_url
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
User = get_user_model()
class TestCase(TestCase):
def setUp(self):
self.user = self.create_user('testuser')
def assertStatusCode(self, status_code, fn, urlconf, *args, **kwargs):
if kwargs.pop('login', False):
user = kwargs.pop('user', self.user)
self.client.login(email=user.email, password='password')
response = fn(resolve_url(urlconf, *args, **kwargs))
self.assertEqual(
response.status_code,
status_code,
"Got HTTP %d but expected HTTP %d. Response:\n%s" % (
response.status_code,
status_code,
response,
)
)
return response
def assertGET(self, status_code, urlconf, *args, **kwargs):
return self.assertStatusCode(
status_code,
self.client.get,
urlconf,
*args,
**kwargs
)
def assertPOST(self, status_code, data, *args, **kwargs):
return self.assertStatusCode(
status_code, lambda x: self.client.post(x, data), *args, **kwargs
)
def assertRedirectsTo(self, response, urlconf, *args, **kwargs):
status_code = kwargs.pop('status_code', 302)
target_status_code = kwargs.pop('target_status_code', 200)
return self.assertRedirects(
response,
reverse(urlconf, args=args, kwargs=kwargs),
status_code,
target_status_code,
)
def create_user(self, email):
return User.objects.create_user(email, 'password')
class SuperuserTestCase(TestCase):
def setUp(self):
super(SuperuserTestCase, self).setUp()
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
| Python | 0 |
d9ca3d7113423a84026ad59e1369321baa54d532 | Add a simple neutron_hanler | drcontroller/replication/controller/neutron_handler.py | drcontroller/replication/controller/neutron_handler.py | import logging
import base_handler
class NeutronHandler(base_handler.BaseHandler):
def __init__(self, set_conf, handle_type):
'''
set_conf: the configuration file path of keystone authorization
handle_type: the handle service type, eg, glance, nova, neutron
'''
self.logger = logging.getLogger("NeutronHandler")
self.logger.info('Init NeutronHandler')
super(NeutronHandler, self).__init__(set_conf, handle_type)
| import logging
def post_handle(message):
pass
def delete_handle(message):
pass
def put_handle(mesage):
pass
class NeutronHandler(object):
def __init__(self):
self.logger = logging.getLogger("NeutronHandler")
self.logger.info('Init NeutronHandler')
def accept(self, *req, **kwargs):
self.logger = logging.getLogger("NeutronHandler:accept")
self.logger.info("--- Hello Neutron ---")
return ['Hello Neutron']
| Python | 0.999996 |
ca8622f5af66ef01c9c185065f2e77fca30bef79 | Remove unused update method | irctk/nick.py | irctk/nick.py | import re
class Nick(object):
IRC_USERHOST_REGEX = re.compile(r'^(.*)!(.*)@(.*)$')
@classmethod
def parse(cls, client, userhost):
m = cls.IRC_USERHOST_REGEX.match(userhost)
if m:
return cls(client, m.group(1), m.group(2), m.group(3))
return cls(client, host=userhost)
def __init__(self, client, nick='', ident='', host=''):
self.client = client
self.nick = nick
self.ident = ident
self.host = host
def __str__(self):
return self.nick
def __repr__(self):
return '<Nick %s!%s@%s>' % (self.nick, self.ident, self.host)
def __eq__(self, other):
return self.client.irc_equal(str(other), self.nick)
@property
def channels(self):
"""
Returns all the Channels that both the nick and the client has joined.
"""
return [channel for channel in self.client.channels if channel.has_nick(self)]
| import re
class Nick(object):
IRC_USERHOST_REGEX = re.compile(r'^(.*)!(.*)@(.*)$')
@classmethod
def parse(cls, client, userhost):
m = cls.IRC_USERHOST_REGEX.match(userhost)
if m:
return cls(client, m.group(1), m.group(2), m.group(3))
return cls(client, host=userhost)
def __init__(self, client, nick='', ident='', host=''):
self.client = client
self.nick = nick
self.ident = ident
self.host = host
def __str__(self):
return self.nick
def __repr__(self):
return '<Nick %s!%s@%s>' % (self.nick, self.ident, self.host)
def __eq__(self, other):
return self.client.irc_equal(str(other), self.nick)
@property
def channels(self):
"""
Returns all the Channels that both the nick and the client has joined.
"""
return [channel for channel in self.client.channels if channel.has_nick(self)]
def update(self):
if self == self.client.nick:
self.client.nick.ident = self.ident
self.client.nick.host = self.host
for channel in self.client.channels:
n = channel.find_nick(self)
if n:
n.ident = self.ident
n.host = self.host
| Python | 0.000001 |
52c17672d73a9461771c3ec09465d91992160fc5 | Fix quota init migration | src/nodeconductor_saltstack/exchange/migrations/0004_init_quotas.py | src/nodeconductor_saltstack/exchange/migrations/0004_init_quotas.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from uuid import uuid4
from django.contrib.contenttypes.models import ContentType
from django.db import models, migrations
GLOBAL_MAILBOX_SIZE_QUOTA = 'global_mailbox_size'
USER_COUNT_QUOTA = 'user_count'
def convert_mailbox_size_to_mb(apps, schema_editor):
Tenant = apps.get_model('exchange', 'ExchangeTenant')
for tenant in Tenant.objects.all():
tenant.mailbox_size *= 1024
tenant.save()
def init_quotas(apps, schema_editor):
Quota = apps.get_model('quotas', 'Quota')
Tenant = apps.get_model('exchange', 'ExchangeTenant')
tenant_ct = ContentType.objects.get_for_model(Tenant)
for tenant in Tenant.objects.all():
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=GLOBAL_MAILBOX_SIZE_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=GLOBAL_MAILBOX_SIZE_QUOTA, limit=tenant.max_users*tenant.mailbox_size, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=USER_COUNT_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=USER_COUNT_QUOTA, limit=tenant.max_users, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
class Migration(migrations.Migration):
dependencies = [
('exchange', '0003_rename_tenant_model'),
]
operations = [
migrations.AlterField(
model_name='exchangetenant',
name='mailbox_size',
field=models.PositiveSmallIntegerField(help_text=b'Maximum size of single mailbox, MB'),
preserve_default=True,
),
migrations.RunPython(convert_mailbox_size_to_mb),
migrations.RunPython(init_quotas),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from uuid import uuid4
from django.contrib.contenttypes.models import ContentType
from django.db import models, migrations
GLOBAL_MAILBOX_SIZE_QUOTA = 'global_mailbox_size'
USER_COUNT_QUOTA = 'user_count'
def convert_mailbox_size_to_mb(apps, schema_editor):
Tenant = apps.get_model('exchange', 'Tenant')
for tenant in Tenant.objects.all():
tenant.mailbox_size *= 1024
tenant.save()
def init_quotas(apps, schema_editor):
Quota = apps.get_model('quotas', 'Quota')
Tenant = apps.get_model('exchange', 'Tenant')
tenant_ct = ContentType.objects.get_for_model(Tenant)
for tenant in Tenant.objects.all():
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=GLOBAL_MAILBOX_SIZE_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=GLOBAL_MAILBOX_SIZE_QUOTA, limit=tenant.max_users*tenant.mailbox_size, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=USER_COUNT_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=USER_COUNT_QUOTA, limit=tenant.max_users, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
class Migration(migrations.Migration):
dependencies = [
('exchange', '0003_rename_tenant_model'),
]
operations = [
migrations.AlterField(
model_name='tenant',
name='mailbox_size',
field=models.PositiveSmallIntegerField(help_text=b'Maximum size of single mailbox, MB'),
preserve_default=True,
),
migrations.RunPython(convert_mailbox_size_to_mb),
migrations.RunPython(init_quotas),
]
| Python | 0.000002 |
e91eb6aaad52010b7441595cc55695e6ee21b360 | Add support for setting login_service | oauthenticator/azuread.py | oauthenticator/azuread.py | """
Custom Authenticator to use Azure AD with JupyterHub
"""
import json
import jwt
import os
import urllib
from tornado.auth import OAuth2Mixin
from tornado.log import app_log
from tornado.httpclient import HTTPRequest, AsyncHTTPClient
from jupyterhub.auth import LocalAuthenticator
from traitlets import Unicode, default
from .oauth2 import OAuthLoginHandler, OAuthenticator
def azure_token_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/token'.format(tentant)
def azure_authorize_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/authorize'.format(
tentant)
class AzureAdMixin(OAuth2Mixin):
tenant_id = os.environ.get('AAD_TENANT_ID', '')
_OAUTH_ACCESS_TOKEN_URL = azure_token_url_for(tenant_id)
_OAUTH_AUTHORIZE_URL = azure_authorize_url_for(tenant_id)
class AzureAdLoginHandler(OAuthLoginHandler, AzureAdMixin):
pass
class AzureAdOAuthenticator(OAuthenticator):
login_service = Unicode(
os.environ.get('LOGIN_SERVICE', 'Azure AD'),
config=True,
help="""Azure AD domain name string, e.g. My College"""
)
login_handler = AzureAdLoginHandler
tenant_id = Unicode(config=True)
username_claim = Unicode(config=True)
@default('tenant_id')
def _tenant_id_default(self):
return os.environ.get('AAD_TENANT_ID', '')
@default('username_claim')
def _username_claim_default(self):
return 'name'
async def authenticate(self, handler, data=None):
code = handler.get_argument("code")
http_client = AsyncHTTPClient()
params = dict(
client_id=self.client_id,
client_secret=self.client_secret,
grant_type='authorization_code',
code=code,
resource=self.client_id,
redirect_uri=self.get_callback_url(handler))
data = urllib.parse.urlencode(
params, doseq=True, encoding='utf-8', safe='=')
url = azure_token_url_for(self.tenant_id)
headers = {
'Content-Type':
'application/x-www-form-urlencoded; ; charset=UTF-8"'
}
req = HTTPRequest(
url,
method="POST",
headers=headers,
body=data # Body is required for a POST...
)
resp = await http_client.fetch(req)
resp_json = json.loads(resp.body.decode('utf8', 'replace'))
# app_log.info("Response %s", resp_json)
access_token = resp_json['access_token']
id_token = resp_json['id_token']
decoded = jwt.decode(id_token, verify=False)
userdict = {"name": decoded[self.username_claim]}
userdict["auth_state"] = auth_state = {}
auth_state['access_token'] = access_token
# results in a decoded JWT for the user data
auth_state['user'] = decoded
return userdict
class LocalAzureAdOAuthenticator(LocalAuthenticator, AzureAdOAuthenticator):
"""A version that mixes in local system user creation"""
pass
| """
Custom Authenticator to use Azure AD with JupyterHub
"""
import json
import jwt
import os
import urllib
from tornado.auth import OAuth2Mixin
from tornado.log import app_log
from tornado.httpclient import HTTPRequest, AsyncHTTPClient
from jupyterhub.auth import LocalAuthenticator
from traitlets import Unicode, default
from .oauth2 import OAuthLoginHandler, OAuthenticator
def azure_token_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/token'.format(tentant)
def azure_authorize_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/authorize'.format(
tentant)
class AzureAdMixin(OAuth2Mixin):
tenant_id = os.environ.get('AAD_TENANT_ID', '')
_OAUTH_ACCESS_TOKEN_URL = azure_token_url_for(tenant_id)
_OAUTH_AUTHORIZE_URL = azure_authorize_url_for(tenant_id)
class AzureAdLoginHandler(OAuthLoginHandler, AzureAdMixin):
pass
class AzureAdOAuthenticator(OAuthenticator):
login_service = "Azure AD"
login_handler = AzureAdLoginHandler
tenant_id = Unicode(config=True)
username_claim = Unicode(config=True)
@default('tenant_id')
def _tenant_id_default(self):
return os.environ.get('AAD_TENANT_ID', '')
@default('username_claim')
def _username_claim_default(self):
return 'name'
async def authenticate(self, handler, data=None):
code = handler.get_argument("code")
http_client = AsyncHTTPClient()
params = dict(
client_id=self.client_id,
client_secret=self.client_secret,
grant_type='authorization_code',
code=code,
resource=self.client_id,
redirect_uri=self.get_callback_url(handler))
data = urllib.parse.urlencode(
params, doseq=True, encoding='utf-8', safe='=')
url = azure_token_url_for(self.tenant_id)
headers = {
'Content-Type':
'application/x-www-form-urlencoded; ; charset=UTF-8"'
}
req = HTTPRequest(
url,
method="POST",
headers=headers,
body=data # Body is required for a POST...
)
resp = await http_client.fetch(req)
resp_json = json.loads(resp.body.decode('utf8', 'replace'))
# app_log.info("Response %s", resp_json)
access_token = resp_json['access_token']
id_token = resp_json['id_token']
decoded = jwt.decode(id_token, verify=False)
userdict = {"name": decoded[self.username_claim]}
userdict["auth_state"] = auth_state = {}
auth_state['access_token'] = access_token
# results in a decoded JWT for the user data
auth_state['user'] = decoded
return userdict
class LocalAzureAdOAuthenticator(LocalAuthenticator, AzureAdOAuthenticator):
"""A version that mixes in local system user creation"""
pass
| Python | 0 |
edebc05d3df68faadc6c0547de7cc06f1469915e | make click example do stuff | fosscon2015/cli_click.py | fosscon2015/cli_click.py | #!/usr/bin/env python
import click
import json
try:
from collections import Counter
except ImportError:
# backport_collections needed for python 2.6 compatibility
from backport_collections import Counter
@click.command()
@click.argument('infile', type=click.File('r'), default='-')
@click.argument('outfile', type=click.File('w'), default='-')
@click.option('--verbose', '-v')
def cli(infile, outfile, verbose):
""" Count the occurances of characters in INFILE and save results in OUTFILE. """
click.echo("Hi!")
click.secho("infile: {0}".format(infile))
click.secho("outfile: {0}".format(outfile))
text = infile.read()
char_counts = Counter(text)
click.secho(json.dumps(dict(char_counts.most_common())), file=outfile,
fg='green')
if __name__ == '__main__':
cli()
| import click
@click.command()
def cli():
click.echo("I'm a click CLI.")
if __name__ == '__main__':
cli()
| Python | 0.000001 |
514c6c145e6e2f2c327fb89cfe780eb196508f79 | change absolute site url | publishconf.py | publishconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://blog.3strandcode.com/'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "3strandcode"
#GOOGLE_ANALYTICS = ""
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://3-strand-code.github.io/3sc-blog/'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "3strandcode"
#GOOGLE_ANALYTICS = ""
| Python | 0.000001 |
ca863134d20cda67c6e7f4abf1df595d5d549952 | Fix agent changelog command (#3233) | datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py | datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py | # (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from semver import parse_version_info
from ...git import git_tag_list
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
sorted by more recent first.
"""
agent_tags = sorted(parse_version_info(t) for t in git_tag_list(r'^\d+\.\d+\.\d+$'))
# default value for `to` is the latest tag
if to:
to = parse_version_info(to)
else:
to = agent_tags[-1]
since = parse_version_info(since)
# filter out versions according to the interval [since, to]
agent_tags = [t for t in agent_tags if since <= t <= to]
# reverse so we have descendant order
return [str(t) for t in reversed(agent_tags)]
| # (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from ...git import git_tag_list
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
sorted by more recent first.
"""
agent_tags = git_tag_list(r'^\d+\.\d+\.\d+$')
# default value for `to` is the latest tag
if not to:
to = agent_tags[-1]
# filter out versions according to the interval [since, to]
agent_tags = [t for t in agent_tags if since <= t <= to]
# reverse so we have descendant order
return agent_tags[::-1]
| Python | 0 |
2988d50cdef4a2c617f20817911826f2f7863f0e | Fix string comparison in the fedimg proc. | fedmsg_meta_fedora_infrastructure/fedimg.py | fedmsg_meta_fedora_infrastructure/fedimg.py | # This file is part of fedmsg.
# Copyright (C) 2014 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: David Gay <oddshocks@riseup.net
from fedmsg_meta_fedora_infrastructure import BaseProcessor
class FedimgProcessor(BaseProcessor):
__name__ = "fedimg"
__description__ = "The Fedora cloud image service"
__link__ = "https://github.com/oddshocks/fedimg"
# TODO: Create an icon and set its URL to __icon__
__docs__ = "https://fedoraproject.org/wiki/Features/" + \
"FirstClassCloudImages/KojiPlan"
__obj__ = "New cloud image upload"
def subtitle(self, msg, **config):
if 'image.upload' in msg['topic']:
if msg['msg']['status'] == "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started uploading to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] == "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} finished uploading to to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] == "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed to upload to {dest}')
return tmpl.format(image_name=name, dest=dest)
if 'image.test' in msg['topic']:
if msg['msg']['status'] == "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] == "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} finished testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] == "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
| # This file is part of fedmsg.
# Copyright (C) 2014 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: David Gay <oddshocks@riseup.net
from fedmsg_meta_fedora_infrastructure import BaseProcessor
class FedimgProcessor(BaseProcessor):
__name__ = "fedimg"
__description__ = "The Fedora cloud image service"
__link__ = "https://github.com/oddshocks/fedimg"
# TODO: Create an icon and set its URL to __icon__
__docs__ = "https://fedoraproject.org/wiki/Features/" + \
"FirstClassCloudImages/KojiPlan"
__obj__ = "New cloud image upload"
def subtitle(self, msg, **config):
if 'image.upload' in msg['topic']:
if msg['msg']['status'] is "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started uploading to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} finished uploading to to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed to upload to {dest}')
return tmpl.format(image_name=name, dest=dest)
if 'image.test' in msg['topic']:
if msg['msg']['status'] is "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} finished testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
| Python | 0.000266 |
7b585baab70e85e4d28f8827a11c0be9c6cc2938 | fix travis | _unittests/ut_cli/test_pymy_install_cli_tool.py | _unittests/ut_cli/test_pymy_install_cli_tool.py | # coding: latin-1
"""
@brief test log(time=1s)
"""
import sys
import os
import unittest
import warnings
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
try:
import pyquickhelper as skip_
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..",
"..",
"pyquickhelper",
"src")))
if path not in sys.path:
sys.path.append(path)
if "PYQUICKHELPER" in os.environ and len(os.environ["PYQUICKHELPER"]) > 0:
sys.path.append(os.environ["PYQUICKHELPER"])
import pyquickhelper as skip_
from src.pymyinstall.installhelper.install_cmd_helper import run_cmd
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, is_travis_or_appveyor
class TestPyMyInstallCliTool(unittest.TestCase):
def test_install_tool(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
temp = get_temp_folder(__file__, "temp_install_tool")
this = os.path.abspath(os.path.dirname(__file__))
script = os.path.normpath(os.path.join(
this, "..", "..", "src", "pymyinstall", "cli", "pymy_install.py"))
cmd = "{0} {1} {2} --force --folder={3}".format(
sys.executable, script, "graphviz --task=tool --source=zip", temp)
out, err = run_cmd(cmd, wait=True, do_not_log=True)
fLOG("----", cmd)
fLOG(out.replace("\r", "").replace("\n\n", "\n"))
fLOG("-----")
fLOG(err.replace("\r", "").replace("\n\n", "\n"))
content = os.listdir(temp)
if not content:
if is_travis_or_appveyor():
warnings.warn("content is empty for: " + temp)
else:
raise Exception("content is empty for: " + temp)
if __name__ == "__main__":
unittest.main()
| # coding: latin-1
"""
@brief test log(time=1s)
"""
import sys
import os
import unittest
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
try:
import pyquickhelper as skip_
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..",
"..",
"pyquickhelper",
"src")))
if path not in sys.path:
sys.path.append(path)
if "PYQUICKHELPER" in os.environ and len(os.environ["PYQUICKHELPER"]) > 0:
sys.path.append(os.environ["PYQUICKHELPER"])
import pyquickhelper as skip_
from src.pymyinstall.installhelper.install_cmd_helper import run_cmd
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder
class TestPyMyInstallCliTool(unittest.TestCase):
def test_install_tool(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
temp = get_temp_folder(__file__, "temp_install_tool")
this = os.path.abspath(os.path.dirname(__file__))
script = os.path.normpath(os.path.join(
this, "..", "..", "src", "pymyinstall", "cli", "pymy_install.py"))
cmd = "{0} {1} {2} --force --folder={3}".format(
sys.executable, script, "graphviz --task=tool --source=zip", temp)
out, err = run_cmd(cmd, wait=True, do_not_log=True)
fLOG("----", cmd)
fLOG(out.replace("\r", "").replace("\n\n", "\n"))
fLOG("-----")
fLOG(err.replace("\r", "").replace("\n\n", "\n"))
content = os.listdir(temp)
assert content
if __name__ == "__main__":
unittest.main()
| Python | 0.000002 |
07e12dd0942329aadc8fb3ed47b6f088779800b9 | fix logcollector | src/bots/outputs/logcollector/logcollector.py | src/bots/outputs/logcollector/logcollector.py | import sys
import time
import socket
from lib.bot import *
from lib.utils import *
from lib.event import *
class LogCollectorBot(Bot):
def process(self):
event = self.receive_message()
if event:
data = ''
for key, value in event.items():
data += key.replace(' ','_') + '="' + value + '" '
data += "\n"
self.send_data(data)
self.acknowledge_message()
def connect(self):
address = (self.parameters.ip, int(self.parameters.port))
self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
self.con.connect(address)
break
except socket.error, e:
self.logger.error(e.args[1] + ". Retrying in 10 seconds.")
time.sleep(10)
self.logger.info("Connected successfully to %s:%i", address[0], address[1])
def send_data(self, data):
while True:
try:
self.con.send(unicode(data).encode("utf-8"))
self.con.sendall("")
break
except socket.error, e:
self.logger.error(e.args[1] + ". Reconnecting..")
self.con.close()
self.connect()
except AttributeError:
self.connect()
if __name__ == "__main__":
bot = LogCollectorBot(sys.argv[1])
bot.start()
| import sys
import time
import socket
from lib.bot import *
from lib.utils import *
from lib.event import *
try:
import simplejson as json
except ImportError:
import json
class LogCollectorBot(Bot):
def process(self):
event = self.receive_message()
if event:
data = ''
for key, value in event.items():
data += key.replace(' ','_') + '=' + json.dumps(value) + ' '
data += "\n"
self.send_data(data)
self.acknowledge_message()
def connect(self):
address = (self.parameters.ip, int(self.parameters.port))
self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
self.con.connect(address)
break
except socket.error, e:
self.logger.error(e.args[1] + ". Retrying in 10 seconds.")
time.sleep(10)
self.logger.info("Connected successfully to %s:%i", address[0], address[1])
def send_data(self, data):
while True:
try:
self.con.send(unicode(data).encode("utf-8"))
self.con.sendall("")
break
except socket.error, e:
self.logger.error(e.args[1] + ". Reconnecting..")
self.con.close()
self.connect()
except AttributeError:
self.connect()
if __name__ == "__main__":
bot = LogCollectorBot(sys.argv[1])
bot.start()
| Python | 0.000002 |
cc1d72d68fb46cccdf22e08d416a49b18e4a39b2 | Disable cache during CLI tests | tests/whack_cli_test.py | tests/whack_cli_test.py | import os
import subprocess
import contextlib
from nose.tools import istest, assert_equal
from whack import cli
from whack.sources import SourceTarball
from . import whack_test
@istest
def params_are_passed_to_install_command_as_dict():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version=1.2.4", "-p", "pcre_version=8.32"
]
expected_params = {"version": "1.2.4", "pcre_version": "8.32"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_values_can_contain_equals_sign():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version_range===1.2.4"
]
expected_params = {"version_range": "==1.2.4"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_without_equal_sign_has_value_of_empty_string():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "verbose"
]
expected_params = {"verbose": ""}
_test_install_arg_parse(argv, params=expected_params)
def _test_install_arg_parse(argv, **expected_kwargs):
args = cli.parse_args(argv)
for key, value in expected_kwargs.iteritems():
assert_equal(value, getattr(args, key))
class CliOperations(object):
def install(self, package_name, install_dir, params):
self._command("install", package_name, install_dir, params)
def build(self, package_name, target_dir, params):
self._command("build", package_name, target_dir, params)
def deploy(self, package_dir, target_dir=None):
if target_dir is None:
self._whack("deploy", package_dir, "--in-place")
else:
self._whack("deploy", package_dir, target_dir)
def create_source_tarball(self, source_dir, tarball_dir):
output = self._whack(
"create-source-tarball",
source_dir, tarball_dir,
)
return SourceTarball(output.strip())
def _command(self, command_name, package_name, target_dir, params):
params_args = [
"-p{0}={1}".format(key, value)
for key, value in params.iteritems()
]
self._whack(command_name, package_name, target_dir, *params_args)
def _whack(self, *args):
return subprocess.check_output(["whack"] + list(args) + ["--no-cache"])
def _run_cli_operations_test(test_func):
ops = CliOperations()
test_func(ops)
WhackCliOperationsTest = whack_test.create(
"WhackCliOperationsTest",
_run_cli_operations_test,
)
@contextlib.contextmanager
def _updated_env(env):
original_env = os.environ.copy()
for key, value in env.iteritems():
os.environ[key] = value
yield
for key in env:
if key in original_env:
os.environ[key] = original_env[value]
else:
del os.environ[key]
| import os
import subprocess
import contextlib
from nose.tools import istest, assert_equal
from whack import cli
from whack.sources import SourceTarball
from . import whack_test
@istest
def params_are_passed_to_install_command_as_dict():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version=1.2.4", "-p", "pcre_version=8.32"
]
expected_params = {"version": "1.2.4", "pcre_version": "8.32"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_values_can_contain_equals_sign():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version_range===1.2.4"
]
expected_params = {"version_range": "==1.2.4"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_without_equal_sign_has_value_of_empty_string():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "verbose"
]
expected_params = {"verbose": ""}
_test_install_arg_parse(argv, params=expected_params)
def _test_install_arg_parse(argv, **expected_kwargs):
args = cli.parse_args(argv)
for key, value in expected_kwargs.iteritems():
assert_equal(value, getattr(args, key))
class CliOperations(object):
def install(self, package_name, install_dir, params):
self._command("install", package_name, install_dir, params)
def build(self, package_name, target_dir, params):
self._command("build", package_name, target_dir, params)
def deploy(self, package_dir, target_dir=None):
if target_dir is None:
self._whack("deploy", package_dir, "--in-place")
else:
self._whack("deploy", package_dir, target_dir)
def create_source_tarball(self, source_dir, tarball_dir):
output = self._whack(
"create-source-tarball",
source_dir, tarball_dir,
)
return SourceTarball(output.strip())
def _command(self, command_name, package_name, target_dir, params):
params_args = [
"-p{0}={1}".format(key, value)
for key, value in params.iteritems()
]
self._whack(command_name, package_name, target_dir, *params_args)
def _whack(self, *args):
return subprocess.check_output(["whack"] + list(args))
def _run_cli_operations_test(test_func):
ops = CliOperations()
test_func(ops)
WhackCliOperationsTest = whack_test.create(
"WhackCliOperationsTest",
_run_cli_operations_test,
)
@contextlib.contextmanager
def _updated_env(env):
original_env = os.environ.copy()
for key, value in env.iteritems():
os.environ[key] = value
yield
for key in env:
if key in original_env:
os.environ[key] = original_env[value]
else:
del os.environ[key]
| Python | 0.000001 |
3ba71de7d03b99376b70cd40de6dfcd45f1d35c0 | replace distutils with which | tmscoring/tests/test.py | tmscoring/tests/test.py | from __future__ import division
import subprocess
import tmscoring
import numpy as np
from numpy.testing import assert_almost_equal, TestCase
from nose.exc import SkipTest
from shutil import which
class TestAligningBase(TestCase):
def test_matrix(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
matrix = align_object.get_matrix(theta, phi, psi, dx, dy, dz)
rotation = matrix[:3, :3]
assert_almost_equal(1, np.linalg.det(rotation), 6)
assert_almost_equal(1, np.linalg.det(matrix), 6)
def test_tm_valuex(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
tm = align_object._tm(theta, phi, psi, dx, dy, dz)
assert np.all(0 <= -tm / align_object.N)
def test_load_data_alignment(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='align')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_load_data_index(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='index')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_identity():
sc = tmscoring.TMscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.tmscore(0, 0, 0, 0, 0, 0) == 1
sc = tmscoring.RMSDscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.rmsd(0, 0, 0, 0, 0, 0) == 0.0
def test_tm_output():
if which("TMscore") is None:
raise SkipTest('TMscore is not installed in the system.')
pdb1, pdb2 = 'pdb1.pdb', 'pdb2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
p = subprocess.Popen('TMscore {} {} | grep TM-score | grep d0'.format(pdb1, pdb2), stdout=subprocess.PIPE, shell=True)
ref_tm = float(p.communicate()[0].decode('utf-8').split('=')[1].split('(')[0])
assert_almost_equal(ref_tm, tm, decimal=2)
p = subprocess.Popen('TMscore {} {} | grep RMSD | grep common'.format(pdb1, pdb2),
stdout=subprocess.PIPE, shell=True)
ref_rmsd = float(p.communicate()[0].decode('utf-8').split('=')[1])
assert abs(ref_rmsd - rmsd) < 0.1
def test_repeated():
pdb1, pdb2 = 'pdbrep_1.pdb', 'pdbrep_2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
assert_almost_equal(tm, 0.27426501120343644)
assert_almost_equal(rmsd, 15.940038528551929)
| from __future__ import division
import subprocess
import distutils
import tmscoring
import numpy as np
from numpy.testing import assert_almost_equal, TestCase
from nose.exc import SkipTest
class TestAligningBase(TestCase):
def test_matrix(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
matrix = align_object.get_matrix(theta, phi, psi, dx, dy, dz)
rotation = matrix[:3, :3]
assert_almost_equal(1, np.linalg.det(rotation), 6)
assert_almost_equal(1, np.linalg.det(matrix), 6)
def test_tm_valuex(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
tm = align_object._tm(theta, phi, psi, dx, dy, dz)
assert 0 <= -tm / align_object.N <= 1
def test_load_data_alignment(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='align')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_load_data_index(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='index')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_identity():
sc = tmscoring.TMscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.tmscore(0, 0, 0, 0, 0, 0) == 1
sc = tmscoring.RMSDscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.rmsd(0, 0, 0, 0, 0, 0) == 0.0
def test_tm_output():
if not distutils.spawn.find_executable('TMscore'):
raise SkipTest('TMscore is not installed in the system.')
pdb1, pdb2 = 'pdb1.pdb', 'pdb2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
p = subprocess.Popen('TMscore {} {} | grep TM-score | grep d0'.format(pdb1, pdb2),
stdout=subprocess.PIPE, shell=True)
ref_tm = float(p.communicate()[0].split('=')[1].split('(')[0])
assert_almost_equal(ref_tm, tm, decimal=2)
p = subprocess.Popen('TMscore {} {} | grep RMSD | grep common'.format(pdb1, pdb2),
stdout=subprocess.PIPE, shell=True)
ref_rmsd = float(p.communicate()[0].split('=')[1])
assert abs(ref_rmsd - rmsd) < 0.1
def test_repeated():
pdb1, pdb2 = 'pdbrep_1.pdb', 'pdbrep_2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
assert_almost_equal(tm, 0.27426501120343644)
assert_almost_equal(rmsd, 15.940038528551929)
| Python | 0.000011 |
d451814584318ac45cdefa9702c72eb5c15fe690 | Fix Flakes Errors: openspending/ui/controllers/home.py | openspending/ui/controllers/home.py | openspending/ui/controllers/home.py | import logging
import os
import subprocess
from pylons import request, tmpl_context as c
from pylons.controllers.util import redirect
from pylons.i18n import _
from openspending.model import Dataset
from openspending.lib.solr_util import dataset_entries
from openspending.ui.i18n import set_session_locale
from openspending.ui.lib.base import BaseController
from openspending.ui.lib.helpers import flash_success
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class HomeController(BaseController):
def index(self):
# Get all of the datasets available to the account of the logged in
# or an anonymous user (if c.account is None)
c.datasets = Dataset.all_by_account(c.account)
c.num_entries = dataset_entries(None)
return templating.render('home/index.html')
def set_locale(self):
locale = request.params.get('locale')
if locale is not None:
set_session_locale(locale)
def version(self):
cwd = os.path.dirname(__file__)
process = subprocess.Popen('git rev-parse --verify HEAD'.split(' '),
cwd=cwd,
stdout=subprocess.PIPE)
output = process.communicate()[0]
if process.returncode == 0:
return output
else:
import openspending.version
return openspending.version.__version__
def favicon(self):
return redirect('/static/img/favicon.ico', code=301)
def ping(self):
from openspending.tasks import ping
ping.delay()
flash_success(_("Sent ping!"))
redirect('/')
| import logging
import os
import random
import subprocess
from datetime import datetime
from pylons import request, response, tmpl_context as c, url, config
from pylons.controllers.util import redirect
from pylons.decorators.cache import beaker_cache
from pylons.i18n import _
from openspending.model import Dataset
from openspending.lib.solr_util import dataset_entries
from openspending.ui.i18n import set_session_locale
from openspending.ui.lib import views
from openspending.ui.lib.base import BaseController, require
from openspending.ui.lib.helpers import flash_success, flash_error
from openspending.ui.lib import helpers as h
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class HomeController(BaseController):
def index(self):
# Get all of the datasets available to the account of the logged in
# or an anonymous user (if c.account is None)
c.datasets = Dataset.all_by_account(c.account)
c.num_entries = dataset_entries(None)
return templating.render('home/index.html')
def set_locale(self):
locale = request.params.get('locale')
if locale is not None:
set_session_locale(locale)
def version(self):
cwd = os.path.dirname(__file__)
process = subprocess.Popen('git rev-parse --verify HEAD'.split(' '),
cwd=cwd,
stdout=subprocess.PIPE)
output = process.communicate()[0]
if process.returncode == 0:
return output
else:
import openspending.version
return openspending.version.__version__
def favicon(self):
return redirect('/static/img/favicon.ico', code=301)
def ping(self):
from openspending.tasks import ping
ping.delay()
flash_success(_("Sent ping!"))
redirect('/')
| Python | 0.000013 |
7db11fa7aad4b53a1f50988e83de2abfbae61dde | Fix the senddeletionnotices command to take into account the new default SMS limit. | hc/accounts/management/commands/senddeletionnotices.py | hc/accounts/management/commands/senddeletionnotices.py | from datetime import timedelta
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from hc.accounts.models import Profile, Member
from hc.api.models import Ping
from hc.lib import emails
class Command(BaseCommand):
help = """Send deletion notices to inactive user accounts.
Conditions for sending the notice:
- deletion notice has not been sent recently
- last login more than a year ago
- none of the owned projects has invited team members
"""
def handle(self, *args, **options):
year_ago = now() - timedelta(days=365)
q = Profile.objects.order_by("id")
# Exclude accounts with logins in the last year_ago
q = q.exclude(user__last_login__gt=year_ago)
# Exclude accounts less than a year_ago old
q = q.exclude(user__date_joined__gt=year_ago)
# Exclude accounts with the deletion notice already sent
q = q.exclude(deletion_notice_date__gt=year_ago)
# Exclude paid accounts
q = q.exclude(sms_limit__gt=5)
sent = 0
for profile in q:
members = Member.objects.filter(project__owner_id=profile.user_id)
if members.exists():
print("Skipping %s, has team members" % profile)
continue
pings = Ping.objects
pings = pings.filter(owner__project__owner_id=profile.user_id)
pings = pings.filter(created__gt=year_ago)
if pings.exists():
print("Skipping %s, has pings in last year" % profile)
continue
self.stdout.write("Sending notice to %s" % profile.user.email)
profile.deletion_notice_date = now()
profile.save()
ctx = {"email": profile.user.email, "support_email": settings.SUPPORT_EMAIL}
emails.deletion_notice(profile.user.email, ctx)
# Throttle so we don't send too many emails at once:
time.sleep(1)
sent += 1
return "Done! Sent %d notices" % sent
| from datetime import timedelta
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from hc.accounts.models import Profile, Member
from hc.api.models import Ping
from hc.lib import emails
class Command(BaseCommand):
help = """Send deletion notices to inactive user accounts.
Conditions for sending the notice:
- deletion notice has not been sent recently
- last login more than a year ago
- none of the owned projects has invited team members
"""
def handle(self, *args, **options):
year_ago = now() - timedelta(days=365)
q = Profile.objects.order_by("id")
# Exclude accounts with logins in the last year_ago
q = q.exclude(user__last_login__gt=year_ago)
# Exclude accounts less than a year_ago old
q = q.exclude(user__date_joined__gt=year_ago)
# Exclude accounts with the deletion notice already sent
q = q.exclude(deletion_notice_date__gt=year_ago)
# Exclude paid accounts
q = q.exclude(sms_limit__gt=0)
sent = 0
for profile in q:
members = Member.objects.filter(project__owner_id=profile.user_id)
if members.exists():
print("Skipping %s, has team members" % profile)
continue
pings = Ping.objects
pings = pings.filter(owner__project__owner_id=profile.user_id)
pings = pings.filter(created__gt=year_ago)
if pings.exists():
print("Skipping %s, has pings in last year" % profile)
continue
self.stdout.write("Sending notice to %s" % profile.user.email)
profile.deletion_notice_date = now()
profile.save()
ctx = {"email": profile.user.email, "support_email": settings.SUPPORT_EMAIL}
emails.deletion_notice(profile.user.email, ctx)
# Throttle so we don't send too many emails at once:
time.sleep(1)
sent += 1
return "Done! Sent %d notices" % sent
| Python | 0 |
43ab753c4a9892c55f115a4dd5345e94c4bb5d41 | Fix auth initialization logging | opwen_email_server/services/auth.py | opwen_email_server/services/auth.py | from ast import literal_eval
from os import environ
from typing import Callable
from typing import Mapping
from opwen_email_server.utils.log import LogMixin
class EnvironmentAuth(LogMixin):
def __init__(self, client_to_domain: Mapping[str, str]=None,
envgetter: Callable[[str, str], str]=environ.get,
envkey: str='LOKOLE_CLIENTS') -> None:
self.__client_to_domain = dict(client_to_domain or {})
self._envgetter = envgetter
self._envkey = envkey
@property
def _client_to_domain(self):
if not self.__client_to_domain:
self.__client_to_domain = self._create_client_to_domain()
self.log_debug('initialized auth to %r', self.__client_to_domain)
return self.__client_to_domain
def _create_client_to_domain(self) -> Mapping[str, str]:
client_to_domain = literal_eval(self._envgetter(self._envkey, '{}'))
if not client_to_domain:
raise ValueError('environment key {} not set'.format(self._envkey))
return client_to_domain
def __contains__(self, client: str) -> bool:
return client in self._client_to_domain
def domain_for(self, client: str) -> str:
return self._client_to_domain[client]
| from ast import literal_eval
from os import environ
from typing import Callable
from typing import Mapping
from opwen_email_server.utils.log import LogMixin
class EnvironmentAuth(LogMixin):
def __init__(self, client_to_domain: Mapping[str, str]=None,
envgetter: Callable[[str, str], str]=environ.get,
envkey: str='LOKOLE_CLIENTS') -> None:
self.__client_to_domain = dict(client_to_domain or {})
self._envgetter = envgetter
self._envkey = envkey
@property
def _client_to_domain(self):
if not self.__client_to_domain:
self.log_debug('initialized auth to %r', self.__client_to_domain)
self.__client_to_domain = self._create_client_to_domain()
return self.__client_to_domain
def _create_client_to_domain(self) -> Mapping[str, str]:
client_to_domain = literal_eval(self._envgetter(self._envkey, '{}'))
if not client_to_domain:
raise ValueError('environment key {} not set'.format(self._envkey))
return client_to_domain
def __contains__(self, client: str) -> bool:
return client in self._client_to_domain
def domain_for(self, client: str) -> str:
return self._client_to_domain[client]
| Python | 0.000004 |
5118104dad921128e4dec0cd1ea00aa7d854c0a9 | fix token create params to match new keystone - this change will need to be ported to novaclient | openstackx/auth/tokens.py | openstackx/auth/tokens.py | from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"auth": {"passwordCredentials": {"username": username,
"password": password},
"tenantId": tenant}}
return self._create('tokens', params, "access")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
# need the uglyness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
| from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"passwordCredentials": {"username": username,
"password": password,
"tenantId": tenant}}
return self._create('tokens', params, "auth")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
# need the uglyness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
| Python | 0 |
180b7810cdc9a2a17a13c184fc2143f9a5f10cd6 | change var name article to container (opps 0.2) in sitemap generator class | opps/sitemaps/sitemaps.py | opps/sitemaps/sitemaps.py | # -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
| # -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
article = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
| Python | 0 |
ba92d4b5854f31b97255c98b83d8dfb8874c8668 | Fix arg list for thumbnailpath override | frontend/src/indexing.py | frontend/src/indexing.py | #!/usr/bin/env python
from pymongo import MongoClient
#from subprocess import Popen, PIPE
import FindVid as fv
from sys import argv, exit
import hashlib
import os
def hashFile(filename, blocksize):
hash = hashlib.sha1()
with open(filename, 'rb') as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
# returns the configuration dictionary
def config(db="findvid", collection="videos", config={"_id": "config"}):
client = MongoClient()
db = client[db]
videos = db[collection]
return videos.find(config).next()
CONFIG = config() # abs, thumbnail, video
VIDEOPATH = CONFIG["abspath"] + CONFIG["videopath"]
# path to shotbounds program
SHOTBOUNDS = "{0}main/impl/shotbounds".format(CONFIG["abspath"])
THUMBNAILER = "{0}main/impl/thumbnails".format(CONFIG["abspath"])
#Index the given videofile (abs. path), create thumbnails in the
def index_video(videofile, searchable=False, uploaded=True, thumbpath = None):
#Get PyMongo client
client = MongoClient()
db = client["findvid"]
videos = db["videos"]
#Get Hash
fileHash = str(hashFile(videofile, 65536))
#Check if this exact video exists already
video = videos.find_one({'_id': fileHash})
if (video):
return False
#Use C-Lib to get cuts in the video
cuts = fv.getCuts(videofile)
#Heuristic approach: Suitable keyframe between 2 cuts
keyframes = [(cuts[i-1] + cuts[i])/2 for i in range(1, len(cuts))]
#extract features from videofile given the keyframes array, use the middle keyframe as videothumb and save to default folder
if (thumbpath == None):
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes)
else:
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes, thumbpath)
prev = 0
scenes = [] # scenes collection
for i, c in enumerate(cuts[1:]):
scene = {} # scene document
scene["_id"] = str(i)
scene["startframe"] = prev
scene["endframe"] = c
# save features
scene["colorhist"] = []
for v in features[i][0]:
scene["colorhist"].append(v)
scene["edges"] = []
for v in features[i][1]:
scene["edges"].append(v)
# TinyIMG
# scene["tinyimg"]
# for v in features[i][2]:
# scene["tinyimg"].append(v)
# GIST
# scene["gist"]
# for v in features[i][2]:
# scene["gist"].append(v)
scenes.append(scene)
prev = c
video = {}
# TODO sequence counter
video["_id"] = fileHash
video["filename"] = videofile
fps = fv.getFramerate(videofile)
video["fps"] = fps
video["framecount"] = cuts[-1:][0] # last entry
video["scenes"] = scenes
video["upload"] = uploaded
video["searchable"] = searchable
videos.insert(video)
return True
if __name__ == "__main__":
if len(argv) < 2:
print "ERROR: file missing!"
exit(1)
videofile = argv[1]
index_video(videofile)
| #!/usr/bin/env python
from pymongo import MongoClient
#from subprocess import Popen, PIPE
import FindVid as fv
from sys import argv, exit
import hashlib
import os
def hashFile(filename, blocksize):
hash = hashlib.sha1()
with open(filename, 'rb') as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
# returns the configuration dictionary
def config(db="findvid", collection="videos", config={"_id": "config"}):
client = MongoClient()
db = client[db]
videos = db[collection]
return videos.find(config).next()
CONFIG = config() # abs, thumbnail, video
VIDEOPATH = CONFIG["abspath"] + CONFIG["videopath"]
# path to shotbounds program
SHOTBOUNDS = "{0}main/impl/shotbounds".format(CONFIG["abspath"])
THUMBNAILER = "{0}main/impl/thumbnails".format(CONFIG["abspath"])
#Index the given videofile (abs. path), create thumbnails in the
def index_video(videofile, searchable=False, uploaded=True, thumbpath = None):
#Get PyMongo client
client = MongoClient()
db = client["findvid"]
videos = db["videos"]
#Get Hash
fileHash = str(hashFile(videofile, 65536))
#Check if this exact video exists already
video = videos.find_one({'_id': fileHash})
if (video):
return False
#Use C-Lib to get cuts in the video
cuts = fv.getCuts(videofile)
#Heuristic approach: Suitable keyframe between 2 cuts
keyframes = [(cuts[i-1] + cuts[i])/2 for i in range(1, len(cuts))]
#extract features from videofile given the keyframes array, use the middle keyframe as videothumb and save to default folder
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes, thumbpath)
prev = 0
scenes = [] # scenes collection
for i, c in enumerate(cuts[1:]):
scene = {} # scene document
scene["_id"] = str(i)
scene["startframe"] = prev
scene["endframe"] = c
# save features
scene["colorhist"] = []
for v in features[i][0]:
scene["colorhist"].append(v)
scene["edges"] = []
for v in features[i][1]:
scene["edges"].append(v)
# TinyIMG
# scene["tinyimg"]
# for v in features[i][2]:
# scene["tinyimg"].append(v)
# GIST
# scene["gist"]
# for v in features[i][2]:
# scene["gist"].append(v)
scenes.append(scene)
prev = c
video = {}
# TODO sequence counter
video["_id"] = fileHash
video["filename"] = videofile
fps = fv.getFramerate(videofile)
video["fps"] = fps
video["framecount"] = cuts[-1:][0] # last entry
video["scenes"] = scenes
video["upload"] = uploaded
video["searchable"] = searchable
videos.insert(video)
return True
if __name__ == "__main__":
if len(argv) < 2:
print "ERROR: file missing!"
exit(1)
videofile = argv[1]
index_video(videofile)
| Python | 0.000002 |
0d01756c1db9a6c19d263edadeda775adf5291af | Add unit tests on the sphinxext indent function | oslo_policy/tests/test_sphinxext.py | oslo_policy/tests/test_sphinxext.py | # Copyright 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import textwrap
from oslotest import base
from oslo_policy import policy
from oslo_policy import sphinxext
class IndentTest(base.BaseTestCase):
def test_indent(self):
result = sphinxext._indent("foo\nbar")
self.assertEqual(" foo\n bar", result)
result = sphinxext._indent("")
self.assertEqual("", result)
result = sphinxext._indent("\n")
self.assertEqual("\n", result)
result = sphinxext._indent("test\ntesting\n\nafter blank")
self.assertEqual(" test\n testing\n\n after blank", result)
result = sphinxext._indent("\tfoo\nbar")
self.assertEqual(" \tfoo\n bar", result)
result = sphinxext._indent(" foo\nbar")
self.assertEqual(" foo\n bar", result)
result = sphinxext._indent("foo\n bar")
self.assertEqual(" foo\n bar", result)
result = sphinxext._indent("foo\n\n bar")
self.assertEqual(" foo\n\n bar", result)
self.assertRaises(AttributeError, sphinxext._indent, None)
class FormatPolicyTest(base.BaseTestCase):
def test_minimal(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@')])))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
(no description provided)
""").lstrip(), results)
def test_with_description(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@', 'My sample rule')]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
My sample rule
""").lstrip(), results)
def test_with_operations(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}])]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
My sample rule
""").lstrip(), results)
def test_with_scope_types(self):
operations = [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}
]
scope_types = ['bar']
rule = policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', operations,
scope_types=scope_types
)
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [rule]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
:Scope Types:
- **bar**
My sample rule
""").lstrip(), results)
| # Copyright 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import textwrap
from oslotest import base
from oslo_policy import policy
from oslo_policy import sphinxext
class FormatPolicyTest(base.BaseTestCase):
def test_minimal(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@')])))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
(no description provided)
""").lstrip(), results)
def test_with_description(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@', 'My sample rule')]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
My sample rule
""").lstrip(), results)
def test_with_operations(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}])]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
My sample rule
""").lstrip(), results)
def test_with_scope_types(self):
operations = [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}
]
scope_types = ['bar']
rule = policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', operations,
scope_types=scope_types
)
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [rule]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
:Scope Types:
- **bar**
My sample rule
""").lstrip(), results)
| Python | 0.000029 |
f127f0e9bb0b8778feafbdbc1fa68e79a923d639 | Update product listing test to use product ids rather than index | whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py | whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper products and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
product_dict = {}
for product in items:
product_id = product['link'].split('/')[-1]
product_dict[str(product_id)] = product
for product in Product.objects.all():
self.assertEqual(
product_dict[str(product.id)]['description'],
product.description)
self.assertEqual(
product_dict[str(product.id)]['name'], product.name)
self.assertEqual(
product_dict[str(product.id)]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
product_dict[str(product.id)]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(product_dict[str(product.id)]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
| from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper productss and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
for product in Product.objects.all():
self.assertEqual(
items[product.id-1]['description'], product.description)
self.assertEqual(
items[product.id-1]['name'], product.name)
self.assertEqual(
items[product.id-1]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
items[product.id-1]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(items[product.id-1]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
| Python | 0 |
266ee5a6798c1fe09e9c6b36ee5831a1060b4624 | Fix lust.py | python/lust.py | python/lust.py | #!/usr/bin/env python
class LustObject(object):
# executes the command
def handle(self, arguments): pass
def print_help(self): pass
class FactorialCommand(LustObject):
def handle(self, arguments):
try: argument = int(arguments[0])
except (ValueError, IndexError):
print("fact: could not read integer argument.")
return
if argument < 0:
print("fact: argument has to be non-negative!")
return
print(self.__calculate_factorial(argument))
def print_help(self):
print(" fact <integer>")
print(" Calculates the factorial of <integer>.")
def __calculate_factorial(self, argument):
# Hmmm...
result = 1
for i in range(1, argument+1):
result *= i
return result
class QuitCommand(LustObject):
def handle(self, arguments = None):
print("Bye!")
exit()
def print_help(self):
print(" quit")
print(" Quits.")
class HelpCommand(LustObject):
def __init__(self, commands):
self.commands = commands
def handle(self, arguments = None):
print("List of all commands")
print("--------------------")
for command in sorted(self.commands):
self.commands[command].print_help()
def print_help(self):
print(" help")
print(" Prints help for all commands.")
print("Hello! Welcome to the LARICS Universal Shell Terminal (LUST)!")
print("Enter 'help' for a list of commands. Press Ctrl-D or enter 'quit' to quit.")
# dictionary for storing all commands
commands = { }
commands["fact"] = FactorialCommand()
commands["quit"] = QuitCommand()
# help command needs a reference to the parent dictionary in order to call each
# command's print_help() function
commands["help"] = HelpCommand(commands)
# input from Python 3 is raw_input in Python 2
try: input = raw_input
except NameError: pass
while True:
# read current line and try to extract command name
try:
cmd_line = input(">> ")
except (EOFError):
break
arguments = cmd_line.split()
try: cmd_name = arguments[0].lower()
except IndexError: continue
# look up the appropriate command in commands dictionary
if cmd_name not in commands:
print("lust: no such command '{}'.".format(cmd_name))
continue
else:
# command found, pass its handler the rest of the read arguments
commands[cmd_name].handle(arguments[1:])
print
commands["quit"].handle()
| #!/usr/bin/env python
class LustObject(object):
# executes the command
def handle(self, arguments): pass
def print_help(self): pass
class FactorialCommand(LustObject):
def handle(self, arguments):
try: argument = int(arguments[0])
except (ValueError, IndexError):
print("fact: could not read integer argument.")
return
if argument < 0:
print("fact: argument has to be non-negative!")
return
print(self.__calculate_factorial(argument))
def print_help(self):
print(" fact <integer>")
print(" Calculates the factorial of <integer>.")
def __calculate_factorial(self, argument):
# Hmmm...
result = 0
for i in range(1, argument+1):
result *= i
return result
class QuitCommand(LustObject):
def handle(self, arguments = None):
print("Bye!")
exit()
def print_help(self):
print(" quit")
print(" Quits.")
class HelpCommand(LustObject):
def __init__(self, commands):
self.commands = commands
def handle(self, arguments = None):
print("List of all commands")
print("--------------------")
for command in sorted(self.commands):
self.commands[command].print_help()
def print_help(self):
print(" help")
print(" Prints help for all commands.")
print("Hello! Welcome to the LARICS Universal Shell Terminal (LUST)!")
print("Enter 'help' for a list of commands. Press Ctrl-D or enter 'quit' to quit.")
# dictionary for storing all commands
commands = { }
commands["fact"] = FactorialCommand()
commands["quit"] = QuitCommand()
# help command needs a reference to the parent dictionary in order to call each
# command's print_help() function
commands["help"] = HelpCommand(commands)
# input from Python 3 is raw_input in Python 2
try: input = raw_input
except NameError: pass
while True:
# read current line and try to extract command name
try:
cmd_line = input(">> ")
except (EOFError):
break
arguments = cmd_line.split()
try: cmd_name = arguments[0].lower()
except IndexError: continue
# look up the appropriate command in commands dictionary
if cmd_name not in commands:
print("lust: no such command '{}'.".format(cmd_name))
continue
else:
# command found, pass its handler the rest of the read arguments
commands[cmd_name].handle(arguments[1:])
print
commands["quit"].handle()
| Python | 0.000004 |
0c0613f8b5719e396d6a053201c32482f95bdb7f | simplified the dict merging. small fix | qface/utils.py | qface/utils.py |
def merge(a, b):
"merges b into a recursively if a and b are dicts"
for key in b:
if isinstance(a.get(key), dict) and isinstance(b.get(key), dict):
merge(a[key], b[key])
else:
a[key] = b[key]
return a
|
def merge(a, b, path=None):
"merges b into a"
# import pdb; pdb.set_trace()
path = path or []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
else:
a[key] = b[key]
else:
a[key] = b[key]
return a
| Python | 0.999987 |
12b34fc09baa5060495e25e57680d1f6170559c5 | Enable estimation reports for FPBŻ | addons/bestja_configuration_fpbz/__openerp__.py | addons/bestja_configuration_fpbz/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_estimation_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
| # -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
| Python | 0 |
a0b488490ad32f7251e0ec2a35607a78c3022695 | FIx Entity.save signature in alias loader. | grano/service/aliases.py | grano/service/aliases.py | import logging
from unicodecsv import DictReader, DictWriter
from grano.core import db
from grano.model import Entity, Schema
log = logging.getLogger(__name__)
## Import commands
def import_aliases(path):
with open(path, 'r') as fh:
reader = DictReader(fh)
for row in reader:
data = {}
for k, v in row.items():
k = k.lower().strip()
data[k] = v
assert 'canonical' in data, 'No "canonical" column!'
assert 'alias' in data, 'No "alias" column!'
import_alias(data)
db.session.commit()
def import_alias(data):
# TODO: this actually deleted old entities, i.e. makes invalid
# entities - we should try and either re-direct them, or keep
# old entities whenever that makes sense.
canonical = Entity.by_name(data.get('canonical'))
if canonical is None:
schema = Schema.cached(Entity, 'base')
prop = {
'value': data.get('canonical'),
'active': True,
'schema': schema,
'source_url': data.get('source_url')
}
canonical = Entity.save([schema], {'name': prop}, [])
db.session.flush()
alias = Entity.by_name(data.get('alias'))
if alias is None:
Entity.PROPERTIES.save(canonical, 'name', {
'schema': Schema.cached(Entity, 'base'),
'value': data.get('alias'),
'active': False,
'source_url': data.get('source_url')
})
elif alias.id != canonical.id:
alias.merge_into(canonical)
if alias.id != canonical.id:
log.info("Mapped: %s -> %s", alias.id, canonical.id)
## Export commands
def export_aliases(path):
with open(path, 'w') as fh:
writer = DictWriter(fh, ['entity_id', 'alias', 'canonical'])
writer.writeheader()
for entity in Entity.all():
#print entity
export_entity(entity, writer)
def export_entity(entity, writer):
canonical = None
aliases = []
for prop in entity.properties.filter_by(name='name'):
aliases.append(prop.value)
if prop.active:
canonical = prop.value
for alias in aliases:
writer.writerow({
'entity_id': entity.id,
'alias': alias,
'canonical': canonical
})
| import logging
from unicodecsv import DictReader, DictWriter
from grano.core import db
from grano.model import Entity, Schema
log = logging.getLogger(__name__)
## Import commands
def import_aliases(path):
with open(path, 'r') as fh:
reader = DictReader(fh)
for row in reader:
data = {}
for k, v in row.items():
k = k.lower().strip()
data[k] = v
assert 'canonical' in data, 'No "canonical" column!'
assert 'alias' in data, 'No "alias" column!'
import_alias(data)
db.session.commit()
def import_alias(data):
# TODO: this actually deleted old entities, i.e. makes invalid
# entities - we should try and either re-direct them, or keep
# old entities whenever that makes sense.
canonical = Entity.by_name(data.get('canonical'))
if canonical is None:
schema = Schema.cached(Entity, 'base')
prop = {
'name': 'name',
'value': data.get('canonical'),
'active': True,
'schema': schema,
'source_url': data.get('source_url')
}
canonical = Entity.save([schema], [prop], [])
db.session.flush()
alias = Entity.by_name(data.get('alias'))
if alias is None:
Entity.PROPERTIES.save(canonical, 'name', {
'schema': Schema.cached(Entity, 'base'),
'value': data.get('alias'),
'active': False,
'source_url': data.get('source_url')
})
elif alias.id != canonical.id:
alias.merge_into(canonical)
if alias.id != canonical.id:
log.info("Mapped: %s -> %s", alias.id, canonical.id)
## Export commands
def export_aliases(path):
with open(path, 'w') as fh:
writer = DictWriter(fh, ['entity_id', 'alias', 'canonical'])
writer.writeheader()
for entity in Entity.all():
#print entity
export_entity(entity, writer)
def export_entity(entity, writer):
canonical = None
aliases = []
for prop in entity.properties.filter_by(name='name'):
aliases.append(prop.value)
if prop.active:
canonical = prop.value
for alias in aliases:
writer.writerow({
'entity_id': entity.id,
'alias': alias,
'canonical': canonical
})
| Python | 0 |
d7b27052db97aafe920530a2b40db571cef495ff | add locking to config file | tools/lib/configfile.py | tools/lib/configfile.py | #
# Configuration file parser (a wrapper around SafeConfigParser class)
#
# Note: characters "[]," are interpreted as meta-symbols and cannot be part of config values!
import ConfigParser, threading
class ConfigFile(object):
def __init__(self, filename, defaults = {}, automaticSections = False):
self.cfg = ConfigParser.SafeConfigParser(defaults)
self.currentSection = "main"
self.filename = filename
# do not load the file yet
self.automaticSections = automaticSections
self.lock = threading.Lock()
def load(self):
self.cfg.read(self.filename)
def save(self):
self.lock.acquire()
try:
with open(self.filename, 'wb') as configfile:
self.cfg.write(configfile)
finally:
self.lock.release()
def selectSection(self, sectionName):
self.currentSection = sectionName
def ensureCorrectSection(self, optionName):
if self.automaticSections and \
not self.cfg.has_option(self.currentSection, optionName):
for s in self.cfg.sections():
if self.cfg.has_option(s, optionName):
self.currentSection = s
break
def getCfgValue(self, name):
self.lock.acquire()
try:
self.ensureCorrectSection(name)
result = self.cfg.get(self.currentSection, name)
finally:
self.lock.release()
return result
def getCfgValueAsInt(self, name):
self.lock.acquire()
try:
self.ensureCorrectSection(name)
result = self.cfg.getint(self.currentSection, name)
finally:
self.lock.release()
return result
def getCfgValueAsFloat(self, name):
self.lock.acquire()
try:
self.ensureCorrectSection(name)
result = self.cfg.getfloat(self.currentSection, name)
finally:
self.lock.release()
return result
def getCfgValueAsBool(self, name):
self.lock.acquire()
try:
self.ensureCorrectSection(name)
result = self.cfg.getboolean(self.currentSection, name)
finally:
self.lock.release()
return result
def getCfgValueAsList(self, name):
self.lock.acquire()
try:
self.ensureCorrectSection(name)
value = self.cfg.get(self.currentSection, name)
# convert to list
split_by_bracket = value.split("]")
if len(split_by_bracket) > 1:
# ok, got list in a list here
result = []
for s in split_by_bracket:
if len(s):
result.append(s.strip(",[").split(","))
result
else:
# XXX: this means that comma cannot be part of well-formed config values!
result = value.split(",")
finally:
self.lock.release()
return result
def setCfgValue(self, name, value):
# make sure the value is in acceptable format (lists are stored as strings)
if isinstance(value, list):
value = ",".join(value)
elif not isinstance(value, str):
value = str(value)
self.lock.acquire()
try:
if self.cfg.has_section(self.currentSection):
# make sure the write is in correct section
self.ensureCorrectSection(name)
else:
# make sure the selected section is present in the file
self.cfg.add_section(self.currentSection)
# write the value
self.cfg.set(self.currentSection, name, value)
finally:
self.lock.release()
| #
# Configuration file parser (a wrapper around SafeConfigParser class)
#
# Note: characters "[]," are interpreted as meta-symbols and cannot be part of config values!
import ConfigParser
class ConfigFile(object):
def __init__(self, filename, defaults = {}, automaticSections = False):
self.cfg = ConfigParser.SafeConfigParser(defaults)
self.currentSection = "main"
self.filename = filename
# do not load the file yet
self.automaticSections = automaticSections
def load(self):
self.cfg.read(self.filename)
def save(self):
with open(self.filename, 'wb') as configfile:
self.cfg.write(configfile)
def selectSection(self, sectionName):
self.currentSection = sectionName
def ensureCorrectSection(self, optionName):
if self.automaticSections and \
not self.cfg.has_option(self.currentSection, optionName):
for s in self.cfg.sections():
if self.cfg.has_option(s, optionName):
self.currentSection = s
break
def getCfgValue(self, name):
self.ensureCorrectSection(name)
return self.cfg.get(self.currentSection, name)
def getCfgValueAsInt(self, name):
self.ensureCorrectSection(name)
return self.cfg.getint(self.currentSection, name)
def getCfgValueAsFloat(self, name):
self.ensureCorrectSection(name)
return self.cfg.getfloat(self.currentSection, name)
def getCfgValueAsBool(self, name):
self.ensureCorrectSection(name)
return self.cfg.getboolean(self.currentSection, name)
def getCfgValueAsList(self, name):
self.ensureCorrectSection(name)
value = self.cfg.get(self.currentSection, name)
# convert to list
split_by_bracket = value.split("]")
if len(split_by_bracket) > 1:
# ok, got list in a list here
result = []
for s in split_by_bracket:
if len(s):
result.append(s.strip(",[").split(","))
return result
# XXX: this means that comma cannot be part of well-formed config values!
return value.split(",")
def setCfgValue(self, name, value):
# make sure the write is in correct section
self.ensureCorrectSection(name)
# make sure the value is in acceptable format (lists are stored as strings)
if isinstance(value, list):
value = ",".join(value)
elif not isinstance(value, str):
value = str(value)
# make sure the selected section is present in the file
if not self.cfg.has_section(self.currentSection):
self.cfg.add_section(self.currentSection)
# write the value
self.cfg.set(self.currentSection, name, value)
| Python | 0.000001 |
7856a9f7e63c6a5146dc6976686e22420155f80c | if heroku debug=False | app.py | app.py | from flask import Flask, render_template, jsonify, request
from get_solution import Solution
import os
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('base.html', title="Codeforces Solution by Handle Name")
@app.route('/get_solution', methods=['POST', 'GET'])
def get_solution():
user = request.args.get('user')
contest = request.args.get('contest')
pid = request.args.get('problem')
_solution = Solution(user, contest, pid)
return _solution.compute()
if __name__ == '__main__':
if os.environ.get('HEROKU') is None:
app.run(debug=True)
else:
app.run()
| from flask import Flask, render_template, jsonify, request
from get_solution import Solution
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('base.html', title="Codeforces Solution by Handle Name")
@app.route('/get_solution', methods=['POST', 'GET'])
def get_solution():
user = request.args.get('user')
contest = request.args.get('contest')
pid = request.args.get('problem')
_solution = Solution(user, contest, pid)
return _solution.compute()
LOCAL = True
if __name__ == '__main__':
app.run(debug=LOCAL) | Python | 0.999999 |
2dc3e7eb3e6e5b32347d24d5353f9a5f0f6915c2 | Create app.py | app.py | app.py | #!/usr/bin/env python
import urllib
import json
import os
import time
from flask import Flask
from flask import request
from flask import make_response
from datetime import datetime
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "time.get":
return {}
result = req.get("result")
parameters = result.get("parameters")
zone = parameters.get("sys.location")
cost = {'Europe':100, 'North America':200, 'South America':300, 'Asia':400, 'Africa':500}
speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
localtime = time.localtime(time.time())
print "Local current time :", localtime
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
# "contextOut": [],
"source": "apiai-onlinestore-shipping"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
| #!/usr/bin/env python
import urllib
import json
import os
import time
from flask import Flask
from flask import request
from flask import make_response
from datetime import datetime
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "time.get":
return {}
result = req.get("result")
parameters = result.get("parameters")
zone = parameters.get("sys.location")
// cost = {'Europe':100, 'North America':200, 'South America':300, 'Asia':400, 'Africa':500}
// speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
localtime = time.localtime(time.time())
print "Local current time :", localtime
// print("Response:")
// print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
# "contextOut": [],
"source": "apiai-onlinestore-shipping"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
| Python | 0.000003 |
cb5aa965254c6abe3a865747e823fbe90f894a2c | return addresses as 'items' | app.py | app.py | from flask import Flask, Response, url_for, request, json
import os
import urllib2
import datetime
app = Flask(__name__)
@app.route('/hello')
def hello():
return 'Hello world'
@app.route('/')
def index():
response_data = json.dumps({ 'closures_href': url_for('closures') })
response = Response(response_data, status=200, mimetype='application/json')
return response
@app.route('/closures')
def closures():
d = datetime.datetime.today().strftime('%Y-%m-%d')
scraperwiki_query = "https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=denver_streets_and_sidewalks&query=select%20*%20from%20%60swdata%60%20where%20start_date%20%3C%20date('"+ d +"')%20and%20end_date%20%3E%20date('" + d + "')"
scraperwiki_response = { 'items': json.loads(urllib2.urlopen(scraperwiki_query).read()) }
response = Response(json.dumps(scraperwiki_response), status=200, mimetype='application/json')
return response
@app.route('/closures/<int:closure_id>')
def closure_id():
return ""
# find closure with closure_id
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| from flask import Flask, Response, url_for, request, json
import os
import urllib2
import datetime
app = Flask(__name__)
@app.route('/hello')
def hello():
return 'Hello world'
@app.route('/')
def index():
response_data = json.dumps({ 'closures_href': url_for('closures') })
response = Response(response_data, status=200, mimetype='application/json')
return response
@app.route('/closures')
def closures():
d = datetime.datetime.today().strftime('%Y-%m-%d')
scraperwiki_query = "https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=denver_streets_and_sidewalks&query=select%20*%20from%20%60swdata%60%20where%20start_date%20%3C%20date('"+ d +"')%20and%20end_date%20%3E%20date('" + d + "')"
scraperwiki_response = urllib2.urlopen(scraperwiki_query).read()
response = Response(scraperwiki_response, status=200, mimetype='application/json')
#return url_for('closure_id', closure_id=1)
return response
@app.route('/closures/<int:closure_id>')
def closure_id():
return ""
# find closure with closure_id
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| Python | 0.999977 |
32cb09df8f8c62bdc2ba5331b46b217abed49705 | Falling back to batch mode should be a WARNING not ERROR | rootpy/logger/roothandler.py | rootpy/logger/roothandler.py | import ctypes
import logging
import re
import sys
from . import root_logger, log
from .magic import DANGER, set_error_handler, re_execute_with_exception
class SHOWTRACE:
enabled = False
SANE_REGEX = re.compile("^[^\x80-\xFF]*$")
class Initialized:
value = False
ABORT_LEVEL = log.ERROR
def fixup_msg(lvl, msg):
# Fixup for this ERROR to a WARNING because it has a reasonable fallback.
# WARNING:ROOT.TGClient.TGClient] can't open display "localhost:10.0", switching to batch mode...
# In case you run from a remote ssh session, reconnect with ssh -Y
if "switching to batch mode..." in msg and lvl == logging.ERROR:
return logging.WARNING, msg
return lvl, msg
def python_logging_error_handler(level, root_says_abort, location, msg):
"""
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
"""
import rootpy.util.quickroot as QROOT
if not Initialized.value:
QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
QROOT.kTRUE
QROOT.gErrorIgnoreLevel
Initialized.value = True
try:
QROOT.kTRUE
except RuntimeError:
# Note: If the above causes us problems, it's because this logging
# handler has been called multiple times already with an
# exception. In that case we need to force upstream to raise it.
_, exc, traceback = sys.exc_info()
caller = sys._getframe(2)
re_execute_with_exception(caller, exc, traceback)
if level < QROOT.gErrorIgnoreLevel:
# Needed to silence some "normal" startup warnings
# (copied from PyROOT Utility.cxx)
return
log = root_logger.getChild(location.replace("::", "."))
if level >= QROOT.kSysError or level >= QROOT.kFatal:
lvl = logging.CRITICAL
elif level >= QROOT.kError:
lvl = logging.ERROR
elif level >= QROOT.kWarning:
lvl = logging.WARNING
elif level >= QROOT.kInfo:
lvl = logging.INFO
else:
lvl = logging.DEBUG
if not SANE_REGEX.match(msg):
# Not ASCII characters. Escape them.
msg = repr(msg)[1:-1]
# Apply fixups to improve consistency of errors/warnings
lvl, msg = fixup_msg(lvl, msg)
log.log(lvl, msg)
# String checks are used because we need a way of (un)forcing abort without
# modifying a global variable (gErrorAbortLevel) for the multithread tests
abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
if abort and not "rootpy.NEVERABORT" in msg:
caller = sys._getframe(1)
try:
# We can't raise an exception from here because ctypes/PyROOT swallows it.
# Hence the need for dark magic, we re-raise it within a trace.
from rootpy import ROOTError
raise ROOTError(level, location, msg)
except RuntimeError:
_, exc, traceback = sys.exc_info()
if SHOWTRACE.enabled:
from traceback import print_stack
print_stack(caller)
if DANGER.enabled:
# Avert your eyes, dark magic be within...
re_execute_with_exception(caller, exc, traceback)
if root_says_abort:
log.CRITICAL("abort().. expect a stack trace")
ctypes.CDLL(None).abort()
| import ctypes
import logging
import re
import sys
from . import root_logger, log
from .magic import DANGER, set_error_handler, re_execute_with_exception
class SHOWTRACE:
enabled = False
SANE_REGEX = re.compile("^[^\x80-\xFF]*$")
class Initialized:
value = False
ABORT_LEVEL = log.ERROR
def python_logging_error_handler(level, root_says_abort, location, msg):
"""
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
"""
import rootpy.util.quickroot as QROOT
if not Initialized.value:
QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
QROOT.kTRUE
QROOT.gErrorIgnoreLevel
Initialized.value = True
try:
QROOT.kTRUE
except RuntimeError:
# Note: If the above causes us problems, it's because this logging
# handler has been called multiple times already with an
# exception. In that case we need to force upstream to raise it.
_, exc, traceback = sys.exc_info()
caller = sys._getframe(2)
re_execute_with_exception(caller, exc, traceback)
if level < QROOT.gErrorIgnoreLevel:
# Needed to silence some "normal" startup warnings
# (copied from PyROOT Utility.cxx)
return
log = root_logger.getChild(location.replace("::", "."))
if level >= QROOT.kSysError or level >= QROOT.kFatal:
lvl = logging.CRITICAL
elif level >= QROOT.kError:
lvl = logging.ERROR
elif level >= QROOT.kWarning:
lvl = logging.WARNING
elif level >= QROOT.kInfo:
lvl = logging.INFO
else:
lvl = logging.DEBUG
if not SANE_REGEX.match(msg):
# Not ASCII characters. Escape them.
msg = repr(msg)[1:-1]
log.log(lvl, msg)
# String checks are used because we need a way of (un)forcing abort without
# modifying a global variable (gErrorAbortLevel) for the multithread tests
abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
if abort and not "rootpy.NEVERABORT" in msg:
caller = sys._getframe(1)
try:
# We can't raise an exception from here because ctypes/PyROOT swallows it.
# Hence the need for dark magic, we re-raise it within a trace.
from rootpy import ROOTError
raise ROOTError(level, location, msg)
except RuntimeError:
_, exc, traceback = sys.exc_info()
if SHOWTRACE.enabled:
from traceback import print_stack
print_stack(caller)
if DANGER.enabled:
# Avert your eyes, dark magic be within...
re_execute_with_exception(caller, exc, traceback)
if root_says_abort:
log.CRITICAL("abort().. expect a stack trace")
ctypes.CDLL(None).abort()
| Python | 0.999601 |
070589ee7dba86dd3d0a8928ebdd331d4faaa0c6 | Remove pdf_file field from plugin | addons/plugin_thunderbird/plugin_thunderbird.py | addons/plugin_thunderbird/plugin_thunderbird.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields
from osv import osv
class plugin_thunderbird_installer(osv.osv_memory):
_name = 'plugin_thunderbird.installer'
_inherit = 'res.config.installer'
_columns = {
'thunderbird': fields.boolean('Thunderbird Plug-in', help="Allows you to select an object that you would like to add to your email and its attachments."),
'plugin_name': fields.char('File name', size=64),
'plugin_file': fields.char('Thunderbird Plug-in', size=256, readonly=True, help="Thunderbird plug-in file. Save this file and install it in Thunderbird."),
}
_defaults = {
'thunderbird': True,
'plugin_name': 'openerp_plugin.xpi',
}
def default_get(self, cr, uid, fields, context=None):
res = super(plugin_thunderbird_installer, self).default_get(cr, uid, fields, context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
res['plugin_file'] = base_url + '/plugin_thunderbird/static/openerp_plugin.xpi'
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields
from osv import osv
class plugin_thunderbird_installer(osv.osv_memory):
_name = 'plugin_thunderbird.installer'
_inherit = 'res.config.installer'
_columns = {
'thunderbird': fields.boolean('Thunderbird Plug-in', help="Allows you to select an object that you would like to add to your email and its attachments."),
'plugin_name': fields.char('File name', size=64),
'plugin_file': fields.char('Thunderbird Plug-in', size=256, readonly=True, help="Thunderbird plug-in file. Save this file and install it in Thunderbird."),
}
_defaults = {
'thunderbird': True,
'plugin_name': 'openerp_plugin.xpi',
'pdf_file' : 'http://doc.openerp.com/v6.1/book/2/3_CRM_Contacts/communicate.html#managing-your-crm-from-mozilla-thunderbird',
}
def default_get(self, cr, uid, fields, context=None):
res = super(plugin_thunderbird_installer, self).default_get(cr, uid, fields, context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
res['plugin_file'] = base_url + '/plugin_thunderbird/static/openerp_plugin.xpi'
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Python | 0 |
8dbe0a3f0b9371ef63b099389fc2a8e12b2632c6 | test button | app.py | app.py | #!/usr/bin/env python
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urlencode({'q': yql_query}) + "&format=json"
result = urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
speech = "Today in " + location.get('city') + ": " + condition.get('text') + \
", the temperature is " + condition.get('temp') + " " + units.get('temperature')
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
"buttons":[
{
"type":"web_url",
"url":"https://petersapparel.parseapp.com",
"title":"Show Website"
},
{
"type":"postback",
"title":"Start Chatting",
"payload":"USER_DEFINED_PAYLOAD"
}
]
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
| #!/usr/bin/env python
from __future__ import print_function
from future.standard_library import install_aliases
install_aliases()
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urlencode({'q': yql_query}) + "&format=json"
result = urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
speech = "Today in " + location.get('city') + ": " + condition.get('text') + \
", the temperature is " + condition.get('temp') + " " + units.get('temperature')
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
| Python | 0.000002 |
052dc22a82267d381636f5f5fbbf4b5149ffb518 | check if the values are empty before adding the data | gui/specieslistdialog.py | gui/specieslistdialog.py | # -*- coding: utf8 -*-
from PyQt4 import QtGui, QtCore
class speciesListDialog(QtGui.QDialog):
_tableview = None
def __init__(self, parent, app):
QtGui.QDialog.__init__(self, parent)
self._app = app
self._parent = parent
self.initUI()
self.setWindowTitle('List species')
self.show()
def initUI(self):
layout = QtGui.QVBoxLayout(self)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview = QtGui.QTableView()
self._tableview.setModel(tablemodel)
form = QtGui.QGridLayout()
nameLabel = QtGui.QLabel("Species name")
self._nameField = QtGui.QLineEdit()
descriptionLabel = QtGui.QLabel("Species Description")
self._descriptionField = QtGui.QTextEdit()
self._saveButton = QtGui.QPushButton("Create")
self._saveButton.clicked.connect(self.createSpecies)
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
form.addWidget(nameLabel, 0, 0)
form.addWidget(self._nameField, 0, 1)
form.addWidget(descriptionLabel, 1, 0)
form.addWidget(self._descriptionField, 1, 1)
form.addWidget(self._saveButton, 2, 1)
layout.addWidget(self._tableview)
layout.addLayout(form)
layout.addWidget(closeButton)
self.setLayout(layout)
def createSpecies(self):
name = str(self._nameField.text())
description = str(self._descriptionField.toPlainText())
if name is "" or description is "":
return False
self._app.addSpecies(name, description)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview.setModel(tablemodel)
class SpeciesTableModel(QtCore.QAbstractTableModel):
def __init__(self, datain, parent = None, *args):
QtCore.QAbstractTableModel.__init__(self, parent, *args)
self.dataChanged.connect(self.saveChange)
self.arraydata = datain
def rowCount(self, parent):
return len(self.arraydata)
def columnCount(self, parent):
if len(self.arraydata) == 0:
return 0
return len(self.arraydata[0])
def data(self, index, role):
if not index.isValid():
return None
elif role != QtCore.Qt.DisplayRole:
return None
return (self.arraydata[index.row()][index.column()])
def saveChange(self, x, y):
print x, y
def flags(self, index):
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
| # -*- coding: utf8 -*-
from PyQt4 import QtGui, QtCore
class speciesListDialog(QtGui.QDialog):
_tableview = None
def __init__(self, parent, app):
QtGui.QDialog.__init__(self, parent)
self._app = app
self._parent = parent
self.initUI()
self.setWindowTitle('List species')
self.show()
def initUI(self):
layout = QtGui.QVBoxLayout(self)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview = QtGui.QTableView()
self._tableview.setModel(tablemodel)
form = QtGui.QGridLayout()
nameLabel = QtGui.QLabel("Species name")
self._nameField = QtGui.QLineEdit()
descriptionLabel = QtGui.QLabel("Species Description")
self._descriptionField = QtGui.QTextEdit()
self._saveButton = QtGui.QPushButton("Create")
self._saveButton.clicked.connect(self.createSpecies)
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
form.addWidget(nameLabel, 0, 0)
form.addWidget(self._nameField, 0, 1)
form.addWidget(descriptionLabel, 1, 0)
form.addWidget(self._descriptionField, 1, 1)
form.addWidget(self._saveButton, 2, 1)
layout.addWidget(self._tableview)
layout.addLayout(form)
layout.addWidget(closeButton)
self.setLayout(layout)
def createSpecies(self):
self._app.addSpecies(self._nameField.text(), self._descriptionField.toPlainText())
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview.setModel(tablemodel)
class SpeciesTableModel(QtCore.QAbstractTableModel):
def __init__(self, datain, parent = None, *args):
QtCore.QAbstractTableModel.__init__(self, parent, *args)
self.dataChanged.connect(self.saveChange)
self.arraydata = datain
def rowCount(self, parent):
return len(self.arraydata)
def columnCount(self, parent):
if len(self.arraydata) == 0:
return 0
return len(self.arraydata[0])
def data(self, index, role):
if not index.isValid():
return None
elif role != QtCore.Qt.DisplayRole:
return None
return (self.arraydata[index.row()][index.column()])
def saveChange(self, x, y):
print x, y
def flags(self, index):
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
| Python | 0.000005 |
ffe23433056e0a710f81bb22a9161cdaf3ff2c12 | fix format call for python 2.6 | moban/filters/repr.py | moban/filters/repr.py | from moban.extensions import JinjaFilter
@JinjaFilter('repr')
def repr_function(string):
if isinstance(string, list):
return ["'{0}'".format(str(element)) for element in string]
else:
return "'{0}'".format(str(string))
| from moban.extensions import JinjaFilter
@JinjaFilter('repr')
def repr_function(string):
if isinstance(string, list):
return ["'{}'".format(str(element)) for element in string]
else:
return "'{}'".format(str(string))
| Python | 0.000003 |
9255fd2c34a403b14b423628f47b7a7419c0d526 | update language lexer translations | packages/wakatime/wakatime/stats.py | packages/wakatime/wakatime/stats.py | # -*- coding: utf-8 -*-
"""
wakatime.stats
~~~~~~~~~~~~~~
Stats about files
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import sys
if sys.version_info[0] == 2:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments2'))
else:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments3'))
from pygments.lexers import guess_lexer_for_filename
log = logging.getLogger(__name__)
# force file name extensions to be recognized as a certain language
EXTENSIONS = {
'md': 'Markdown',
}
TRANSLATIONS = {
'CSS+Genshi Text': 'CSS',
'CSS+Lasso': 'CSS',
'HTML+Django/Jinja': 'HTML',
'HTML+Lasso': 'HTML',
'JavaScript+Genshi Text': 'JavaScript',
'JavaScript+Lasso': 'JavaScript',
'Perl6': 'Perl',
}
def guess_language(file_name):
if file_name:
language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
if language:
return language
lexer = None
try:
with open(file_name) as f:
lexer = guess_lexer_for_filename(file_name, f.read(512000))
except:
pass
if lexer:
return translate_language(str(lexer.name))
else:
return None
def guess_language_from_extension(extension):
if extension:
if extension in EXTENSIONS:
return EXTENSIONS[extension]
if extension.lower() in EXTENSIONS:
return mapping[EXTENSIONS.lower()]
return None
def translate_language(language):
if language in TRANSLATIONS:
language = TRANSLATIONS[language]
return language
def number_lines_in_file(file_name):
lines = 0
try:
with open(file_name) as f:
for line in f:
lines += 1
except IOError:
return None
return lines
def get_file_stats(file_name):
stats = {
'language': guess_language(file_name),
'lines': number_lines_in_file(file_name),
}
return stats
| # -*- coding: utf-8 -*-
"""
wakatime.stats
~~~~~~~~~~~~~~
Stats about files
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import sys
if sys.version_info[0] == 2:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments2'))
else:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments3'))
from pygments.lexers import guess_lexer_for_filename
log = logging.getLogger(__name__)
# force file name extensions to be recognized as a certain language
EXTENSIONS = {
'md': 'Markdown',
}
def guess_language(file_name):
if file_name:
language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
if language:
return language
lexer = None
try:
with open(file_name) as f:
lexer = guess_lexer_for_filename(file_name, f.read(512000))
except:
pass
if lexer:
return str(lexer.name)
else:
return None
def guess_language_from_extension(extension):
if extension:
if extension in EXTENSIONS:
return EXTENSIONS[extension]
if extension.lower() in EXTENSIONS:
return mapping[EXTENSIONS.lower()]
return None
def number_lines_in_file(file_name):
lines = 0
try:
with open(file_name) as f:
for line in f:
lines += 1
except IOError:
return None
return lines
def get_file_stats(file_name):
stats = {
'language': guess_language(file_name),
'lines': number_lines_in_file(file_name),
}
return stats
| Python | 0 |
b91c15f745d18bba4a884666cd3bd7eb87f82943 | Bind to 0.0.0.0. | bot.py | bot.py | import functools
import os
from bottle import Bottle, request, jinja2_view
view = functools.partial(jinja2_view, template_lookup=['templates'])
app = Bottle()
@app.get('/')
@view('home.html')
def instructions():
return {}
@app.post('/')
def do_correlation():
data = request.json
print data
return "Here's your data!"
if __name__ == '__main__':
PORT = os.environ.get("PORT", 80)
DEBUG = os.environ.get("DEBUG_ON", False)
app.run(host='0.0.0.0', port=PORT, debug=DEBUG)
| import functools
import os
from bottle import Bottle, request, jinja2_view
view = functools.partial(jinja2_view, template_lookup=['templates'])
app = Bottle()
@app.get('/')
@view('home.html')
def instructions():
return {}
@app.post('/')
def do_correlation():
data = request.json
print data
return "Here's your data!"
if __name__ == '__main__':
PORT = os.environ.get("PORT", 80)
DEBUG = os.environ.get("DEBUG_ON", False)
app.run(host='localhost', port=PORT, debug=DEBUG)
| Python | 0.997724 |
324cae7bc3d8d758205a0760dffd8d78ce611d48 | Increase tweet length | bot.py | bot.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy
import datetime
import os
import os.path
import requests
import json
from models import Aufmacher, Author, Image, TweetJob
from config import db
from playhouse.shortcuts import model_to_dict
from secrets import twitter_secrets
def tweet(tweetjob):
href = tweetjob.aufmacher.unique_id.replace("http://xml", "http://www")
tweet_text = """
{supertitle}: {title}
{subtitle}
""".format(**model_to_dict(tweetjob.aufmacher)).strip()
if len(tweet_text) > 250:
tweet_text = "{:.250}…".format(tweet_text)
tweet = """
{tweet_text}
{href}
""".format(tweet_text=tweet_text,
href=href).strip()
auth = tweepy.OAuthHandler(twitter_secrets["CONSUMER_KEY"], twitter_secrets["CONSUMER_SECRET"])
auth.set_access_token(twitter_secrets["ACCESS_TOKEN"], twitter_secrets["ACCESS_TOKEN_SECRET"])
api = tweepy.API(auth)
api.update_status(status=tweet)
tweetjob.tweeted_at = datetime.datetime.now()
tweetjob.save()
def go():
tweetjobs = TweetJob.select().where(TweetJob.tweeted_at == None)
for tweetjob in tweetjobs:
tweet(tweetjob)
if __name__ == "__main__":
go()
# #media_upload_response = api.media_upload(image_filename)
# #print(media_upload_response.media_id_string)
# #api.update_status(status="test with image", media_ids=[media_upload_response.media_id_string])
# with open("last_tweeted", 'w') as file:
# file.write(todays_date)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy
import datetime
import os
import os.path
import requests
import json
from models import Aufmacher, Author, Image, TweetJob
from config import db
from playhouse.shortcuts import model_to_dict
from secrets import twitter_secrets
def tweet(tweetjob):
href = tweetjob.aufmacher.unique_id.replace("http://xml", "http://www")
tweet_text = """
{supertitle}: {title}
{subtitle}
""".format(**model_to_dict(tweetjob.aufmacher)).strip()
if len(tweet_text) > 115:
tweet_text = "{:.115}…".format(tweet_text)
tweet = """
{tweet_text}
{href}
""".format(tweet_text=tweet_text,
href=href).strip()
auth = tweepy.OAuthHandler(twitter_secrets["CONSUMER_KEY"], twitter_secrets["CONSUMER_SECRET"])
auth.set_access_token(twitter_secrets["ACCESS_TOKEN"], twitter_secrets["ACCESS_TOKEN_SECRET"])
api = tweepy.API(auth)
api.update_status(status=tweet)
tweetjob.tweeted_at = datetime.datetime.now()
tweetjob.save()
def go():
tweetjobs = TweetJob.select().where(TweetJob.tweeted_at == None)
for tweetjob in tweetjobs:
tweet(tweetjob)
if __name__ == "__main__":
go()
# #media_upload_response = api.media_upload(image_filename)
# #print(media_upload_response.media_id_string)
# #api.update_status(status="test with image", media_ids=[media_upload_response.media_id_string])
# with open("last_tweeted", 'w') as file:
# file.write(todays_date)
| Python | 0.998431 |
afc1c7331e683aeffe05a914780a5ec60cdbf81b | use 4GB RAM for garmin | conversion_service/converters/converter.py | conversion_service/converters/converter.py | import os
import shutil
import subprocess
import time
from converters import garmin_converter
from converters import gis_converter
from converters.gis_converter.bootstrap import bootstrap
from utils import chg_dir_with
class Conversion(object):
def __init__(self, formats, output_dir, osm_pbf_path, basename='osmaxx_excerpt'):
self.formats = formats
self.output_dir = output_dir
self.filename_prefix = '_'.join([
basename,
time.strftime("%Y-%m-%d_%H%M%S"),
])
self.tmp_statistics_filename = self.filename_prefix + '_tmp'
self.pbf_path = osm_pbf_path
def start_format_extraction(self):
garmin_formats, gis_formats = self._split_formats()
self._create_garmin_export(garmin_formats)
self._extract_postgis_formats(gis_formats)
def _extract_postgis_formats(self, formats):
if len(formats) > 0:
bootstrap.boostrap(self.pbf_path)
with chg_dir_with(os.path.dirname(__file__)):
# only create statistics once and remove it when done with all formats
self._get_statistics(self.tmp_statistics_filename)
for format in formats:
file_basename = '_'.join([self.filename_prefix, format])
self._copy_statistics_file_to_format_dir(file_basename)
self._export_from_db_to_format(file_basename, format)
# remove the temporary statistics file
os.remove(os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'))
def _create_garmin_export(self, formats):
if len(formats) == 1:
garmin_format = formats[0]
path_to_mkgmap = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'garmin_converter', 'command_line_utils', 'mkgmap', 'mkgmap.jar')
)
garmin_out_dir = os.path.join(self.output_dir, garmin_format)
os.makedirs(garmin_out_dir, exist_ok=True)
subprocess.check_call(['java', '-Xms32m', '-Xmx4096m', '-jar', path_to_mkgmap, '--output-dir={0}'.format(garmin_out_dir), '--input-file={0}'.format(self.pbf_path)])
subprocess.check_call(["zip", "-r", "--move", '.'.join([garmin_out_dir, 'zip']), garmin_out_dir])
# Export files of the specified format (file_format) from existing database
def _export_from_db_to_format(self, file_basename, file_format): # pragma: nocover
dbcmd = 'sh', './extract/extract_format.sh', self.output_dir, file_basename, file_format
dbcmd = [str(arg) for arg in dbcmd]
subprocess.check_call(dbcmd)
# Extract Statistics
def _get_statistics(self, filename): # pragma: nocover
statcmd = 'bash', './extract/extract_statistics.sh', self.output_dir, filename
statcmd = [str(arg) for arg in statcmd]
subprocess.check_call(statcmd)
def _copy_statistics_file_to_format_dir(self, file_basename): # pragma: nocover
shutil.copyfile(
os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'),
os.path.join(self.output_dir, 'tmp', file_basename + '_STATISTICS.csv')
)
def _split_formats(self):
garmin_formats = [garmin_format for garmin_format in self.formats
if garmin_format in garmin_converter.options.get_output_formats()]
gis_formats = [gis_format for gis_format in self.formats
if gis_format in gis_converter.options.get_output_formats()]
return garmin_formats, gis_formats
| import os
import shutil
import subprocess
import time
from converters import garmin_converter
from converters import gis_converter
from converters.gis_converter.bootstrap import bootstrap
from utils import chg_dir_with
class Conversion(object):
def __init__(self, formats, output_dir, osm_pbf_path, basename='osmaxx_excerpt'):
self.formats = formats
self.output_dir = output_dir
self.filename_prefix = '_'.join([
basename,
time.strftime("%Y-%m-%d_%H%M%S"),
])
self.tmp_statistics_filename = self.filename_prefix + '_tmp'
self.pbf_path = osm_pbf_path
def start_format_extraction(self):
garmin_formats, gis_formats = self._split_formats()
self._create_garmin_export(garmin_formats)
self._extract_postgis_formats(gis_formats)
def _extract_postgis_formats(self, formats):
if len(formats) > 0:
bootstrap.boostrap(self.pbf_path)
with chg_dir_with(os.path.dirname(__file__)):
# only create statistics once and remove it when done with all formats
self._get_statistics(self.tmp_statistics_filename)
for format in formats:
file_basename = '_'.join([self.filename_prefix, format])
self._copy_statistics_file_to_format_dir(file_basename)
self._export_from_db_to_format(file_basename, format)
# remove the temporary statistics file
os.remove(os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'))
def _create_garmin_export(self, formats):
if len(formats) == 1:
garmin_format = formats[0]
path_to_mkgmap = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'garmin_converter', 'command_line_utils', 'mkgmap', 'mkgmap.jar')
)
garmin_out_dir = os.path.join(self.output_dir, garmin_format)
os.makedirs(garmin_out_dir, exist_ok=True)
subprocess.check_call(['java', '-jar', path_to_mkgmap, '--output-dir={0}'.format(garmin_out_dir), '--input-file={0}'.format(self.pbf_path)])
subprocess.check_call(["zip", "-r", "--move", '.'.join([garmin_out_dir, 'zip']), garmin_out_dir])
# Export files of the specified format (file_format) from existing database
def _export_from_db_to_format(self, file_basename, file_format): # pragma: nocover
dbcmd = 'sh', './extract/extract_format.sh', self.output_dir, file_basename, file_format
dbcmd = [str(arg) for arg in dbcmd]
subprocess.check_call(dbcmd)
# Extract Statistics
def _get_statistics(self, filename): # pragma: nocover
statcmd = 'bash', './extract/extract_statistics.sh', self.output_dir, filename
statcmd = [str(arg) for arg in statcmd]
subprocess.check_call(statcmd)
def _copy_statistics_file_to_format_dir(self, file_basename): # pragma: nocover
shutil.copyfile(
os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'),
os.path.join(self.output_dir, 'tmp', file_basename + '_STATISTICS.csv')
)
def _split_formats(self):
garmin_formats = [garmin_format for garmin_format in self.formats
if garmin_format in garmin_converter.options.get_output_formats()]
gis_formats = [gis_format for gis_format in self.formats
if gis_format in gis_converter.options.get_output_formats()]
return garmin_formats, gis_formats
| Python | 0.000001 |
22c9ac7807ad493e9b2b0b97f4299cc87b316ada | Fix copy_resources overwrite. | modelmanager/utils.py | modelmanager/utils.py | """All handy, general utility functionality used throughout the package."""
import os
import os.path as osp
import fnmatch
import shutil
def load_module_path(name, path):
"""Load a python module source file python version aware."""
if True: # PY==27
import imp
m = imp.load_source(name, path)
elif False: # PY==33/34
from importlib.machinery import SourceFileLoader
srcloader = SourceFileLoader(name, path)
m = srcloader.load_module()
else: # PY 35
import importlib.util as iu
spec = iu.spec_from_file_location(name, path)
m = iu.module_from_spec(spec)
spec.loader.exec_module(m)
return m
def get_paths_pattern(pattern, startdir):
"""
Get all paths (including in subdirectories) matching pattern.
Returns list of relative paths from startdir.
"""
matches = []
for root, dirnames, filenames in os.walk(startdir):
fpaths = [os.path.relpath(os.path.join(root, fn), startdir)
for fn in filenames]
matches += fnmatch.filter(fpaths, pattern)
return matches
def copy_resources(sourcedir, destinationdir, overwrite=False,
ignorepatterns=[], linkpatterns=[], verbose=False):
"""
Copy/sync resource file tree from sourcedir to destinationdir.
overwrite: Overwrite existing files.
"""
def printverbose(args):
if verbose:
print(args)
return
pj = osp.join
if not osp.exists(destinationdir):
printverbose('mkdir %s' % destinationdir)
os.mkdir(destinationdir)
walker = os.walk(sourcedir, topdown=True)
for path, dirs, files in walker:
rpath = osp.relpath(path, sourcedir).replace('.', '')
# dirs
subsetdirs = []
for d in dirs:
rdir = pj(rpath, d)
dest = pj(destinationdir, rpath, d)
if any(fnmatch.fnmatch(rdir, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rdir)
# dir to symlink with relative path
elif any(fnmatch.fnmatch(rdir, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, d), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# create new dir
else:
if not osp.exists(dest):
printverbose('mkdir %s' % dest)
os.mkdir(dest)
subsetdirs.append(d)
# update dirs (change in place will prevent walking into them)
dirs[:] = subsetdirs
# files
for f in files:
rfil = osp.join(rpath, f)
dest = pj(destinationdir, rpath, f)
src = pj(path, f)
# ignored files
if any(fnmatch.fnmatch(rfil, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rfil)
continue
# file to symlink with relative path
elif any(fnmatch.fnmatch(rfil, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, f), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy/relink existing symlinks
elif osp.islink(src):
linkto = os.readlink(src)
lnabs = osp.abspath(pj(path, linkto))
rsrc = osp.relpath(lnabs, osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy file
elif not osp.exists(dest) or overwrite:
printverbose('cp %s to %s' % (src, dest))
shutil.copy(src, dest)
return
| """All handy, general utility functionality used throughout the package."""
import os
import os.path as osp
import fnmatch
import shutil
def load_module_path(name, path):
"""Load a python module source file python version aware."""
if True: # PY==27
import imp
m = imp.load_source(name, path)
elif False: # PY==33/34
from importlib.machinery import SourceFileLoader
srcloader = SourceFileLoader(name, path)
m = srcloader.load_module()
else: # PY 35
import importlib.util as iu
spec = iu.spec_from_file_location(name, path)
m = iu.module_from_spec(spec)
spec.loader.exec_module(m)
return m
def get_paths_pattern(pattern, startdir):
"""
Get all paths (including in subdirectories) matching pattern.
Returns list of relative paths from startdir.
"""
matches = []
for root, dirnames, filenames in os.walk(startdir):
fpaths = [os.path.relpath(os.path.join(root, fn), startdir)
for fn in filenames]
matches += fnmatch.filter(fpaths, pattern)
return matches
def copy_resources(sourcedir, destinationdir, overwrite=False,
ignorepatterns=[], linkpatterns=[], verbose=False):
"""
Copy/sync resource file tree from sourcedir to destinationdir.
overwrite: Overwrite existing files.
"""
def printverbose(args):
if verbose:
print(args)
return
pj = osp.join
if not osp.exists(destinationdir):
printverbose('mkdir %s' % destinationdir)
os.mkdir(destinationdir)
walker = os.walk(sourcedir, topdown=True)
for path, dirs, files in walker:
rpath = osp.relpath(path, sourcedir).replace('.', '')
# dirs
subsetdirs = []
for d in dirs:
rdir = pj(rpath, d)
dest = pj(destinationdir, rpath, d)
if any(fnmatch.fnmatch(rdir, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rdir)
# dir to symlink with relative path
elif any(fnmatch.fnmatch(rdir, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, d), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# create new dir
else:
if not osp.exists(dest):
printverbose('mkdir %s' % dest)
os.mkdir(dest)
subsetdirs.append(d)
# update dirs (change in place will prevent walking into them)
dirs[:] = subsetdirs
# files
for f in files:
rfil = osp.join(rpath, f)
dest = pj(destinationdir, rpath, f)
src = pj(path, f)
# ignored files
if any(fnmatch.fnmatch(rfil, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rfil)
continue
# file to symlink with relative path
elif any(fnmatch.fnmatch(rfil, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, f), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy/relink existing symlinks
elif osp.islink(src):
linkto = os.readlink(src)
lnabs = osp.abspath(pj(path, linkto))
rsrc = osp.relpath(lnabs, osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy file
else:
printverbose('cp %s to %s' % (src, dest))
shutil.copy(src, dest)
return
| Python | 0 |
3d52eca5b9a7cddcd1d2b67547c22c28847aa085 | fix print format for python3 | tools/run_tests/start_port_server.py | tools/run_tests/start_port_server.py | #!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around port server starting code.
Used by developers who wish to run individual C/C++ tests outside of the
run_tests.py infrastructure.
The path to this file is called out in test/core/util/port.c, and printed as
an error message to users.
"""
import python_utils.start_port_server as start_port_server
start_port_server.start_port_server()
print("Port server started successfully")
| #!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around port server starting code.
Used by developers who wish to run individual C/C++ tests outside of the
run_tests.py infrastructure.
The path to this file is called out in test/core/util/port.c, and printed as
an error message to users.
"""
import python_utils.start_port_server as start_port_server
start_port_server.start_port_server()
print "Port server started successfully"
| Python | 0.000027 |
6561ea0b329b2f42126dc23eab59676de305dd73 | remove unused imports | flask_wtf/forms.py | flask_wtf/forms.py | from __future__ import absolute_import
import jinja2
from flask import request, session, current_app
from wtforms.ext.csrf.session import SessionSecureForm
from wtforms.fields import HiddenField
class Form(SessionSecureForm):
"Implements a SessionSecureForm using app.SECRET_KEY and flask.session obj"
def __init__(self, formdata=None, obj=None, prefix='', csrf_enabled=None, **kwargs):
self.csrf_enabled = csrf_enabled
if csrf_enabled is None:
self.csrf_enabled = current_app.config.get('CSRF_ENABLED', True)
self.SECRET_KEY = current_app.config.get('CSRF_SESSION_KEY', '_csrf_token')
super(Form, self).__init__(formdata, obj, prefix, session, **kwargs)
def is_submitted(self):
"Check if request method is either PUT or POST"
return request and request.method in ("PUT", "POST")
def validate_on_submit(self):
"Call `form.validate()` if request method was either PUT or POST"
return self.is_submitted() and self.validate()
def validate_csrf_token(self, field):
if not self.csrf_enabled:
return True
return super(Form, self).validate_csrf_token(field)
def hidden_fields(self, *fields):
"hidden fields in a hidden DIV tag, in order to keep XHTML compliance."
if not fields:
fields = [f for f in self if isinstance(f, HiddenField)]
rv = [u'<div style="display:none;">']
for field in fields:
if isinstance(field, basestring):
field = getattr(self, field)
rv.append(unicode(field))
rv.append(u"</div>")
return jinja2.Markup(u"".join(rv))
def process(self, formdata=None, obj=None, **kwargs):
try:
if formdata is None:
formdata = request.form
except AttributeError:
pass
super(Form, self).process(formdata, obj, **kwargs)
| from __future__ import absolute_import
import jinja2
import wtforms
from flask import request, session, current_app
from wtforms.ext.csrf.session import SessionSecureForm
from wtforms.fields import HiddenField
class Form(SessionSecureForm):
"Implements a SessionSecureForm using app.SECRET_KEY and flask.session obj"
def __init__(self, formdata=None, obj=None, prefix='', csrf_enabled=None, **kwargs):
self.csrf_enabled = csrf_enabled
if csrf_enabled is None:
self.csrf_enabled = current_app.config.get('CSRF_ENABLED', True)
self.SECRET_KEY = current_app.config.get('CSRF_SESSION_KEY', '_csrf_token')
super(Form, self).__init__(formdata, obj, prefix, session, **kwargs)
def is_submitted(self):
"Check if request method is either PUT or POST"
return request and request.method in ("PUT", "POST")
def validate_on_submit(self):
"Call `form.validate()` if request method was either PUT or POST"
return self.is_submitted() and self.validate()
def validate_csrf_token(self, field):
if not self.csrf_enabled:
return True
return super(Form, self).validate_csrf_token(field)
def hidden_fields(self, *fields):
"hidden fields in a hidden DIV tag, in order to keep XHTML compliance."
if not fields:
fields = [f for f in self if isinstance(f, HiddenField)]
rv = [u'<div style="display:none;">']
for field in fields:
if isinstance(field, basestring):
field = getattr(self, field)
rv.append(unicode(field))
rv.append(u"</div>")
return jinja2.Markup(u"".join(rv))
def process(self, formdata=None, obj=None, **kwargs):
try:
if formdata is None:
formdata = request.form
except AttributeError:
pass
super(Form, self).process(formdata, obj, **kwargs)
| Python | 0.000001 |
413057374d55d851fa4717a66a0975f29b131f4f | Fix bytes output | cli.py | cli.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 30 13:58:24 2015
@author: mdurant
"""
import argparse
import hdfs
import sys
import inspect
parser = argparse.ArgumentParser(description='HDFS commands')
parser.add_argument("command", help="filesystem command to run")
parser.add_argument("par1", help="filesystem command to run", nargs="?", default=None)
parser.add_argument("par2", help="filesystem command to run", nargs="?", default=None)
parser.add_argument('--port', type=int,
help='Name node port')
parser.add_argument('--host', type=str,
help='Name node address')
parser.add_argument('--verbose', type=int, default=0,
help='Verbosity')
args = parser.parse_args()
par1, par2 = args.par1, args.par2
if args.verbose > 0:
print(args)
commands = ['ls', 'cat', 'info', 'mkdir', 'rmdir', 'rm', 'mv', 'exists',
'chmod', 'chmown', 'set_replication', 'get_block_locations',
'to_local', 'to_hdfs']
if __name__ == "__main__":
if args.command not in commands:
print("Available commands:", list(sorted(commands)))
sys.exit(1)
kwargs = {}
if args.host:
kwargs['host'] = args.host
if args.port:
kwargs['port'] = args.port
fs = hdfs.HDFileSystem(**kwargs)
cmd = getattr(fs, args.command)
nargs = len(inspect.getargspec(cmd).args) - 1
args = (par1, par2)[:nargs]
out = cmd(*args)
if isinstance(out, list):
for l in out:
print(l)
elif hasattr(out, 'decode'):
print(out.decode())
elif out is not None:
print(out)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 30 13:58:24 2015
@author: mdurant
"""
import argparse
import hdfs
import sys
import inspect
parser = argparse.ArgumentParser(description='HDFS commands')
parser.add_argument("command", help="filesystem command to run")
parser.add_argument("par1", help="filesystem command to run", nargs="?", default=None)
parser.add_argument("par2", help="filesystem command to run", nargs="?", default=None)
parser.add_argument('--port', type=int,
help='Name node port')
parser.add_argument('--host', type=str,
help='Name node address')
parser.add_argument('--verbose', type=int, default=0,
help='Verbosity')
args = parser.parse_args()
par1, par2 = args.par1, args.par2
if args.verbose > 0:
print(args)
commands = ['ls', 'cat', 'info', 'mkdir', 'rmdir', 'rm', 'mv', 'exists',
'chmod', 'chmown', 'set_replication', 'get_block_locations',
'to_local', 'to_hdfs']
if __name__ == "__main__":
if args.command not in commands:
print("Available commands:", list(sorted(commands)))
sys.exit(1)
kwargs = {}
if args.host:
kwargs['host'] = args.host
if args.port:
kwargs['port'] = args.port
fs = hdfs.HDFileSystem(**kwargs)
cmd = getattr(fs, args.command)
nargs = len(inspect.getargspec(cmd).args) - 1
args = (par1, par2)[:nargs]
out = cmd(*args)
if isinstance(out, list):
for l in out:
print(l)
elif out is not None:
print(out)
| Python | 0 |
a55a7162de4e237b4079c0517367ef23b7aa8b01 | PodcastList.by_rating() should return a list | mygpo/share/models.py | mygpo/share/models.py | from random import random
from couchdbkit.ext.django.schema import *
from django.template.defaultfilters import slugify
from mygpo.core.proxy import DocumentABCMeta
from mygpo.users.models import RatingMixin
from mygpo.cache import cache_result
class PodcastList(Document, RatingMixin):
""" A list of Podcasts that a user creates for the purpose of sharing """
__metaclass__ = DocumentABCMeta
title = StringProperty(required=True)
slug = StringProperty(required=True)
podcasts = StringListProperty()
user = StringProperty(required=True)
random_key = FloatProperty(default=random)
@classmethod
def for_user_slug(cls, user_id, slug):
r = cls.view('podcastlists/by_user_slug',
key = [user_id, slug],
include_docs = True,
)
return r.first() if r else None
@classmethod
def for_user(cls, user_id):
r = cls.view('podcastlists/by_user_slug',
startkey = [user_id, None],
endkey = [user_id, {}],
include_docs = True,
)
return list(r)
@classmethod
@cache_result(timeout=60*69)
def by_rating(cls, **kwargs):
r = cls.view('podcastlists/by_rating',
descending = True,
include_docs = True,
stale = 'update_after',
**kwargs
)
return list(r)
@classmethod
@cache_result(timeout=60*60)
def count(cls, with_rating=True):
view = 'podcastlists/by_rating' if with_rating else \
'podcastlists/by_user_slug'
return cls.view(view,
limit = 0,
stale = 'update_after',
).total_rows
@classmethod
def random(cls, chunk_size=1):
while True:
rnd = random()
res = cls.view('podcastlists/random',
startkey = rnd,
include_docs = True,
limit = chunk_size,
stale = 'ok',
)
if not res:
break
for r in res:
yield r
def __repr__(self):
return '<{cls} "{title}" by {user}>'.format(
cls=self.__class__.__name__, title=self.title, user=self.user)
| from random import random
from couchdbkit.ext.django.schema import *
from django.template.defaultfilters import slugify
from mygpo.core.proxy import DocumentABCMeta
from mygpo.users.models import RatingMixin
from mygpo.cache import cache_result
class PodcastList(Document, RatingMixin):
""" A list of Podcasts that a user creates for the purpose of sharing """
__metaclass__ = DocumentABCMeta
title = StringProperty(required=True)
slug = StringProperty(required=True)
podcasts = StringListProperty()
user = StringProperty(required=True)
random_key = FloatProperty(default=random)
@classmethod
def for_user_slug(cls, user_id, slug):
r = cls.view('podcastlists/by_user_slug',
key = [user_id, slug],
include_docs = True,
)
return r.first() if r else None
@classmethod
def for_user(cls, user_id):
r = cls.view('podcastlists/by_user_slug',
startkey = [user_id, None],
endkey = [user_id, {}],
include_docs = True,
)
return list(r)
@classmethod
@cache_result(timeout=60*69)
def by_rating(cls, **kwargs):
r = cls.view('podcastlists/by_rating',
descending = True,
include_docs = True,
stale = 'update_after',
**kwargs
)
return r.iterator()
@classmethod
@cache_result(timeout=60*60)
def count(cls, with_rating=True):
view = 'podcastlists/by_rating' if with_rating else \
'podcastlists/by_user_slug'
return cls.view(view,
limit = 0,
stale = 'update_after',
).total_rows
@classmethod
def random(cls, chunk_size=1):
while True:
rnd = random()
res = cls.view('podcastlists/random',
startkey = rnd,
include_docs = True,
limit = chunk_size,
stale = 'ok',
)
if not res:
break
for r in res:
yield r
def __repr__(self):
return '<{cls} "{title}" by {user}>'.format(
cls=self.__class__.__name__, title=self.title, user=self.user)
| Python | 0.999999 |
72fa091716e1e0d40a8219701da94bee6d49c58b | remove debugging | csw.py | csw.py | #!/usr/bin/python -u
# -*- coding: iso-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <tomkralidis@hotmail.com>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
# CGI wrapper for pycsw
import os, sys
from StringIO import StringIO
from server import server
CONFIG = 'default.cfg'
GZIP = False
if os.environ.has_key('PYCSW_CONFIG'):
CONFIG = os.environ['PYCSW_CONFIG']
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
CONFIG = kvp.split('=')[1]
if (os.environ.has_key('HTTP_ACCEPT_ENCODING') and
os.environ['HTTP_ACCEPT_ENCODING'].find('gzip') != -1):
# set for gzip compressed response
GZIP = True
# get runtime configuration
CSW = server.Csw(CONFIG)
# set compression level
if CSW.config.has_option('server', 'gzip_compresslevel'):
GZIP_COMPRESSLEVEL = \
int(CSW.config.get('server', 'gzip_compresslevel'))
else:
GZIP_COMPRESSLEVEL = 0
# go!
OUTP = CSW.dispatch_cgi()
sys.stdout.write("Content-Type:%s\r\n" % CSW.contenttype)
if GZIP and GZIP_COMPRESSLEVEL > 0:
import gzip
BUF = StringIO()
GZIPFILE = gzip.GzipFile(mode='wb', fileobj=BUF,
compresslevel=GZIP_COMPRESSLEVEL)
GZIPFILE.write(OUTP)
GZIPFILE.close()
OUTP = BUF.getvalue()
sys.stdout.write('Content-Encoding: gzip\r\n')
sys.stdout.write('Content-Length: %d\r\n' % len(OUTP))
sys.stdout.write('\r\n')
sys.stdout.write(OUTP)
| #!/usr/bin/python -u
# -*- coding: iso-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <tomkralidis@hotmail.com>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
# CGI wrapper for pycsw
import cgitb
cgitb.enable()
import os, sys
from StringIO import StringIO
from server import server
CONFIG = 'default.cfg'
GZIP = False
if os.environ.has_key('PYCSW_CONFIG'):
CONFIG = os.environ['PYCSW_CONFIG']
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
CONFIG = kvp.split('=')[1]
if (os.environ.has_key('HTTP_ACCEPT_ENCODING') and
os.environ['HTTP_ACCEPT_ENCODING'].find('gzip') != -1):
# set for gzip compressed response
GZIP = True
# get runtime configuration
CSW = server.Csw(CONFIG)
# set compression level
if CSW.config.has_option('server', 'gzip_compresslevel'):
GZIP_COMPRESSLEVEL = \
int(CSW.config.get('server', 'gzip_compresslevel'))
else:
GZIP_COMPRESSLEVEL = 0
# go!
OUTP = CSW.dispatch_cgi()
sys.stdout.write("Content-Type:%s\r\n" % CSW.contenttype)
if GZIP and GZIP_COMPRESSLEVEL > 0:
import gzip
BUF = StringIO()
GZIPFILE = gzip.GzipFile(mode='wb', fileobj=BUF,
compresslevel=GZIP_COMPRESSLEVEL)
GZIPFILE.write(OUTP)
GZIPFILE.close()
OUTP = BUF.getvalue()
sys.stdout.write('Content-Encoding: gzip\r\n')
sys.stdout.write('Content-Length: %d\r\n' % len(OUTP))
sys.stdout.write('\r\n')
sys.stdout.write(OUTP)
| Python | 0.000065 |
82bf7fbf5c92c29f058df06ba3828002322f6bf3 | Add Qantas 94 Heavy to privileged Tavern users | globalvars.py | globalvars.py | import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
import md5
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
experimental_reasons = ["Code block"] # Don't widely report these
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465",
"88577", "34124"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832", "160017",
"201151", "188558", "229166", "159034", "203972", "188673", "258672",
"227577", "255735", "279182", "271104", "220428"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"}
commit = os.popen('git log --pretty=format:"%h" -n 1').read()
commit_author = os.popen('git log --pretty=format:"%cn" -n 1').read()
if md5.new(commit_author).hexdigest() in censored_committer_names:
commit_author = censored_committer_names[md5.new(commit_author).hexdigest()]
commit_with_author = os.popen('git log --pretty=format:"%h (' + commit_author + ': *%s*)" -n 1').read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
se_sites = []
tavern_users_chatting = []
| import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
import md5
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
experimental_reasons = ["Code block"] # Don't widely report these
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465",
"88577", "34124"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832", "160017",
"201151", "188558", "229166", "159034", "203972", "188673", "258672",
"227577", "255735", "279182", "271104"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"}
commit = os.popen('git log --pretty=format:"%h" -n 1').read()
commit_author = os.popen('git log --pretty=format:"%cn" -n 1').read()
if md5.new(commit_author).hexdigest() in censored_committer_names:
commit_author = censored_committer_names[md5.new(commit_author).hexdigest()]
commit_with_author = os.popen('git log --pretty=format:"%h (' + commit_author + ': *%s*)" -n 1').read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
se_sites = []
tavern_users_chatting = []
| Python | 0 |
0b600e96f4778ea7f82f357cdadfd97967ecbe86 | Add PeterJ to privileged Tavern users | globalvars.py | globalvars.py | import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
import md5
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
experimental_reasons = ["Code block"] # Don't widely report these
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465",
"88577"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832", "160017",
"201151", "188558", "229166", "159034", "203972"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"}
commit = os.popen('git log --pretty=format:"%h" -n 1').read()
commit_author = os.popen('git log --pretty=format:"%cn" -n 1').read()
if md5.new(commit_author).hexdigest() in censored_committer_names:
commit_author = censored_committer_names[md5.new(commit_author).hexdigest()]
commit_with_author = os.popen('git log --pretty=format:"%h (' + commit_author + ': *%s*)" -n 1').read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
se_sites = []
| import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
import md5
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
experimental_reasons = ["Code block"] # Don't widely report these
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465",
"88577"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832", "160017",
"201151", "188558", "229166", "159034"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"}
commit = os.popen('git log --pretty=format:"%h" -n 1').read()
commit_author = os.popen('git log --pretty=format:"%cn" -n 1').read()
if md5.new(commit_author).hexdigest() in censored_committer_names:
commit_author = censored_committer_names[md5.new(commit_author).hexdigest()]
commit_with_author = os.popen('git log --pretty=format:"%h (' + commit_author + ': *%s*)" -n 1').read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
se_sites = []
| Python | 0 |
1c068004b9e1831bde30be527f20b0d2835c467c | fix multigpu bug | train_policy/trainer.py | train_policy/trainer.py | #!/usr/bin/python3
#-*-coding:utf-8-*-
#$File: trainer.py
#$Date: Sat May 7 11:00:10 2016
#$Author: Like Ma <milkpku[at]gmail[dot]com>
from config import Config
from dataset import load_data
from model import get_model
import tensorflow as tf
import argparse
def train(args):
device = args.device
load_path = args.load_path
# load data
train_data = load_data('train')
val_data = load_data('validation')
# load model
with tf.device('/gpu:%d' % device):
model = get_model('train')
# trainer init
optimizer = Config.optimizer
train_step = optimizer.minimize(model.loss)
# init session and server
sess = tf.InteractiveSession()
saver = tf.train.Saver()
if load_path==None:
sess.run(tf.initialize_all_variables())
else:
saver.restore(sess, load_path)
print("Model restored from %s" % load_path)
# accuracy
pred = tf.reshape(model.pred, [-1, 9*10*16])
label = tf.reshape(model.label, [-1, 9*10*16])
correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(label,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# train steps
for i in range(Config.n_epoch):
# training step
batch_data, batch_label = train_data.next_batch(Config.minibatch_size)
input_dict = {model.label:batch_label}
for var, data in zip(model.inputs, batch_data):
input_dict[var]=data
#from IPython import embed;embed()
sess.run(train_step, feed_dict=input_dict)
# evalue step
if (i+1)%Config.evalue_point == 0:
batch_data, batch_label = val_data.next_batch(Config.minibatch_size)
val_dict = {model.label:batch_label}
for var, data in zip(model.inputs, batch_data):
val_dict[var]=data
score = accuracy.eval(feed_dict=val_dict)
print("epoch %d, accuracy is %.2f" % (i,score))
# save step
if (i+1)%Config.check_point == 0:
save_path = saver.save(sess, "%s/epoch-%d" %(Config.save_path, i))
print("Model saved in file: %s" % save_path)
if __name__=='__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--device", type=int, default=0, help="gpu id")
parser.add_argument("-c", "--load_path", default=None, help="load trained model")
args = parser.parse_args()
train(args)
| #!/usr/bin/python3
#-*-coding:utf-8-*-
#$File: trainer.py
#$Date: Sat May 7 11:00:10 2016
#$Author: Like Ma <milkpku[at]gmail[dot]com>
from config import Config
from dataset import load_data
from model import get_model
import tensorflow as tf
import argparse
def train(load_path=None):
# load data
train_data = load_data('train')
val_data = load_data('validation')
# load model
model = get_model('train')
# trainer init
optimizer = Config.optimizer
train_step = optimizer.minimize(model.loss)
# init session and server
sess = tf.InteractiveSession()
saver = tf.train.Saver()
if load_path==None:
sess.run(tf.initialize_all_variables())
else:
saver.restore(sess, load_path)
print("Model restored from %s" % load_path)
# accuracy
pred = tf.reshape(model.pred, [-1, 9*10*16])
label = tf.reshape(model.label, [-1, 9*10*16])
correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(label,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# train steps
for i in range(Config.n_epoch):
# training step
batch_data, batch_label = train_data.next_batch(Config.minibatch_size)
input_dict = {model.label:batch_label}
for var, data in zip(model.inputs, batch_data):
input_dict[var]=data
#from IPython import embed;embed()
sess.run(train_step, feed_dict=input_dict)
# evalue step
if (i+1)%Config.evalue_point == 0:
batch_data, batch_label = val_data.next_batch(Config.minibatch_size)
val_dict = {model.label:batch_label}
for var, data in zip(model.inputs, batch_data):
val_dict[var]=data
score = accuracy.eval(feed_dict=val_dict)
print("epoch %d, accuracy is %.2f" % (i,score))
# save step
if (i+1)%Config.check_point == 0:
save_path = saver.save(sess, "%s/epoch-%d" %(Config.save_path, i))
print("Model saved in file: %s" % save_path)
if __name__=='__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--device", type=int, default=0, help="gpu id")
parser.add_argument("-c", "--load_path", default=None, help="load trained model")
args = parser.parse_args()
with tf.device('/gpu:%d' % args.device):
train(args.load_path)
| Python | 0.000001 |
b666228405e9b23e65d6d631968a7f6f334b6b46 | change string for utf8 translation (#48) | translation/samples/snippets/snippets_test.py | translation/samples/snippets/snippets_test.py | # -*- coding: utf-8 -*-
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import snippets
def test_detect_language(capsys):
snippets.detect_language('Hæ sæta')
out, _ = capsys.readouterr()
assert 'is' in out
def test_list_languages(capsys):
snippets.list_languages()
out, _ = capsys.readouterr()
assert 'Icelandic (is)' in out
def test_list_languages_with_target(capsys):
snippets.list_languages_with_target('is')
out, _ = capsys.readouterr()
assert u'íslenska (is)' in out
def test_translate_text(capsys):
snippets.translate_text('is', 'Hello world')
out, _ = capsys.readouterr()
assert u'Halló heimur' in out
def test_translate_utf8(capsys):
text = u'파인애플 13개'
snippets.translate_text('en', text)
out, _ = capsys.readouterr()
assert u'13 pineapples' in out
| # -*- coding: utf-8 -*-
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import snippets
def test_detect_language(capsys):
snippets.detect_language('Hæ sæta')
out, _ = capsys.readouterr()
assert 'is' in out
def test_list_languages(capsys):
snippets.list_languages()
out, _ = capsys.readouterr()
assert 'Icelandic (is)' in out
def test_list_languages_with_target(capsys):
snippets.list_languages_with_target('is')
out, _ = capsys.readouterr()
assert u'íslenska (is)' in out
def test_translate_text(capsys):
snippets.translate_text('is', 'Hello world')
out, _ = capsys.readouterr()
assert u'Halló heimur' in out
def test_translate_utf8(capsys):
text = u'나는 파인애플을 좋아한다.'
snippets.translate_text('en', text)
out, _ = capsys.readouterr()
assert u'I like pineapple' in out
| Python | 0 |
19acfbad5db83c20f6e6459f35b63600203ba09c | Test to make sure that the cinder-volumes vg exists | packstack/plugins/cinder_250.py | packstack/plugins/cinder_250.py | """
Installs and configures Cinder
"""
import logging
import packstack.installer.engine_validators as validate
from packstack.installer import basedefs
import packstack.installer.common_utils as utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
# Controller object will be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-Cinder"
PLUGIN_NAME_COLORED = utils.getColoredText(PLUGIN_NAME, basedefs.BLUE)
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding Openstack Cinder configuration")
paramsList = [
{"CMD_OPTION" : "cinder-host",
"USAGE" : "The IP address of the server on which to install Cinder",
"PROMPT" : "The IP address of the server on which to install Cinder",
"OPTION_LIST" : [],
"VALIDATION_FUNC" : validate.validatePing,
"DEFAULT_VALUE" : "127.0.0.1",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDER",
"DESCRIPTION" : "Cinder Config paramaters",
"PRE_CONDITION" : "CONFIG_CINDER_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def initSequences(controller):
if controller.CONF['CONFIG_CINDER_INSTALL'] != 'y':
return
cindersteps = [
{'title': 'Adding Cinder Keystone Manifest entries', 'functions':[createkeystonemanifest]},
{'title': 'Checking if the Cinder server has a cinder-volumes vg', 'functions':[checkcindervg]},
{'title': 'Creating Cinder Manifest', 'functions':[createmanifest]}
]
controller.addSequence("Installing Cinder", [], [], cindersteps)
def checkcindervg():
server = utils.ScriptRunner(controller.CONF['CONFIG_CINDER_HOST'])
server.append('vgdisplay cinder-volumes')
try:
server.execute()
except:
print "The cinder server should contain a cinder-volumes volume group"
raise
def createkeystonemanifest():
manifestfile = "%s_keystone.pp"%controller.CONF['CONFIG_KEYSTONE_HOST']
manifestdata = getManifestTemplate("keystone_cinder.pp")
appendManifestFile(manifestfile, manifestdata)
def createmanifest():
manifestfile = "%s_cinder.pp"%controller.CONF['CONFIG_CINDER_HOST']
manifestdata = getManifestTemplate("cinder.pp")
appendManifestFile(manifestfile, manifestdata)
| """
Installs and configures Cinder
"""
import logging
import packstack.installer.engine_validators as validate
from packstack.installer import basedefs
import packstack.installer.common_utils as utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
# Controller object will be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-Cinder"
PLUGIN_NAME_COLORED = utils.getColoredText(PLUGIN_NAME, basedefs.BLUE)
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding Openstack Cinder configuration")
paramsList = [
{"CMD_OPTION" : "cinder-host",
"USAGE" : "The IP address of the server on which to install Cinder",
"PROMPT" : "The IP address of the server on which to install Cinder",
"OPTION_LIST" : [],
"VALIDATION_FUNC" : validate.validatePing,
"DEFAULT_VALUE" : "127.0.0.1",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDER",
"DESCRIPTION" : "Cinder Config paramaters",
"PRE_CONDITION" : "CONFIG_CINDER_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def initSequences(controller):
if controller.CONF['CONFIG_CINDER_INSTALL'] != 'y':
return
cindersteps = [
{'title': 'Adding Cinder Keystone Manifest entries', 'functions':[createkeystonemanifest]},
{'title': 'Creating Cinder Manifest', 'functions':[createmanifest]}
]
controller.addSequence("Installing Cinder", [], [], cindersteps)
def createkeystonemanifest():
manifestfile = "%s_keystone.pp"%controller.CONF['CONFIG_KEYSTONE_HOST']
manifestdata = getManifestTemplate("keystone_cinder.pp")
appendManifestFile(manifestfile, manifestdata)
def createmanifest():
manifestfile = "%s_cinder.pp"%controller.CONF['CONFIG_CINDER_HOST']
manifestdata = getManifestTemplate("cinder.pp")
appendManifestFile(manifestfile, manifestdata)
| Python | 0 |
3b4d8d6ecae3ee1f57dd71af990bb480e6c82d6c | clearly indentify git-checkout as dev version | mitmproxy/version.py | mitmproxy/version.py | IVERSION = (3, 0, 0, 'dev')
VERSION = ".".join(str(i) for i in IVERSION)
PATHOD = "pathod " + VERSION
MITMPROXY = "mitmproxy " + VERSION
if __name__ == "__main__":
print(VERSION)
| IVERSION = (3, 0, 0)
VERSION = ".".join(str(i) for i in IVERSION)
PATHOD = "pathod " + VERSION
MITMPROXY = "mitmproxy " + VERSION
if __name__ == "__main__":
print(VERSION)
| Python | 0.999986 |
7db97dd21b7896f624f37ef44f72445965a65123 | provide urls for bug reports. refs #20 | h1ds_configdb/version.py | h1ds_configdb/version.py | """
Current h1ds_configdb version constant plus version pretty-print method.
Code copied from Fabric:
https://github.com/bitprophet/fabric/raw/master/fabric/version.py
This functionality is contained in its own module to prevent circular import
problems with ``__init__.py`` (which is loaded by setup.py during installation,
which in turn needs access to this version information.)
"""
from subprocess import Popen, PIPE
from os.path import abspath, dirname
def git_sha():
loc = abspath(dirname(__file__))
p = Popen(
"cd \"%s\" && git log -1 --format=format:%%h\ /\ %%cD" % loc,
shell=True,
stdout=PIPE,
stderr=PIPE
)
return p.communicate()[0]
VERSION = (0, 8, 1, 'final', 0)
def get_module_urls():
return ("https://code.h1svr.anu.edu.au/projects/h1ds-configdb", "https://code.h1svr.anu.edu.au/projects/h1ds-configdb/issues/new", )
def get_version(form='short'):
"""
Return a version string for this package, based on `VERSION`.
Takes a single argument, ``form``, which should be one of the following
strings:
* ``branch``: just the major + minor, e.g. "0.9", "1.0".
* ``short`` (default): compact, e.g. "0.9rc1", "0.9.0". For package
filenames or SCM tag identifiers.
* ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1". For e.g.
documentation site headers.
* ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final". For
tag commit messages, or anywhere that it's important to remove ambiguity
between a branch and the first final release within that branch.
"""
# Setup
versions = {}
branch = "%s.%s" % (VERSION[0], VERSION[1])
tertiary = VERSION[2]
type_ = VERSION[3]
final = (type_ == "final")
type_num = VERSION[4]
firsts = "".join([x[0] for x in type_.split()])
sha = git_sha()
sha1 = (" / %s" % sha) if sha else ""
# Branch
versions['branch'] = branch
# Short
v = branch
if (tertiary or final):
v += "." + str(tertiary)
if not final:
v += firsts
if type_num:
v += str(type_num)
else:
v += sha1
versions['short'] = v
# Normal
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
if type_num:
v += " " + type_ + " " + str(type_num)
else:
v += " pre-" + type_ + sha1
versions['normal'] = v
# Verbose
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
if type_num:
v += " " + type_ + " " + str(type_num)
else:
v += " pre-" + type_ + sha1
else:
v += " final"
versions['verbose'] = v
try:
return versions[form]
except KeyError:
raise TypeError, '"%s" is not a valid form specifier.' % form
__version__ = get_version('short')
| """
Current h1ds_configdb version constant plus version pretty-print method.
Code copied from Fabric:
https://github.com/bitprophet/fabric/raw/master/fabric/version.py
This functionality is contained in its own module to prevent circular import
problems with ``__init__.py`` (which is loaded by setup.py during installation,
which in turn needs access to this version information.)
"""
from subprocess import Popen, PIPE
from os.path import abspath, dirname
def git_sha():
loc = abspath(dirname(__file__))
p = Popen(
"cd \"%s\" && git log -1 --format=format:%%h\ /\ %%cD" % loc,
shell=True,
stdout=PIPE,
stderr=PIPE
)
return p.communicate()[0]
VERSION = (0, 8, 1, 'final', 0)
def get_version(form='short'):
"""
Return a version string for this package, based on `VERSION`.
Takes a single argument, ``form``, which should be one of the following
strings:
* ``branch``: just the major + minor, e.g. "0.9", "1.0".
* ``short`` (default): compact, e.g. "0.9rc1", "0.9.0". For package
filenames or SCM tag identifiers.
* ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1". For e.g.
documentation site headers.
* ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final". For
tag commit messages, or anywhere that it's important to remove ambiguity
between a branch and the first final release within that branch.
"""
# Setup
versions = {}
branch = "%s.%s" % (VERSION[0], VERSION[1])
tertiary = VERSION[2]
type_ = VERSION[3]
final = (type_ == "final")
type_num = VERSION[4]
firsts = "".join([x[0] for x in type_.split()])
sha = git_sha()
sha1 = (" / %s" % sha) if sha else ""
# Branch
versions['branch'] = branch
# Short
v = branch
if (tertiary or final):
v += "." + str(tertiary)
if not final:
v += firsts
if type_num:
v += str(type_num)
else:
v += sha1
versions['short'] = v
# Normal
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
if type_num:
v += " " + type_ + " " + str(type_num)
else:
v += " pre-" + type_ + sha1
versions['normal'] = v
# Verbose
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
if type_num:
v += " " + type_ + " " + str(type_num)
else:
v += " pre-" + type_ + sha1
else:
v += " final"
versions['verbose'] = v
try:
return versions[form]
except KeyError:
raise TypeError, '"%s" is not a valid form specifier.' % form
__version__ = get_version('short')
| Python | 0.999556 |
833cd8342385fc095181afc3306ce04414bfd447 | Add work around for destroying models too quickly. | perfscale_mass_model_destruction.py | perfscale_mass_model_destruction.py | #!/usr/bin/env python
"""Perfscale test measuring adding and destroying a large number of models.
Steps taken in this test:
- Bootstraps a provider
- Creates x amount of models and waits for them to be ready
- Delete all the models at once.
"""
import argparse
from datetime import datetime
import logging
import sys
from time import sleep
from deploy_stack import (
BootstrapManager,
)
from generate_perfscale_results import (
DeployDetails,
TimingData,
run_perfscale_test,
)
from utility import (
add_basic_testing_arguments,
configure_logging,
)
log = logging.getLogger("perfscale_mass_model_destruction")
__metaclass__ = type
def perfscale_assess_model_destruction(client, args):
"""Create a bunch of models and then destroy them all."""
model_count = args.model_count
all_models = []
for item in xrange(0, model_count):
model_name = 'model{}'.format(item)
log.info('Creating model: {}'.format(model_name))
new_model = client.add_model(client.env.clone(model_name))
new_model.wait_for_started()
all_models.append(new_model)
# Workaround for bug: https://bugs.launchpad.net/juju/+bug/1635052
# Noted here: https://bugs.launchpad.net/juju-ci-tools/+bug/1635109
sleep(10)
destruction_start = datetime.utcnow()
for doomed in all_models:
doomed.destroy_model()
destruction_end = datetime.utcnow()
destruction_timing = TimingData(destruction_start, destruction_end)
return DeployDetails(
'Destroy {} models'.format(model_count),
{'Model Count': model_count},
destruction_timing)
def parse_args(argv):
"""Parse all arguments."""
parser = argparse.ArgumentParser(
description="Perfscale bundle deployment test.")
add_basic_testing_arguments(parser)
parser.add_argument(
'--model-count',
type=int,
help='Number of models to create.',
default=100)
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
configure_logging(args.verbose)
bs_manager = BootstrapManager.from_args(args)
run_perfscale_test(perfscale_assess_model_destruction, bs_manager, args)
return 0
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
"""Perfscale test measuring adding and destroying a large number of models.
Steps taken in this test:
- Bootstraps a provider
- Creates x amount of models and waits for them to be ready
- Delete all the models at once.
"""
import argparse
from datetime import datetime
import logging
import sys
from deploy_stack import (
BootstrapManager,
)
from generate_perfscale_results import (
DeployDetails,
TimingData,
run_perfscale_test,
)
from utility import (
add_basic_testing_arguments,
configure_logging,
)
log = logging.getLogger("perfscale_mass_model_destruction")
__metaclass__ = type
def perfscale_assess_model_destruction(client, args):
"""Create a bunch of models and then destroy them all."""
model_count = args.model_count
all_models = []
for item in xrange(0, model_count):
model_name = 'model{}'.format(item)
log.info('Creating model: {}'.format(model_name))
new_model = client.add_model(client.env.clone(model_name))
new_model.wait_for_started()
all_models.append(new_model)
destruction_start = datetime.utcnow()
for doomed in all_models:
doomed.destroy_model()
destruction_end = datetime.utcnow()
destruction_timing = TimingData(destruction_start, destruction_end)
return DeployDetails(
'Destroy {} models'.format(model_count),
{'Model Count': model_count},
destruction_timing)
def parse_args(argv):
"""Parse all arguments."""
parser = argparse.ArgumentParser(
description="Perfscale bundle deployment test.")
add_basic_testing_arguments(parser)
parser.add_argument(
'--model-count',
type=int,
help='Number of models to create.',
default=100)
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
configure_logging(args.verbose)
bs_manager = BootstrapManager.from_args(args)
run_perfscale_test(perfscale_assess_model_destruction, bs_manager, args)
return 0
if __name__ == '__main__':
sys.exit(main())
| Python | 0 |
f29ff5eaa24b0671066f145d76b53e534a574119 | Set "text/javascript" type if given a callback | freegeoip/geoip.py | freegeoip/geoip.py | #!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# freegeoip.net
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import cyclone.escape
import socket
from twisted.python import log
from twisted.internet import defer
import freegeoip.search
class BaseHandler(cyclone.web.RequestHandler):
@defer.inlineCallbacks
def get(self, address):
try:
ip, data = yield freegeoip.search.geoip(self.settings.db,
address or self.request.remote_ip)
if data:
data = cyclone.escape.json_decode(data[0][0])
data["ip"] = ip
except socket.error:
raise cyclone.web.HTTPError(404)
except ValueError:
raise cyclone.web.HTTPError(400)
except Exception, e:
log.err("search.geoip('%s') failed: %s" % (address, e))
raise cyclone.web.HTTPError(503)
if data:
self.dump(data)
else:
raise cyclone.web.HTTPError(404)
def dump(self, data):
raise NotImplementedError
class CsvHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/csv")
self.render("geoip.csv", data=data)
class XmlHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/xml")
self.render("geoip.xml", data=data)
class JsonHandler(BaseHandler):
def dump(self, data):
callback = self.get_argument("callback", None)
if callback:
self.set_header("Content-Type", "text/javascript")
self.finish("%s(%s);" % (callback, cyclone.escape.json_encode(data)))
else:
self.set_header("Content-Type", "application/json")
self.finish(cyclone.escape.json_encode(data))
| #!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# freegeoip.net
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import cyclone.escape
import socket
from twisted.python import log
from twisted.internet import defer
import freegeoip.search
class BaseHandler(cyclone.web.RequestHandler):
@defer.inlineCallbacks
def get(self, address):
try:
ip, data = yield freegeoip.search.geoip(self.settings.db,
address or self.request.remote_ip)
if data:
data = cyclone.escape.json_decode(data[0][0])
data["ip"] = ip
except socket.error:
raise cyclone.web.HTTPError(404)
except ValueError:
raise cyclone.web.HTTPError(400)
except Exception, e:
log.err("search.geoip('%s') failed: %s" % (address, e))
raise cyclone.web.HTTPError(503)
if data:
self.dump(data)
else:
raise cyclone.web.HTTPError(404)
def dump(self, data):
raise NotImplementedError
class CsvHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/csv")
self.render("geoip.csv", data=data)
class XmlHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/xml")
self.render("geoip.xml", data=data)
class JsonHandler(BaseHandler):
def dump(self, data):
callback = self.get_argument("callback", None)
self.set_header("Content-Type", "application/json")
if callback:
self.finish("%s(%s);" % (callback, cyclone.escape.json_encode(data)))
else:
self.finish(cyclone.escape.json_encode(data))
| Python | 0.999994 |
827661e790bd407c29f4d109e428c8f36d44f537 | Update ReferralFollow test_form's build_form to allow True, False, None for has_appointment. | pttrack/test_forms.py | pttrack/test_forms.py | '''Module for testing the various custom forms used in Osler.'''
import datetime
from django.test import TestCase
from . import forms
from . import models
from . import followup_models
# pylint: disable=invalid-name
class TestReferralFollowupForms(TestCase):
'''
Test the validation and behavior of the forms used to do followups.
'''
def setUp(self):
self.contact_method = models.ContactMethod.objects.create(
name="Carrier Pidgeon")
self.pt = models.Patient.objects.create(
first_name="Juggie",
last_name="Brodeltein",
middle_name="Bayer",
phone='+49 178 236 5288',
gender=models.Gender.objects.create(long_name="Male",
short_name="M"),
address='Schulstrasse 9',
city='Munich',
state='BA',
zip_code='63108',
pcp_preferred_zip='63018',
date_of_birth=datetime.date(1990, 01, 01),
patient_comfortable_with_english=False,
preferred_contact_method=self.contact_method,
)
self.successful_res = followup_models.ContactResult.objects.create(
name="Got him", patient_reached=True)
self.unsuccessful_res = followup_models.ContactResult.objects.create(
name="Disaster", patient_reached=False)
self.reftype = models.ReferralType.objects.create(name="Chiropracter")
models.ReferralLocation.objects.create(
name="Franklin's Back Adjustment",
address="1435 Sillypants Drive")
followup_models.NoAptReason.objects.create(
name="better things to do")
def build_form(self, contact_successful, has_appointment, apt_location, noapt_reason):
'''
Construct a ReferralFollowup form to suit the needs of the testing
subroutines based upon what is provided and not provided.
'''
contact_resolution = self.successful_res if contact_successful else self.unsuccessful_res
form_data = {
'contact_method': self.contact_method,
'contact_resolution': contact_resolution,
'patient': self.pt,
'referral_type': self.reftype,
}
# Has appointment could (at least in principle) be True, False, or
# unspecified.
if has_appointment:
form_data['has_appointment'] = True
elif has_appointment is None:
pass
else:
form_data['has_appointment'] = False
if apt_location:
form_data['apt_location'] = models.ReferralLocation.objects.all()[0]
if noapt_reason:
form_data['noapt_reason'] = followup_models.NoAptReason.objects.all()[0]
return forms.ReferralFollowup(data=form_data)
def test_correct_successful_noapt(self):
'''
Test a correct submission of ReferralFollowup when
ContactResult.patient_reached is True but has_appointment is false.
That is, apt_location and noapt_reason are provided.
'''
form = self.build_form(
contact_successful=True,
has_appointment=False,
apt_location=True,
noapt_reason=True)
self.assertEqual(len(form['noapt_reason'].errors), 0)
def test_incorrect_successful_noapt(self):
'''
Test that a successful contact with no appointment that lacks a
noapt_reason is considered incorrect.
'''
form = self.build_form(
contact_successful=True,
has_appointment=False,
noapt_reason=False,
apt_location=False)
self.assertGreater(len(form['noapt_reason'].errors), 0)
def test_correct_unsuccssful_noapt(self):
'''
Test that an unsuccessful contact requires only has_appointment and
referral_type. apt_location and noapt_reason are not required.
'''
form = self.build_form(
contact_successful=False,
has_appointment=None,
apt_location=False,
noapt_reason=False)
self.assertEqual(len(form['noapt_reason'].errors), 0)
| '''Module for testing the various custom forms used in Osler.'''
import datetime
from django.test import TestCase
from . import forms
from . import models
from . import followup_models
# pylint: disable=invalid-name
class TestReferralFollowupForms(TestCase):
'''
Test the validation and behavior of the forms used to do followups.
'''
def setUp(self):
self.contact_method = models.ContactMethod.objects.create(
name="Carrier Pidgeon")
self.pt = models.Patient.objects.create(
first_name="Juggie",
last_name="Brodeltein",
middle_name="Bayer",
phone='+49 178 236 5288',
gender=models.Gender.objects.create(long_name="Male",
short_name="M"),
address='Schulstrasse 9',
city='Munich',
state='BA',
zip_code='63108',
pcp_preferred_zip='63018',
date_of_birth=datetime.date(1990, 01, 01),
patient_comfortable_with_english=False,
preferred_contact_method=self.contact_method,
)
self.successful_res = followup_models.ContactResult.objects.create(
name="Got him", patient_reached=True)
self.unsuccessful_res = followup_models.ContactResult.objects.create(
name="Disaster", patient_reached=False)
self.reftype = models.ReferralType.objects.create(name="Chiropracter")
models.ReferralLocation.objects.create(
name="Franklin's Back Adjustment",
address="1435 Sillypants Drive")
followup_models.NoAptReason.objects.create(
name="better things to do")
def build_form(self, contact_successful, has_appointment, apt_location, noapt_reason):
'''
Construct a ReferralFollowup form to suit the needs of the testing
subroutines based upon what is provided and not provided.
'''
contact_resolution = self.successful_res if contact_successful else self.unsuccessful_res
form_data = {
'contact_method': self.contact_method,
'contact_resolution': contact_resolution,
'patient': self.pt,
'referral_type': self.reftype,
'has_appointment': has_appointment,
}
if apt_location:
form_data['apt_location'] = models.ReferralLocation.objects.all()[0]
if noapt_reason:
form_data['noapt_reason'] = followup_models.NoAptReason.objects.all()[0]
return forms.ReferralFollowup(data=form_data)
def test_correct_successful_noapt(self):
'''
Test a correct submission of ReferralFollowup when
ContactResult.patient_reached is True but has_appointment is false.
That is, apt_location and noapt_reason are provided.
'''
form = self.build_form(
contact_successful=True,
has_appointment=False,
apt_location=True,
noapt_reason=True)
self.assertEqual(len(form['noapt_reason'].errors), 0)
def test_incorrect_successful_noapt(self):
'''
Test that a successful contact with no appointment that lacks a
noapt_reason is considered incorrect.
'''
form = self.build_form(
contact_successful=True,
has_appointment=False,
noapt_reason=False,
apt_location=False)
self.assertGreater(len(form['noapt_reason'].errors), 0)
def test_correct_unsuccssful_noapt(self):
'''
Test that an unsuccessful contact requires only has_appointment and
referral_type. apt_location and noapt_reason are not required.
'''
form = self.build_form(
contact_successful=False,
has_appointment=False,
apt_location=False,
noapt_reason=False)
self.assertEqual(len(form['noapt_reason'].errors), 0)
| Python | 0 |
c9917b3dc54290bb3fc7c977e8c1db76ac60cf82 | Update project queries | polyaxon/api/projects/queries.py | polyaxon/api/projects/queries.py | from django.db.models import Count, Q
from db.models.projects import Project
projects = Project.objects.select_related('user')
projects_details = projects.select_related('repo').annotate(
Count('experiments', distinct=True),
Count('jobs', distinct=True),
Count('build_jobs', distinct=True),
Count('experiment_groups', distinct=True),
independent_experiments__count=Count(
'experiments',
filter=Q(experiments__experiment_group__isnull=True),
distinct=True))
| from django.db.models import Count, Q
from db.models.projects import Project
projects = Project.objects.select_related('user')
projects_details = projects.select_related('repo').annotate(
Count('experiments', distinct=True),
Count('jobs', distinct=True),
Count('build_jobs', distinct=True),
Count('experiment_groups', distinct=True)).annotate(
independent_experiments__count=Count(
'experiments',
filter=Q(experiments__experiment_group__isnull=True),
distinct=True))
| Python | 0 |
086b7a7de994e30d2e5defa214eca846862aec59 | update default configuration in config | nova/common/config.py | nova/common/config.py | # Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import cors
def set_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
cors.set_defaults(
allow_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id'],
expose_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Subject-Token',
'X-Service-Token'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
| # Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_middleware import cors
def set_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
# CORS Defaults
# TODO(krotscheck): Update with https://review.openstack.org/#/c/285368/
cfg.set_defaults(cors.CORS_OPTS,
allow_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id'],
expose_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Subject-Token',
'X-Service-Token'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
| Python | 0.000001 |
bc224499e2f4f663a1fe5e41cbfad691e7c04de4 | Drop unused import | turbine/code/py/turbine_helpers.py | turbine/code/py/turbine_helpers.py | # Copyright 2013 University of Chicago and Argonne National Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# TURBINE HELPERS PY
# Python helpers for JSON module
import json
# Type classes for comparison:
_zero = 0
_zerof = 0.0
type_str = "x".__class__
type_int = _zero.__class__
type_float = _zerof.__class__
type_list = [].__class__
type_dict = {}.__class__
type_none = None.__class__
def set_key_type(k):
""" Convert to integer if possible """
try:
result = int(k)
except ValueError:
result = k
return result
def json_path(J, path):
""" Reusable function to search a JSON tree """
J = json.loads(J)
P = path.split(",")
for p in P:
if len(p) > 0:
k = set_key_type(p)
J = J[k]
return J
def json_type(J, path):
""" Obtain the type of the entry at given path in the JSON tree """
J = json_path(J, path)
c = J.__class__
if c == type_str:
return "string"
elif c == type_int:
return "int"
elif c == type_float:
return "float"
elif c == type_list:
return "array"
elif c == type_dict:
return "object"
elif c == type_none:
return "null"
else:
raise Exception("json_type: ERROR class='%s'" % str(c))
def json_object_names(J, path):
""" Assume dict and return all names at given path """
J = json_path(J, path)
L = []
for i in J.keys():
L.append(i)
result = ",".join(L)
return result
def json_array_size(J, path):
""" Assume list and return length of it """
J = json_path(J, path)
return str(len(J))
def json_get(J, path):
""" Return whatever is at the given path (usually scalar) """
J = json_path(J, path)
if J == None:
return "null"
return str(J)
| # Copyright 2013 University of Chicago and Argonne National Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# TURBINE HELPERS PY
# Python helpers for JSON module
import json
import sys
# Type classes for comparison:
_zero = 0
_zerof = 0.0
type_str = "x".__class__
type_int = _zero.__class__
type_float = _zerof.__class__
type_list = [].__class__
type_dict = {}.__class__
type_none = None.__class__
def set_key_type(k):
""" Convert to integer if possible """
try:
result = int(k)
except ValueError:
result = k
return result
def json_path(J, path):
""" Reusable function to search a JSON tree """
J = json.loads(J)
P = path.split(",")
for p in P:
if len(p) > 0:
k = set_key_type(p)
J = J[k]
return J
def json_type(J, path):
""" Obtain the type of the entry at given path in the JSON tree """
J = json_path(J, path)
c = J.__class__
if c == type_str:
return "string"
elif c == type_int:
return "int"
elif c == type_float:
return "float"
elif c == type_list:
return "array"
elif c == type_dict:
return "object"
elif c == type_none:
return "null"
else:
raise Exception("json_type: ERROR class='%s'" % str(c))
def json_object_names(J, path):
""" Assume dict and return all names at given path """
J = json_path(J, path)
L = []
for i in J.keys():
L.append(i)
result = ",".join(L)
return result
def json_array_size(J, path):
""" Assume list and return length of it """
J = json_path(J, path)
return str(len(J))
def json_get(J, path):
""" Return whatever is at the given path (usually scalar) """
J = json_path(J, path)
if J == None:
return "null"
return str(J)
| Python | 0 |
63cb9a8b3acb78be155bbc770cdd1d06170eccc1 | Fix customer filter field. | src/nodeconductor_assembly_waldur/invoices/filters.py | src/nodeconductor_assembly_waldur/invoices/filters.py | import django_filters
from django.conf import settings
from django.core import exceptions
from django.db.models import Q
from django import forms
from django.utils import timezone
from nodeconductor.core import filters as core_filters
from nodeconductor.structure import filters as structure_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
class PaymentDetailsFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.PaymentDetails
fields = '__all__'
class AccountingStartDateFilter(core_filters.BaseExternalFilter):
def filter(self, request, queryset, view):
if not settings.INVOICES['ENABLE_ACCOUNTING_START_DATE']:
return queryset
value = request.query_params.get('accounting_is_running')
boolean_field = forms.NullBooleanField()
try:
value = boolean_field.to_python(value)
except exceptions.ValidationError:
value = None
if value is None:
return queryset
query = Q(payment_details__isnull=True) | Q(payment_details__accounting_start_date__gt=timezone.now())
if value:
return queryset.exclude(query)
else:
return queryset.filter(query)
structure_filters.ExternalCustomerFilterBackend.register(AccountingStartDateFilter())
| import django_filters
from django.conf import settings
from django.core import exceptions
from django.db.models import Q, BooleanField
from django.utils import timezone
from nodeconductor.core import filters as core_filters
from nodeconductor.structure import filters as structure_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
class PaymentDetailsFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.PaymentDetails
fields = '__all__'
class AccountingStartDateFilter(core_filters.BaseExternalFilter):
def filter(self, request, queryset, view):
if not settings.INVOICES['ENABLE_ACCOUNTING_START_DATE']:
return queryset
value = request.query_params.get('accounting_is_running')
boolean_field = BooleanField()
try:
value = boolean_field.to_python(value)
except exceptions.ValidationError:
value = None
if value is None:
return queryset
query = Q(payment_details__isnull=True) | Q(payment_details__accounting_start_date__gt=timezone.now())
if value:
return queryset.exclude(query)
else:
return queryset.filter(query)
structure_filters.ExternalCustomerFilterBackend.register(AccountingStartDateFilter())
| Python | 0 |
881a27ab3d4ee0f9f988a7f183bdd0a76b517526 | Add additional activation tests. | panda/tests/test_views.py | panda/tests/test_views.py | #!/usr/bin/env python
from django.contrib.auth import authenticate
from django.test import TransactionTestCase
from django.test.client import Client
from django.utils import simplejson as json
from panda.models import User
from panda.tests import utils
class TestLogin(TransactionTestCase):
fixtures = ['init_panda.json']
def setUp(self):
self.user = utils.get_panda_user()
self.client = Client()
def test_login_success(self):
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'user' })
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(body['email'], 'user@pandaproject.net')
self.assertEqual(body['api_key'], 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84c')
self.assertEqual(body['notifications'], [])
def test_login_disabled(self):
self.user.is_active = False
self.user.save()
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'user' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('disabled', body['__all__'])
self.user.is_active = True
self.user.save()
def test_login_invalid_email(self):
response = self.client.post('/login/', { 'email': 'NOTPANDA@pandaproject.net', 'password': 'panda' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('incorrect', body['__all__'])
def test_login_incorrect_password(self):
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'NOPANDA' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('incorrect', body['__all__'])
def test_no_get(self):
response = self.client.get('/login/', { 'email': 'user@pandaproject.net', 'password': 'NOPANDA' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertEqual(body, None)
class TestActivate(TransactionTestCase):
fixtures = ['init_panda.json']
def setUp(self):
self.user = utils.get_panda_user()
self.client = Client()
def test_check_activation_key_valid(self):
new_user = User.objects.create(
email="foo@bar.com",
username="foo@bar.com",
is_active=False
)
key = new_user.get_profile().activation_key
response = self.client.get('/check_activation_key/%s/' % key)
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(body['activation_key'], key)
self.assertEqual(body['email'], new_user.email)
self.assertEqual(body['first_name'], '')
self.assertEqual(body['last_name'], '')
def test_check_activation_key_invalid(self):
response = self.client.get('/check_activation_key/NOT_A_VALID_KEY/')
self.assertEqual(response.status_code, 400)
def test_activate(self):
new_user = User.objects.create(
email="foo@bar.com",
username="foo@bar.com",
is_active=False
)
response = self.client.post('/activate/', { 'activation_key': new_user.get_profile().activation_key, 'email': 'foo@bar.com', 'password': 'foobarbaz' })
self.assertEqual(response.status_code, 200)
self.assertEqual(authenticate(username='foo@bar.com', password='foobarbaz'), new_user)
| #!/usr/bin/env python
from django.contrib.auth import authenticate
from django.test import TransactionTestCase
from django.test.client import Client
from django.utils import simplejson as json
from panda.models import User
from panda.tests import utils
class TestLogin(TransactionTestCase):
fixtures = ['init_panda.json']
def setUp(self):
self.user = utils.get_panda_user()
self.client = Client()
def test_login_success(self):
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'user' })
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(body['email'], 'user@pandaproject.net')
self.assertEqual(body['api_key'], 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84c')
self.assertEqual(body['notifications'], [])
def test_login_disabled(self):
self.user.is_active = False
self.user.save()
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'user' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('disabled', body['__all__'])
self.user.is_active = True
self.user.save()
def test_login_invalid_email(self):
response = self.client.post('/login/', { 'email': 'NOTPANDA@pandaproject.net', 'password': 'panda' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('incorrect', body['__all__'])
def test_login_incorrect_password(self):
response = self.client.post('/login/', { 'email': 'user@pandaproject.net', 'password': 'NOPANDA' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertIn('incorrect', body['__all__'])
def test_no_get(self):
response = self.client.get('/login/', { 'email': 'user@pandaproject.net', 'password': 'NOPANDA' })
self.assertEqual(response.status_code, 400)
body = json.loads(response.content)
self.assertEqual(body, None)
class TestActivate(TransactionTestCase):
fixtures = ['init_panda.json']
def setUp(self):
self.user = utils.get_panda_user()
self.client = Client()
def test_activate(self):
new_user = User.objects.create(
email="foo@bar.com",
username="foo@bar.com",
is_active=False
)
response = self.client.post('/activate/', { 'activation_key': new_user.get_profile().activation_key, 'email': 'foo@bar.com', 'password': 'foobarbaz' })
self.assertEqual(response.status_code, 200)
self.assertEqual(authenticate(username='foo@bar.com', password='foobarbaz'), new_user)
| Python | 0 |
2e4837721a22985894f932536c45989aaec8006b | Stop printing the exception object. | code/daemon/transporters/transporter.py | code/daemon/transporters/transporter.py | """transporter.py Transporter class for daemon"""
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
import sys
import os
sys.path.append(os.path.abspath('../dependencies'))
from django.core.files.storage import Storage
from django.core.files import File
# Define exceptions.
class TransporterError(Exception): pass
class InvalidSettingError(TransporterError): pass
class MissingSettingError(TransporterError): pass
class InvalidCallbackError(TransporterError): pass
class ConnectionError(TransporterError): pass
import threading
import Queue
import time
from sets import Set, ImmutableSet
class Transporter(threading.Thread):
"""threaded abstraction around a Django Storage subclass"""
def __init__(self, settings, callback):
if not callable(callback):
raise InvalidCallbackError
self.settings = settings
self.storage = False
self.is_ready = False
self.lock = threading.Lock()
self.queue = Queue.Queue()
self.callback = callback
self.die = False
threading.Thread.__init__(self)
def run(self):
while not self.die:
self.lock.acquire()
try:
(filepath, path) = self.queue.get_nowait()
self.lock.release()
# Sync the file.
f = File(open(filepath, "rb"))
target = os.path.join(path, filepath)
if self.storage.exists(target):
self.storage.delete(target)
self.storage.save(target, f)
f.close()
# Call the callback function.
url = self.storage.url(filepath)
url = self.alter_url(url)
self.callback(filepath, url)
except Exception, e:
self.lock.release()
# Sleep a little bit.
time.sleep(0.1)
def alter_url(self, url):
"""allow some classes to alter the generated URL"""
return url
def stop(self):
self.lock.acquire()
self.die = True
self.lock.release()
def validate_settings(self, valid_settings, required_settings, settings):
if len(settings.difference(valid_settings)):
raise InvalidSettingError
if len(required_settings.difference(settings)):
raise InvalidSettingError
def sync_file(self, filepath, path=""):
"""sync a file"""
self.lock.acquire()
self.queue.put((filepath, path))
self.lock.release()
| """transporter.py Transporter class for daemon"""
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
import sys
import os
sys.path.append(os.path.abspath('../dependencies'))
from django.core.files.storage import Storage
from django.core.files import File
# Define exceptions.
class TransporterError(Exception): pass
class InvalidSettingError(TransporterError): pass
class MissingSettingError(TransporterError): pass
class InvalidCallbackError(TransporterError): pass
class ConnectionError(TransporterError): pass
import threading
import Queue
import time
from sets import Set, ImmutableSet
class Transporter(threading.Thread):
"""threaded abstraction around a Django Storage subclass"""
def __init__(self, settings, callback):
if not callable(callback):
raise InvalidCallbackError
self.settings = settings
self.storage = False
self.is_ready = False
self.lock = threading.Lock()
self.queue = Queue.Queue()
self.callback = callback
self.die = False
threading.Thread.__init__(self)
def run(self):
while not self.die:
self.lock.acquire()
try:
(filepath, path) = self.queue.get_nowait()
self.lock.release()
# Sync the file.
f = File(open(filepath, "rb"))
target = os.path.join(path, filepath)
if self.storage.exists(target):
self.storage.delete(target)
self.storage.save(target, f)
f.close()
# Call the callback function.
url = self.storage.url(filepath)
url = self.alter_url(url)
self.callback(filepath, url)
except Exception, e:
print e
self.lock.release()
# Sleep a little bit.
time.sleep(0.1)
def alter_url(self, url):
"""allow some classes to alter the generated URL"""
return url
def stop(self):
self.lock.acquire()
self.die = True
self.lock.release()
def validate_settings(self, valid_settings, required_settings, settings):
if len(settings.difference(valid_settings)):
raise InvalidSettingError
if len(required_settings.difference(settings)):
raise InvalidSettingError
def sync_file(self, filepath, path=""):
"""sync a file"""
self.lock.acquire()
self.queue.put((filepath, path))
self.lock.release()
| Python | 0 |
fc75f5843af70c09e0d63284277bf88689cbb06d | Add apidoc to doc building | invocations/docs.py | invocations/docs.py | import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def api_docs(target, output="api", exclude=""):
"""
Runs ``sphinx-apidoc`` to autogenerate your API docs.
Must give target directory/package as ``target``. Results are written out
to ``docs/<output>`` (``docs/api`` by default).
To exclude certain output files from the final build give ``exclude`` as a
comma separated list of file paths.
"""
output = os.path.join('docs', output)
# Have to make these absolute or apidoc is dumb :(
exclude = map(
lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
exclude.split(',')
)
run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
@task
def docs(clean=False, browse=False, api_target=None, api_output=None,
api_exclude=None):
"""
Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
Can also build API docs by giving ``api_target`` and optionally
``api_output`` and/or ``api_exclude``.
"""
if api_target:
kwargs = {'target': api_target}
if api_output:
kwargs['output'] = api_output
if api_exclude:
kwargs['exclude'] = api_exclude
api_docs.body(**kwargs)
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
| import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def docs(clean=False, browse=False):
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
| Python | 0 |
0a07f6ac82f099d836eb5276063adab245979258 | rename `recall` to `call` | chainer/training/triggers/once_trigger.py | chainer/training/triggers/once_trigger.py | class OnceTrigger(object):
"""Trigger based on the starting point of the iteration.
This trigger accepts only once at starting point of the iteration. There
are two ways to specify the starting point: only starting point in whole
iteration or called again when training resumed.
Args:
call_on_resume (bool): Whether the extension is called again or not
when restored from a snapshot. It is set to ``False`` by default.
"""
def __init__(self, call_on_resume=False):
self._call_on_resume = call_on_resume
self._flag_called = False
def trigger(self, trainer):
if self._flag_called:
return False
self._flag_called = True
return True
@property
def skip_initialize(self):
"""The flag decide to call `Extension.initialize` or not.
If this flag is exist and set `True`, `Extension.initialize` is
skipped.
"""
return self._flag_called
def serialize(self, serializer):
if not self._call_on_resume:
self._flag_called = serializer('_flag_called', self._flag_called)
| class OnceTrigger(object):
"""Trigger based on the starting point of the iteration.
This trigger accepts only once at starting point of the iteration. There
are two ways to specify the starting point: only starting point in whole
iteration or recalled when training resumed.
Args:
recall_on_resume (bool): Whether the extension is recalled or not when
restored from a snapshot. It is set to ``False`` by default.
"""
def __init__(self, recall_on_resume=False):
self._recall_on_resume = recall_on_resume
self._flag_called = False
def trigger(self, trainer):
if self._flag_called:
return False
self._flag_called = True
return True
@property
def skip_initialize(self):
"""The flag decide to call `Extension.initialize` or not.
If this flag is exist and set `True`, `Extension.initialize` is
skipped.
"""
return self._flag_called
def serialize(self, serializer):
if not self._recall_on_resume:
self._flag_called = serializer('_flag_called', self._flag_called)
| Python | 0.002129 |
2d6906bc58275b18102b4523a4faa5078a6e74f1 | fix wrong description (all?) | chainercv/transforms/image/random_crop.py | chainercv/transforms/image/random_crop.py | import random
import six
def random_crop(img, output_shape, return_slices=False, copy=False):
"""Crop array randomly into `output_shape`.
The input image is cropped by a randomly selected region whose shape
is :obj:`output_shape`.
Args:
img (~numpy.ndarray): An image array to be cropped. This is in
CHW format.
output_shape (tuple): the size of output image after cropping.
This value is :math:`(heihgt, width)`.
return_slices (bool): If :obj:`True`, this function returns
information of slices.
copy (bool): If :obj:`False`, a view of :obj:`img` is returned.
Returns:
This function returns :obj:`out_img, slice_H, slice_W` if
:obj:`return_slices = True`. Otherwise, this returns
:obj:`out_img`.
Note that :obj:`out_img` is the transformed image array.
Also, :obj:`slice_H` and :obj:`slice_W` are slices used to crop the
input image. The following relationship is satisfied.
.. code::
out_img = img[:, slice_H, slice_W]
"""
H, W = output_shape
if img.shape[1] == H:
start_H = 0
elif img.shape[1] > H:
start_H = random.choice(six.moves.range(img.shape[1] - H))
else:
raise ValueError('shape of image is larger than output shape')
slice_H = slice(start_H, start_H + H)
if img.shape[2] == W:
start_W = 0
elif img.shape[2] > W:
start_W = random.choice(six.moves.range(img.shape[2] - W))
else:
raise ValueError('shape of image is larger than output shape')
slice_W = slice(start_W, start_W + W)
img = img[:, slice_H, slice_W]
if copy:
img = img.copy()
if return_slices:
return img, slice_H, slice_W
else:
return img
| import random
import six
def random_crop(img, output_shape, return_slices=False, copy=False):
"""Crop array randomly into `output_shape`.
All arrays will be cropped by the same region randomly selected. The
output will all be in shape :obj:`output_shape`.
Args:
img (~numpy.ndarray): An image array to be cropped. This is in
CHW format.
output_shape (tuple): the size of output image after cropping.
This value is :math:`(heihgt, width)`.
return_slices (bool): If :obj:`True`, this function returns
information of slices.
copy (bool): If :obj:`False`, a view of :obj:`img` is returned.
Returns:
This function returns :obj:`out_img, slice_H, slice_W` if
:obj:`return_slices = True`. Otherwise, this returns
:obj:`out_img`.
Note that :obj:`out_img` is the transformed image array.
Also, :obj:`slice_H` and :obj:`slice_W` are slices used to crop the
input image. The following relationship is satisfied.
.. code::
out_img = img[:, slice_H, slice_W]
"""
H, W = output_shape
if img.shape[1] == H:
start_H = 0
elif img.shape[1] > H:
start_H = random.choice(six.moves.range(img.shape[1] - H))
else:
raise ValueError('shape of image is larger than output shape')
slice_H = slice(start_H, start_H + H)
if img.shape[2] == W:
start_W = 0
elif img.shape[2] > W:
start_W = random.choice(six.moves.range(img.shape[2] - W))
else:
raise ValueError('shape of image is larger than output shape')
slice_W = slice(start_W, start_W + W)
img = img[:, slice_H, slice_W]
if copy:
img = img.copy()
if return_slices:
return img, slice_H, slice_W
else:
return img
| Python | 0.000377 |
57e177e47bcc54683654e5d0de81af6e0cbd803d | update version | pimat_web/version.py | pimat_web/version.py | __version__ = '0.6.18'
| __version__ = '0.6.17'
| Python | 0 |
e3b6b9864376f2dabe42b6d80d4a5db65cb85d30 | Update docs for 'prep_command()' | src/python/pants/backend/core/targets/prep_command.py | src/python/pants/backend/core/targets/prep_command.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
from pants.base.target import Target
class PrepCommand(Target):
"""A command that must be run before some other target can be tested.
For example, you can use `prep_command()` to execute a script that sets up tunnels to database
servers. These tunnels could then be leveraged by integration tests.
Pants will only execute the `prep_command()` under the test goal, when testing targets that
depend on the `prep_command()` target.
"""
def __init__(self, prep_executable=None, prep_args=None, payload=None, prep_environ=False, **kwargs):
"""
:param prep_executable: The path to the executable that should be run.
:param prep_args: A list of command-line args to the excutable.
:param prep_environ: If True, the output of the command will be treated as
a \\\\0-separated list of key=value pairs to insert into the environment.
Note that this will pollute the environment for all future tests, so
avoid it if at all possible.
"""
payload = payload or Payload()
payload.add_fields({
'prep_command_executable': PrimitiveField(prep_executable),
'prep_command_args': PrimitiveField(prep_args or []),
'prep_environ': PrimitiveField(prep_environ),
})
super(PrepCommand, self).__init__(payload=payload, **kwargs)
| # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
from pants.base.target import Target
class PrepCommand(Target):
"""A command that must be run before some other target can be built.
For example, a script that sets up tunnels to database servers
might need to be run before running integration tests
"""
def __init__(self, prep_executable=None, prep_args=None, payload=None, prep_environ=False, **kwargs):
"""
:param prep_executable: The path to the executable that should be run.
:param prep_args: A list of command-line args to the excutable.
:param prep_environ: If True, the output of the command will be treated as
a \0-separated list of key=value pairs to insert into the environment.
Note that this will pollute the environment for all future tests, so
avoid it if at all possible.
"""
payload = payload or Payload()
payload.add_fields({
'prep_command_executable': PrimitiveField(prep_executable),
'prep_command_args': PrimitiveField(prep_args or []),
'prep_environ': PrimitiveField(prep_environ),
})
super(PrepCommand, self).__init__(payload=payload, **kwargs)
| Python | 0.000001 |
a9ac098ec492739f37005c9bd6278105df0261c5 | Add fields to save question url and annexure links | parliamentsearch/items.py | parliamentsearch/items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
| # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.