commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
f22ae1dcf3ea674b8b39382f74a305c12841313f | Fix TabError for Python 3 (#11669) | szha/mxnet,apache/incubator-mxnet,tlby/mxnet,larroy/mxnet,tlby/mxnet,sxjscience/mxnet,rahul003/mxnet,tlby/mxnet,eric-haibin-lin/mxnet,DickJC123/mxnet,larroy/mxnet,sxjscience/mxnet,tlby/mxnet,szha/mxnet,dmlc/mxnet,ptrendx/mxnet,reminisce/mxnet,dmlc/mxnet,leezu/mxnet,leezu/mxnet,reminisce/mxnet,mbaijal/incubator-mxnet,sxjscience/mxnet,ptrendx/mxnet,zhreshold/mxnet,ptrendx/mxnet,larroy/mxnet,zhreshold/mxnet,zhreshold/mxnet,szha/mxnet,rahul003/mxnet,tlby/mxnet,yajiedesign/mxnet,rahul003/mxnet,mbaijal/incubator-mxnet,szha/mxnet,reminisce/mxnet,sxjscience/mxnet,dmlc/mxnet,yajiedesign/mxnet,leezu/mxnet,yajiedesign/mxnet,reminisce/mxnet,larroy/mxnet,zhreshold/mxnet,ptrendx/mxnet,zhreshold/mxnet,zhreshold/mxnet,yajiedesign/mxnet,leezu/mxnet,leezu/mxnet,ForkedReposBak/mxnet,rahul003/mxnet,dmlc/mxnet,sxjscience/mxnet,eric-haibin-lin/mxnet,reminisce/mxnet,dmlc/mxnet,mbaijal/incubator-mxnet,sxjscience/mxnet,rahul003/mxnet,ptrendx/mxnet,eric-haibin-lin/mxnet,eric-haibin-lin/mxnet,apache/incubator-mxnet,rahul003/mxnet,apache/incubator-mxnet,szha/mxnet,larroy/mxnet,sxjscience/mxnet,tlby/mxnet,dmlc/mxnet,eric-haibin-lin/mxnet,ptrendx/mxnet,reminisce/mxnet,sxjscience/mxnet,reminisce/mxnet,tlby/mxnet,mbaijal/incubator-mxnet,ForkedReposBak/mxnet,yajiedesign/mxnet,rahul003/mxnet,rahul003/mxnet,larroy/mxnet,ptrendx/mxnet,yajiedesign/mxnet,ptrendx/mxnet,szha/mxnet,eric-haibin-lin/mxnet,reminisce/mxnet,mbaijal/incubator-mxnet,dmlc/mxnet,zhreshold/mxnet,leezu/mxnet,mbaijal/incubator-mxnet,zhreshold/mxnet,tlby/mxnet,eric-haibin-lin/mxnet,leezu/mxnet,tlby/mxnet,larroy/mxnet,sxjscience/mxnet,yajiedesign/mxnet,tlby/mxnet,eric-haibin-lin/mxnet,apache/incubator-mxnet,DickJC123/mxnet,ForkedReposBak/mxnet,ForkedReposBak/mxnet,dmlc/mxnet,ForkedReposBak/mxnet,szha/mxnet,leezu/mxnet,yajiedesign/mxnet,DickJC123/mxnet,szha/mxnet,ForkedReposBak/mxnet,reminisce/mxnet,larroy/mxnet,ptrendx/mxnet,ptrendx/mxnet,ForkedReposBak/mxnet,ForkedReposBak/mxnet,larroy/mxnet,mbaijal/incubator-mxnet,rahul003/mxnet,mbaijal/incubator-mxnet,reminisce/mxnet,leezu/mxnet,dmlc/mxnet,mbaijal/incubator-mxnet,yajiedesign/mxnet,DickJC123/mxnet,szha/mxnet,ForkedReposBak/mxnet,zhreshold/mxnet,eric-haibin-lin/mxnet | example/reinforcement-learning/ddpg/strategies.py | example/reinforcement-learning/ddpg/strategies.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
class BaseStrategy(object):
"""
Base class of exploration strategy.
"""
def get_action(self, obs, policy):
raise NotImplementedError
def reset(self):
pass
class OUStrategy(BaseStrategy):
"""
Ornstein-Uhlenbeck process: dxt = theta * (mu - xt) * dt + sigma * dWt
where Wt denotes the Wiener process.
"""
def __init__(self, env_spec, mu=0, theta=0.15, sigma=0.3):
self.mu = mu
self.theta = theta
self.sigma = sigma
self.action_space = env_spec.action_space
self.state = np.ones(self.action_space.flat_dim) * self.mu
def evolve_state(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * np.random.randn(len(x))
self.state = x + dx
return self.state
def reset(self):
self.state = np.ones(self.action_space.flat_dim) * self.mu
def get_action(self, obs, policy):
# get_action accepts a 2D tensor with one row
obs = obs.reshape((1, -1))
action = policy.get_action(obs)
increment = self.evolve_state()
return np.clip(action + increment,
self.action_space.low,
self.action_space.high)
if __name__ == "__main__":
class Env1(object):
def __init__(self):
self.action_space = Env2()
class Env2(object):
def __init__(self):
self.flat_dim = 2
env_spec = Env1()
test = OUStrategy(env_spec)
states = []
for i in range(1000):
states.append(test.evolve_state()[0])
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
class BaseStrategy(object):
"""
Base class of exploration strategy.
"""
def get_action(self, obs, policy):
raise NotImplementedError
def reset(self):
pass
class OUStrategy(BaseStrategy):
"""
Ornstein-Uhlenbeck process: dxt = theta * (mu - xt) * dt + sigma * dWt
where Wt denotes the Wiener process.
"""
def __init__(self, env_spec, mu=0, theta=0.15, sigma=0.3):
self.mu = mu
self.theta = theta
self.sigma = sigma
self.action_space = env_spec.action_space
self.state = np.ones(self.action_space.flat_dim) * self.mu
def evolve_state(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * np.random.randn(len(x))
self.state = x + dx
return self.state
def reset(self):
self.state = np.ones(self.action_space.flat_dim) * self.mu
def get_action(self, obs, policy):
# get_action accepts a 2D tensor with one row
obs = obs.reshape((1, -1))
action = policy.get_action(obs)
increment = self.evolve_state()
return np.clip(action + increment,
self.action_space.low,
self.action_space.high)
if __name__ == "__main__":
class Env1(object):
def __init__(self):
self.action_space = Env2()
class Env2(object):
def __init__(self):
self.flat_dim = 2
env_spec = Env1()
test = OUStrategy(env_spec)
states = []
for i in range(1000):
states.append(test.evolve_state()[0])
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
| apache-2.0 | Python |
c55f0d847fe3b8e3ab77dca9f0e3ce3461530862 | Remove tests for osx | NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio,NeblioTeam/neblio | ci_scripts/test_osx-gui_wallet.py | ci_scripts/test_osx-gui_wallet.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import multiprocessing as mp
import neblio_ci_libs as nci
working_dir = os.getcwd()
build_dir = "build"
nci.mkdir_p(build_dir)
os.chdir(build_dir)
nci.call_with_err_code('brew update')
nci.call_with_err_code('brew outdated qt || brew upgrade qt')
nci.call_with_err_code('brew outdated berkeley-db@4 || brew upgrade berkeley-db@4')
nci.call_with_err_code('brew outdated boost@1.60 || brew upgrade boost@1.60')
nci.call_with_err_code('brew outdated miniupnpc || brew upgrade miniupnpc')
nci.call_with_err_code('brew outdated curl || brew upgrade curl')
nci.call_with_err_code('brew outdated numpy || brew upgrade numpy')
nci.call_with_err_code('brew outdated python || brew upgrade python')
nci.call_with_err_code('brew outdated openssl || brew upgrade openssl')
nci.call_with_err_code('brew outdated qrencode || brew upgrade qrencode')
nci.call_with_err_code('brew install qt --force')
nci.call_with_err_code('brew install berkeley-db@4 --force')
nci.call_with_err_code('brew install boost@1.60 --force')
nci.call_with_err_code('brew install miniupnpc --force')
nci.call_with_err_code('brew install curl --force')
nci.call_with_err_code('brew install python --force')
nci.call_with_err_code('brew install openssl --force')
nci.call_with_err_code('brew install qrencode --force')
nci.call_with_err_code('brew unlink qt && brew link --force --overwrite qt')
nci.call_with_err_code('brew unlink berkeley-db@4 && brew link --force --overwrite berkeley-db@4')
nci.call_with_err_code('brew unlink boost@1.60 && brew link --force --overwrite boost@1.60')
nci.call_with_err_code('brew unlink miniupnpc && brew link --force --overwrite miniupnpc')
nci.call_with_err_code('brew unlink curl && brew link --force --overwrite curl')
nci.call_with_err_code('brew unlink python && brew link --force --overwrite python')
nci.call_with_err_code('brew unlink openssl && brew link --force --overwrite openssl')
nci.call_with_err_code('brew unlink qrencode && brew link --force --overwrite qrencode')
nci.call_with_err_code('qmake "USE_UPNP=1" "USE_QRCODE=1" "RELEASE=1" "NEBLIO_CONFIG += Tests" ../neblio-wallet.pro')
nci.call_with_err_code("make -j" + str(mp.cpu_count()))
print("")
print("")
print("Building finished successfully.")
print("")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import multiprocessing as mp
import neblio_ci_libs as nci
working_dir = os.getcwd()
build_dir = "build"
nci.mkdir_p(build_dir)
os.chdir(build_dir)
nci.call_with_err_code('brew update')
nci.call_with_err_code('brew outdated qt || brew upgrade qt')
nci.call_with_err_code('brew outdated berkeley-db@4 || brew upgrade berkeley-db@4')
nci.call_with_err_code('brew outdated boost@1.60 || brew upgrade boost@1.60')
nci.call_with_err_code('brew outdated miniupnpc || brew upgrade miniupnpc')
nci.call_with_err_code('brew outdated curl || brew upgrade curl')
nci.call_with_err_code('brew outdated numpy || brew upgrade numpy')
nci.call_with_err_code('brew outdated python || brew upgrade python')
nci.call_with_err_code('brew outdated openssl || brew upgrade openssl')
nci.call_with_err_code('brew outdated qrencode || brew upgrade qrencode')
nci.call_with_err_code('brew install qt --force')
nci.call_with_err_code('brew install berkeley-db@4 --force')
nci.call_with_err_code('brew install boost@1.60 --force')
nci.call_with_err_code('brew install miniupnpc --force')
nci.call_with_err_code('brew install curl --force')
nci.call_with_err_code('brew install python --force')
nci.call_with_err_code('brew install openssl --force')
nci.call_with_err_code('brew install qrencode --force')
nci.call_with_err_code('brew unlink qt && brew link --force --overwrite qt')
nci.call_with_err_code('brew unlink berkeley-db@4 && brew link --force --overwrite berkeley-db@4')
nci.call_with_err_code('brew unlink boost@1.60 && brew link --force --overwrite boost@1.60')
nci.call_with_err_code('brew unlink miniupnpc && brew link --force --overwrite miniupnpc')
nci.call_with_err_code('brew unlink curl && brew link --force --overwrite curl')
nci.call_with_err_code('brew unlink python && brew link --force --overwrite python')
nci.call_with_err_code('brew unlink openssl && brew link --force --overwrite openssl')
nci.call_with_err_code('brew unlink qrencode && brew link --force --overwrite qrencode')
nci.call_with_err_code('qmake "USE_UPNP=1" "USE_QRCODE=1" "RELEASE=1" "NEBLIO_CONFIG += Tests" ../neblio-wallet.pro')
nci.call_with_err_code("make -j" + str(mp.cpu_count()))
# run tests
nci.call_with_err_code("./wallet/test/neblio-tests")
print("")
print("")
print("Building finished successfully.")
print("")
| mit | Python |
f62ccee091e804610bfa16764260e817197abfa4 | Update __init__.py | jaredleekatzman/DeepSurv | deepsurv/__init__.py | deepsurv/__init__.py | from .deep_surv import DeepSurv
from .viz import plot_log
from . import datasets
| from .deep_surv import DeepSurv
from .viz import plot_log
import .datasets
| mit | Python |
a4a0542fd97de1d6a395c1f661b43ec84ee01948 | clean up tests | disqus/codebox,disqus/codebox | codesharer/apps/snippets/tests.py | codesharer/apps/snippets/tests.py | import time
import unittest2
from codesharer.app import create_app
from codesharer.apps.snippets.models import Snippet
from flask import g, Response, request
class FlaskTest(unittest2.TestCase):
def setUp(self):
self.app = create_app()
self.app.config.from_object('codesharer.conf.TestingConfig')
self.client = self.app.test_client()
self._ctx = self.app.test_request_context()
self._ctx.push()
self.app.preprocess_request()
g.redis.flushdb()
def tearDown(self):
self.app.process_response(Response())
self._ctx.pop()
class SnippetTestCase(FlaskTest):
def test_snippets(self):
"""Basic api test of model"""
self.assertEquals(Snippet.objects.count(), 0)
res = []
for i in xrange(3):
time.sleep(0.01)
res.append(Snippet.objects.create(
org=1,
text='test %d' % i,
user=1,
))
self.assertEquals(Snippet.objects.count(), 3)
res.reverse()
for n, sn in enumerate(Snippet.objects.all()):
self.assertEquals(res[n], sn)
self.assertEquals(res[n], Snippet.objects.get(sn.pk))
class SnippetFrontendTestCase(FlaskTest):
def test_snippet_creation(self):
"""
test snippet creation via post to url
"""
rv = self.client.post('/disqus/new', data={
'text': 'foo',
})
self.assertEquals(Snippet.objects.count(), 1)
| import time
import unittest2
from codesharer.app import create_app
from codesharer.apps.snippets.models import Snippet
from flask import g, Response
class FlaskTest(unittest2.TestCase):
def setUp(self):
self.app = create_app()
self.app.config.from_object('codesharer.conf.TestingConfig')
self.client = self.app.test_client()
self._ctx = self.app.test_request_context()
self._ctx.push()
self.app.preprocess_request()
g.redis.flushdb()
def tearDown(self):
self.app.process_response(Response())
self._ctx.pop()
class SnippetTestCase(FlaskTest):
def test_snippets(self):
"""
Simplest test we can do. Train using ours and train using the built-in.
"""
self.assertEquals(Snippet.objects.count(), 0)
res = []
for i in xrange(3):
time.sleep(0.01)
res.append(Snippet.objects.create(
org=1,
text='test %d' % i,
user=1,
))
self.assertEquals(Snippet.objects.count(), 3)
res.reverse()
for n, sn in enumerate(Snippet.objects.all()):
self.assertEquals(res[n], sn)
self.assertEquals(res[n], Snippet.objects.get(sn.pk))
class SnippetFrontendTestCase(FlaskTest):
def test_snippet_creation(self):
"""
test snippet creation via post to url
"""
rv = self.client.post('/disqus/new', data={
'text': 'foo',
})
self.assertEquals(Snippet.objects.count(), 1)
| apache-2.0 | Python |
0fe8cbe31484f7ca390725b684e620355ac222df | Fix bug for earlier change to allow using IP as hostname (#5513) | lyft/incubator-airflow,danielvdende/incubator-airflow,nathanielvarona/airflow,apache/incubator-airflow,airbnb/airflow,DinoCow/airflow,Acehaidrey/incubator-airflow,cfei18/incubator-airflow,Acehaidrey/incubator-airflow,spektom/incubator-airflow,nathanielvarona/airflow,apache/airflow,spektom/incubator-airflow,wooga/airflow,DinoCow/airflow,airbnb/airflow,spektom/incubator-airflow,mtagle/airflow,cfei18/incubator-airflow,airbnb/airflow,mistercrunch/airflow,DinoCow/airflow,mtagle/airflow,lyft/incubator-airflow,wileeam/airflow,cfei18/incubator-airflow,bolkedebruin/airflow,sekikn/incubator-airflow,Fokko/incubator-airflow,dhuang/incubator-airflow,nathanielvarona/airflow,bolkedebruin/airflow,lyft/incubator-airflow,bolkedebruin/airflow,danielvdende/incubator-airflow,spektom/incubator-airflow,mrkm4ntr/incubator-airflow,nathanielvarona/airflow,wileeam/airflow,cfei18/incubator-airflow,mrkm4ntr/incubator-airflow,wooga/airflow,wileeam/airflow,Acehaidrey/incubator-airflow,apache/incubator-airflow,Acehaidrey/incubator-airflow,nathanielvarona/airflow,DinoCow/airflow,cfei18/incubator-airflow,nathanielvarona/airflow,apache/airflow,mistercrunch/airflow,mtagle/airflow,danielvdende/incubator-airflow,cfei18/incubator-airflow,dhuang/incubator-airflow,Fokko/incubator-airflow,mrkm4ntr/incubator-airflow,Acehaidrey/incubator-airflow,dhuang/incubator-airflow,Fokko/incubator-airflow,mrkm4ntr/incubator-airflow,mistercrunch/airflow,danielvdende/incubator-airflow,sekikn/incubator-airflow,sekikn/incubator-airflow,danielvdende/incubator-airflow,dhuang/incubator-airflow,lyft/incubator-airflow,wooga/airflow,Fokko/incubator-airflow,danielvdende/incubator-airflow,apache/airflow,apache/airflow,Acehaidrey/incubator-airflow,bolkedebruin/airflow,mtagle/airflow,sekikn/incubator-airflow,wileeam/airflow,bolkedebruin/airflow,airbnb/airflow,mistercrunch/airflow,apache/incubator-airflow,apache/airflow,apache/incubator-airflow,apache/airflow,wooga/airflow | airflow/utils/net.py | airflow/utils/net.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import importlib
import socket
from airflow.configuration import (conf, AirflowConfigException)
def get_host_ip_address():
return socket.gethostbyname(socket.getfqdn())
def get_hostname():
"""
Fetch the hostname using the callable from the config or using
`socket.getfqdn` as a fallback.
"""
# First we attempt to fetch the callable path from the config.
try:
callable_path = conf.get('core', 'hostname_callable')
except AirflowConfigException:
callable_path = None
# Then we handle the case when the config is missing or empty. This is the
# default behavior.
if not callable_path:
return socket.getfqdn()
# Since we have a callable path, we try to import and run it next.
module_path, attr_name = callable_path.split(':')
module = importlib.import_module(module_path)
callable = getattr(module, attr_name)
return callable()
| # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import importlib
import socket
from airflow.configuration import (conf, AirflowConfigException)
def get_host_ip_address():
socket.gethostbyname(socket.getfqdn())
def get_hostname():
"""
Fetch the hostname using the callable from the config or using
`socket.getfqdn` as a fallback.
"""
# First we attempt to fetch the callable path from the config.
try:
callable_path = conf.get('core', 'hostname_callable')
except AirflowConfigException:
callable_path = None
# Then we handle the case when the config is missing or empty. This is the
# default behavior.
if not callable_path:
return socket.getfqdn()
# Since we have a callable path, we try to import and run it next.
module_path, attr_name = callable_path.split(':')
module = importlib.import_module(module_path)
callable = getattr(module, attr_name)
return callable()
| apache-2.0 | Python |
234802a5a2479918cf927bcac8f824951df63f62 | use weakrefset if WeakSet is not available | bwesterb/sarah | src/refStore.py | src/refStore.py | from __future__ import with_statement
from mirte.core import Module
from sarah.event import Event
import weakref
import threading
# python 2.6 support
if hasattr(weakref, 'WeakSet'):
WeakSet = weakref.WeakSet
else:
import weakrefset
WeakSet = weakrefset.WeakSet
class RefStore(Module):
def __init__(self, *args, **kwargs):
super(RefStore, self).__init__(*args, **kwargs)
self.namespaces = WeakSet()
def create_namespace(self):
ret = NameSpace()
self.namespaces.add(ret)
return ret
class NameSpace(object):
def __init__(self):
self.obj_to_key = weakref.WeakKeyDictionary()
self.key_to_obj = weakref.WeakValueDictionary()
self.lock = threading.Lock()
self.n = 0
def key_of(self, obj):
with self.lock:
try:
return self.obj_to_key[obj]
except KeyError:
self.n += 1
self.obj_to_key[obj] = self.n
self.key_to_obj[self.n] = obj
return self.n
def by_key(self, key):
return self.key_to_obj[key]
| from __future__ import with_statement
from mirte.core import Module
from sarah.event import Event
import weakref
import threading
class RefStore(Module):
def __init__(self, *args, **kwargs):
super(RefStore, self).__init__(*args, **kwargs)
self.namespaces = weakref.WeakSet()
def create_namespace(self):
ret = NameSpace()
self.namespaces.add(ret)
return ret
class NameSpace(object):
def __init__(self):
self.obj_to_key = weakref.WeakKeyDictionary()
self.key_to_obj = weakref.WeakValueDictionary()
self.lock = threading.Lock()
self.n = 0
def key_of(self, obj):
with self.lock:
try:
return self.obj_to_key[obj]
except KeyError:
self.n += 1
self.obj_to_key[obj] = self.n
self.key_to_obj[self.n] = obj
return self.n
def by_key(self, key):
return self.key_to_obj[key]
| agpl-3.0 | Python |
38e9dfba0642eef16496b7f54c87c6fd55e40e9b | use full search text to allow for low faker entropy | yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core | foodsaving/groups/tests/test_groups_api_filter.py | foodsaving/groups/tests/test_groups_api_filter.py | from rest_framework import status
from rest_framework.test import APITestCase
from foodsaving.groups.factories import GroupFactory
from foodsaving.users.factories import UserFactory
class TestGroupsAPIFilter(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.url = '/api/groups/'
# some user
cls.user = UserFactory()
# two groups with different members
cls.member = UserFactory()
cls.group = GroupFactory(members=[cls.member, ])
cls.member2 = UserFactory()
cls.group2 = GroupFactory(members=[cls.member2, ])
cls.empty_group = GroupFactory()
def test_filter_by_member(self):
response = self.client.get(self.url, {'members': self.member.id})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
response = self.client.get(self.url, {'members': self.member.id})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
def test_dont_include_empty(self):
response = self.client.get(self.url, {'include_empty': False})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
self.assertFalse(self.empty_group.id in [_['id'] for _ in response.data])
def test_include_empty(self):
response = self.client.get(self.url, {'include_empty': True})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 3)
def test_search_name(self):
response = self.client.get(self.url, {'search': self.group.name})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
def test_search_description(self):
response = self.client.get(self.url, {'search': self.group.public_description})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
| from rest_framework import status
from rest_framework.test import APITestCase
from foodsaving.groups.factories import GroupFactory
from foodsaving.users.factories import UserFactory
class TestStoresAPIFilter(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.url = '/api/groups/'
# some user
cls.user = UserFactory()
# two groups with different members
cls.member = UserFactory()
cls.group = GroupFactory(members=[cls.member, ])
cls.member2 = UserFactory()
cls.group2 = GroupFactory(members=[cls.member2, ])
cls.empty_group = GroupFactory()
def test_filter_by_member(self):
response = self.client.get(self.url, {'members': self.member.id})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
response = self.client.get(self.url, {'members': self.member.id})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
def test_dont_include_empty(self):
response = self.client.get(self.url, {'include_empty': False})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
self.assertFalse(self.empty_group.id in [_['id'] for _ in response.data])
def test_include_empty(self):
response = self.client.get(self.url, {'include_empty': True})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 3)
def test_search_name(self):
response = self.client.get(self.url, {'search': self.group.name})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
def test_search_description(self):
response = self.client.get(self.url, {'search': self.group.public_description[:10]})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['name'], self.group.name)
| agpl-3.0 | Python |
f56302afef540124d2d446bb995feb18ef2d20bc | use single quotes | aldryn/aldryn-blog,aldryn/aldryn-blog | aldryn_blog/admin.py | aldryn_blog/admin.py | # -*- coding: utf-8 -*-
import copy
from django.conf import settings
from django.contrib import admin
from aldryn_blog.forms import PostForm
from aldryn_blog.models import Post
import cms
from cms.admin.placeholderadmin import PlaceholderAdmin
from cms.admin.placeholderadmin import FrontendEditableAdmin
from distutils.version import LooseVersion
class PostAdmin(FrontendEditableAdmin, PlaceholderAdmin):
render_placeholder_language_tabs = False
list_display = ['title', 'author', 'publication_start', 'publication_end']
date_hierarchy = 'publication_start'
raw_id_fields = ['author']
form = PostForm
frontend_editable_fields = ('title', 'lead_in')
_fieldsets = [
(None, {
'fields': ['title', 'slug', 'publication_start', 'publication_end', 'author', 'language']
}),
(None, {
'fields': ['key_visual', 'lead_in', 'tags']
}),
('Content', {
'classes': ['plugin-holder', 'plugin-holder-nopage'],
'fields': ['content']
}),
]
def get_fieldsets(self, request, obj=None):
fieldsets = copy.deepcopy(self._fieldsets)
# remove language field if only one language is available
if len(settings.LANGUAGES) <= 1:
fieldsets[0][1]['fields'] = fieldsets[0][1]['fields'][:-1]
# remove placeholder field if CMS 3.0
if LooseVersion(cms.__version__) >= LooseVersion('3.0'):
del fieldsets[-1]
return fieldsets
def get_list_display(self, request):
if len(settings.LANGUAGES) > 1:
return self.list_display + ['language']
return self.list_display
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(PostAdmin, self).add_view(request, *args, **kwargs)
admin.site.register(Post, PostAdmin)
| # -*- coding: utf-8 -*-
import copy
from django.conf import settings
from django.contrib import admin
from aldryn_blog.forms import PostForm
from aldryn_blog.models import Post
import cms
from cms.admin.placeholderadmin import PlaceholderAdmin
from cms.admin.placeholderadmin import FrontendEditableAdmin
from distutils.version import LooseVersion
class PostAdmin(FrontendEditableAdmin, PlaceholderAdmin):
render_placeholder_language_tabs = False
list_display = ['title', 'author', 'publication_start', 'publication_end']
date_hierarchy = 'publication_start'
raw_id_fields = ['author']
form = PostForm
frontend_editable_fields = ("title", "lead_in")
_fieldsets = [
(None, {
'fields': ['title', 'slug', 'publication_start', 'publication_end', 'author', 'language']
}),
(None, {
'fields': ['key_visual', 'lead_in', 'tags']
}),
('Content', {
'classes': ['plugin-holder', 'plugin-holder-nopage'],
'fields': ['content']
}),
]
def get_fieldsets(self, request, obj=None):
fieldsets = copy.deepcopy(self._fieldsets)
# remove language field if only one language is available
if len(settings.LANGUAGES) <= 1:
fieldsets[0][1]['fields'] = fieldsets[0][1]['fields'][:-1]
# remove placeholder field if CMS 3.0
if LooseVersion(cms.__version__) >= LooseVersion('3.0'):
del fieldsets[-1]
return fieldsets
def get_list_display(self, request):
if len(settings.LANGUAGES) > 1:
return self.list_display + ['language']
return self.list_display
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(PostAdmin, self).add_view(request, *args, **kwargs)
admin.site.register(Post, PostAdmin)
| bsd-3-clause | Python |
8a249d7a881d07f1f2ddef529fe221579745663c | Fix typo | shirone/angkot,angkot/angkot,angkot/angkot,shirone/angkot,angkot/angkot,shirone/angkot,angkot/angkot,shirone/angkot | angkot/decorators.py | angkot/decorators.py | class APIResponse(object):
def __init__(self):
self.headers = {}
def __setitem__(self, key, value):
self.headers[key] = value
def __getitem__(self, key):
return self.headers[key]
class OK(APIResponse):
def __init__(self, data, http_code=200):
super(OK, self).__init__()
self.data = data
self.http_code = http_code
@property
def dict(self):
data = dict(self.data)
data['status'] = 'ok'
return data
class Fail(APIResponse):
def __init__(self, data=None, http_code=500,
error_code=500, error_msg="Internal server error"):
super(Fail, self).__init__()
if data is None:
data = {}
self.data = data
self.http_code = http_code
self.error_code = error_code
self.error_msg = error_msg
@property
def dict(self):
data = dict(self.data)
data['status'] = 'fail'
data['code'] = self.error_code
data['msg'] = self.error_msg
return data
def api(func):
from django.http import HttpResponse
import json
def _func(request, *args, **kwargs):
res = func(request, *args, **kwargs)
code = 200
headers = {}
if res is None:
data = {'status': 'ok'}
elif type(res) == dict:
data = dict(res)
data['status'] = 'ok'
elif isinstance(res, OK):
data = res.dict
code = res.http_code
headers = res.headers
elif isinstance(res, Fail):
data = res.dict
code = res.http_code
headers = res.headers
else:
code = 500
data = {'status': 'fail',
'code': 501,
'msg': 'Internal server error'}
data = json.dumps(data)
res = HttpResponse(data, status=code, content_type='text/plain')
for key, value in headers.items():
res[key] = value
return res
return _func
| class APIResponse(object):
def __init__(self):
self.headers = {}
def __setitem__(self, key, value):
self.headers[key] = value
def __getitem__(self, key):
return self.headers[key]
class OK(APIResponse):
def __init__(self, data, http_code=200):
super(OK, self).__init__()
self.data = data
self.http_code = http_code
@property
def dict(self):
data = dict(self.data)
data['status'] = 'ok'
return data
class Fail(APIResponse):
def __init__(self, data=None, http_code=500,
error_code=500, error_msg="Internal server error"):
super(OK, self).__init__()
if data is None:
data = {}
self.data = data
self.http_code = http_code
self.error_code = error_code
self.error_msg = error_msg
@property
def dict(self):
data = dict(self.data)
data['status'] = 'fail'
data['code'] = self.error_code
data['msg'] = self.error_msg
return data
def api(func):
from django.http import HttpResponse
import json
def _func(request, *args, **kwargs):
res = func(request, *args, **kwargs)
code = 200
headers = {}
if res is None:
data = {'status': 'ok'}
elif type(res) == dict:
data = dict(res)
data['status'] = 'ok'
elif isinstance(res, OK):
data = res.dict
code = res.http_code
headers = res.headers
elif isinstance(res, Fail):
data = res.dict
code = res.http_code
headers = res.headers
else:
code = 500
data = {'status': 'fail',
'code': 501,
'msg': 'Internal server error'}
data = json.dumps(data)
res = HttpResponse(data, status=code, content_type='text/plain')
for key, value in headers.items():
res[key] = value
return res
return _func
| agpl-3.0 | Python |
93f2ff45ff3d61487ed061ae3d1a65051c3d1799 | Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong. | svn2github/django,svn2github/django,svn2github/django | django/contrib/admin/__init__.py | django/contrib/admin/__init__.py | # ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
| from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
| bsd-3-clause | Python |
4dc7dc14ddc9309cde0c06089c9c544b3a6d94a3 | Put back methods for platform controller | ednad/ooi-ui-services,oceanzus/ooi-ui-services,FBRTMaka/ooi-ui-services,DanielJMaher/ooi-ui-services,asascience-open/ooi-ui-services,Bobfrat/ooi-ui-services,birdage/ooi-ui-services | ooiservices/controller/platform.py | ooiservices/controller/platform.py | #!/usr/bin/env python
'''
ooiservices.controller.platform.py
PlatformController
'''
from ooiservices.controller.base import BaseController
from ooiservices.model.platform import PlatformModel
from flask.ext.restful import Resource
from flask import request
__author__ = "Brian McKenna"
class PlatformController(BaseController):
plat = PlatformModel()
def __init__(self):
BaseController.__init__(self)
def get(self, id):
result = self.plat.read(id)
#Formats the json dict to be used by the view:
formatted_result = self.plat.filter_ooi_platforms(result)
return formatted_result
def put(self, id):
args = request.args
params = args.items()
doc = {}
if params:
for item in params:
doc[item[0]] = item[1]
doc['id'] = id
result = self.plat.update(doc)
return result
def delete(self, id):
return self.plat.delete(id)
class List(Resource):
result = self.plat.update(doc)
return result
def delete(self, id):
return self.plat.delete(id)
class List(Resource):
plat = PlatformModel()
def get(self):
result = self.plat.read()
#Formats the json dict to be used by the view:
formatted_result = self.plat.filter_ooi_platforms(result)
return formatted_result | #!/usr/bin/env python
'''
ooiservices.controller.platform.py
PlatformController
'''
from ooiservices.controller.base import BaseController
from ooiservices.model.platform import PlatformModel
from flask.ext.restful import Resource
from flask import request
__author__ = "Brian McKenna"
class PlatformController(BaseController):
plat = PlatformModel()
def __init__(self):
BaseController.__init__(self)
def get(self, id):
result = self.plat.read(id)
#Formats the json dict to be used by the view:
formatted_result = self.plat.filter_ooi_platforms(result)
return formatted_result
def put(self, id):
args = request.args
params = args.items()
doc = {}
if params:
for item in params:
doc[item[0]] = item[1]
doc['id'] = id
result = self.plat.update(doc)
return result
def delete(self, id):
return self.plat.delete(id)
class List(Resource):
plat = PlatformModel()
def get(self):
result = self.plat.read()
#Formats the json dict to be used by the view:
formatted_result = self.plat.filter_ooi_platforms(result)
return formatted_result | apache-2.0 | Python |
e9046eaaf2058bba137139ce903898e2b22b6343 | Fix a couple of bugs in AttemptViewSet.submit | ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo | web/attempts/rest.py | web/attempts/rest.py | import json
from rest_framework.viewsets import ModelViewSet
from rest_framework.serializers import ModelSerializer
from rest_framework.response import Response
from rest_framework import fields, decorators, validators
from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_400_BAD_REQUEST
from .models import Attempt
class AttemptSerializer(ModelSerializer):
"""
Serialize an Attempt object.
"""
secret = fields.CharField(write_only=True)
class Meta:
model = Attempt
@staticmethod
def check_secret(validated_data):
# Check and remove secret from the validated_data dictionary
user_secret = json.loads(validated_data.pop('secret', '[]'))
secret_matches, hint = validated_data['part'].check_secret(user_secret)
if not secret_matches:
validated_data['accepted'] = False
def create(self, validated_data):
self.check_secret(validated_data)
return super(AttemptSerializer, self).create(validated_data)
def update(self, instance, validated_data):
self.check_secret(validated_data)
return super(AttemptSerializer, self).update(instance, validated_data)
class AttemptViewSet(ModelViewSet):
"""
A viewset for viewing and editing Attempt instances.
"""
serializer_class = AttemptSerializer
queryset = Attempt.objects.all()
@decorators.list_route(methods=['post'])
def submit(self, request):
serializer = AttemptSerializer(data=request.data)
def _f(validator):
return not isinstance(validator, validators.UniqueTogetherValidator)
serializer.validators = filter(_f, serializer.validators)
if serializer.is_valid():
AttemptSerializer.check_secret(serializer.validated_data)
created = Attempt.objects.update_or_create(
user=serializer.validated_data['user'],
part=serializer.validated_data['part'],
defaults=serializer.validated_data)[1]
status = HTTP_201_CREATED if created else HTTP_200_OK
response = {'status': 'submission saved'}
return Response(json.dumps(response), status=status)
else:
return Response(serializer.errors,
status=HTTP_400_BAD_REQUEST)
| import json
from rest_framework.viewsets import ModelViewSet
from rest_framework.serializers import ModelSerializer
from rest_framework.response import Response
from rest_framework import fields, decorators, validators
from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED
from .models import Attempt
class AttemptSerializer(ModelSerializer):
"""
Serialize an Attempt object.
"""
secret = fields.CharField(write_only=True)
class Meta:
model = Attempt
@staticmethod
def check_secret(validated_data):
# Check and remove secret from the validated_data dictionary
user_secret = json.loads(validated_data.pop('secret', '[]'))
secret_matches, hint = validated_data['part'].check_secret(user_secret)
if not secret_matches:
validated_data['accepted'] = False
def create(self, validated_data):
self.check_secret(validated_data)
return super(AttemptSerializer, self).create(validated_data)
def update(self, instance, validated_data):
self.check_secret(validated_data)
return super(AttemptSerializer, self).update(instance, validated_data)
class AttemptViewSet(ModelViewSet):
"""
A viewset for viewing and editing Attempt instances.
"""
serializer_class = AttemptSerializer
queryset = Attempt.objects.all()
@decorators.list_route(methods=['post'])
def submit(self, request):
serializer = AttemptSerializer(data=request.data)
def _f(validator):
not isinstance(validator, validators.UniqueTogetherValidator)
serializer.validators = filter(_f, serializer.validators)
if serializer.is_valid():
AttemptSerializer.check_secret(serializer.validated_data)
created = Attempt.objects.update_or_create(
user=serializer.validated_data['user'],
part=serializer.validated_data['part'],
defaults=serializer.validated_data)[1]
status = HTTP_201_CREATED if created else HTTP_200_OK
response = {'status': 'submission saved'}
return Response(json.dumps(response), status=status)
else:
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
| agpl-3.0 | Python |
cb71ba473c70ee4a5269f0f3adcfff0815e46789 | Use MD5 password hashing for tests | pculture/unisubs,pculture/unisubs,ReachingOut/unisubs,ofer43211/unisubs,wevoice/wesub,norayr/unisubs,eloquence/unisubs,norayr/unisubs,eloquence/unisubs,norayr/unisubs,ofer43211/unisubs,eloquence/unisubs,pculture/unisubs,wevoice/wesub,ofer43211/unisubs,ujdhesa/unisubs,ujdhesa/unisubs,ujdhesa/unisubs,ReachingOut/unisubs,ReachingOut/unisubs,ReachingOut/unisubs,wevoice/wesub,pculture/unisubs,ujdhesa/unisubs,eloquence/unisubs,ofer43211/unisubs,norayr/unisubs,wevoice/wesub | dev_settings_test.py | dev_settings_test.py | # Amara, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from settings import *
from dev_settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': "/tmp/django_test_db.sqlite",
'USER': "",
'PASSWORD': "",
'HOST': "",
'PORT': ''
}
}
CACHE_PREFIX = "testcache"
CACHE_TIMEOUT = 60
DEFAULT_PROTOCOL = 'https'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_PLUGINS = ['utils.test_utils.UnisubsTestPlugin']
CELERY_ALWAYS_EAGER = True
# Use MD5 password hashing, other algorithms are purposefully slow to increase
# security. Also include the SHA1 hasher since some of the tests use it.
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
)
import logging
logging.getLogger('pysolr').setLevel(logging.ERROR)
try:
from dev_settings_test_local import *
except ImportError:
pass
| # Amara, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from settings import *
from dev_settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': "/tmp/django_test_db.sqlite",
'USER': "",
'PASSWORD': "",
'HOST': "",
'PORT': ''
}
}
CACHE_PREFIX = "testcache"
CACHE_TIMEOUT = 60
DEFAULT_PROTOCOL = 'https'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_PLUGINS = ['utils.test_utils.UnisubsTestPlugin']
CELERY_ALWAYS_EAGER = True
import logging
logging.getLogger('pysolr').setLevel(logging.ERROR)
try:
from dev_settings_test_local import *
except ImportError:
pass
| agpl-3.0 | Python |
445f244ddac6001b65f03d058a14178a19919eed | Allow Config to be initialised without any args | praekelt/diamondash,praekelt/diamondash,praekelt/diamondash | diamondash/config.py | diamondash/config.py | import yaml
from diamondash import utils
class ConfigError(Exception):
"""Raised when there is an error parsing a configuration"""
class ConfigMetaClass(type):
def __new__(mcs, name, bases, dict):
cls = type.__new__(mcs, name, bases, dict)
defaults = {}
for base in bases:
if hasattr(base, 'DEFAULTS'):
defaults.update(base.DEFAULTS)
defaults.update(cls.DEFAULTS)
cls.DEFAULTS = defaults
return cls
class Config(dict):
__metaclass__ = ConfigMetaClass
DEFAULTS = {}
def __init__(self, items=None):
super(Config, self).__init__(self._parse(items or {}))
@classmethod
def parse(cls, items):
return items
@classmethod
def _parse(cls, items):
items = utils.add_dicts(cls.DEFAULTS, items)
return cls.parse(items)
@classmethod
def from_file(cls, filename, **defaults):
items = utils.add_dicts(defaults, yaml.safe_load(open(filename)))
return cls(items)
@classmethod
def for_type(cls, type_name):
type_cls = utils.load_class_by_string(type_name)
return type_cls.CONFIG_CLS
| import yaml
from diamondash import utils
class ConfigError(Exception):
"""Raised when there is an error parsing a configuration"""
class ConfigMetaClass(type):
def __new__(mcs, name, bases, dict):
cls = type.__new__(mcs, name, bases, dict)
defaults = {}
for base in bases:
if hasattr(base, 'DEFAULTS'):
defaults.update(base.DEFAULTS)
defaults.update(cls.DEFAULTS)
cls.DEFAULTS = defaults
return cls
class Config(dict):
__metaclass__ = ConfigMetaClass
DEFAULTS = {}
def __init__(self, items):
super(Config, self).__init__(self._parse(items))
@classmethod
def parse(cls, items):
return items
@classmethod
def _parse(cls, items):
items = utils.add_dicts(cls.DEFAULTS, items)
return cls.parse(items)
@classmethod
def from_file(cls, filename, **defaults):
items = utils.add_dicts(defaults, yaml.safe_load(open(filename)))
return cls(items)
@classmethod
def for_type(cls, type_name):
type_cls = utils.load_class_by_string(type_name)
return type_cls.CONFIG_CLS
| bsd-3-clause | Python |
809df0ddfae0ffe2268c94d41be366d8a28f6854 | replace deepcopy with copy | AlexYukikaze/JSONx | JSONx/utils.py | JSONx/utils.py | __all__ = ['on', 'when', 'decode_escapes', 'get_dict_path']
import re
import codecs
import copy
ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U........ # 8-digit hex escapes
| \\u.... # 4-digit hex escapes
| \\x.. # 2-digit hex escapes
| \\[0-7]{1,3} # Octal escapes
| \\N\{[^}]+\} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)''', re.UNICODE | re.VERBOSE)
def decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def get_dict_path(dic, path):
def callback(accumulator, key):
obj, keys = accumulator
if isinstance(obj, dict):
if key in obj:
keys.append(key)
return obj[key], keys
path_string = '/'.join(keys)
raise Exception('Object "./{}" has no key "{}"'.format(path_string, key))
try:
path = path.strip(' ./').replace('.', '/')
if not path:
return dic, None
result, _ = reduce(callback, path.split('/'), (dic, []))
return copy.copy(result), None
except Exception, e:
return None, e.message
def get_position(string, index):
lines = string.splitlines(True)
curr_pos = 0
for line_num, line in enumerate(lines):
if curr_pos + len(line) > index:
return line_num + 1, index - curr_pos + 1
curr_pos += len(line)
| __all__ = ['on', 'when', 'decode_escapes', 'get_dict_path']
import re
import codecs
ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U........ # 8-digit hex escapes
| \\u.... # 4-digit hex escapes
| \\x.. # 2-digit hex escapes
| \\[0-7]{1,3} # Octal escapes
| \\N\{[^}]+\} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)''', re.UNICODE | re.VERBOSE)
def decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def get_dict_path(dic, path):
import copy
import collections
def callback(accumulator, key):
obj, keys = accumulator
if isinstance(obj, collections.Mapping):
if key in obj:
keys.append(key)
return obj[key], keys
path_string = '/'.join(keys)
raise Exception('Object "./{}" has no key "{}"'.format(path_string, key))
try:
path = path.strip(' ./').replace('.', '/')
if not path:
return dic, None
result, _ = reduce(callback, path.split('/'), (dic, []))
return copy.deepcopy(result), None
except Exception, e:
return None, e.message
def get_position(string, index):
lines = string.splitlines(True)
curr_pos = 0
for line_num, line in enumerate(lines):
if curr_pos + len(line) > index:
return line_num + 1, index - curr_pos + 1
curr_pos += len(line)
| mit | Python |
6542cde820ed65e9f64a5d8e42e09f950b0d4986 | Document Luna.Logger.Logger.log | Ghostkeeper/Luna | Luna/Logger.py | Luna/Logger.py | #!/usr/bin/env python
#This is free and unencumbered software released into the public domain.
#
#Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
#software, either in source code form or as a compiled binary, for any purpose,
#commercial or non-commercial, and by any means.
#
#In jurisdictions that recognize copyright laws, the author or authors of this
#software dedicate any and all copyright interest in the software to the public
#domain. We make this dedication for the benefit of the public at large and to
#the detriment of our heirs and successors. We intend this dedication to be an
#overt act of relinquishment in perpetuity of all present and future rights to
#this software under copyright law.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
#ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
#WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#For more information, please refer to <https://unlicense.org/>
from enum import Enum #To define the log levels.
import Luna.Plugins #To call all the loggers to log.
#Enumerates the logging importance levels.
class Level(Enum):
#For logging fatal errors that will crash the program.
ERROR = 1
#For logging fatal errors that will crash the current operation.
CRITICAL = 2
#For logging events that are probably not going the way the user intended.
WARNING = 3
#For logging events, at least all events that got initiated from an external
#source.
INFO = 4
#Information that might be useful for a debugger to know.
DEBUG = 5
#Provides an API to use logger plug-ins.
class Logger:
#Logs a new message.
def log(level,message,*args):
substituted = message % args #Substitute all arguments into the message.
loggers = Luna.Plugins.Plugins.getLoggers()
for logger in loggers:
logger.log(level,substituted)
if not loggers: #If there are no loggers, fall back to the built-in logging system.
Logger.__fallbackLog(level,substituted)
#Logs a message to the standard output.
#
#This way of logging is meant to be kept very simple. It is used only when
#there are no other logging methods available, still providing a way of
#debugging if something goes wrong during the plug-in loading.
#
#\param level The message importance level.
#\param message The message to log.
def __fallbackLog(level,message):
if level == Level.ERROR:
levelStr = "ERROR"
elif level == Level.CRITICAL:
levelStr = "CRITICAL"
elif level == Level.WARNING:
levelStr = "WARNING"
elif level == Level.INFO:
levelStr = "INFO"
elif level == Level.DEBUG:
levelStr = "DEBUG"
print("[" + levelStr + "] " + message) | #!/usr/bin/env python
#This is free and unencumbered software released into the public domain.
#
#Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
#software, either in source code form or as a compiled binary, for any purpose,
#commercial or non-commercial, and by any means.
#
#In jurisdictions that recognize copyright laws, the author or authors of this
#software dedicate any and all copyright interest in the software to the public
#domain. We make this dedication for the benefit of the public at large and to
#the detriment of our heirs and successors. We intend this dedication to be an
#overt act of relinquishment in perpetuity of all present and future rights to
#this software under copyright law.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
#ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
#WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#For more information, please refer to <https://unlicense.org/>
from enum import Enum #To define the log levels.
import Luna.Plugins #To call all the loggers to log.
#Enumerates the logging importance levels.
class Level(Enum):
#For logging fatal errors that will crash the program.
ERROR = 1
#For logging fatal errors that will crash the current operation.
CRITICAL = 2
#For logging events that are probably not going the way the user intended.
WARNING = 3
#For logging events, at least all events that got initiated from an external
#source.
INFO = 4
#Information that might be useful for a debugger to know.
DEBUG = 5
#Provides an API to use logger plug-ins.
class Logger:
def log(level,message,*args):
substituted = message % args #Substitute all arguments into the message.
loggers = Luna.Plugins.Plugins.getLoggers()
for logger in loggers:
logger.log(level,substituted)
if not loggers: #If there are no loggers, fall back to the built-in logging system.
Logger.__fallbackLog(level,substituted)
#Logs a message to the standard output.
#
#This way of logging is meant to be kept very simple. It is used only when
#there are no other logging methods available, still providing a way of
#debugging if something goes wrong during the plug-in loading.
#
#\param level The message importance level.
#\param message The message to log.
def __fallbackLog(level,message):
if level == Level.ERROR:
levelStr = "ERROR"
elif level == Level.CRITICAL:
levelStr = "CRITICAL"
elif level == Level.WARNING:
levelStr = "WARNING"
elif level == Level.INFO:
levelStr = "INFO"
elif level == Level.DEBUG:
levelStr = "DEBUG"
print("[" + levelStr + "] " + message) | cc0-1.0 | Python |
5ef6aaa6743fc6996ed06dfc4622a456cc817095 | Remove redundant space line | bowen0701/algorithms_data_structures | lc0063_unique_paths_ii.py | lc0063_unique_paths_ii.py | """Leetcode 63. Unique Paths II.
Medium
URL: https://leetcode.com/problems/unique-paths-ii/
A robot is located at the top-left corner of a m x n grid
(marked 'Start' in the diagram below).
The robot can only move either down or right at any point in time.
The robot is trying to reach the bottom-right corner of the grid
(marked 'Finish' in the diagram below).
Now consider if some obstacles are added to the grids.
How many unique paths would there be?
An obstacle and empty space is marked as 1 and 0 respectively in the grid.
Note: m and n will be at most 100.
Example 1:
Input:
[
[0,0,0],
[0,1,0],
[0,0,0]
]
Output: 2
"""
class Solution(object):
def uniquePathsWithObstacles(self, obstacleGrid):
"""Unique paths with obstacles.
Time complexity: O(mn).
Space complexity: O(mn).
"""
m, n = len(obstacleGrid), len(obstacleGrid[0])
path = [[0] * n for _ in range(m)]
# Set the top-left to 1.
if obstacleGrid[0][0] == 0:
path[0][0] = 1
# Set the 1st row to 1 util reach obstacles.
for j in range(1, n):
if obstacleGrid[0][j] == 0:
path[0][j] = path[0][j - 1]
else:
break
# Set the 1st col to 1 util reach obstacles.
for i in range(1, m):
if obstacleGrid[i][0] == 0:
path[i][0] = path[i - 1][0]
else:
break
# Compute path to (i, j) from (i - 1, j) and (i, j - 1).
for i in range(1, m):
for j in range(1, n):
if obstacleGrid[i][j] == 0:
path[i][j] = path[i - 1][j] + path[i][j - 1]
return path[-1][-1]
def main():
obstacleGrid = [
[0,0,0],
[0,1,0],
[0,0,0]
]
print Solution().uniquePathsWithObstacles(obstacleGrid)
if __name__ == '__main__':
main()
| """Leetcode 63. Unique Paths II.
Medium
URL: https://leetcode.com/problems/unique-paths-ii/
A robot is located at the top-left corner of a m x n grid
(marked 'Start' in the diagram below).
The robot can only move either down or right at any point in time.
The robot is trying to reach the bottom-right corner of the grid
(marked 'Finish' in the diagram below).
Now consider if some obstacles are added to the grids.
How many unique paths would there be?
An obstacle and empty space is marked as 1 and 0 respectively in the grid.
Note: m and n will be at most 100.
Example 1:
Input:
[
[0,0,0],
[0,1,0],
[0,0,0]
]
Output: 2
"""
class Solution(object):
def uniquePathsWithObstacles(self, obstacleGrid):
"""Unique paths with obstacles.
Time complexity: O(mn).
Space complexity: O(mn).
"""
m, n = len(obstacleGrid), len(obstacleGrid[0])
path = [[0] * n for _ in range(m)]
# Set the top-left to 1.
if obstacleGrid[0][0] == 0:
path[0][0] = 1
# Set the 1st row to 1 util reach obstacles.
for j in range(1, n):
if obstacleGrid[0][j] == 0:
path[0][j] = path[0][j - 1]
else:
break
# Set the 1st col to 1 util reach obstacles.
for i in range(1, m):
if obstacleGrid[i][0] == 0:
path[i][0] = path[i - 1][0]
else:
break
# Compute path to (i, j) from (i - 1, j) and (i, j - 1).
for i in range(1, m):
for j in range(1, n):
if obstacleGrid[i][j] == 0:
path[i][j] = path[i - 1][j] + path[i][j - 1]
return path[-1][-1]
def main():
obstacleGrid = [
[0,0,0],
[0,1,0],
[0,0,0]
]
print Solution().uniquePathsWithObstacles(obstacleGrid)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
e843793d0a611143bbee179469ab3557744e19ef | fix sillyness | tonybaloney/st2contrib,armab/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,armab/st2contrib,StackStorm/st2contrib,armab/st2contrib | packs/windows/actions/winrm_cmd.py | packs/windows/actions/winrm_cmd.py | from winrm.protocol import Protocol
from st2actions.runners.pythonrunner import Action
__all__ = [
'WinRMCmdAction'
]
class WinRMCmdAction(Action):
def run(self, host, password, command, params, username='Administrator',
port=5732, secure=True):
proto = 'https' if secure else 'http'
p = Protocol(
endpoint='%s://%s:%i/wsman' % (proto, host, port), # RFC 2732?
transport='ntlm',
username=username,
password=password,
server_cert_validation='ignore')
shell_id = p.open_shell()
# run the command
command_id = p.run_command(shell_id, command, params)
std_out, std_err, status_code = p.get_command_output(shell_id,
command_id)
p.cleanup_command(shell_id, command_id)
p.close_shell(shell_id)
return {'stdout': std_out, 'stderr': std_err}
| from winrm.protocol import Protocol
from st2actions.runners.pythonrunner import Action
__all__ = [
'WinRMCmdAction'
]
class WinRMCmdAction(Action):
def run(self, host, password, command, params, username='Administrator',
port=5732, secure=True):
proto = 'https' if secure else 'http'
p = Protocol(
endpoint='%s://%s:%i/wsman' % (proto, host, port), # RFC 2732?
transport='ntlm',
username=username,
password=password,
server_cert_validation='ignore')
shell_id = p.open_shell()
std_out_logs = []
std_err_logs = []
# run the command
command_id = p.run_command(shell_id, command, params)
std_out, std_err, status_code = p.get_command_output(shell_id,
command_id)
std_out_logs.append(std_out)
std_err_logs.append(std_err)
p.cleanup_command(shell_id, command_id)
p.close_shell(shell_id)
return {'stdout': std_out, 'stderr': std_err}
| apache-2.0 | Python |
ff77d37b09c798ed68b27b5e91978a471408b437 | Update __init__.py | r0h4n/node-agent,Tendrl/node_agent,Tendrl/node-agent,Tendrl/node_agent,r0h4n/node-agent,Tendrl/node-agent,r0h4n/node-agent,Tendrl/node-agent | tendrl/node_agent/objects/global_network/__init__.py | tendrl/node_agent/objects/global_network/__init__.py | from tendrl.commons.etcdobj import EtcdObj
from tendrl.commons import objects
class GlobalNetwork(objects.BaseObject):
def __init__(self, interface=None, interface_id=None,
ipv4=None, ipv6=None, netmask=None, subnet=None,
status=None, sysfs_id=None, device_link=None,
interface_type=None, model=None, driver_modules=None,
driver=None, drive=None, hw_address=None, link_detected=None,
*args, **kwargs):
super(GlobalNetwork, self).__init__(*args, **kwargs)
# networks/<subnet>/<node_id>/<interface_id>
self.value = 'networks/%s/%s/%s'
self.interface = interface
self.interface_id = interface_id
self.ipv4 = ipv4
self.ipv6 = ipv6
self.netmask = netmask
self.subnet = subnet
self.status = status
self.sysfs_id = sysfs_id
self.device_link = device_link
self.drive = drive
self.interface_type = interface_type
self.model = model
self.driver_modules = driver_modules
self.driver = driver
self.hw_address = hw_address
self.link_detected = link_detected
self._etcd_cls = _GlobalNetworkEtcd
class _GlobalNetworkEtcd(EtcdObj):
"""A table of the Global Network, lazily updated
"""
__name__ = 'networks/%s/%s/%s'
_tendrl_cls = GlobalNetwork
def render(self):
self.__name__ = self.__name__ % (
self.subnet.replace("/", "_"),
NS.node_context.node_id,
self.interface_id
)
return super(_GlobalNetworkEtcd, self).render()
| from tendrl.commons.etcdobj import EtcdObj
from tendrl.commons import objects
class GlobalNetwork(objects.BaseObject):
def __init__(self, interface=None, interface_id=None,
ipv4=None, ipv6=None, netmask=None, subnet=None,
status=None, sysfs_id=None, device_link=None,
interface_type=None, model=None, driver_modules=None,
driver=None, drive=None, hw_address=None, link_detected=None,
*args, **kwargs):
super(GlobalNetwork, self).__init__(*args, **kwargs)
# networks/<subnet>/<node_id>/<interface_id>
self.value = 'networks/%s/%s/%s'
self.interface = interface
self.interface_id = interface_id
self.ipv4 = ipv4
self.ipv6 = ipv6
self.netmask = netmask
self.subnet = subnet
self.status = status
self.sysfs_id = sysfs_id
self.device_link = device_link
self.drive = drive
self.interface_type = interface_type
self.model = model
self.driver_modules = driver_modules
self.driver = driver
self.hw_address = hw_address
self.link_detected = link_detected
self._etcd_cls = _GlobalNetworkEtcd
def load_definition(self):
return {}
class _GlobalNetworkEtcd(EtcdObj):
"""A table of the Global Network, lazily updated
"""
__name__ = 'networks/%s/%s/%s'
_tendrl_cls = GlobalNetwork
def render(self):
self.__name__ = self.__name__ % (
self.subnet.replace("/", "_"),
NS.node_context.node_id,
self.interface_id
)
return super(_GlobalNetworkEtcd, self).render()
| lgpl-2.1 | Python |
1bc661472b2e20fc3f5d1d97db7dd7e3e7dc0661 | Fix the test bug with fetching the Encrypted property and fix doc comment | stratis-storage/stratisd,stratis-storage/stratisd,stratis-storage/stratisd | tests/client-dbus/tests/dbus/pool/test_properties.py | tests/client-dbus/tests/dbus/pool/test_properties.py | # Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test accessing properties of a pool.
"""
# isort: LOCAL
from stratisd_client_dbus import Manager, PoolR1, get_object
from stratisd_client_dbus._constants import TOP_OBJECT
from .._misc import SimTestCase, device_name_list
_DEVICE_STRATEGY = device_name_list(1)
class PropertyTestCase(SimTestCase):
"""
Set up a pool with at least one device.
"""
_POOLNAME = "deadpool"
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
super().setUp()
proxy = get_object(TOP_OBJECT)
((_, (self._pool_object_path, _)), _, _) = Manager.Methods.CreatePool(
proxy,
{
"name": self._POOLNAME,
"redundancy": (True, 0),
"devices": _DEVICE_STRATEGY(),
},
)
def testProps(self):
"""
Test reading some pool properties.
"""
pool = get_object(self._pool_object_path)
is_encrypted = PoolR1.Properties.Encrypted.Get(pool)
self.assertEqual(is_encrypted, False)
| # Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test accessing properties of a pool.
"""
# isort: LOCAL
from stratisd_client_dbus import Manager, PoolR1, get_object
from stratisd_client_dbus._constants import TOP_OBJECT
from .._misc import SimTestCase, device_name_list
_DEVICE_STRATEGY = device_name_list()
class PropertyTestCase(SimTestCase):
"""
Set up a pool with a name and one filesystem.
"""
_POOLNAME = "deadpool"
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
super().setUp()
proxy = get_object(TOP_OBJECT)
((_, (self._pool_object_path, _)), _, _) = Manager.Methods.CreatePool(
proxy,
{
"name": self._POOLNAME,
"redundancy": (True, 0),
"devices": _DEVICE_STRATEGY(),
},
)
def testProps(self):
"""
Test reading some pool properties.
"""
pool = get_object(self._pool_object_path)
is_encrypted = PoolR1.Properties.Encrypted.Get(pool)
self.assertEqual(is_encrypted, False)
| mpl-2.0 | Python |
bfcec696308ee8bfd226a54c17a7e15d49e2aed7 | Add standard header, use spack helpers | matthiasdiener/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,lgarren/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,krafczyk/spack | var/spack/repos/builtin/packages/nextflow/package.py | var/spack/repos/builtin/packages/nextflow/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Nextflow(Package):
"""Data-driven computational pipelines"""
homepage = "http://www.nextflow.io"
version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
expand=False)
depends_on('jdk')
def unpack(self):
pass
def install(self, spec, prefix):
mkdirp(prefix.bin)
install("nextflow", join_path(prefix.bin, "nextflow"))
set_executable( join_path(prefix.bin, "nextflow"))
| from spack import *
from glob import glob
import os
class Nextflow(Package):
"""Data-driven computational pipelines"""
homepage = "http://www.nextflow.io"
version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
expand=False)
depends_on('jdk')
def unpack(self):
pass
def install(self, spec, prefix):
chmod = which('chmod')
mkdirp(prefix.bin)
install("nextflow", join_path(prefix.bin, "nextflow"))
chmod('+x', join_path(prefix.bin, "nextflow"))
| lgpl-2.1 | Python |
bcd272b80fdaca9f77a55e4c5ea276a3af94f0aa | update files | takekazuomi/azure-quickstart-templates,nzthiago/azure-quickstart-templates,slapointe/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,jmservera/azure-quickstart-templates,matt1883/azure-quickstart-templates,jmservera/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,johndowns/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,nzthiago/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,slapointe/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,johndowns/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,slapointe/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,daltskin/azure-quickstart-templates,Azure/azure-quickstart-templates,slapointe/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,slapointe/azure-quickstart-templates,johndowns/azure-quickstart-templates,matt1883/azure-quickstart-templates,johndowns/azure-quickstart-templates,matt1883/azure-quickstart-templates,jmservera/azure-quickstart-templates,neudesic/azure-quickstart-templates,nzthiago/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,Azure/azure-quickstart-templates,Azure/azure-quickstart-templates,neudesic/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,nzthiago/azure-quickstart-templates,johndowns/azure-quickstart-templates,jmservera/azure-quickstart-templates,johndowns/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,slapointe/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,Azure/azure-quickstart-templates,nzthiago/azure-quickstart-templates,neudesic/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,jmservera/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,jmservera/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,Azure/azure-quickstart-templates,neudesic/azure-quickstart-templates,nzthiago/azure-quickstart-templates,neudesic/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,matt1883/azure-quickstart-templates,Azure/azure-quickstart-templates,daltskin/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,daltskin/azure-quickstart-templates,daltskin/azure-quickstart-templates,neudesic/azure-quickstart-templates | 101-machine-learning-service-create/prereqs/Driver.py | 101-machine-learning-service-create/prereqs/Driver.py | import json
import numpy
from sklearn.externals import joblib
from azureml.core.model import Model
from azureml.contrib.services.aml_request import AMLRequest, rawhttp
from azureml.contrib.services.aml_response import AMLResponse
def init():
global model
model_path = Model.get_model_path('sklearn_regression_model1.pkl')
model = joblib.load(model_path)
@rawhttp
def run(request):
if request.method == 'GET':
respBody = str.encode(request.full_path)
return AMLResponse(respBody, 200)
elif request.method == 'POST':
try:
reqBody = request.get_data(False)
raw_data = reqBody.decode("utf-8")
data = json.loads(raw_data)['data']
data = numpy.array(data)
result = model.predict(data)
result_string = json.dumps(result.tolist())
return AMLResponse(result_string, 200)
except Exception as e:
error = str(e)
return AMLResponse(error, 500)
else:
return AMLResponse("bad request", 500) | import os
import json
import numpy
import joblib
def init():
global model
# note here "sklearn_regression_model.pkl" is the name of the model registered under
model_path = os.path.join(os.getenv('AZUREML_MODEL_DIR'), 'sklearn_regression_model.pkl')
# deserialize the model file back into a sklearn model
model = joblib.load(model_path)
# note you can pass in multiple rows for scoring
def run(raw_data):
try:
data = json.loads(raw_data)['data']
data = numpy.array(data)
result = model.predict(data)
# you can return any data type as long as it is JSON-serializable
return result.tolist()
except Exception as e:
error = str(e)
return error
| mit | Python |
03ed25cee706ff0e2e1a6d5e8202a6f0b790059f | modify comments | Som-Energia/invoice-janitor | Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py | Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','=','Aquest fitxer XML ja s\'ha processat en els següents IDs')])
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','=','Ja existeix una factura amb el mateix origen')])
#imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like','Ja existeix una factura')])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
| <<<<<<< HEAD
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','=','Aquest fitxer XML ja s\'ha processat en els següents IDs')])
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','=','Ja existeix una factura amb el mateix origen')])
#imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like','Ja existeix una factura')])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
=======
| agpl-3.0 | Python |
6389552dcbda0161b708b4a9551faa5d2a13d764 | Revert "just pull cases being updated without the ones from stock transactions [skip ci]" | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/form_processor/management/commands/archive_all_forms_for_user_in_domain.py | corehq/form_processor/management/commands/archive_all_forms_for_user_in_domain.py | from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from io import open
from django.core.management.base import BaseCommand
from casexml.apps.case.cleanup import rebuild_case_from_forms
from casexml.apps.case.xform import get_case_ids_from_form
from corehq.apps.users.models import CouchUser
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from corehq.form_processor.models import RebuildWithReason
from corehq.util.log import with_progress_bar
from corehq.form_processor.interfaces.processor import FormProcessorInterface
class Command(BaseCommand):
help = """
Bulk archive forms for user on domain.
First archive all forms and then rebuild corresponding cases
"""
def add_arguments(self, parser):
parser.add_argument('user_id')
parser.add_argument('domain')
def handle(self, user_id, domain, **options):
user = CouchUser.get_by_user_id(user_id)
form_accessor = FormAccessors(domain)
# ordered with latest form's id on top
form_ids = form_accessor.get_form_ids_for_user(user_id)
forms = [f for f in form_accessor.get_forms(form_ids) if f.is_normal]
print("Found %s normal forms for user" % len(form_ids))
case_ids_to_rebuild = set()
for form in forms:
case_ids_to_rebuild.update(get_case_ids_from_form(form))
print("Found %s cases that would need to be rebuilt" % len(case_ids_to_rebuild))
# archive forms
print("Starting with form archival")
with open("forms_archived.txt", "w+b") as forms_log:
for form in with_progress_bar(forms):
forms_log.write("%s\n" % form.form_id)
form.archive(rebuild_cases=False)
# removing data
for xform in with_progress_bar(forms):
FormProcessorInterface(xform.domain).ledger_processor.process_form_archived(xform)
# archive cases
print("Starting with case archival")
reason = "User %s forms archived for domain %s by system" % (user.raw_username, domain)
with open("cases_rebuilt.txt", "w+b") as case_log:
for case_id in with_progress_bar(case_ids_to_rebuild):
case_log.write("%s\n" % case_id)
rebuild_case_from_forms(domain, case_id, RebuildWithReason(reason=reason))
| from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from io import open
from django.core.management.base import BaseCommand
from casexml.apps.case.cleanup import rebuild_case_from_forms
from corehq.apps.users.models import CouchUser
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from corehq.form_processor.models import RebuildWithReason
from casexml.apps.case.xform import get_case_updates
from corehq.util.log import with_progress_bar
from corehq.form_processor.interfaces.processor import FormProcessorInterface
class Command(BaseCommand):
help = """
Bulk archive forms for user on domain.
First archive all forms and then rebuild corresponding cases
"""
def add_arguments(self, parser):
parser.add_argument('user_id')
parser.add_argument('domain')
def handle(self, user_id, domain, **options):
user = CouchUser.get_by_user_id(user_id)
form_accessor = FormAccessors(domain)
# ordered with latest form's id on top
form_ids = form_accessor.get_form_ids_for_user(user_id)
forms = [f for f in form_accessor.get_forms(form_ids) if f.is_normal]
print("Found %s normal forms for user" % len(form_ids))
case_ids_to_rebuild = set()
for form in forms:
form_case_ids = set(cu.id for cu in get_case_updates(form))
case_ids_to_rebuild.update(form_case_ids)
print("Found %s cases that would need to be rebuilt" % len(case_ids_to_rebuild))
# archive forms
print("Starting with form archival")
with open("forms_archived.txt", "w+b") as forms_log:
for form in with_progress_bar(forms):
forms_log.write("%s\n" % form.form_id)
form.archive(rebuild_cases=False)
# removing data
for xform in with_progress_bar(forms):
FormProcessorInterface(xform.domain).ledger_processor.process_form_archived(xform)
# archive cases
print("Starting with case archival")
reason = "User %s forms archived for domain %s by system" % (user.raw_username, domain)
with open("cases_rebuilt.txt", "w+b") as case_log:
for case_id in with_progress_bar(case_ids_to_rebuild):
case_log.write("%s\n" % case_id)
rebuild_case_from_forms(domain, case_id, RebuildWithReason(reason=reason))
| bsd-3-clause | Python |
46b76c96309cdeb435502a7ee96c672edc2abee7 | Update binary-tree-longest-consecutive-sequence-ii.py | tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,jaredkoontz/leetcode | Python/binary-tree-longest-consecutive-sequence-ii.py | Python/binary-tree-longest-consecutive-sequence-ii.py | # Time: O(n)
# Space: O(h)
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def longestConsecutive(self, root):
"""
:type root: TreeNode
:rtype: int
"""
def longestConsecutiveHelper(root):
if not root:
return 0, 0
left_len = longestConsecutiveHelper(root.left)
right_len = longestConsecutiveHelper(root.right)
cur_inc_len, cur_dec_len = 1, 1
if root.left:
if root.left.val == root.val + 1:
cur_inc_len = max(cur_inc_len, left_len[0] + 1)
elif root.left.val == root.val - 1:
cur_dec_len = max(cur_dec_len, left_len[1] + 1)
if root.right:
if root.right.val == root.val + 1:
cur_inc_len = max(cur_inc_len, right_len[0] + 1)
elif root.right.val == root.val - 1:
cur_dec_len = max(cur_dec_len, right_len[1] + 1)
self.max_len = max(self.max_len, cur_dec_len + cur_inc_len - 1)
return cur_inc_len, cur_dec_len
self.max_len = 0
longestConsecutiveHelper(root)
return self.max_len
| # Time: O(n)
# Space: O(h)
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def longestConsecutive(self, root):
"""
:type root: TreeNode
:rtype: int
"""
def longestConsecutiveHelper(root):
if not root:
return 0
left_len = longestConsecutiveHelper(root.left)
right_len = longestConsecutiveHelper(root.right)
cur_inc_len, cur_dec_len = 1, 1
if root.left:
if root.left.val == root.val + 1:
cur_inc_len = max(cur_inc_len, left_len[0] + 1)
elif root.left.val == root.val - 1:
cur_dec_len = max(cur_dec_len, left_len[1] + 1)
if root.right:
if root.right.val == root.val + 1:
cur_inc_len = max(cur_inc_len, right_len[0] + 1)
elif root.right.val == root.val - 1:
cur_dec_len = max(cur_dec_len, right_len[1] + 1)
self.max_len = max(self.max_len, cur_dec_len + cur_inc_len - 1)
return cur_inc_len, cur_dec_len
self.max_len = 0
longestConsecutiveHelper(root)
return self.max_len
| mit | Python |
e81b1ce7536ce32e022fb3132f8468d2472b2e31 | Improve logging of openended extension | PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas | atlas/prodtask/management/commands/extendopenended.py | atlas/prodtask/management/commands/extendopenended.py | from django.core.management.base import BaseCommand, CommandError
import time
from atlas.prodtask.open_ended import check_open_ended
class Command(BaseCommand):
args = '<request_id, request_id>'
help = 'Extend open ended requests'
def handle(self, *args, **options):
self.stdout.write('Start open ended at %s'%time.ctime())
if not args:
try:
check_open_ended()
except Exception,e:
raise CommandError('Some problem during request extension: %s'%e)
self.stdout.write('Successfully finished request extension: %s'%time.ctime()) | from django.core.management.base import BaseCommand, CommandError
from atlas.prodtask.open_ended import check_open_ended
class Command(BaseCommand):
args = '<request_id, request_id>'
help = 'Extend open ended requests'
def handle(self, *args, **options):
if not args:
try:
check_open_ended()
except Exception,e:
raise CommandError('Some problem during request extension: %s'%e)
self.stdout.write('Successfully finished request extension') | apache-2.0 | Python |
0325ad9244835fac724126f918b049a145f6cdb7 | Add documentation for DNSimple | jsha/letsencrypt,stweil/letsencrypt,lmcro/letsencrypt,letsencrypt/letsencrypt,letsencrypt/letsencrypt,lmcro/letsencrypt,stweil/letsencrypt,jsha/letsencrypt | certbot-dns-dnsimple/certbot_dns_dnsimple/__init__.py | certbot-dns-dnsimple/certbot_dns_dnsimple/__init__.py | """
The `~certbot_dns_dnsimple.dns_dnsimple` plugin automates the process of
completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
subsequently removing, TXT records using the DNSimple API.
Named Arguments
---------------
======================================== =====================================
``--dns-dnsimple-credentials`` DNSimple credentials_ INI file.
(Required)
``--dns-dnsimple-propagation-seconds`` The number of seconds to wait for DNS
to propagate before asking the ACME
server to verify the DNS record.
(Default: 30)
======================================== =====================================
Credentials
-----------
Use of this plugin requires a configuration file containing DNSimple API
credentials, obtained from your DNSimple
`account page <https://dnsimple.com/user>`_.
.. code-block:: ini
:name: credentials.ini
:caption: Example credentials file:
# DNSimple API credentials used by Certbot
dns_dnsimple_token = MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAw
The path to this file can be provided interactively or using the
``--dns-dnsimple-credentials`` command-line argument. Certbot records the path
to this file for use during renewal, but does not store the file's contents.
.. caution::
You should protect these API credentials as you would the password to your
DNSimple account. Users who can read this file can use these credentials
to issue arbitrary API calls on your behalf. Users who can cause Certbot to
run using these credentials can complete a ``dns-01`` challenge to acquire
new certificates or revoke existing certificates for associated domains,
even if those domains aren't being managed by this server.
Examples
--------
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``
certbot certonly \\
--dns-dnsimple \\
--dns-dnsimple-credentials ~/.secrets/certbot/dnsimple.ini \\
-d example.com
.. code-block:: bash
:caption: To acquire a single certificate for both ``example.com`` and
``www.example.com``
certbot certonly \\
--dns-dnsimple \\
--dns-dnsimple-credentials ~/.secrets/certbot/dnsimple.ini \\
-d example.com \\
-d www.example.com
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``, waiting 60 seconds
for DNS propagation
certbot certonly \\
--dns-dnsimple \\
--dns-dnsimple-credentials ~/.secrets/certbot/dnsimple.ini \\
--dns-dnsimple-propagation-seconds 60 \\
-d example.com
"""
| """DNSimple DNS Authenticator"""
| apache-2.0 | Python |
710f6cc458948e6e404d625ac3d8372cfd1ed304 | update swarm config | cogmission/nupic.research,ThomasMiconi/htmresearch,ThomasMiconi/nupic.research,ywcui1990/htmresearch,ThomasMiconi/nupic.research,ywcui1990/htmresearch,ywcui1990/nupic.research,cogmission/nupic.research,mrcslws/htmresearch,ywcui1990/nupic.research,BoltzmannBrain/nupic.research,chanceraine/nupic.research,neuroidss/nupic.research,chanceraine/nupic.research,chanceraine/nupic.research,ThomasMiconi/htmresearch,numenta/htmresearch,marionleborgne/nupic.research,ywcui1990/htmresearch,marionleborgne/nupic.research,ywcui1990/nupic.research,numenta/htmresearch,neuroidss/nupic.research,ywcui1990/htmresearch,neuroidss/nupic.research,ywcui1990/nupic.research,numenta/htmresearch,numenta/htmresearch,BoltzmannBrain/nupic.research,ThomasMiconi/nupic.research,subutai/htmresearch,neuroidss/nupic.research,marionleborgne/nupic.research,neuroidss/nupic.research,cogmission/nupic.research,subutai/htmresearch,BoltzmannBrain/nupic.research,mrcslws/htmresearch,ThomasMiconi/htmresearch,marionleborgne/nupic.research,marionleborgne/nupic.research,ywcui1990/htmresearch,cogmission/nupic.research,ThomasMiconi/nupic.research,mrcslws/htmresearch,ThomasMiconi/htmresearch,cogmission/nupic.research,ThomasMiconi/nupic.research,ywcui1990/htmresearch,subutai/htmresearch,mrcslws/htmresearch,mrcslws/htmresearch,BoltzmannBrain/nupic.research,ThomasMiconi/nupic.research,ThomasMiconi/nupic.research,numenta/htmresearch,cogmission/nupic.research,ThomasMiconi/htmresearch,numenta/htmresearch,ThomasMiconi/nupic.research,ywcui1990/nupic.research,BoltzmannBrain/nupic.research,neuroidss/nupic.research,ThomasMiconi/htmresearch,mrcslws/htmresearch,marionleborgne/nupic.research,mrcslws/htmresearch,ThomasMiconi/htmresearch,subutai/htmresearch,BoltzmannBrain/nupic.research,numenta/htmresearch,ywcui1990/htmresearch,subutai/htmresearch,cogmission/nupic.research,subutai/htmresearch,marionleborgne/nupic.research,chanceraine/nupic.research,subutai/htmresearch,chanceraine/nupic.research,marionleborgne/nupic.research,mrcslws/htmresearch,cogmission/nupic.research,ywcui1990/nupic.research,BoltzmannBrain/nupic.research,BoltzmannBrain/nupic.research,numenta/htmresearch,neuroidss/nupic.research,ywcui1990/htmresearch,chanceraine/nupic.research,ywcui1990/nupic.research,subutai/htmresearch,ywcui1990/nupic.research,ThomasMiconi/htmresearch,neuroidss/nupic.research | sequence_prediction/continuous_sequence/swarm_description/SWARM_CONFIG_nyc_taxi.py | sequence_prediction/continuous_sequence/swarm_description/SWARM_CONFIG_nyc_taxi.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013-2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
SWARM_CONFIG = {
"includedFields": [
{
"fieldName": "timeofday",
"fieldType": "string",
"maxValue": 1500,
"minValue": 0
},
# {
# "fieldName": "dayofweek",
# "fieldType": "string"
# },
{
"fieldName": "passenger_count",
"fieldType": "int",
"maxValue": 40000,
"minValue": 0
}
],
"streamDef": {
"info": "passenger_count",
"version": 1,
"streams": [
{
"info": "passenger count",
"source": "file://data/nyc_taxi.csv",
"columns": [
"*"
],
"last_record": 5000
}
],
},
"inferenceType": "TemporalMultiStep",
"inferenceArgs": {
"predictionSteps": [
5
],
"predictedField": "passenger_count"
},
"metricWindow": 2000,
"iterationCount": -1,
"swarmSize": "large"
}
| # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013-2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
SWARM_CONFIG = {
"includedFields": [
# {
# "fieldName": "timeofday",
# "fieldType": "string",
# "maxValue": 1500,
# "minValue": 0
# },
# {
# "fieldName": "dayofweek",
# "fieldType": "string"
# },
{
"fieldName": "passenger_count",
"fieldType": "int",
"maxValue": 40000,
"minValue": 0
}
],
"streamDef": {
"info": "passenger_count",
"version": 1,
"streams": [
{
"info": "passenger count",
"source": "file://data/nyc_taxi.csv",
"columns": [
"*"
],
"last_record": 5000
}
],
},
"inferenceType": "TemporalMultiStep",
"inferenceArgs": {
"predictionSteps": [
5
],
"predictedField": "passenger_count"
},
"metricWindow": 2000,
"iterationCount": -1,
"swarmSize": "large"
}
| agpl-3.0 | Python |
4f2945a2e7891bfaa68b7e301e18d34032204127 | Stop writing to sys.stdout | tamentis/rpdb,kenmanheimer/rpdb,inirudebwoy/rpdb,coddingtonbear/ircpdb | rpdb/__init__.py | rpdb/__init__.py | """Remote Python Debugger (pdb wrapper)."""
__author__ = "Bertrand Janin <b@janin.com>"
__version__ = "0.1.3"
import pdb
import socket
import sys
class Rpdb(pdb.Pdb):
def __init__(self, addr="127.0.0.1", port=4444):
"""Initialize the socket and initialize pdb."""
# Writes to stdout are forbidden in mod_wsgi environments
try:
sys.stderr.write("pdb is running on %s:%d\n" % (addr, port))
except IOError:
pass
# Backup stdin and stdout before replacing them by the socket handle
self.old_stdout = sys.stdout
self.old_stdin = sys.stdin
# Open a 'reusable' socket to let the webapp reload on the same port
self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
self.skt.bind((addr, port))
self.skt.listen(1)
(clientsocket, address) = self.skt.accept()
handle = clientsocket.makefile('rw')
pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
sys.stdout = sys.stdin = handle
def shutdown(self):
"""Revert stdin and stdout, close the socket."""
sys.stdout = self.old_stdout
sys.stdin = self.old_stdin
self.skt.close()
self.set_continue()
def do_continue(self, arg):
"""Stop all operation on ``continue``."""
self.shutdown()
return 1
do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
def set_trace(addr="127.0.0.1", port=4444):
"""Wrapper function to keep the same import x; x.set_trace() interface.
We catch all the possible exceptions from pdb and cleanup.
"""
debugger = Rpdb(addr=addr, port=port)
try:
debugger.set_trace(sys._getframe().f_back)
except Exception as e:
print(e)
| """Remote Python Debugger (pdb wrapper)."""
__author__ = "Bertrand Janin <b@janin.com>"
__version__ = "0.1.3"
import pdb
import socket
import sys
class Rpdb(pdb.Pdb):
def __init__(self, addr="127.0.0.1", port=4444):
"""Initialize the socket and initialize pdb."""
# Writes to stdout are forbidden in mod_wsgi environments
try:
print("pdb is running on %s:%d" % (addr, port))
except IOError:
pass
# Backup stdin and stdout before replacing them by the socket handle
self.old_stdout = sys.stdout
self.old_stdin = sys.stdin
# Open a 'reusable' socket to let the webapp reload on the same port
self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
self.skt.bind((addr, port))
self.skt.listen(1)
(clientsocket, address) = self.skt.accept()
handle = clientsocket.makefile('rw')
pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
sys.stdout = sys.stdin = handle
def shutdown(self):
"""Revert stdin and stdout, close the socket."""
sys.stdout = self.old_stdout
sys.stdin = self.old_stdin
self.skt.close()
self.set_continue()
def do_continue(self, arg):
"""Stop all operation on ``continue``."""
self.shutdown()
return 1
do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
def set_trace(addr="127.0.0.1", port=4444):
"""Wrapper function to keep the same import x; x.set_trace() interface.
We catch all the possible exceptions from pdb and cleanup.
"""
debugger = Rpdb(addr=addr, port=port)
try:
debugger.set_trace(sys._getframe().f_back)
except Exception as e:
print(e)
| bsd-2-clause | Python |
a4acd7c50d9b1705b130663c23c2f0a2eaace863 | Fix a broken Last.fm url | philgyford/django-ditto,philgyford/django-ditto,philgyford/django-ditto | ditto/lastfm/urls.py | ditto/lastfm/urls.py | from django.conf.urls import url
from . import views
# The pattern for matching an Album/Artist/Track slug:
slug_chars = '[\w.,:=@&+%()$!°-]+'
urlpatterns = [
url(
regex=r"^$",
view=views.HomeView.as_view(),
name='home'
),
url(
regex=r"^library$",
view=views.ScrobbleListView.as_view(),
name='scrobble_list'
),
url(
regex=r"^library/albums$",
view=views.AlbumListView.as_view(),
name='album_list'
),
url(
regex=r"^library/artists$",
view=views.ArtistListView.as_view(),
name='artist_list'
),
url(
regex=r"^library/tracks$",
view=views.TrackListView.as_view(),
name='track_list'
),
url(
regex=r"^music/(?P<artist_slug>%s)/$" % slug_chars,
view=views.ArtistDetailView.as_view(),
name='artist_detail'
),
url(
regex=r"^music/(?P<artist_slug>%s)/\+albums/$" % slug_chars,
view=views.ArtistAlbumsView.as_view(),
name='artist_albums'
),
url(
regex=r"^music/(?P<artist_slug>%s)/(?P<album_slug>%s)/$" % (
slug_chars, slug_chars),
view=views.AlbumDetailView.as_view(),
name='album_detail'
),
url(
regex=r"^music/(?P<artist_slug>%s)/_/(?P<track_slug>%s)/$" % (
slug_chars, slug_chars),
view=views.TrackDetailView.as_view(),
name='track_detail'
),
url(
regex=r"^user/(?P<username>[a-z0-9]+)/$",
view=views.UserDetailView.as_view(),
name='user_detail'
),
]
| from django.conf.urls import url
from . import views
# The pattern for matching an Album/Artist/Track slug:
slug_chars = '[\w.,:=@&+%()$!-]+'
urlpatterns = [
url(
regex=r"^$",
view=views.HomeView.as_view(),
name='home'
),
url(
regex=r"^library$",
view=views.ScrobbleListView.as_view(),
name='scrobble_list'
),
url(
regex=r"^library/albums$",
view=views.AlbumListView.as_view(),
name='album_list'
),
url(
regex=r"^library/artists$",
view=views.ArtistListView.as_view(),
name='artist_list'
),
url(
regex=r"^library/tracks$",
view=views.TrackListView.as_view(),
name='track_list'
),
url(
regex=r"^music/(?P<artist_slug>%s)/$" % slug_chars,
view=views.ArtistDetailView.as_view(),
name='artist_detail'
),
url(
regex=r"^music/(?P<artist_slug>%s)/\+albums/$" % slug_chars,
view=views.ArtistAlbumsView.as_view(),
name='artist_albums'
),
url(
regex=r"^music/(?P<artist_slug>%s)/(?P<album_slug>%s)/$" % (
slug_chars, slug_chars),
view=views.AlbumDetailView.as_view(),
name='album_detail'
),
url(
regex=r"^music/(?P<artist_slug>%s)/_/(?P<track_slug>%s)/$" % (
slug_chars, slug_chars),
view=views.TrackDetailView.as_view(),
name='track_detail'
),
url(
regex=r"^user/(?P<username>[a-z0-9]+)/$",
view=views.UserDetailView.as_view(),
name='user_detail'
),
]
| mit | Python |
3ae76d7338c967bcf60216c5ec46f6406de45116 | Support directory as local URI | hundeboll/core,diorcety/oe-lite-core,sknsean/core,hundeboll/core,hundeboll/core,sknsean/core,diorcety/oe-lite-core,sknsean/core,hundeboll/core,sknsean/core,sknsean/core | lib/oelite/fetch/local.py | lib/oelite/fetch/local.py | import oelite.fetch
import oelite.path
import os
import hashlib
class LocalFetcher():
SUPPORTED_SCHEMES = ("file")
def __init__(self, uri, d):
if not uri.scheme in self.SUPPORTED_SCHEMES:
raise Exception(
"Scheme %s not supported by oelite.fetch.UrlFetcher"%(scheme))
self.uri = uri
if os.path.isabs(uri.location):
if not os.path.exists(uri.location):
raise oelite.fetch.LocalFileNotFound(self.uri, "file not found")
self.localpath = uri.location
d.set_input_mtime(self.localpath,
mtime=os.path.getmtime(self.localpath))
else:
self.localpath = oelite.path.which(d.get("FILESPATH_EXISTS"),
uri.location)
if not self.localpath:
raise oelite.fetch.LocalFileNotFound(self.uri, "file not found")
d.set_input_mtime(uri.location, d.get("FILESPATH"),
mtime=os.path.getmtime(self.localpath))
return
def signature(self):
try:
return self._signature
except AttributeError:
pass
if os.path.isdir(self.localpath):
raise oelite.fetch.NoSignature(self.uri, "can't compute directory signature")
m = hashlib.sha1()
m.update(open(self.localpath, "r").read())
self._signature = m.digest()
return self._signature
| import oelite.fetch
import oelite.path
import os
import hashlib
class LocalFetcher():
SUPPORTED_SCHEMES = ("file")
def __init__(self, uri, d):
if not uri.scheme in self.SUPPORTED_SCHEMES:
raise Exception(
"Scheme %s not supported by oelite.fetch.UrlFetcher"%(scheme))
self.uri = uri
if os.path.isabs(uri.location):
if not os.path.exists(uri.location):
raise oelite.fetch.LocalFileNotFound(self.uri, "file not found")
self.localpath = uri.location
d.set_input_mtime(self.localpath,
mtime=os.path.getmtime(self.localpath))
else:
self.localpath = oelite.path.which(d.get("FILESPATH_EXISTS"),
uri.location)
if not self.localpath:
raise oelite.fetch.LocalFileNotFound(self.uri, "file not found")
d.set_input_mtime(uri.location, d.get("FILESPATH"),
mtime=os.path.getmtime(self.localpath))
return
def signature(self):
try:
return self._signature
except AttributeError:
pass
m = hashlib.sha1()
m.update(open(self.localpath, "r").read())
self._signature = m.digest()
return self._signature
| mit | Python |
e21e971c67954b1d0f5ca2ee41518322f259a96b | handle case in which one swapString is empty | philippotto/Sublime-SwapStrings | SwapStrings.py | SwapStrings.py | import sublime, sublime_plugin
class SwapStringsCommand(sublime_plugin.TextCommand):
def run(self, edit, stringA=None, stringB=None):
if not stringA and not stringB:
inputView = sublime.active_window().show_input_panel(
"Specify the strings which shall be swapped. <> functions as a separator.",
"\"<>'",
self.onConfirm,
None,
None
)
inputView.run_command("select_all")
else:
view = self.view
selection = view.sel()
stringA, stringB = self.ensureOrder(stringA, stringB)
for region in selection:
if region.a == region.b:
# use entire line if regions is only a point
region = view.line(region)
regionStr = view.substr(region)
if stringB == "":
regionStr = regionStr.replace(stringA, "")
else:
swapToken = self.generateSwapToken(regionStr, stringA, stringB)
regionStr = regionStr \
.replace(stringA, swapToken) \
.replace(stringB, stringA) \
.replace(swapToken, stringB)
view.replace(edit, region, regionStr)
def ensureOrder(self, stringA, stringB):
# ensures that len(stringA) >= len(stringB)
# this is important for the edge case in which stringA is a substring of stringB
if len(stringB) > len(stringA):
stringA, stringB = stringB, stringA
return stringA, stringB
def generateSwapToken(self, regionStr, stringA, stringB):
# requirements derived by the three replacements:
# 1: uncritical since stringA >= stringB.
# 2: stringB must not be in swapToken.
# 3: swapToken must not be in stringA and not in regionStr.
# mind that stringA is not necessarily a substring of regionStr.
# choose swapToken so that stringB cannot be in swapToken
swapToken = stringB[:-1]
while swapToken in stringA + regionStr:
# extend swapToken with a character so that it isn't stringB
swapToken += chr(ord(stringB[-1]) + 1 % 255)
return swapToken
def onConfirm(self, swapString):
if "<>" not in swapString:
sublime.status_message("No <> was found for swapping strings.")
return
(a, b) = swapString.split("<>")
self.view.run_command("swap_strings", dict(stringA=a, stringB=b))
| import sublime, sublime_plugin
class SwapStringsCommand(sublime_plugin.TextCommand):
def run(self, edit, stringA=None, stringB=None):
if not stringA and not stringB:
inputView = sublime.active_window().show_input_panel(
"Specify the strings which shall be swapped. <> functions as a separator.",
"\"<>'",
self.onConfirm,
None,
None
)
inputView.run_command("select_all")
else:
view = self.view
selection = view.sel()
stringA, stringB = self.ensureOrder(stringA, stringB)
for region in selection:
if region.a == region.b:
# use entire line if regions is only a point
region = view.line(region)
regionStr = view.substr(region)
swapToken = self.generateSwapToken(regionStr, stringA, stringB)
regionStr = regionStr \
.replace(stringA, swapToken) \
.replace(stringB, stringA) \
.replace(swapToken, stringB)
view.replace(edit, region, regionStr)
def ensureOrder(self, stringA, stringB):
# ensures that len(stringA) >= len(stringB)
# this is important for the edge case in which stringA is a substring of stringB
if len(stringB) > len(stringA):
stringA, stringB = stringB, stringA
return stringA, stringB
def generateSwapToken(self, regionStr, stringA, stringB):
# requirements derived by the three replacements:
# 1: uncritical since stringA >= stringB.
# 2: stringB must not be in swapToken.
# 3: swapToken must not be in stringA and not in regionStr.
# mind that stringA is not necessarily a substring of regionStr.
# choose swapToken so that stringB cannot be in swapToken
swapToken = stringB[:-1]
while swapToken in stringA + regionStr:
# extend swapToken with a character so that it isn't stringB
swapToken += chr(ord(stringB[-1]) + 1 % 255)
return swapToken
def onConfirm(self, swapString):
(a, b) = swapString.split("<>")
self.view.run_command("swap_strings", dict(stringA=a, stringB=b))
| mit | Python |
29393bef20ab33da2844571dd50bd73308adf5e2 | use time.clock | cyberdelia/astrolabe,cyberdelia/astrolabe | astrolabe/instant.py | astrolabe/instant.py | from time import clock
CONVERSION_FACTOR = 1
def instant():
return clock()
| from time import time
CONVERSION_FACTOR = 1
def instant():
return time()
| mit | Python |
6632157febfed7ce99fa1aaecb72393b0301d3aa | Make empty migration authent 3 | makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek | geotrek/authent/migrations/0003_auto_20181203_1518.py | geotrek/authent/migrations/0003_auto_20181203_1518.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('authent', '0002_auto_20181107_1620'),
]
operations = [
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
from django.conf import settings
def add_permissions(apps, schema_editor):
if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
GroupModel = apps.get_model('auth', 'Group')
PermissionModel = apps.get_model('auth', 'Permission')
ContentTypeModel = apps.get_model("contenttypes", "ContentType")
type_permissions = ['add', 'change', 'change_geom', 'delete', 'export', 'read']
content_type_signage = ContentTypeModel.objects.get(model='signage')
content_type_infrastructure = ContentTypeModel.objects.get(model='infrastructure')
for user in UserModel.objects.all():
for type_perm in type_permissions:
if user.user_permissions.filter(codename='%s_infrastructure' % type_perm).exists():
user.user_permissions.add(PermissionModel.objects.get(
codename='%s_infrastructure' % type_perm, content_type=content_type_infrastructure))
if user.user_permissions.filter(codename='%s_signage' % type_perm).exists():
user.user_permissions.add(PermissionModel.objects.get(
codename='%s_signage' % type_perm, content_type=content_type_signage))
for group in GroupModel.objects.all():
for type_perm in type_permissions:
if group.permissions.filter(codename='%s_infrastructure' % type_perm).exists():
group.permissions.add(PermissionModel.objects.get(
codename='%s_infrastructure' % type_perm, content_type=content_type_infrastructure))
if group.permissions.filter(codename='%s_signage' % type_perm).exists():
group.permissions.add(PermissionModel.objects.get(
codename='%s_signage' % type_perm, content_type=content_type_signage))
PermissionModel.objects.filter(content_type__model='baseinfrastructure').delete()
class Migration(migrations.Migration):
dependencies = [
('authent', '0002_auto_20181107_1620'),
]
operations = [
migrations.RunPython(add_permissions)
]
| bsd-2-clause | Python |
f85202d64cf2520b4ad9e2cca77d6b49336a044a | Bump version to 3.5.0a17 | platformio/platformio,platformio/platformio-core,platformio/platformio-core | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (3, 5, "0a17")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("An open source ecosystem for IoT development. "
"Cross-platform build system and library manager. "
"Continuous and IDE integration. "
"Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (3, 5, "0a16")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("An open source ecosystem for IoT development. "
"Cross-platform build system and library manager. "
"Continuous and IDE integration. "
"Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
| apache-2.0 | Python |
b14abeff48ae46c849848bec15052b448a24634a | Bump version to 4.0.0a13 | platformio/platformio-core,platformio/platformio-core,platformio/platformio | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 0, "0a13")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 0, "0a12")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
| apache-2.0 | Python |
4efae77c0c290cae919477c438cb1f02d86837c9 | convert to new open source trigger | motmot/strokelitude,motmot/strokelitude-jfi-emulator | plot_raw_timeseries.py | plot_raw_timeseries.py | import pylab
import numpy as np
import sys
import tables
import motmot.fview_ext_trig.easy_decode as easy_decode
import matplotlib.ticker as mticker
if 1:
fname = sys.argv[1]
h5 = tables.openFile(fname,mode='r')
stroke_data=h5.root.stroke_data[:]
stroke_times = stroke_data['trigger_timestamp']
time_data=h5.root.time_data[:]
gain,offset,resids = easy_decode.get_gain_offset_resids(
input=time_data['framestamp'],
output=time_data['timestamp'])
top = h5.root.time_data.attrs.top
wordstream = h5.root.ain_wordstream[:]
wordstream = wordstream['word'] # extract into normal numpy array
r=easy_decode.easy_decode(wordstream,gain,offset,top)
chans = r.dtype.fields.keys()
chans.sort()
chans.remove('timestamps')
names = h5.root.ain_wordstream.attrs.channel_names
if 0:
Vcc = h5.root.ain_wordstream.attrs.Vcc
else:
Vcc=3.3
print 'Vcc',Vcc
ADCmax = (2**10)-1
analog_gain = Vcc/ADCmax
t0 = r['timestamps'][0]
N_subplots = len(chans)+2
ax=None
for i in range(N_subplots):
ax = pylab.subplot(N_subplots,1,i+1,sharex=ax)
if i < len(chans):
ax.plot(r['timestamps']-t0,r[chans[i]]*analog_gain,
label=names[int(chans[i])])
ax.set_ylabel('V')
ax.legend()
elif i == len(chans):
ax.plot(stroke_times-t0,stroke_data['right'],label='R')
ax.set_ylabel('R (degrees)')
ax.legend()
elif i == len(chans)+1:
ax.plot(stroke_times-t0,stroke_data['left'],label='L')
ax.set_ylabel('L (degrees)')
ax.legend()
ax.xaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
ax.yaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
ax.set_xlabel('Time (sec)')
pylab.show()
| import pylab
import numpy as np
import sys
import tables
import fview_ext_trig.easy_decode as easy_decode
import matplotlib.ticker as mticker
if 1:
fname = sys.argv[1]
h5 = tables.openFile(fname,mode='r')
stroke_data=h5.root.stroke_data[:]
stroke_times = stroke_data['trigger_timestamp']
time_data=h5.root.time_data[:]
gain,offset,resids = easy_decode.get_gain_offset_resids(
input=time_data['framestamp'],
output=time_data['timestamp'])
top = h5.root.time_data.attrs.top
wordstream = h5.root.ain_wordstream[:]
wordstream = wordstream['word'] # extract into normal numpy array
r=easy_decode.easy_decode(wordstream,gain,offset,top)
chans = r.dtype.fields.keys()
chans.sort()
chans.remove('timestamps')
names = h5.root.ain_wordstream.attrs.channel_names
if 0:
Vcc = h5.root.ain_wordstream.attrs.Vcc
else:
Vcc=3.3
print 'Vcc',Vcc
ADCmax = (2**10)-1
analog_gain = Vcc/ADCmax
t0 = r['timestamps'][0]
N_subplots = len(chans)+2
ax=None
for i in range(N_subplots):
ax = pylab.subplot(N_subplots,1,i+1,sharex=ax)
if i < len(chans):
ax.plot(r['timestamps']-t0,r[chans[i]]*analog_gain,
label=names[int(chans[i])])
ax.set_ylabel('V')
ax.legend()
elif i == len(chans):
ax.plot(stroke_times-t0,stroke_data['right'],label='R')
ax.set_ylabel('R (degrees)')
ax.legend()
elif i == len(chans)+1:
ax.plot(stroke_times-t0,stroke_data['left'],label='L')
ax.set_ylabel('L (degrees)')
ax.legend()
ax.xaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
ax.yaxis.set_major_formatter(mticker.FormatStrFormatter("%s"))
ax.set_xlabel('Time (sec)')
pylab.show()
| bsd-3-clause | Python |
284c82863a1bdf8c192ca04ca424dadcca1c7bc7 | scale the frequency plot correctly after decimation | emeb/iceRadio,emeb/iceRadio,emeb/iceRadio,emeb/iceRadio | FPGA/rxadc_2/python/tst_cic.py | FPGA/rxadc_2/python/tst_cic.py | #!/usr/bin/python3
#
# CIC decimator test bench
#
# 07-23-2015 E. Brombaugh
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import scipy.signal as signal
from scipy.fftpack import fft, ifft, fftfreq, fftshift
from ddc import cic_dec
# generate a signal
data_len = 2**16
Fs = 40e6
Fc = 20e3
data_bits = 12
data_scl = 2**(data_bits-1)-1
t = np.arange(data_len)/Fs
data_in = np.floor(data_scl * np.sin(2*np.pi*Fc*t) + 0.5)
# set system parameters and create an instance of the cic
cic_stages = 4;
cic_rate = 256;
uut = cic_dec(cic_stages, cic_rate, data_bits)
# run the model
cic_out = uut.calc(data_in)
# prepare to plot
if 0:
data = data_in
rate = Fs
else:
data = cic_out
rate = Fs/cic_rate
data_len = len(data)
t = np.arange(data_len)/rate
# plot of time
fig = plt.figure(1)
plt.plot(t, np.real(data))
plt.grid()
plt.xlabel("Time")
plt.ylabel("data")
plt.title("sinusoid - time")
# plot of frequency
fig = plt.figure(2)
f = rate * fftshift(fftfreq(data_len))/1e6
win = signal.blackmanharris(data_len)
data_bhwin = data * win
bh_gain = sum(win)/data_len
data_dB = 20*np.log10(np.abs(fftshift(fft(data_bhwin)))/
(data_len*(data_scl/2)*bh_gain))
plt.plot(f, data_dB)
plt.grid()
plt.xlabel("Frequency (MHz)")
plt.ylabel("dB")
plt.title("sinusoid - freq")
plt.xlim((0, (rate/1e6)/2))
plt.show()
| #!/usr/bin/python3
#
# CIC decimator test bench
#
# 07-23-2015 E. Brombaugh
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import scipy.signal as signal
from scipy.fftpack import fft, ifft, fftfreq, fftshift
from ddc import cic_dec
# generate a signal
data_len = 2**16
Fs = 40e6
Fc = 20e3
data_bits = 12
data_scl = 2**(data_bits-1)-1
t = np.arange(data_len)/Fs
data_in = np.floor(data_scl * np.sin(2*np.pi*Fc*t) + 0.5)
# set system parameters and create an instance of the cic
cic_stages = 4;
cic_rate = 256;
uut = cic_dec(cic_stages, cic_rate, data_bits)
# run the model
cic_out = uut.calc(data_in)
# prepare to plot
if 0:
data = data_in
rate = Fs
else:
data = cic_out
rate = Fs/cic_rate
data_len = len(data)
t = np.arange(data_len)/rate
# plot of time
fig = plt.figure(1)
plt.plot(t, np.real(data))
plt.grid()
plt.xlabel("Time")
plt.ylabel("data")
plt.title("sinusoid - time")
# plot of frequency
fig = plt.figure(2)
f = Fs * fftshift(fftfreq(data_len))/1e6
win = signal.blackmanharris(data_len)
data_bhwin = data * win
bh_gain = sum(win)/data_len
data_dB = 20*np.log10(np.abs(fftshift(fft(data_bhwin)))/
(data_len*(data_scl/2)*bh_gain))
plt.plot(f, data_dB)
plt.grid()
plt.xlabel("Frequency (MHz)")
plt.ylabel("dB")
plt.title("sinusoid - freq")
plt.xlim((0, (Fs/1e6)/2))
plt.show()
| mit | Python |
2fbf927113aaaeae9ec533814609d5c835d4604a | check that files are written | salilab/rmf,salilab/rmf,salilab/rmf,salilab/rmf | test/test_closing.py | test/test_closing.py | #!/usr/bin/python
import unittest
import RMF
import shutil
class Tests(unittest.TestCase):
def _show(self, g):
for i in range(0, g.get_number_of_children()):
print i, g.get_child_name(i), g.get_child_is_group(i)
"""Test the python code"""
def test_perturbed(self):
"""Test closing an RMF file"""
for suffix in ["rmf", "rmf2"]:
f= RMF.create_rmf_file(RMF._get_temporary_file_path("test_file."+suffix))
r= f.get_root_node()
print r.get_type()
sc= f.get_category("sequence")
ik= f.get_int_key(sc, "ik0")
f.set_current_frame(0)
r.set_value(ik, 1)
del r
del f
del ik
del sc
names= RMF.get_open_hdf5_handle_names()
print names
self.assertEqual(len(names), 0)
def test_perturbed_2(self):
"""Test reopening an RMF file"""
for suffix in ["rmf", "rmf2"]:
name=RMF._get_temporary_file_path("test_file."+suffix)
print name
f= RMF.create_rmf_file(name)
r= f.get_root_node()
print r.get_type()
sc= f.get_category("sequence")
ik= f.get_int_key(sc, "ik0")
f.set_current_frame(0)
r.set_value(ik, 1)
del f
del r
f= RMF.open_rmf_file_read_only(name)
r= f.get_root_node()
print r.get_type()
sc= f.get_category("sequence")
ik= f.get_int_key(sc, "ik0")
f.set_current_frame(0)
self.assertEqual(r.get_value(ik), 1)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
import unittest
import RMF
import shutil
class GenericTest(unittest.TestCase):
def _show(self, g):
for i in range(0, g.get_number_of_children()):
print i, g.get_child_name(i), g.get_child_is_group(i)
"""Test the python code"""
def test_perturbed(self):
"""Test closing an RMF file"""
for suffix in ["rmf", "rmf2"]:
f= RMF.create_rmf_file(RMF._get_temporary_file_path("test_file."+suffix))
r= f.get_root_node()
print r.get_type()
sc= f.get_category("sequence")
ik= f.get_int_key(sc, "ik0")
f.set_current_frame(0)
r.set_value(ik, 1)
del r
del f
del ik
del sc
names= RMF.get_open_hdf5_handle_names()
print names
self.assertEqual(len(names), 0)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
a03d406bd61d45ca4ca3f7367146d9a08d81d4c0 | test boundary cases | abacusresearch/gitflow,abacusresearch/gitflow | test/unit/version.py | test/unit/version.py | from gitflow import version
from gitflow.version import VersionConfig
config = VersionConfig()
config.qualifiers = ['alpha', 'beta']
def test_major_increment():
assert version.version_bump_major(config, "0.0.0").value == "1.0.0-alpha.1"
assert version.version_bump_major(config, "1.0.0-beta.4").value == "2.0.0-alpha.1"
def test_minor_increment():
assert version.version_bump_minor(config, "0.0.0").value == "0.1.0-alpha.1"
assert version.version_bump_minor(config, "1.0.0-beta.4").value == "1.1.0-alpha.1"
def test_patch_increment():
assert version.version_bump_patch(config, "0.0.0").value == "0.0.1-alpha.1"
assert version.version_bump_patch(config, "1.0.5").value == "1.0.6-alpha.1"
assert version.version_bump_patch(config, "1.0.0-beta.4").value == "1.0.1-alpha.1"
def test_qualifier_increment():
assert version.version_bump_qualifier(config, "1.0.0").value is None
assert version.version_bump_qualifier(config, "1.0.0-alpha.4").value == "1.0.0-beta.1"
assert version.version_bump_qualifier(config, "1.0.0-beta.4").value is None
def test_pre_release_increment():
assert version.version_bump_prerelease(config, "1.0.0").value is None
assert version.version_bump_prerelease(config, "1.0.0-alpha").value is None
assert version.version_bump_prerelease(config, "1.0.0-alpha.0").value == "1.0.0-alpha.1"
assert version.version_bump_prerelease(config, "1.0.0-alpha.4").value == "1.0.0-alpha.5"
def test_increment_to_release():
assert version.version_bump_to_release(config, "1.0.0-alpha.4").value == "1.0.0"
assert version.version_bump_to_release(config, "1.0.0").value is None
| from gitflow import version
from gitflow.version import VersionConfig
config = VersionConfig()
config.qualifiers = ['alpha', 'beta']
def test_major_increment():
assert version.version_bump_major(config, "0.0.0").value == "1.0.0-alpha.1"
assert version.version_bump_major(config, "1.0.0-beta.4").value == "2.0.0-alpha.1"
def test_minor_increment():
assert version.version_bump_minor(config, "0.0.0").value == "0.1.0-alpha.1"
assert version.version_bump_minor(config, "1.0.0-beta.4").value == "1.1.0-alpha.1"
def test_patch_increment():
assert version.version_bump_patch(config, "0.0.0").value == "0.0.1-alpha.1"
assert version.version_bump_patch(config, "1.0.0-beta.4").value == "1.0.1-alpha.1"
def test_qualifier_increment():
assert version.version_bump_qualifier(config, "1.0.0").value is None
assert version.version_bump_qualifier(config, "1.0.0-alpha.4").value == "1.0.0-beta.1"
def test_pre_release_increment():
assert version.version_bump_prerelease(config, "1.0.0").value is None
assert version.version_bump_prerelease(config, "1.0.0-alpha.4").value == "1.0.0-alpha.5"
def test_increment_to_release():
assert version.version_bump_to_release(config, "1.0.0-alpha.4").value == "1.0.0"
| mit | Python |
560cefff06f3e8b288c9ceecf6c84438c4d9783b | implement -z mode for statistics | narayandesai/diy-lsi | diylsi/statistics.py | diylsi/statistics.py | import argparse, json, multiprocessing, os, re, sys
# all functions in this library take a disk info dict as input
def poll_smart_status(diskinfo):
data = os.popen("/opt/omni/sbin/smartctl -a -d sat,12 /devices/scsi_vhci/disk@g%s:a,raw |grep self-assess" % diskinfo['guid']).readlines()[0]
#data = os.popen("/opt/omni/sbin/smartctl -a -d scsi /devices/scsi_vhci/disk@g%s:a,raw |grep self-assess" % diskinfo['guid']).readlines()[0]
return {'name':diskinfo['name'], 'smart-status': data.strip().split(':')[-1].strip()}
iomap = {'Soft Errors':'soft', 'Transport Errors':'transport', 'Media Error':'media', 'Device Not Ready':'not-ready', 'No Device':'no-device', 'Recoverable':'recoverable', 'Illegal Request':'illegal', 'Predictive Failure Analysis':'predictive'}
def poll_iostat(diskinfo):
data = " ".join([x.strip() for x in os.popen("iostat -En %s" % diskinfo['name']).readlines()])
ret = dict([('name', diskinfo['name'])])
for key, val in iomap.iteritems():
match = re.compile('.*%s: (\d+)' % key).match(data)
if match:
ret[val] = int(match.group(1))
return ret
def get_statistics(diskinfo):
data = poll_smart_status(diskinfo)
data.update(poll_iostat(diskinfo))
return data
def filter_zero_stats(disklist):
hard = ['transport', 'media', 'not-ready', 'no-device']
nzdisks = list()
for disk in disklist:
if disk['smart-status'] != 'PASSED':
nzdisks.append(disk)
continue
for field in hard:
if disk[field] != 0:
nzdisks.append(disk)
continue
if disk['soft'] != disk['recoverable']:
nzdisks.append(disk)
continue
return nzdisks
def do_statistics():
parser = argparse.ArgumentParser(prog='diy-lsi stats',
description='disk statistics info')
parser.add_argument('-z', dest='zero', help='display non-zero counters', action='store_true', default=False)
parser.add_argument('disks', help='disk name', nargs='*')
config = parser.parse_args(sys.argv[2:])
hwdb = json.load(open('/etc/diy-lsi/hardware.js'))
if config.disks:
disks = [x for x in hwdb if x.get('name') in config.disks]
else:
disks = [x for x in hwdb if x.has_key('name') and x.has_key('guid')]
pool = multiprocessing.Pool(64)
results = pool.map(get_statistics, disks)
if config.zero:
results = filter_zero_stats(results)
for stat in results:
print stat
| import argparse, json, multiprocessing, os, re, sys
# all functions in this library take a disk info dict as input
def poll_smart_status(diskinfo):
data = os.popen("/opt/omni/sbin/smartctl -a -d sat,12 /devices/scsi_vhci/disk@g%s:a,raw |grep self-assess" % diskinfo['guid']).readlines()[0]
#data = os.popen("/opt/omni/sbin/smartctl -a -d scsi /devices/scsi_vhci/disk@g%s:a,raw |grep self-assess" % diskinfo['guid']).readlines()[0]
return {'name':diskinfo['name'], 'smart-status': data.strip().split(':')[-1].strip()}
iomap = {'Soft Errors':'soft', 'Transport Errors':'transport', 'Media Error':'media', 'Device Not Ready':'not-ready', 'No Device':'no-device', 'Recoverable':'recoverable', 'Illegal Request':'illegal', 'Predictive Failure Analysis':'predictive'}
def poll_iostat(diskinfo):
data = " ".join([x.strip() for x in os.popen("iostat -En %s" % diskinfo['name']).readlines()])
ret = dict([('name', diskinfo['name'])])
for key, val in iomap.iteritems():
match = re.compile('.*%s: (\d+)' % key).match(data)
if match:
ret[val] = int(match.group(1))
return ret
def get_statistics(diskinfo):
data = poll_smart_status(diskinfo)
data.update(poll_iostat(diskinfo))
return data
def do_statistics():
parser = argparse.ArgumentParser(prog='diy-lsi stats',
description='disk statistics info')
parser.add_argument('disks', help='disk name', nargs='*')
config = parser.parse_args(sys.argv[2:])
hwdb = json.load(open('/etc/diy-lsi/hardware.js'))
if config.disks:
disks = [x for x in hwdb if x.get('name') in config.disks]
else:
disks = [x for x in hwdb if x.has_key('name') and x.has_key('guid')]
pool = multiprocessing.Pool(16)
results = pool.map(get_statistics, disks)
for stat in results:
print stat
| bsd-3-clause | Python |
40172c5dd09435aad8aff1083903810ccddff29b | add searching and paging to the data endpoint | BD2KGenomics/brca-website,BD2KGenomics/brca-website,BD2KGenomics/brca-website | django/data/views.py | django/data/views.py | import json
from operator import __or__
from django.core import serializers
from django.http import JsonResponse
from django.db.models import Q
from .models import Variant
def index(request):
order_by = request.GET.get('order_by')
direction = request.GET.get('direction')
page_size = int(request.GET.get('page_size'))
page_num = int(request.GET.get('page_num'))
search_term = request.GET.get('search_term')
search_columns = request.GET.getlist('search_column')
filters = request.GET.getlist('filter')
filterValues = request.GET.getlist('filterValue')
query = Variant.objects.values()
# if there are multiple filters given then AND them:
# the row must match all the filters
if filters:
query = query.filter(**dict(zip(filters,filterValues)))
# if there are multiple search columns given then OR them:
# the row must match in at least one column
if search_term:
query_list = (Q(**{column+'__icontains':search_term}) for column in search_columns)
query = query.filter(reduce(__or__,query_list))
# count the number of rows now before paginating
count = query.count()
if order_by:
if direction == 'descending':
order_by = '-' + order_by
query = query.order_by(order_by)
if page_size:
start = page_size * page_num
end = start + page_size
query = query[start:end]
# call list() now to evaluate the query
response = JsonResponse({'count':count, 'data':list(query)})
response['Access-Control-Allow-Origin'] = '*'
return response
| import json
from django.core import serializers
from django.http import JsonResponse
from .models import Variant
def index(request):
# Convert django's model representation to the format
# expected by the frontend
header = map(lambda field: field.name, Variant._meta.get_fields())
rows = list(Variant.objects.values_list())
data = {'header': header, 'rows': rows}
response = JsonResponse(data)
response['Access-Control-Allow-Origin'] = '*'
return response
| apache-2.0 | Python |
723b405b1defad5d6284d99bb3af4f3c224844a8 | Update ER docstring | skggm/skggm,skggm/skggm | inverse_covariance/profiling/erdos_renyi_graph.py | inverse_covariance/profiling/erdos_renyi_graph.py | from __future__ import absolute_import
import numpy as np
from sklearn.datasets import make_sparse_spd_matrix
from .graphs import Graph
class ErdosRenyiGraph(Graph):
"""Returns the adjacency matrix for Erdos-Renyi network via .create().
Parameters
-----------
spd_low : float (0, 1)
Equivalent to make_sparse_spd_matrix `smallest_coef`
spd_high : float (0, 1)
Equivalent to make_sparse_spd_matrix `largest_coef`
seed : int
Seed for np.random.RandomState seed. (default=1)
"""
def __init__(self, spd_low=0.7, spd_high=0.7, **kwargs):
self.spd_low = spd_low
self.spd_high = spd_high
super(ErdosRenyiGraph, self).__init__(**kwargs)
def create(self, n_features, alpha):
"""Build a new graph.
Parameters
-----------
n_features : int
alpha : float (0,1)
The complexity / sparsity factor.
This is (1 - alpha_0) in sklearn.datasets.make_sparse_spd_matrix
where alpha_0 is the probability that a coefficient is zero.
Returns
-----------
(n_features, n_features) matrices: covariance, precision, adjacency
"""
adjacency = make_sparse_spd_matrix(n_features,
alpha=np.abs(1.0 - alpha),
smallest_coef=self.spd_low,
largest_coef=self.spd_high,
random_state=self.prng)
precision = self.to_precision(adjacency)
covariance = self.to_covariance(precision)
return covariance, precision, adjacency
| from __future__ import absolute_import
import numpy as np
from sklearn.datasets import make_sparse_spd_matrix
from .graphs import Graph
class ErdosRenyiGraph(Graph):
"""Returns the adjacency matrix for Erdos-Renyi network via .create().
Parameters
-----------
spd_low : float (0, 1)
Equivalent to make_sparse_spd_matrix `smallest_coef`
spd_high : float (0, 1)
Equivalent to make_sparse_spd_matrix `largest_coef`
seed : int
Seed for np.random.RandomState seed. (default=1)
"""
def __init__(self, spd_low=0.7, spd_high=0.7, **kwargs):
self.spd_low = spd_low
self.spd_high = spd_high
super(ErdosRenyiGraph, self).__init__(**kwargs)
def create(self, n_features, alpha):
"""Build a new graph.
Parameters
-----------
n_features : int
alpha : float (0,1)
# TODO: Better comment on this parameter.
The complexity / sparsity factor.
Returns
-----------
(n_features, n_features) matrices: covariance, precision, adjacency
"""
adjacency = make_sparse_spd_matrix(n_features,
alpha=np.abs(1.0 - alpha),
smallest_coef=self.spd_low,
largest_coef=self.spd_high,
random_state=self.prng)
precision = self.to_precision(adjacency)
covariance = self.to_covariance(precision)
return covariance, precision, adjacency
| mit | Python |
6b1df5bdc1d89fc3ba008eab2d2955e2234de317 | make trailing slash on authenticate URLs optional | whitews/ReFlow,whitews/ReFlow,whitews/ReFlow | authenticate/urls.py | authenticate/urls.py | from django.conf.urls import patterns, url
__author__ = 'swhite'
urlpatterns = patterns('authenticate.views',
url(r'^login/?$', 'login_view', name="login"),
url(r'^logout/?$', 'logout_view', name="logout"),
url(r'^login_failed/?$', 'login_failed', name="login_failed"),
)
| from django.conf.urls import patterns, url
__author__ = 'swhite'
urlpatterns = patterns('authenticate.views',
url(r'^login/$', 'login_view', name="login"),
url(r'^logout/$', 'logout_view', name="logout"),
url(r'^login_failed/$', 'login_failed', name="login_failed"),
)
| bsd-3-clause | Python |
21e238fb49338992f846cfa22ed5638325cbd425 | Correct dependent migration | hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2 | jobs/migrations/0028_promote_pbf_export_format.py | jobs/migrations/0028_promote_pbf_export_format.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ..models import ExportFormat
class Migration(migrations.Migration):
def promote_pbf_export_format(apps, schema_editor):
ExportFormat = apps.get_model('jobs', 'ExportFormat')
ExportFormat.objects.create(name='PBF Format', description='OSM PBF',
slug='PBF')
dependencies = [
('jobs', '0001_auto_20151003_1441'),
]
operations = [
migrations.RunPython(promote_pbf_export_format),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ..models import ExportFormat
class Migration(migrations.Migration):
def promote_pbf_export_format(apps, schema_editor):
ExportFormat = apps.get_model('jobs', 'ExportFormat')
ExportFormat.objects.create(name='PBF Format', description='OSM PBF',
slug='PBF')
dependencies = [
('jobs', '0003_auto_20151027_1807'),
]
operations = [
migrations.RunPython(promote_pbf_export_format),
]
| bsd-3-clause | Python |
08d60c54fb92c1c0092bb071e3ca964d7569fd5c | Add some error handling that will output a formatted message. | sunlightlabs/python-sunlight,BrianPainter/python-sunlight | sunlight/cli.py | sunlight/cli.py | import json
import itertools
from clint import arguments
from clint.textui import puts, puts_err, indent, colored
import sunlight
def main():
args = arguments.Args()
services = sunlight.available_services()
service = services.get(args.get(0), None)
if service is not None:
available_methods = [
m for m in dir(service) if not m.startswith('_') and m != 'get' and m != 'is_pageable'
]
if args.get(1) in available_methods:
params = dict([
(f.strip('--'), args.value_after(f)) for f in args.flags.all
])
fn_args = [g.split(',') for g in args.grouped.get('_')[2:]]
fn_args = list(itertools.chain.from_iterable(fn_args))
try:
resp = getattr(service, args.get(1))(*fn_args, **params)
except Exception as e:
error_name = e.__class__.__name__ if e.__class__.__name__ != 'type' else 'Error'
puts_err(colored.red("{}:".format(error_name)))
with indent(4):
puts_err(colored.yellow(e.message.decode()))
return
meta = getattr(resp, '_meta', None)
if meta:
puts(colored.yellow(json.dumps(meta, indent=2)))
puts(colored.blue(json.dumps(resp, indent=2) + '\n'))
else:
help(methods=available_methods) # missing or invalid method param
else:
help(services=services) # missing or invalid service parameter
def help(services=None, methods=None):
puts_err("Usage: sunlight <service> <method> [<args>, ...]")
if services:
puts_err("Available services:")
with indent(4):
for s in services:
puts_err(s)
if methods:
puts_err("Available methods:")
with indent(4):
for m in methods:
puts_err(m)
if __name__ == "__main__":
main()
| import json
import itertools
from clint import arguments
from clint.textui import puts, puts_err, indent, colored
import sunlight
def main():
args = arguments.Args()
services = sunlight.available_services()
service = services.get(args.get(0), None)
if service is not None:
available_methods = [
m for m in dir(service) if not m.startswith('_') and m != 'get' and m != 'is_pageable'
]
if args.get(1) in available_methods:
params = dict([
(f.strip('--'), args.value_after(f)) for f in args.flags.all
])
fn_args = [g.split(',') for g in args.grouped.get('_')[2:]]
fn_args = list(itertools.chain.from_iterable(fn_args))
resp = getattr(service, args.get(1))(*fn_args, **params)
meta = getattr(resp, '_meta', None)
if meta:
puts(colored.yellow(json.dumps(meta, indent=2)))
puts(colored.blue(json.dumps(resp, indent=2) + '\n'))
else:
help(methods=available_methods) # missing or invalid method param
else:
help(services=services) # missing or invalid service parameter
def help(services=None, methods=None):
puts_err("Usage: sunlight <service> <method> [<args>, ...]")
if services:
puts_err("Available services:")
with indent(4):
for s in services:
puts_err(s)
if methods:
puts_err("Available methods:")
with indent(4):
for m in methods:
puts_err(m)
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
092c3205a475d8230a113b798f89e754d0902a59 | remove superflous list() | dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy | sunpy/io/jp2.py | sunpy/io/jp2.py | """JPEG 2000 File Reader"""
from __future__ import absolute_import, division, print_function
__author__ = "Keith Hughitt"
__email__ = "keith.hughitt@nasa.gov"
from xml.etree import cElementTree as ET
from glymur import Jp2k
from sunpy.util.xml import xml_to_dict
from sunpy.io.header import FileHeader
__all__ = ['read', 'get_header', 'write']
def read(filepath):
"""
Reads a JPEG2000 file
Parameters
----------
filepath : `str`
The file to be read
Returns
-------
pairs : `list`
A list of (data, header) tuples
"""
header = get_header(filepath)
data = Jp2k(filepath).read()[::-1]
return [(data, header[0])]
def get_header(filepath):
"""
Reads the header from the file
Parameters
----------
filepath : `str`
The file to be read
Returns
-------
headers : list
A list of headers read from the file
"""
jp2 = Jp2k(filepath)
xml_box = [box for box in jp2.box if box.box_id == 'xml ']
xmlstring = ET.tostring(xml_box[0].xml.find('fits'))
pydict = xml_to_dict(xmlstring)["fits"]
# Fix types
for k, v in pydict.items():
if v.isdigit():
pydict[k] = int(v)
elif _is_float(v):
pydict[k] = float(v)
# Remove newlines from comment
if 'comment' in pydict:
pydict['comment'] = pydict['comment'].replace("\n", "")
return [FileHeader(pydict)]
def write(fname, data, header):
"""
Place holder for required file writer
"""
raise NotImplementedError("No jp2 writer is implemented")
def _is_float(s):
"""Check to see if a string value is a valid float"""
try:
float(s)
return True
except ValueError:
return False
| """JPEG 2000 File Reader"""
from __future__ import absolute_import, division, print_function
__author__ = "Keith Hughitt"
__email__ = "keith.hughitt@nasa.gov"
from xml.etree import cElementTree as ET
from glymur import Jp2k
from sunpy.util.xml import xml_to_dict
from sunpy.io.header import FileHeader
__all__ = ['read', 'get_header', 'write']
def read(filepath):
"""
Reads a JPEG2000 file
Parameters
----------
filepath : `str`
The file to be read
Returns
-------
pairs : `list`
A list of (data, header) tuples
"""
header = get_header(filepath)
data = Jp2k(filepath).read()[::-1]
return [(data, header[0])]
def get_header(filepath):
"""
Reads the header from the file
Parameters
----------
filepath : `str`
The file to be read
Returns
-------
headers : list
A list of headers read from the file
"""
jp2 = Jp2k(filepath)
xml_box = [box for box in jp2.box if box.box_id == 'xml ']
xmlstring = ET.tostring(xml_box[0].xml.find('fits'))
pydict = xml_to_dict(xmlstring)["fits"]
# Fix types
for k, v in list(pydict.items()):
if v.isdigit():
pydict[k] = int(v)
elif _is_float(v):
pydict[k] = float(v)
# Remove newlines from comment
if 'comment' in pydict:
pydict['comment'] = pydict['comment'].replace("\n", "")
return [FileHeader(pydict)]
def write(fname, data, header):
"""
Place holder for required file writer
"""
raise NotImplementedError("No jp2 writer is implemented")
def _is_float(s):
"""Check to see if a string value is a valid float"""
try:
float(s)
return True
except ValueError:
return False
| bsd-2-clause | Python |
735debaba58c9b09d393279391eda4955df1312a | move repo cloning into setUp | GoogleChrome/wptdashboard,GoogleChrome/wptdashboard,GoogleChrome/wptdashboard | run/shas_test.py | run/shas_test.py | #!/usr/bin/env python
# Copyright 2017 The WPT Dashboard Project. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import mock
import os
import shas
import shutil
import subprocess
import unittest
from datetime import date
class TestSHAFinder(unittest.TestCase):
here = os.path.dirname(__file__)
wptd_dir = os.path.join(here, '../')
target_dir = os.path.abspath(
os.path.join(os.path.abspath(wptd_dir), '../', 'wptdashboard-temp')
)
def setUp(self):
# This is only necessary for local development environments
# that are unlikely to have been cloned with an explicit --depth
if os.path.exists(self.target_dir):
shutil.rmtree(self.target_dir)
command = [
'git', 'clone', '--depth', '1', 'https://github.com/w3c/wptdashboard',
self.target_dir
]
return_code = subprocess.check_call(command, cwd=self.wptd_dir)
assert return_code == 0, (
'Got non-0 return code: %d from command %s' % (return_code, command)
)
def test_nov_21st(self):
# Travis only pulls git history depth 50 by default
command = [
'git',
'fetch',
'--unshallow',
]
abspath = os.path.abspath(self.wptd_dir)
subprocess.call(command, cwd=self.target_dir)
# ~5 commits that day, ensure first is result.
logger = mock.Mock(logging.Logger)
sha_finder = shas.SHAFinder(logger, date(2017, 11, 21))
self.assertEqual(
'46060eb2c33de6101bc6930bf5e34f794aa9f996',
sha_finder.get_todays_sha(self.wptd_dir)
)
def test_nov_18th(self):
# No commits that day, ensure empty result.
logger = mock.Mock(logging.Logger)
sha_finder = shas.SHAFinder(logger, date(2017, 11, 18))
self.assertEqual('', sha_finder.get_todays_sha(self.wptd_dir))
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# Copyright 2017 The WPT Dashboard Project. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import mock
import os
import shas
import shutil
import subprocess
import unittest
from datetime import date
here = os.path.dirname(__file__)
wptd_dir = os.path.join(here, '../')
target_dir = os.path.abspath(os.path.join(os.path.abspath(wptd_dir), '../', 'wptdashboard-temp'))
if os.path.exists(target_dir):
print('%s exists, so it will be deleted and recloned for this test' % target_dir)
shutil.rmtree(target_dir)
command = ['git', 'clone', '--depth', '1', 'https://github.com/w3c/wptdashboard', target_dir]
return_code = subprocess.check_call(command, cwd=wptd_dir)
assert return_code == 0, ('Got non-0 return code: %d from command %s' % (return_code, command))
class TestSHAFinder(unittest.TestCase):
def test_nov_21st(self):
# Travis only pulls git history depth 50 by default
command = [
'git',
'fetch',
'--unshallow',
]
abspath = os.path.abspath(wptd_dir)
subprocess.call(command, cwd=target_dir)
# ~5 commits that day, ensure first is result.
logger = mock.Mock(logging.Logger)
sha_finder = shas.SHAFinder(logger, date(2017, 11, 21))
self.assertEqual('46060eb2c33de6101bc6930bf5e34f794aa9f996',
sha_finder.get_todays_sha(wptd_dir))
def test_nov_18th(self):
# No commits that day, ensure empty result.
logger = mock.Mock(logging.Logger)
sha_finder = shas.SHAFinder(logger, date(2017, 11, 18))
self.assertEqual('', sha_finder.get_todays_sha(wptd_dir))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
e1d76e8ac5270f6abc18137cce0f103ca567709f | Fix bug in image2 widget | frostidaho/qtile,frostidaho/qtile | libqtile/widget/image2.py | libqtile/widget/image2.py | from __future__ import division
import os
import cairocffi
from . import base
from .. import bar
from .. import images
class Image2(base._Widget, base.MarginMixin):
"""Display an image on the bar"""
orientations = base.ORIENTATION_BOTH
defaults = [
('loaded_image', None, 'image created by libqtile.images.Loader'),
]
def __init__(self, length=bar.CALCULATED, **config):
base._Widget.__init__(self, length, **config)
self.add_defaults(Image2.defaults)
self.add_defaults(base.MarginMixin.defaults)
# make the default 0 instead
self._widget_defaults["margin"] = 0
def _configure(self, qtile, bar):
base._Widget._configure(self, qtile, bar)
if self.loaded_image is None:
raise ValueError('No loaded_image given!')
elif not self.loaded_image.success:
raise ValueError('Image was not successfully loaded {!r}'.format(self.loaded_image))
@property
def image(self):
return self.loaded_image.surface
@property
def pattern(self):
return self.loaded_image.pattern
@property
def image_width(self):
return self.image.get_width()
@property
def image_height(self):
return self.image.get_height()
def draw(self):
self.drawer.clear(self.bar.background)
self.drawer.ctx.save()
self.drawer.ctx.translate(self.margin_x, self.margin_y)
self.drawer.ctx.set_source(self.pattern)
self.drawer.ctx.paint()
self.drawer.ctx.restore()
if self.bar.horizontal:
self.drawer.draw(offsetx=self.offset, width=self.width)
else:
self.drawer.draw(offsety=self.offset, height=self.width)
def calculate_length(self):
if self.bar.horizontal:
return self.image_width + (self.margin_x * 2)
else:
return self.image_height + (self.margin_y * 2)
| from __future__ import division
import os
import cairocffi
from . import base
from .. import bar
from .. import images
class Image2(base._Widget, base.MarginMixin):
"""Display an image on the bar"""
orientations = base.ORIENTATION_BOTH
defaults = [
('loaded_image', None, 'image created by libqtile.images.Loader'),
]
def __init__(self, length=bar.CALCULATED, **config):
base._Widget.__init__(self, length, **config)
self.add_defaults(Image2.defaults)
self.add_defaults(base.MarginMixin.defaults)
# make the default 0 instead
self._widget_defaults["margin"] = 0
def _configure(self, qtile, bar):
base._Widget._configure(self, qtile, bar)
if self.loaded_image is None:
raise ValueError('No loaded_image given!')
elif not self.loaded_image.success:
raise ValueError('Image was not successfully loaded {!r}'.format(limg))
@property
def image(self):
return self.loaded_image.surface
@property
def pattern(self):
return self.loaded_image.pattern
@property
def image_width(self):
return self.image.get_width()
@property
def image_height(self):
return self.image.get_height()
def draw(self):
self.drawer.clear(self.bar.background)
self.drawer.ctx.save()
self.drawer.ctx.translate(self.margin_x, self.margin_y)
self.drawer.ctx.set_source(self.pattern)
self.drawer.ctx.paint()
self.drawer.ctx.restore()
if self.bar.horizontal:
self.drawer.draw(offsetx=self.offset, width=self.width)
else:
self.drawer.draw(offsety=self.offset, height=self.width)
def calculate_length(self):
if self.bar.horizontal:
return self.image_width + (self.margin_x * 2)
else:
return self.image_height + (self.margin_y * 2)
| mit | Python |
e9ee6aae523905e9efa30033663a3be695a6b863 | bump to next dev version | planetlabs/planet-client-python,planetlabs/planet-client-python | planet/__version__.py | planet/__version__.py | __version__ = '2.0a6dev'
| __version__ = '2.0a5'
| apache-2.0 | Python |
09a2323bcfc47b00d71846ac69022a3dbae2b031 | Bump to version 0.0.7 | cogniteev/backache | backache/__init__.py | backache/__init__.py | from . core import *
from . antioxidant import ( # flake8: noqa
AsyncOperationContext,
celerize,
ProcessingInQuarantineException,
)
from . errors import *
__version__ = (0, 0, 7)
| from . core import *
from . antioxidant import ( # flake8: noqa
AsyncOperationContext,
celerize,
ProcessingInQuarantineException,
)
from . errors import *
__version__ = (0, 0, 6)
| apache-2.0 | Python |
0324d220872ef063cb39ce62264bd4835f260920 | Replace str into call in url | makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity | test_project/urls.py | test_project/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', auth_views.login, name='login'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
| from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
| bsd-3-clause | Python |
e2ec3241a3becf19954e287a6f9caba3ba445829 | Add flags for protocol | dotoscat/Polytank-ASIR | polytanks/protocol.py | polytanks/protocol.py | #Copyright (C) 2017 Oscar Triano 'dotoscat' <dotoscat (at) gmail (dot) com>
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as
#published by the Free Software Foundation, either version 3 of the
#License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#You should have received a copy of the GNU Affero General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
#COMMANDS
CONNECT = 0x0001
DISCONNECT = 0x0002
INPUT = 0x0003
SNAPSHOT = 0x0004
#INPUT
MOVE_LEFT = 0x0100 << 1
MOVE_RIGHT = 0x0100 << 2
PRESS_SHOT = 0x0100 << 3
RELEASE_SHOT = 0x0100 << 4
JUMP = 0x0100 << 5
AIM = 0x0100 << 6
#OBJECT TYPE
TANK = 0x0100
_buffer = bytearray(64)
_command = struct.Struct("!xi")
_recreate_tank = struct.Struct("!iiff")
_move = struct.Struct("!iif")
def command_is(command, data):
return _command.unpack_from(data)[0] & command == command
def get_command(data):
return _command.unpack_from(data)[0]
def connect():
return _command.pack_into(_buffer, 0, CONNECT)
def move(id_, direction):
return _move.pack(MOVE, id_, direction)
def get_move(data):
return _move.unpack(data)
def recreate_tank(id_, x, y):
return _recreate_tank.pack(RECREATE_TANK, id_, x, y)
def get_recreate_tank(data):
return _recreate_tank.unpack(data)
| #Copyright (C) 2017 Oscar Triano 'dotoscat' <dotoscat (at) gmail (dot) com>
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as
#published by the Free Software Foundation, either version 3 of the
#License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#You should have received a copy of the GNU Affero General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
CONNECT = 0
DISCONNECT = 1
RECREATE_TANK = 2
MOVE = 3
_command = struct.Struct("!i")
_recreate_tank = struct.Struct("!iiff")
_move = struct.Struct("!iif")
def get_command(data):
return _command.unpack_from(data)[0]
def connect():
return _command.pack(CONNECT)
def move(id_, direction):
return _move.pack(MOVE, id_, direction)
def get_move(data):
return _move.unpack(data)
def recreate_tank(id_, x, y):
return _recreate_tank.pack(RECREATE_TANK, id_, x, y)
def get_recreate_tank(data):
return _recreate_tank.unpack(data)
| agpl-3.0 | Python |
27c10b93aeb6c6c69b401686787db245e4f0f01d | bump version for pypi | jld23/saspy,jld23/saspy | saspy/version.py | saspy/version.py | __version__ = '2.3.1'
| __version__ = '2.3.0'
| apache-2.0 | Python |
b7805df4b2d103b1cea63ddcb58dc86d0a90465a | Update Flask extension import | belxlaz/portfolio,belxlaz/portfolio | portfolio/__init__.py | portfolio/__init__.py | from flask import Flask
from flask_assets import Environment
from portfolio.views import site
# init the app, its compressor and its manager
app = Flask('portfolio')
# config
app.config.from_pyfile('config.py')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# assets
assets = Environment(app)
assets.config['LIBSASS_INCLUDES'] = app.config['LIBSASS_INCLUDES']
assets.config['coffeescript_bin'] = app.config['COFFEE_BIN']
assets.load_path = app.config['WEBASSETS_LOAD_PATH']
assets.from_yaml(app.config['ASSETS'])
# load views
app.register_blueprint(site)
| from flask import Flask
from flask.ext.assets import Environment
from portfolio.views import site
# init the app, its compressor and its manager
app = Flask('portfolio')
# config
app.config.from_pyfile('config.py')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# assets
assets = Environment(app)
assets.config['LIBSASS_INCLUDES'] = app.config['LIBSASS_INCLUDES']
assets.config['coffeescript_bin'] = app.config['COFFEE_BIN']
assets.load_path = app.config['WEBASSETS_LOAD_PATH']
assets.from_yaml(app.config['ASSETS'])
# load views
app.register_blueprint(site)
| mit | Python |
cb4a113ccb628971018e4d275c1e2f6c43144d38 | CHange import settings | SlugocM/bayesfit,SlugocM/bayesfit,SlugocM/bayesfit | bayesfit/plot_CDF.py | bayesfit/plot_CDF.py | """
*******************************************************
*
* plot_cdf - PLOT MODEL FIT TO DATA
*
* Version: Version 2.0
* License: Apache 2.0
* Written by: Michael Slugocki
* Created on: April 18, 2017
* Last updated: April 18, 2018
*
*******************************************************
"""
#################################################################
# IMPORT MODULES
#################################################################
import numpy as np
import matplotlib.pyplot as plt
from . import psyfunction as _psyfunction
#################################################################
# GENERATE CDF PLOT OF MODEL FIT TO DATA
#################################################################
def plot_cdf(data, metrics, options):
fig, ax = plt.subplots(nrows = 1, ncols = 1, figsize = (5, 5))
# Generate smooth curve from fitted function
x_max = data[:, 0].max()
x_min = data[:, 0].min()
x_est = np.linspace(x_min,x_max,1000)
y_pred = _psyfunction(x_est,
metrics['Fit'][0],
metrics['Fit'][1],
metrics['Fit'][2],
metrics['Fit'][3],
options['sigmoid_type'])
if options['logspace'] is True:
ax.set_xscale('log')
ax.plot([x_min, metrics['threshold']],
[options['threshold'], options['threshold']],
color = 'black',
linestyle = 'dotted',
linewidth = 2,
zorder = 1)
ax.plot([metrics['threshold'], metrics['threshold']],
[0, options['threshold']],
color = 'black',
linestyle = 'dotted',
linewidth = 2,
zorder = 1)
ax.set_ylim(-0.05,1.05)
ax.axhline(y=metrics['Fit'][2], color = 'r', linestyle = 'dashed', linewidth = 2, zorder = 1)
# Plot remainder of curve
ax.scatter(data[:,0], data[:,1], color = '#5998ff', s = 125, alpha = 1.0, zorder = 5)
ax.plot(x_est, y_pred, linestyle = '-', color = 'black', linewidth = 2, alpha = 0.85, zorder = 10)
ax.set_xlabel('Stimulus Intensity', fontsize = 16, fontweight = 'bold')
ax.set_ylabel('Proportion correct', fontsize = 16, fontweight = 'bold')
ax.xaxis.set_tick_params(labelsize=13)
ax.yaxis.set_tick_params(labelsize=13)
plt.tight_layout()
plt.show() | """
*******************************************************
*
* plot_cdf - PLOT MODEL FIT TO DATA
*
* Version: Version 2.0
* License: Apache 2.0
* Written by: Michael Slugocki
* Created on: April 18, 2017
* Last updated: April 18, 2018
*
*******************************************************
"""
#################################################################
# IMPORT MODULES
#################################################################
import numpy as np
import matplotlib.pyplot as plt
from psyFunction import psyfunction as _psyfunction
#################################################################
# GENERATE CDF PLOT OF MODEL FIT TO DATA
#################################################################
def plot_cdf(data, metrics, options):
fig, ax = plt.subplots(nrows = 1, ncols = 1, figsize = (5, 5))
# Generate smooth curve from fitted function
x_max = data[:, 0].max()
x_min = data[:, 0].min()
x_est = np.linspace(x_min,x_max,1000)
y_pred = _psyfunction(x_est,
metrics['Fit'][0],
metrics['Fit'][1],
metrics['Fit'][2],
metrics['Fit'][3],
options['sigmoid_type'])
if options['logspace'] is True:
ax.set_xscale('log')
ax.plot([x_min, metrics['threshold']],
[options['threshold'], options['threshold']],
color = 'black',
linestyle = 'dotted',
linewidth = 2,
zorder = 1)
ax.plot([metrics['threshold'], metrics['threshold']],
[0, options['threshold']],
color = 'black',
linestyle = 'dotted',
linewidth = 2,
zorder = 1)
ax.set_ylim(-0.05,1.05)
ax.axhline(y=metrics['Fit'][2], color = 'r', linestyle = 'dashed', linewidth = 2, zorder = 1)
# Plot remainder of curve
ax.scatter(data[:,0], data[:,1], color = '#5998ff', s = 125, alpha = 1.0, zorder = 5)
ax.plot(x_est, y_pred, linestyle = '-', color = 'black', linewidth = 2, alpha = 0.85, zorder = 10)
ax.set_xlabel('Stimulus Intensity', fontsize = 16, fontweight = 'bold')
ax.set_ylabel('Proportion correct', fontsize = 16, fontweight = 'bold')
ax.xaxis.set_tick_params(labelsize=13)
ax.yaxis.set_tick_params(labelsize=13)
plt.tight_layout()
plt.show() | apache-2.0 | Python |
893e5d9272d8e9a8f105424cdf9b2ded7a3fd2d9 | Validate correct count | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | tests/cli/qa_test.py | tests/cli/qa_test.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from click.testing import CliRunner
from rnacentral_pipeline.cli import qa
def test_can_parse_rfam_output():
runner = CliRunner()
filename = os.path.abspath('data/qa/rfam/scan.tbl')
with runner.isolated_filesystem():
result = runner.invoke(qa.cli, ['rfam', filename, 'rfam.csv'])
assert result.exit_code == 0
assert not result.exception
with open('rfam.csv', 'r') as raw:
data = raw.readlines()
assert len(data) == 126
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from click.testing import CliRunner
from rnacentral_pipeline.cli import qa
def test_can_parse_rfam_output():
runner = CliRunner()
filename = os.path.abspath('data/qa/rfam/scan.tbl')
with runner.isolated_filesystem():
result = runner.invoke(qa.cli, ['rfam', filename, 'rfam.csv'])
assert result.exit_code == 0
assert not result.exception
with open('rfam.csv', 'r') as raw:
data = raw.readlines()
assert len(data) == 129
| apache-2.0 | Python |
e8de5506847d015172acb43248581763ed8ea40c | make listen on right place | moypray/flocker-docker-plugin,wallnerryan/flocker-docker-plugin,moypray/flocker-docker-plugin,mbrukman/flocker-docker-plugin,hackday-profilers/flocker-docker-plugin,mbrukman/flocker-docker-plugin,moypray/flocker-docker-plugin,wallnerryan/flocker-docker-plugin,wallnerryan/flocker-docker-plugin,mbrukman/flocker-docker-plugin,hackday-profilers/flocker-docker-plugin,hackday-profilers/flocker-docker-plugin | powerstripflocker.tac | powerstripflocker.tac | # Copyright ClusterHQ Limited. See LICENSE file for details.
from twisted.web import server, resource
from twisted.application import service, internet
from powerstripflocker.adapter import AdapterResource, HandshakeResource
def getAdapter():
root = resource.Resource()
v1 = resource.Resource()
root.putChild("v1", v1)
volume = resource.Resource()
v1.putChild("volume", volume)
volume.putChild("volumes", AdapterResource())
v1.putChild("handshake", HandshakeResource())
site = server.Site(root)
return site
application = service.Application("Powerstrip Flocker Adapter")
adapterServer = internet.UNIXServer("/var/run/docker-plugin/plugin.sock", getAdapter())
adapterServer.setServiceParent(application)
| # Copyright ClusterHQ Limited. See LICENSE file for details.
from twisted.web import server, resource
from twisted.application import service, internet
from powerstripflocker.adapter import AdapterResource, HandshakeResource
def getAdapter():
root = resource.Resource()
v1 = resource.Resource()
root.putChild("v1", v1)
volume = resource.Resource()
v1.putChild("volume", volume)
volume.putChild("volumes", AdapterResource())
v1.putChild("handshake", HandshakeResource())
site = server.Site(root)
return site
application = service.Application("Powerstrip Flocker Adapter")
adapterServer = internet.TCPServer(9042, getAdapter(), interface='0.0.0.0')
adapterServer.setServiceParent(application)
| apache-2.0 | Python |
3d3b8c4347eee02787ef626bf176c5a7f1923235 | bump to 0.6.4 | ghickman/couchdbkit,ghickman/couchdbkit,benoitc/couchdbkit,benoitc/couchdbkit,benoitc/couchdbkit,ghickman/couchdbkit | couchdbkit/version.py | couchdbkit/version.py | # -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 6, 4)
__version__ = ".".join(map(str, version_info))
| # -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 6, 3)
__version__ = ".".join(map(str, version_info))
| mit | Python |
a53612d5f276180d204378b9e4974fcd812f6a5b | Add licence header in fake camera test file. | angus-ai/angus-sdk-python | tests/fake_camera.py | tests/fake_camera.py | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
| from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
| apache-2.0 | Python |
7ba9066cd921858e0263a7a43b8527eb3d24574e | add serializer while returning shared_task | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/celery/shared_task.py | corehq/apps/celery/shared_task.py | from celery import shared_task
def task(*args, **kwargs):
"""
A wrapper over shared_task decorator which enforces the default task
serializer as JSON this decorator to create celery tasks in HQ.
This is planned to be used until https://github.com/celery/celery/issues/6759 is fixed.
After the fix goes out
- feel free to remove it and use the native shared_task decorator
- Set CELERY_TASK_SERIALIZER back to json
Parameters:
serializer (string): Serialization method to use.
Can be pickle, json, yaml, msgpack or any custom serialization
method that's been registered with kombu.serialization.registry.
queue (string): Name of the queue in which task is supposed to run
All other options defined https://docs.celeryq.dev/en/stable/userguide/tasks.html#list-of-options # noqa E501
"""
if len(args) == 1 and callable(args[0]) and not kwargs:
return shared_task(serializer='json')(args[0])
kwargs.setdefault('serializer', 'json')
def task(fn):
return shared_task(*args, **kwargs)(fn)
return task
| from celery import shared_task
def task(*args, **kwargs):
"""
A wrapper over shared_task decorator which enforces the default task
serializer as JSON this decorator to create celery tasks in HQ.
This is planned to be used until https://github.com/celery/celery/issues/6759 is fixed.
After the fix goes out
- feel free to remove it and use the native shared_task decorator
- Set CELERY_TASK_SERIALIZER back to json
Parameters:
serializer (string): Serialization method to use.
Can be pickle, json, yaml, msgpack or any custom serialization
method that's been registered with kombu.serialization.registry.
queue (string): Name of the queue in which task is supposed to run
All other options defined https://docs.celeryq.dev/en/stable/userguide/tasks.html#list-of-options # noqa E501
"""
if len(args) == 1 and callable(args[0]) and not kwargs:
return shared_task()(args[0])
kwargs.setdefault('serializer', 'json')
def task(fn):
return shared_task(*args, **kwargs)(fn)
return task
| bsd-3-clause | Python |
29ae43413fe8f3ec5b8dc376450711d2b9b49dac | add try except to logger | eseom/glide,eseom/glide | procwatcher/logger.py | procwatcher/logger.py | #!/usr/bin/env python
#
# http://github.com/eseom/procwatcher
#
# @author: EunseokEom <me@eseom.org>
# @desc: logger
import logging
import logging.handlers
from socket import error as socket_error
class Log(object):
def __init__(self):
self.logger = None
self.queue = []
self.get_logger()
def handle_error(self, record):
self.logger.removeHandler(self.handler)
self.logger = None
def get_logger(self):
self.logger = logging.getLogger('syslog')
self.logger.setLevel(logging.DEBUG)
try:
self.handler = logging.handlers.SysLogHandler(address='/dev/log',
facility=logging.handlers.SysLogHandler.LOG_DAEMON)
self.handler.handleError = self.handle_error
self.logger.addHandler(self.handler)
except socket_error as e:
try:
self.logger.removeHandler(self.handler)
except:
pass
self.logger = None
def info(self, message, index):
if not self.logger:
self.get_logger()
if not self.logger:
return
try:
self.logger.info('%s: %s', message.proc.name, message.message)
except Exception as e:
pass
| #!/usr/bin/env python
#
# http://github.com/eseom/procwatcher
#
# @author: EunseokEom <me@eseom.org>
# @desc: logger
import logging
import logging.handlers
from socket import error as socket_error
class Log(object):
def __init__(self):
self.logger = None
self.queue = []
self.get_logger()
def handle_error(self, record):
self.logger.removeHandler(self.handler)
self.logger = None
def get_logger(self):
self.logger = logging.getLogger('syslog')
self.logger.setLevel(logging.DEBUG)
try:
self.handler = logging.handlers.SysLogHandler(address='/dev/log',
facility=logging.handlers.SysLogHandler.LOG_DAEMON)
self.handler.handleError = self.handle_error
self.logger.addHandler(self.handler)
except socket_error as e:
self.logger.removeHandler(self.handler)
self.logger = None
def info(self, message, index):
if not self.logger:
self.get_logger()
if not self.logger:
return
try:
self.logger.info('%s: %s', message.proc.name, message.message)
except Exception as e:
pass
| mit | Python |
a3091b3ad0ce95dffedd8b2d9a8f9e72e36ad157 | add ability for importer to handle thecb data too | texastribune/the-dp,texastribune/the-dp,texastribune/the-dp,texastribune/the-dp | tx_highered/management/commands/tx_highered_import.py | tx_highered/management/commands/tx_highered_import.py | import os
from django.core.management.base import BaseCommand, CommandError
from tx_highered.scripts.import_customreport import generic as ipeds
from tx_highered.scripts.import_thecb_report import generic as thecb
class Command(BaseCommand):
args = '(ipeds|thecb) <file file ...>'
help = "Import Data"
def handle(self, importer_type, *args, **options):
# WISHLIST handle verbosity option
if importer_type == 'ipeds':
for path in args:
if os.path.isfile(path):
ipeds(path)
elif importer_type == 'thecb':
for path in args:
if os.path.isfile(path):
thecb(path)
else:
raise CommandError(u'Not a valid importer type: "{}"'
.format(importer_type))
| import os
from django.core.management.base import BaseCommand
from tx_highered.scripts.import_customreport import generic
class Command(BaseCommand):
args = '(ipeds|thecb) <file file ...>'
help = "Import Data"
def handle(self, importer_type, *args, **options):
# TODO handle THECB data
if importer_type == 'ipeds':
for path in args:
if os.path.isfile(path):
generic(path)
| apache-2.0 | Python |
328583decceda16b73d0c278a398a1cc632f3ce3 | Add py-pluggy 0.13.0 (#13176) | iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-pluggy/package.py | var/spack/repos/builtin/packages/py-pluggy/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPluggy(PythonPackage):
"""Plugin and hook calling mechanisms for python."""
homepage = "https://github.com/pytest-dev/pluggy"
url = "https://pypi.io/packages/source/p/pluggy/pluggy-0.13.0.tar.gz"
import_modules = ['pluggy']
version('0.13.0', sha256='fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34')
version('0.12.0', sha256='0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc')
version('0.7.1', sha256='95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1')
version('0.6.0', sha256='7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('py-importlib-metadata@0.12:', when='^python@:3.7', type=('build', 'run'))
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPluggy(PythonPackage):
"""Plugin and hook calling mechanisms for python."""
homepage = "https://github.com/pytest-dev/pluggy"
url = "https://pypi.io/packages/source/p/pluggy/pluggy-0.12.0.tar.gz"
import_modules = ['pluggy']
version('0.12.0', sha256='0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc')
version('0.7.1', sha256='95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1')
version('0.6.0', sha256='7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff')
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('py-importlib-metadata@0.12:', type=('build', 'run'))
| lgpl-2.1 | Python |
87cf38a4279d247b3e6b5380cc4222641a123343 | add v1.14.0 from github (#27660) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/random123/package.py | var/spack/repos/builtin/packages/random123/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Random123(Package):
"""Random123 is a library of 'counter-based' random number
generators (CBRNGs), in which the Nth random number can be obtained
by applying a stateless mixing function to N instead of the
conventional approach of using N iterations of a stateful
transformation."""
homepage = "https://www.deshawresearch.com/resources_random123.html"
url = "https://github.com/DEShawResearch/random123/archive/refs/tags/v1.14.0.tar.gz"
version('1.14.0', sha256='effafd8656b18030b2a5b995cd3650c51a7c45052e6e1c21e48b9fa7a59d926e')
version('1.13.2', sha256='74a1c6bb66b2684f03d3b1008642a2e9141909103cd09f428d2c60bcaa51cb40',
url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.13.2.tar.gz')
version('1.10', sha256='4afdfba4b941e33e23b5de9b7907b7e3ac326cb4d34b5fa8225edd00b5fe053b',
url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.10.tar.gz')
version('1.09', sha256='cf6abf623061bcf3d17e5e49bf3f3f0ae400ee89ae2e97c8cb8dcb918b1ebabe',
url='https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.09.tar.gz')
patch('ibmxl.patch', when='@1.09')
patch('arm-gcc.patch', when='@1.09')
patch('v1132-xl161.patch', when='@1.13.2')
def install(self, spec, prefix):
# Random123 doesn't have a build system.
# We have to do our own install here.
install_tree('include', prefix.include)
install('./LICENSE', "%s" % prefix)
if spec.satisfies('@1.09'):
# used by some packages, e.g. quinoa
install('examples/uniform.hpp',
join_path(prefix.include, 'Random123'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Random123(Package):
"""Random123 is a library of 'counter-based' random number
generators (CBRNGs), in which the Nth random number can be obtained
by applying a stateless mixing function to N instead of the
conventional approach of using N iterations of a stateful
transformation."""
homepage = "https://www.deshawresearch.com/resources_random123.html"
url = "https://www.deshawresearch.com/downloads/download_random123.cgi/Random123-1.09.tar.gz"
version('1.13.2', sha256='74a1c6bb66b2684f03d3b1008642a2e9141909103cd09f428d2c60bcaa51cb40')
version('1.10', sha256='4afdfba4b941e33e23b5de9b7907b7e3ac326cb4d34b5fa8225edd00b5fe053b')
version('1.09', sha256='cf6abf623061bcf3d17e5e49bf3f3f0ae400ee89ae2e97c8cb8dcb918b1ebabe')
patch('ibmxl.patch', when='@1.09')
patch('arm-gcc.patch', when='@1.09')
patch('v1132-xl161.patch', when='@1.13.2')
def install(self, spec, prefix):
# Random123 doesn't have a build system.
# We have to do our own install here.
install_tree('include', prefix.include)
install('./LICENSE', "%s" % prefix)
if spec.satisfies('@1.09'):
# used by some packages, e.g. quinoa
install('examples/uniform.hpp',
join_path(prefix.include, 'Random123'))
| lgpl-2.1 | Python |
40109263f0efd73fb4fab78f03f9222045ae4165 | Fix initial debug configuration for J-Link | platformio/platformio,platformio/platformio-core,platformio/platformio-core | platformio/commands/debug/initcfgs.py | platformio/commands/debug/initcfgs.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
GDB_DEFAULT_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_STUTIL_INIT_CONFIG = """
define pio_reset_halt_target
monitor halt
monitor reset
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_JLINK_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset
monitor halt
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_BLACKMAGIC_INIT_CONFIG = """
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
GDB_MSPDEBUG_INIT_CONFIG = """
define pio_reset_halt_target
end
define pio_reset_target
end
target extended-remote $DEBUG_PORT
monitor erase
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_QEMU_INIT_CONFIG = """
define pio_reset_halt_target
monitor system_reset
end
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
GDB_DEFAULT_INIT_CONFIG = """
define pio_reset_halt_target
monitor reset halt
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
monitor init
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_STUTIL_INIT_CONFIG = """
define pio_reset_halt_target
monitor halt
monitor reset
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
$INIT_BREAK
pio_reset_halt_target
$LOAD_CMDS
pio_reset_halt_target
"""
GDB_JLINK_INIT_CONFIG = """
define pio_reset_halt_target
monitor halt
monitor reset
end
define pio_reset_target
monitor reset
end
target extended-remote $DEBUG_PORT
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_BLACKMAGIC_INIT_CONFIG = """
define pio_reset_halt_target
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
end
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK
set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
GDB_MSPDEBUG_INIT_CONFIG = """
define pio_reset_halt_target
end
define pio_reset_target
end
target extended-remote $DEBUG_PORT
monitor erase
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
GDB_QEMU_INIT_CONFIG = """
define pio_reset_halt_target
monitor system_reset
end
define pio_reset_target
pio_reset_halt_target
end
target extended-remote $DEBUG_PORT
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""
| apache-2.0 | Python |
55f5ff9db2b819096630d9547bb5a3e7fa1e80dd | Fix printing unrelated files in output | Brickstertwo/git-commands | bin/commands/tuck.py | bin/commands/tuck.py | """Stash specific files."""
from subprocess import call, check_output
from utils.messages import error, info
def tuck(files, message=None, quiet=False):
"""Stash specific files."""
# resolve the files to be tucked
files_to_tuck = check_output(['git', 'diff', '--name-only', '--cached', '--'] + files).splitlines()
files_to_tuck += check_output(['git', 'diff', '--name-only', '--'] + files).splitlines()
# resolve new files to be tucked
files_to_tuck += check_output(['git', 'ls-files', '--others', '--'] + files).splitlines()
if not files_to_tuck:
error("no files to tuck using: " + ' '.join(files))
# reset the files to be tucked in the event they have changes. Like stash, we won't keep track of staged/unstaged
# changes
call(['git', 'reset', '--quiet', '--'] + files_to_tuck)
# commit already staged files
staged = check_output('git diff --name-only --cached'.split())
if staged:
check_output(['git', 'commit', '--message', 'TUCK: staged', '--quiet']).splitlines()
# commit unstaged files
ignore_files = [':!{}'.format(f) for f in files_to_tuck]
call(['git', 'add', '--', '.'] + ignore_files)
unstaged = check_output('git diff --name-only --cached'.split())
if unstaged:
call(['git', 'commit', '--message', 'TUCK: unstaged', '--quiet'])
# stash files to be tucked
stash_command = ['git', 'stash', 'save', '--include-untracked', '--quiet']
if message:
stash_command += [message]
check_output(stash_command)
# reset all original files
reset_command = ['git', 'reset', '--quiet', 'HEAD^']
if unstaged:
call(reset_command)
if staged:
call(reset_command + ['--soft'])
info('Tucked files: ' + ' '.join(files_to_tuck), quiet)
| """Stash specific files."""
from subprocess import call, check_output
from utils.messages import error, info
def tuck(files, message=None, quiet=False):
"""Stash specific files."""
# resolve the files to be tucked
files_to_tuck = check_output(['git', 'ls-files', '--others', '--cached', '--'] + files).splitlines()
if not files_to_tuck:
error("no files to tuck using: " + ' '.join(files))
# reset the files to be tucked in the event they have changes. Like stash, we won't keep track of staged/unstaged
# changes
call(['git', 'reset', '--quiet', '--'] + files_to_tuck)
# commit already staged files
staged = check_output('git diff --name-only --cached'.split())
if staged:
check_output(['git', 'commit', '--message', 'TUCK: staged', '--quiet']).splitlines()
# commit unstaged files
ignore_files = [':!{}'.format(f) for f in files_to_tuck]
call(['git', 'add', '--', '.'] + ignore_files)
unstaged = check_output('git diff --name-only --cached'.split())
if unstaged:
call(['git', 'commit', '--message', 'TUCK: unstaged', '--quiet'])
# stash files to be tucked
stash_command = ['git', 'stash', 'save', '--include-untracked', '--quiet']
if message:
stash_command += [message]
check_output(stash_command)
# reset all original files
reset_command = ['git', 'reset', '--quiet', 'HEAD^']
if unstaged:
call(reset_command)
if staged:
call(reset_command + ['--soft'])
info('Tucked files: ' + ' '.join(files_to_tuck), quiet)
| mit | Python |
3e259105659c8738ad559275011721f15eaa64c0 | Create a new Talk Reviewers group | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | wafer/management/commands/wafer_add_default_groups.py | wafer/management/commands/wafer_add_default_groups.py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group, Permission
class Command(BaseCommand):
help = "Add some useful default groups"
GROUPS = {
# Permissions are specified as (app, code_name) pairs
'Page Editors': (
('pages', 'add_page'), ('pages', 'delete_page'),
('pages', 'change_page'), ('pages', 'add_file'),
('pages', 'delete_file'), ('pages', 'change_file'),
),
'Talk Mentors': (
('talks', 'change_talk'), ('talks', 'view_all_talks'),
('talks', 'edit_private_notes'),
),
'Talk Reviewers': (
('talks', 'view_all_talks'),
('talks', 'edit_private_notes'),
('talks', 'add_review'),
),
'Registration': (),
}
def add_wafer_groups(self):
# This creates the groups we need for page editor and talk mentor
# roles.
for wafer_group, permission_list in self.GROUPS.items():
group, created = Group.objects.all().get_or_create(
name=wafer_group)
if not created:
print('Using existing %s group' % wafer_group)
for app, perm_code in permission_list:
try:
perm = Permission.objects.filter(
codename=perm_code, content_type__app_label=app).get()
except Permission.DoesNotExist:
print('Unable to find permission %s' % perm_code)
continue
except Permission.MultipleObjectsReturned:
print('Non-unique permission %s' % perm_code)
continue
if perm not in group.permissions.all():
print('Adding %s to %s' % (perm_code, wafer_group))
group.permissions.add(perm)
group.save()
def handle(self, *args, **options):
self.add_wafer_groups()
| # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group, Permission
class Command(BaseCommand):
help = "Add some useful default groups"
GROUPS = {
# Permissions are specified as (app, code_name) pairs
'Page Editors': (
('pages', 'add_page'), ('pages', 'delete_page'),
('pages', 'change_page'), ('pages', 'add_file'),
('pages', 'delete_file'), ('pages', 'change_file'),
),
'Talk Mentors': (
('talks', 'change_talk'), ('talks', 'view_all_talks'),
('talks', 'edit_private_notes'),
),
'Registration': (),
}
def add_wafer_groups(self):
# This creates the groups we need for page editor and talk mentor
# roles.
for wafer_group, permission_list in self.GROUPS.items():
group, created = Group.objects.all().get_or_create(
name=wafer_group)
if not created:
print('Using existing %s group' % wafer_group)
for app, perm_code in permission_list:
try:
perm = Permission.objects.filter(
codename=perm_code, content_type__app_label=app).get()
except Permission.DoesNotExist:
print('Unable to find permission %s' % perm_code)
continue
except Permission.MultipleObjectsReturned:
print('Non-unique permission %s' % perm_code)
continue
if perm not in group.permissions.all():
print('Adding %s to %s' % (perm_code, wafer_group))
group.permissions.add(perm)
group.save()
def handle(self, *args, **options):
self.add_wafer_groups()
| isc | Python |
b694e36df79850b7c47dd62f17b1895da810c4cf | Fix default parameters at run_benchmark.py. | pictools/pica,pictools/pica,pictools/pica | bin/run_benchmark.py | bin/run_benchmark.py | import datetime
import itertools
import os
import subprocess
# Modify parameters here
benchmark_path = "benchmark"
out_directory = datetime.datetime.now().strftime('benchmark_%Y-%m-%d_%H-%M-%S')
dimension = 3
size = 50
ppc = 100
temperature = 0.0
iterations = 100
representations = ["SoA", "AoS"]
storages = ["unordered", "ordered"]
# add other combinations here
combination_keys = ["-r", "-e"]
combination_values = list(itertools.product(representations, storages))
# End of parameters
# Enumerate all combinations of parameters and run
if not os.path.exists(out_directory):
os.makedirs(out_directory)
args_base = (benchmark_path, "-d", str(dimension), "-g", str(size), "-p", str(ppc), "-t", str(temperature), "-i", str(iterations))
for i in range(0, len(combination_values)):
file_name = ""
args_combination = ()
for j in range(0, len(combination_values[i])):
args_combination += (combination_keys[j], combination_values[i][j])
file_name += combination_values[i][j] + "_"
args = args_base + args_combination
popen = subprocess.Popen(args, stdout=subprocess.PIPE, universal_newlines=True)
popen.wait()
file_name = file_name[:-1] + ".txt"
f = open(os.path.join(out_directory, file_name), "w")
f.write(str(popen.stdout.read()))
| import datetime
import itertools
import os
import subprocess
# Modify parameters here
benchmark_path = "benchmark"
out_directory = datetime.datetime.now().strftime('benchmark_%Y-%m-%d_%H-%M-%S')
dimension = 3
size = 50
ppc = 1
temperature = 0.0
iterations = 1
representations = ["SoA", "AoS"]
storages = ["unordered", "ordered"]
# add other combinations here
combination_keys = ["-r", "-e"]
combination_values = list(itertools.product(representations, storages))
# End of parameters
# Enumerate all combinations of parameters and run
if not os.path.exists(out_directory):
os.makedirs(out_directory)
args_base = (benchmark_path, "-d", str(dimension), "-g", str(size), "-p", str(ppc), "-t", str(temperature), "-i", str(iterations))
for i in range(0, len(combination_values)):
file_name = ""
args_combination = ()
for j in range(0, len(combination_values[i])):
args_combination += (combination_keys[j], combination_values[i][j])
file_name += combination_values[i][j] + "_"
args = args_base + args_combination
popen = subprocess.Popen(args, stdout=subprocess.PIPE, universal_newlines=True)
popen.wait()
file_name = file_name[:-1] + ".txt"
f = open(os.path.join(out_directory, file_name), "w")
f.write(str(popen.stdout.read()))
| mit | Python |
005198ff1c7076e0a872456cd633c9b718dcb1ea | Fix flake8 issue | JIC-CSB/dtoolcore | dtoolcore/compare.py | dtoolcore/compare.py | """Module with helper functions for comparing datasets."""
def diff_identifiers(a, b):
"""Return list of tuples where identifiers in datasets differ.
Tuple structure:
(identifier, present in a, present in b)
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples where identifiers in datasets differ
"""
a_ids = set(a.identifiers)
b_ids = set(b.identifiers)
difference = []
for i in a_ids.difference(b_ids):
difference.append((i, True, False))
for i in b_ids.difference(a_ids):
difference.append((i, False, True))
return difference
def diff_sizes(a, b):
"""Return list of tuples where sizes differ.
Tuple structure:
(identifier, size in a, size in b)
Assumes list of identifiers in a and b are identical.
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples for all items with different sizes
"""
difference = []
for i in a.identifiers:
a_size = a.item_properties(i)["size_in_bytes"]
b_size = b.item_properties(i)["size_in_bytes"]
if a_size != b_size:
difference.append((i, a_size, b_size))
return difference
def diff_content(a, reference):
"""Return list of tuples where content differ.
Tuple structure:
(identifier, hash in a, hash in reference)
Assumes list of identifiers in a and b are identical.
Storage broker of reference used to generate hash for files in a.
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples for all items with different content
"""
difference = []
for i in a.identifiers:
fpath = a.item_content_abspath(i)
calc_hash = reference._storage_broker.hasher(fpath)
ref_hash = reference.item_properties(i)["hash"]
if calc_hash != ref_hash:
info = (i, calc_hash, ref_hash)
difference.append(info)
return difference
| """Module with helper functions for comparing datasets."""
def diff_identifiers(a, b):
"""Return list of tuples where identifiers in datasets differ.
Tuple structure:
(identifier, present in a, present in b)
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples where identifiers in datasets differ
"""
a_ids = set(a.identifiers)
b_ids = set(b.identifiers)
difference = []
for i in a_ids.difference(b_ids):
difference.append((i, True, False))
for i in b_ids.difference(a_ids):
difference.append((i, False, True))
return difference
def diff_sizes(a, b):
"""Return list of tuples where sizes differ.
Tuple structure:
(identifier, size in a, size in b)
Assumes list of identifiers in a and b are identical.
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples for all items with different sizes
"""
difference = []
for i in a.identifiers:
a_size = a.item_properties(i)["size_in_bytes"]
b_size = b.item_properties(i)["size_in_bytes"]
if a_size != b_size:
difference.append((i, a_size, b_size))
return difference
def diff_content(a, reference):
"""Return list of tuples where content differ.
Tuple structure:
(identifier, hash in a, hash in reference)
Assumes list of identifiers in a and b are identical.
Storage broker of reference used to generate hash for files in a.
:param a: first :class:`dtoolcore.DataSet`
:param b: second :class:`dtoolcore.DataSet`
:returns: list of tuples for all items with different content
"""
difference = []
for i in a.identifiers:
fpath = a.item_content_abspath(i)
calc_hash = reference._storage_broker.hasher(fpath)
ref_hash = reference.item_properties(i)["hash"]
if calc_hash != ref_hash:
info = (i, calc_hash, ref_hash)
difference.append(info)
return difference
| mit | Python |
0a7e722fd872722788affa7c40f2c122c1a04828 | Add test for existing bucket name | patlub/BucketListAPI,patlub/BucketListAPI | tests/test_bucket.py | tests/test_bucket.py | import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class BucketTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
user = json.dumps({
'email': 'pat@gmail.com',
'password': 'bucketlist',
'name': 'Patrick'
})
response = self.client.post('/auth/register', data=user)
json_repr = json.loads(response.data.decode())
self.token = json_repr['id']
def test_add_bucket_without_bucket_name(self):
"""Should return 400 for missing bucket name"""
bucket = json.dumps({
'bucket': '',
'desc': 'travel'
})
response = self.client.post('/auth/bucket', data=bucket,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def test_add_bucket_successfully(self):
"""Should return 201 for bucket added"""
bucket = json.dumps({
'bucket': 'Travel',
'desc': 'Visit places'
})
response = self.client.post('/auth/bucket', data=bucket,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 201)
self.assertIn('Successfully', response.data.decode())
def test_add_bucket_with_existing_bucket_name(self):
"""Should return 400 for missing bucket name"""
# First Add bucket
self.test_add_bucket_successfully()
bucket = json.dumps({
'bucket': 'Travel',
'desc': 'travel'
})
response = self.client.post('/auth/bucket', data=bucket,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 400)
self.assertIn('Bucket name Already exists', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
| import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class BucketTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
user = json.dumps({
'email': 'pat@gmail.com',
'password': 'bucketlist',
'name': 'Patrick'
})
response = self.client.post('/auth/register', data=user)
json_repr = json.loads(response.data.decode())
self.token = json_repr['id']
def test_add_bucket_without_bucket_name(self):
"""Should return 400 for missing bucket name"""
bucket = json.dumps({
'bucket': '',
'desc': 'travel'
})
response = self.client.post('/auth/bucket', data=bucket,
headers={"Authorization" : self.token})
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def test_add_bucket_successfully(self):
"""Should return 201 for bucket added"""
bucket = json.dumps({
'bucket': 'Travel',
'desc': 'Visit places'
})
response = self.client.post('/auth/bucket', data=bucket,
headers={"Authorization" : self.token})
self.assertEqual(response.status_code, 201)
self.assertIn('Successfully', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
| mit | Python |
0446ff796442fd34053209ec1a8860b7f3971a05 | Support custom user models | ejucovy/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates | opendebates/opendebates/authentication_backend.py | opendebates/opendebates/authentication_backend.py | from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""
Email Authentication Backend
Allows a user to sign in using an email/password pair rather than
a username/password pair.
"""
def authenticate(self, username=None, password=None):
""" Authenticate a user based on email address as the user name. """
User = get_user_model()
try:
user = User.objects.get(email=username)
if user.check_password(password):
return user
except Exception:
return None
def get_user(self, user_id):
""" Get a User object from the user_id. """
User = get_user_model()
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| from django.contrib.auth.models import User, check_password
class EmailAuthBackend(object):
"""
Email Authentication Backend
Allows a user to sign in using an email/password pair rather than
a username/password pair.
"""
def authenticate(self, username=None, password=None):
""" Authenticate a user based on email address as the user name. """
try:
user = User.objects.get(email=username)
if user.check_password(password):
return user
except Exception:
return None
def get_user(self, user_id):
""" Get a User object from the user_id. """
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| apache-2.0 | Python |
d1f4ab4c7da9eaaeaebe5b9d0a45b0d8cc9762bf | Fix oversight in test_config.py | getnikola/nikola,getnikola/nikola,okin/nikola,okin/nikola,okin/nikola,okin/nikola,getnikola/nikola,getnikola/nikola | tests/test_config.py | tests/test_config.py | # -*- coding: utf-8 -*-
import os
import re
from nikola import __main__ as nikola
from .base import BaseTestCase
class ConfigTest(BaseTestCase):
"""Provides tests for the configuration-file handling."""
@classmethod
def setUpClass(self):
self.metadata_option = "ADDITIONAL_METADATA"
script_root = os.path.dirname(__file__)
test_dir = os.path.join(script_root, "data", "test_config")
nikola.main(["--conf=" + os.path.join(test_dir, "conf.py")])
self.simple_config = nikola.config
nikola.main(["--conf=" + os.path.join(test_dir, "prod.py")])
self.complex_config = nikola.config
nikola.main(["--conf=" + os.path.join(test_dir, "config.with+illegal(module)name.characters.py")])
self.complex_filename_config = nikola.config
self.check_base_equality(self.complex_filename_config)
@classmethod
def check_base_equality(self, config):
"""Check whether the specified `config` equals the base config."""
for option in self.simple_config.keys():
if re.match("^[A-Z]+(_[A-Z]+)*$", option) and option != self.metadata_option:
assert self.simple_config[option] == config[option]
def test_simple_config(self):
"""Check whether configuration-files without ineritance are interpreted correctly."""
assert self.simple_config[self.metadata_option]["ID"] == "conf"
def test_inherited_config(self):
"""Check whether configuration-files with ineritance are interpreted correctly."""
self.check_base_equality(config=self.complex_config)
assert self.complex_config[self.metadata_option]["ID"] == "prod"
def test_config_with_illegal_filename(self):
"""Check whether files with illegal module-name characters can be set as config-files, too."""
self.check_base_equality(config=self.complex_filename_config)
assert self.complex_filename_config[self.metadata_option]["ID"] == "illegal"
| # -*- coding: utf-8 -*-
import os
import re
from nikola import __main__ as nikola
from .base import BaseTestCase
class ConfigTest(BaseTestCase):
"""Provides tests for the configuration-file handling."""
@classmethod
def setUpClass(self):
self.metadata_option = "ADDITIONAL_METADATA"
script_root = os.path.dirname(__file__)
test_dir = os.path.join(script_root, "data", "test_config")
nikola.main(["--conf=" + os.path.join(test_dir, "conf.py")])
self.simple_config = nikola.config
nikola.main(["--conf=" + os.path.join(test_dir, "prod.py")])
self.complex_config = nikola.config
nikola.main(["--conf=" + os.path.join(test_dir, "config.with+illegal(module)name.characters.py")])
self.complex_filename_config = nikola.config
self.check_base_equality(self.complex_filename_config)
@classmethod
def check_base_equality(self, config):
"""Check whether the specified `config` equals the base config."""
for option in self.simple_config.keys():
if re.match("^[A-Z]+(_[A-Z]+)*$", option) and option != self.metadata_option:
assert self.simple_config[option] == self.complex_config[option]
def test_simple_config(self):
"""Check whether configuration-files without ineritance are interpreted correctly."""
assert self.simple_config[self.metadata_option]["ID"] == "conf"
def test_inherited_config(self):
"""Check whether configuration-files with ineritance are interpreted correctly."""
self.check_base_equality(config=self.complex_config)
assert self.complex_config[self.metadata_option]["ID"] == "prod"
def test_config_with_illegal_filename(self):
"""Check whether files with illegal module-name characters can be set as config-files, too."""
self.check_base_equality(config=self.complex_filename_config)
assert self.complex_filename_config[self.metadata_option]["ID"] == "illegal"
| mit | Python |
20caf44978ea4a839bb7cc4df7cc4b0b48cf64df | Test customized and disabled messages | rochacbruno/flask_simplelogin,rochacbruno/flask_simplelogin | tests/test_config.py | tests/test_config.py | import pytest
from flask import Flask
from flask_simplelogin import SimpleLogin, Message
class Settings(dict):
"""A dictionary-like object that allows access to its values using the
attribute syntax (as required by Flask.config.from_object)"""
def __init__(self, *args, **kwargs):
super(Settings, self).__init__(*args, **kwargs)
for key, value in kwargs.items():
self.__dict__[key] = value
def create_simple_login(settings):
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret-here'
app.config.from_object(settings)
return SimpleLogin(app)
def test_default_configs_are_loaded(app):
settings = Settings()
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'simplelogin'
assert sl.config['login_url'] == '/login/'
assert sl.config['logout_url'] == '/logout/'
assert sl.config['home_url'] == '/'
def test_custom_configs_are_loaded(app):
settings = Settings(
SIMPLELOGIN_BLUEPRINT='custom_blueprint',
SIMPLELOGIN_LOGIN_URL='/custom_login/',
SIMPLELOGIN_LOGOUT_URL='/custom_logout/',
SIMPLELOGIN_HOME_URL='/custom_home/'
)
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'custom_blueprint'
assert sl.config['login_url'] == '/custom_login/'
assert sl.config['logout_url'] == '/custom_logout/'
assert sl.config['home_url'] == '/custom_home/'
def test_configs_are_loaded_with_backwards_compatibility(client):
settings = Settings(
SIMPLE_LOGIN_BLUEPRINT='custom_blueprint',
SIMPLE_LOGIN_LOGIN_URL='/custom_login/',
SIMPLE_LOGIN_LOGOUT_URL='/custom_logout/',
SIMPLE_LOGIN_HOME_URL='/custom_home/'
)
with pytest.warns(FutureWarning):
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'custom_blueprint'
assert sl.config['login_url'] == '/custom_login/'
assert sl.config['logout_url'] == '/custom_logout/'
assert sl.config['home_url'] == '/custom_home/'
def test_messages_disabled(app):
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret-here'
sl = SimpleLogin(app, messages=False)
for message in sl.messages.values():
assert not message.enabled
def test_messages_customized(app):
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret-here'
custom_message_dict = {
'login_success': Message('login_success_custom_message', category='login_success_custom_category'),
'logout':Message(enabled=False)
}
sl = SimpleLogin(app, messages=custom_message_dict)
# Assert that custom messages and categories have been changed.
assert custom_message_dict['login_success'].text == sl.messages['login_success'].text
assert custom_message_dict['login_success'].category == sl.messages['login_success'].category
assert not sl.messages['logout'].enabled
# Assert that keys not specified remain the same.
assert sl.messages['login_required'].text=='You need to login first' | import pytest
from flask import Flask
from flask_simplelogin import SimpleLogin
class Settings(dict):
"""A dictionary-like object that allows access to its values using the
attribute syntax (as required by Flask.config.from_object)"""
def __init__(self, *args, **kwargs):
super(Settings, self).__init__(*args, **kwargs)
for key, value in kwargs.items():
self.__dict__[key] = value
def create_simple_login(settings):
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret-here'
app.config.from_object(settings)
return SimpleLogin(app)
def test_default_configs_are_loaded(app):
settings = Settings()
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'simplelogin'
assert sl.config['login_url'] == '/login/'
assert sl.config['logout_url'] == '/logout/'
assert sl.config['home_url'] == '/'
def test_custom_configs_are_loaded(app):
settings = Settings(
SIMPLELOGIN_BLUEPRINT='custom_blueprint',
SIMPLELOGIN_LOGIN_URL='/custom_login/',
SIMPLELOGIN_LOGOUT_URL='/custom_logout/',
SIMPLELOGIN_HOME_URL='/custom_home/'
)
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'custom_blueprint'
assert sl.config['login_url'] == '/custom_login/'
assert sl.config['logout_url'] == '/custom_logout/'
assert sl.config['home_url'] == '/custom_home/'
def test_configs_are_loaded_with_backwards_compatibility(client):
settings = Settings(
SIMPLE_LOGIN_BLUEPRINT='custom_blueprint',
SIMPLE_LOGIN_LOGIN_URL='/custom_login/',
SIMPLE_LOGIN_LOGOUT_URL='/custom_logout/',
SIMPLE_LOGIN_HOME_URL='/custom_home/'
)
with pytest.warns(FutureWarning):
sl = create_simple_login(settings)
assert sl.config['blueprint'] == 'custom_blueprint'
assert sl.config['login_url'] == '/custom_login/'
assert sl.config['logout_url'] == '/custom_logout/'
assert sl.config['home_url'] == '/custom_home/'
| mit | Python |
a35799477172d6b8b2b70e92ab254998373b7643 | use config realpath for testing forked | 20c/vaping,20c/vaping | tests/test_daemon.py | tests/test_daemon.py |
import os
import pytest
import munge
import vaping
import vaping.daemon
import vaping.config
def test_plugin_context():
data = {
'1': 'two'
}
cfg = vaping.Config(data=data)
ctx = vaping.daemon.PluginContext(cfg)
assert data == ctx.config.data
# test immutable
ctx.config.data['1'] = 'three'
assert data != ctx.config.data
def test_empty_config_dir(this_dir):
config_dir = os.path.join(this_dir, "data", "config", "empty")
with pytest.raises(ValueError) as excinfo:
vaping.daemon.Vaping(config_dir=config_dir)
assert 'no plugins specified' in str(excinfo)
def test_empty_config_dict():
with pytest.raises(ValueError) as excinfo:
daemon = vaping.daemon.Vaping(config={})
assert 'config was not specified' in str(excinfo)
def test_empty_config_object():
with pytest.raises(ValueError) as excinfo:
vaping.daemon.Vaping(config=vaping.Config())
assert 'config was not specified' in str(excinfo)
def test_config_object(this_dir):
config_dir = os.path.join(this_dir, "data", "config", "fping")
vaping.daemon.Vaping(config=vaping.Config(read=config_dir))
def test_config_dir_not_found():
with pytest.raises(IOError) as excinfo:
daemon = vaping.daemon.Vaping(config_dir="does/not/exist")
assert 'config dir not found' in str(excinfo)
def test_load_config_files(data_config_daemon):
codec = munge.get_codec('yaml')()
data = codec.loads(data_config_daemon.yml)
data['vaping'] = dict(home_dir=os.path.realpath(data_config_daemon.path))
daemon = vaping.daemon.Vaping(config=data)
# print(data_config_daemon.dumps(daemon.config.data))
data_config_daemon.expected["vaping"]["home_dir"] = os.path.realpath(data_config_daemon.expected["vaping"]["home_dir"])
assert data_config_daemon.expected == daemon.config.data
def test_start_stop(this_dir):
config_dir = os.path.join(this_dir, "data", "config", "fping")
daemon = vaping.daemon.Vaping(config=vaping.Config(read=config_dir))
# daemon._main()
# daemon.stop()
|
import os
import pytest
import munge
import vaping
import vaping.daemon
import vaping.config
def test_plugin_context():
data = {
'1': 'two'
}
cfg = vaping.Config(data=data)
ctx = vaping.daemon.PluginContext(cfg)
assert data == ctx.config.data
# test immutable
ctx.config.data['1'] = 'three'
assert data != ctx.config.data
def test_empty_config_dir(this_dir):
config_dir = os.path.join(this_dir, "data", "config", "empty")
with pytest.raises(ValueError) as excinfo:
vaping.daemon.Vaping(config_dir=config_dir)
assert 'no plugins specified' in str(excinfo)
def test_empty_config_dict():
with pytest.raises(ValueError) as excinfo:
daemon = vaping.daemon.Vaping(config={})
assert 'config was not specified' in str(excinfo)
def test_empty_config_object():
with pytest.raises(ValueError) as excinfo:
vaping.daemon.Vaping(config=vaping.Config())
assert 'config was not specified' in str(excinfo)
def test_config_dir_not_found():
with pytest.raises(IOError) as excinfo:
daemon = vaping.daemon.Vaping(config_dir="does/not/exist")
assert 'config dir not found' in str(excinfo)
def test_load_config_files(data_config_daemon):
codec = munge.get_codec('yaml')()
data = codec.loads(data_config_daemon.yml)
data['vaping'] = dict(home_dir=os.path.relpath(data_config_daemon.path))
daemon = vaping.daemon.Vaping(config=data)
# print(data_config_daemon.dumps(daemon.config.data))
assert data_config_daemon.expected == daemon.config.data
| apache-2.0 | Python |
5f7e22a6e11a3d5ecea0e413b40559a8fc169941 | Add bunch more tests. | ionelmc/pytest-benchmark,thedrow/pytest-benchmark,aldanor/pytest-benchmark,SectorLabs/pytest-benchmark | tests/test_manual.py | tests/test_manual.py | from pytest import raises
def test_single(benchmark):
runs = []
benchmark.manual(runs.append, args=[123])
assert runs == [123]
def test_setup(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
benchmark.manual(stuff, setup=setup)
assert runs == [(1, 2)]
def test_args_kwargs(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
benchmark.manual(stuff, args=[1], kwargs={"bar": 2})
assert runs == [(1, 2)]
def test_iterations(benchmark):
runs = []
benchmark.manual(runs.append, args=[1], iterations=10)
assert runs == [1] * 11
def test_rounds_iterations(benchmark):
runs = []
benchmark.manual(runs.append, args=[1], iterations=10, rounds=15)
assert runs == [1] * 151
def test_rounds(benchmark):
runs = []
benchmark.manual(runs.append, args=[1], rounds=15)
assert runs == [1] * 15
def test_setup_many_rounds(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
benchmark.manual(stuff, setup=setup, rounds=10)
assert runs == [(1, 2)] * 10
def test_cant_use_both_args_and_setup_with_return(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
raises(TypeError, benchmark.manual, stuff, setup=setup, args=[123])
assert runs == []
def test_can_use_both_args_and_setup_without_return(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
benchmark.manual(stuff, setup=lambda: None, args=[123])
assert runs == [(123, 123)]
def test_cant_use_setup_with_many_iterations(benchmark):
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=2)
def test_iterations_must_be_positive_int(benchmark):
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=0)
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=-1)
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations="asdf")
| from pytest import raises
def test_single(benchmark):
runs = []
benchmark.manual(runs.append, args=[123])
assert runs == [123]
def test_setup(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
benchmark.manual(stuff, setup=setup)
assert runs == [(1, 2)]
def test_setup_many_rounds(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
benchmark.manual(stuff, setup=setup, rounds=10)
assert runs == [(1, 2)] * 10
def test_cant_use_both_args_and_setup_with_return(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
def setup():
return [1], {"bar": 2}
raises(TypeError, benchmark.manual, stuff, setup=setup, args=[123])
assert runs == []
def test_can_use_both_args_and_setup_without_return(benchmark):
runs = []
def stuff(foo, bar=123):
runs.append((foo, bar))
benchmark.manual(stuff, setup=lambda: None, args=[123])
assert runs == [(123, 123)]
def test_cant_use_setup_with_many_iterations(benchmark):
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=2)
def test_iterations_must_be_positive_int(benchmark):
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=0)
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations=-1)
raises(ValueError, benchmark.manual, None, setup=lambda: None, iterations="asdf")
| bsd-2-clause | Python |
79901400dfda1de507a667c5cab1df6e5163600e | Update test_models.py #41 | 7pairs/twingo,7pairs/twingo | tests/test_models.py | tests/test_models.py | # -*- coding: utf-8 -*-
#
# Copyright 2015-2019 Jun-ya HASEBA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import factory
from django.contrib.auth.models import User
from django.test import TestCase
from twingo.models import Profile
class UserFactory(factory.DjangoModelFactory):
"""
Userのテストデータを作成するファクトリー。
"""
class Meta:
model = User
username = factory.Sequence(lambda x: 'username_%02d' % x)
first_name = factory.Sequence(lambda x: 'first_name_%02d' % x)
last_name = factory.Sequence(lambda x: 'last_name_%02d' % x)
email = factory.Sequence(lambda x: 'user_%02d@dummy.com' % x)
password = factory.Sequence(lambda x: 'password_%02d' % x)
is_staff = False
is_active = True
is_superuser = False
class ProfileFactory(factory.DjangoModelFactory):
"""
Profileのテストデータを作成するファクトリー。
"""
class Meta:
model = Profile
twitter_id = factory.Sequence(lambda x: x)
screen_name = factory.Sequence(lambda x: 'screen_name_%02d' % x)
name = factory.Sequence(lambda x: 'name_%02d' % x)
description = None
url = None
profile_image_url = None
user = factory.LazyAttribute(lambda x: UserFactory())
class ModelsTest(TestCase):
"""
models.pyに対するテストコード。
"""
def test_pre_save_profile_01(self):
"""
[対象] pre_save_profile() : No.01
[条件] 任意入力の項目にNoneを設定する。
[結果] 該当の項目が空文字として保存される。
"""
profile = ProfileFactory()
self.assertEqual('', profile.description)
self.assertEqual('', profile.url)
self.assertEqual('', profile.profile_image_url)
| # -*- coding: utf-8 -*-
#
# Copyright 2015 Jun-ya HASEBA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import factory
from django.contrib.auth.models import User
from django.test import TestCase
from twingo.models import Profile
class UserFactory(factory.DjangoModelFactory):
"""
Userのテストデータを作成するファクトリー。
"""
class Meta:
model = User
username = factory.Sequence(lambda x: 'username_%02d' % x)
first_name = factory.Sequence(lambda x: 'first_name_%02d' % x)
last_name = factory.Sequence(lambda x: 'last_name_%02d' % x)
email = factory.Sequence(lambda x: 'user_%02d@dummy.com' % x)
password = factory.Sequence(lambda x: 'password_%02d' % x)
is_staff = False
is_active = True
is_superuser = False
class ProfileFactory(factory.DjangoModelFactory):
"""
Profileのテストデータを作成するファクトリー。
"""
class Meta:
model = Profile
twitter_id = factory.Sequence(lambda x: x)
screen_name = factory.Sequence(lambda x: 'screen_name_%02d' % x)
name = factory.Sequence(lambda x: 'name_%02d' % x)
description = None
url = None
profile_image_url = None
user = factory.LazyAttribute(lambda x: UserFactory())
class ModelsTest(TestCase):
"""
models.pyに対するテストコード。
"""
def test_pre_save_profile_01(self):
"""
[対象] pre_save_profile() : No.01
[条件] 任意入力の項目にNoneを設定する。
[結果] 該当の項目が空文字として保存される。
"""
profile = ProfileFactory()
self.assertEqual('', profile.description)
self.assertEqual('', profile.url)
self.assertEqual('', profile.profile_image_url)
| apache-2.0 | Python |
2a1622fcaec83ca496fa088299dd036e9d46961c | Remove boost from user mapping | cutoffthetop/recommender,cutoffthetop/recommender,cutoffthetop/recommender | scripts/usync.py | scripts/usync.py | # -*- coding: utf-8 -*-
"""
Copyright: (c) 2013 by Nicolas Drebenstedt.
License: BSD, see LICENSE for more details.
"""
from datetime import date
from time import sleep
from elasticsearch.client import IndicesClient
from elasticsearch import Elasticsearch
dest = Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}])
src = Elasticsearch(hosts=[{'host': '217.13.68.236', 'port': 9200}])
index = '%s-%s' % date.today().isocalendar()[:2]
start = 0
def main():
global start
hits = src.search(
from_=start,
size=1,
doc_type='user'
)
start += 1
user = hits['hits']['hits'][0]
status = dest.index(
index=index,
doc_type='user',
id=user['_id'],
body=user['_source'],
)
print user['_id'], status.get('created', False)
if __name__ == '__main__':
body = {
'user': {
'properties': {
'events': {'type': 'nested'},
'rank': {'type': 'float'}
},
'_timestamp': {
'enabled': True
}
}
}
ic = IndicesClient(dest)
if not ic.exists(index):
ic.create(index)
if not ic.exists_type(index=index, doc_type='user'):
ic.put_mapping(
index=index,
doc_type='user',
body=body
)
while 1:
try:
main()
except KeyboardInterrupt:
raise SystemExit(0)
except:
continue
finally:
sleep(0.02)
| # -*- coding: utf-8 -*-
"""
Copyright: (c) 2013 by Nicolas Drebenstedt.
License: BSD, see LICENSE for more details.
"""
from datetime import date
from time import sleep
from elasticsearch.client import IndicesClient
from elasticsearch import Elasticsearch
dest = Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}])
src = Elasticsearch(hosts=[{'host': '217.13.68.236', 'port': 9200}])
index = '%s-%s' % date.today().isocalendar()[:2]
start = 0
def main():
global start
hits = src.search(
from_=start,
size=1,
doc_type='user'
)
start += 1
user = hits['hits']['hits'][0]
status = dest.index(
index=index,
doc_type='user',
id=user['_id'],
body=user['_source'],
)
print user['_id'], status.get('created', False)
if __name__ == '__main__':
body = {
'user': {
'properties': {
'events': {'type': 'nested'},
'rank': {'type': 'float', 'store': 'yes'}
},
'_boost': {
'name': 'rank',
'null_value': 0.1
},
'_timestamp': {
'enabled': True
}
}
}
ic = IndicesClient(dest)
if not ic.exists(index):
ic.create(index)
if not ic.exists_type(index=index, doc_type='user'):
ic.put_mapping(
index=index,
ignore_conflicts=True,
doc_type='user',
body=body
)
while 1:
try:
main()
except KeyboardInterrupt:
raise SystemExit(0)
except:
continue
finally:
sleep(0.02)
| bsd-2-clause | Python |
e4b8ab1ac8d15037e71538dda8a3f73daed5972e | Add NOQA comment to version imports in project init | jmagnusson/pkgtmpl | pkgtmpl/python/pkgname/__init__.py | pkgtmpl/python/pkgname/__init__.py | from .metadata import __version_info__, __version__ # NOQA
| from .metadata import __version_info__, __version__
| bsd-2-clause | Python |
d1ea64d6645f60df38221cbd194c26dff9686dcd | Handle logging unicode messages in python2. | guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt | scripts/utils.py | scripts/utils.py | import sys
import hashlib
def e(s):
if type(s) == str:
return str
return s.encode('utf-8')
def d(s):
if type(s) == unicode:
return s
return unicode(s, 'utf-8')
def mkid(s):
return hashlib.sha1(e(s)).hexdigest()[:2*4]
class Logger(object):
def __init__(self):
self._mode = 'INFO'
def progress(self, message):
message = e(message)
if not sys.stderr.isatty():
return
if self._mode == 'PROGRESS':
print >>sys.stderr, '\r',
print >>sys.stderr, message,
self._mode = 'PROGRESS'
def info(self, message):
message = e(message)
if self._mode == 'PROGRESS':
print >>sys.stderr
print >>sys.stderr, message
self._mode = 'INFO'
| import sys
import hashlib
def e(s):
if type(s) == str:
return str
return s.encode('utf-8')
def d(s):
if type(s) == unicode:
return s
return unicode(s, 'utf-8')
def mkid(s):
return hashlib.sha1(e(s)).hexdigest()[:2*4]
class Logger(object):
def __init__(self):
self._mode = 'INFO'
def progress(self, message):
if not sys.stderr.isatty():
return
if self._mode == 'PROGRESS':
print >>sys.stderr, '\r',
print >>sys.stderr, message,
self._mode = 'PROGRESS'
def info(self, message):
if self._mode == 'PROGRESS':
print >>sys.stderr
print >>sys.stderr, message
self._mode = 'INFO'
| mit | Python |
01b0d4066cd709e75585da1b36e74f9b8f412669 | Increase poll timeout (#63) | bjustin-ibm/openwhisk-package-kafka,dubeejw/openwhisk-package-kafka,bjustin-ibm/openwhisk-package-kafka,bjustin-ibm/openwhisk-package-kafka,dubeejw/openwhisk-package-kafka,dubeejw/openwhisk-package-kafka | provider/thedoctor.py | provider/thedoctor.py | import logging
import time
from consumer import Consumer
from consumercollection import ConsumerCollection
from threading import Thread
class TheDoctor (Thread):
# maximum time to allow a consumer to not successfully poll() before restarting
poll_timeout_seconds = 20
# interval between the Doctor making rounds
sleepy_time_seconds = 2
def __init__(self, consumerCollection):
Thread.__init__(self)
self.daemon = True
self.consumerCollection = consumerCollection
def run(self):
logging.info('[Doctor] The Doctor is in!')
while True:
consumers = self.consumerCollection.getCopyForRead()
for consumerId in consumers:
consumer = consumers[consumerId]
logging.debug('[Doctor] [{}] Consumer is in state: {}'.format(consumerId, consumer.currentState()))
if consumer.currentState() is Consumer.State.Dead and consumer.desiredState() is Consumer.State.Running:
# well this is unexpected...
logging.error('[Doctor][{}] Consumer is dead, but should be alive!'.format(consumerId))
consumer.restart()
elif consumer.currentState() is Consumer.State.Dead and consumer.desiredState() is Consumer.State.Dead:
# Bring out yer dead...
logging.info('[{}] Removing dead consumer from the collection.'.format(consumer.trigger))
self.consumerCollection.removeConsumerForTrigger(consumer.trigger)
elif consumer.secondsSinceLastPoll() > self.poll_timeout_seconds and consumer.desiredState() is Consumer.State.Running:
# there seems to be an issue with the kafka-python client where it gets into an
# error-handling loop. This causes poll() to never complete, but also does not
# throw an exception.
logging.error('[Doctor][{}] Consumer timed-out, but should be alive! Restarting consumer.'.format(consumerId))
consumer.restart()
time.sleep(self.sleepy_time_seconds)
| import logging
import time
from consumer import Consumer
from consumercollection import ConsumerCollection
from threading import Thread
class TheDoctor (Thread):
# maximum time to allow a consumer to not successfully poll() before restarting
poll_timeout_seconds = 2
# interval between the Doctor making rounds
sleepy_time_seconds = 2
def __init__(self, consumerCollection):
Thread.__init__(self)
self.daemon = True
self.consumerCollection = consumerCollection
def run(self):
logging.info('[Doctor] The Doctor is in!')
while True:
consumers = self.consumerCollection.getCopyForRead()
for consumerId in consumers:
consumer = consumers[consumerId]
logging.debug('[Doctor] [{}] Consumer is in state: {}'.format(consumerId, consumer.currentState()))
if consumer.currentState() is Consumer.State.Dead and consumer.desiredState() is Consumer.State.Running:
# well this is unexpected...
logging.error('[Doctor][{}] Consumer is dead, but should be alive!'.format(consumerId))
consumer.restart()
elif consumer.currentState() is Consumer.State.Dead and consumer.desiredState() is Consumer.State.Dead:
# Bring out yer dead...
logging.info('[{}] Removing dead consumer from the collection.'.format(consumer.trigger))
self.consumerCollection.removeConsumerForTrigger(consumer.trigger)
elif consumer.secondsSinceLastPoll() > self.poll_timeout_seconds and consumer.desiredState() is Consumer.State.Running:
# there seems to be an issue with the kafka-python client where it gets into an
# error-handling loop. This causes poll() to never complete, but also does not
# throw an exception.
logging.error('[Doctor][{}] Consumer timed-out, but should be alive! Restarting consumer.'.format(consumerId))
consumer.restart()
time.sleep(self.sleepy_time_seconds)
| apache-2.0 | Python |
4468ff9e38fcb846ac1f07fed26c1bbfb64c88c8 | test choices render | satyrius/cmsplugin-polls,satyrius/cmsplugin-polls | tests/test_render.py | tests/test_render.py | from bs4 import BeautifulSoup
from cms.api import add_plugin
from cms.models import Placeholder
from django.http import HttpRequest
from django.template import Context, RequestContext
from django.test import TestCase
from cmsplugin_polls.models import Poll
from cmsplugin_polls.cms_plugins import PollPlugin
class PollPluginRenderTest(TestCase):
def setUp(self):
self.placeholder = Placeholder.objects.create(slot='test')
self.poll = Poll.objects.create(question='Do you like my plugin?')
def add_choice(self, text):
return self.poll.choice_set.create(text=text)
def add_plugin(self, **kwargs):
model_instance = add_plugin(
self.placeholder,
PollPlugin,
'en',
**kwargs)
return model_instance
def render(self, plugin, ctx=None):
return plugin.render_plugin(ctx or Context())
def test_template_render(self):
plugin = self.add_plugin(poll=self.poll)
# Switch on template debug to catch all template errors
with self.settings(TEMPLATE_DEBUG=True):
self.render(plugin)
def test_form_action(self):
plugin = self.add_plugin(poll=self.poll)
html = self.render(plugin)
soup = BeautifulSoup(html)
self.assertEqual(soup.form['action'], '/polls/vote')
self.assertEqual(soup.form['method'], 'POST')
def get_request(self, path=''):
request = HttpRequest()
request.current_page = None
request.path = path
return request
def test_form_hidden_fields(self):
plugin = self.add_plugin(poll=self.poll)
context = RequestContext(self.get_request('/foo/bar/'))
html = self.render(plugin, ctx=context)
soup = BeautifulSoup(html)
hidden = {i['name']: i for i in soup.form.find_all(type='hidden')}
self.assertIn('poll', hidden)
self.assertEqual(int(hidden['poll']['value']), self.poll.id)
self.assertIn('next', hidden)
self.assertEqual(hidden['next']['value'], '/foo/bar/')
def test_choices(self):
self.add_choice('Yes')
self.add_choice('No')
self.add_choice('This is not the choice you are looking for')
plugin = self.add_plugin(poll=self.poll)
html = self.render(plugin)
soup = BeautifulSoup(html)
choices = {int(i['value']) for i in soup.form.find_all(type='radio')}
self.assertEqual(
choices, set(self.poll.choice_set.values_list('id', flat=True)))
| from bs4 import BeautifulSoup
from cms.api import add_plugin
from cms.models import Placeholder
from django.http import HttpRequest
from django.template import Context, RequestContext
from django.test import TestCase
from cmsplugin_polls.models import Poll
from cmsplugin_polls.cms_plugins import PollPlugin
class PollPluginRenderTest(TestCase):
def setUp(self):
self.placeholder = Placeholder.objects.create(slot='test')
self.poll = Poll.objects.create(question='Do you like my plugin?')
def add_plugin(self, **kwargs):
model_instance = add_plugin(
self.placeholder,
PollPlugin,
'en',
**kwargs)
return model_instance
def render(self, plugin, ctx=None):
return plugin.render_plugin(ctx or Context())
def test_template_render(self):
plugin = self.add_plugin(poll=self.poll)
# Switch on template debug to catch all template errors
with self.settings(TEMPLATE_DEBUG=True):
self.render(plugin)
def test_form_action(self):
plugin = self.add_plugin(poll=self.poll)
html = self.render(plugin)
soup = BeautifulSoup(html)
self.assertEqual(soup.form['action'], '/polls/vote')
self.assertEqual(soup.form['method'], 'POST')
def get_request(self, path=''):
request = HttpRequest()
request.current_page = None
request.path = path
return request
def test_form_hidden_fields(self):
plugin = self.add_plugin(poll=self.poll)
context = RequestContext(self.get_request('/foo/bar/'))
html = self.render(plugin, ctx=context)
soup = BeautifulSoup(html)
hidden = {i['name']: i for i in soup.form.find_all(type='hidden')}
self.assertIn('poll', hidden)
self.assertEqual(int(hidden['poll']['value']), self.poll.id)
self.assertIn('next', hidden)
self.assertEqual(hidden['next']['value'], '/foo/bar/')
| mit | Python |
84a99e9557a323e094c360e748c7d7042980fc59 | Test PEP8 integration into Atom | davidgasquez/tip | tests/test_sample.py | tests/test_sample.py | import unittest
from tip.algorithms.dummy import dummy_add
class TestDummyAdd(unittest.TestCase):
def test_lcm(self):
r = dummy_add(2, 2)
self.assertEqual(r, 4)
| import unittest
from tip.algorithms.dummy import dummy_add
class TestDummyAdd(unittest.TestCase):
def test_lcm(self):
r = dummy_add(2, 2)
self.assertEqual(r, 4)
| unlicense | Python |
6e97ca8107a16da42b7e60711e5112e8a2b7b22d | add functions to extract features from textual data | DucAnhPhi/LinguisticAnalysis | dataset.py | dataset.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 25 18:23:39 2017
Prepare datasets for neural_network.py
@author: duc
"""
import numpy as np
import utils
import copy
from nltk.corpus import cmudict
import linguistic_analysis as la
import flesch_kincaid as fk
import neural_network as nn
def get_xor():
input = np.array([[0,0],[1,0],[0,1],[1,1]])
output = np.array([[0],[1],[1],[0]])
return(input, output)
def get_combined_keywords(tweets1, tweets2):
# get combined list of top 25 most used keywords
keywords1 = la.get_most_frequent_keywords(tweets1)
keywords2 = la.get_most_frequent_keywords(tweets2)
# get rid of tuples
for i,tuple in enumerate(keywords1):
keywords1[i] = tuple[0]
for i,tuple in enumerate(keywords2):
keywords2[i] = tuple[0]
keywords1 = set(keywords1)
keywords2 = set(keywords2)
# combined keywords
combinedKeywords = keywords1.union(keywords2)
return combinedKeywords
def get_keywords_count(tweet, dictionary):
keywords = copy.copy(dictionary)
for sentence in tweet:
for word in sentence:
if word in keywords:
keywords[word] += 1
return keywords
def extract_features(tweets, preprocessedTweets, keywordsCount, pronDict):
extracted = []
for index, tweet in enumerate(preprocessedTweets):
sentLength = la.get_sentence_length(tweet)
exclMarks = la.get_exclamation_marks(tweets[index])
gradeLvl = fk.get_flesch_grade_level(tweet, pronDict)
keyCount = get_keywords_count(tweet, keywordsCount)
# put all features together
features = [ sentLength, exclMarks, gradeLvl ]
for key in keyCount:
features.append(keyCount[key])
extracted.append(features)
return extracted
def get_tweet_data(person1, person2):
# get tweets
tweets1 = la.get_max_amount_tweets(person1)
tweets2 = la.get_max_amount_tweets(person2)
# remove retweets
tweets1 = utils.remove_retweets(tweets1)
tweets2 = utils.remove_retweets(tweets2)
# preprocessed tweets
preprocessedTweets1 = [ utils.preprocess(tweet) for tweet in tweets1 if len(utils.preprocess(tweet)) ]
preprocessedTweets2 = [ utils.preprocess(tweet) for tweet in tweets2 if len(utils.preprocess(tweet)) ]
# extract features
keywords = get_combined_keywords(preprocessedTweets1, preprocessedTweets2)
keywordsCount = {keyword:0 for keyword in keywords}
pronouncingDict = cmudict.dict()
data1 = np.array(extract_features(tweets1, preprocessedTweets1, keywordsCount, pronouncingDict))
data2 = np.array(extract_features(tweets2, preprocessedTweets2, keywordsCount, pronouncingDict))
# label data
data1 = nn.concat_bias(1, data1)
data2 = nn.concat_bias(0, data2)
print(data1, "\n")
print(data2, "\n")
# concatenate vertically
data = np.r_[data1, data2]
if __name__ == '__main__':
np.set_printoptions(threshold = 10000, precision=4, suppress=True)
get_tweet_data("realDonaldTrump", "HillaryClinton")
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 25 18:23:39 2017
Prepare datasets for neural_network.py
@author: duc
"""
import numpy as np
def getXOR():
input = np.array([[0,0],[1,0],[0,1],[1,1]])
output = np.array([[0],[1],[1],[0]])
return(input, output)
| mit | Python |
dec8f4ad43b2852bed15ff6a88890262d0ae126c | fix AttributeError | jermowery/xos,opencord/xos,wathsalav/xos,cboling/xos,cboling/xos,jermowery/xos,jermowery/xos,zdw/xos,zdw/xos,open-cloud/xos,xmaruto/mcord,cboling/xos,wathsalav/xos,wathsalav/xos,opencord/xos,open-cloud/xos,open-cloud/xos,cboling/xos,jermowery/xos,zdw/xos,cboling/xos,wathsalav/xos,xmaruto/mcord,zdw/xos,opencord/xos,xmaruto/mcord,xmaruto/mcord | plstackapi/planetstack/api/keys.py | plstackapi/planetstack/api/keys.py | from plstackapi.openstack.client import OpenStackClient
from plstackapi.openstack.driver import OpenStackDriver
from plstackapi.planetstack.api.auth import auth_check
from plstackapi.planetstack.models import Key, User
def lookup_user(fields):
user = None
if 'user' in fields:
if isinstance(fields['user'], int):
users = User.objects.filter(id=fields['user'])
else:
users = User.objects.filter(email=fields['user'])
if users:
user = users[0]
return user
def add_key(auth, fields):
driver = OpenStackDriver(client = auth_check(auth))
user = lookup_user(fields)
if user: fields['user'] = user
key = Key(**fields)
nova_fields = {'name': key.name,
'key': key.key}
nova_key = driver.create_keypair(**nova_fields)
key.save()
return key
def update_key(auth, id, **fields):
return
def delete_key(auth, filter={}):
driver = OpenStackDriver(client = auth_check(auth))
keys = Key.objects.filter(**filter)
for key in keys:
driver.delete_keypair(name=key.name)
key.delete()
return 1
def get_keys(auth, filter={}):
client = auth_check(auth)
keys = Key.objects.filter(**filter)
return keys
| from plstackapi.openstack.client import OpenStackClient
from plstackapi.openstack.driver import OpenStackDriver
from plstackapi.planetstack.api.auth import auth_check
from plstackapi.planetstack.models import Key, User
def lookup_user(fields):
user = None
if 'user' in fields:
if isinstance(fields['user'], int):
users = User.objects.filter(id=fields['user'])
else:
users = User.objects.filter(email=fields['user'])
if users:
user = users[0]
return user
def add_key(auth, fields):
driver = OpenStackDriver(client = auth_check(auth))
user = lookup_user(fields)
if user: fields['user'] = user
key = Key(**fields)
nova_fields = {'name': key.name,
'key': key.key}
nova_key = driver.create_keypair(**nova_fields)
key.save()
return key
def update_key(auth, id, **fields):
return
def delete_key(auth, filter={}):
driver = OpenStackDriver(client = auth_check(auth))
keys = Key.objects.filter(**filter)
for key in keys:
driver.delete_key(name=key.name)
key.delete()
return 1
def get_keys(auth, filter={}):
client = auth_check(auth)
keys = Key.objects.filter(**filter)
return keys
| apache-2.0 | Python |
fad7441706822f2070f6bc1a94e3bb6b42647e4c | Add arg parsing | mediachain/mediachain-client,mediachain/mediachain-client | mediachain/reader/main.py | mediachain/reader/main.py | import sys
import argparse
import os
import mediachain.api
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
parser = argparse.ArgumentParser(
prog='mediachain-reader',
description='Mediachain Reader CLI'
)
subparsers = parser.add_subparsers(help='Mediachain Reader SubCommands',
dest='subcommand')
get_parser = subparsers.add_parser(
'get',
help='Get a revision chain for a given artefact/entity id'
)
get_parser.add_argument('object_id',
type=str,
help='The id of the artefact/entity to fetch')
ns = parser.parse_args(arguments)
fn = getattr(mediachain.api, ns.subcommand)
fn(ns)
if __name__ == "__main__":
main()
| import sys
import argparse
import os
def main(arguments=None):
if arguments == None:
arguments = sys.argv[1:]
if __name__ == "__main__":
main()
| mit | Python |
6d25f4ae41f6eb0459ed01cec526f57f298b4733 | Add list_routes command to application.py | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api | application.py | application.py | #!/usr/bin/env python
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
@manager.command
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
print "{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| mit | Python |
0858a56c31cbab9dcecf6388a26b0ae65e501d16 | add custom method to log the gist exceptions | adnedelcu/SyncSettings,mfuentesg/SyncSettings | sync_settings/sync_settings_manager.py | sync_settings/sync_settings_manager.py | # -*- coding: utf-8 -*-
import sublime, os
from .logger import Logger
class SyncSettingsManager:
settingsFilename = 'SyncSettings.sublime-settings'
files = [
"Package Control.merged-ca-bundle",
"Package Control.system-ca-bundle",
"Package Control.user-ca-bundle",
"Package Control.sublime-settings",
"Preferences.sublime-settings",
"Package Control.last-run",
"Default (OSX).sublime-keymap",
"Default (Windows).sublime-keymap",
"Default (Linux).sublime-keymap"
]
@staticmethod
def settings (key = None, newValue = None):
settings = sublime.load_settings(SyncSettingsManager.settingsFilename)
if not key is None and not newValue is None:
settings.set(key, newValue)
elif not key is None and newValue is None:
return settings.get(key)
else:
return settings
@staticmethod
def getFiles ():
excludedFiles = SyncSettingsManager.settings('excluded_files')
return SyncSettingsManager.excludeValues(SyncSettingsManager.files, excludedFiles)
@staticmethod
def getContentFiles ():
r = {}
for f in SyncSettingsManager.getFiles():
fullPath = SyncSettingsManager.getPackagesPath(f)
if os.path.isfile(fullPath) and os.path.exists(fullPath):
try:
content = open(fullPath, 'r').read()
r.update({
f: {
'content': content
}
})
except Exception as e:
Logger.log(str(e), Logger.MESSAGE_ERROR_TYPE)
return r
@staticmethod
def getPackagesPath (filename = None):
path = os.path.join(sublime.packages_path(), 'User')
if not filename is None:
return os.path.join(path, filename)
return path
@staticmethod
def getSettingsFilename ():
return SyncSettingsManager.settingsFilename
@staticmethod
def excludeValues (l, e):
try:
for el in e:
l.remove(el)
except Exception as e:
Logger.log(str(e), Logger.MESSAGE_ERROR_TYPE)
return l
@staticmethod
def showMessageAndLog (message, error = True):
m = l = ''
if isinstance(message, Exception):
message = message.toJSON()
m = message.get('app_message')
l = message.get('error_description')+ ', File: ' + message.get('filename') +' - Line: ' + message.get('line')
elif isinstance(message, str):
m = l = message
sublime.status_message('Sync Settings: ' + m)
Logger.log(l, Logger.MESSAGE_ERROR_TYPE if error else Logger.MESSAGE_INFO_TYPE)
| # -*- coding: utf-8 -*-
import sublime, os
class SyncSettingsManager:
settingsFilename = 'SyncSettings.sublime-settings'
files = [
"Package Control.merged-ca-bundle",
"Package Control.system-ca-bundle",
"Package Control.user-ca-bundle",
"Package Control.sublime-settings",
"Preferences.sublime-settings",
"Package Control.last-run",
"Default (OSX).sublime-keymap",
"Default (Windows).sublime-keymap",
"Default (Linux).sublime-keymap"
]
@staticmethod
def settings (key = None, newValue = None):
settings = sublime.load_settings(SyncSettingsManager.settingsFilename)
if not key is None and not newValue is None:
settings.set(key, newValue)
elif not key is None and newValue is None:
return settings.get(key)
else:
return settings
@staticmethod
def getFiles ():
excludedFiles = SyncSettingsManager.settings('excluded_files')
return SyncSettingsManager.excludeValues(SyncSettingsManager.files, excludedFiles)
@staticmethod
def getContentFiles ():
r = {}
for f in SyncSettingsManager.getFiles():
fullPath = SyncSettingsManager.getPackagesPath(f)
if os.path.isfile(fullPath) and os.path.exists(fullPath):
content = open(fullPath, 'r').read()
r.update({
f: {
'content': content
}
})
return r
@staticmethod
def getPackagesPath (filename = None):
path = os.path.join(sublime.packages_path(), 'User')
if not filename is None:
return os.path.join(path, filename)
return path
@staticmethod
def getSettingsFilename ():
return SyncSettingsManager.settingsFilename
@staticmethod
def excludeValues (l, e):
try:
for el in e:
l.remove(el)
except Exception as e:
pass
return l
| mit | Python |
8809c8d0027ad5c034a9101fe1e27d29971b3543 | Combine two timestamp gt/lt assertions. | alphagov/tagalog,alphagov/tagalog | test/helpers.py | test/helpers.py | import os
from mock import *
from nose.tools import *
from datetime import datetime
HERE = os.path.dirname(__file__)
def fixture(*path):
return open(fixture_path(*path), 'rb')
def fixture_path(*path):
return os.path.join(HERE, 'fixtures', *path)
class TimestampRange:
"""
Helper for timestamp creations which can't be mocked. Call ``start()`
and ``finish()`` either side of the method which creates the timestamp.
Then call ``assert_inrange(string)`` with the timestamp string that you
want to test. It will assert that it is within the upper and lower bands
expected. Times should be ISO8601 UTC.
"""
def __init__(self):
self.lower = None
self.upper = None
def start(self):
self.lower = datetime.utcnow()
def finish(self):
self.upper = datetime.utcnow()
def assert_inrange(self, ts_string):
ts = datetime.strptime(ts_string, '%Y-%m-%dT%H:%M:%S.%fZ')
assert_true(self.lower < ts < self.upper)
| import os
from mock import *
from nose.tools import *
from datetime import datetime
HERE = os.path.dirname(__file__)
def fixture(*path):
return open(fixture_path(*path), 'rb')
def fixture_path(*path):
return os.path.join(HERE, 'fixtures', *path)
class TimestampRange:
"""
Helper for timestamp creations which can't be mocked. Call ``start()`
and ``finish()`` either side of the method which creates the timestamp.
Then call ``assert_inrange(string)`` with the timestamp string that you
want to test. It will assert that it is within the upper and lower bands
expected. Times should be ISO8601 UTC.
"""
def __init__(self):
self.lower = None
self.upper = None
def start(self):
self.lower = datetime.utcnow()
def finish(self):
self.upper = datetime.utcnow()
def assert_inrange(self, ts_string):
ts = datetime.strptime(ts_string, '%Y-%m-%dT%H:%M:%S.%fZ')
assert(self.lower < ts)
assert(self.upper > ts)
| mit | Python |
74b7b8f9ea326a4293cd7790632138ed4e741500 | add device authentication support | bboozzoo/mender-backend-cli | mender/client/__init__.py | mender/client/__init__.py | # The MIT License (MIT)
#
# Copyright (c) 2016 Maciej Borzecki
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import requests
import requests.auth
from requests import Session as ApiClient
API_URL = '/api/integrations/0.1/'
API_DEVICES_URL = '/api/devices/0.1/'
def add_url_path(base, path):
if not base.endswith('/'):
base += '/'
if path.startswith('/'):
path = path[1:]
return base + path
def service_path(service):
return add_url_path(API_URL, service)
def admissions_url(host, path=''):
ap = add_url_path(host, service_path('/admission/devices'))
if path:
return add_url_path(ap, path)
return ap
def deployments_url(host, path=''):
ap = add_url_path(host, service_path('/deployments/deployments'))
if path:
return add_url_path(ap, path)
return ap
def images_url(host, path=''):
ap = add_url_path(host, service_path('/deployments/images'))
if path:
return add_url_path(ap, path)
return ap
def inventory_url(host, path=''):
ap = add_url_path(host, service_path('/inventory'))
if path:
return add_url_path(ap, path)
return ap
def device_url(host, path=''):
ap = add_url_path(host, API_DEVICES_URL)
if path:
return add_url_path(ap, path)
return ap
class ClientError(requests.exceptions.RequestException):
"""Wrapper for client errors"""
pass
class DeviceTokenAuth(requests.auth.AuthBase):
"""Perform device authentication using device token"""
def __init__(self, token):
self.token = token
def __call__(self, r):
r.headers['Authorization'] = 'Bearer {}'.format(self.token)
return r
| # The MIT License (MIT)
#
# Copyright (c) 2016 Maciej Borzecki
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import requests
from requests import Session as ApiClient
API_URL = '/api/integrations/0.1/'
API_DEVICES_URL = '/api/devices/0.1/'
def add_url_path(base, path):
if not base.endswith('/'):
base += '/'
if path.startswith('/'):
path = path[1:]
return base + path
def service_path(service):
return add_url_path(API_URL, service)
def admissions_url(host, path=''):
ap = add_url_path(host, service_path('/admission/devices'))
if path:
return add_url_path(ap, path)
return ap
def deployments_url(host, path=''):
ap = add_url_path(host, service_path('/deployments/deployments'))
if path:
return add_url_path(ap, path)
return ap
def images_url(host, path=''):
ap = add_url_path(host, service_path('/deployments/images'))
if path:
return add_url_path(ap, path)
return ap
def inventory_url(host, path=''):
ap = add_url_path(host, service_path('/inventory'))
if path:
return add_url_path(ap, path)
return ap
def device_url(host, path=''):
ap = add_url_path(host, API_DEVICES_URL)
if path:
return add_url_path(ap, path)
return ap
class ClientError(requests.exceptions.RequestException):
"""Wrapper for client errors"""
pass
| mit | Python |
a5d7bc1e7d1b5b1bef737aa205cfd40d34efc154 | Fix test_open_home | frafra/is-osm-uptodate,frafra/is-osm-uptodate | tests/test_webapp.py | tests/test_webapp.py | from common import URL
from parameterized import parameterized
from seleniumbase import BaseCase
from seleniumbase.config import settings
buttons = "creation lastedit revisions frequency".split()
class TestWebapp(BaseCase):
def test_open_home(self):
self.set_window_size("768", "432")
self.open(URL)
self.assert_text(
"least recently updated", timeout=settings.EXTREME_TIMEOUT
)
def test_js_errors(self):
self.test_open_home()
self.assert_no_js_errors()
@parameterized.expand(buttons)
def test_tabs(self, button):
self.test_open_home()
self.execute_script(
'document.getElementById("' + button + '").click()'
)
self.assert_no_js_errors()
| from common import URL
from parameterized import parameterized
from seleniumbase import BaseCase
from seleniumbase.config import settings
buttons = "creation lastedit revisions frequency".split()
class TestWebapp(BaseCase):
def test_open_home(self):
self.set_window_size("768", "432")
self.open(URL)
self.assert_text("Worst node", timeout=settings.EXTREME_TIMEOUT)
def test_js_errors(self):
self.test_open_home()
self.assert_no_js_errors()
@parameterized.expand(buttons)
def test_tabs(self, button):
self.test_open_home()
self.execute_script(
'document.getElementById("' + button + '").click()'
)
self.assert_no_js_errors()
| agpl-3.0 | Python |
f96c60abacd6e44a3fc43ac83e7cfb794f1e02ad | Remove log translations | openstack/python-zaqarclient | zaqarclient/_i18n.py | zaqarclient/_i18n.py | # Copyright 2014 Red Hat, Inc
# All Rights .Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_i18n import * # noqa
_translators = TranslatorFactory(domain='zaqarclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
| # Copyright 2014 Red Hat, Inc
# All Rights .Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_i18n import * # noqa
_translators = TranslatorFactory(domain='zaqarclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
| apache-2.0 | Python |
0753885bf362e7800c2a7817fe037417bd38dc9e | Check character not check digit | rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism | python/isbn-verifier/isbn_verifier.py | python/isbn-verifier/isbn_verifier.py | class IsbnVerifier(object):
VALID_SEPERATOR = "-"
VALID_CHECK_CHARACTER = "X"
VALID_DIGITS = list(map(str, range(0, 10)))
VALID_CHARACTERS = set(VALID_DIGITS) | set([VALID_SEPERATOR, VALID_CHECK_CHARACTER])
VALID_LENGTH = 10
@classmethod
def is_valid(cls, string):
if cls.invalid(string):
return False
sum_so_far = 0
for i, c in enumerate(cls.remove_seperator(string)):
sum_so_far += cls.convert_char_to_int(c) * (10 - i)
return sum_so_far % 11 == 0
@classmethod
def invalid(cls, string):
return cls.invalid_character(string) or cls.invalid_length(string) or cls.invalid_X_other_than_check_digit(string)
@classmethod
def invalid_character(cls, string):
return any(char not in cls.VALID_CHARACTERS for char in string)
@classmethod
def invalid_length(cls, string):
return len(cls.remove_invalid_characters_and_slashes(string)) != cls.VALID_LENGTH
@classmethod
def invalid_X_other_than_check_digit(cls, string):
return cls.VALID_CHECK_CHARACTER in string and not string.endswith(cls.VALID_CHECK_CHARACTER)
@classmethod
def remove_invalid_characters_and_slashes(cls, string):
return cls.remove_seperator(cls.remove_invalid_characters(string))
@classmethod
def remove_invalid_characters(cls, string):
return "".join(filter(lambda char: char in cls.VALID_CHARACTERS, string))
@classmethod
def convert_char_to_int(cls, char):
return int(cls.convert_check_character_to_ten(char))
@classmethod
def convert_check_character_to_ten(cls, char):
return 10 if char == cls.VALID_CHECK_CHARACTER else char
@classmethod
def remove_seperator(cls, string):
return "".join(filter(lambda char: char != cls.VALID_SEPERATOR , string))
def verify(isbn):
return IsbnVerifier.is_valid(isbn)
| class IsbnVerifier(object):
VALID_SEPERATOR = "-"
VALID_CHECK_DIGIT = "X"
VALID_DIGITS = list(map(str, range(0, 10)))
VALID_CHARACTERS = set(VALID_DIGITS) | set([VALID_SEPERATOR, VALID_CHECK_DIGIT])
VALID_LENGTH = 10
@classmethod
def is_valid(cls, string):
if cls.invalid(string):
return False
sum_so_far = 0
for i, c in enumerate(cls.remove_seperator(string)):
sum_so_far += cls.convert_char_to_int(c) * (10 - i)
return sum_so_far % 11 == 0
@classmethod
def invalid(cls, string):
return cls.invalid_character(string) or cls.invalid_length(string) or cls.invalid_X_other_than_check_digit(string)
@classmethod
def invalid_character(cls, string):
return any(char not in cls.VALID_CHARACTERS for char in string)
@classmethod
def invalid_length(cls, string):
return len(cls.remove_invalid_characters_and_slashes(string)) != cls.VALID_LENGTH
@classmethod
def invalid_X_other_than_check_digit(cls, string):
return cls.VALID_CHECK_DIGIT in string and not string.endswith(cls.VALID_CHECK_DIGIT)
@classmethod
def remove_invalid_characters_and_slashes(cls, string):
return cls.remove_seperator(cls.remove_invalid_characters(string))
@classmethod
def remove_invalid_characters(cls, string):
return "".join(filter(lambda char: char in cls.VALID_CHARACTERS, string))
@classmethod
def convert_char_to_int(cls, char):
return int(cls.convert_x_to_ten(char))
@classmethod
def convert_x_to_ten(cls, char):
return 10 if char == cls.VALID_CHECK_DIGIT else char
@classmethod
def remove_seperator(cls, string):
return "".join(filter(lambda char: char != cls.VALID_SEPERATOR , string))
def verify(isbn):
return IsbnVerifier.is_valid(isbn)
| mit | Python |
2cd7a53f92d0a8c79fa9e89da08abce8b07988b0 | fix off my 1 error | closeio/closeio-api-scripts | contacts_from_search_query.py | contacts_from_search_query.py | #!/usr/bin/env python
import argparse
from flask_common.utils import CsvWriter
from closeio_api import Client as CloseIO_API
HEADERS = ['lead_name', 'contact_name', 'title', 'primary_email', 'primary_phone']
parser = argparse.ArgumentParser(description='Exports the primary contact information for each contact to CSV based on search query')
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--query', '-q', required=True, help='Search Query')
parser.add_argument('--output', '-o', required=True, help='Output Filename')
args = parser.parse_args()
with open(args.output, 'wb') as f:
writer = CsvWriter(f)
api = CloseIO_API(args.api_key)
# get the org id necessary for search
org_id = api.get('api_key')['data'][0]['organization_id']
# get all the search results for given lead name
search_results = []
filters = {
'organization_id': org_id,
'query': args.query,
'_fields': 'id,name,contacts'
}
writer.writerow(HEADERS)
skip = 0
limit = 100
has_more = True
while has_more:
filters['_skip'] = skip
filters['_limit'] = limit
leads = api.get('lead', data=filters)['data']
for lead in leads:
for contact in lead['contacts']:
phones = contact['phones']
emails = contact['emails']
primary_phone = phones[0]['phone'] if phones else None
primary_email = emails[0]['email'] if emails else None
row = [lead['name'], contact['name'], contact['title'], primary_email, primary_phone]
writer.writerow(row)
if len(leads) < limit:
break
skip += limit + 1
| #!/usr/bin/env python
import argparse
from flask_common.utils import CsvWriter
from closeio_api import Client as CloseIO_API
HEADERS = ['lead_name', 'contact_name', 'title', 'primary_email', 'primary_phone']
parser = argparse.ArgumentParser(description='Exports the primary contact information for each contact to CSV based on search query')
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--query', '-q', required=True, help='Search Query')
parser.add_argument('--output', '-o', required=True, help='Output Filename')
args = parser.parse_args()
with open(args.output, 'wb') as f:
writer = CsvWriter(f)
api = CloseIO_API(args.api_key)
# get the org id necessary for search
org_id = api.get('api_key')['data'][0]['organization_id']
# get all the search results for given lead name
search_results = []
filters = {
'organization_id': org_id,
'query': args.query,
'_fields': 'id,name,contacts'
}
writer.writerow(HEADERS)
skip = 0
limit = 100
has_more = True
while has_more:
filters['_skip'] = skip
filters['_limit'] = limit
leads = api.get('lead', data=filters)['data']
for lead in leads:
for contact in lead['contacts']:
phones = contact['phones']
emails = contact['emails']
primary_phone = phones[0]['phone'] if phones else None
primary_email = emails[0]['email'] if emails else None
row = [lead['name'], contact['name'], contact['title'], primary_email, primary_phone]
writer.writerow(row)
if len(leads) < limit:
break
skip += limit
| mit | Python |
98dca5076dbcd6f7fa3830dd5cdaab60c2d0a46e | Fix indent on test code for test/assembly broken in r1220 Review URL: https://chromiumcodereview.appspot.com/9429007 | old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google | test/assembly/gyptest-assembly.py | test/assembly/gyptest-assembly.py | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A basic test of compiling assembler files.
"""
import sys
import TestGyp
if sys.platform != 'win32':
# TODO(bradnelson): get this working for windows.
test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])
test.run_gyp('assembly.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('assembly.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
| #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A basic test of compiling assembler files.
"""
import sys
import TestGyp
if sys.platform != 'win32':
# TODO(bradnelson): get this working for windows.
test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])
test.run_gyp('assembly.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('assembly.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
| bsd-3-clause | Python |
1c28341a4cd828de607d9cc4252f444844c0a892 | Tweak up timeout in UDP test | rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel | test/bibliopixel/util/udp_test.py | test/bibliopixel/util/udp_test.py | import contextlib, queue, time, unittest
from bibliopixel.util import udp
TEST_ADDRESS = '127.0.0.1', 5678
TIMEOUT = 0.3
@contextlib.contextmanager
def receive_udp(address, results):
receiver = udp.QueuedReceiver(address)
receiver.start()
yield
try:
while True:
results.append(receiver.queue.get(timeout=TIMEOUT))
except queue.Empty:
pass
class UDPTest(unittest.TestCase):
def test_full(self):
messages = [s.encode() for s in ('foo', '', 'bar', 'baz', '', 'bing')]
expected = [s for s in messages if s]
# Note that empty messages are either not sent, or not received.
actual = []
with receive_udp(TEST_ADDRESS, actual):
sender = udp.QueuedSender(TEST_ADDRESS)
sender.start()
for m in messages:
sender.send(m)
self.assertEquals(actual, expected)
| import contextlib, queue, time, unittest
from bibliopixel.util import udp
TEST_ADDRESS = '127.0.0.1', 5678
TIMEOUT = 0.2
@contextlib.contextmanager
def receive_udp(address, results):
receiver = udp.QueuedReceiver(address)
receiver.start()
yield
try:
while True:
results.append(receiver.queue.get(timeout=TIMEOUT))
except queue.Empty:
pass
class UDPTest(unittest.TestCase):
def test_full(self):
messages = [s.encode() for s in ('foo', '', 'bar', 'baz', '', 'bing')]
expected = [s for s in messages if s]
# Note that empty messages are either not sent, or not received.
actual = []
with receive_udp(TEST_ADDRESS, actual):
sender = udp.QueuedSender(TEST_ADDRESS)
sender.start()
for m in messages:
sender.send(m)
self.assertEquals(actual, expected)
| mit | Python |
56401448a09723677331b736b3a76c1eab976881 | Change the way cornice services are defined. | mostlygeek/tokenserver,mostlygeek/tokenserver,mozilla-services/tokenserver,mozilla-services/tokenserver | tokenserver/views.py | tokenserver/views.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import json
from cornice import Service
from vep.errors import Error as BrowserIDError
from tokenserver.util import JsonError
from tokenserver.verifiers import get_verifier
# A GET on / returns the discovery API
discovery = Service(name='discovery', path='/')
token = Service(name='token', path='/1.0/{application}/{version}')
@discovery.get()
def _discovery(request):
discovery = os.path.join(os.path.dirname(__file__), 'discovery.json')
with open(discovery) as f:
return json.loads(f.read())
# validators
def valid_assertion(request):
"""Validate that the assertion given in the request is correct.
If not, add errors in the response so that the client can know what
happened.
"""
def _raise_unauthorized():
raise JsonError(401, description='Unauthorized')
token = request.headers.get('Authorization')
if token is None:
_raise_unauthorized()
token = token.split()
if len(token) != 2:
_raise_unauthorized()
name, assertion = token
if name.lower() != 'browser-id':
resp = JsonError(401, description='Unsupported')
resp.www_authenticate = ('Browser-ID', {})
raise resp
try:
verifier = get_verifier()
verifier.verify(assertion)
except BrowserIDError:
_raise_unauthorized()
# everything sounds good, add the assertion to the list of validated fields
# and continue
request.validated['assertion'] = assertion
def valid_app(request):
supported = request.registry.settings['tokenserver.applications']
application = request.matchdict.get('application')
version = request.matchdict.get('version')
if application not in supported:
raise JsonError(404, location='url', name='application',
description='Unknown application')
else:
request.validated['application'] = application
supported_versions = supported[application]
if version not in supported_versions:
raise JsonError(404, location='url', name=version,
description='Unknown application version')
else:
request.validated['version'] = version
@token.get(validators=(valid_app, valid_assertion))
def return_token(request):
# XXX here, build the token
assertion = request.validated['assertion']
application = request.validated['application']
version = request.validated['version']
#email = request.validated['email']
secrets = request.registry.settings['tokenserver.secrets_file']
return {'service_entry': 'http://example.com'}
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import json
from cornice import Service
from cornice.resource import resource, view
from tokenserver.util import JsonError
from tokenserver.verifiers import get_verifier
#
# Discovery page
#
discovery = Service(name='discovery', path='/')
@discovery.get()
def _discovery(request):
discovery = os.path.join(os.path.dirname(__file__), 'discovery.json')
with open(discovery) as f:
return json.loads(f.read())
#
# token service
#
def valid_assertion(request):
token = request.headers.get('Authorization')
if token is None:
raise JsonError(401, description='Unauthorized')
token = token.split()
if len(token) != 2:
raise JsonError(401, description='Unauthorized')
name, assertion = token
if name.lower() != 'browser-id':
resp = JsonError(401, description='Unsupported')
resp.www_authenticate = ('Browser-ID', {})
raise resp
verifier = get_verifier()
verifier.verify(assertion)
request.validated['assertion'] = assertion
def valid_app(request):
supported = request.registry.settings['tokenserver.applications']
application = request.matchdict.get('application')
version = request.matchdict.get('version')
if application not in supported:
raise JsonError(404, location='url', name='application',
description='Unknown application')
else:
request.validated['application'] = application
supported_versions = supported[application]
if version not in supported_versions:
raise JsonError(404, location='url', name=version,
description='Unknown application version')
else:
request.validated['version'] = version
@resource(path='/1.0/{application}/{version}')
class TokenService(object):
def __init__(self, request):
self.request = request
@view(validators=(valid_app, valid_assertion))
def get(self):
request = self.request
# XXX here, build the token
assertion = request.validated['assertion']
application = request.validated['application']
version = request.validated['version']
#email = request.validated['email']
secrets = request.registry.settings['tokenserver.secrets_file']
return {'service_entry': 'http://example.com'}
| mpl-2.0 | Python |
dec7214d65a900ace53d173a4abc9bb884a13eb8 | Improve testing | danbob123/oi,walkr/oi | test/test_oi.py | test/test_oi.py | import unittest
import oi
class TestOi(unittest.TestCase):
def setUp(self):
self.address = 'ipc:///tmp/test-programd.sock'
self.p = oi.Program('programd', self.address)
self.ctl = oi.CtlProgram('programctl', self.address)
def tearDown(self):
self.p.service.sock.close()
self.ctl.client.sock.close()
# --------------------------------------
def test_new_program(self):
self.assertIsNotNone(self.p)
def test_new_ctl(self):
self.assertIsNotNone(self.ctl)
def test_add_command_for_program(self):
self.p.add_command('test', lambda p: 'test')
def test_add_local_command_for_ctl(self):
self.ctl.add_command('test', lambda p: 'test')
dest, res, err = self.ctl.call('test')
self.assertEqual(dest, 'local')
self.assertEqual(res, 'test')
self.assertIsNone(err)
| import unittest
import oi
class TestOi(unittest.TestCase):
def setUp(self):
self.address = 'ipc:///tmp/test-programd.sock'
self.p = oi.Program('programd', self.address)
self.ctl = oi.CtlProgram('programctl', self.address)
def tearDown(self):
self.p.service.sock.close()
self.ctl.client.sock.close()
# --------------------------------------
def test_new_program(self):
self.assertIsNotNone(self.p)
def test_new_ctl(self):
self.assertIsNotNone(self.ctl)
def test_add_command(self):
self.p.add_command('test', lambda p: 'test')
| mit | Python |
f949118c6056f14e72226a2e70a11a3a75bc93e8 | Print chars instead of strings, this should take meta chars in consideration as well | jtwaleson/decrypt | decrypt.py | decrypt.py | #!/usr/bin/env python
import curses
import time
import fileinput
import random
import string
lines = []
chance = 0.1
confirmed_per_line = []
screen = curses.initscr()
curses.noecho()
try:
curses.curs_set(0)
except:
pass
screen.keypad(1)
def iterate(increase = False):
still_random = 0
global chance, confirmed_per_line, lines
if increase:
chance += 0.01
screen.erase()
(y,x) = screen.getmaxyx()
final_line = len(lines)
if final_line > y:
first_line = final_line - y
else:
first_line = 0
for line_num in range(first_line, final_line):
line = lines[line_num]
for i in [i for i in range(min(x, len(line)))]:
if i not in confirmed_per_line[line_num]:
still_random += 1
if random.random() < chance:
confirmed_per_line[line_num].append(i)
char = random.choice(string.punctuation)
else:
char = line[i]
try:
screen.addch(line_num - first_line, i, char)
except Exception:
pass
screen.refresh()
time.sleep(0.1)
return still_random > 0
try:
for line in fileinput.input():
confirmed_per_line.append([])
lines.append(line.rstrip())
iterate()
fileinput.close()
while iterate(True):
pass
time.sleep(2)
finally:
curses.endwin()
for line in lines:
print(line)
| #!/usr/bin/env python
import curses
import time
import fileinput
import random
import string
lines = []
chance = 0.1
confirmed_per_line = []
screen = curses.initscr()
curses.noecho()
try:
curses.curs_set(0)
except:
pass
screen.keypad(1)
def iterate(increase = False):
still_random = 0
global chance, confirmed_per_line, lines
if increase:
chance += 0.01
screen.erase()
(y,x) = screen.getmaxyx()
final_line = len(lines)
if final_line > y:
first_line = final_line - y
else:
first_line = 0
for line_num in range(first_line, final_line):
line = lines[line_num]
for i in [i for i in range(min(x, len(line))) if i not in confirmed_per_line[line_num]]:
still_random += 1
if random.random() < chance:
confirmed_per_line[line_num].append(i)
random_line = ''.join(random.choice(string.punctuation) if col not in confirmed_per_line[line_num] else line[col] for col in range(min(len(line), x)))
try:
screen.addstr(line_num - first_line, 0, random_line)
except Exception:
pass
screen.refresh()
time.sleep(0.1)
return still_random > 0
try:
for line in fileinput.input():
confirmed_per_line.append([])
lines.append(line.rstrip())
iterate()
fileinput.close()
while iterate(True):
pass
time.sleep(2)
finally:
curses.endwin()
for line in lines:
print(line)
| mit | Python |
2c356f404de628c23aad9bead3c5c592c508dff5 | Allow to run with no directory passed in. | pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython | tools/make-frozen.py | tools/make-frozen.py | #!/usr/bin/env python
#
# Create frozen modules structure for MicroPython.
#
# Usage:
#
# Have a directory with modules to be frozen (only modules, not packages
# supported so far):
#
# frozen/foo.py
# frozen/bar.py
#
# Run script, passing path to the directory above:
#
# ./make-frozen.py frozen > frozen.c
#
# Include frozen.c in your build, having defined MICROPY_MODULE_FROZEN_STR in
# config.
#
from __future__ import print_function
import sys
import os
def module_name(f):
return f
modules = []
if len(sys.argv) > 1:
root = sys.argv[1].rstrip("/")
root_len = len(root)
for dirpath, dirnames, filenames in os.walk(root):
for f in filenames:
fullpath = dirpath + "/" + f
st = os.stat(fullpath)
modules.append((fullpath[root_len + 1:], st))
print("#include <stdint.h>")
print("const char mp_frozen_str_names[] = {")
for f, st in modules:
m = module_name(f)
print('"%s\\0"' % m)
print('"\\0"};')
print("const uint32_t mp_frozen_str_sizes[] = {")
for f, st in modules:
print("%d," % st.st_size)
print("};")
print("const char mp_frozen_str_content[] = {")
for f, st in modules:
data = open(sys.argv[1] + "/" + f, "rb").read()
# We need to properly escape the script data to create a C string.
# When C parses hex characters of the form \x00 it keeps parsing the hex
# data until it encounters a non-hex character. Thus one must create
# strings of the form "data\x01" "abc" to properly encode this kind of
# data. We could just encode all characters as hex digits but it's nice
# to be able to read the resulting C code as ASCII when possible.
data = bytearray(data) # so Python2 extracts each byte as an integer
esc_dict = {ord('\n'): '\\n', ord('\r'): '\\r', ord('"'): '\\"', ord('\\'): '\\\\'}
chrs = ['"']
break_str = False
for c in data:
try:
chrs.append(esc_dict[c])
except KeyError:
if 32 <= c <= 126:
if break_str:
chrs.append('" "')
break_str = False
chrs.append(chr(c))
else:
chrs.append('\\x%02x' % c)
break_str = True
chrs.append('\\0"')
print(''.join(chrs))
print("};")
| #!/usr/bin/env python
#
# Create frozen modules structure for MicroPython.
#
# Usage:
#
# Have a directory with modules to be frozen (only modules, not packages
# supported so far):
#
# frozen/foo.py
# frozen/bar.py
#
# Run script, passing path to the directory above:
#
# ./make-frozen.py frozen > frozen.c
#
# Include frozen.c in your build, having defined MICROPY_MODULE_FROZEN_STR in
# config.
#
from __future__ import print_function
import sys
import os
def module_name(f):
return f
modules = []
root = sys.argv[1].rstrip("/")
root_len = len(root)
for dirpath, dirnames, filenames in os.walk(root):
for f in filenames:
fullpath = dirpath + "/" + f
st = os.stat(fullpath)
modules.append((fullpath[root_len + 1:], st))
print("#include <stdint.h>")
print("const char mp_frozen_str_names[] = {")
for f, st in modules:
m = module_name(f)
print('"%s\\0"' % m)
print('"\\0"};')
print("const uint32_t mp_frozen_str_sizes[] = {")
for f, st in modules:
print("%d," % st.st_size)
print("};")
print("const char mp_frozen_str_content[] = {")
for f, st in modules:
data = open(sys.argv[1] + "/" + f, "rb").read()
# We need to properly escape the script data to create a C string.
# When C parses hex characters of the form \x00 it keeps parsing the hex
# data until it encounters a non-hex character. Thus one must create
# strings of the form "data\x01" "abc" to properly encode this kind of
# data. We could just encode all characters as hex digits but it's nice
# to be able to read the resulting C code as ASCII when possible.
data = bytearray(data) # so Python2 extracts each byte as an integer
esc_dict = {ord('\n'): '\\n', ord('\r'): '\\r', ord('"'): '\\"', ord('\\'): '\\\\'}
chrs = ['"']
break_str = False
for c in data:
try:
chrs.append(esc_dict[c])
except KeyError:
if 32 <= c <= 126:
if break_str:
chrs.append('" "')
break_str = False
chrs.append(chr(c))
else:
chrs.append('\\x%02x' % c)
break_str = True
chrs.append('\\0"')
print(''.join(chrs))
print("};")
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.