commit stringlengths 40 40 | old_file stringlengths 4 236 | new_file stringlengths 4 236 | old_contents stringlengths 1 3.26k | new_contents stringlengths 16 4.43k | subject stringlengths 16 624 | message stringlengths 17 3.29k | lang stringclasses 5
values | license stringclasses 13
values | repos stringlengths 5 91.5k |
|---|---|---|---|---|---|---|---|---|---|
f38eb25fe13320297baad173c8e6d6ac7cfb9542 | spacy/tests/tokens/test_vec.py | spacy/tests/tokens/test_vec.py | from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
| from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
| Fix test for word vector | Fix test for word vector
| Python | mit | oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,explosion/spaCy,explosion/spaCy,raphael0202/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,honnibal/spaCy,raphael0202/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,raphael0202/spaCy,banglakit/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,spacy-io/spaCy,spacy-io/spaCy |
e81c56e1f3b682e0cfffa40851aed817be3b1812 | etcd3/__init__.py | etcd3/__init__.py | from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
| from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.2.2'
__all__ = (
'Etcd3Client',
'Lease',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
| Add Lease to public api | Add Lease to public api
| Python | apache-2.0 | kragniz/python-etcd3 |
00a7f13ac2dbbd7449fd0ce260a21448c67b73e9 | birdwatch/api.py | birdwatch/api.py | __author__ = 'jloeffler'
def list_projects():
return {}
def list_contributors():
return {}
| __author__ = 'jloeffler'
from github3 import GitHub
from github3.models import GitHubError
from github3.repos.repo import Repository
from birdwatch.configuration import configuration
from birdwatch.collector import Project
def list_projects():
# just for testing
github = GitHub(token=configuration.github_token)
zalando_repos = github.iter_user_repos("zalando")
projects = {'projects': [{'name': repo.name} for repo in zalando_repos if repo.name == 'birdwatch']}
print(projects)
return projects
def list_contributors():
return {}
| Return one project for testing | Return one project for testing
| Python | apache-2.0 | marky-mark/catwatch,AlexanderYastrebov/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch,AlexanderYastrebov/catwatch,marky-mark/catwatch |
692f13b9dbe994baf44bf42384e956608b94fede | aldryn_apphooks_config/utils.py | aldryn_apphooks_config/utils.py | # -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if request.current_page:
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model) | # -*- coding: utf-8 -*-
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model) | Add better check for being in a CMS-page request | Add better check for being in a CMS-page request
| Python | bsd-3-clause | aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config |
829ad434f42b457294d44108b26c6880cd0e4c36 | pymatgen/__init__.py | pymatgen/__init__.py | __author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
| __author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Dec 18 2013"
__version__ = "2.8.10"
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder, \
pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
| Remove zopen in pymatgen root. | Remove zopen in pymatgen root.
Former-commit-id: 375be0147716d3b4d2dee95680eae4ee3804716b [formerly 05648421c1fa77f6f339f68be2c43bb7952e918a]
Former-commit-id: e5cfaf0277815951ddb09d9d6b30876e400870d7 | Python | mit | dongsenfo/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,tallakahath/pymatgen,mbkumar/pymatgen,davidwaroquiers/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,xhqu1981/pymatgen,vorwerkc/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,tallakahath/pymatgen,montoyjh/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,ndardenne/pymatgen,dongsenfo/pymatgen,setten/pymatgen,mbkumar/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,Bismarrck/pymatgen,gVallverdu/pymatgen,tallakahath/pymatgen,gpetretto/pymatgen,richardtran415/pymatgen,matk86/pymatgen,johnson1228/pymatgen,czhengsci/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,aykol/pymatgen,gmatteo/pymatgen,setten/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,blondegeek/pymatgen,aykol/pymatgen,aykol/pymatgen,nisse3000/pymatgen,xhqu1981/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,setten/pymatgen,fraricci/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,setten/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,dongsenfo/pymatgen,matk86/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,Bismarrck/pymatgen,davidwaroquiers/pymatgen |
0563882d0d1bfdf4e64a65bcd91e8d6d4ab6ed8f | core/polyaxon/polypod/compiler/lineage/artifacts_collector.py | core/polyaxon/polypod/compiler/lineage/artifacts_collector.py | #!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name),
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
| #!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional
from polyaxon.utils.fqn_utils import to_fqn_name
from traceml.artifacts import V1ArtifactKind, V1RunArtifact
def collect_lineage_artifacts_path(artifact_path: str) -> Optional[V1RunArtifact]:
name = os.path.basename(artifact_path.rstrip("/")) # Trim handles cases like `foo/` -> ''
return V1RunArtifact(
name=to_fqn_name(name) if name else "_",
kind=V1ArtifactKind.DIR,
path=artifact_path,
summary={"path": artifact_path},
is_input=True,
)
| Fix artifacts name sanitization for root folders | Fix artifacts name sanitization for root folders
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon |
365da65390a0c2093fbbc5681c72cbfbd73ae78f | rctk/widgets/text.py | rctk/widgets/text.py | from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
| from rctk.widgets.control import Control, remote_attribute
from rctk.task import Task
from rctk.event import Changable, Submittable
class Text(Control, Changable, Submittable):
name = "text"
value = remote_attribute('value', "")
def __init__(self, tk, value="", rows=1, columns=20, **properties):
self._value = value
self._rows = rows
self._columns = columns
super(Text, self).__init__(tk, **properties)
def create(self):
self.tk.create_control(self, value=self._value, rows=self._rows, columns=self._columns)
def sync(self, **data):
if 'value' in data:
self._value = data['value']
class Password(Text):
name = "password"
| Allow additional properties on Text | Allow additional properties on Text
git-svn-id: ec97508af0aa29a1d296967d6f0ba22a468c79d6@350 286bb87c-ec97-11de-a004-2f18c49ebcc3
| Python | bsd-2-clause | rctk/rctk,rctk/rctk |
5d1fe61d152d2c5544982322a9f156809ea267f0 | main.py | main.py | from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
| from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
| Fix crashes from misc. events | Fix crashes from misc. events
| Python | mit | Spferical/slack-megahal,Spferical/matrix-chatbot,Spferical/matrix-chatbot,Spferical/matrix-megahal |
152dfbb9fc5ca5fe5c859fea5ba4a25a31f3ff13 | gn/compile_processors.py | gn/compile_processors.py | #!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
print("Recompiling " + p + "...")
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
| #!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
skslc = sys.argv[1]
clangFormat = sys.argv[2]
fetchClangFormat = sys.argv[3]
processors = sys.argv[4:]
exeSuffix = '.exe' if sys.platform.startswith('win') else '';
for p in processors:
try:
if not os.path.isfile(clangFormat + exeSuffix):
subprocess.check_call([sys.executable, fetchClangFormat]);
noExt, _ = os.path.splitext(p)
head, tail = os.path.split(noExt)
targetDir = os.path.join(head, "generated")
if not os.path.isdir(targetDir):
os.mkdir(targetDir)
target = os.path.join(targetDir, tail)
subprocess.check_output([skslc, p, target + ".h"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".h\"", shell=True)
subprocess.check_output([skslc, p, target + ".cpp"])
subprocess.check_call(clangFormat + " --sort-includes=false -i \"" +
target + ".cpp\"", shell=True)
except subprocess.CalledProcessError as err:
print("### Error compiling " + p + ":")
print(err.output)
exit(1)
| Remove "Recompiling..." output when building .fp files | Remove "Recompiling..." output when building .fp files
Change-Id: I41402dc04d4388217d7f7cd8de9aff8fbb4a3765
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/317391
Reviewed-by: John Stiles <f4fcf42d3bb5924557f1eeb3be66747535e585da@google.com>
Commit-Queue: Brian Osman <794c0b5534edf5601d88e1d41975d0262da12894@google.com>
| Python | bsd-3-clause | google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia |
c1508d51a90db1ebf3c0278c777ff3169e0d13f9 | tests/unit/test_wrapper.py | tests/unit/test_wrapper.py | import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
| import numpy as np
from functools import partial
from elfi.wrapper import Wrapper
class Test_wrapper():
def test_echo_exec_arg(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper("1")
assert ret == 1
def test_echo_default_arg(self):
command = "echo 1"
wrapper = Wrapper(command, post=int)
ret = wrapper()
assert ret == 1
def test_echo_both_args(self):
command = "echo 1 {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper("2")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_kwargs(self):
command = "echo {param}"
wrapper = Wrapper(command, post=int)
ret = wrapper(param="1")
assert ret == 1
def test_echo_args_and_kwargs(self):
command = "echo {param} {0}"
post = partial(np.fromstring, sep=" ")
wrapper = Wrapper(command, post=post)
ret = wrapper(2, param="1")
assert np.array_equal(ret, np.array([1,2]))
def test_echo_non_string_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(1)
assert ret == 1
def test_echo_1d_array_args(self):
command = "echo {0}"
wrapper = Wrapper(command, post=int)
ret = wrapper(np.array([1]))
assert ret == 1
| Add test for 1d array arguments for Wrapper | Add test for 1d array arguments for Wrapper
| Python | bsd-3-clause | lintusj1/elfi,elfi-dev/elfi,lintusj1/elfi,HIIT/elfi,elfi-dev/elfi |
90655c89fcf56af06a69f8110a9f7154294ca11c | ritter/analytics/sentiment_analyzer.py | ritter/analytics/sentiment_analyzer.py | import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
| import re, math
from collections import Counter
import itertools
from sentimental import sentimental
class SentimentAnalyzer():
_sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True)
path = sentimental.Sentimental.get_datafolder()
_sentimental.train([path + '/sv/ruhburg'])
def calculate_friend_scores(marked_tree):
reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)')
scores = {}
for item in marked_tree:
if 'text' in item:
m = reg.findall(item['text'])
c = sorted(list(Counter(m)))
pairs = list(itertools.combinations(c, 2))
senti = SentimentAnalyzer.sentiment(item['text'])
for pair in pairs:
s = scores.get(pair, [0, 0])
if senti == 1:
s[0] = s[0] + 1
elif senti == -1:
s[1] = s[1] + 1
scores[pair] = s
return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()}
def sentiment(text):
label = max(SentimentAnalyzer._sentimental.sentiment(text))
if label == 'positive':
return 1
elif label == 'negative':
return -1
else:
return 0
| Update to Sentimental 2.2.x with undersampling | feat: Update to Sentimental 2.2.x with undersampling
| Python | mit | ErikGartner/ghostdoc-ritter |
93be15b7f74673247eeabc208fd56cc6cb735e43 | tests/matchers/test_contain.py | tests/matchers/test_contain.py | import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain({1, 2, 3}, 1).matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain({1, 2, 3}, 4).matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
| import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
| Remove sets from tests Since python 2.6 does not have literal set syntax | Remove sets from tests
Since python 2.6 does not have literal set syntax | Python | mit | vesln/robber.py,taoenator/robber.py |
6ee083f5b5a190f30f4916698c57c7ee1c2225fe | create_sample.py | create_sample.py | # importing modules/ libraries
import pandas as pd
import random
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv',
index = False)
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv',
index = False)
# create sample of orders data
n = 3421083
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_sample_df = pd.read_csv('Data/orders.csv',
skiprows = skip)
order_sample_df.to_csv('Data/orders_sample.csv',
index = False) | # importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create sample of order products train data
n = 1384617
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__train_sample_df = pd.read_csv('Data/order_products__train.csv',
index = 'order_Id', skiprows = skip)
order_products__train_sample_df.to_csv('Data/order_products__train_sample.csv')
# create sample of order products prior data
n = 32434489
s = round(0.1 * n)
skip = sorted(random.sample(range(1,n), n-s))
order_products__prior_sample_df = pd.read_csv('Data/order_products__prior.csv',
index = 'order_Id', skiprows = skip)
order_products__prior_sample_df.to_csv('Data/order_products__prior_sample.csv')
# create sample of orders data
prior_unique_ids = np.array(order_products__prior_sample_df.index.unique())
train_unique_ids = np.array(order_products__train_sample_df.index.unique())
match_ids = np.concatenate((prior_unique_ids, train_unique_ids), axis = 0)
order_sample_df = pd.read_csv('Data/orders.csv', index_col = 'order_id')
order_sample_df = order_sample_df.loc[match_ids,:]
order_sample_df.to_csv('Data/orders_sample.csv') | Change create sample code to ensure matching order ids data | fix: Change create sample code to ensure matching order ids data
| Python | mit | rjegankumar/instacart_prediction_model |
acc3888ef55d7df22df08b16cc746186fc1a75c7 | main.py | main.py | #!/usr/bin/env python3
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3.6
import argparse
import asyncio
import logging
import sys
from pathlib import Path
from MoMMI.logsetup import setup_logs
# Do this BEFORE we import master, because it does a lot of event loop stuff.
if sys.platform == "win32":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except:
pass
from MoMMI.master import master
def main() -> None:
version = sys.version_info
if version.major < 3 or (version.major == 3 and version.minor < 6):
logging.critical("You need at least Python 3.6 to run MoMMI.")
sys.exit(1)
setup_logs()
parser = argparse.ArgumentParser()
parser.add_argument("--config-dir", "-c",
default="./config",
help="The directory to read config files from.",
dest="config",
type=Path)
parser.add_argument("--storage-dir", "-s",
default="./data",
help="The directory to use for server data storage.",
dest="data",
type=Path)
args = parser.parse_args()
master.start(args.config, args.data)
if __name__ == "__main__":
main()
| Use uvloop because apparently it's fast. | Use uvloop because apparently it's fast.
| Python | mit | PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI |
3fb56e434182e5b28dcad0c547b0326ebe5be352 | main.py | main.py | from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def main():
arguments = generateArgumentsFromParser()
collectionFileName = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create":
createCollectionFile(arguments.username, arguments.collectionName)
collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
collectionFile = open(collectionFileName, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
elif arguments.action.lower() == "update":
return None
if __name__ == '__main__':
main()
| from createCollection import createCollectionFile
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--type', dest='collectionType', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
    """Build the on-disk path of a user's collection data file."""
    directory = "{0}/{1}_{0}".format(CONST_COLLECTIONS_NAME, username)
    basename = "{0}_{1}_collection.dat".format(username, collectionName)
    return directory + "/" + basename
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionToFile(collectionFileName, arguments):
    """Serialise a brand-new collection to *collectionFileName* as JSON.

    Args:
        collectionFileName: path of the ``.dat`` file to (over)write.
        arguments: parsed CLI namespace providing ``username``,
            ``collectionType`` and ``collectionName``.
    """
    collection = generateNewCollection(arguments.username, arguments.collectionType, arguments.collectionName)
    # ``with`` guarantees the handle is closed even if toJSON()/write()
    # raises; the original leaked the file descriptor on failure.
    with open(collectionFileName, 'w') as collectionFile:
        collectionFile.write(collection.toJSON())
def main():
    """Entry point: dispatch the requested PyInventory action."""
    arguments = generateArgumentsFromParser()
    collectionFileName = generateFileName(arguments.username, arguments.collectionName)
    action = arguments.action.lower()
    if action == "create":
        createCollectionFile(arguments.username, arguments.collectionName)
        writeCollectionToFile(collectionFileName, arguments)
    elif action == "update":
        # "update" is accepted but not implemented yet.
        return None
if __name__ == '__main__':
main()
| Refactor create action into function | Refactor create action into function
| Python | apache-2.0 | AmosGarner/PyInventory |
f34f67247d97d75039c65f71da8489fbffa17575 | snowpenguin/django/recaptcha2/tests.py | snowpenguin/django/recaptcha2/tests.py | import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
def setUp(self):
os.environ['RECAPTCHA_DISABLE'] = 'True'
def test_dummy_validation(self):
form = RecaptchaTestForm({})
self.assertTrue(form.is_valid())
def tearDown(self):
del os.environ['RECAPTCHA_DISABLE']
| import os
from django.forms import Form
from django.test import TestCase
from snowpenguin.django.recaptcha2.fields import ReCaptchaField
from snowpenguin.django.recaptcha2.widgets import ReCaptchaWidget
class RecaptchaTestForm(Form):
recaptcha = ReCaptchaField(widget=ReCaptchaWidget())
class TestRecaptchaForm(TestCase):
    """Exercise ReCaptchaField with validation disabled and re-enabled.

    The field honours the ``RECAPTCHA_DISABLE`` environment variable:
    while it is set, validation is short-circuited to success.
    """

    def setUp(self):
        os.environ['RECAPTCHA_DISABLE'] = 'True'

    def test_dummy_validation(self):
        # With RECAPTCHA_DISABLE set, an empty submission must validate.
        form = RecaptchaTestForm({})
        self.assertTrue(form.is_valid())

    def test_dummy_error(self):
        # Re-enable real validation: an empty submission must now fail.
        del os.environ['RECAPTCHA_DISABLE']
        form = RecaptchaTestForm({})
        self.assertFalse(form.is_valid())

    def tearDown(self):
        # test_dummy_error already removed the variable, so the original
        # unconditional ``del`` raised KeyError during that test's
        # teardown.  pop() with a default is safe in both cases.
        os.environ.pop('RECAPTCHA_DISABLE', None)
| Check possible exception with wrong key data | Check possible exception with wrong key data
| Python | lgpl-2.1 | kbytesys/django-recaptcha2,kbytesys/django-recaptcha2 |
969aed7046e4965962e8ed5daa9c557baffc48bc | glue_h5part/io.py | glue_h5part/io.py | import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
"""
Given a filename and a step ID, read in the data into a new Data object.
"""
f = h5py.File(filename, 'r')
try:
group = f['Step#{0}'.format(step_id)]
except KeyError:
raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
data = Data()
for attribute in group:
data[attribute] = group[attribute].value
data.label = os.path.basename(filename.rsplit('.', 1)[0])
return data
def find_n_steps(filename):
"""
Given a filename, find how many steps exist in the file.
"""
f = h5py.File(filename, 'r')
if 'Step#0' not in f:
raise ValueError("File does not contain Step#n entries")
# Some groups may not be 'Step' groups so we have to work backwards. The
# absolute maximum number of steps is the number of groups at the root level.
n_max = len(f)
for n_max in range(n_max - 1, -1, -1):
if 'Step#{0}'.format(n_max) in f:
return n_max
| import os
import h5py
from glue.core import Data
def read_step_to_data(filename, step_id=0):
    """
    Given a filename and a step ID, read in the data into a new Data object.

    Parameters
    ----------
    filename : str
        Path to an H5Part (HDF5) file containing ``Step#<n>`` groups.
    step_id : int, optional
        Index of the step group to load (default 0).

    Returns
    -------
    Data
        A glue ``Data`` object with one component per dataset in the group.

    Raises
    ------
    ValueError
        If the requested step does not exist in the file.
    """
    # ``with`` ensures the HDF5 handle is released even on error; the
    # original left the file open for the life of the process.
    with h5py.File(filename, 'r') as f:
        try:
            group = f['Step#{0}'.format(step_id)]
        except KeyError:
            raise ValueError("Step ID {0} not found in file: {1}".format(step_id, filename))
        data = Data()
        for attribute in group:
            try:
                # .value reads the dataset into memory, so nothing refers
                # back to the file after the ``with`` block exits.
                data[attribute] = group[attribute].value
            except AttributeError:
                # Some HDF5 members (e.g. sub-groups) have no .value; skip.
                pass
    data.label = os.path.basename(filename.rsplit('.', 1)[0])
    return data
def find_n_steps(filename):
    """
    Given a filename, find how many steps exist in the file.

    Returns the largest ``n`` for which a ``Step#n`` group exists.
    NOTE(review): despite the name, this is the maximum step *index*,
    not a count -- confirm with callers before changing the semantics.

    Raises
    ------
    ValueError
        If the file contains no ``Step#0`` group.
    """
    # ``with`` releases the HDF5 handle; the original never closed it.
    with h5py.File(filename, 'r') as f:
        if 'Step#0' not in f:
            raise ValueError("File does not contain Step#n entries")
        # Some groups may not be 'Step' groups so we have to work backwards.
        # The absolute maximum number of steps is the number of groups at
        # the root level.  (The original reused ``n_max`` as the loop
        # variable; a separate name is clearer.)
        for n in range(len(f) - 1, -1, -1):
            if 'Step#{0}'.format(n) in f:
                return n
| Fix issue with HDF5 objects that don't have a value | Fix issue with HDF5 objects that don't have a value | Python | bsd-2-clause | glue-viz/glue-h5part |
adee7a2530d22d1242f89cddc84795efd1d02653 | imagesift/cms_plugins.py | imagesift/cms_plugins.py | import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
model = GalleryPlugin
name = _('Imagesift Plugin')
render_template = "imagesift_plugin.html"
def date_digest(self, images):
"""
return a list of unique dates, for all the images passed
"""
dates = {}
for i in images:
dates.setdefault(i.overrideable_date().date(), None)
return sorted(dates.keys())
def render(self, context, instance, placeholder):
url = context['request'].get_full_path()
date = context['request'].GET.get('date')
limit = instance.thumbnail_limit
qs = instance.get_images_queryset()
if limit:
qs = qs[:limit]
filtered = False
if date:
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
qs = list(qs)
qs = [i for i in qs if i.overrideable_date().date() == date]
filtered = _('The set of images is filtered to %s' % unicode(date))
context.update({
'dates': [d.isoformat() for d in self.date_digest(qs)],
'filtered':filtered,
'images': qs,
'instance': instance,
'placeholder': placeholder,
'url':url,
})
return context
plugin_pool.register_plugin(ImagesiftPlugin) | import datetime
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import GalleryPlugin
class ImagesiftPlugin(CMSPluginBase):
    """django-cms plugin rendering a date-filterable, date-sorted gallery."""

    model = GalleryPlugin
    name = _('Imagesift Plugin')
    render_template = "imagesift_plugin.html"

    def date_digest(self, images):
        """
        return a list of unique dates, for all the images passed
        """
        # A dict is used purely to de-duplicate the date keys.
        dates = {}
        for i in images:
            dates.setdefault(i.overrideable_date().date(), None)
        return sorted(dates.keys())

    def render(self, context, instance, placeholder):
        """Build the template context: optionally filter by a
        ``?date=YYYY-MM-DD`` query parameter, sort by (overrideable) date,
        then apply the configured thumbnail limit."""
        url = context['request'].get_full_path()
        date = context['request'].GET.get('date')
        limit = instance.thumbnail_limit
        qs = instance.get_images_queryset()
        # there's no way around listing, sorry.
        qs = list(qs)
        filtered = False
        if date:
            # An invalid date string will raise ValueError here --
            # presumably the URL is always generated by this plugin's own
            # template; TODO confirm.
            date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
            qs = [i for i in qs if i.overrideable_date().date() == date]
            filtered = _('The set of images is filtered to %s' % unicode(date))
        # sort before limit
        qs.sort(key=lambda i: i.overrideable_date())
        if limit:
            qs = qs[:limit]
        context.update({
            'dates': [d.isoformat() for d in self.date_digest(qs)],
            'filtered':filtered,
            'images': qs,
            'instance': instance,
            'placeholder': placeholder,
            'url':url,
        })
        return context
plugin_pool.register_plugin(ImagesiftPlugin) | Sort returned images by date, taking into account overrides | Sort returned images by date, taking into account overrides
| Python | bsd-3-clause | topiaruss/cmsplugin-imagesift,topiaruss/cmsplugin-imagesift,topiaruss/cmsplugin-imagesift |
8f14126e36e7f5c15431cd7541762e485c3f8169 | main.py | main.py | from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime
import json
def main():
#createCollection('agarner','books')
now = datetime.datetime.now()
items = []
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
itemCollection = Collection('Items', 'agarner', items)
print itemCollection.toJSON()
if __name__ == '__main__':
main()
| from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path
CONST_COLLECTIONS_NAME = 'collections'
CONST_USERNAME = 'agarner'
CONST_COLLECTION = 'Items'
def generateItemsCollection():
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(CONST_COLLECTION, CONST_USERNAME, items)
def main():
createCollection(CONST_USERNAME,CONST_COLLECTION)
itemCollection = generateItemsCollection()
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTIONS_NAME+'/'+CONST_USERNAME+'_'+CONST_COLLECTION+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
| Implement ability to save json data to collection file | Implement ability to save json data to collection file
| Python | apache-2.0 | AmosGarner/PyInventory |
d63509e0d68a1dceabbbcf58432a92f7a4cbfd77 | robot/robot/src/autonomous/main.py | robot/robot/src/autonomous/main.py |
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self):
super().__init__()
print("Team 1418 robot code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init() |
try:
import wpilib
except ImportError:
from pyfrc import wpilib
# import components here
from components import drive, intake, catapult
class MyRobot(wpilib.SimpleRobot):
def __init__ (self, drive, intake, catapult):
super().__init__()
print("Team 1418 autonomous code for 2014")
#################################################################
# THIS CODE IS SHARED BETWEEN THE MAIN ROBOT AND THE ELECTRICAL #
# TEST CODE. WHEN CHANGING IT, CHANGE BOTH PLACES! #
#################################################################
wpilib.SmartDashboard.init()
def on_enable(self):
time = wpilib.Timer()
timer.Start()
update (self, timer)
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
self.Compressor.Start()
self.intake.armDown()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
self.drive.move(self,0,-1,0)
self.catapult.launch()
self.catapult.pulldown()
self.robot.winch_motor.Set(0)
if self.robot.ball_sensor!=.4:
self.intake.wheels()
self.drive.move(self,0,1,0)
elif self.robot.ball_sensor==.4:
self.drive.move(self,0,-1,0)
self.catapult.launch()
'''Do not implement your own loop for autonomous mode. Instead,
assume that
this function is called over and over and over again during
autonomous
mode if this mode is active
time_elapsed is a number that tells you how many seconds
autonomous mode has
been running so far.
'''
| Bring the autonomous mode back | Bring the autonomous mode back
| Python | bsd-3-clause | frc1418/2014 |
ba4ea2169a13d61d30c94e89db512a34bc0fe3b5 | bluesky/tests/test_documents.py | bluesky/tests/test_documents.py | from bluesky.run_engine import RunEngine
from bluesky.tests.utils import setup_test_run_engine
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
| import pytest
import jsonschema
from bluesky.run_engine import RunEngine
from event_model import DocumentNames, schemas
from bluesky.tests.utils import setup_test_run_engine
from bluesky.utils import new_uid
from bluesky.examples import simple_scan, motor
RE = setup_test_run_engine()
def test_custom_metadata():
def assert_lion(name, doc):
assert 'animal' in doc
assert doc['animal'] == 'lion'
RE(simple_scan(motor), animal='lion', subs={'start': assert_lion})
# Note: Because assert_lion is processed on the main thread, it can
# fail the test. I checked by writing a failing version of it. - D.A.
def test_dots_not_allowed_in_keys():
doc = {'time': 0,
'uid': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.start])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.start])
# Now add illegal key.
doc.update({'b.': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.start])
doc = {'time': 0,
'uid': new_uid(),
'data_keys': {'a': {'source': '',
'dtype': 'number',
'shape': []}},
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
# Now add illegal key.
doc.update({'b.c': 'd'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.descriptor])
doc = {'time': 0,
'uid': new_uid(),
'exit_status': 'success',
'reason': '',
'run_start': new_uid()}
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Add a legal key.
doc.update({'b': 'c'})
jsonschema.validate(doc, schemas[DocumentNames.stop])
# Now add illegal key.
doc.update({'.b': 'c'})
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(doc, schemas[DocumentNames.stop])
| Test that event_model forbids dots in key names. | TST: Test that event_model forbids dots in key names.
| Python | bsd-3-clause | ericdill/bluesky,ericdill/bluesky |
aca1b138350434c9afb08f31164269cd58de1d2d | YouKnowShit/CheckFile.py | YouKnowShit/CheckFile.py | import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file) | import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file) | Add a level of uper directory | Add a level of uper directory
| Python | mit | jiangtianyu2009/PiSoftCake |
1f527bd99a35cf6396e6300369719b3f5f5490ff | app/main/forms.py | app/main/forms.py | from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
| from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
from .. import data_api_client
class AdminEmailAddressValidator(object):
def __init__(self, message=None):
self.message = message
def __call__(self, form, field):
if not data_api_client.email_is_valid_for_admin_user(field.data):
raise validators.StopValidation(self.message)
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
| Add new validator that applies data_api_client.email_is_valid_for_admin_user to field | Add new validator that applies data_api_client.email_is_valid_for_admin_user to field
| Python | mit | alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend |
62b7b01fe9a1d87692e97a6a75b52d542f8a43be | scrapi/processing/elastic_search.py | scrapi/processing/elastic_search.py | import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
logger = logging.getLogger(__name__)
logger.warn(es.count(index='share'))
| import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
class ElasticsearchProcessor(BaseProcessor):
    """Push normalized documents into the 'share' Elasticsearch index,
    preserving the originally-indexed ``dateUpdated`` across re-processing
    so updated documents are not bumped to the top of the date stream."""

    NAME = 'elasticsearch'

    def process_normalized(self, raw_doc, normalized):
        """Index *normalized* (filtered to FRONTEND_KEYS) into Elasticsearch.

        The document id is ``normalized['id']['serviceID']`` and the doc
        type is the harvester source name.
        """
        # Version dateUpdated *before* snapshotting the attributes: the
        # original mutated ``normalized`` after building ``data``, so the
        # preserved value never made it into the indexed body.
        normalized['dateUpdated'] = self.version_dateUpdated(normalized)
        data = {
            key: value for key, value in normalized.attributes.items()
            if key in settings.FRONTEND_KEYS
        }
        es.index(
            body=data,
            refresh=True,
            index='share',
            doc_type=normalized['source'],
            id=normalized['id']['serviceID'],
        )

    def version_dateUpdated(self, normalized):
        """Return the previously indexed dateUpdated, if any.

        Looks up the existing document (404s are ignored, yielding a falsy
        result) and falls back to the incoming document's own
        ``dateUpdated`` for first-time indexing.
        """
        old_doc = es.get_source(
            index='share',
            doc_type=normalized['source'],
            id=normalized['id']['serviceID'],
            ignore=[404]
        )
        return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
| Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream | Add some versioning for dateUpdated so that updated documents aren't bumped to the top of the stream
| Python | apache-2.0 | fabianvf/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,mehanig/scrapi,felliott/scrapi,ostwald/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,jeffreyliu3230/scrapi,alexgarciac/scrapi |
0460404bb7f3e9a9f6ece1c4a141b16fced6f741 | tests/test_chunked_http.py | tests/test_chunked_http.py | from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
| from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
| Use the disco.compat.http_server to work with python3. | Use the disco.compat.http_server to work with python3.
| Python | bsd-3-clause | pombredanne/disco,simudream/disco,ErikDubbelboer/disco,beni55/disco,discoproject/disco,ErikDubbelboer/disco,oldmantaiter/disco,simudream/disco,oldmantaiter/disco,seabirdzh/disco,seabirdzh/disco,seabirdzh/disco,discoproject/disco,ktkt2009/disco,discoproject/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,pombredanne/disco,mozilla/disco,mozilla/disco,discoproject/disco,beni55/disco,mwilliams3/disco,mwilliams3/disco,pombredanne/disco,simudream/disco,pombredanne/disco,seabirdzh/disco,ErikDubbelboer/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,ktkt2009/disco,seabirdzh/disco,beni55/disco,pooya/disco,mozilla/disco,mozilla/disco,pooya/disco,ktkt2009/disco,oldmantaiter/disco,discoproject/disco,pooya/disco,simudream/disco,oldmantaiter/disco,pombredanne/disco,oldmantaiter/disco,mwilliams3/disco,mwilliams3/disco,simudream/disco,pooya/disco,mwilliams3/disco |
dd5b8c55e601709f1f04cb0ec7dbde63b84801d8 | snippet_parser/fr.py | snippet_parser/fr.py | #-*- encoding: utf-8 -*-
import base
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
| #-*- encoding: utf-8 -*-
import base
def handle_date(template):
    """Render the French Wikipedia ``{{date|...}}`` template as plain text.

    NOTE(review): assumes *template* is a mwparserfromhell Template whose
    params stringify cleanly -- confirm; the use of ``unicode`` also pins
    this module to Python 2.
    """
    year = None
    if len(template.params) >= 3:
        try:
            # If the third parameter parses as an integer, the template is
            # using the split {{date|d|m|y|...}} form.
            year = int(unicode(template.params[2]))
        except ValueError:
            pass
    if isinstance(year, int):
        # assume {{date|d|m|y|...}}
        return ' '.join(map(unicode, template.params[:3]))
    else:
        # assume {{date|d m y|...}}
        return unicode(template.params[0])
def handle_s(template):
    """Render the French century templates ``{{s|...}}`` / ``{{-s|...}}``.

    ``{{-s|...}}`` denotes a century BC, hence the appended ' av. J.-C'.
    NOTE(review): ``+=`` on the parameter objects relies on their string
    coercion behaviour -- confirm against the template parser in use.
    """
    ret = template.params[0]
    if len(template.params) == 2:
        ret += template.params[1]
    if template.name.matches('-s'):
        ret += ' av. J.-C'
    return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
| Implement a couple of other French templates. | Implement a couple of other French templates.
Still need to add tests for these.
Former-commit-id: 4021d27a7bd15a396b637beb57c10fc95936cb3f | Python | mit | eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,eggpi/citationhunt |
878c14e04327f2f9d2d4acd22de21ed23b0cfb9a | skan/test/test_vendored_correlate.py | skan/test/test_vendored_correlate.py | from time import time
import numpy as np
from skan.vendored import thresholding as th
class Timer:
def __init__(self):
self.interval = 0
def __enter__(self):
self.t0 = time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.interval = time() - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
| from time import time
from functools import reduce
import numpy as np
from skan.vendored import thresholding as th
from skimage.transform import integral_image
from scipy import ndimage as ndi
class Timer:
    """Context manager that records the wall-clock duration of a ``with``
    block in its ``interval`` attribute (seconds; 0 until first use)."""

    def __init__(self):
        self.interval = 0

    def __enter__(self):
        self.t0 = time()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Capture the end time first, then store the elapsed duration.
        end = time()
        self.interval = end - self.t0
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with Timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with Timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1.interval < 2 * t0.interval
def test_reference_correlation():
ndim = 4
shape = np.random.randint(0, 20, size=ndim)
x = np.random.random(shape)
kern = reduce(np.outer, [[-1, 0, 0, 1]] * ndim).reshape((4,) * ndim)
px = np.pad(x, (2, 1), mode='reflect')
pxi = integral_image(px)
mean_fast = th.correlate_nonzeros(pxi, kern / 3 ** ndim)
mean_ref = ndi.correlate(x, np.ones((3,) * ndim) / 3 ** ndim,
mode='mirror')
np.testing.assert_allclose(mean_fast, mean_ref)
| Add test for new fast correlation | Add test for new fast correlation
| Python | bsd-3-clause | jni/skan |
98ba687e67c8d5a17560bed59f42dbe8e3fb0cf6 | amaascore/books/enums.py | amaascore/books/enums.py | from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Individual', 'Management', 'Trading', 'Wash'}
| from __future__ import absolute_import, division, print_function, unicode_literals
BOOK_TYPES = {'Counterparty', 'Management', 'Trading', 'Wash'}
| Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639. | Remove Individual as a book_type - it doesn’t really add anything. AMAAS-639.
| Python | apache-2.0 | amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python |
4650b6730d925c4a5fde34ec4c2f9058763ab58b | cupcake/smush.py | cupcake/smush.py | """
User-facing interface to all dimensionality reduction algorithms
"""
def smushplot(data, smusher):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
| """
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher, n_components=2, marker='o', marker_order=None,
              text=False, text_order=None, linewidth=1, linewidth_order=None,
              edgecolor='k', edgecolor_order=None, smusher_kws=None,
              plot_kws=None):
    """Plot *data* reduced to ``n_components`` dimensions by *smusher*.

    ``smusher`` is either the name of a scikit-learn dimensionality
    reduction algorithm (resolved from the string) or an
    already-initialized estimator exposing ``fit_transform()``.  The
    remaining keyword arguments configure the markers, text labels and
    line/edge styling of the resulting scatter plot.

    NOTE(review): both branches are currently unimplemented stubs.
    """
    if isinstance(smusher, str):
        # Need to get appropriate smusher from sklearn given the string
        pass
    else:
        # Assume this is already an initialized sklearn object with the
        # ``fit_transform()`` method
        pass
| Add a bunch of plotting and keyword arguments | Add a bunch of plotting and keyword arguments
| Python | bsd-3-clause | olgabot/cupcake |
fc7c08aecf9d247e54db70ae14c999902d6f6bfa | workflow/migrations/0024_auto_20180620_0537.py | workflow/migrations/0024_auto_20180620_0537.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-20 12:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0023_auto_20180425_0136'),
]
operations = [
migrations.AddField(
model_name='dashboard',
name='dashboard_uuid',
field=models.UUIDField(blank=True, null=True, default=None, verbose_name='Dashboard UUID'),
),
migrations.AddField(
model_name='dashboard',
name='public_url_token',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='dashboard',
name='public',
field=models.CharField(blank=True, choices=[('org', 'Organization'), ('url', 'URL'), ('all', 'All')], max_length=5, null=True),
),
]
| Fix the dashboard migration for UUID | Fix the dashboard migration for UUID
| Python | apache-2.0 | toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity |
9c6ad90f20354ca47a2fc56cc0d7ff6ebfc613d3 | weather/weather-display.py | weather/weather-display.py | #!/usr/bin/env python
from subprocess import call
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_image():
call('wget -O "%s" "%s"' % (OUTPUT_FILE, URL), shell=True)
def main():
clear_screen()
get_image()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
from subprocess import call
from datetime import datetime
URL = 'http://microdash.herokuapp.com/FOG/'
OUTPUT_FILE = '/mnt/us/weather/weather-script-output.png'
def clear_screen():
call('/usr/sbin/eips -c', shell=True)
def get_dashboard(url, output_file):
call('rm %s' % output_file, shell=True)
call('wget -O "%s" "%s"' % (output_file, url), shell=True)
def set_dashboard_background(image_path):
call('eips -g %s' % image_path, shell=True)
def main():
now = datetime.now()
# Only execute after 6 and before 10:
if (now.hour >= 5) and (now.hour <= 11):
clear_screen()
get_dashboard(URL, OUTPUT_FILE)
set_dashboard_background(OUTPUT_FILE)
if __name__ == "__main__":
main()
| Update weather display to be run during specified intervals. | Update weather display to be run during specified intervals.
| Python | bsd-3-clause | alfredo/microdash,alfredo/microdash |
c1189bf7c24068fda9871436a705b70fd016dfd5 | examples/json_editor.py | examples/json_editor.py | """
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
| """
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
pygment_style = random.choice(modes.PYGMENTS_STYLES)
logging.info('pygments style: %s', pygment_style)
self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
pygment_style)
self.action_open = QtWidgets.QAction('open file', self)
self.action_open.setShortcut('Ctrl+O')
self.action_open.triggered.connect(self.open_file)
self.addAction(self.action_open)
def open_file(self):
filename, _ = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open JSON file')
if filename:
self.editor.file.open(filename)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
| Add missing open action to the example so that you can open other files | Add missing open action to the example so that you can open other files
(usefull for testing and evaluating)
| Python | mit | pyQode/pyqode.json,pyQode/pyqode.json |
446738f7615711766952205558fee7ce85ca3a3b | MS1/ddp-erlang-style/dna_lib.py | MS1/ddp-erlang-style/dna_lib.py | __author__ = 'mcsquaredjr'
import os
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
def my_lines(ip):
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip, port = line.split(":")
if ip == str(ip):
line.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 | __author__ = 'mcsquaredjr'
import os
import socket
node_file = os.environ["NODES"]
cad_file = os.environ["CAD"]
procs_per_nod = os.environ["PROCS_PER_NODE"]
itemcount = os.environ["ITEMCOUNT"]
ddp = os.environment["DDP"]
def my_lines(i):
ip = socket.gethostbyname(socket.gethostname())
with open(cad_file, "r") as cad:
lines = []
for line in cad:
ip_str, port = line.split(":")
if ip_str == str(ip):
lines.append(line)
def chunk_number(i):
if i == 0 or i == 1:
return 0
else:
return i -1
def chunk_count(i):
with open(cad_file) as cad:
for i, l in enumerate(cad):
pass
return i + 1 - 2 | Add more variables and bug fixes | Add more variables and bug fixes
| Python | apache-2.0 | SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC |
f6013aa29fddf9883f8f0bea4b7733718b9d8846 | core/admin/migrations/versions/3f6994568962_.py | core/admin/migrations/versions/3f6994568962_.py | """ Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
from mailu import app
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
connection = op.get_bind()
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
# also apply the current config value if set
if app.config.get("FETCHMAIL_KEEP", "False") == "True":
connection.execute(
fetch_table.update().values(keep=True)
)
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
| """ Add keep as an option in fetches
Revision ID: 3f6994568962
Revises: 2335c80a6bc3
Create Date: 2017-02-02 22:31:00.719703
"""
# revision identifiers, used by Alembic.
revision = '3f6994568962'
down_revision = '2335c80a6bc3'
from alembic import op
import sqlalchemy as sa
fetch_table = sa.Table(
'fetch',
sa.MetaData(),
sa.Column('keep', sa.Boolean())
)
def upgrade():
op.add_column('fetch', sa.Column('keep', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
with op.batch_alter_table('fetch') as batch:
batch.drop_column('keep')
| Fix an old migration that was reading configuration before migrating | Fix an old migration that was reading configuration before migrating
| Python | mit | kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io |
9e785ef701e4c9d04924eff0ffc9c8d50fa267f6 | ingestors/email/outlookpst.py | ingestors/email/outlookpst.py | from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
self.exec_command('readpst',
'-M', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
| from ingestors.base import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
class OutlookPSTIngestor(Ingestor, TempFileSupport, ShellSupport, OLESupport):
MIME_TYPES = [
'application/vnd.ms-outlook'
]
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path):
self.extract_ole_metadata(file_path)
self.result.flag(self.result.FLAG_DIRECTORY)
with self.create_temp_dir() as temp_dir:
if self.result.mime_type is None:
self.result.mime_type = 'application/vnd.ms-outlook'
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
except Exception:
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, self.result, temp_dir)
raise
| Make readpst partial output ingest. | Make readpst partial output ingest.
| Python | mit | alephdata/ingestors |
b4c292374175b8623a232bed47e8fa0bef60680b | astatsscraper/parsing.py | astatsscraper/parsing.py | def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
full_url = response.urljoin(relative_url)
self.logger.debug(full_url)
yield scrapy.Request(full_url, callback=self.parse_game_stats)
else:
self.logger.debug('Link ignored ' + relative_url) | def parse_app_page(response):
# Should always be able to grab a title
title = response.xpath('//div[@class = "panel panel-default panel-gameinfo"]/div[@class = "panel-heading"]/text()').extract()[0].strip()
# Parse times into floats
time_to_hundo = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Hours to 100%"]/text()[last()]').extract()[0].strip()
time_to_hundo = time_to_hundo.replace(',', '.')
time_to_hundo = float(time_to_hundo)
# Points may or may not be present, default to 0 if absent
points = response.xpath('//table[@class = "Default1000"]/tr/td[span = "Points"]/text()[last()]').extract()
if not points:
points = 0
else:
points = int(points[0].strip())
yield {
'title': title,
'time to 100%': time_to_hundo,
'points': points,
}
def parse_search_result_for_apps(response):
for href in response.xpath('//table//table//a/@href'):
relative_url = href.extract()
if relative_url.startswith('Steam_Game_Info.php?AppID='):
yield relative_url[:len('Steam_Game_Info.php?AppID=')]
| Fix up parse search func | Fix up parse search func
| Python | mit | SingingTree/AStatsScraper,SingingTree/AStatsScraper |
43d283cb4cb01ec919f9129571a51594e699fcf5 | src/gogoutils/formats.py | src/gogoutils/formats.py | """Determine the generator format"""
from collections import ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
| """Determine the generator format"""
try:
from collections import ChainMap
except ImportError:
from ConfigParser import _Chainmap as ChainMap
DEFAULT_FORMAT = {
'domain': 'example.com',
'app': '{repo}{project}',
'dns_elb': '{repo}.{project}.{env}.{domain}',
'dns_instance': '{repo}{project}-xx.{env}.{domain}',
'iam_base': '{project}_{repo}',
'iam_user': '{project}_{repo}',
'iam_group': '{project}',
'iam_role': '{project}_{repo}_role',
'iam_policy': '{project}_{repo}_policy',
'iam_profile': '{project}_{repo}_profile',
's3_bucket': 'archaius-{env}',
's3_bucket_path': '{project}/{repo}{project}',
's3_archaius_name': 'archaius-{env}/{project}/{repo}{project}/',
'jenkins_job_name': '{project}_{repo}',
'git_repo': '{raw_project}/{raw_repo}',
'git_repo_qe': '{raw_project}/{raw_repo}-qa',
'git_repo_configs': '{raw_project}/{raw_repo}-config',
}
class Formats(object):
def __init__(self, config={}):
self.config = ChainMap(config, DEFAULT_FORMAT)
def get_formats(self):
return self.config
def __getitem__(self, key):
return self.config[key]
| Add Chainmap support for python2 | Add Chainmap support for python2
| Python | apache-2.0 | gogoair/gogo-utils |
a75ff3a9d9b86ea71fbc582641ea943c282bfe2d | analyser/api.py | analyser/api.py | from flask.ext.classy import FlaskView
class AnalyserView(FlaskView):
def get(self):
return "awesome"
| from flask.ext.classy import FlaskView
from utils.decorators import validate, require
from utils.validators import validate_url
class AnalyserView(FlaskView):
@require('url')
@validate({
'url': validate_url
})
def post(self, url):
return url
| Add more joy using decorators | Add more joy using decorators
| Python | apache-2.0 | vtemian/kruncher |
402c010b6ab4673ae3b5c684b8e0c155ec98b172 | gentle/gt/operations.py | gentle/gt/operations.py | #coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(green(service + "start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + "fail..."))
continue
print(green(service + "end..."))
| #coding=utf-8
from __future__ import absolute_import
from fabric.api import local, run, sudo, task
from fabric.contrib.console import confirm
from fabric.state import env
from fabric.context_managers import cd, lcd, hide, settings
from fabric.colors import red, green, yellow
from .utils import repl_root
from .project import rsync_project
@task(alias='p', default=True)
def publish():
'''Publish your app'''
rsync()
restart()
@task(alias='rs')
def rsync():
'''Rsync your local dir to remote'''
rsync_project(env.rsync['rpath'], repl_root(env.rsync['lpath']),
sshpass=True)
@task(alias='rt')
def restart():
'''Restart your services'''
for service, need_ops in env.services.iteritems():
print(yellow(service) + ": " + green("start..."))
try:
rsync_project(need_ops['rpath'], need_ops['lpath'], sshpass=True)
if need_ops['sudo']:
sudo(need_ops['command'], pty=False,
user=need_ops['user'] if need_ops['user'] else env.user)
else:
run(need_ops['command'])
except:
print(red(service + ": fail..."))
continue
print(yellow(service) + ": " + green("end..."))
| Add yellow color for services | Add yellow color for services
| Python | apache-2.0 | dongweiming/gentle |
7a804eac3f354a778eda3daa8cd5f88b09259f74 | south/signals.py | south/signals.py | """
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
if 'django.contrib.auth' in settings.INSTALLED_APPS:
def create_permissions_compat(app, **kwargs):
from django.db.models import get_app
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), (), 0)
post_migrate.connect(create_permissions_compat)
| """
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
#def create_permissions_compat(app, **kwargs):
#from django.db.models import get_app
#from django.contrib.auth.management import create_permissions
#create_permissions(get_app(app), (), 0)
#post_migrate.connect(create_permissions_compat)
| Remove the auth contenttypes thing for now, needs improvement | Remove the auth contenttypes thing for now, needs improvement
| Python | apache-2.0 | smartfile/django-south,smartfile/django-south |
8b1d878aff4168d74437d3ba0cfaf8307e7c377d | consts/model_type.py | consts/model_type.py | class ModelType(object):
"""
Enums for the differnet model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
AWARD = 5
MEDIA = 6
| class ModelType(object):
"""
Enums for the differnet model types
DO NOT CHANGE EXISTING ONES
"""
EVENT = 0
TEAM = 1
MATCH = 2
EVENT_TEAM = 3
DISTRICT = 4
DISTRICT_TEAM = 5
AWARD = 6
MEDIA = 7
| Update model enums to match app | Update model enums to match app
| Python | mit | josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-allia
nce,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,1fish2/the-blue-alliance |
4fb3a127706d7ff7ead0d2d8b698183905d85d4e | dependency_injector/__init__.py | dependency_injector/__init__.py | """Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .errors import Error
__all__ = ('AbstractCatalog',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
# Decorators
'override',
'inject',
# Errors
'Error')
| """Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
| Add additional shortcuts for top level package | Add additional shortcuts for top level package
| Python | bsd-3-clause | ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector,rmk135/objects |
b61bf7dbdb26b6ff3e76f10173ffb94a76cd4f4e | lego.py | lego.py | import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = []
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if len(self.rect) == 4:
self.rect = []
self.rect.append((x, y))
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
| import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def has_roi(self):
return self.rect_index == 3
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index][0] = x
self.rect[self.rect_index][1] = y
if self.has_roi():
print self.rect
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
| Use numpy array for ROI | Use numpy array for ROI
| Python | mit | superquadratic/beat-bricks |
da75222fa286588394da7f689d47bd53716ffaa1 | coverage/execfile.py | coverage/execfile.py | """Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module("__main__")
sys.modules['__main__'] = main_mod
main_mod.__dict__.update({
'__name__': '__main__',
'__file__': filename,
})
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
| """Execute files of Python code."""
import imp, os, sys
def run_python_file(filename, args):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element representing the file being executed.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
# Set sys.argv and the first path element properly.
old_argv = sys.argv
old_path0 = sys.path[0]
sys.argv = args
sys.path[0] = os.path.dirname(filename)
try:
source = open(filename).read()
exec compile(source, filename, "exec") in main_mod.__dict__
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
sys.path[0] = old_path0
| Simplify the construction of the __main__ module in run_python_file. | Simplify the construction of the __main__ module in run_python_file.
| Python | apache-2.0 | blueyed/coveragepy,nedbat/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,hugovk/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,hugovk/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy |
64d7fb0b9ae9e14447a236a51e27b033aee20219 | urls.py | urls.py | from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^registration/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
| from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import direct_to_template
import settings
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^accounts/', include('registration.urls')),
(r'^$', direct_to_template,
{ 'template': 'index.html' }, 'index'),
)
if settings.STATIC_SERVE:
urlpatterns += patterns('',
url(
regex = r'^media/(?P<path>.*)$',
view = 'django.views.static.serve',
kwargs = {'document_root': settings.MEDIA_ROOT}),
)
| Move pages from registration to accounts/ | Move pages from registration to accounts/
| Python | agpl-3.0 | pu239ppy/authentic2,BryceLohr/authentic,pu239ppy/authentic2,incuna/authentic,adieu/authentic2,pu239ppy/authentic2,pu239ppy/authentic2,incuna/authentic,adieu/authentic2,adieu/authentic2,BryceLohr/authentic,BryceLohr/authentic,incuna/authentic,incuna/authentic,adieu/authentic2,incuna/authentic,BryceLohr/authentic |
96884e4c35b89cb1f63a6249c9c24e27894a3752 | tacker/db/api.py | tacker/db/api.py | # Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
| # Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_db.sqlalchemy import enginefacade
context_manager = enginefacade.transaction_context()
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
_FACADE = context_manager._factory.get_legacy_facade()
return _FACADE
def get_engine():
"""Helper method to grab engine."""
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session."""
facade = _create_facade_lazily()
return facade.get_session(autocommit=autocommit,
expire_on_commit=expire_on_commit)
| Remove unused LOG to keep code clean | Remove unused LOG to keep code clean
TrivialFix
Change-Id: I21fa9ebda98005c377d395f498cb44cf6599f0e5
| Python | apache-2.0 | stackforge/tacker,zeinsteinz/tacker,stackforge/tacker,openstack/tacker,openstack/tacker,openstack/tacker,zeinsteinz/tacker |
50bdb59f7629b60d6aa6c9f3f21b447f00476b19 | webmanager/views_oauth2.py | webmanager/views_oauth2.py | from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
user_instance = user_access_token.user # User.objects.get(username=user_access_token.user)
user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user_instance)
return HttpResponseRedirect(target)
| from djangoautoconf.django_utils import retrieve_param
from django.utils import timezone
from provider.oauth2.backends import AccessTokenBackend
from provider.oauth2.models import AccessToken
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import login
from djangoautoconf.req_with_auth import login_by_django_user
def login_from_oauth2(request):
data = retrieve_param(request)
target = data.get("target", "/admin")
if "access_token" in data:
access_tokens = AccessToken.objects.filter(token=data["access_token"], expires__gt=timezone.now())
if access_tokens.exists():
user_access_token = access_tokens[0]
user_access_token.expires = timezone.now()
user_access_token.save()
django_user_instance = user_access_token.user
login_by_django_user(django_user_instance, request)
return HttpResponseRedirect(target)
| Move user login code to djangoautoconf. | Move user login code to djangoautoconf.
| Python | bsd-3-clause | weijia/webmanager,weijia/webmanager,weijia/webmanager |
e4a3d3c273a1b7e26e9fdcf7f8da060bf127f27e | examples/django_project/django_project/tests.py | examples/django_project/django_project/tests.py | import unittest
from datetime import datetime
from snapshottest.django import TestCase
def api_client_get(url):
return {
'url': url,
}
class TestDemo(TestCase):
def test_api_me(self):
now = datetime.now().isoformat()
my_api_response = api_client_get('/' + now)
self.assertMatchSnapshot(my_api_response)
if __name__ == '__main__':
unittest.main()
| import unittest
from datetime import datetime
from snapshottest.django import TestCase
def api_client_get(url):
return {
'url': url,
}
class TestDemo(TestCase):
def test_api_me(self):
# Note this tests should fail unless the snapshot-update command line
# option is specified. Run `python manage.py test --snapshot-update`.
now = datetime.now().isoformat()
my_api_response = api_client_get('/' + now)
self.assertMatchSnapshot(my_api_response)
if __name__ == '__main__':
unittest.main()
| Add note for test_api_me django-project test | Add note for test_api_me django-project test
| Python | mit | syrusakbary/snapshottest |
18a133cc3b56fdc1a5185bf3da3bed001e1b5c64 | dictionary.py | dictionary.py | # Import non-standard modules
import json
def definition(word):
'''This function returns the available definitions(s) of the input'''
return data[word]
# Load dictionary data from data.json to python dictionary
data = json.load(open('data.json', 'r'))
while True:
ip = input("Enter word:")
if ip in {'!q', '!Q'}:
break
elif data.__contains__(ip):
print(definition(ip))
else:
print("Please enter a valid word! \nEnter '!q' to quit!!!\n")
| # Import non-standard modules
import json
from difflib import get_close_matches
def definition(word):
'''This function returns the available definitions(s) of the input'''
return data[word]
# Load dictionary data from data.json to python dictionary
data = json.load(open('data.json', 'r'))
# Infinite loop for processing
while True:
# Accept case-insensitive input from user
ip = str(input("Enter word:")).lower()
# Exit from program - user choice
if ip == '!q':
break
# Check dictionary for definition
elif data.__contains__(ip):
print(definition(ip))
# If exact definition is not found, provide suggestion
elif len(get_close_matches(ip, data.keys(), cutoff=0.8)) > 0:
print("Did you mean to type",
get_close_matches(ip, data.keys(), cutoff=0.8)[0], "?(y/n):")
choice = str(input()).lower()
# Provide output if generated suggestion is accepted
if choice == 'y':
ip = get_close_matches(ip, data.keys(), cutoff=0.8)[0]
print(definition(ip))
# No suggestion or definition found
else:
print("No such word exists!! \nEnter '!q' to quit!!!")
| Add feature to process user input and provide automatic suggestions | Add feature to process user input and provide automatic suggestions
| Python | mit | jojenreed/Python-CLI-Dictionary |
cc838a311e891294da10ca7465782d71e622b5ef | dodge.py | dodge.py | import platform
class OSXDodger(object):
allowed_version = "10.12.1"
def __init__(self, applications_dir):
self.app_dir = applications_dir
def load_applications(self):
"""
Read all applications in the `/Applications/` dir
"""
pass
def select_applications(self):
"""
Allow user to select an application they want
not to appear on the Dock
"""
pass
def load_dodger_filer(self):
"""
Load the file to modify for the application
chosen by the user in `select_applications`
The file to be loaded for is `info.plist`
"""
pass
def dodge_application(self):
"""
Remive the application from the Dock
"""
pass
@classmethod
def pc_is_macintosh(cls):
"""
Check if it is an `Apple Computer` i.e a Mac
@return bool
"""
system = platform.system().lower()
sys_version = int((platform.mac_ver())[0].replace(".", ""))
allowed_version = int(cls.allowed_version.replace(".", ""))
if (system == "darwin") and (sys_version >= allowed_version):
return True
else:
print("\nSorry :(")
print("FAILED. OsX-dock-dodger is only applicable to computers " +
"running OS X {} or higher".format(cls.allowed_version))
return False
dodge = OSXDodger("/Applications/")
dodge.pc_is_macintosh()
| import platform
class OSXDodger(object):
allowed_version = "10.12.1"
def __init__(self, applications_dir):
self.app_dir = applications_dir
def load_applications(self):
"""
Read all applications in the `/Applications/` dir
"""
self.pc_is_macintosh()
def select_applications(self):
"""
Allow user to select an application they want
not to appear on the Dock
"""
pass
def load_dodger_filer(self):
"""
Load the file to modify for the application
chosen by the user in `select_applications`
The file to be loaded for is `info.plist`
"""
pass
def dodge_application(self):
"""
Remive the application from the Dock
"""
pass
@classmethod
def pc_is_macintosh(cls):
"""
Check if it is an `Apple Computer` i.e a Mac
@return bool
"""
system = platform.system().lower()
sys_version = int((platform.mac_ver())[0].replace(".", ""))
allowed_version = int(cls.allowed_version.replace(".", ""))
if (system == "darwin") and (sys_version >= allowed_version):
return True
else:
print("\nSorry :(")
print("FAILED. OsX-dock-dodger is only applicable to computers " +
"running OS X {} or higher".format(cls.allowed_version))
return False
dodge = OSXDodger("/Applications/")
dodge.load_applications()
| Add class method to check if PC is a Mac | Add class method to check if PC is a Mac
| Python | mit | yoda-yoda/osx-dock-dodger,denisKaranja/osx-dock-dodger |
967f9363872ff64847e4b93a7c1ea75869eaabd9 | benchmarks/benchmarks.py | benchmarks/benchmarks.py | # Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os.path as osp
import numpy as np
from tempfile import TemporaryDirectory
import h5py
class TimeSuite:
"""
An example benchmark that times the performance of various kinds
of iterating over dictionaries in Python.
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
with h5py.File(path, 'w') as f:
f['a'] = np.arange(100000)
self.f = h5py.File(path, 'r')
def teardown(self):
self.f.close()
self._td.cleanup()
def time_many_small_reads(self):
ds = self.f['a']
for i in range(10000):
arr = ds[i * 10:(i + 1) * 10]
| # Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os.path as osp
import numpy as np
from tempfile import TemporaryDirectory
import h5py
class TimeSuite:
"""
An example benchmark that times the performance of various kinds
of iterating over dictionaries in Python.
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
with h5py.File(path, 'w') as f:
f['a'] = np.arange(100000)
self.f = h5py.File(path, 'r')
def teardown(self):
self.f.close()
self._td.cleanup()
def time_many_small_reads(self):
ds = self.f['a']
for i in range(10000):
arr = ds[i * 10:(i + 1) * 10]
class WritingTimeSuite:
"""Based on example in GitHub issue 492:
https://github.com/h5py/h5py/issues/492
"""
def setup(self):
self._td = TemporaryDirectory()
path = osp.join(self._td.name, 'test.h5')
self.f = h5py.File(path, 'w')
self.shape = shape = (128, 1024, 512)
self.f.create_dataset(
'a', shape=shape, dtype=np.float32, chunks=(1, shape[1], 64)
)
def teardown(self):
self.f.close()
self._td.cleanup()
def time_write_index_last_axis(self):
ds = self.f['a']
data = np.zeros(self.shape[:2])
for i in range(self.shape[2]):
ds[..., i] = data
def time_write_slice_last_axis(self):
ds = self.f['a']
data = np.zeros(self.shape[:2])
for i in range(self.shape[2]):
ds[..., i:i+1] = data[..., np.newaxis]
| Add benchmark for writing with index of last dimension | Add benchmark for writing with index of last dimension
| Python | bsd-3-clause | h5py/h5py,h5py/h5py,h5py/h5py |
7613fc5c3ef1cc552d39e5fde533e8f9af7cc931 | form_designer/tests/test_cms_plugin.py | form_designer/tests/test_cms_plugin.py | import django
from django.contrib.auth.models import AnonymousUser
from django.utils.crypto import get_random_string
import pytest
from cms import api
from cms.page_rendering import render_page
from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin
from form_designer.models import FormDefinition, FormDefinitionField
@pytest.mark.django_db
def test_cms_plugin_renders_in_cms_page(rf):
if django.VERSION >= (1, 10):
pytest.xfail('This test is broken in Django 1.10+')
fd = FormDefinition.objects.create(
mail_to='test@example.com',
mail_subject='Someone sent you a greeting: {{ test }}'
)
field = FormDefinitionField.objects.create(
form_definition=fd,
name='test',
label=get_random_string(),
field_class='django.forms.CharField',
)
page = api.create_page("test", "page.html", "en")
ph = page.get_placeholders()[0]
api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd)
request = rf.get("/")
request.user = AnonymousUser()
request.current_page = page
response = render_page(request, page, "fi", "test")
response.render()
content = response.content.decode("utf8")
assert field.label in content
assert "<form" in content
| from django.contrib.auth.models import AnonymousUser
from django.utils.crypto import get_random_string
import pytest
from cms import api
from cms.page_rendering import render_page
from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin
from form_designer.models import FormDefinition, FormDefinitionField
@pytest.mark.django_db
def test_cms_plugin_renders_in_cms_page(rf):
fd = FormDefinition.objects.create(
mail_to='test@example.com',
mail_subject='Someone sent you a greeting: {{ test }}'
)
field = FormDefinitionField.objects.create(
form_definition=fd,
name='test',
label=get_random_string(),
field_class='django.forms.CharField',
)
page = api.create_page("test", "page.html", "en")
ph = page.get_placeholders()[0]
api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd)
request = rf.get("/")
request.user = AnonymousUser()
request.current_page = page
response = render_page(request, page, "fi", "test")
response.render()
content = response.content.decode("utf8")
assert field.label in content
assert "<form" in content
| Revert "Disable Django-CMS test on Django 1.10+" | Revert "Disable Django-CMS test on Django 1.10+"
Django CMS tests should work now with Django 1.10 and 1.11 too, since
the Django CMS version 3.4.5 supports them.
This reverts commit fcfe2513fc8532dc2212a254da42d75048e76de7.
| Python | bsd-3-clause | kcsry/django-form-designer,andersinno/django-form-designer-ai,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,andersinno/django-form-designer |
39a1212508c27a5c21f8b027fef3fb409a28657f | app/commands.py | app/commands.py | from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
| import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
| Switch existing command to standard approach | Switch existing command to standard approach
This is the suggested approach in the documentation [1] and using
it makes it clearer what's going on and to add other commands with
arguments, which we'll do in the next commit.
[1]: https://flask.palletsprojects.com/en/2.0.x/cli/#custom-commands
| Python | mit | alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin |
79a2f5ddc1d6d05dca0d44ee6586b2d8809ee1c0 | deep_q_rl/ale_run.py | deep_q_rl/ale_run.py | """This script launches all of the processes necessary to train a
deep Q-network on an ALE game.
Usage:
ale_run.py [--glue_port GLUE_PORT]
All unrecognized command line arguments will be passed on to
rl_glue_ale_agent.py
"""
import subprocess
import sys
import os
import argparse
ROM_PATH = "/home/spragunr/neural_rl_libraries/roms/breakout.bin"
# Check for glue_port command line argument and set it up...
parser = argparse.ArgumentParser(description='Neural rl agent.')
parser.add_argument('--glue_port', type=str, default="4096",
help='rlglue port (default 4096)')
args, unknown = parser.parse_known_args()
my_env = os.environ.copy()
my_env["RLGLUE_PORT"] = args.glue_port
# Start the necessary processes:
p1 = subprocess.Popen(['rl_glue'], env=my_env)
ale_string = ("ale -game_controller rlglue -frame_skip 4 "
"-restricted_action_set true ")
p2 = subprocess.Popen(ale_string + ROM_PATH, shell=True, env=my_env)
p3 = subprocess.Popen(['./rl_glue_ale_experiment.py'], env=my_env)
p4 = subprocess.Popen(['./rl_glue_ale_agent.py'] + sys.argv[1:], env=my_env)
p1.wait()
p2.wait()
p3.wait()
p4.wait()
| """This script launches all of the processes necessary to train a
deep Q-network on an ALE game.
Usage:
ale_run.py [--glue_port GLUE_PORT]
All unrecognized command line arguments will be passed on to
rl_glue_ale_agent.py
"""
import subprocess
import sys
import os
import argparse
# Put your binaries under the directory 'deep_q_rl/roms'
ROM_PATH = "../roms/breakout.bin"
# Check for glue_port command line argument and set it up...
parser = argparse.ArgumentParser(description='Neural rl agent.')
parser.add_argument('--glue_port', type=str, default="4096",
help='rlglue port (default 4096)')
args, unknown = parser.parse_known_args()
my_env = os.environ.copy()
my_env["RLGLUE_PORT"] = args.glue_port
# Start the necessary processes:
p1 = subprocess.Popen(['rl_glue'], env=my_env)
ale_string = ("ale -game_controller rlglue -frame_skip 4 "
"-restricted_action_set true ")
p2 = subprocess.Popen(ale_string + ROM_PATH, shell=True, env=my_env)
p3 = subprocess.Popen(['./rl_glue_ale_experiment.py'], env=my_env)
p4 = subprocess.Popen(['./rl_glue_ale_agent.py'] + sys.argv[1:], env=my_env)
p1.wait()
p2.wait()
p3.wait()
p4.wait()
| Make the position of the roms work for everybody. | Make the position of the roms work for everybody.
Here you previously used a directory that mentions your particular setting. Substitute it by a relative directory so it works for everybody. Also, suggest at the read file to create a 'rome' directory with 'breakout.bin' in it. | Python | bsd-3-clause | r0k3/deep_q_rl,aaannndddyyy/deep_q_rl,davidsj/deep_q_rl,spragunr/deep_q_rl,gogobebe2/deep_q_rl,aaannndddyyy/deep_q_rl,omnivert/deep_q_rl,npow/deep_q_rl,vvw/deep_q_rl,codeaudit/deep_q_rl,alito/deep_q_rl,jleni/deep_q_rl,peterzcc/deep_q_rl,davidsj/deep_q_rl,spragunr/deep_q_rl,codeaudit/deep_q_rl,udibr/deep_q_rl,udibr/deep_q_rl,sygi/deep_q_rl,r0k3/deep_q_rl,tmylk/deep_q_rl,jcatw/deep_q_rl,gogobebe2/deep_q_rl,vvw/deep_q_rl,jleni/deep_q_rl,tmylk/deep_q_rl,peterzcc/deep_q_rl,jcatw/deep_q_rl,alito/deep_q_rl,sygi/deep_q_rl,omnivert/deep_q_rl |
8b16084a4fe72a369ada80969f6b728abf611d8f | marathon_itests/environment.py | marathon_itests/environment.py | import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
print_container_logs('zookeeper')
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
| import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if scenario.status != 'passed':
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
| Print container logs only in case of failed scenario | Print container logs only in case of failed scenario
| Python | apache-2.0 | somic/paasta,gstarnberger/paasta,Yelp/paasta,gstarnberger/paasta,somic/paasta,Yelp/paasta |
a8af7cd7918322615a342c2fd662f394866da55f | tests/sentry/metrics/test_datadog.py | tests/sentry/metrics/test_datadog.py | from __future__ import absolute_import
import socket
from mock import patch
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=socket.gethostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=socket.gethostname(),
)
| from __future__ import absolute_import
from mock import patch
from datadog.util.hostname import get_hostname
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
def setUp(self):
self.backend = DatadogMetricsBackend(prefix='sentrytest.')
@patch('datadog.threadstats.base.ThreadStats.increment')
def test_incr(self, mock_incr):
self.backend.incr('foo', instance='bar')
mock_incr.assert_called_once_with(
'sentrytest.foo', 1,
tags=['instance:bar'],
host=get_hostname(),
)
@patch('datadog.threadstats.base.ThreadStats.timing')
def test_timing(self, mock_timing):
self.backend.timing('foo', 30, instance='bar')
mock_timing.assert_called_once_with(
'sentrytest.foo', 30,
sample_rate=1,
tags=['instance:bar'],
host=get_hostname(),
)
| Test DatadogMetricsBackend against datadog's get_hostname | Test DatadogMetricsBackend against datadog's get_hostname
This fixes tests in Travis since the hostname returned is different
| Python | bsd-3-clause | pauloschilling/sentry,pauloschilling/sentry,pauloschilling/sentry |
7c117c4555fdf30b3d98a453c7e28245dca0c9b9 | tests/test_stack/test_stack.py | tests/test_stack/test_stack.py | import unittest
from aids.stack.stack import Stack
class StackTestCase(unittest.TestCase):
'''
Unit tests for the Stack data structure
'''
def setUp(self):
pass
def test_stack_initialization(self):
test_stack = Stack()
self.assertTrue(isinstance(test_stack, Stack))
def test_stack_is_empty(self):
test_stack = Stack()
self.assertTrue(test_stack.is_empty())
def tearDown(self):
pass
| import unittest
from aids.stack.stack import Stack
class StackTestCase(unittest.TestCase):
'''
Unit tests for the Stack data structure
'''
def setUp(self):
self.test_stack = Stack()
def test_stack_initialization(self):
self.assertTrue(isinstance(self.test_stack, Stack))
def test_stack_is_empty(self):
self.assertTrue(self.test_stack.is_empty())
def test_stack_push(self):
self.test_stack.push(1)
self.assertEqual(len(self.test_stack), 1)
def test_stack_peek(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.peek(), 1)
def test_stack_pop(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.pop(), 1)
def test_stack_size(self):
self.test_stack.push(1)
self.assertEqual(self.test_stack.size(), 1)
def tearDown(self):
pass
| Add unit tests for push, peek, pop and size | Add unit tests for push, peek, pop and size | Python | mit | ueg1990/aids |
1ef1851e508295f6d4bf01289591f42c21656df7 | test/on_yubikey/test_interfaces.py | test/on_yubikey/test_interfaces.py | import unittest
from .framework import DestructiveYubikeyTestCase, exactly_one_yubikey_present
from yubikit.core.otp import OtpConnection
from yubikit.core.fido import FidoConnection
from yubikit.core.smartcard import SmartCardConnection
from ykman.device import connect_to_device
from time import sleep
@unittest.skipIf(
not exactly_one_yubikey_present(), "Exactly one YubiKey must be present."
)
class TestInterfaces(DestructiveYubikeyTestCase):
def try_connection(self, conn_type):
for _ in range(8):
try:
conn = connect_to_device(None, [conn_type])[0]
conn.close()
return
except Exception:
sleep(0.5)
self.fail("Failed connecting to device over %s" % conn_type)
def test_switch_interfaces(self):
self.try_connection(FidoConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
| import unittest
from .framework import DestructiveYubikeyTestCase, exactly_one_yubikey_present
from yubikit.core.otp import OtpConnection
from yubikit.core.fido import FidoConnection
from yubikit.core.smartcard import SmartCardConnection
from ykman.base import YUBIKEY
from ykman.device import connect_to_device
from time import sleep
@unittest.skipIf(
not exactly_one_yubikey_present(), "Exactly one YubiKey must be present."
)
class TestInterfaces(DestructiveYubikeyTestCase):
def try_connection(self, conn_type):
if self.key_type == YUBIKEY.NEO and conn_type == SmartCardConnection:
sleep(3.5)
conn, dev, info = connect_to_device(None, [conn_type])
conn.close()
def setUp(self):
conn, dev, info = connect_to_device()
conn.close()
self.key_type = dev.pid.get_type()
def test_switch_interfaces(self):
self.try_connection(FidoConnection)
self.try_connection(OtpConnection)
self.try_connection(FidoConnection)
self.try_connection(SmartCardConnection)
self.try_connection(OtpConnection)
self.try_connection(SmartCardConnection)
self.try_connection(FidoConnection)
| Test handling of reclaim timeout. | Test handling of reclaim timeout.
| Python | bsd-2-clause | Yubico/yubikey-manager,Yubico/yubikey-manager |
300cbd3ff4d0e5021892f7c9940635695cb017a3 | integration-test/197-clip-buildings.py | integration-test/197-clip-buildings.py | from shapely.geometry import shape
# this is mid way along the High Line in NYC, which is a huge long
# "building". we should be clipping it to a buffer of 3x the tile
# dimensions.
# http://www.openstreetmap.org/way/37054313
with features_in_tile_layer(16, 19295, 24631, 'buildings') as buildings:
# max width and height in degress as 3x the size of the above tile
max_w = 0.0164794921875
max_h = 0.012484410579673977
# need to check that we at least saw the high line
saw_the_high_line = False
for building in buildings:
bounds = shape(building['geometry']).bounds
w = bounds[2] - bounds[0]
h = bounds[3] - bounds[1]
if building['properties']['id'] == 37054313:
saw_the_high_line = True
if w > max_w or h > max_h:
raise Exception("feature %r is %rx%r, larger than the allowed "
"%rx%r."
% (building['properties']['id'],
w, h, max_w, max_h))
if not saw_the_high_line:
raise Exception("Expected to see the High Line in this tile, "
"but didn't.")
| from shapely.geometry import shape
# this is mid way along the High Line in NYC, which is a huge long
# "building". we should be clipping it to a buffer of 3x the tile
# dimensions.
# http://www.openstreetmap.org/relation/7141751
with features_in_tile_layer(16, 19295, 24631, 'buildings') as buildings:
# max width and height in degress as 3x the size of the above tile
max_w = 0.0164794921875
max_h = 0.012484410579673977
# need to check that we at least saw the high line
saw_the_high_line = False
for building in buildings:
bounds = shape(building['geometry']).bounds
w = bounds[2] - bounds[0]
h = bounds[3] - bounds[1]
if building['properties']['id'] == -7141751:
saw_the_high_line = True
if w > max_w or h > max_h:
raise Exception("feature %r is %rx%r, larger than the allowed "
"%rx%r."
% (building['properties']['id'],
w, h, max_w, max_h))
if not saw_the_high_line:
raise Exception("Expected to see the High Line in this tile, "
"but didn't.")
| Update data for the High Line, NYC | Update data for the High Line, NYC
Looks like it was turned into a multipolygon relation in [this changeset](http://www.openstreetmap.org/changeset/47542769).
| Python | mit | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource |
86446c6d1b0b8583562e0fccf1745e95ce7003c2 | util/__init__.py | util/__init__.py | #!/usr/bin/env python
#
# dials.util.__init__.py
#
# Copyright (C) 2013 Diamond Light Source
#
# Author: James Parkhurst
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
| #!/usr/bin/env python
#
# dials.util.__init__.py
#
# Copyright (C) 2013 Diamond Light Source
#
# Author: James Parkhurst
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import division
class HalError(RuntimeError):
def __init__(self, string=''):
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Dave'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that. {1}'.format(
username, string)
# Init base class
RuntimeError.__init__(self, text)
def halraiser(e):
''' Function to re-raise an exception with a Hal message. '''
import logging
# Get the log and write the error to the log file
log = logging.getLogger(__name__)
log.error(e)
# Get the username
try:
from getpass import getuser
username = getuser()
except Exception:
username = 'Humanoid'
# Put in HAL error text.
text = 'I\'m sorry {0}. I\'m afraid I can\'t do that.'.format(username)
# Append to exception
if len(e.args) == 0:
e.args = (text,)
elif len(e.args) == 1:
e.args = (text + ' ' + e.args[0],)
else:
e.args = (text,) + e.args
# Reraise the exception
raise
| Print out errors to log. | Print out errors to log. | Python | bsd-3-clause | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials |
aa3134912af3e57362310eb486d0f4e1d8660d0c | grains/grains.py | grains/grains.py | # File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
import itertools
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
if num == 1:
return 1
else:
for k, v in board.iteritems():
if k == num:
total_after = sum(map(board.get, itertools.takewhile(lambda key: key != v, board)))
return total_after
print (board)
print (total_after(1))
print(on_square(1))
| # File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
def on_square(num):
for k, v in board.iteritems():
if k == num:
return v
def total_after(num):
total = 0
for i in range(1, num+1):
total += on_square(i)
return total
| Reformat total_after function + Remove itertools | Reformat total_after function + Remove itertools
| Python | mit | amalshehu/exercism-python |
5188861fb873ea301eb5ec386f1df65c0707e146 | openstack/tests/functional/object/v1/test_container.py | openstack/tests/functional/object/v1/test_container.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.object_store.v1 import container
from openstack.tests.functional import base
class TestContainer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
@classmethod
def setUpClass(cls):
super(TestContainer, cls).setUpClass()
tainer = cls.conn.object_store.create_container(name=cls.NAME)
assert isinstance(tainer, container.Container)
cls.assertIs(cls.NAME, tainer.name)
@classmethod
def tearDownClass(cls):
pass
# TODO(thowe): uncomment this when bug/1451211 fixed
# tainer = cls.conn.object_store.delete_container(cls.NAME,
# ignore_missing=False)
# cls.assertIs(None, tainer)
def test_list(self):
names = [o.name for o in self.conn.object_store.containers()]
self.assertIn(self.NAME, names)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.object_store.v1 import container
from openstack.tests.functional import base
class TestContainer(base.BaseFunctionalTest):
    # Random container name so concurrent test runs don't collide.
    NAME = uuid.uuid4().hex
    @classmethod
    def setUpClass(cls):
        # Create the shared container once for every test in the class.
        super(TestContainer, cls).setUpClass()
        tainer = cls.conn.object_store.create_container(name=cls.NAME)
        assert isinstance(tainer, container.Container)
        # NOTE(review): assertIs is an *instance* method of TestCase;
        # calling it on the class binds cls.NAME as `self`, so this is
        # unlikely to perform the intended comparison -- confirm.
        cls.assertIs(cls.NAME, tainer.name)
    @classmethod
    def tearDownClass(cls):
        # Remove the shared container; fail loudly if it is already
        # gone (ignore_missing=False).
        result = cls.conn.object_store.delete_container(cls.NAME,
                                                        ignore_missing=False)
        # NOTE(review): same assertIs-on-the-class concern as above.
        cls.assertIs(None, result)
    def test_list(self):
        # The freshly created container must appear in the listing.
        names = [o.name for o in self.conn.object_store.containers()]
        self.assertIn(self.NAME, names)
    def test_get_metadata(self):
        # A brand-new, empty container reports zero objects and bytes.
        tainer = self.conn.object_store.get_container_metadata(self.NAME)
        self.assertEqual(0, tainer.object_count)
        self.assertEqual(0, tainer.bytes_used)
        self.assertEqual(self.NAME, tainer.name)
| Add functional tests for container metadata and delete | Add functional tests for container metadata and delete
Change-Id: Id13073b37d19482ca6ff6e53e315aaa424c3f1b9
| Python | apache-2.0 | openstack/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,stackforge/python-openstacksdk |
6b84688c1b5a7f2e8c9e5007455b88cbaa845e9f | tests/test_track_output/results.py | tests/test_track_output/results.py | #!/usr/bin/env python
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../track.py', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
| #!/usr/bin/env python
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
    import vtk
except ImportError:
    # Without the vtk module the track converter can't run; copy the
    # expected results over so the comparison step passes as a no-op.
    print('----------------Skipping test-------------')
    shutil.copy('results_true.dat', 'results_test.dat')
    exit()
# Run track processing script
call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
     glob.glob(''.join((cwd, '/track*'))))
# The converter is expected to emit poly.pvtp in the working directory.
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
| Fix path to script in test_track_output | Fix path to script in test_track_output
| Python | mit | mjlong/openmc,wbinventor/openmc,bhermanmit/openmc,wbinventor/openmc,johnnyliu27/openmc,paulromano/openmc,mjlong/openmc,smharper/openmc,lilulu/openmc,johnnyliu27/openmc,amandalund/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,johnnyliu27/openmc,mit-crpg/openmc,liangjg/openmc,kellyrowland/openmc,amandalund/openmc,walshjon/openmc,lilulu/openmc,liangjg/openmc,liangjg/openmc,johnnyliu27/openmc,samuelshaner/openmc,mit-crpg/openmc,amandalund/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,paulromano/openmc,liangjg/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,paulromano/openmc,walshjon/openmc,walshjon/openmc,smharper/openmc,bhermanmit/openmc,wbinventor/openmc,wbinventor/openmc,walshjon/openmc,paulromano/openmc,lilulu/openmc,amandalund/openmc,kellyrowland/openmc |
85cb348dab356386362fe7657dee3e31aa1f92bf | rep.py | rep.py | """
The top level of the APL Read-Evaluate-Print loop
UNDER DEVELOPMENT
This version adds a dummy evaluate routine.
"""
import sys
# ------------------------------
def evaluate(expression):
"""
Evaluate an APL expression - dummy version
"""
return (expression)
def read_evaluate_print (prompt):
"""
Read input, echo input
"""
try:
while True:
print(end=prompt)
line = input().lstrip()
if line:
if line[0] == ')':
if line[0:4].upper() == ')OFF':
apl_exit("Bye bye")
print('⎕', evaluate(line))
except EOFError:
apl_exit(None)
def apl_quit ():
"""
Quit without clean up
"""
print ()
sys.exit(0)
def apl_exit (message):
"""
Clean up and quit
"""
if message is None:
print ()
else:
print (message)
sys.exit(0)
# EOF
| """
The top level of the APL Read-Evaluate-Print loop
UNDER DEVELOPMENT
This version adds simple APL exception handling
"""
import sys
from apl_exception import APL_Exception as apl_exception
# ------------------------------
def evaluate(expression):
    """Evaluate an APL expression.

    Placeholder implementation: simply echoes its input unchanged.
    """
    return expression
def read_evaluate_print (prompt):
    """
    The APL read-evaluate-print loop.

    Repeatedly prompts with *prompt*, reads a line, handles system
    commands (currently only )OFF) and evaluates everything else,
    printing the result prefixed with the quad character.  EOF
    (Ctrl-D) exits cleanly via apl_exit.
    """
    try:
        while True:
            print(end=prompt)
            line = input().lstrip()
            if line:
                # Lines beginning with ')' are system commands.
                if line[0] == ')':
                    if line[0:4].upper() == ')OFF':
                        apl_exit("Bye bye")
                try:
                    result = evaluate(line)
                except apl_exception as e:
                    # Print a caret under the offending position;
                    # e.line is presumably the unevaluated tail of the
                    # input -- confirm against apl_exception.
                    print(' '*(len(prompt)+len(line)-len(e.line)),end="^\n")
                    result = e.message
                finally:
                    print('⎕', result)
    except EOFError:
        apl_exit(None)
def apl_quit ():
    """Terminate at once, performing no clean up.

    A bare newline is emitted first so the shell prompt starts on a
    fresh line.
    """
    print()
    raise SystemExit(0)
def apl_exit (message):
    """Clean up and quit.

    Prints *message* -- or just a bare newline when message is None --
    then terminates the interpreter with exit status 0.
    """
    print("" if message is None else message)
    raise SystemExit(0)
# EOF
| Add exception handling to the read-evaluate-loop | Add exception handling to the read-evaluate-loop
| Python | apache-2.0 | NewForester/apl-py,NewForester/apl-py |
574fba0650e4c68b7a31533207c26d8d57ed49c2 | run.py | run.py | from sys import argv
from examples.connect4.connect4 import Connect4Network
if __name__ == '__main__':
if argv[1] == 'connect4':
Connect4Network().play()
| from sys import argv
from examples.connect4.connect4 import Connect4Network
# Entry point: dispatch on the first CLI argument.  The len(argv)
# guard avoids the IndexError the earlier version hit when the script
# was run with no arguments at all.
if __name__ == '__main__':
    if len(argv) > 1:
        if argv[1] == 'connect4':
            Connect4Network().play()
| Check length of argv before accessing it | Check length of argv before accessing it
| Python | mit | tysonzero/py-ann |
a7867806a6bd3abfd6bf2bcac6c490965be000e2 | tests/test_completeness.py | tests/test_completeness.py | import unittest as unittest
from syntax import Syntax
from jscodegen import CodeGenerator
def add_cases(generator):
def class_decorator(cls):
"""Add tests to `cls` generated by `generator()`."""
for f, token in generator():
test = lambda self, i=token, f=f: f(self, i)
test.__name__ = "test %s" % token.name
setattr(cls, test.__name__, test)
return cls
return class_decorator
def _test_tokens():
def t(self, to):
c = CodeGenerator({})
func_name = to.name.lower()
try:
getattr(c, func_name)
self.assertTrue(True, func_name)
except AttributeError:
self.fail("Not implemented: %s" % func_name)
for token in Syntax:
yield t, token
class TestCase(unittest.TestCase):
pass
TestCase = add_cases(_test_tokens)(TestCase)
if __name__=="__main__":
unittest.main() | import unittest as unittest
from jscodegen.syntax import Syntax
from jscodegen import CodeGenerator
def add_cases(generator):
    """Build a class decorator that attaches one test method to the
    decorated class for every (function, token) pair produced by
    ``generator()``.
    """
    def class_decorator(cls):
        """Add tests to `cls` generated by `generator()`."""
        for func, token in generator():
            # Bind the loop variables as defaults so each generated
            # method captures its own pair.
            def test(self, i=token, f=func):
                return f(self, i)
            test.__name__ = "test %s" % token.name
            setattr(cls, test.__name__, test)
        return cls
    return class_decorator
def _test_tokens():
    """Yield one (assert-function, token) pair per Syntax member.

    Each generated check fails unless CodeGenerator exposes a method
    named after the lowercased token -- i.e. it verifies that every
    syntax node type has an implementation.
    """
    def t(self, to):
        c = CodeGenerator({})
        func_name = to.name.lower()
        try:
            getattr(c, func_name)
            self.assertTrue(True, func_name)
        except AttributeError:
            self.fail("Not implemented: %s" % func_name)
    for token in Syntax:
        yield t, token
# Empty shell class: add_cases injects one generated test method per
# Syntax member at import time.
class TestCase(unittest.TestCase):
    pass
TestCase = add_cases(_test_tokens)(TestCase)
if __name__=="__main__":
    unittest.main()
| Fix an issue in the tests | Fix an issue in the tests
| Python | mit | ksons/jscodegen.py |
eb0a5e5768c2f699cbc1f6395d3c14320d8e730d | template/__init__.py | template/__init__.py | #!/usr/bin/env python
# pylint: disable=import-error
"""Generate files from Jinja2 templates and environment variables."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
) # pylint: disable=duplicate-code
from os import environ
from sys import stdin, stdout
import argparse
from argparse import ArgumentParser
from jinja2 import Environment
import template.filters
__version__ = "0.6.4"
def render(template_string):
"""Render the template."""
env = Environment(autoescape=True)
# Add all functions in template.filters as Jinja filters.
# pylint: disable=invalid-name
for tf in filter(lambda x: not x.startswith("_"), dir(template.filters)):
env.filters[tf] = getattr(template.filters, tf)
t = env.from_string(template_string)
return t.render(environ)
def main():
"""Main entrypoint."""
parser = ArgumentParser(
description="""A CLI tool for generating files from Jinja2 templates
and environment variables."""
)
parser.add_argument(
"filename",
help="Input filename",
type=argparse.FileType("r"),
nargs="?",
)
parser.add_argument(
"-o",
"--output",
help="Output to filename",
type=argparse.FileType("w"),
)
args = parser.parse_args()
infd = args.filename if args.filename else stdin
outfd = args.output if args.output else stdout
print(render(infd.read()), file=outfd)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# pylint: disable=import-error
"""A CLI tool for generating files from Jinja2 templates and environment
variables."""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
) # pylint: disable=duplicate-code
from os import environ
from sys import stdin, stdout
import argparse
from argparse import ArgumentParser
from jinja2 import Environment
import template.filters
__version__ = "0.6.4"
def render(template_string):
    """Render *template_string* as a Jinja2 template.

    Every public function in template.filters is registered as a Jinja
    filter, and the process environment variables serve as the template
    context.  Autoescaping is enabled.
    """
    env = Environment(autoescape=True)
    # Add all functions in template.filters as Jinja filters.
    # pylint: disable=invalid-name
    for tf in filter(lambda x: not x.startswith("_"), dir(template.filters)):
        env.filters[tf] = getattr(template.filters, tf)
    t = env.from_string(template_string)
    return t.render(environ)
def main():
    """Command-line entrypoint.

    Reads a template from the given filename (or stdin when omitted),
    renders it against the environment variables and writes the result
    to the -o/--output file (or stdout).
    """
    parser = ArgumentParser(description=__doc__)
    parser.add_argument(
        "filename",
        help="Input filename",
        type=argparse.FileType("r"),
        nargs="?",
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output to filename",
        type=argparse.FileType("w"),
    )
    args = parser.parse_args()
    # Fall back to the standard streams when no files were given.
    infd = args.filename if args.filename else stdin
    outfd = args.output if args.output else stdout
    print(render(infd.read()), file=outfd)
if __name__ == "__main__":
main()
| Use the docstring as the CLI description. | Use the docstring as the CLI description.
| Python | agpl-3.0 | adarnimrod/template,adarnimrod/template |
ae593fd5de74a123b5064ef2e018b4955dc9e6c9 | runserver.py | runserver.py | #!/usr/local/bin/python2.7
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day > 8
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
if cron_condition or force_update:
print "Cron succeeded"
#freezer = Freezer(app)
#freezer.freeze()
#set_metadata()
else:
app.run()
| #!/usr/local/bin/python2.7
import sys
from datetime import date
from main import app
from upload_s3 import set_metadata
from flask_frozen import Freezer
# cron is called with 3 arguments, should only run in the first week of month
cron_condition = len(sys.argv) == 3 and date.today().day < 8
# Manual rebuild: `runserver.py freeze` forces a freeze regardless of date.
force_update = len(sys.argv) == 2 and sys.argv[1] == 'freeze'
if len(sys.argv) > 1: # if runserver is passed an argument
    if cron_condition or force_update:
        # Render the site to static files and push metadata to S3.
        freezer = Freezer(app)
        freezer.freeze()
        set_metadata()
    else:
        app.run()
| Complete testing, change back to production code | Complete testing, change back to production code
| Python | apache-2.0 | vprnet/dorothys-list,vprnet/dorothys-list,vprnet/dorothys-list |
3cef3e4774923b81e622f03aee44a933293c6a8d | modelview/migrations/0036_auto_20170322_1622.py | modelview/migrations/0036_auto_20170322_1622.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-22 15:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0035_auto_20160426_1721'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='source_of_funding',
field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-22 15:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
#('modelview', '0035_auto_20160426_1721'),
('modelview', '0035_auto_20170724_1801'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='source_of_funding',
field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'),
),
]
| Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724 | Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724
| Python | agpl-3.0 | openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform |
c50d9efe011417162ab6e99e19973dcde6b3ddfa | utils/get_collection_object_count.py | utils/get_collection_object_count.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc-prod',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
if not argv.components:
return
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
    """Report object (and, optionally, component) counts for a Nuxeo
    collection, printing progress as it goes.  (Python 2 script.)
    """
    parser = argparse.ArgumentParser(
        description='Print count of objects for a given collection.')
    parser.add_argument('path', help="Nuxeo path to collection")
    parser.add_argument(
        '--pynuxrc',
        default='~/.pynuxrc',
        help="rcfile for use with pynux utils")
    parser.add_argument(
        '--components',
        action='store_true',
        help="show counts for object components")
    if argv is None:
        argv = parser.parse_args()
    # NOTE: despite its name, `argv` holds the parsed Namespace from here on.
    dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
    print "about to fetch objects for path {}".format(dh.path)
    objects = dh.fetch_objects()
    object_count = len(objects)
    print "finished fetching objects. {} found".format(object_count)
    # Count distinct uids to expose duplicates in the fetch results.
    uid_set = set()
    for obj in objects:
        uid_set.add(obj['uid'])
    unique = len(uid_set)
    print "unique uid count: {}".format(unique)
    if not argv.components:
        return
    print "about to iterate through objects and get components"
    component_count = 0
    for obj in objects:
        components = dh.fetch_components(obj)
        component_count = component_count + len(components)
    print "finished fetching components. {} found".format(component_count)
    print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| Change default arg value for pynuxrc | Change default arg value for pynuxrc
| Python | bsd-3-clause | barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere |
dad05c2eff78383e3179fd1f4a4502fe553afecd | tests/lib/docker_compose_tools.py | tests/lib/docker_compose_tools.py | # pylint: disable=line-too-long
""" Handles docker compose """
from lib.docker_tools import docker_compose
from lib.tools import show_status
def set_up(pg_version, es_version):
""" Start containers """
compose = docker_compose(pg_version, es_version)
show_status(
"Starting testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
pg_version=pg_version, es_version=es_version
)
)
show_status("Stopping and Removing any old containers...")
compose("stop")
compose("rm", "--force")
show_status("Building new images...")
compose("build")
show_status("Starting new containers...")
compose("up", "-d")
show_status("Testing environment started")
def tear_down(pg_version, es_version):
""" Stop containers """
compose = docker_compose(pg_version, es_version)
show_status(
"Stopping testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
pg_version=pg_version, es_version=es_version
)
)
compose("down")
show_status("Testing environment stopped")
| # pylint: disable=line-too-long
""" Handles docker compose """
import sys
from sh import ErrorReturnCode
from lib.docker_tools import docker_compose
from lib.tools import show_status
def set_up(pg_version, es_version):
    """ Start containers

    Rebuilds and starts the docker-compose test environment for the
    given PostgreSQL/Elasticsearch version pair, removing any leftover
    containers first.  If the image build fails, the captured build
    stdout/stderr is printed and the process exits with status 1.
    """
    compose = docker_compose(pg_version, es_version)
    show_status(
        "Starting testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
            pg_version=pg_version, es_version=es_version
        )
    )
    show_status("Stopping and Removing any old containers...")
    compose("stop")
    compose("rm", "--force")
    show_status("Building new images...")
    try:
        compose("build")
    except ErrorReturnCode as exc:
        # sh raises ErrorReturnCode on a non-zero exit; surface the
        # full build output before bailing out.
        print("Failed to build images...")
        print(exc.stdout.decode("utf-8"))
        print()
        print(exc.stderr.decode("utf-8"))
        sys.exit(1)
    show_status("Starting new containers...")
    compose("up", "-d")
    show_status("Testing environment started")
def tear_down(pg_version, es_version):
    """ Stop containers

    Stops and removes the docker-compose test environment for the
    given PostgreSQL/Elasticsearch version pair.
    """
    compose = docker_compose(pg_version, es_version)
    show_status(
        "Stopping testing environment for PostgreSQL {pg_version} with Elasticsearch {es_version}...".format(
            pg_version=pg_version, es_version=es_version
        )
    )
    compose("down")
    show_status("Testing environment stopped")
| Print full stdout / stderr when image build fails | Print full stdout / stderr when image build fails
| Python | mit | matthewfranglen/postgres-elasticsearch-fdw |
482bcbaeddf3d18c445b780a083d723a85a6dd88 | wok/renderers.py | wok/renderers.py | from markdown import markdown
import docutils.core
from docutils.writers.html4css1 import Writer as rst_html_writer
class Renderer(object):
extensions = []
@classmethod
def render(cls, plain):
return plain
class Markdown(Renderer):
extensions = ['markdown', 'mkd']
@classmethod
def render(cls, plain):
return markdown(plain, ['def_list', 'footnotes'])
class ReStructuredText(Renderer):
extensions = ['rst']
@classmethod
def render(cls, plain):
w = rst_html_writer()
return docutils.core.publish_parts(plain, writer=w)['body']
class Plain(Renderer):
extensions = 'txt'
@classmethod
def render(cls, plain):
return plain.replace('\n', '<br>')
all = [Renderer, Plain, Markdown, ReStructuredText]
| from markdown import markdown
import docutils.core
from docutils.writers.html4css1 import Writer as rst_html_writer
class Renderer(object):
    """Base renderer: handles no file extensions and passes content
    through unchanged.  Subclasses override both attributes.
    """
    # File extensions (without dot) this renderer claims.
    extensions = []
    @classmethod
    def render(cls, plain):
        """Return *plain* untouched."""
        return plain
class Markdown(Renderer):
    """Render Markdown source to HTML (python-markdown)."""
    extensions = ['markdown', 'mkd']
    @classmethod
    def render(cls, plain):
        # def_list/footnotes add extra syntax; codehilite enables
        # syntax highlighting of code blocks.
        return markdown(plain, ['def_list', 'footnotes', 'codehilite'])
class ReStructuredText(Renderer):
    """Render reStructuredText source to HTML via docutils."""
    extensions = ['rst']
    @classmethod
    def render(cls, plain):
        w = rst_html_writer()
        # publish_parts returns a dict of document fragments; 'body' is
        # the rendered content without the surrounding page boilerplate.
        return docutils.core.publish_parts(plain, writer=w)['body']
class Plain(Renderer):
    """Plain-text renderer: newlines become HTML line breaks."""
    # Was the bare string 'txt', which iterates as the characters
    # 't', 'x', 't'; every sibling renderer declares a *list* of
    # extensions, so use a list here for consistent membership tests.
    extensions = ['txt']
    @classmethod
    def render(cls, plain):
        """Return *plain* with each newline replaced by '<br>'."""
        return plain.replace('\n', '<br>')
all = [Renderer, Plain, Markdown, ReStructuredText]
| Enable syntax highlighting with Markdown. | Enable syntax highlighting with Markdown.
| Python | mit | wummel/wok,mythmon/wok,ngokevin/wok,matt-garman/wok,Avaren/wok,algor512/wok,edunham/wok,ngokevin/wok,chrplace/wok,moreati/wok,mythmon/wok,algor512/wok,vaygr/wok,vaygr/wok,edunham/wok,algor512/wok,moreati/wok,abbgrade/wok,edunham/wok,gchriz/wok,abbgrade/wok,matt-garman/wok,chrplace/wok,chrplace/wok,matt-garman/wok,jneves/wok,wummel/wok,moreati/wok,jneves/wok,gchriz/wok,Avaren/wok,vaygr/wok,mythmon/wok,gchriz/wok,jneves/wok,Avaren/wok |
eef8498388c672b25344a3f6fd8c05166e4ed4f6 | xea_core/urls.py | xea_core/urls.py | """xea_core URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'api/auth/jwt/', include('jwt_knox.urls')),
url(r'^api/', include('api.urls')),
]
| """xea_core URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # NOTE(review): this pattern lacks the leading '^' anchor the other
    # routes use -- confirm whether that is intentional.
    url(r'api/auth/jwt/', include('jwt_knox.urls', namespace='jwt_knox')),
    url(r'^api/', include('api.urls')),
]
| Add namespace to jwt_knox URLs | Add namespace to jwt_knox URLs
| Python | agpl-3.0 | gpul-org/xea-core |
6cce924d849f209fb8251acd4c21d65ad0daa24c | plugins/basic_info_plugin.py | plugins/basic_info_plugin.py | import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
short_description = 'List some basic info about the string in a table'
header = 'Basic info:'
default = True
description = textwrap.dedent('''\
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes''')
key = '--basic'
def handle(self):
table = VeryPrettyTable()
table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control']
for s in self.args['STRING']:
table.add_row((s, len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
return str(table) + '\n' | import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
from plugins.util import green, red
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
    """Tabulate basic per-string statistics (length, character classes,
    hex-ness) for every input string.
    """
    short_description = 'List some basic info about the string in a table'
    header = 'Basic info:'
    default = True
    description = textwrap.dedent('''\
    This plugin provides some basic info about the string such as:
    - Length
    - Presence of alpha/digits/raw bytes''')
    key = '--basic'
    def handle(self):
        """Return a rendered table with one row per input string."""
        table = VeryPrettyTable()
        table.field_names = ['String', 'Length', '# Digits', '# Alpha', '# Punct.', '# Control', 'Hex?']
        for s in self.args['STRING']:
            # '# Control' counts characters outside string.printable;
            # 'Hex?' shows a check/cross mark (note that all() over an
            # empty string is True, so "" would be flagged as hex).
            table.add_row((s, len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
                           sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s),
                           green('✔') if all(x in string.hexdigits for x in s) else red('✗')))
        return str(table) + '\n'
| Python | mit | Sakartu/stringinfo |
1e82bf0e23775dfc2541dc8c2dec4447c20f2cda | tests/submitsaved.py | tests/submitsaved.py | #!/usr/bin/env python2
import pickle, sys
sys.path.append('../')
from lewas.parsers import UnitParser, AttrParser, field_rangler
import lewas.datastores
import lewas.models
config = "../config"
config = lewas.readConfig(config)
lewas.datastores.submitRequest(pickle.load(open(sys.argv[1])), config, False)
| #!/usr/bin/env python2
import pickle, sys
sys.path.append('../')
from lewas.parsers import UnitParser, AttrParser, field_rangler
import lewas.datastores
import lewas.models
config = "../config"
config = lewas.readConfig(config)
# Re-submit each pickled request file named on the command line.
# NOTE(review): pickle.load can execute arbitrary code from the file --
# only run this on trusted pickles; the file handle is also never
# closed explicitly.
for fn in sys.argv[1:]:
    lewas.datastores.submitRequest(pickle.load(open(fn)), config, False)
    print "processed", fn
| Add capability to resubmit multiple files | Add capability to resubmit multiple files
| Python | mit | LEWASatVT/lewas |
904644ba540fbf6dcd47fb5a006d03a64d299fb2 | src/yoi/pages.py | src/yoi/pages.py | from flask import request
from flaskext.genshi import render_response
from yoi.app import app
@app.route('/')
def get_index():
return render_response('index.html')
@app.route('/home')
def get_home():
return render_response('home.html')
@app.route('/journal')
def get_journal():
return render_response('journal.html')
@app.route('/new-entry')
def get_new_entry():
return render_response('new-entry.html')
| from flask import request
from flaskext.genshi import render_response
from yoi.app import app
@app.route('/')
def index():
return render_response('index.html')
@app.route('/home')
def home():
return render_response('home.html')
@app.route('/journal')
def journal():
return render_response('journal.html')
@app.route('/new-entry')
def new_entry():
return render_response('new-entry.html')
| Remove `get_` prefix on page functions. | Remove `get_` prefix on page functions.
| Python | mit | doptio/you-owe-it,doptio/you-owe-it,doptio/you-owe-it,doptio/you-owe-it |
8365945ef62b8f9cd37022302e8ee6299716720d | masterfirefoxos/base/helpers.py | masterfirefoxos/base/helpers.py | from feincms.templatetags.feincms_tags import feincms_render_region
from jingo import register
from jinja2 import Markup
@register.function
def render_region(feincms_page, region, request):
return Markup(feincms_render_region(None, feincms_page, region, request))
| from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper
from feincms.templatetags.feincms_tags import feincms_render_region
from jingo import register
from jinja2 import Markup
# Expose Django's staticfiles `static` helper to Jinja templates.
static = register.function(static_helper)
@register.function
def render_region(feincms_page, region, request):
    """Render a FeinCMS page region and mark it safe for Jinja output."""
    return Markup(feincms_render_region(None, feincms_page, region, request))
| Add static helper for jinja2 | Add static helper for jinja2
| Python | mpl-2.0 | craigcook/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos,enng0227/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,enng0227/masterfirefoxos,mozilla/masterfirefoxos,glogiotatidis/masterfirefoxos,glogiotatidis/masterfirefoxos,liu21st/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,craigcook/masterfirefoxos,liu21st/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos |
538a48b0ec6589d4d8fcf1c4253898a0fd3367e3 | hello.py | hello.py | # Based on "Flask is Fun" demo from Flask documentation.
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.config.update(
SERVER_NAME='127.0.0.1:%d' % int(os.environ.get('PORT', 5000)),
)
app.run()
| # Based on "Flask is Fun" demo from Flask documentation.
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
    """Serve the landing page: a static greeting."""
    return "Hello World!"
if __name__ == "__main__":
    # Honour the PORT environment variable (e.g. set by the hosting
    # platform), defaulting to Flask's usual 5000.
    app.config.update(
        SERVER_NAME='127.0.0.1:%d' % int(os.environ.get('PORT', 5000)),
    )
    # Bind to 0.0.0.0 so the dev server is reachable from other hosts.
    app.run(host='0.0.0.0')
| Add external access for demo process | Add external access for demo process
Avoid using Lynx web browser on a Raspberry pi for reaching localhost on console :-) | Python | mit | smartmob-project/strawboss-demo,smartmob-project/strawboss-demo |
1598c699dc6bdf5d6edd700b70e11df207412dcd | hackernews.py | hackernews.py | import requests
class HackerNews():
def __init__(self):
self.url = 'https://hacker-news.firebaseio.com/v0/{uri}'
def request(self, method, uri):
url = self.url.format(uri=uri)
return requests.request(method, url)
def item(self, item_id):
r = self.request('GET', 'item/{item_id}.json'.format(item_id=item_id))
return r.json()
def user(self, user_id):
r = self.request('GET', 'user/{user_id}.json'.format(user_id=user_id))
return r.json()
def top_stories(self):
r = self.request('GET', 'topstories.json')
return r.json()
def max_item(self):
r = self.request('GET', 'maxitem.json')
return r.json()
def updates(self):
r = self.request('GET', 'updates.json')
return r.json()
| from datetime import datetime
import requests
class HackerNews():
def __init__(self, timeout=5):
self.url = 'https://hacker-news.firebaseio.com/v0/{uri}'
self.timeout = timeout
def request(self, method, uri):
url = self.url.format(uri=uri)
return requests.request(method, url, timeout=self.timeout)
def item(self, item_id):
r = self.request('GET', 'item/{item_id}.json'.format(item_id=item_id))
item = r.json()
item['time'] = datetime.fromtimestamp(item['time'])
return item
def user(self, user_id):
r = self.request('GET', 'user/{user_id}.json'.format(user_id=user_id))
user = r.json()
user['created'] = datetime.fromtimestamp(user['created'])
return user
def top_stories(self):
r = self.request('GET', 'topstories.json')
return r.json()
def max_item(self):
r = self.request('GET', 'maxitem.json')
return r.json()
def updates(self):
r = self.request('GET', 'updates.json')
return r.json()
| Convert timestamps to native datetime objects (breaking change) | Convert timestamps to native datetime objects (breaking change)
| Python | mit | abrinsmead/hackernews-python |
8b7a7e09a53d23b37266c67995756cf1dadf520d | conanfile.py | conanfile.py | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.1.2"
class CMakeIncludeGuardConan(ConanFile):
name = "cmake-include-guard"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
requires = ("cmake-module-common/master@smspillaz/cmake-module-common", )
generators = "cmake"
url = "http://github.com/polysquare/cmake-include-guard"
licence = "MIT"
def source(self):
zip_name = "cmake-include-guard.zip"
download("https://github.com/polysquare/"
"cmake-include-guard/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/cmake-include-guard",
src="cmake-include-guard-" + VERSION,
keep_path=True)
| from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.1.2"
class CMakeIncludeGuardConan(ConanFile):
name = "cmake-include-guard"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-include-guard"
licence = "MIT"
options = {
"dev": [True, False]
}
default_options = "dev=False"
def requirements(self):
if self.options.dev:
self.requires("cmake-module-common/master@smspillaz/cmake-module-common")
def source(self):
zip_name = "cmake-include-guard.zip"
download("https://github.com/polysquare/"
"cmake-include-guard/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/cmake-include-guard",
src="cmake-include-guard-" + VERSION,
keep_path=True)
| Make cmake-module-common a dev-only requirement | conan: Make cmake-module-common a dev-only requirement
| Python | mit | polysquare/cmake-include-guard |
70686bc62b85d36894b6e7909ceaa25d122ffe95 | users/serializers.py | users/serializers.py | from rest_framework import serializers
from django.contrib.auth.models import User, Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ('name',)
class UserSerializer(serializers.ModelSerializer):
groups_complete = GroupSerializer(source='groups', read_only=True, many=True)
class Meta:
model = User
fields = ('id', 'username', 'password', 'first_name',
'last_name', 'groups', 'groups_complete')
extra_kwargs = {
'username': {'required': True},
'password': {'required': True}
}
| from rest_framework import serializers
from django.contrib.auth.models import User, Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = ('name',)
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'password', 'first_name',
'last_name', 'groups')
extra_kwargs = {
'username': {'required': True},
'password': {'required': False}
}
| Remove nested serializer for groups, make password optional in serializer for user edition | Remove nested serializer for groups, make password optional in serializer for user edition
| Python | mit | fernandolobato/balarco,fernandolobato/balarco,fernandolobato/balarco |
c86ccf75fdf49115697548bb046bc83f348a1aba | tests/test_exceptions.py | tests/test_exceptions.py | from framewirc import exceptions
class MissingAttributesTest:
def test_message(self):
attrs = ['some', 'attrs']
expected = 'Required attribute(s) missing: {}'.format(attrs)
exception = exceptions.MissingAttributes(attrs)
assert str(exception) == expected
| from framewirc import exceptions
def test_message():
attrs = ['some', 'attrs']
expected = 'Required attribute(s) missing: {}'.format(attrs)
exception = exceptions.MissingAttributes(attrs)
assert str(exception) == expected
| Fix test that wasn't running | Fix test that wasn't running
| Python | bsd-2-clause | meshy/framewirc |
4b926ab5de00bc4885021fff0b2ac3679703707a | nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py | nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
| Drop FK before dropping instance_id column. | Drop FK before dropping instance_id column. | Python | apache-2.0 | klmitch/nova,hanlind/nova,aristanetworks/arista-ovs-nova,fajoy/nova,dawnpower/nova,gooddata/openstack-nova,joker946/nova,shail2810/nova,orbitfp7/nova,jeffrey4l/nova,zaina/nova,tianweizhang/nova,ruslanloman/nova,mgagne/nova,yosshy/nova,cloudbase/nova,paulmathews/nova,JioCloud/nova_test_latest,savi-dev/nova,russellb/nova,nikesh-mahalka/nova,BeyondTheClouds/nova,scripnichenko/nova,leilihh/nova,gooddata/openstack-nova,vmturbo/nova,JianyuWang/nova,badock/nova,tudorvio/nova,CloudServer/nova,TieWei/nova,sileht/deb-openstack-nova,MountainWei/nova,fnordahl/nova,mmnelemane/nova,psiwczak/openstack,bclau/nova,Stavitsky/nova,NeCTAR-RC/nova,viggates/nova,jianghuaw/nova,aristanetworks/arista-ovs-nova,CiscoSystems/nova,yosshy/nova,cloudbase/nova-virtualbox,affo/nova,dstroppa/openstack-smartos-nova-grizzly,akash1808/nova,yrobla/nova,ruslanloman/nova,openstack/nova,vladikr/nova_drafts,maoy/zknova,bigswitch/nova,apporc/nova,barnsnake351/nova,dims/nova,salv-orlando/MyRepo,spring-week-topos/nova-week,devendermishrajio/nova_test_latest,edulramirez/nova,saleemjaveds/https-github.com-openstack-nova,Triv90/Nova,dims/nova,paulmathews/nova,houshengbo/nova_vmware_compute_driver,angdraug/nova,vmturbo/nova,watonyweng/nova,usc-isi/nova,gooddata/openstack-nova,sridevikoushik31/openstack,NoBodyCam/TftpPxeBootBareMetal,usc-isi/extra-specs,SUSE-Cloud/nova,mikalstill/nova,belmiromoreira/nova,Triv90/Nova,vmturbo/nova,cyx1231st/nova,petrutlucian94/nova,shahar-stratoscale/nova,rahulunair/nova,MountainWei/nova,gspilio/nova,berrange/nova,gspilio/nova,felixma/nova,nikesh-mahalka/nova,redhat-openstack/nova,badock/nova,noironetworks/nova,joker946/nova,alexandrucoman/vbox-nova-driver,shahar-stratoscale/nova,isyippee/nova,yrobla/nova,salv-orlando/MyRepo,petrutlucian94/nova_dev,rajalokan/nova,maelnor/nova,russellb/nova,isyippee/nova,rahulunair/nova,houshengbo/nova_vmware_compute_driver,tealover/nov
a,whitepages/nova,alvarolopez/nova,TwinkleChawla/nova,maoy/zknova,mahak/nova,usc-isi/nova,cyx1231st/nova,iuliat/nova,tianweizhang/nova,eayunstack/nova,ewindisch/nova,JioCloud/nova,DirectXMan12/nova-hacking,virtualopensystems/nova,BeyondTheClouds/nova,leilihh/nova,petrutlucian94/nova_dev,rajalokan/nova,luogangyi/bcec-nova,tudorvio/nova,raildo/nova,shootstar/novatest,fajoy/nova,sridevikoushik31/openstack,maelnor/nova,tanglei528/nova,tanglei528/nova,ewindisch/nova,j-carpentier/nova,josephsuh/extra-specs,Brocade-OpenSource/OpenStack-DNRM-Nova,eneabio/nova,rrader/nova-docker-plugin,LoHChina/nova,mahak/nova,TwinkleChawla/nova,Juniper/nova,imsplitbit/nova,mikalstill/nova,scripnichenko/nova,akash1808/nova_test_latest,OpenAcademy-OpenStack/nova-scheduler,CiscoSystems/nova,rajalokan/nova,bgxavier/nova,NoBodyCam/TftpPxeBootBareMetal,saleemjaveds/https-github.com-openstack-nova,alexandrucoman/vbox-nova-driver,noironetworks/nova,NewpTone/stacklab-nova,zzicewind/nova,tealover/nova,phenoxim/nova,yrobla/nova,Francis-Liu/animated-broccoli,devoid/nova,mgagne/nova,DirectXMan12/nova-hacking,Metaswitch/calico-nova,sridevikoushik31/nova,KarimAllah/nova,berrange/nova,qwefi/nova,fnordahl/nova,blueboxgroup/nova,CCI-MOC/nova,kimjaejoong/nova,klmitch/nova,CCI-MOC/nova,spring-week-topos/nova-week,edulramirez/nova,akash1808/nova_test_latest,angdraug/nova,josephsuh/extra-specs,Yuriy-Leonov/nova,affo/nova,double12gzh/nova,iuliat/nova,projectcalico/calico-nova,vmturbo/nova,redhat-openstack/nova,Yuriy-Leonov/nova,josephsuh/extra-specs,gspilio/nova,savi-dev/nova,Triv90/Nova,mandeepdhami/nova,rahulunair/nova,devendermishrajio/nova,zhimin711/nova,savi-dev/nova,DirectXMan12/nova-hacking,mandeepdhami/nova,Juniper/nova,dstroppa/openstack-smartos-nova-grizzly,double12gzh/nova,Juniper/nova,projectcalico/calico-nova,takeshineshiro/nova,blueboxgroup/nova,devendermishrajio/nova_test_latest,adelina-t/nova,JianyuWang/nova,eonpatapon/nova,j-carpentier/nova,dawnpower/nova,bigswitch/nova,tangfeixiong/nova,SUSE-Clo
ud/nova,silenceli/nova,devendermishrajio/nova,sebrandon1/nova,plumgrid/plumgrid-nova,CEG-FYP-OpenStack/scheduler,Metaswitch/calico-nova,Yusuke1987/openstack_template,JioCloud/nova_test_latest,sridevikoushik31/openstack,citrix-openstack-build/nova,ntt-sic/nova,ted-gould/nova,sebrandon1/nova,klmitch/nova,fajoy/nova,salv-orlando/MyRepo,sacharya/nova,Tehsmash/nova,NeCTAR-RC/nova,mikalstill/nova,phenoxim/nova,Stavitsky/nova,dstroppa/openstack-smartos-nova-grizzly,eneabio/nova,shail2810/nova,alvarolopez/nova,eneabio/nova,tangfeixiong/nova,akash1808/nova,usc-isi/extra-specs,hanlind/nova,klmitch/nova,rickerc/nova_audit,bgxavier/nova,usc-isi/extra-specs,usc-isi/nova,psiwczak/openstack,varunarya10/nova_test_latest,thomasem/nova,viggates/nova,JioCloud/nova,yatinkumbhare/openstack-nova,bclau/nova,barnsnake351/nova,takeshineshiro/nova,alaski/nova,openstack/nova,Francis-Liu/animated-broccoli,alaski/nova,leilihh/novaha,ted-gould/nova,felixma/nova,leilihh/novaha,CEG-FYP-OpenStack/scheduler,ntt-sic/nova,jianghuaw/nova,paulmathews/nova,CloudServer/nova,sridevikoushik31/nova,rrader/nova-docker-plugin,varunarya10/nova_test_latest,LoHChina/nova,russellb/nova,maheshp/novatest,qwefi/nova,silenceli/nova,eonpatapon/nova,sacharya/nova,thomasem/nova,psiwczak/openstack,NoBodyCam/TftpPxeBootBareMetal,rickerc/nova_audit,KarimAllah/nova,sileht/deb-openstack-nova,eharney/nova,shootstar/novatest,cloudbase/nova-virtualbox,rajalokan/nova,Tehsmash/nova,virtualopensystems/nova,cloudbau/nova,houshengbo/nova_vmware_compute_driver,maheshp/novatest,Brocade-OpenSource/OpenStack-DNRM-Nova,kimjaejoong/nova,eayunstack/nova,gooddata/openstack-nova,maheshp/novatest,cernops/nova,raildo/nova,sridevikoushik31/nova,mahak/nova,Juniper/nova,cloudbau/nova,maoy/zknova,sridevikoushik31/nova,jianghuaw/nova,luogangyi/bcec-nova,sebrandon1/nova,plumgrid/plumgrid-nova,jeffrey4l/nova,imsplitbit/nova,NewpTone/stacklab-nova,Yusuke1987/openstack_template,eharney/nova,orbitfp7/nova,zzicewind/nova,openstack/nova,jianghuaw/nova,appo
rc/nova,zhimin711/nova,vladikr/nova_drafts,cloudbase/nova,BeyondTheClouds/nova,cernops/nova,watonyweng/nova,mmnelemane/nova,OpenAcademy-OpenStack/nova-scheduler,aristanetworks/arista-ovs-nova,adelina-t/nova,citrix-openstack-build/nova,sileht/deb-openstack-nova,cloudbase/nova,NewpTone/stacklab-nova,zaina/nova,KarimAllah/nova,petrutlucian94/nova,yatinkumbhare/openstack-nova,TieWei/nova,whitepages/nova,hanlind/nova,devoid/nova,belmiromoreira/nova,cernops/nova |
cd9c6f96f68391cf333b68e1ea28e513e869aca3 | troposphere/detective.py | troposphere/detective.py | # Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {
}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
'GraphArn': (basestring, True),
'MemberEmailAddress': (basestring, True),
'MemberId': (basestring, True),
'Message': (basestring, False),
}
| # Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {
}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
'DisableEmailNotification': (boolean, False),
'GraphArn': (basestring, True),
'MemberEmailAddress': (basestring, True),
'MemberId': (basestring, True),
'Message': (basestring, False),
}
| Update Detective per 2021-03-15 changes | Update Detective per 2021-03-15 changes
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere |
08a1bfc233dc44f40ee0b8a5950eda19bca1d6a6 | quran_tafseer/serializers.py | quran_tafseer/serializers.py | from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
class Meta:
model = Tafseer
fields = ['id', 'name']
class TafseerTextSerializer(serializers.ModelSerializer):
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
'sura_num': obj.ayah.sura.pk})
class Meta:
model = TafseerText
fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah', 'text']
| from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
class Meta:
model = Tafseer
fields = ['id', 'name']
class TafseerTextSerializer(serializers.ModelSerializer):
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
ayah_number = serializers.IntegerField(source='ayah')
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
'sura_num': obj.ayah.sura.pk})
class Meta:
model = TafseerText
fields = ['tafseer_id', 'tafseer_name', 'ayah_url', 'ayah_number', 'text']
| Change serializer field name from ayah to ayah_number | Change serializer field name from ayah to ayah_number
| Python | mit | EmadMokhtar/tafseer_api |
31c79697db0d5c973cff9b845ed28845695ecb02 | website/addons/twofactor/views.py | website/addons/twofactor/views.py | # -*- coding: utf-8 -*-
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
| # -*- coding: utf-8 -*-
import httplib as http
from framework import request
from framework.auth.decorators import must_be_logged_in
from framework.exceptions import HTTPError
from website.project.decorators import must_have_addon
@must_be_logged_in
@must_have_addon('twofactor', 'user')
def user_settings(user_addon, *args, **kwargs):
code = request.json.get('code')
if code is None:
raise HTTPError(code=http.BAD_REQUEST)
if user_addon.verify_code(code):
user_addon.is_confirmed = True
user_addon.save()
return {'message': 'Successfully verified two-factor authentication.'}, http.OK
raise HTTPError(http.FORBIDDEN, data=dict(
message_short='Forbidden',
message_long='The two-factor verification code you provided is invalid.'
))
| Fix response when user successfully confirms 2fa | Fix response when user successfully confirms 2fa
| Python | apache-2.0 | CenterForOpenScience/osf.io,revanthkolli/osf.io,jnayak1/osf.io,mluo613/osf.io,caneruguz/osf.io,alexschiller/osf.io,adlius/osf.io,himanshuo/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,jeffreyliu3230/osf.io,cslzchen/osf.io,cwisecarver/osf.io,billyhunt/osf.io,SSJohns/osf.io,cosenal/osf.io,GageGaskins/osf.io,kushG/osf.io,zamattiac/osf.io,njantrania/osf.io,petermalcolm/osf.io,jinluyuan/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,wearpants/osf.io,binoculars/osf.io,jolene-esposito/osf.io,billyhunt/osf.io,reinaH/osf.io,kushG/osf.io,GageGaskins/osf.io,kwierman/osf.io,leb2dg/osf.io,felliott/osf.io,revanthkolli/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,acshi/osf.io,barbour-em/osf.io,doublebits/osf.io,acshi/osf.io,zkraime/osf.io,TomBaxter/osf.io,kushG/osf.io,baylee-d/osf.io,adlius/osf.io,cosenal/osf.io,jolene-esposito/osf.io,kushG/osf.io,njantrania/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,adlius/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,SSJohns/osf.io,TomBaxter/osf.io,cldershem/osf.io,reinaH/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,sloria/osf.io,felliott/osf.io,hmoco/osf.io,hmoco/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,danielneis/osf.io,emetsger/osf.io,chennan47/osf.io,icereval/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,kch8qx/osf.io,chennan47/osf.io,binoculars/osf.io,KAsante95/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,AndrewSallans/osf.io,mluo613/osf.io,alexschiller/osf.io,fabianvf/osf.io,alexschiller/osf.io,mluke93/osf.io,abought/osf.io,MerlinZhang/osf.io,zachjanicki/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,dplorimer/osf,cosenal/osf.io,Nesiehr/osf.io,mluo613/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,arpitar/osf.io,cwisecarver/osf.io,lamdnhan/osf.io,arpitar/osf.io,ckc6cz/osf.io,zamattiac/osf.io,Jo
hnetordoff/osf.io,wearpants/osf.io,alexschiller/osf.io,TomBaxter/osf.io,icereval/osf.io,doublebits/osf.io,crcresearch/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,dplorimer/osf,GaryKriebel/osf.io,doublebits/osf.io,samchrisinger/osf.io,fabianvf/osf.io,wearpants/osf.io,jinluyuan/osf.io,chrisseto/osf.io,samanehsan/osf.io,mluke93/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,laurenrevere/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,ckc6cz/osf.io,pattisdr/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,sloria/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,arpitar/osf.io,aaxelb/osf.io,KAsante95/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,cosenal/osf.io,jinluyuan/osf.io,zamattiac/osf.io,abought/osf.io,KAsante95/osf.io,rdhyee/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,acshi/osf.io,sbt9uc/osf.io,chrisseto/osf.io,felliott/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,emetsger/osf.io,caseyrygt/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,lamdnhan/osf.io,lyndsysimon/osf.io,crcresearch/osf.io,zkraime/osf.io,mluo613/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,erinspace/osf.io,jnayak1/osf.io,HarryRybacki/osf.io,mluke93/osf.io,felliott/osf.io,mfraezz/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,chrisseto/osf.io,Nesiehr/osf.io,SSJohns/osf.io,dplorimer/osf,jmcarp/osf.io,cslzchen/osf.io,crcresearch/osf.io,samchrisinger/osf.io,revanthkolli/osf.io,bdyetton/prettychart,abought/osf.io,erinspace/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,reinaH/osf.io,wearpants/osf.io,zkraime/osf.io,lamdnhan/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,lyndsysimon/osf.io,caneruguz/osf.io,GageGaskins/osf.io,mfraezz/osf.io,pattisdr/osf.io,baylee-d/osf.io,Ghalko/osf.io,GaryKriebel/osf.io,chrisseto/osf.io,Ghalko/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,acsh
i/osf.io,cldershem/osf.io,rdhyee/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,arpitar/osf.io,baylee-d/osf.io,samanehsan/osf.io,rdhyee/osf.io,acshi/osf.io,mfraezz/osf.io,njantrania/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,mattclark/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,saradbowman/osf.io,GaryKriebel/osf.io,fabianvf/osf.io,abought/osf.io,kwierman/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,barbour-em/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,SSJohns/osf.io,KAsante95/osf.io,dplorimer/osf,himanshuo/osf.io,jnayak1/osf.io,barbour-em/osf.io,cldershem/osf.io,sloria/osf.io,HalcyonChimera/osf.io,sbt9uc/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,leb2dg/osf.io,mfraezz/osf.io,jmcarp/osf.io,AndrewSallans/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,petermalcolm/osf.io,reinaH/osf.io,erinspace/osf.io,mluo613/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,laurenrevere/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,ckc6cz/osf.io,hmoco/osf.io,zkraime/osf.io,doublebits/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,himanshuo/osf.io,jmcarp/osf.io,emetsger/osf.io,himanshuo/osf.io,lamdnhan/osf.io,icereval/osf.io,asanfilippo7/osf.io,doublebits/osf.io,bdyetton/prettychart,njantrania/osf.io,hmoco/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,samanehsan/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,adlius/osf.io,kch8qx/osf.io,emetsger/osf.io,danielneis/osf.io,mluke93/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,kwierman/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,danielneis/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,Ghalko/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,billyhunt/osf.io,bdyetton/prettychart,zamattiac/osf
.io |
8ce2cbff7063b97e249dd87dd2acb8d83bdbf509 | examples/py/fetch-ohlcv-sequentially.py | examples/py/fetch-ohlcv-sequentially.py | # -*- coding: utf-8 -*-
import os
import sys
import time
# -----------------------------------------------------------------------------
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)
# -----------------------------------------------------------------------------
import ccxt # noqa: E402
# -----------------------------------------------------------------------------
# common constants
msec = 1000
minute = 60 * msec
# -----------------------------------------------------------------------------
kraken = ccxt.kraken()
# -----------------------------------------------------------------------------
from_datetime = '2017-09-01 00:00:00'
from_timestamp = kraken.parse8601(from_datetime)
# -----------------------------------------------------------------------------
now = kraken.milliseconds()
# -----------------------------------------------------------------------------
while from_timestamp < now:
print('Fetching candles starting from', kraken.iso8601(from_timestamp))
ohlcvs = kraken.fetch_ohlcv('BTC/USD', '1m', from_timestamp)
# don't hit the rateLimit or you will be banned
time.sleep(kraken.rateLimit / msec)
# Kraken returns 720 candles for 1m timeframe at once
from_timestamp += len(ohlcvs) * minute
| # -*- coding: utf-8 -*-
import os
import sys
import time
# -----------------------------------------------------------------------------
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)
# -----------------------------------------------------------------------------
import ccxt # noqa: E402
# -----------------------------------------------------------------------------
# common constants
msec = 1000
minute = 60 * msec
# -----------------------------------------------------------------------------
kraken = ccxt.kraken()
# -----------------------------------------------------------------------------
from_datetime = '2017-09-01 00:00:00'
from_timestamp = kraken.parse8601(from_datetime)
# -----------------------------------------------------------------------------
now = kraken.milliseconds()
# -----------------------------------------------------------------------------
data = []
while from_timestamp < now:
print('Fetching candles starting from', kraken.iso8601(from_timestamp))
ohlcvs = kraken.fetch_ohlcv('BTC/USD', '1m', from_timestamp)
# don't hit the rateLimit or you will be banned
time.sleep(3 * kraken.rateLimit / msec)
# Kraken returns 720 candles for 1m timeframe at once
from_timestamp += len(ohlcvs) * minute
data += ohlcvs
| Add delay 3 times the rate limit to avoid API errors | Add delay 3 times the rate limit to avoid API errors
| Python | mit | tritoanst/ccxt,tritoanst/ccxt,ccxt/ccxt,ccxt/ccxt,tritoanst/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,tritoanst/ccxt |
6e6c5bb9f02a4f9d380ee20216f710a6de0b0583 | fenum.py | fenum.py | #!/bin/env python
import os
import sys
def main():
if len(sys.argv) == 1 or sys.argv[1].lower() == "-h" or sys.argv[1].lower() == "--help":
print("Syntax: fenum.py [files...]")
print("\tEnumerate the given files (starting at 1) in the same order as they are passed to the script.")
return
for k,v in enumerate(sys.argv[1:], 1):
path, name = os.path.split(v if not v.endswith("/") else v[:-1])
if path:
path += "/"
try:
fname = "{}{} - {}".format(path, str(k).zfill(len(str(len(sys.argv) - 1))), name)
print("\"{}\" -> \"{}\"".format(v, fname))
os.rename(v, fname)
except Exception as e:
print(str(e))
main()
| #!/bin/env python
import os
import sys
import math
def main():
if len(sys.argv) == 1 or sys.argv[1].lower() == "-h" or sys.argv[1].lower() == "--help":
print("Syntax: fenum.py [files...]")
print("\tEnumerate the given files (starting at 1) in the same order as they are passed to the script.")
return
for k,v in enumerate(sys.argv[1:], 1):
path, name = os.path.split(v if not v.endswith("/") else v[:-1])
if path:
path += "/"
try:
fname = "{}{} - {}".format(
path,
str(k).zfill(int(math.log10(len(sys.argv) - 1)) + 1),
name)
print("\"{}\" -> \"{}\"".format(v, fname))
os.rename(v, fname)
except Exception as e:
print(str(e))
main()
| Use log10 to get the amount of digits | Use log10 to get the amount of digits
| Python | mit | mphe/scripts,mphe/scripts,mall0c/scripts,mall0c/scripts |
9e42514ac030cc2fa3aab89addfa48cf0342f991 | kremlin/utils.py | kremlin/utils.py | """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
""" Create a thumbnail for an image
fp filesystem path to the full size image
h height (default is 128)
w width (default is 128)
The thumbnail will be unceremoniously dumped in the same
directory with 'thumbnail' between the file name and extension.
"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
| """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
""" Create a thumbnail for an image
fp filesystem path to the full size image
h height (default is 128)
w width (default is 128)
The thumbnail will be unceremoniously dumped in the same
directory with 'thumbnail' between the file name and extension.
"""
size = (h, w)
f, ext = os.path.splitext(fp)
with Image.open(fp) as im:
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
| Use context manager for PIL Open | Use context manager for PIL Open
This is currently kind of buggy with Python3. A harmless warning is
emitted since the handle is still closed late inside PIL/Pillow.
| Python | bsd-2-clause | glasnost/kremlin,glasnost/kremlin,glasnost/kremlin |
12d2e8033c46f06f5442cba40a7c2c673196ad1c | akanda/horizon/routers/views.py | akanda/horizon/routers/views.py | from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
ports = [api.quantum.Port(p) for p in router.ports]
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
| from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Right now we are listing, for both normal and
# admin users, all the ports on the user's networks
# the router is associated with. We may want in the
# future show the ports on the mgt and the external
# networks for the admin users.
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] == 'network:router_interface']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
| Fix the router's interfaces listing view to show only the interfaces on the user's networks filtering out interfaces on the mgt and public networks. | Fix the router's interfaces listing view to show only the
interfaces on the user's networks filtering out interfaces
on the mgt and public networks.
DHC-1512
Change-Id: I9b68b75d5e8325c4c70090fa500a417e23b1836f
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
| Python | apache-2.0 | dreamhost/akanda-horizon,dreamhost/akanda-horizon |
a2530b9cd2baf70591e377b6eb7d5104493989a1 | test/conftest.py | test/conftest.py | def pytest_addoption(parser):
parser.addoption("--domain", action="append", default=[],
help="list of stringinputs to pass to test functions")
def pytest_generate_tests(metafunc):
if 'domain' in metafunc.fixturenames:
metafunc.parametrize("domain",
metafunc.config.option.domain)
| def pytest_addoption(parser):
parser.addoption("--domain", action="append", default=[],
help="list of stringinputs to pass to test functions")
parser.addoption("--url", action="append", default=[],
help="list of stringinputs to pass to test functions")
def pytest_generate_tests(metafunc):
if 'domain' in metafunc.fixturenames:
metafunc.parametrize("domain",
metafunc.config.option.domain)
if 'url' in metafunc.fixturenames:
metafunc.parametrize("url",
metafunc.config.option.url)
| Add test parameter for site url | Add test parameter for site url
| Python | mit | HIIT/mediacollection |
e0af42d5431c42ee5e12fb228978ac8ce9c62807 | server/modules/persistent.py | server/modules/persistent.py | #!/usr/bin/python3
import argparse
import os
import icon_lib
parser = argparse.ArgumentParser(description='iconograph persistent')
parser.add_argument(
'--chroot-path',
dest='chroot_path',
action='store',
required=True)
FLAGS = parser.parse_args()
def main():
module = icon_lib.IconModule(FLAGS.chroot_path)
os.mkdir(os.path.join(FLAGS.chroot_path, 'persistent'))
tool_path = os.path.join(FLAGS.chroot_path, 'icon', 'persistent')
os.makedirs(tool_path, exist_ok=True)
script = os.path.join(tool_path, 'startup.sh')
with open(script, 'w') as fh:
os.chmod(fh.fileno(), 0o755)
fh.write("""\
#!/bin/bash
set -ex
e2fsck -a /persistent
mount -o data=journal,noatime,sync LABEL=PERSISTENT /persistent
""")
with module.ServiceFile('persistent.service') as fh:
fh.write("""
[Unit]
Description=Mount /persistent
DefaultDependencies=no
Conflicts=shutdown.target
After=systemd-remount-fs.service
Before=sysinit.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/icon/persistent/startup.sh
[Install]
WantedBy=sysinit.target
""")
module.EnableService('persistent.service')
if __name__ == '__main__':
main()
| #!/usr/bin/python3
import argparse
import os
import icon_lib
parser = argparse.ArgumentParser(description='iconograph persistent')
parser.add_argument(
'--chroot-path',
dest='chroot_path',
action='store',
required=True)
FLAGS = parser.parse_args()
def main():
module = icon_lib.IconModule(FLAGS.chroot_path)
os.mkdir(os.path.join(FLAGS.chroot_path, 'persistent'))
tool_path = os.path.join(FLAGS.chroot_path, 'icon', 'persistent')
os.makedirs(tool_path, exist_ok=True)
script = os.path.join(tool_path, 'startup.sh')
with open(script, 'w') as fh:
os.chmod(fh.fileno(), 0o755)
fh.write("""\
#!/bin/bash
set -ex
e2fsck -a /persistent
mount -o noatime LABEL=PERSISTENT /persistent
""")
with module.ServiceFile('persistent.service') as fh:
fh.write("""
[Unit]
Description=Mount /persistent
DefaultDependencies=no
Conflicts=shutdown.target
After=systemd-remount-fs.service
Before=sysinit.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/icon/persistent/startup.sh
[Install]
WantedBy=sysinit.target
""")
module.EnableService('persistent.service')
if __name__ == '__main__':
main()
| Remove flags that drastically slow down writes | Remove flags that drastically slow down writes
| Python | apache-2.0 | robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph |
ec2092c683f721e32a2d1d9792f296e140d6ba45 | paperwork_parser/exceptions.py | paperwork_parser/exceptions.py |
class InvalidPDFError(ValueError):
pass
class FieldParseError(Exception):
pass
|
class InvalidPDFError(ValueError):
pass
class UnknownVariantError(Exception):
pass
class FieldParseError(Exception):
pass
| Add new exception type for unknown variants | Add new exception type for unknown variants
| Python | mit | loanzen/zen_document_parser |
0b49114a6b0830fa0b05d32803ae52526b8e48ca | gnsq/backofftimer.py | gnsq/backofftimer.py | from random import randint
class BackoffTimer(object):
def __init__(self, ratio=1, max_interval=None, min_interval=None):
self.c = 0
self.ratio = ratio
self.max_interval = max_interval
self.min_interval = min_interval
def is_reset(self):
return self.c == 0
def reset(self):
self.c = 0
return self
def success(self):
self.c = max(self.c - 1, 0)
return self
def failure(self):
self.c += 1
return self
def get_interval(self):
k = pow(2, self.c) - 1
interval = randint(0, k) * self.ratio
if self.max_interval is not None:
interval = min(interval, self.max_interval)
if self.min_interval is not None:
interval = max(interval, self.min_interval)
return interval
| import random
class BackoffTimer(object):
def __init__(self, ratio=1, max_interval=None, min_interval=None):
self.c = 0
self.ratio = ratio
self.max_interval = max_interval
self.min_interval = min_interval
def is_reset(self):
return self.c == 0
def reset(self):
self.c = 0
return self
def success(self):
self.c = max(self.c - 1, 0)
return self
def failure(self):
self.c += 1
return self
def get_interval(self):
k = pow(2, self.c) - 1
interval = random.random() * k * self.ratio
if self.max_interval is not None:
interval = min(interval, self.max_interval)
if self.min_interval is not None:
interval = max(interval, self.min_interval)
return interval
| Return float for interval instead of int. | Return float for interval instead of int.
| Python | bsd-3-clause | hiringsolved/gnsq,wtolson/gnsq,wtolson/gnsq |
1d74b003818e260ae1f453cb26f1c9efc29e8ba2 | scripts/run_unit_test.py | scripts/run_unit_test.py | #!/usr/bin/env python
import serial
import os
import sys
import time
# Make and flash the unit test
FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
os.chdir(FILE_LOCATION + "/../")
print os.system("make flash_unit_test")
# Ask the user to reset the board
raw_input("\nPlease press the phsyical reset button on the STM32F4Discovery board and then press enter to continue...")
# Open a serial port
time.sleep(1)
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
# time.sleep(1)
# Send data to start USB OTG
print 'Write start'
ser.write("start")
print 'Run test'
# Read until we see the finished text
result = ''
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
except:
print '\nFailed to read'
sys.stdout.write(new)
result += new
if result.find("Finished") != -1:
break
finally:
# Print the result so the user can see and close the serial port
#print result
ser.close() | #!/usr/bin/env python
import serial
import os
import sys
import time
# Make and flash the unit test
FILE_LOCATION = os.path.dirname(os.path.abspath(__file__))
os.chdir(FILE_LOCATION + "/../")
print os.system("make flash_unit_test")
# Ask the user to reset the board
raw_input("""\nPlease press the phsyical reset button on
the STM32F4Discovery board and then press enter to continue...""")
# Open a serial port
time.sleep(1)
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
# time.sleep(1)
# Send data to start USB OTG
print 'Write start'
ser.write("start")
print 'Run test'
# Read until we see the finished text
result = ''
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
except:
print '\nFailed to read'
sys.stdout.write(new)
result += new
if result.find("Finished") != -1:
break
finally:
# Close the serial port
ser.close()
| Bring scripts folder up to pep8 standards | PEP8: Bring scripts folder up to pep8 standards
| Python | mit | fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project |
d498a9846567e4986ba2a2541b2b4e4719c2c83f | keras/__init__.py | keras/__init__.py | from __future__ import absolute_import
from . import activations
from . import applications
from . import backend
from . import datasets
from . import engine
from . import layers
from . import preprocessing
from . import utils
from . import wrappers
from . import callbacks
from . import constraints
from . import initializers
from . import metrics
from . import models
from . import losses
from . import optimizers
from . import regularizers
__version__ = '2.0.3'
| from __future__ import absolute_import
from . import activations
from . import applications
from . import backend
from . import datasets
from . import engine
from . import layers
from . import preprocessing
from . import utils
from . import wrappers
from . import callbacks
from . import constraints
from . import initializers
from . import metrics
from . import models
from . import losses
from . import optimizers
from . import regularizers
# Importable from root because it's technically not a layer
from .layers import Input
__version__ = '2.0.3'
| Make Input importable from root | Make Input importable from root
| Python | apache-2.0 | keras-team/keras,keras-team/keras |
8e20b56c4b91f673429697232926115db46e0c2d | spanky/commands/cmd_users.py | spanky/commands/cmd_users.py | import click
from spanky.cli import pass_context
from spanky.lib.users import UserInit
@click.command('users', short_help='creates users base on /etc/spanky/users')
@pass_context
def cli(ctx):
config = ctx.config.load('users.yml')()
user_init = UserInit(config)
user_init.build()
| import sys
import click
from spanky.cli import pass_context
from spanky.lib.users import UserInit
@click.command('users', short_help='creates users base on /etc/spanky/users')
@pass_context
def cli(ctx):
try:
config = ctx.config.load('users.yml')()
except IOError:
# no config lets bail
click.echo('No users to install')
sys.exit(1)
user_init = UserInit(config)
user_init.build()
| Return a 1 when we don't have user to install. | Return a 1 when we don't have user to install.
TODO: Make better return code
| Python | bsd-3-clause | pglbutt/spanky,pglbutt/spanky,pglbutt/spanky |
d0e139d286b18c9dcdc8c46161c4ebdf0f0f8d96 | examples/cooperative_binding.py | examples/cooperative_binding.py | import sys
import os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), '..'))
from crnpy.crn import CRN, from_react_file
__author__ = "Elisa Tonello"
__copyright__ = "Copyright (c) 2016, Elisa Tonello"
__license__ = "BSD"
__version__ = "0.0.1"
# Cooperative binding
print "Creating model..."
crn = from_react_file("data/reactions/cooperative_binding")
crn.inspect(True)
print
print("Remove ps1, ps2 and ps3 by qss")
crn.remove(qss = ['ps1', 'ps2', 'ps3'], debug = True)
for s, f in crn.removed_species: print(s + " = " + str(f))
crn.inspect(True)
| import sys
import os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), '..'))
from crnpy.crn import CRN, from_react_file
__author__ = "Elisa Tonello"
__copyright__ = "Copyright (c) 2016, Elisa Tonello"
__license__ = "BSD"
__version__ = "0.0.1"
# Cooperative binding
print "Creating model..."
crn = from_react_file("data/reactions/cooperative_binding")
crn.inspect(True)
print("")
print("Remove ps1, ps2 and ps3 by qssa")
crn.remove(qss = ['ps1', 'ps2', 'ps3'])
for s, f in crn.removed_species: print(s + " = " + str(f))
crn.inspect(True)
| Remove debug and adjusted print. | Remove debug and adjusted print.
| Python | bsd-3-clause | etonello/crnpy |
67d3193683d2215fdd660bdc086801fe761c7db7 | src/views.py | src/views.py | from flask import render_template
from app import app
@app.route('/')
def index():
return render_template('index.html', active='index')
@app.route('/contact/')
def contact():
return render_template('contact.html', active='contact')
@app.context_processor
def utility_processor():
def page_title(title=None):
return "{} | {}".format(title, app.config['SITE_TITLE']) if title \
else app.config['SITE_TITLE']
def post_source(path):
return '{}{}{}'.format(app.config['POST_SOURCE_ROOT'],
path,
app.config['FLATPAGES_EXTENSION'])
return dict(page_title=page_title, post_source=post_source)
@app.template_filter('date')
def date_filter(date):
return date.strftime('%B %-d, %Y')
| import os
from flask import render_template
from flask import send_from_directory
from app import app
@app.route('/')
def index():
return render_template('index.html', active='index')
@app.route('/contact/')
def contact():
return render_template('contact.html', active='contact')
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'favicon.ico',
mimetype='image/vnd.microsoft.icon')
@app.context_processor
def utility_processor():
def page_title(title=None):
return "{} | {}".format(title, app.config['SITE_TITLE']) if title \
else app.config['SITE_TITLE']
def post_source(path):
return '{}{}{}'.format(app.config['POST_SOURCE_ROOT'],
path,
app.config['FLATPAGES_EXTENSION'])
return dict(page_title=page_title, post_source=post_source)
@app.template_filter('date')
def date_filter(date):
return date.strftime('%B %-d, %Y')
| Make the favicon available at /favicon.ico | Make the favicon available at /favicon.ico
| Python | mit | matachi/MaTachi.github.io,matachi/MaTachi.github.io |
0cda8950a661a0d994d7b5824af949ec1e40f584 | setup.py | setup.py | from distutils.core import setup
setup(name="zutil",
version='0.1.4',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'mpi4py',
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
) | from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| Make mpi an optional dependency | Make mpi an optional dependency
| Python | mit | zCFD/zutil |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.