| commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang |
|---|---|---|---|---|---|---|---|---|
4e73a057354d3445d24e7e480793f2fe18940fb1 | Bump version to 0.4.4.dev1 | team23/django_backend,team23/django_backend,team23/django_backend,team23/django_backend,team23/django_backend | django_backend/__init__.py | django_backend/__init__.py | from .backend.renderable import Renderable # noqa
from .group import Group # noqa
from .sitebackend import SiteBackend
__version__ = '0.4.4.dev1'
default_app_config = 'django_backend.apps.DjangoBackendConfig'
site = SiteBackend(id='backend')
| from .backend.renderable import Renderable # noqa
from .group import Group # noqa
from .sitebackend import SiteBackend
__version__ = '0.4.3'
default_app_config = 'django_backend.apps.DjangoBackendConfig'
site = SiteBackend(id='backend')
| bsd-3-clause | Python |
6e9b61a10ca47770d3e0b5ada25339fee1383385 | Fix typo and use += instead of .extend() to concat lists | jaap3/django-richtextfield,jaap3/django-richtextfield | djrichtextfield/widgets.py | djrichtextfield/widgets.py | from __future__ import unicode_literals
import json
from django.forms.widgets import Media, Textarea
from django.urls import reverse
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import format_html
from djrichtextfield import settings
class RichTextWidget(Textarea):
CSS_CLASS = 'djrichtextfield'
INIT_URL = 'djrichtextfield_init'
SETTINGS_ATTR = 'data-field-settings'
CONTAINER_CLASS = 'field-box'
PROFILE_KEY = 'profiles'
def __init__(self, attrs=None, field_settings=None):
defaults = {'class': self.CSS_CLASS}
if attrs:
if 'class' in attrs:
attrs['class'] = ' '.join([attrs['class'], defaults['class']])
defaults.update(attrs)
self.field_settings = field_settings or {}
super(RichTextWidget, self).__init__(defaults)
@property
def media(self):
js = settings.CONFIG['js']
js += [
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/jquery.init.js',
reverse(self.INIT_URL)
]
return Media(js=js)
def get_field_settings(self):
"""
Get the field settings, if the configured setting is a string try
to get a 'profile' from the global config.
"""
field_settings = None
if self.field_settings:
if isinstance(self.field_settings, six.string_types):
profiles = settings.CONFIG.get(self.PROFILE_KEY, {})
field_settings = profiles.get(self.field_settings)
else:
field_settings = self.field_settings
return field_settings
def render(self, name, value, attrs=None):
attrs = attrs or {}
field_settings = self.get_field_settings()
if field_settings:
attrs[self.SETTINGS_ATTR] = json.dumps(field_settings,
default=force_text)
textarea = super(RichTextWidget, self).render(name, value, attrs=attrs)
return format_html(
'<div class="{0}">{1}</div>', self.CONTAINER_CLASS, textarea)
| from __future__ import unicode_literals
import json
from django.forms.widgets import Media, Textarea
from django.urls import reverse
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import format_html
from djrichtextfield import settings
class RichTextWidget(Textarea):
CSS_CLASS = 'djrichtextfield'
INIT_URL = 'djrichtextfield_init'
SETTINGS_ATTR = 'data-field-settings'
CONTAINER_CLASS = 'field-box'
PROFILE_KEY = 'profiles'
def __init__(self, attrs=None, field_settings=None):
defaults = {'class': self.CSS_CLASS}
if attrs:
if 'class' in attrs:
attrs['class'] = ' '.join([attrs['class'], defaults['class']])
defaults.update(attrs)
self.field_settings = field_settings or {}
super(RichTextWidget, self).__init__(defaults)
@property
def media(self):
js = settings.CONFIG['js']
js.extend([
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/jquery.init.js',
reverse(self.INIT_URL))
])
return Media(js=js)
def get_field_settings(self):
"""
Get the field settings, if the configured setting is a string try
to get a 'profile' from the global config.
"""
field_settings = None
if self.field_settings:
if isinstance(self.field_settings, six.string_types):
profiles = settings.CONFIG.get(self.PROFILE_KEY, {})
field_settings = profiles.get(self.field_settings)
else:
field_settings = self.field_settings
return field_settings
def render(self, name, value, attrs=None):
attrs = attrs or {}
field_settings = self.get_field_settings()
if field_settings:
attrs[self.SETTINGS_ATTR] = json.dumps(field_settings,
default=force_text)
textarea = super(RichTextWidget, self).render(name, value, attrs=attrs)
return format_html(
'<div class="{0}">{1}</div>', self.CONTAINER_CLASS, textarea)
| mit | Python |
b8258d3ad32fbd283f01578bf46aef4c8e5ac55d | Add assertion to prevent a class extending itself | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | thinglang/parser/definitions/thing_definition.py | thinglang/parser/definitions/thing_definition.py | from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.context import CompilationContext
from thinglang.foundation import definitions
from thinglang.lexer.definitions.tags import LexicalInheritanceTag
from thinglang.lexer.definitions.thing_definition import LexicalDeclarationThing
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.definitions.member_definition import MemberDefinition
from thinglang.parser.definitions.method_definition import MethodDefinition
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class ThingDefinition(BaseNode):
"""
Defines a thing, also known as a class
"""
def __init__(self, name, extends=None, generics=None):
super(ThingDefinition, self).__init__([name, extends, generics])
self.name, self.extends, self.generics = name, extends, generics
def __repr__(self):
return f'thing {self.name}'
def compile(self, context: CompilationContext):
symbol_map = context.symbols[self.name]
for method in self.methods:
buffer = CompilationBuffer(context.symbols, method.locals)
method.compile(buffer)
context.add((context.symbols.index(symbol_map), symbol_map[method.name].index), method, buffer)
def finalize(self):
super().finalize()
if Identifier.constructor() not in self.names: # Add implicit constructor
self.children.insert(0, MethodDefinition.empty_constructor(self))
if self.extends and self.extends.untyped in definitions.INTERNAL_SOURCES:
self.children.insert(0, MemberDefinition(Identifier.super(), self.extends).deriving_from(self))
@property
def members(self):
return [x for x in self.children if isinstance(x, MemberDefinition)]
@property
def methods(self):
return [x for x in self.children if isinstance(x, MethodDefinition)]
@property
def names(self):
return [x.name for x in self.members + self.methods]
def slots(self, context):
return sum(len(container.members) for container in context.symbols.inheritance(self))
@staticmethod
@ParserRule.mark
def base_definition(_: LexicalDeclarationThing, name: Identifier):
return ThingDefinition(name)
@staticmethod
@ParserRule.mark
def define_generic(thing: 'ThingDefinition', generics: 'TypeVector'):
thing.generics = generics
return thing
@staticmethod
@ParserRule.mark
def define_inheritance(thing: 'ThingDefinition', _: LexicalInheritanceTag, extends: Identifier):
assert extends != thing.name, 'Class cannot extend itself'
thing.extends = extends
return thing
| from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.context import CompilationContext
from thinglang.foundation import definitions
from thinglang.lexer.definitions.tags import LexicalInheritanceTag
from thinglang.lexer.definitions.thing_definition import LexicalDeclarationThing
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.definitions.member_definition import MemberDefinition
from thinglang.parser.definitions.method_definition import MethodDefinition
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class ThingDefinition(BaseNode):
"""
Defines a thing, also known as a class
"""
def __init__(self, name, extends=None, generics=None):
super(ThingDefinition, self).__init__([name, extends, generics])
self.name, self.extends, self.generics = name, extends, generics
def __repr__(self):
return f'thing {self.name}'
def compile(self, context: CompilationContext):
symbol_map = context.symbols[self.name]
for method in self.methods:
buffer = CompilationBuffer(context.symbols, method.locals)
method.compile(buffer)
context.add((context.symbols.index(symbol_map), symbol_map[method.name].index), method, buffer)
def finalize(self):
super().finalize()
if Identifier.constructor() not in self.names: # Add implicit constructor
self.children.insert(0, MethodDefinition.empty_constructor(self))
if self.extends and self.extends.untyped in definitions.INTERNAL_SOURCES:
self.children.insert(0, MemberDefinition(Identifier.super(), self.extends).deriving_from(self))
@property
def members(self):
return [x for x in self.children if isinstance(x, MemberDefinition)]
@property
def methods(self):
return [x for x in self.children if isinstance(x, MethodDefinition)]
@property
def names(self):
return [x.name for x in self.members + self.methods]
def slots(self, context):
return sum(len(container.members) for container in context.symbols.inheritance(self))
@staticmethod
@ParserRule.mark
def base_definition(_: LexicalDeclarationThing, name: Identifier):
return ThingDefinition(name)
@staticmethod
@ParserRule.mark
def define_generic(thing: 'ThingDefinition', generics: 'TypeVector'):
thing.generics = generics
return thing
@staticmethod
@ParserRule.mark
def define_inheritance(thing: 'ThingDefinition', _: LexicalInheritanceTag, extends: Identifier):
thing.extends = extends
return thing
| mit | Python |
37677e052c948d5ccced28bcf62ea8f85bac9822 | test song | romulojales/to-be-musician,romulojales/to-be-musician | to_be_a_musician/djtinysong/tests/test_models.py | to_be_a_musician/djtinysong/tests/test_models.py | import unittest
from djtinysong.models import Song
class TestView(unittest.TestCase):
def test_simple_class_instance(self):
dictionary = {
"Url": "http:\/\/tinysong.com\/8We2",
"SongID": 269743,
"SongName": "The Legend Of Lil' Beethoven",
"ArtistID": 7620,
"ArtistName": "Sparks",
"AlbumID": 204019,
"AlbumName": "Sparks"
}
music = Song(**dictionary)
self.assertEquals(music.tinySongURL, dictionary["Url"])
self.assertEquals(music.songId, dictionary["SongID"])
self.assertEquals(music.songName, dictionary["SongName"])
self.assertEquals(music.artistId, dictionary["ArtistID"])
self.assertEquals(music.artistName, dictionary["ArtistName"])
self.assertEquals(music.albumId, dictionary["AlbumID"])
self.assertEquals(music.albumName, dictionary["AlbumName"])
| import unittest
from djtinysong.models import Song
class Test(unittest.TestCase):
def test_simple_class_instance(self):
dictionary = {
"Url": "http:\/\/tinysong.com\/8We2",
"SongID": 269743,
"SongName": "The Legend Of Lil' Beethoven",
"ArtistID": 7620,
"ArtistName": "Sparks",
"AlbumID": 204019,
"AlbumName": "Sparks"
}
music = Song(**dictionary)
self.assertEquals(music.tinySongURL, dictionary["Url"])
self.assertEquals(music.songId, dictionary["SongID"])
self.assertEquals(music.songName, dictionary["SongName"])
self.assertEquals(music.artistId, dictionary["ArtistID"])
self.assertEquals(music.artistName, dictionary["ArtistName"])
self.assertEquals(music.albumId, dictionary["AlbumID"])
self.assertEquals(music.albumName, dictionary["AlbumName"])
| apache-2.0 | Python |
365e3acae3973dc8fee795312ecd7a5990144af4 | remove redundant params, update annotations and comments, add doctest | hell03end/hse_ruz | ruz/utils/logging.py | ruz/utils/logging.py | import logging
from collections import Callable
from functools import wraps
def Logger(name: str, level: int=logging.INFO, **kwargs) -> logging.RootLogger:
"""
Creates configured logger
:param name, required - name for logger.
:param level - logging level.
:param format, str - logging format.
Usage
-----
logger = Logger(__name__)
>>> Logger("some name").info("Hello, world!")
... - some name - INFO - Hello, world!
>>> Logger()
Traceback (most recent call last):
...
TypeError: Logger() missing 1 required positional argument: 'name'
>>> Logger(123)
Traceback (most recent call last):
...
ValueError: Expect str, got: <class 'int'>
"""
if not isinstance(name, str):
raise ValueError("Expect str, got: {}".format(type(name)))
logging.basicConfig(
format=kwargs.pop(
"format",
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
),
level=level
)
return logging.getLogger(name)
def log(func: Callable) -> Callable:
"""
Log function entering, arguments and exiting (to debug)
:param func, required - wrapped function/method.
Usage
-----
@log
def some_func():
pass
"""
logger = Logger(
name="{}::{}".format(func.__module__, func.__name__),
level=logging.DEBUG
)
@wraps(func)
def wrapper(*args, **kwargs) -> object:
logger.debug("Entering: %s", func.__name__)
for arg in args:
logger.debug("arg::%s", arg)
for key, value in kwargs.items():
logger.debug("kwarg::%s=%s", key, value)
result = func(*args, **kwargs)
logger.debug("Exiting: %s", func.__name__)
return result
return wrapper
if __name__ == "__main__":
import doctest
doctest.testmod()
| import logging
from collections import Callable
from functools import wraps
def Logger(name: str, level: int=logging.INFO, **kwargs) -> logging.RootLogger:
logging.basicConfig(
format=kwargs.pop(
"format",
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
),
level=level
)
return logging.getLogger(name)
def log(func: Callable, level: int=logging.DEBUG, **kwargs) -> Callable:
""" Log function entering, arguments and exiting (to debug) """
logger = Logger(
"{}::{}".format(func.__module__, func.__name__),
level,
**kwargs
)
with_args = kwargs.pop("with_args", True)
@wraps(func)
def wrapper(*args, **kwargs) -> object:
logger.debug("Entering: %s", func.__name__)
if with_args:
for arg in args:
logger.debug(arg)
for key, value in kwargs.items():
logger.debug("%s = %s", key, value)
result = func(*args, **kwargs)
logger.debug("Exiting: %s", func.__name__)
return result
return wrapper
| mit | Python |
958b86044e92ab44c95acae86155be0547569c9e | Use recent linuxdeployqt for appimage | OneMoreGres/ScreenTranslator,OneMoreGres/ScreenTranslator,OneMoreGres/ScreenTranslator,OneMoreGres/ScreenTranslator | share/ci/appimage.py | share/ci/appimage.py | import common as c
from config import *
import os
import sys
import subprocess as sub
import shutil
from glob import glob
if len(sys.argv) > 1 and sys.argv[1] == 'glibc_version': # subcommand
sub.run('ldd --version | head -n 1 | grep -Po "\\d\\.\\d\\d"', shell=True)
exit(0)
tag = os.environ.get('TAG', '')
artifact_name = '{}-{}{}.AppImage'.format(app_name, app_version, tag)
if len(sys.argv) > 1 and sys.argv[1] == 'artifact_name': # subcommand
c.print(artifact_name)
exit(0)
artifact_path = os.path.abspath(artifact_name)
c.print('>> Making appimage')
base_url = 'https://github.com/probonopd/linuxdeployqt/releases/download'
continuous_url = base_url + '/continuous/linuxdeployqt-continuous-x86_64.AppImage'
tagged_url = base_url + '/6/linuxdeployqt-6-x86_64.AppImage'
linuxdeployqt_url = continuous_url
linuxdeployqt_original = os.path.basename(linuxdeployqt_url)
c.download(linuxdeployqt_url, linuxdeployqt_original)
c.run('chmod a+x {}'.format(linuxdeployqt_original))
linuxdeployqt_bin = os.path.abspath('linuxdeployqt')
c.symlink(linuxdeployqt_original, linuxdeployqt_bin)
os.chdir(build_dir)
install_dir = os.path.abspath('appdir')
c.recreate_dir(install_dir)
c.run('make INSTALL_ROOT={0} DESTDIR={0} install'.format(install_dir))
if c.is_inside_docker():
c.run('{} --appimage-extract'.format(linuxdeployqt_bin))
linuxdeployqt_bin = os.path.abspath('squashfs-root/AppRun')
os.environ['LD_LIBRARY_PATH'] = dependencies_dir + '/lib'
os.environ['VERSION'] = app_version
# debug flags: -unsupported-bundle-everything -unsupported-allow-new-glibc
flags = '' if os.getenv("DEBUG") is None else '-unsupported-allow-new-glibc'
additional_files = glob(ssl_dir + '/lib/lib*.so.*') + \
glob('/usr/lib/x86_64-linux-gnu/nss/*') + \
glob(dependencies_dir + '/lib/libtesseract-*.so')
out_lib_dir = install_dir + '/usr/lib'
os.makedirs(out_lib_dir, exist_ok=True)
for f in additional_files:
c.print('>> Copying {} to {}'.format(f, out_lib_dir))
shutil.copy(f, out_lib_dir)
c.ensure_got_path('{}/usr/share/doc/libc6/copyright'.format(install_dir))
c.run('{} {}/usr/share/applications/*.desktop {} -appimage -qmake={}/bin/qmake'.format(
linuxdeployqt_bin, install_dir, flags, qt_dir))
c.run('mv {}-{}*.AppImage "{}"'.format(app_name, app_version, artifact_path))
bin_path = install_dir + '/usr/bin/' + bin_name
c.print('>> Md5 {} {}'.format(bin_path, c.md5sum(bin_path)))
| import common as c
from config import *
import os
import sys
import subprocess as sub
import shutil
from glob import glob
if len(sys.argv) > 1 and sys.argv[1] == 'glibc_version': # subcommand
sub.run('ldd --version | head -n 1 | grep -Po "\\d\\.\\d\\d"', shell=True)
exit(0)
tag = os.environ.get('TAG', '')
artifact_name = '{}-{}{}.AppImage'.format(app_name, app_version, tag)
if len(sys.argv) > 1 and sys.argv[1] == 'artifact_name': # subcommand
c.print(artifact_name)
exit(0)
artifact_path = os.path.abspath(artifact_name)
c.print('>> Making appimage')
base_url = 'https://github.com/probonopd/linuxdeployqt/releases/download'
continuous_url = base_url + '/continuous/linuxdeployqt-continuous-x86_64.AppImage'
tagged_url = base_url + '/6/linuxdeployqt-6-x86_64.AppImage'
linuxdeployqt_url = tagged_url
linuxdeployqt_original = os.path.basename(linuxdeployqt_url)
c.download(linuxdeployqt_url, linuxdeployqt_original)
c.run('chmod a+x {}'.format(linuxdeployqt_original))
linuxdeployqt_bin = os.path.abspath('linuxdeployqt')
c.symlink(linuxdeployqt_original, linuxdeployqt_bin)
os.chdir(build_dir)
install_dir = os.path.abspath('appdir')
c.recreate_dir(install_dir)
c.run('make INSTALL_ROOT={0} DESTDIR={0} install'.format(install_dir))
if c.is_inside_docker():
c.run('{} --appimage-extract'.format(linuxdeployqt_bin))
linuxdeployqt_bin = os.path.abspath('squashfs-root/AppRun')
os.environ['LD_LIBRARY_PATH'] = dependencies_dir + '/lib'
os.environ['VERSION'] = app_version
# debug flags: -unsupported-bundle-everything -unsupported-allow-new-glibc
flags = '' if os.getenv("DEBUG") is None else '-unsupported-allow-new-glibc'
additional_files = glob(ssl_dir + '/lib/lib*.so.*') + \
glob('/usr/lib/x86_64-linux-gnu/nss/*') + \
glob(dependencies_dir + '/lib/libtesseract-*.so')
out_lib_dir = install_dir + '/usr/lib'
os.makedirs(out_lib_dir, exist_ok=True)
for f in additional_files:
c.print('>> Copying {} to {}'.format(f, out_lib_dir))
shutil.copy(f, out_lib_dir)
c.run('{} {}/usr/share/applications/*.desktop {} -appimage -qmake={}/bin/qmake'.format(
linuxdeployqt_bin, install_dir, flags, qt_dir))
c.run('mv {}-{}*.AppImage "{}"'.format(app_name, app_version, artifact_path))
bin_path = install_dir + '/usr/bin/' + bin_name
c.print('>> Md5 {} {}'.format(bin_path, c.md5sum(bin_path)))
| mit | Python |
58acfa1cdfadd60d63c0e485c512bbe974f89407 | Fix to pythonpep code. | alnorth/stackdoc,alnorth/stackdoc,alnorth/stackdoc | update-database/stackdoc/namespaces/pythonpep.py | update-database/stackdoc/namespaces/pythonpep.py | import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://www.python.org/" in body:
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http:\/\/www\.python\.org\/dev\/peps\/pep\-([0-9]+)", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return [
"python"
]
| import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://docs.python.org/" in body:
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http:\/\/www\.python\.org\/dev\/peps\/pep\-([0-9]+)", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return [
"python"
]
| bsd-3-clause | Python |
f6e5833e60015f762a95dd1753e2aa43b6baddda | Update melodyextraction.py | altugkarakurt/morty | extras/melodyextraction.py | extras/melodyextraction.py | # -*- coding: utf-8 -*-
from predominantmelodymakam.predominantmelodymakam import PredominantMelodyMakam
from fileoperations.fileoperations import get_filenames_in_dir
import os
import json
import numpy as np
import sys
import math
# Number of decimal points to round the pitch track to.
DECIMAL = 2
# get the input index
if len(sys.argv) == 1:
idx = []
elif len(sys.argv) == 2: # for parallelization
idx = int(sys.argv[1])
else:
raise ValueError('Only accepts zero or one argument')
print(idx)
extractor = PredominantMelodyMakam()
audiodir = './' # audio folder and sub folders
# text file
audio_files = get_filenames_in_dir(audiodir, keyword="*.mp3")[0]
txtfiles = [os.path.join(os.path.dirname(f), os.path.basename(
os.path.splitext(f)[0]) + '.pitch') for f in audio_files]
if idx: # if index is given
audio_files = [audio_files[idx]]
txtfiles = [txtfiles[idx]]
for ii, mp3 in enumerate(audio_files):
print("{0:d}: {1:s}".format(ii + 1, os.path.basename(mp3)))
if os.path.isfile(txtfiles[ii]): # already exists
print(" > Already exist; skipped.")
else:
results = extractor.run(mp3)
pitch_track = np.array(json.loads(results['pitch']))[:, [0, 1]]
pitch_track = (np.around([i * math.pow(10, DECIMAL)
for i in pitch_track[:, 1]]) / 100.0)
pitch_track = pitch_track.tolist()
with open(txtfiles[ii], 'w') as f:
for i in pitch_track:
f.write("{0:.2f}\n".format(i))
| # -*- coding: utf-8 -*-
from predominantmelodymakam.predominantmelodymakam import PredominantMelodyMakam
from fileoperations.fileoperations import get_filenames_in_dir
import os
import json
import numpy as np
import sys
import math
# Number of decimal points to round the pitch track to.
DECIMAL = 2
# get the input index
if len(sys.argv) == 1:
idx = []
elif len(sys.argv) == 2: # for parallelization
idx = int(sys.argv[1])
else:
raise ValueError('Only accepts zero or one argument')
print(idx)
extractor = PredominantMelodyMakam()
audiodir = './' # audio folder and sub folders
# text file
audio_files = get_filenames_in_dir(audiodir, keyword="*.mp3")[0]
txtfiles = [os.path.join(os.path.dirname(f), os.path.basename(
os.path.splitext(f)[0]) + '.pitch') for f in audio_files]
if idx: # if index is given
audio_files = [audio_files[idx]]
txtfiles = [txtfiles[idx]]
for ii, mp3 in enumerate(audio_files):
print(' ')
print("{0:d}: {1:s}".format(ii + 1, os.path.basename(mp3)))
if os.path.isfile(txtfiles[ii]): # already exists
print(" > Already exist; skipped.")
else:
results = extractor.run(mp3)
pitch_track = np.array(json.loads(results['pitch']))[:, [0, 1]]
pitch_track = (np.around([i * math.pow(10, DECIMAL)
for i in pitch_track[:, 1]]) / 100.0)
pitch_track = pitch_track.tolist()
with open(txtfiles[ii], 'w') as f:
for i in pitch_track:
f.write("{0:.2f}\n".format(i))
| agpl-3.0 | Python |
85e089e3fa573e90e966927322257728f5fc7df7 | add new version (#16130) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/nccl/package.py | var/spack/repos/builtin/packages/nccl/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Nccl(MakefilePackage):
"""Optimized primitives for collective multi-GPU communication."""
homepage = "https://github.com/NVIDIA/nccl"
url = "https://github.com/NVIDIA/nccl/archive/v2.6.4-1.tar.gz"
maintainers = ['adamjstewart']
version('2.6.4-1', sha256='ed8c9dfd40e013003923ae006787b1a30d3cb363b47d2e4307eaa2624ebba2ba')
version('2.5.7-1', sha256='781a6bb2278566be4abbdf22b2fa19afc7306cff4b312c82bd782979b368014e')
version('2.5.6-2', sha256='8a30e0b4813a825592872fcbeeede22a659e2c399074dcce02960591dc81387d')
version('2.5.6-1', sha256='38a37d98be11f43232b988719226866b407f08b9666dcaf345796bd8f354ef54')
version('2.4.8-1', sha256='e2260da448ebbebe437f74768a346d28c74eabdb92e372a3dc6652a626318924')
version('2.4.6-1', sha256='ea4421061a7b9c454f2e088f68bfdbbcefab80ce81cafc70ee6c7742b1439591')
version('2.4.2-1', sha256='e3dd04b22eb541394bd818e5f78ac23a09cc549690d5d55d6fccc1a36155385a')
version('2.3.7-1', sha256='e6eff80d9d2db13c61f8452e1400ca2f098d2dfe42857cb23413ce081c5b9e9b')
version('2.3.5-5', sha256='bac9950b4d3980c25baa8e3e4541d2dfb4d21edf32ad3b89022d04920357142f')
version('1.3.4-1', sha256='11e4eb44555bb28b9cbad973dacb4640b82710c9769e719afc2013b63ffaf884')
version('1.3.0-1', sha256='53f36151061907bdcafad1c26c1d9370a0a8400f561a83704a5138213ba51003')
depends_on('cuda')
depends_on('rdma-core', when='@2.3.5-5:')
# https://github.com/NVIDIA/nccl/issues/244
patch('so_reuseport.patch', when='@2.3.7-1:2.4.8-1')
@property
def build_targets(self):
return ['CUDA_HOME={0}'.format(self.spec['cuda'].prefix)]
@property
def install_targets(self):
if self.version >= Version('2.3.5-5'):
return ['PREFIX={0}'.format(self.prefix), 'src.install']
else:
return ['PREFIX={0}'.format(self.prefix), 'install']
| # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Nccl(MakefilePackage):
"""Optimized primitives for collective multi-GPU communication."""
homepage = "https://github.com/NVIDIA/nccl"
url = "https://github.com/NVIDIA/nccl/archive/v2.4.8-1.tar.gz"
version('2.5.7-1', sha256='781a6bb2278566be4abbdf22b2fa19afc7306cff4b312c82bd782979b368014e')
version('2.5.6-2', sha256='8a30e0b4813a825592872fcbeeede22a659e2c399074dcce02960591dc81387d')
version('2.5.6-1', sha256='38a37d98be11f43232b988719226866b407f08b9666dcaf345796bd8f354ef54')
version('2.4.8-1', sha256='e2260da448ebbebe437f74768a346d28c74eabdb92e372a3dc6652a626318924')
version('2.4.6-1', sha256='ea4421061a7b9c454f2e088f68bfdbbcefab80ce81cafc70ee6c7742b1439591')
version('2.4.2-1', sha256='e3dd04b22eb541394bd818e5f78ac23a09cc549690d5d55d6fccc1a36155385a')
version('2.3.7-1', sha256='e6eff80d9d2db13c61f8452e1400ca2f098d2dfe42857cb23413ce081c5b9e9b')
version('2.3.5-5', sha256='bac9950b4d3980c25baa8e3e4541d2dfb4d21edf32ad3b89022d04920357142f')
version('1.3.4-1', sha256='11e4eb44555bb28b9cbad973dacb4640b82710c9769e719afc2013b63ffaf884')
version('1.3.0-1', sha256='53f36151061907bdcafad1c26c1d9370a0a8400f561a83704a5138213ba51003')
depends_on('cuda')
depends_on('rdma-core', when='@2.3.5-5:')
# https://github.com/NVIDIA/nccl/issues/244
patch('so_reuseport.patch', when='@2.3.7-1:2.4.8-1')
@property
def build_targets(self):
return ['CUDA_HOME={0}'.format(self.spec['cuda'].prefix)]
@property
def install_targets(self):
if self.version >= Version('2.3.5-5'):
return ['PREFIX={0}'.format(self.prefix), 'src.install']
else:
return ['PREFIX={0}'.format(self.prefix), 'install']
| lgpl-2.1 | Python |
6855e8e10fa5e0c00bceabbe8baee942dfadd5d1 | Add 8.44 (#16397) | LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/pcre/package.py | var/spack/repos/builtin/packages/pcre/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Pcre(AutotoolsPackage):
"""The PCRE package contains Perl Compatible Regular Expression
libraries. These are useful for implementing regular expression
pattern matching using the same syntax and semantics as Perl 5."""
homepage = "http://www.pcre.org"
url = "https://ftp.pcre.org/pub/pcre/pcre-8.42.tar.bz2"
version('8.44', sha256='19108658b23b3ec5058edc9f66ac545ea19f9537234be1ec62b714c84399366d')
version('8.43', sha256='91e762520003013834ac1adb4a938d53b22a216341c061b0cf05603b290faf6b')
version('8.42', sha256='2cd04b7c887808be030254e8d77de11d3fe9d4505c39d4b15d2664ffe8bf9301')
version('8.41', sha256='e62c7eac5ae7c0e7286db61ff82912e1c0b7a0c13706616e94a7dd729321b530')
version('8.40', sha256='00e27a29ead4267e3de8111fcaa59b132d0533cdfdbdddf4b0604279acbcf4f4')
version('8.39', sha256='b858099f82483031ee02092711689e7245586ada49e534a06e678b8ea9549e8b')
version('8.38', sha256='b9e02d36e23024d6c02a2e5b25204b3a4fa6ade43e0a5f869f254f49535079df')
patch('intel.patch', when='@8.38')
variant('jit', default=False,
description='Enable JIT support.')
variant('multibyte', default=True,
description='Enable support for 16 and 32 bit characters.')
variant('utf', default=True,
description='Enable support for UTF-8/16/32, '
'incompatible with EBCDIC.')
def configure_args(self):
args = []
if '+jit' in self.spec:
args.append('--enable-jit')
if '+multibyte' in self.spec:
args.append('--enable-pcre16')
args.append('--enable-pcre32')
if '+utf' in self.spec:
args.append('--enable-utf')
args.append('--enable-unicode-properties')
return args
| # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Pcre(AutotoolsPackage):
"""The PCRE package contains Perl Compatible Regular Expression
libraries. These are useful for implementing regular expression
pattern matching using the same syntax and semantics as Perl 5."""
homepage = "http://www.pcre.org"
url = "https://ftp.pcre.org/pub/pcre/pcre-8.42.tar.bz2"
version('8.43', sha256='91e762520003013834ac1adb4a938d53b22a216341c061b0cf05603b290faf6b')
version('8.42', sha256='2cd04b7c887808be030254e8d77de11d3fe9d4505c39d4b15d2664ffe8bf9301')
version('8.41', sha256='e62c7eac5ae7c0e7286db61ff82912e1c0b7a0c13706616e94a7dd729321b530')
version('8.40', sha256='00e27a29ead4267e3de8111fcaa59b132d0533cdfdbdddf4b0604279acbcf4f4')
version('8.39', sha256='b858099f82483031ee02092711689e7245586ada49e534a06e678b8ea9549e8b')
version('8.38', sha256='b9e02d36e23024d6c02a2e5b25204b3a4fa6ade43e0a5f869f254f49535079df')
patch('intel.patch', when='@8.38')
variant('jit', default=False,
description='Enable JIT support.')
variant('multibyte', default=True,
description='Enable support for 16 and 32 bit characters.')
variant('utf', default=True,
description='Enable support for UTF-8/16/32, '
'incompatible with EBCDIC.')
def configure_args(self):
args = []
if '+jit' in self.spec:
args.append('--enable-jit')
if '+multibyte' in self.spec:
args.append('--enable-pcre16')
args.append('--enable-pcre32')
if '+utf' in self.spec:
args.append('--enable-utf')
args.append('--enable-unicode-properties')
return args
| lgpl-2.1 | Python |
f65ec11fd763dda6d7b8988a6764f89aa5383df8 | Increment to version 1.11.4 | nyaruka/smartmin,nyaruka/smartmin,nyaruka/smartmin | smartmin/__init__.py | smartmin/__init__.py | from __future__ import unicode_literals
__version__ = '1.11.4'
| from __future__ import unicode_literals
__version__ = '1.11.3'
| bsd-3-clause | Python |
74ca49c62ba63b7eb42f3825ea5c036e32b98d50 | Work around null TfL common names | jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk | busstops/management/commands/import_tfl_stops.py | busstops/management/commands/import_tfl_stops.py | """
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
| """
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
| mpl-2.0 | Python |
05f7c621b7c7504771349900d74633c2c2698fee | update test | project-callisto/callisto-core,project-callisto/callisto-core,SexualHealthInnovations/callisto-core,SexualHealthInnovations/callisto-core | callisto_core/wizard_builder/tests/test_forms.py | callisto_core/wizard_builder/tests/test_forms.py | from django.test import TestCase
from .. import managers
class FormSerializationTest(TestCase):
manager = managers.FormManager
fixtures = [
'wizard_builder_data',
]
expected_data = [{
'descriptive_text': 'answer wisely',
'field_id': 'question_2',
'id': 2,
'page': 2,
'position': 0,
'question_text': 'do androids dream of electric sheep?',
'text': 'do androids dream of electric sheep?',
'type': 'singlelinetext',
'choices': [],
'skip_eval': True,
}]
@classmethod
def setUpClass(cls):
super().setUpClass()
form = cls.manager.get_form_models()[1]
cls.actual_data = form.serialized
def test_same_size(self):
actual_data = self.actual_data
expected_data = self.expected_data
self.assertEqual(
len(actual_data),
len(expected_data),
)
def test_same_questions(self):
actual_data = self.actual_data
expected_data = self.expected_data
for index, expected_question in enumerate(expected_data):
actual_question = actual_data[index]
self.assertEqual(
actual_question,
expected_question,
)
| from django.test import TestCase
from .. import managers
class FormSerializationTest(TestCase):
manager = managers.FormManager
fixtures = [
'wizard_builder_data',
]
expected_data = [{
'descriptive_text': 'answer wisely',
'field_id': 'question_2',
'id': 2,
'page': 2,
'position': 0,
'question_text': 'do androids dream of electric sheep?',
'text': 'do androids dream of electric sheep?',
'type': 'singlelinetext',
'choices': [],
}]
@classmethod
def setUpClass(cls):
super().setUpClass()
form = cls.manager.get_form_models()[1]
cls.actual_data = form.serialized
def test_same_size(self):
actual_data = self.actual_data
expected_data = self.expected_data
self.assertEqual(
len(actual_data),
len(expected_data),
)
def test_same_questions(self):
actual_data = self.actual_data
expected_data = self.expected_data
for index, expected_question in enumerate(expected_data):
actual_question = actual_data[index]
self.assertEqual(
actual_question,
expected_question,
)
| agpl-3.0 | Python |
9a8fd944fb78d582f06d7165f097c1e54cb870dc | Add a mixin for calling full_clean() on save() | ojousima/asylum,rambo/asylum,HelsinkiHacklab/asylum,ojousima/asylum,hacklab-fi/asylum,hacklab-fi/asylum,jautero/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,jautero/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,ojousima/asylum,rambo/asylum,ojousima/asylum | project/asylum/mixins.py | project/asylum/mixins.py | from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
| from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
| mit | Python |
9100bbba7936c4944bae91062670d199bcfe9634 | change the input url | elixirhub/events-portal-scraping-scripts | ScheduleUpdateData.py | ScheduleUpdateData.py | __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import EventsPortal
import sys
import time
import logging
def logger():
"""
Function that initialises logging system
"""
global logger
# create logger with 'syncsolr'
logger = logging.getLogger('scheduleAddData')
logger.setLevel(logging.DEBUG)
# specifies the lowest severity that will be dispatched to the appropriate destination
# create file handler which logs even debug messages
fh = logging.FileHandler('scheduleUpdateData.log')
# fh.setLevel(logging.WARN)
# create console handler and set level to debug
ch = logging.StreamHandler()
# StreamHandler instances send messages to streams
# ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
logger.addHandler(fh)
def scheduleUpdateSolr(sourceUrl,patternUrl,solrUrl):
"""
"""
logger()
logger.info('***Start updating every 12 hours***')
sched = BlockingScheduler()
sched.add_job(EventsPortal.updateSolr, 'interval', hours=12, args=[sourceUrl,patternUrl,solrUrl])
sched.start()
try:
# Keeps the main thread alive.
while True:
time.sleep(20)
except (KeyboardInterrupt, SystemExit):
logger.error('Can not schedule add data to solr \n%s' % str(sys.exc_info()))
if __name__ == '__main__':
scheduleUpdateSolr(
# "http://bioevents-portal.org/events/upcoming?state=published&field_type_tid=All",
# "http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents.pro/events?combine=&state=published&field_type_tid=All",
"http://bioevents.pro/events",
"139.162.217.53:8983/solr/eventsportal/"
# "localhost:8983/solr/event_portal"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import EventsPortal
import sys
import time
import logging
def logger():
"""
Function that initialises logging system
"""
global logger
# create logger with 'syncsolr'
logger = logging.getLogger('scheduleAddData')
logger.setLevel(logging.DEBUG)
# specifies the lowest severity that will be dispatched to the appropriate destination
# create file handler which logs even debug messages
fh = logging.FileHandler('scheduleUpdateData.log')
# fh.setLevel(logging.WARN)
# create console handler and set level to debug
ch = logging.StreamHandler()
# StreamHandler instances send messages to streams
# ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
logger.addHandler(fh)
def scheduleUpdateSolr(sourceUrl,patternUrl,solrUrl):
"""
"""
logger()
logger.info('***Start updating every 12 hours***')
sched = BlockingScheduler()
sched.add_job(EventsPortal.updateSolr, 'interval', hours=12, args=[sourceUrl,patternUrl,solrUrl])
sched.start()
try:
# Keeps the main thread alive.
while True:
time.sleep(20)
except (KeyboardInterrupt, SystemExit):
logger.error('Can not schedule add data to solr \n%s' % str(sys.exc_info()))
if __name__ == '__main__':
scheduleUpdateSolr(
"http://bioevents-portal.org/events/upcoming?state=published&field_type_tid=All",
# "http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents-portal.org/events",
"139.162.217.53:8983/solr/eventsportal/"
# "localhost:8983/solr/event_portal"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| mit | Python |
b492c988a8b51214b746cceb99bda0f046a550fd | Use numpy | rezoo/chainer,okuta/chainer,cupy/cupy,niboshi/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,hvy/chainer,kashif/chainer,okuta/chainer,tkerola/chainer,cupy/cupy,hvy/chainer,ktnyt/chainer,okuta/chainer,delta2323/chainer,benob/chainer,ktnyt/chainer,niboshi/chainer,kikusu/chainer,keisuke-umezawa/chainer,wkentaro/chainer,ktnyt/chainer,okuta/chainer,ysekky/chainer,niboshi/chainer,hvy/chainer,niboshi/chainer,keisuke-umezawa/chainer,jnishi/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,cupy/cupy,ronekko/chainer,pfnet/chainer,wkentaro/chainer,chainer/chainer,cupy/cupy,hvy/chainer,kiyukuta/chainer,chainer/chainer,wkentaro/chainer,jnishi/chainer,benob/chainer,kikusu/chainer,keisuke-umezawa/chainer,anaruse/chainer,chainer/chainer,aonotas/chainer | chainer/functions/array/permutate.py | chainer/functions/array/permutate.py | import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _reverse_indices(indices):
r = numpy.empty(len(indices), 'i')
for i, ind in enumerate(indices):
r[ind] = i
return r
class Permutate(function.Function):
def __init__(self, indices, axis=0, rev=False):
self.indices = indices
self.axis = axis
self.rev = rev
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
x_type = in_types[0]
if self.axis < 0:
type_check.expect(x_type.ndim >= -self.axis)
else:
type_check.expect(x_type.ndim > self.axis)
type_check.expect(x_type.shape[self.axis] == len(self.indices))
def _permutate(self, x, rev):
xp = cuda.get_array_module(x)
if rev:
indices = _reverse_indices(self.indices)
else:
indices = self.indices
if xp is not numpy:
indices = xp.array(indices, 'i')
return xp.take(x, indices, axis=self.axis)
def forward(self, inputs):
x = inputs[0]
return self._permutate(x, self.rev),
def backward(self, inputs, grads):
g = grads[0]
return self._permutate(g, not self.rev),
def permutate(x, indices, axis=0, rev=False):
return Permutate(indices, axis=axis, rev=rev)(x)
| import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _reverse_indices(indices):
r = numpy.empty(len(indices), 'i')
for i, ind in enumerate(indices):
r[ind] = i
return r
class Permutate(function.Function):
def __init__(self, indices, axis=0, rev=False):
self.indices = indices
self.axis = axis
self.rev = rev
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
x_type = in_types[0]
if self.axis < 0:
type_check.expect(x_type.ndim >= -self.axis)
else:
type_check.expect(x_type.ndim > self.axis)
type_check.expect(x_type.shape[self.axis] == len(self.indices))
def _permutate(self, x, rev):
xp = cuda.get_array_module(x)
if rev:
indices = _reverse_indices(self.indices)
else:
indices = self.indices
if xp is cuda.cupy:
indices = xp.array(indices, 'i')
return xp.take(x, indices, axis=self.axis)
def forward(self, inputs):
x = inputs[0]
return self._permutate(x, self.rev),
def backward(self, inputs, grads):
g = grads[0]
return self._permutate(g, not self.rev),
def permutate(x, indices, axis=0, rev=False):
return Permutate(indices, axis=axis, rev=rev)(x)
| mit | Python |
a7686d673a52ac1f00b71f4b1e55fc9735003073 | Make harmonic oscillator in 2D work | WaveBlocks/WaveBlocksND,WaveBlocks/WaveBlocksND | examples/harmonic_oscillators/harmonic_2D_f.py | examples/harmonic_oscillators/harmonic_2D_f.py | algorithm = "fourier"
T = 12
dt = 0.01
dimension = 2
ncomponents = 1
eps = 0.1
potential = "quadratic_2d"
# The grid of our simulation domain
limits = [(-6.283185307179586, 6.283185307179586), (-6.283185307179586, 6.283185307179586)]
#number_nodes = [512, 512]
number_nodes = [2048, 2048]
# The parameter set of the initial wavepacket
Q = [[1.0, 0.0],
[0.0, 1.0]]
P = [[1.0j, 0.0 ],
[0.0, 1.0j]]
q = [[-3.0],
[ 0.0]]
p = [[0.0],
[0.5]]
S = [[0.0]]
# What it takes to specify a wavepacket!
wp0 = {
"type" : "HagedornWavepacket",
"dimension" : 2,
"ncomponents": 1,
"eps" : 0.1,
"Pi" : [q,p,Q,P,S],
"basis_shapes" : [{
"type" : "HyperbolicCutShape",
"K" : 4,
"dimension" : 2
}],
"coefficients" : [[ ((0,0), 1.0) ]],
"quadrature" : {
"type" : "HomogeneousQuadrature",
'qr': {
'type': 'TensorProductQR',
'dimension': 2,
'qr_rules': [{'dimension': 1, 'order': 8, 'type': 'GaussHermiteQR'},
{'dimension': 1, 'order': 8, 'type': 'GaussHermiteQR'}],
}
}
}
# Which wavepackets are initial values
initvals = [ wp0 ]
# How often do we write data to disk
write_nth = 5
| algorithm = "fourier"
T = 12
dt = 0.01
dimension = 2
ncomponents = 1
eps = 0.1
potential = "quadratic_2d"
# The grid of our simulation domain
limits = [(-6.283185307179586, 6.283185307179586), (-6.283185307179586, 6.283185307179586)]
number_nodes = [512, 512]
# The parameter set of the initial wavepacket
Q = [[1.0, 0.0],
[0.0, 1.0]]
P = [[1.0j, 0.0 ],
[0.0, 1.0j]]
q = [[-3.0],
[ 0.0]]
p = [[0.0],
[0.5]]
S = [[0.0]]
# What it takes to specify a wavepacket!
wp0 = {
"type" : "HagedornWavepacket",
"dimension" : 2,
"ncomponents": 1,
"eps" : 0.1,
"Pi" : [q,p,Q,P,S],
"basis_shapes" : [{
"type" : "HyperbolicCutShape",
"K" : 4,
"dimension" : 2
}],
"coefficients" : [[ ((0,0), 1.0) ]],
"quadrature" : {
"type" : "HomogeneousQuadrature",
'qr': {
'type': 'TensorProductQR',
'dimension': 2,
'qr_rules': [{'dimension': 1, 'order': 8, 'type': 'GaussHermiteQR'},
{'dimension': 1, 'order': 8, 'type': 'GaussHermiteQR'}],
}
}
}
# Which wavepackets are initial values
initvals = [ wp0 ]
# How often do we write data to disk
# WARNING: Produces about 1GB of data!
write_nth = 5
| bsd-3-clause | Python |
39f7530f9082062abdd530255ab34d642097b605 | Update ptvsd version number for 2.2 release. | bolabola/PTVS,christer155/PTVS,gomiero/PTVS,DEVSENSE/PTVS,mlorbetske/PTVS,ChinaQuants/PTVS,christer155/PTVS,dut3062796s/PTVS,Microsoft/PTVS,huguesv/PTVS,msunardi/PTVS,fivejjs/PTVS,alanch-ms/PTVS,Habatchii/PTVS,Microsoft/PTVS,denfromufa/PTVS,fjxhkj/PTVS,crwilcox/PTVS,MetSystem/PTVS,bolabola/PTVS,denfromufa/PTVS,modulexcite/PTVS,denfromufa/PTVS,alanch-ms/PTVS,xNUTs/PTVS,DinoV/PTVS,Habatchii/PTVS,huguesv/PTVS,msunardi/PTVS,MetSystem/PTVS,MetSystem/PTVS,DEVSENSE/PTVS,alanch-ms/PTVS,zooba/PTVS,denfromufa/PTVS,crwilcox/PTVS,crwilcox/PTVS,int19h/PTVS,christer155/PTVS,jkorell/PTVS,dut3062796s/PTVS,Microsoft/PTVS,DEVSENSE/PTVS,modulexcite/PTVS,Habatchii/PTVS,xNUTs/PTVS,alanch-ms/PTVS,int19h/PTVS,DinoV/PTVS,Habatchii/PTVS,zooba/PTVS,MetSystem/PTVS,dut3062796s/PTVS,bolabola/PTVS,ChinaQuants/PTVS,alanch-ms/PTVS,xNUTs/PTVS,DinoV/PTVS,christer155/PTVS,bolabola/PTVS,modulexcite/PTVS,zooba/PTVS,DEVSENSE/PTVS,jkorell/PTVS,mlorbetske/PTVS,huguesv/PTVS,ChinaQuants/PTVS,DinoV/PTVS,MetSystem/PTVS,DinoV/PTVS,mlorbetske/PTVS,mlorbetske/PTVS,jkorell/PTVS,huguesv/PTVS,gomiero/PTVS,Habatchii/PTVS,fjxhkj/PTVS,crwilcox/PTVS,DEVSENSE/PTVS,ChinaQuants/PTVS,xNUTs/PTVS,xNUTs/PTVS,mlorbetske/PTVS,fjxhkj/PTVS,msunardi/PTVS,modulexcite/PTVS,DinoV/PTVS,int19h/PTVS,fivejjs/PTVS,int19h/PTVS,zooba/PTVS,dut3062796s/PTVS,msunardi/PTVS,xNUTs/PTVS,fjxhkj/PTVS,gomiero/PTVS,msunardi/PTVS,Microsoft/PTVS,fjxhkj/PTVS,gomiero/PTVS,christer155/PTVS,fivejjs/PTVS,DEVSENSE/PTVS,mlorbetske/PTVS,fjxhkj/PTVS,MetSystem/PTVS,crwilcox/PTVS,ChinaQuants/PTVS,gomiero/PTVS,Microsoft/PTVS,jkorell/PTVS,denfromufa/PTVS,alanch-ms/PTVS,jkorell/PTVS,modulexcite/PTVS,Habatchii/PTVS,zooba/PTVS,jkorell/PTVS,Microsoft/PTVS,fivejjs/PTVS,bolabola/PTVS,fivejjs/PTVS,modulexcite/PTVS,christer155/PTVS,fivejjs/PTVS,dut3062796s/PTVS,crwilcox/PTVS,gomiero/PTVS,int19h/PTVS,dut3062796s/PTVS,ChinaQuants/PTVS,msunardi/PTVS,denfromufa/PTVS,int19h/PTVS,bolabola/PTVS,zooba/PTVS,huguesv/PTVS,huguesv/PTVS | Python/Product/PythonTools/ptvsd/setup.py | Python/Product/PythonTools/ptvsd/setup.py | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
| #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
| apache-2.0 | Python |
aa72ac13733badef0e679bea22d8691d4ea05b88 | Rebase off of develop | iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api | whats_fresh/whats_fresh_api/views/data_entry/preparations.py | whats_fresh/whats_fresh_api/views/data_entry/preparations.py | from django.http import (HttpResponse,
HttpResponseNotFound,
HttpResponseServerError)
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from whats_fresh_api.models import *
from whats_fresh_api.forms import *
from whats_fresh_api.functions import *
import json
def preparation_list(request):
preparations = Preparation.objects.all()
preparations_list = []
for preparation in preparations:
preparation_data = {}
preparation_data['name'] = preparation.name
preparation_data['description'] = preparation.description
preparation_data['link'] = reverse('edit-preparation', kwargs={'id': preparation.id})
if len(preparation_data['description']) > 100:
preparation_data['description'] = preparation_data['description'][:100] + "..."
preparations_list.append(preparation_data)
return render(request, 'list.html', {
'new_url': reverse('new-preparation'),
'new_text': "New preparation",
'title': "All preparations",
'item_classification': "preparation",
'item_list': preparations_list,
})
def preparation(request, id=None):
if request.method == 'POST':
post_data = request.POST.copy()
errors = []
preparation_form = PreparationForm(post_data)
if preparation_form.is_valid() and not errors:
preparation = Preparation.objects.create(**preparation_form.cleaned_data)
preparation.save()
return HttpResponseRedirect(reverse('entry-list-preparations'))
else:
pass
else:
preparation_form = PreparationForm()
title = "New Preparation"
post_url = reverse('new-preparation')
message = "Fields marked with bold are required."
return render(request, 'preparation.html', {
'parent_url': reverse('entry-list-preparations'),
'parent_text': 'Preparation List',
'message': message,
'title': title,
'post_url': post_url,
'errors': [],
'preparation_form': preparation_form,
})
| from django.http import (HttpResponse,
HttpResponseNotFound,
HttpResponseServerError)
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from whats_fresh_api.models import *
from whats_fresh_api.forms import *
from whats_fresh_api.functions import *
import json
def preparation_list(request):
preparations = Preparation.objects.all()
preparations_list = []
for preparation in preparations:
preparation_data = {}
preparation_data['name'] = preparation.name
preparation_data['description'] = preparation.description
preparation_data['link'] = reverse('edit-preparation', kwargs={'id': preparation.id})
if len(preparation_data['description']) > 100:
preparation_data['description'] = preparation_data['description'][:100] + "..."
preparations_list.append(preparation_data)
return render(request, 'list.html', {
'new_url': reverse('new-preparation'),
'new_text': "New preparation",
'title': "All preparations",
'item_classification': "preparation",
'item_list': preparations_list,
})
def preparation(request, id=None):
if request.method == 'POST':
post_data = request.POST.copy()
errors = []
preparation_form = PreparationForm(post_data)
if preparation_form.is_valid() and not errors:
preparation = Preparation.objects.create(**preparation_form.cleaned_data)
preparation.save()
return HttpResponseRedirect(reverse('preparations-list-edit'))
else:
pass
else:
preparation_form = PreparationForm()
title = "New Preparation"
post_url = reverse('new-preparation')
message = "Fields marked with bold are required."
return render(request, 'preparation.html', {
'parent_url': reverse('preparations-list-edit'),
'parent_text': 'Preparation List',
'message': message,
'title': title,
'post_url': post_url,
'errors': [],
'preparation_form': preparation_form,
})
| apache-2.0 | Python |
2bf9589a32cb0a8be157e09e789799608b8f386f | update dependency | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/icds_reports/migrations/0022_fix_aggregation.py | custom/icds_reports/migrations/0022_fix_aggregation.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates'))
class Migration(migrations.Migration):
dependencies = [
('icds_reports',
'0021_remove_categories'),
]
operations = [
migrator.get_migration('create_functions.sql'),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates'))
class Migration(migrations.Migration):
dependencies = [
('icds_reports',
'0020_combine_person_query'),
]
operations = [
migrator.get_migration('create_functions.sql'),
]
| bsd-3-clause | Python |
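The RawSQLMigration helper above resolves .sql templates relative to an app directory and wraps them in migration operations. A minimal sketch of how such a helper can be built on Django's migrations.RunSQL; the class name and layout are assumptions, not the corehq implementation:

import os

from django.db import migrations


class SqlFileMigration(object):
    """Hypothetical stand-in for corehq's RawSQLMigration helper."""

    def __init__(self, path_parts):
        # e.g. ('custom', 'icds_reports', 'migrations', 'sql_templates')
        self.base_dir = os.path.join(*path_parts)

    def get_migration(self, filename):
        # Read the SQL template and return an operation that replays it.
        with open(os.path.join(self.base_dir, filename)) as f:
            return migrations.RunSQL(f.read())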
7eab91efc4ee7f8fbcfe768440ef88c077989e66 | Add a test for `svo_fps` query parameter names | imbasimba/astroquery,imbasimba/astroquery | astroquery/svo_fps/tests/test_svo_fps_remote.py | astroquery/svo_fps/tests/test_svo_fps_remote.py | from io import BytesIO
import pytest
from astropy import units as u
from astropy.io.votable import parse
from astroquery.svo_fps import conf, SvoFps
from astroquery.svo_fps.core import QUERY_PARAMETERS
@pytest.mark.remote_data
class TestSvoFpsClass:
def test_get_filter_index(self):
table = SvoFps.get_filter_index(12_000*u.angstrom, 12_100*u.angstrom)
# Check if column for Filter ID (named 'filterID') exists in table
assert 'filterID' in table.colnames
@pytest.mark.parametrize('test_filter_id',
['HST/NICMOS1.F113N', 'HST/WFPC2-pc.f218w'])
def test_get_transmission_data(self, test_filter_id):
table = SvoFps.get_transmission_data(test_filter_id)
# Check if data is downloaded properly, with > 0 rows
assert len(table) > 0
@pytest.mark.parametrize('test_facility, test_instrument',
[('HST', 'WFPC2'), ('Keck', None)])
def test_get_filter_list(self, test_facility, test_instrument):
table = SvoFps.get_filter_list(test_facility, test_instrument)
# Check if column for Filter ID (named 'filterID') exists in table
assert 'filterID' in table.colnames
def test_query_parameter_names(self):
# Checks if `QUERY_PARAMETERS` is up to date.
query = {"FORMAT": "metadata"}
response = BytesIO(
SvoFps._request(
"GET", conf.base_url, params=query, timeout=conf.timeout, cache=False
).content
)
params = {p.name.split(":")[1] for p in parse(response).resources[0].params}
# All valid parameters should be present in `QUERY_PARAMETERS`.
assert not params.difference(QUERY_PARAMETERS)
# Some valid parameter names are not in `params`.
for p in QUERY_PARAMETERS.difference(params):
# `QUERY_PARAMETERS` also contains names without "_min" or "_max" ending
# because "Param_min=a&Param_max=b" can be replaced with "Param=a/b".
if p + "_min" not in params:
                # There are a few extra parameters we didn't get from the server.
assert p in {"VERB", "FORMAT", "PhotCalID", "ID"}
| import pytest
from astropy import units as u
from ..core import SvoFps
@pytest.mark.remote_data
class TestSvoFpsClass:
def test_get_filter_index(self):
table = SvoFps.get_filter_index(12_000*u.angstrom, 12_100*u.angstrom)
# Check if column for Filter ID (named 'filterID') exists in table
assert 'filterID' in table.colnames
@pytest.mark.parametrize('test_filter_id',
['HST/NICMOS1.F113N', 'HST/WFPC2-pc.f218w'])
def test_get_transmission_data(self, test_filter_id):
table = SvoFps.get_transmission_data(test_filter_id)
# Check if data is downloaded properly, with > 0 rows
assert len(table) > 0
@pytest.mark.parametrize('test_facility, test_instrument',
[('HST', 'WFPC2'), ('Keck', None)])
def test_get_filter_list(self, test_facility, test_instrument):
table = SvoFps.get_filter_list(test_facility, test_instrument)
# Check if column for Filter ID (named 'filterID') exists in table
assert 'filterID' in table.colnames
| bsd-3-clause | Python |
6794bb897e7e8730b1c3ab2fc6b856865887ac8b | Use a simpler trace logger that does not prepend timestamps | sassoftware/rbm,sassoftware/rbm,sassoftware/rbm | scripts/upsrv_schema.py | scripts/upsrv_schema.py | #!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| #!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| apache-2.0 | Python |
5b50c1067673a0284a1bf47f7f7eabd780a95b14 | Complete TODO | conwetlab/ckanext-privatedatasets,conwetlab/ckanext-privatedatasets,conwetlab/ckanext-privatedatasets,conwetlab/ckanext-privatedatasets | ckanext/privatedatasets/converters_validators.py | ckanext/privatedatasets/converters_validators.py | import db
from ckan.plugins import toolkit
from ckan.common import _
from itertools import count
def private_datasets_metadata_checker(key, data, errors, context):
dataset_id = data.get(('id',))
private_val = data.get(('private',))
# If the private field is not included in the data dict, we must check the current value
if not private_val and dataset_id:
dataset_dict = toolkit.get_action('package_show')({'ignore_auth': True}, {'id': dataset_id})
private_val = dataset_dict.get('private')
private = private_val is True if isinstance(private_val, bool) else private_val == "True"
metadata_value = data[key]
    # If allowed users are included and the dataset is not private, an error will be raised.
if metadata_value != '' and not private:
errors[key].append(_('This field is only valid when you create a private dataset'))
def allowed_users_convert(key, data, errors, context):
if isinstance(data[key], basestring):
allowed_users = [allowed_user for allowed_user in data[key].split(',')]
else:
allowed_users = data[key]
current_index = max([int(k[1]) for k in data.keys() if len(k) == 2 and k[0] == 'allowed_users'] + [-1])
for num, allowed_user in zip(count(current_index + 1), allowed_users):
data[('allowed_users', num)] = allowed_user
def get_allowed_users(key, data, errors, context):
pkg_id = data[('id',)]
if db.package_allowed_users_table is None:
db.init_db(context['model'])
users = db.AllowedUser.get(package_id=pkg_id)
counter = 0
for user in users:
data[(key[0], counter)] = user.user_name
counter += 1
| import db
from ckan.common import _
from itertools import count
def private_datasets_metadata_checker(key, data, errors, context):
    # TODO: In some cases, we will need to retrieve all the dataset information if it isn't present...
private_val = data.get(('private',))
private = private_val is True if isinstance(private_val, bool) else private_val == "True"
metadata_value = data[key]
# If allowed users are included and the dataset is not private outside and organization, an error will be raised.
if metadata_value != '' and not private:
errors[key].append(_('This field is only valid when you create a private dataset outside an organization'))
def allowed_users_convert(key, data, errors, context):
if isinstance(data[key], basestring):
allowed_users = [allowed_user for allowed_user in data[key].split(',')]
else:
allowed_users = data[key]
current_index = max([int(k[1]) for k in data.keys() if len(k) == 2 and k[0] == 'allowed_users'] + [-1])
for num, allowed_user in zip(count(current_index + 1), allowed_users):
data[('allowed_users', num)] = allowed_user
def get_allowed_users(key, data, errors, context):
pkg_id = data[('id',)]
if db.package_allowed_users_table is None:
db.init_db(context['model'])
users = db.AllowedUser.get(package_id=pkg_id)
counter = 0
for user in users:
data[(key[0], counter)] = user.user_name
counter += 1
| agpl-3.0 | Python |
f542d2856f2df723a580f93125a79b97fca96098 | change save filename | majorika/crawlers,majorika/crawlers,teampopong/crawlers,lexifdev/crawlers,lexifdev/crawlers,teampopong/crawlers | committee_list/crawlers/committee_list/parser.py | committee_list/crawlers/committee_list/parser.py | #! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import os
import urllib2
import html5lib
def get_webpage(f):
page = html5lib.HTMLParser(\
tree=html5lib.treebuilders.getTreeBuilder("lxml"),\
namespaceHTMLElements=False)
p = page.parse(f)
return p
def get_elements(p, x):
elems = p.xpath(x)
e = [list(elem.itertext()) for elem in elems]
return e
def get_committee_code(inf, x):
with open(inf, 'r') as f:
p = get_webpage(f)
e = p.xpath(x)[0:]
codes = [i[i.index('(')+1:i.index(')')] for i in e]
return codes
def get_committee_name(inf, x):
with open(inf, 'r') as f:
p = get_webpage(f)
e = p.xpath(x)[0:]
return e
def get_committee_list(inf, x):
with open(inf, 'r') as f:
p = get_webpage(f)
e = get_elements(p, x)
return ['"%s","%s","%s","%s","%s"' % (i[1].strip(), i[3].strip(), i[5].strip(), i[7].strip(), i[10].strip()) for i in e[1:]]
def crawl(url, directory, filename):
r = urllib2.urlopen(url)
with open('%s/%s.html' % (directory, filename), 'w') as f:
f.write(r.read())
print '%s to %s/%s.html' % (filename, directory, filename)
def parse(directory, filename, rng=None):
url = 'http://www.assembly.go.kr/assm/assmCommittee/committeePopupAddrView.do?dept_cd=%s'
x = '//a[contains(@onclick, "jsDeptAddrPopup")]/@onclick'
x_name = '//h4[@class="title02"]/node()'
x2 = '//table/*/tr'
inf = '%s/1.html' % directory
committee_codes = get_committee_code(inf, x)
committee_names = get_committee_name(inf, x_name)
for p in committee_codes:
crawl(url % p, directory, p)
index = 0
for p in committee_codes:
n = ('%s' % filename).replace(".csv",'_%s.csv' % committee_names[index])
with open(n, 'wa') as f:
inf = '%s/%s.html' % (directory, p)
f.write('"title","political party","name","phone","email"\n')
f.write('\n'.join(get_committee_list(inf, x2)).encode('utf-8'))
f.write('\n')
print 'parsed %s' % inf
print 'Results written to ' + n
index = index + 1 | #! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import os
import urllib2
import html5lib
def get_webpage(f):
page = html5lib.HTMLParser(\
tree=html5lib.treebuilders.getTreeBuilder("lxml"),\
namespaceHTMLElements=False)
p = page.parse(f)
return p
def get_elements(p, x):
elems = p.xpath(x)
e = [list(elem.itertext()) for elem in elems]
return e
def get_committee_code(inf, x):
with open(inf, 'r') as f:
p = get_webpage(f)
e = p.xpath(x)[0:]
codes = [i[i.index('(')+1:i.index(')')] for i in e]
return codes
def get_committee_list(inf, x):
with open(inf, 'r') as f:
p = get_webpage(f)
e = get_elements(p, x)
return ['"%s","%s","%s","%s","%s"' % (i[1].strip(), i[3].strip(), i[5].strip(), i[7].strip(), i[10].strip()) for i in e[1:]]
def crawl(url, directory, filename):
r = urllib2.urlopen(url)
with open('%s/%s.html' % (directory, filename), 'w') as f:
f.write(r.read())
print '%s to %s/%s.html' % (filename, directory, filename)
def parse(directory, filename, rng=None):
url = 'http://www.assembly.go.kr/assm/assmCommittee/committeePopupAddrView.do?dept_cd=%s'
x = '//a[contains(@onclick, "jsDeptAddrPopup")]/@onclick'
x2 = '//table/*/tr'
inf = '%s/1.html' % directory
committe_codes = get_committee_code(inf, x)
for p in committe_codes:
crawl(url % p, directory, p)
for p in committe_codes:
n = ('%s' % filename).replace(".csv",'_%s.csv' % p)
with open(n, 'wa') as f:
inf = '%s/%s.html' % (directory, p)
f.write('"title","political party","name","phone","email"\n')
f.write('\n'.join(get_committee_list(inf, x2)).encode('utf-8'))
f.write('\n')
print 'parsed %s' % inf
print 'Results written to ' + n | agpl-3.0 | Python |
1a288b14c047f5ec823017b33af23e3f51fdfb5e | fix minor bug in pageobjects/__init__ This if block wasn't executing the class defined _object_name as None, but it should. | SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI | cumulusci/robotframework/pageobjects/__init__.py | cumulusci/robotframework/pageobjects/__init__.py | from robot.libraries.BuiltIn import BuiltIn
from .PageObjects import PageObjects # noqa: F401
from .baseobjects import BasePage, ListingPage, HomePage, DetailPage # noqa: F401
def pageobject(page_type, object_name=None):
"""A decorator to designate a class as a page object"""
BuiltIn().log("importing page object {} {}".format(page_type, object_name), "DEBUG")
def wrapper(cls):
key = (page_type, object_name)
PageObjects.registry[key] = cls
if getattr(cls, "_object_name", None) is None:
cls._object_name = object_name
return cls
return wrapper
| from robot.libraries.BuiltIn import BuiltIn
from .PageObjects import PageObjects # noqa: F401
from .baseobjects import BasePage, ListingPage, HomePage, DetailPage # noqa: F401
def pageobject(page_type, object_name=None):
"""A decorator to designate a class as a page object"""
BuiltIn().log("importing page object {} {}".format(page_type, object_name), "DEBUG")
def wrapper(cls):
key = (page_type, object_name)
PageObjects.registry[key] = cls
if not hasattr(cls, "_object_name"):
cls._object_name = object_name
return cls
return wrapper
| bsd-3-clause | Python |
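A usage sketch for the decorator above; the object name and locator are hypothetical, but the registration path matches the wrapper: the class is stored in PageObjects.registry under the (page_type, object_name) key and inherits _object_name when it does not define one:

from cumulusci.robotframework.pageobjects import DetailPage, pageobject


@pageobject("Detail", "CustomObject__c")
class CustomObjectDetailPage(DetailPage):
    def _is_current_page(self):
        # Hypothetical check; any page-specific assertion works here.
        self.selenium.location_should_contain("/lightning/r/")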
fbd57a4b62772368182425ae15226d78de0ce13e | Reset fix | edx/edx-ora,edx/edx-ora,edx/edx-ora,edx/edx-ora | controller/management/commands/remove_expired_subs.py | controller/management/commands/remove_expired_subs.py | from django.core.management.base import BaseCommand
from django.conf import settings
from django.utils import timezone
#from http://jamesmckay.net/2009/03/django-custom-managepy-commands-not-committing-transactions/
#Fix issue where db data in manage.py commands is not refreshed at all once they start running
from django.db import transaction
import requests
import urlparse
import time
import json
import logging
from statsd import statsd
import controller.util as util
from controller.models import Submission, SubmissionState
import controller.expire_submissions as expire_submissions
from staff_grading import staff_grading_util
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = "<queue_name>"
help = "Pull items from given queues and send to grading controller"
def handle(self, *args, **options):
flag = True
log.debug("Starting check for expired subs.")
while flag:
try:
transaction.commit_unless_managed()
subs = Submission.objects.all()
expire_submissions.reset_timed_out_submissions(subs)
expired_list = expire_submissions.get_submissions_that_have_expired(subs)
if len(expired_list) > 0:
success = expire_submissions.finalize_expired_submissions(expired_list)
statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
tags=["success:{0}".format(success)])
expire_submissions.reset_in_subs_to_ml(subs)
expire_submissions.reset_subs_in_basic_check(subs)
expire_submissions.reset_ml_subs_to_in()
except Exception as err:
log.error("Could not get submissions to expire! Error: {0}".format(err))
statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
tags=["success:Exception"])
transaction.commit_unless_managed()
time.sleep(settings.TIME_BETWEEN_EXPIRED_CHECKS) | from django.core.management.base import BaseCommand
from django.conf import settings
from django.utils import timezone
#from http://jamesmckay.net/2009/03/django-custom-managepy-commands-not-committing-transactions/
#Fix issue where db data in manage.py commands is not refreshed at all once they start running
from django.db import transaction
import requests
import urlparse
import time
import json
import logging
from statsd import statsd
import controller.util as util
from controller.models import Submission, SubmissionState
import controller.expire_submissions as expire_submissions
from staff_grading import staff_grading_util
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = "<queue_name>"
help = "Pull items from given queues and send to grading controller"
def handle(self, *args, **options):
flag = True
log.debug("Starting check for expired subs.")
while flag:
try:
transaction.commit_unless_managed()
subs = Submission.objects.all()
expire_submissions.reset_timed_out_submissions(subs)
expired_list = expire_submissions.get_submissions_that_have_expired(subs)
if len(expired_list) > 0:
success = expire_submissions.finalize_expired_submissions(expired_list)
statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
tags=["success:{0}".format(success)])
expire_submissions.reset_in_subs_to_ml(subs)
expire_submissions.reset_subs_in_basic_check(subs)
except Exception as err:
log.error("Could not get submissions to expire! Error: {0}".format(err))
statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
tags=["success:Exception"])
transaction.commit_unless_managed()
time.sleep(settings.TIME_BETWEEN_EXPIRED_CHECKS) | agpl-3.0 | Python |
7a890ef8edc02870219aa85a9a3295674a7a50d3 | Update getClanUserJSONs.py | adherrling/destiny-gotg,adherrling/destiny-gotg | Leaderboard/getClanUserJSONs.py | Leaderboard/getClanUserJSONs.py | #!/usr/bin/python
import json
import requests
from getHeader import getHeader
def retrieveClanUserJSON():
morePages = True
pageCounter = 1
while morePages:
clan_url = "https://bungie.net/Platform/Group//Members/?lc=en&fmt=true¤tPage="+str(pageCounter)+"&platformType=2"
print "Connecting to Bungie: " + clan_url
print "Fetching page " + str(pageCounter) + " of users."
res = requests.get(clan_url, headers=getHeader())
data = res.json()
error_stat = data['ErrorStatus']
print "Error Stats: " + error_stat
#Stores each page of clan user responses as a different .json
with open('./Clan/clanUsersPage'+str(pageCounter)+'.json','w') as f:
json.dump(data,f)
hasMore = res.json()['Response']['hasMore']
morePages = hasMore
pageCounter+=1
def getClanUserJSONs():
retrieveClanUserJSON()
if __name__ == "__main__":
getClanUserJSONs()
| #!/usr/bin/python
import json
import requests
from getHeader import getHeader
def retrieveClanUserJSON():
morePages = True
pageCounter = 1
while morePages:
clan_url = "https://bungie.net/Platform/Group/1407546/Members/?lc=en&fmt=true¤tPage="+str(pageCounter)+"&platformType=2"
print "Connecting to Bungie: " + clan_url
print "Fetching page " + str(pageCounter) + " of users."
res = requests.get(clan_url, headers=getHeader())
data = res.json()
error_stat = data['ErrorStatus']
print "Error Stats: " + error_stat
#Stores each page of clan user responses as a different .json
with open('./Clan/clanUsersPage'+str(pageCounter)+'.json','w') as f:
json.dump(data,f)
hasMore = res.json()['Response']['hasMore']
morePages = hasMore
pageCounter+=1
def getClanUserJSONs():
retrieveClanUserJSON()
if __name__ == "__main__":
getClanUserJSONs()
| mit | Python |
84ad17a2d968f47da0f8d22b3a371f51d2f65411 | Update humidity_chart.py | webbhm/OpenAg_MVP_UI,webbhm/OpenAg_MVP_UI,webbhm/OpenAg_MVP_UI | MVP_UI/python/humidity_chart.py | MVP_UI/python/humidity_chart.py | # /usr/bin/env python
import pygal
import requests
import json
#Query the database for data
#Order descending so the last rows are first
#These modifiers get humidity and return only 60 rows
r = requests.get('http://127.0.0.1:5984/mvp_sensor_data/_design/doc/_view/attribute_value?startkey=["humidity",{}]&endkey=["humidity"]&descending=true&limit=60')
#Iterate over the rows and xtract the values and timestamp into Python lists
v_lst = [float(x['value']['value']) for x in r.json()['rows']]
ts_lst = [x['value']['timestamp'] for x in r.json()['rows']]
#Build the chart from the lists
line_chart = pygal.Line()
line_chart.title = 'Humidity'
line_chart.y_title="Percent"
line_chart.x_title="Timestamp (hover over to display)"
# reverse order for proper time sequence
ts_lst.reverse()
line_chart.x_labels = ts_lst
#reverse order for proper time sequence
v_lst.reverse()
line_chart.add('Humidity', v_lst)
#Save the chart as SVG to the web directory
line_chart.render_to_file('/home/pi/MVP_UI/web/humidity_chart.svg')
| import pygal
import requests
import json
#Query the database for data
#Order descending so the last rows are first
#These modifiers get humidity and return only 60 rows
r = requests.get('http://127.0.0.1:5984/mvp_sensor_data/_design/doc/_view/attribute_value?startkey=["humidity",{}]&endkey=["humidity"]&descending=true&limit=60')
#Iterate over the rows and xtract the values and timestamp into Python lists
v_lst = [float(x['value']['value']) for x in r.json()['rows']]
ts_lst = [x['value']['timestamp'] for x in r.json()['rows']]
#Build the chart from the lists
line_chart = pygal.Line()
line_chart.title = 'Humidity'
line_chart.y_title="Percent"
line_chart.x_title="Timestamp (hover over to display)"
# reverse order for proper time sequence
ts_lst.reverse()
line_chart.x_labels = ts_lst
#reverse order for proper time sequence
v_lst.reverse()
line_chart.add('Humidity', v_lst)
#Save the chart as SVG to the web directory
line_chart.render_to_file('/home/pi/MVP_UI/web/humidity_chart.svg')
| mit | Python |
31e4da5e782c29d7d0c893a3fc9af48260c50a3a | Save form data to DB on each step | lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin | src/ansible/views.py | src/ansible/views.py | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
| from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
| bsd-3-clause | Python |
09726db0fc453cb1ebc7b0cf91d7e8dd5917bcf3 | Update wsgi/myflaskapp.py | ghaff/whitewater-gauges,ghaff/whitewater-gauges | wsgi/myflaskapp.py | wsgi/myflaskapp.py | import os
from flask import Flask
from flask import request
import pymongo
import json
from bson import json_util
from bson import objectid
import re
app = Flask(__name__)
#add this so that flask doesn't swallow error messages
app.config['PROPAGATE_EXCEPTIONS'] = True
#a base URL that returns all the parks in the collection (of course in the future we would implement paging)
@app.route("/ws/parks")
def parks():
#setup the connection
conn = pymongo.Connection(os.environ['OPENSHIFT_NOSQL_DB_URL'])
db = conn.parks
#query the DB for all the parkpoints
result = db.parkpoints.find()
#Now turn the results into valid JSON
return str(json.dumps({'results':list(result)},default=json_util.default))
#return a specific park given its mongo _id
@app.route("/ws/parks/park/<parkId>")
def onePark(parkId):
#setup the connection
conn = pymongo.Connection(os.environ['OPENSHIFT_NOSQL_DB_URL'])
db = conn.parks
#query based on the objectid
result = db.parkpoints.find({'_id': objectid.ObjectId(parkId)})
#turn the results into valid JSON
return str(json.dumps({'results' : list(result)},default=json_util.default))
#find parks near a lat and long passed in as query parameters (near?lat=45.5&lon=-82)
@app.route("/ws/parks/near")
def near():
#setup the connection
conn = pymongo.Connection(os.environ['OPENSHIFT_NOSQL_DB_URL'])
db = conn.parks
#get the request parameters
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
#use the request parameters in the query
result = db.parkpoints.find({"pos" : { "$near" : [lon,lat]}})
#turn the results into valid JSON
return str(json.dumps({'results' : list(result)},default=json_util.default))
#find parks with a certain name (use regex) near a lat long pair such as above
@app.route("/ws/parks/name/near/<name>")
def nameNear(name):
#setup the connection
conn = pymongo.Connection(os.environ['OPENSHIFT_NOSQL_DB_URL'])
db = conn.parks
#get the request parameters
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
#compile the regex we want to search for and make it case insensitive
myregex = re.compile(name, re.I)
#use the request parameters in the query along with the regex
result = db.parkpoints.find({"Name" : myregex, "pos" : { "$near" : [lon,lat]}})
#turn the results into valid JSON
return str(json.dumps({'results' : list(result)},default=json_util.default))
@app.route("/test")
def test():
return "<strong>It actually worked</strong>"
if __name__ == "__main__":
app.run()
| from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
| mit | Python |
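The $near queries above only work against a geospatial index on pos; with this driver vintage that is a one-time setup step (a sketch, reusing the connection string from the code above):

import os
import pymongo

conn = pymongo.Connection(os.environ['OPENSHIFT_NOSQL_DB_URL'])
# A 2d index is what makes "$near" queries on "pos" legal.
conn.parks.parkpoints.ensure_index([('pos', pymongo.GEO2D)])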
737234eec4a0c043e064c5c238339a90e52e0dc9 | Revert "Made import relative in __main__.py" | antske/coref_draft | multisieve_coreference/__main__.py | multisieve_coreference/__main__.py | from multisieve_coreference import resolve_coreference
resolve_coreference.main()
| from . import resolve_coreference
resolve_coreference.main()
| apache-2.0 | Python |
03c4323cf13365204540cf9d06ead88a6a3925b7 | work around script reloading bug in redis-py | vpuzzella/sixpack,smokymountains/sixpack,blackskad/sixpack,llonchj/sixpack,smokymountains/sixpack,smokymountains/sixpack,vpuzzella/sixpack,blackskad/sixpack,spjwebster/sixpack,seatgeek/sixpack,blackskad/sixpack,nickveenhof/sixpack,seatgeek/sixpack,blackskad/sixpack,spjwebster/sixpack,seatgeek/sixpack,nickveenhof/sixpack,nickveenhof/sixpack,llonchj/sixpack,vpuzzella/sixpack,seatgeek/sixpack,llonchj/sixpack,llonchj/sixpack,spjwebster/sixpack,vpuzzella/sixpack | sixpack/db.py | sixpack/db.py | import redis
from redis.connection import ConnectionPool, PythonParser
from config import CONFIG as cfg
# Because of a bug (https://github.com/andymccurdy/redis-py/issues/318) with
# script reloading in `redis-py, we need to force the `PythonParser` to prevent
# sixpack from crashing if redis restarts (or scripts are flushed).
pool = ConnectionPool(host=cfg.get('redis_host'),
port=cfg.get('redis_port'),
db=cfg.get('redis_db'),
parser_class=PythonParser)
REDIS = redis.StrictRedis(connection_pool=pool)
DEFAULT_PREFIX = cfg.get('redis_prefix')
def _key(k):
return "{0}:{1}".format(DEFAULT_PREFIX, k)
monotonic_zadd = REDIS.register_script("""
local sequential_id = redis.call('zscore', KEYS[1], ARGV[1])
if not sequential_id then
sequential_id = redis.call('zcard', KEYS[1])
redis.call('zadd', KEYS[1], sequential_id, ARGV[1])
end
return sequential_id
""")
def sequential_id(k, identifier):
"""Map an arbitrary string identifier to a set of sequential ids"""
key = _key(k)
return int(monotonic_zadd(keys=[key], args=[identifier]))
msetbit = REDIS.register_script("""
for index, value in ipairs(KEYS) do
redis.call('setbit', value, ARGV[(index - 1) * 2 + 1], ARGV[(index - 1) * 2 + 2])
end
return redis.status_reply('ok')
""")
| import redis
from config import CONFIG as cfg
REDIS = redis.StrictRedis(host=cfg.get('redis_host'), port=cfg.get('redis_port'), db=cfg.get('redis_db'))
DEFAULT_PREFIX = cfg.get('redis_prefix')
def _key(k):
return "{0}:{1}".format(DEFAULT_PREFIX, k)
monotonic_zadd = REDIS.register_script("""
local sequential_id = redis.call('zscore', KEYS[1], ARGV[1])
if not sequential_id then
sequential_id = redis.call('zcard', KEYS[1])
redis.call('zadd', KEYS[1], sequential_id, ARGV[1])
end
return sequential_id
""")
def sequential_id(k, identifier):
"""Map an arbitrary string identifier to a set of sequential ids"""
key = _key(k)
return int(monotonic_zadd(keys=[key], args=[identifier]))
msetbit = REDIS.register_script("""
for index, value in ipairs(KEYS) do
redis.call('setbit', value, ARGV[(index - 1) * 2 + 1], ARGV[(index - 1) * 2 + 2])
end
return redis.status_reply('ok')
""")
| bsd-2-clause | Python |
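A usage sketch for the two scripts above, assuming a reachable redis and made-up key names: sequential_id maps an arbitrary identifier to a dense integer, and msetbit flips one bit per key in a single round trip, pairing each entry of KEYS with an (offset, value) pair from ARGV:

seq = sequential_id('experiment:participants', 'client-abc123')

# Flip bit `seq` in two bitmaps at once; ARGV is consumed pairwise, so
# key N takes ARGV[2N-1] as the bit offset and ARGV[2N] as the value.
msetbit(keys=[_key('experiment:alt_a'), _key('experiment:conversions')],
        args=[seq, 1, seq, 1])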
b0421ab7cca69862bcacf7922cd1d37276dfdbe3 | convert doctype log settings db.sql calls | yashodhank/frappe,frappe/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,yashodhank/frappe,frappe/frappe,yashodhank/frappe,mhbu50/frappe,almeidapaulopt/frappe,frappe/frappe,mhbu50/frappe,mhbu50/frappe,almeidapaulopt/frappe,mhbu50/frappe | frappe/core/doctype/log_settings/log_settings.py | frappe/core/doctype/log_settings/log_settings.py | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# License: MIT. See LICENSE
import frappe
from frappe import _
from frappe.model.document import Document
class LogSettings(Document):
def clear_logs(self):
self.clear_error_logs()
self.clear_activity_logs()
self.clear_email_queue()
def clear_error_logs(self):
frappe.db.sql(""" DELETE FROM `tabError Log`
WHERE `creation` < (NOW() - INTERVAL '{0}' DAY)
""".format(self.clear_error_log_after))
def clear_activity_logs(self):
from frappe.core.doctype.activity_log.activity_log import clear_activity_logs
clear_activity_logs(days=self.clear_activity_log_after)
def clear_email_queue(self):
from frappe.email.queue import clear_outbox
clear_outbox(days=self.clear_email_queue_after)
def run_log_clean_up():
doc = frappe.get_doc("Log Settings")
doc.clear_logs()
@frappe.whitelist()
def has_unseen_error_log(user):
def _get_response(show_alert=True):
return {
			'show_alert': show_alert,
'message': _("You have unseen {0}").format('<a href="/app/List/Error%20Log/List"> Error Logs </a>')
}
if frappe.get_all("Error Log", filters={"seen": 0}, limit=1):
log_settings = frappe.get_cached_doc('Log Settings')
if log_settings.users_to_notify:
if user in [u.user for u in log_settings.users_to_notify]:
return _get_response()
else:
return _get_response(show_alert=False)
else:
return _get_response() | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# License: MIT. See LICENSE
import frappe
from frappe import _
from frappe.model.document import Document
class LogSettings(Document):
def clear_logs(self):
self.clear_error_logs()
self.clear_activity_logs()
self.clear_email_queue()
def clear_error_logs(self):
frappe.db.sql(""" DELETE FROM `tabError Log`
WHERE `creation` < (NOW() - INTERVAL '{0}' DAY)
""".format(self.clear_error_log_after))
def clear_activity_logs(self):
from frappe.core.doctype.activity_log.activity_log import clear_activity_logs
clear_activity_logs(days=self.clear_activity_log_after)
def clear_email_queue(self):
from frappe.email.queue import clear_outbox
clear_outbox(days=self.clear_email_queue_after)
def run_log_clean_up():
doc = frappe.get_doc("Log Settings")
doc.clear_logs()
@frappe.whitelist()
def has_unseen_error_log(user):
def _get_response(show_alert=True):
return {
			'show_alert': show_alert,
'message': _("You have unseen {0}").format('<a href="/app/List/Error%20Log/List"> Error Logs </a>')
}
if frappe.db.sql_list("select name from `tabError Log` where seen = 0 limit 1"):
log_settings = frappe.get_cached_doc('Log Settings')
if log_settings.users_to_notify:
if user in [u.user for u in log_settings.users_to_notify]:
return _get_response()
else:
return _get_response(show_alert=False)
else:
return _get_response() | mit | Python |
c99db48154ed327aead8a56211ac5125529f8ea0 | Change merge_and_count to accept 2 array arguments | timpel/stanford-algs,timpel/stanford-algs | count-inversions/count_inversions.py | count-inversions/count_inversions.py | from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
        print 'Format: python count_inversions.py <array-length>'
        sys.exit(1)
    print main(arr_len)
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
        print 'Format: python count_inversions.py <array-length>'
        sys.exit(1)
    print main(arr_len) | mit | Python |
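Both versions above leave merge_and_count_split as a stub and pass counts where arrays are expected. One way to finish the algorithm, not part of the commit itself, is to return (sorted_list, count) from every call and count split inversions during the merge:

def sort_and_count(arr):
    n = len(arr)
    if n <= 1:
        return arr, 0
    left, left_count = sort_and_count(arr[:n // 2])
    right, right_count = sort_and_count(arr[n // 2:])
    merged, split_count = merge_and_count_split(left, right)
    return merged, left_count + right_count + split_count


def merge_and_count_split(arr1, arr2):
    merged, count, i, j = [], 0, 0, 0
    while i < len(arr1) and j < len(arr2):
        if arr1[i] <= arr2[j]:
            merged.append(arr1[i])
            i += 1
        else:
            # Every element still waiting in arr1 is inverted with arr2[j].
            merged.append(arr2[j])
            count += len(arr1) - i
            j += 1
    merged.extend(arr1[i:])
    merged.extend(arr2[j:])
    return merged, count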
df1b2ad504d422247ac27f551810ab463eaea86e | add changelog to the admin | crateio/crate.io | crate_project/apps/packages/admin.py | crate_project/apps/packages/admin.py | from django.contrib import admin
from packages.models import Package, Release, ReleaseFile, TroveClassifier, PackageURI
from packages.models import ReleaseRequire, ReleaseProvide, ReleaseObsolete, ReleaseURI, ChangeLog
from packages.tasks import save_releases
def releases_save(modeladmin, request, queryset):
save_releases.delay([x.pk for x in queryset])
releases_save.short_description = "Trigger a Save on the Selected Releases"
class PackageURIAdmin(admin.TabularInline):
model = PackageURI
extra = 0
class PackageAdmin(admin.ModelAdmin):
inlines = [PackageURIAdmin]
list_display = ["name", "created", "modified"]
list_filter = ["created", "modified"]
search_fields = ["name"]
class ReleaseRequireInline(admin.TabularInline):
model = ReleaseRequire
extra = 0
class ReleaseProvideInline(admin.TabularInline):
model = ReleaseProvide
extra = 0
class ReleaseObsoleteInline(admin.TabularInline):
model = ReleaseObsolete
extra = 0
class ReleaseFileInline(admin.TabularInline):
model = ReleaseFile
extra = 0
class ReleaseURIInline(admin.TabularInline):
model = ReleaseURI
extra = 0
class ReleaseAdmin(admin.ModelAdmin):
inlines = [ReleaseURIInline, ReleaseFileInline, ReleaseRequireInline, ReleaseProvideInline, ReleaseObsoleteInline]
actions = [releases_save]
list_display = ["__unicode__", "package", "version", "summary", "author", "author_email", "maintainer", "maintainer_email", "created", "modified"]
list_filter = ["created", "modified", "hidden"]
search_fields = ["package__name", "version", "summary", "author", "author_email", "maintainer", "maintainer_email"]
raw_id_fields = ["package"]
class TroveClassifierAdmin(admin.ModelAdmin):
list_display = ["trove"]
search_fields = ["trove"]
class ReleaseFileAdmin(admin.ModelAdmin):
list_display = ["release", "type", "python_version", "downloads", "comment", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["release__package__name", "filename", "digest"]
raw_id_fields = ["release"]
class ChangeLogAdmin(admin.ModelAdmin):
list_display = ["package", "release", "type", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["package__name"]
raw_id_fields = ["package", "release"]
admin.site.register(Package, PackageAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(ReleaseFile, ReleaseFileAdmin)
admin.site.register(TroveClassifier, TroveClassifierAdmin)
admin.site.register(ChangeLog, ChangeLogAdmin)
| from django.contrib import admin
from packages.models import Package, Release, ReleaseFile, TroveClassifier, PackageURI
from packages.models import ReleaseRequire, ReleaseProvide, ReleaseObsolete, ReleaseURI
from packages.tasks import save_releases
def releases_save(modeladmin, request, queryset):
save_releases.delay([x.pk for x in queryset])
releases_save.short_description = "Trigger a Save on the Selected Releases"
class PackageURIAdmin(admin.TabularInline):
model = PackageURI
extra = 0
class PackageAdmin(admin.ModelAdmin):
inlines = [PackageURIAdmin]
list_display = ["name", "created", "modified"]
list_filter = ["created", "modified"]
search_fields = ["name"]
class ReleaseRequireInline(admin.TabularInline):
model = ReleaseRequire
extra = 0
class ReleaseProvideInline(admin.TabularInline):
model = ReleaseProvide
extra = 0
class ReleaseObsoleteInline(admin.TabularInline):
model = ReleaseObsolete
extra = 0
class ReleaseFileInline(admin.TabularInline):
model = ReleaseFile
extra = 0
class ReleaseURIInline(admin.TabularInline):
model = ReleaseURI
extra = 0
class ReleaseAdmin(admin.ModelAdmin):
inlines = [ReleaseURIInline, ReleaseFileInline, ReleaseRequireInline, ReleaseProvideInline, ReleaseObsoleteInline]
actions = [releases_save]
list_display = ["__unicode__", "package", "version", "summary", "author", "author_email", "maintainer", "maintainer_email", "created", "modified"]
list_filter = ["created", "modified", "hidden"]
search_fields = ["package__name", "version", "summary", "author", "author_email", "maintainer", "maintainer_email"]
raw_id_fields = ["package"]
class TroveClassifierAdmin(admin.ModelAdmin):
list_display = ["trove"]
search_fields = ["trove"]
class ReleaseFileAdmin(admin.ModelAdmin):
list_display = ["release", "type", "python_version", "downloads", "comment", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["release__package__name", "filename", "digest"]
raw_id_fields = ["release"]
admin.site.register(Package, PackageAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(ReleaseFile, ReleaseFileAdmin)
admin.site.register(TroveClassifier, TroveClassifierAdmin)
| bsd-2-clause | Python |
553f3a3a1a72f0f0ba60f739fce5d6b22f23cb22 | fix xpath of yandex | dalf/searx,potato/searx,jibe-b/searx,matejc/searx,asciimoo/searx,jcherqui/searx,dalf/searx,jibe-b/searx,jibe-b/searx,jcherqui/searx,matejc/searx,jcherqui/searx,matejc/searx,PwnArt1st/searx,potato/searx,misnyo/searx,asciimoo/searx,asciimoo/searx,PwnArt1st/searx,misnyo/searx,PwnArt1st/searx,matejc/searx,potato/searx,potato/searx,asciimoo/searx,PwnArt1st/searx,misnyo/searx,dalf/searx,misnyo/searx,dalf/searx,jibe-b/searx,jcherqui/searx | searx/engines/yandex.py | searx/engines/yandex.py | """
    Yandex (Web)
@website https://yandex.ru/
@provide-api ?
@using-api no
@results HTML (using search portal)
@stable no (HTML can change)
@parse url, title, content
"""
from urllib import urlencode
from lxml import html
from searx.search import logger
logger = logger.getChild('yandex engine')
# engine dependent config
categories = ['general']
paging = True
language_support = True # TODO
default_tld = 'com'
language_map = {'ru': 'ru',
'ua': 'uk',
'tr': 'com.tr'}
# search-url
base_url = 'https://yandex.{tld}/'
search_url = 'search/?{query}&p={page}'
results_xpath = '//li[@class="serp-item"]'
url_xpath = './/h2/a/@href'
title_xpath = './/h2/a//text()'
content_xpath = './/div[@class="text-container typo typo_text_m typo_line_m organic__text"]//text()'
def request(query, params):
lang = params['language'].split('_')[0]
host = base_url.format(tld=language_map.get(lang) or default_tld)
params['url'] = host + search_url.format(page=params['pageno'] - 1,
query=urlencode({'text': query}))
return params
# get response from search-request
def response(resp):
dom = html.fromstring(resp.text)
results = []
for result in dom.xpath(results_xpath):
try:
res = {'url': result.xpath(url_xpath)[0],
'title': ''.join(result.xpath(title_xpath)),
'content': ''.join(result.xpath(content_xpath))}
except:
logger.exception('yandex parse crash')
continue
results.append(res)
return results
| """
    Yandex (Web)
@website https://yandex.ru/
@provide-api ?
@using-api no
@results HTML (using search portal)
@stable no (HTML can change)
@parse url, title, content
"""
from urllib import urlencode
from lxml import html
from searx.search import logger
logger = logger.getChild('yandex engine')
# engine dependent config
categories = ['general']
paging = True
language_support = True # TODO
default_tld = 'com'
language_map = {'ru': 'ru',
'ua': 'uk',
'tr': 'com.tr'}
# search-url
base_url = 'https://yandex.{tld}/'
search_url = 'search/?{query}&p={page}'
results_xpath = '//div[@class="serp-item serp-item_plain_yes clearfix i-bem"]'
url_xpath = './/h2/a/@href'
title_xpath = './/h2/a//text()'
content_xpath = './/div[@class="serp-item__text"]//text()'
def request(query, params):
lang = params['language'].split('_')[0]
host = base_url.format(tld=language_map.get(lang) or default_tld)
params['url'] = host + search_url.format(page=params['pageno'] - 1,
query=urlencode({'text': query}))
return params
# get response from search-request
def response(resp):
dom = html.fromstring(resp.text)
results = []
for result in dom.xpath(results_xpath):
try:
res = {'url': result.xpath(url_xpath)[0],
'title': ''.join(result.xpath(title_xpath)),
'content': ''.join(result.xpath(content_xpath))}
except:
logger.exception('yandex parse crash')
continue
results.append(res)
return results
| agpl-3.0 | Python |
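Xpath fixes like this one are easy to sanity-check offline before shipping: save a results page from a browser session and count matches with lxml (the filename is an assumption):

from lxml import html

with open('yandex_results.html') as f:  # page saved from a browser session
    dom = html.fromstring(f.read())

items = dom.xpath('//li[@class="serp-item"]')
print('%d results matched' % len(items))
for item in items[:3]:
    print(''.join(item.xpath('.//h2/a//text()')))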
71ba73af5b2d9a54f340be901199f4facc2617a6 | Update build_sdk.py | adjust/unity_sdk,adjust/unity_sdk,adjust/unity_sdk | scripts/build_sdk.py | scripts/build_sdk.py | #!/usr/bin/python
import os, sys
from scripting_utils import *
import build_sdk_android as android
import build_sdk_ios as ios
import build_sdk_windows as windows
set_log_tag('BUILD-SDK')
if __name__ != "__main__":
error('Error. Do not import this script, but run it explicitly.')
exit()
# ------------------------------------------------------------------
# get arguments
usage_message = 'Usage: python build_sdk.py [ios | android | windows] [optional, to build test library too: --with-testlib | -tl]\n';
if len(sys.argv) < 2:
error('Error. Platform not provided.')
debug(usage_message)
exit()
platform = sys.argv[1]
if platform != 'ios' and platform != 'android' and platform != 'windows':
error('Error. Unknown platform provided: [{0}]'.format(platform))
debug(usage_message)
exit()
with_test_lib = False
if len(sys.argv) == 3 and (sys.argv[2] == '--with-testlib' or sys.argv[2] == '-tl'):
with_test_lib = True
elif len(sys.argv) == 3:
error('Unknown 2nd parameter.')
debug(usage_message)
exit()
debug_green('Script start. Platform=[{0}]. With Test Library=[{1}]. Build Adjust Unity SDK ...'.format(platform, with_test_lib))
# ------------------------------------------------------------------
# Paths
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(os.path.normpath(script_dir))
android_submodule_dir = '{0}/ext/android'.format(root_dir)
ios_submodule_dir = '{0}/ext/ios'.format(root_dir)
windows_submodule_dir = '{0}/ext/windows'.format(root_dir)
# ------------------------------------------------------------------
# Call platform specific build method.
if platform == 'ios':
set_log_tag('IOS-SDK-BUILD')
check_submodule_dir('iOS', ios_submodule_dir + '/sdk')
ios.build(root_dir, ios_submodule_dir, with_test_lib)
elif platform == 'android':
    set_log_tag('ANDROID-SDK-BUILD')
check_submodule_dir('Android', android_submodule_dir + '/sdk')
android.build(root_dir, android_submodule_dir, with_test_lib)
else:
set_log_tag('WINDOWS-SDK-BUILD')
check_submodule_dir('Windows', windows_submodule_dir + '/sdk')
windows.build(root_dir, windows_submodule_dir)
remove_files('*.pyc', script_dir, log=False)
# ------------------------------------------------------------------
# Script completed.
debug_green('Script completed!')
| #!/usr/bin/python
import os, sys
from scripting_utils import *
import build_sdk_android as android
import build_sdk_ios as ios
import build_sdk_windows as windows
set_log_tag('BUILD-SDK')
if __name__ != "__main__":
error('Error. Do not import this script, but run it explicitly.')
exit()
# ------------------------------------------------------------------
# get arguments
usage_message = 'Usage: python build_sdk.py [ios | android | windows] [otpional, to build test library too: --with-testlib | -tl]\n';
if len(sys.argv) < 2:
error('Error. Platform not provided.')
debug(usage_message)
exit()
platform = sys.argv[1]
if platform != 'ios' and platform != 'android' and platform != 'windows':
error('Error. Unknown platform provided: [{0}]'.format(platform))
debug(usage_message)
exit()
with_test_lib = False
if len(sys.argv) == 3 and (sys.argv[2] == '--with-testlib' or sys.argv[2] == '-tl'):
with_test_lib = True
elif len(sys.argv) == 3:
error('Unknown 2nd parameter.')
debug(usage_message)
exit()
debug_green('Script start. Platform=[{0}]. With Test Library=[{1}]. Build Adjust Unity SDK ...'.format(platform, with_test_lib))
# ------------------------------------------------------------------
# Paths
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(os.path.normpath(script_dir))
android_submodule_dir = '{0}/ext/android'.format(root_dir)
ios_submodule_dir = '{0}/ext/ios'.format(root_dir)
windows_submodule_dir = '{0}/ext/windows'.format(root_dir)
# ------------------------------------------------------------------
# Call platform specific build method.
if platform == 'ios':
set_log_tag('IOS-SDK-BUILD')
check_submodule_dir('iOS', ios_submodule_dir + '/sdk')
ios.build(root_dir, ios_submodule_dir, with_test_lib)
elif platform == 'android':
    set_log_tag('ANDROID-SDK-BUILD')
check_submodule_dir('Android', android_submodule_dir + '/sdk')
android.build(root_dir, android_submodule_dir, with_test_lib)
else:
set_log_tag('WINDOWS-SDK-BUILD')
check_submodule_dir('Windows', windows_submodule_dir + '/sdk')
windows.build(root_dir, windows_submodule_dir)
remove_files('*.pyc', script_dir, log=False)
# ------------------------------------------------------------------
# Script completed.
debug_green('Script completed!')
| mit | Python |
aa026fb39bd4a053766988383e9374dba20fd7f5 | Use symlinks to avoid weird behavior from removing the CWD while we're in it | lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123 | scripts/init_tree.py | scripts/init_tree.py | import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
        print 'This script must be run in \"FRENSIE/scripts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
    Once all dependencies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
| import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
        print 'This script must be run in \"FRENSIE/scripts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
    Once all dependencies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
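The symlink is what avoids the behavior named in the subject: os.renames physically moves the checkout, leaving any shell still sitting in FRENSIE/scripts with a stale working directory, while os.symlink leaves the tree in place. The pattern in isolation, with paths assumed:

import os

checkout = os.path.abspath('FRENSIE')            # the existing source tree
os.mkdir('frensie_build_tree')                   # assumed not to exist yet
os.symlink(checkout, os.path.join('frensie_build_tree', 'FRENSIE'))

# Nothing moved: the checkout stays reachable from both paths.
print(os.path.realpath(os.path.join('frensie_build_tree', 'FRENSIE')))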
6e764cf0a4185adbc543683f2781665d1f7fbd48 | fix on preloading new job page with "undefined" git hash | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | dashboard/dashboard/pinpoint/handlers/commits.py | dashboard/dashboard/pinpoint/handlers/commits.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
from dashboard.api import api_request_handler
from dashboard.pinpoint.models import change
from dashboard.services import request
from dashboard.common import utils
if utils.IsRunningFlask():
from flask import request as flask_request
def _CheckUser():
pass
@api_request_handler.RequestHandlerDecoratorFactory(_CheckUser)
def CommitsHandlerPost():
try:
repository = flask_request.args.get('repository', 'chromium')
# crbug/1363418: workaround when start_git_hash is 'undefined'
start_git_hash = flask_request.args.get('start_git_hash')
if start_git_hash == 'undefined':
logging.warning(
'start_git_hash has "undefined" as the value. Using "HEAD" as default.'
)
start_git_hash = 'HEAD'
c1 = change.Commit.FromDict({
'repository': repository,
'git_hash': start_git_hash,
})
c2 = change.Commit.FromDict({
'repository': repository,
'git_hash': flask_request.args.get('end_git_hash'),
})
commits = change.Commit.CommitRange(c1, c2)
commits = [
change.Commit(repository, c['commit']).AsDict() for c in commits
]
return [c1.AsDict()] + commits
except request.RequestError as e:
raise api_request_handler.BadRequestError(str(e))
else:
class Commits(api_request_handler.ApiRequestHandler):
# pylint: disable=abstract-method
def _CheckUser(self):
pass
def Post(self, *args, **kwargs):
del args, kwargs # Unused.
try:
repository = self.request.get('repository', 'chromium')
c1 = change.Commit.FromDict({
'repository': repository,
'git_hash': self.request.get('start_git_hash'),
})
c2 = change.Commit.FromDict({
'repository': repository,
'git_hash': self.request.get('end_git_hash'),
})
commits = change.Commit.CommitRange(c1, c2)
commits = [
change.Commit(repository, c['commit']).AsDict() for c in commits
]
return [c1.AsDict()] + commits
except request.RequestError as e:
raise api_request_handler.BadRequestError(str(e))
| # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from dashboard.api import api_request_handler
from dashboard.pinpoint.models import change
from dashboard.services import request
from dashboard.common import utils
if utils.IsRunningFlask():
from flask import request as flask_request
def _CheckUser():
pass
@api_request_handler.RequestHandlerDecoratorFactory(_CheckUser)
def CommitsHandlerPost():
try:
repository = flask_request.args.get('repository', 'chromium')
c1 = change.Commit.FromDict({
'repository': repository,
'git_hash': flask_request.args.get('start_git_hash'),
})
c2 = change.Commit.FromDict({
'repository': repository,
'git_hash': flask_request.args.get('end_git_hash'),
})
commits = change.Commit.CommitRange(c1, c2)
commits = [
change.Commit(repository, c['commit']).AsDict() for c in commits
]
return [c1.AsDict()] + commits
except request.RequestError as e:
raise api_request_handler.BadRequestError(str(e))
else:
class Commits(api_request_handler.ApiRequestHandler):
# pylint: disable=abstract-method
def _CheckUser(self):
pass
def Post(self, *args, **kwargs):
del args, kwargs # Unused.
try:
repository = self.request.get('repository', 'chromium')
c1 = change.Commit.FromDict({
'repository': repository,
'git_hash': self.request.get('start_git_hash'),
})
c2 = change.Commit.FromDict({
'repository': repository,
'git_hash': self.request.get('end_git_hash'),
})
commits = change.Commit.CommitRange(c1, c2)
commits = [
change.Commit(repository, c['commit']).AsDict() for c in commits
]
return [c1.AsDict()] + commits
except request.RequestError as e:
raise api_request_handler.BadRequestError(str(e))
| bsd-3-clause | Python |
cd014901a4453cda1576b2f10ee3fc6235b8c5e5 | update for 1.7 | asteven/dpkt | dpkt/__init__.py | dpkt/__init__.py | # $Id$
"""fast, simple packet creation and parsing."""
__author__ = 'Dug Song <dugsong@monkey.org>'
__copyright__ = 'Copyright (c) 2004 Dug Song'
__license__ = 'BSD'
__url__ = 'http://dpkt.googlecode.com/'
__version__ = '1.7'
from dpkt import *
import ah
import aim
import arp
import asn1
import bgp
import cdp
import dhcp
import diameter
import dns
import dtp
import esp
import ethernet
import gre
import gzip
import h225
import hsrp
import http
import icmp
import icmp6
import ieee80211
import igmp
import ip
import ip6
import ipx
import loopback
import mrt
import netbios
import netflow
import ntp
import ospf
import pcap
import pim
import pmap
import ppp
import pppoe
import qq
import radiotap
import radius
import rfb
import rip
import rpc
import rtp
import rx
import sccp
import sctp
import sip
import sll
import smb
import ssl
import stp
import stun
import tcp
import telnet
import tftp
import tns
import tpkt
import udp
import vrrp
import yahoo
| # $Id$
"""fast, simple packet creation and parsing."""
__author__ = 'Dug Song <dugsong@monkey.org>'
__copyright__ = 'Copyright (c) 2004 Dug Song'
__license__ = 'BSD'
__url__ = 'http://monkey.org/~dugsong/dpkt/'
__version__ = '1.6'
from dpkt import *
import ah
import aim
import arp
import asn1
import bgp
import cdp
import dhcp
import diameter
import dns
import dtp
import esp
import ethernet
import gre
import gzip
import h225
import hsrp
import http
import icmp
import icmp6
import ieee80211
import igmp
import ip
import ip6
import ipx
import loopback
import mrt
import netbios
import netflow
import ntp
import ospf
import pcap
import pim
import pmap
import ppp
import pppoe
import qq
import radiotap
import radius
import rfb
import rip
import rpc
import rtp
import rx
import sccp
import sctp
import sip
import sll
import smb
import ssl
import stp
import stun
import tcp
import telnet
import tftp
import tns
import tpkt
import udp
import vrrp
import yahoo
| bsd-3-clause | Python |
0b5675c8940b5b4ffe0e758432c2c724e54f685e | add background and mouse responder methods | schriftgestalt/GlyphsSDK,schriftgestalt/GlyphsSDK,schriftgestalt/GlyphsSDK | ObjectWrapper/GlyphsApp/UI/CanvasView.py | ObjectWrapper/GlyphsApp/UI/CanvasView.py | # -*- coding: utf-8 -*-
from __future__ import print_function
__all__ = ["CanvasView"]
import traceback
from vanilla import Group
from AppKit import NSView, NSRectFill, NSColor
class CanvasView_view(NSView):
def drawRect_(self, rect):
try:
if self._backgroundColor is not None:
self._backgroundColor.set()
NSRectFill(rect)
if self._delegate != None:
self._delegate.draw(self)
except:
print(traceback.format_exc())
def mouseDown_(self, event):
try:
if self._delegate != None and hasattr(self._delegate, "mouseDown"):
self._delegate.mouseDown(event)
except:
print(traceback.format_exc())
def mouseDragged_(self, event):
try:
if self._delegate != None and hasattr(self._delegate, "mouseDragged"):
self._delegate.mouseDragged(event)
except:
print(traceback.format_exc())
def mouseUp_(self, event):
try:
if self._delegate != None and hasattr(self._delegate, "mouseUp"):
self._delegate.mouseUp(event)
except:
print(traceback.format_exc())
class CanvasView(Group):
'''
A vanilla object that can be used to draw anything.
from AppKit import *
from vanilla import *
from GlyphsApp.UI import *
class CanvasViewDemo(object):
def __init__(self):
self.w = Window((150, 150))
self.w.group = CanvasView((10, 10, -10, -10), self)
self.w.open()
def draw(self, view):
bounds = view.bounds()
NSColor.greenColor().set()
NSRectFill(bounds)
CanvasViewDemo()
'''
version = "1.0"
nsViewClass = CanvasView_view
def __init__(self, posSize, delegate, backgroundColor=None):
self._setupView(self.nsViewClass, posSize)
self.delegate = delegate
self._nsObject._backgroundColor = backgroundColor
def _get_delegate(self):
return self.view._delegate
def _set_delegate(self, delegate):
self._nsObject._delegate = delegate
self._nsObject.setNeedsDisplay_(True)
delegate = property(_get_delegate, _set_delegate)
def update(self):
self._nsObject.setNeedsDisplay_(True)
| # -*- coding: utf-8 -*-
from __future__ import print_function
__all__ = ["CanvasView"]
import traceback
from vanilla import Group
from AppKit import NSView
class CanvasView_view(NSView):
def drawRect_(self, rect):
try:
if self._delegate != None:
self._delegate.draw(self)
except:
print(traceback.format_exc())
class CanvasView(Group):
'''
A vanilla object that can be used to draw anything.
from AppKit import *
from vanilla import *
from GlyphsApp.UI import *
class CanvasViewDemo(object):
def __init__(self):
self.w = Window((150, 150))
self.w.group = CanvasView((10, 10, -10, -10), self)
self.w.open()
def draw(self, view):
bounds = view.bounds()
NSColor.greenColor().set()
NSRectFill(bounds)
CanvasViewDemo()
'''
version = "1.0"
nsViewClass = CanvasView_view
def __init__(self, posSize, delegate):
self._setupView(self.nsViewClass, posSize)
self.delegate = delegate
def _get_delegate(self):
return self.view._delegate
def _set_delegate(self, delegate):
self._nsObject._delegate = delegate
self._nsObject.setNeedsDisplay_(True)
delegate = property(_get_delegate, _set_delegate)
| apache-2.0 | Python |
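A sketch of a delegate that exercises the new mouse responder hooks alongside draw(); it follows the docstring demo above, and the click handler body is hypothetical.

from AppKit import NSColor, NSRectFill
from vanilla import Window
from GlyphsApp.UI import CanvasView

class ClickDemo(object):
    def __init__(self):
        self.w = Window((150, 150))
        # backgroundColor is the new optional argument added in this commit
        self.w.canvas = CanvasView((10, 10, -10, -10), self,
                                   backgroundColor=NSColor.whiteColor())
        self.w.open()

    def draw(self, view):
        NSColor.greenColor().set()
        NSRectFill(view.bounds())

    def mouseDown(self, event):
        # forwarded by CanvasView_view.mouseDown_()
        print(event.locationInWindow())

ClickDemo()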
edb2bfb45c069a1b83f1e9a82c64e2910c4d1fda | add help and show version. | Tynox/SimpleRenameTool | rename_tool.py | rename_tool.py | #!/usr/bin/env python
# encoding:utf-8
"""
A Simple Rename Tool.
"""
import os
import sys
import getopt
version = 0.2
opts = None
args = None
fileList = None
dir = None
def init():
"""
Init and get arguments.
"""
global opts
global args
# get options
try:
        opts, args = getopt.getopt(sys.argv[1:], "hv", ["help", "version"])
except getopt.GetoptError as err:
print str(err)
getHelp()
sys.exit()
else:
parseOpts()
def parseOpts():
"""
parse opts and arguments
"""
global dir
global fileList
# check options. If options is None, exit.
for o, a in opts:
if o in ("-h", "--help"): # get help
getHelp()
sys.exit()
elif o in ("-v", "--version"):
showVersion()
sys.exit()
# get dir
if args is None or len(args) == 0:
print "SRT:no source dictionary."
sys.exit()
dir = args[0]
try:
fileList = os.listdir(dir)
except:
print "SRT:wrong path"
sys.exit()
else:
renameFiles()
def getHelp():
"""
get tool help
"""
showVersion()
print "usage:rename_tool.py [-h|--help] [-v|--version] [<dictionary>]"
def showVersion():
"""
show version
"""
print "Simple Rename Tool version:v{0}".format(version)
def renameFiles():
"""
rename files
"""
# check fileList. if fileList is None, exit.
if fileList is None:
print "no files in the dictionary."
sys.exit()
try:
for i, filename in enumerate(fileList):
os.renames(dir+filename, "{0}{1}.txt".format(dir, i+1))
except:
print "Error! Failed to rename files. Check the existing filenames."
else:
print "Done!"
if __name__ == "__main__":
init() | #!/usr/bin/env python
# encoding:utf-8
"""
A Simple Rename Tool.
version: 0.2
"""
import os
import sys
import getopt
opts = None
args = None
fileList = None
dir = None
def init():
"""
Init and get arguments.
"""
global opts
global args
try:
#dir = sys.argv[1]
opts, args = getopt.getopt(sys.argv[1:], "d:", ["dir="])
except getopt.GetoptError, err:
print str(err)
sys.exit()
else:
parseOpts()
def parseOpts():
"""
parse opts and arguments
"""
# fileList = os.listdir(dir)
global fileList
global dir
# check options. If options is None, exit.
if opts is None:
print "no source dictionary."
sys.exit()
for o, a in opts:
if o in ("-d", "--dir"):
dir = a
fileList = os.listdir(a)
renameFiles()
def renameFiles():
"""
rename files
"""
# check fileList. if fileList is None, exit.
if fileList is None:
print "no files in the dictionary."
sys.exit()
try:
for i, filename in enumerate(fileList):
os.renames(dir+filename, "{0}{1}.txt".format(dir, i+1))
except:
print "Error! Failed to rename files. Check the existing filenames."
else:
print "Done!"
if __name__ == "__main__":
init() | unlicense | Python |
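A minimal sketch of the getopt convention used above: long options are declared without their leading dashes, while the parsed results carry them.

import getopt
import sys

# declares "-h/--help" and "-v/--version"
opts, args = getopt.getopt(sys.argv[1:], "hv", ["help", "version"])
for opt, _value in opts:
    if opt in ("-h", "--help"):      # parsed options include the dashes
        print("help requested")
    elif opt in ("-v", "--version"):
        print("version requested")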
9c642cb06a4f3a7cea11d6020752f0eabaebc90c | update dev version after 0.17.0 tag [skip ci] | desihub/desisurvey,desihub/desisurvey | py/desisurvey/_version.py | py/desisurvey/_version.py | __version__ = '0.17.0.dev1045'
| __version__ = '0.17.0'
| bsd-3-clause | Python |
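For context on the ".devN" suffix used above: under PEP 440 a development release sorts before the matching final release. A small comparison sketch using the third-party packaging library:

from packaging.version import Version

assert Version('0.17.0.dev1045') < Version('0.17.0')
assert Version('0.17.0') < Version('0.17.1.dev1')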
9118b0326ac1c27e9773cc12fd88fccbea26442b | update dev version after 1.1.1 tag [ci skip] | desihub/desitarget,desihub/desitarget | py/desitarget/_version.py | py/desitarget/_version.py | __version__ = '1.1.1.dev5118'
| __version__ = '1.1.1'
| bsd-3-clause | Python |
ba662a902384ca2a07316f66407ee703f65a58bd | Update urls | aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/alfie,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/kashana,daniell/kashana,daniell/kashana,daniell/kashana,aptivate/kashana | django/website/contacts/auth_urls.py | django/website/contacts/auth_urls.py | from django.conf.urls import patterns, url
from django.contrib.auth.views import (
login, logout_then_login, password_reset_confirm
)
from .views import ResetPassword, change_password
urlpatterns = [
url(r'login/$', login, name='login'),
url(r'logout/$', logout_then_login, name='logout'),
# Activation and password reset
url(r'password_reset/$', ResetPassword.as_view(), name='password_reset'),
url(r'password_reset_confirm/(?P<uidb64>[0-9A-Za-z]{1,13})-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
password_reset_confirm, {'post_reset_redirect': '/'}, name='password_reset_confirm'),
url(r'password_change/$', change_password, name='password_change'),
]
| from django.conf.urls import patterns, url
from django.contrib.auth.views import (
login, logout_then_login, password_reset_confirm
)
from .views import ResetPassword, change_password
urlpatterns = patterns('',
url(r'login/$', login, name='login'),
url(r'logout/$', logout_then_login, name='logout'),
# Activation and password reset
url(r'password_reset/$', ResetPassword.as_view(), name='password_reset'),
url(r'password_reset_confirm/(?P<uidb64>[0-9A-Za-z]{1,13})-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
password_reset_confirm, {'post_reset_redirect': '/'}, name='password_reset_confirm'),
url(r'password_change/$', change_password, name='password_change'),
)
| agpl-3.0 | Python |
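The change above replaces the patterns() wrapper (deprecated in Django 1.8, removed in 1.10) with a plain list. On Django 2.0+ the same routes would usually be spelled with path() and class-based auth views; a hypothetical sketch:

from django.urls import path
from django.contrib.auth import views as auth_views

urlpatterns = [
    path('login/', auth_views.LoginView.as_view(), name='login'),
    path('logout/', auth_views.logout_then_login, name='logout'),
]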
69ed2a36ab8531cb22bdc26afcf58780e3889d19 | check service manager using /proc/1/comm | tcpcloud/openvstorage,tcpcloud/openvstorage,tcpcloud/openvstorage,tcpcloud/openvstorage | ovs/extensions/services/service.py | ovs/extensions/services/service.py | # Copyright 2015 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Service Factory module
"""
from subprocess import check_output
from ovs.extensions.services.upstart import Upstart
# from ovs.extensions.services.systemd import SystemD
from ovs.log.logHandler import LogHandler
logger = LogHandler('extensions', name='servicemanager')
class ServiceManager(object):
"""
Factory class returning specialized classes
"""
ImplementationClass = None
class MetaClass(type):
"""
Metaclass
"""
def __getattr__(cls, item):
"""
Returns the appropriate class
"""
_ = cls
if ServiceManager.ImplementationClass is None:
try:
init_info = check_output('cat /proc/1/comm', shell=True)
# All service classes used in below code should share the exact same interface!
if 'init' in init_info:
version_info = check_output('init --version', shell=True)
if 'upstart' in version_info:
ServiceManager.ImplementationClass = Upstart
else:
raise RuntimeError('The ServiceManager is unrecognizable')
# elif 'systemd' in init_info:
# ServiceManager.ImplementationClass = SystemD
else:
raise RuntimeError('There was no known ServiceManager detected')
except Exception as ex:
logger.exception('Error loading ServiceManager: {0}'.format(ex))
raise
return getattr(ServiceManager.ImplementationClass, item)
__metaclass__ = MetaClass
| # Copyright 2015 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Service Factory module
"""
from subprocess import check_output
from ovs.extensions.services.upstart import Upstart
# from ovs.extensions.services.systemd import SystemD
from ovs.log.logHandler import LogHandler
logger = LogHandler('extensions', name='servicemanager')
class ServiceManager(object):
"""
Factory class returning specialized classes
"""
ImplementationClass = None
class MetaClass(type):
"""
Metaclass
"""
def __getattr__(cls, item):
"""
Returns the appropriate class
"""
_ = cls
if ServiceManager.ImplementationClass is None:
try:
init_info = check_output('init --version', shell=True)
# All service classes used in below code should share the exact same interface!
if 'upstart' in init_info:
ServiceManager.ImplementationClass = Upstart
# elif 'systemd' in init_info:
# ServiceManager.ImplementationClass = SystemD
else:
raise RuntimeError('There was no known ServiceManager detected')
except Exception as ex:
logger.exception('Error loading ServiceManager: {0}'.format(ex))
raise
return getattr(ServiceManager.ImplementationClass, item)
__metaclass__ = MetaClass
| apache-2.0 | Python |
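The detection logic above shells out twice; the same check can be written without a shell by reading /proc/1/comm directly. A Linux-only sketch in the Python 2 style of the module:

from subprocess import check_output

def detect_service_manager():
    with open('/proc/1/comm') as f:
        comm = f.read().strip()            # e.g. 'init' or 'systemd'
    if comm == 'init' and 'upstart' in check_output(['init', '--version']):
        return 'upstart'
    raise RuntimeError('There was no known ServiceManager detected')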
761b8e7e471f111b78f36f7aad34e8d89fdc3002 | Make oweb.tests.views.account_delete.test_redirect work | Mischback/django-oweb,Mischback/django-oweb | oweb/tests/views/account_delete.py | oweb/tests/views/account_delete.py | """Contains tests for oweb.views.updates.account_delete"""
# Python imports
from unittest import skip
# Django imports
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.contrib.auth.models import User
# app imports
from oweb.tests import OWebViewTests
from oweb.models.account import Account
@override_settings(AUTH_USER_MODEL='auth.User')
class OWebViewsAccountDeleteTests(OWebViewTests):
def test_login_required(self):
"""Unauthenticated users should be redirected to oweb:app_login"""
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertRedirects(r,
reverse('oweb:app_login'),
status_code=302,
target_status_code=200)
def test_account_owner(self):
"""Can somebody delete an account he doesn't posess?"""
self.client.login(username='test02', password='foo')
# Should display a 403-page and use oweb/403.html
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 403)
self.assertTemplateUsed(r, 'oweb/403.html')
r = self.client.post(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 403)
self.assertTemplateUsed(r, 'oweb/403.html')
def test_get(self):
"""Does a GET to ``account_delete()`` show the confirmation template?"""
self.client.login(username='test01', password='foo')
# Should display oweb/account_delete.html with status 200
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 200)
self.assertTemplateUsed(r, 'oweb/account_delete.html')
def test_redirect(self):
"""Does ``account_delete()`` redirect to the correct page?"""
u = User.objects.get(username='test01')
acc = Account.objects.create(owner=u)
self.client.login(username='test01', password='foo')
# create account for this test
r = self.client.post(reverse('oweb:account_delete', args=[acc.id]),
data={'confirm_account_deletion': 'confirm'})
self.assertRedirects(r,
reverse('oweb:home'),
status_code=302,
target_status_code=200)
@skip('not yet implemented')
def test_post_tamper(self):
"""What does happen, if somebody tampers POST data?"""
# TODO insert real test here
self.assertEqual(True, True)
| """Contains tests for oweb.views.updates.account_delete"""
# Python imports
from unittest import skip
# Django imports
from django.core.urlresolvers import reverse
# app imports
from oweb.tests import OWebViewTests
class OWebViewsAccountDeleteTests(OWebViewTests):
def test_login_required(self):
"""Unauthenticated users should be redirected to oweb:app_login"""
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertRedirects(r,
reverse('oweb:app_login'),
status_code=302,
target_status_code=200)
def test_account_owner(self):
"""Can somebody delete an account he doesn't posess?"""
self.client.login(username='test02', password='foo')
# Should display a 403-page and use oweb/403.html
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 403)
self.assertTemplateUsed(r, 'oweb/403.html')
r = self.client.post(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 403)
self.assertTemplateUsed(r, 'oweb/403.html')
def test_get(self):
"""Does a GET to ``account_delete()`` show the confirmation template?"""
self.client.login(username='test01', password='foo')
# Should display oweb/account_delete.html with status 200
r = self.client.get(reverse('oweb:account_delete', args=[1,]))
self.assertEqual(r.status_code, 200)
self.assertTemplateUsed(r, 'oweb/account_delete.html')
@skip('not yet implemented')
def test_redirect(self):
"""Does ``planet_delete()`` redirect to the correct page?"""
# TODO insert real test here (should redirect to home)
self.assertEqual(True, True)
@skip('not yet implemented')
def test_post_tamper(self):
"""What does happen, if somebody tampers POST data?"""
# TODO insert real test here
self.assertEqual(True, True)
| mit | Python |
25aaca207d1c720d414df2eebca3acc954c5eb88 | Use extensions in view | alexandermendes/pybossa-discourse | pybossa_discourse/view.py | pybossa_discourse/view.py | # -*- coding: utf8 -*-
"""Views module for pybossa-discourse."""
from flask import Blueprint, request, url_for, flash, redirect
from flask import current_app as app
from flask.ext.login import current_user
from . import discourse_sso, discourse_client
def index():
"""Attempt to sign in via SSO then redirect to Discourse."""
try:
return redirect(discourse_sso.signin())
except AttributeError as e: # pragma: no cover
flash('Access Denied: {0}'.format(str(e)), 'error')
return redirect(url_for('home.home'))
def oauth_authorized():
"""Authorise a Discourse login."""
sso = request.args.get('sso')
sig = request.args.get('sig')
if current_user.is_anonymous():
next_url = url_for('discourse.oauth_authorized', sso=sso, sig=sig)
return redirect(url_for('account.signin', next=next_url))
try:
return redirect(discourse_sso.validate(sso, sig))
except (ValueError, AttributeError) as e: # pragma: no cover
flash('Access Denied: {0}'.format(str(e)), 'error')
return redirect(url_for('home.home'))
def signout():
"""Signout the current user from both PyBossa and Discourse."""
if not current_user.is_anonymous():
try:
discourse_client.user_signout()
except (ValueError, AttributeError) as e: # pragma: no cover
flash('Discourse Logout Failed: {0}'.format(str(e)), 'error')
return redirect(url_for('account.signout'))
| # -*- coding: utf8 -*-
"""Views module for pybossa-discourse."""
from flask import Blueprint, request, url_for, flash, redirect
from flask import current_app as app
from flask.ext.login import current_user
def index():
"""Attempt to sign in via SSO then redirect to Discourse."""
discourse_sso = app.extensions['discourse']['sso']
try:
return redirect(discourse_sso.signin())
except AttributeError as e: # pragma: no cover
flash('Access Denied: {0}'.format(str(e)), 'error')
return redirect(url_for('home.home'))
def oauth_authorized():
"""Authorise a Discourse login."""
discourse_sso = app.extensions['discourse']['sso']
sso = request.args.get('sso')
sig = request.args.get('sig')
if current_user.is_anonymous():
next_url = url_for('discourse.oauth_authorized', sso=sso, sig=sig)
return redirect(url_for('account.signin', next=next_url))
try:
return redirect(discourse_sso.validate(sso, sig))
except (ValueError, AttributeError) as e: # pragma: no cover
flash('Access Denied: {0}'.format(str(e)), 'error')
return redirect(url_for('home.home'))
def signout():
"""Signout the current user from both PyBossa and Discourse."""
discourse_client = app.extensions['discourse']['client']
if not current_user.is_anonymous():
try:
discourse_client.user_signout()
except (ValueError, AttributeError) as e: # pragma: no cover
flash('Discourse Logout Failed: {0}'.format(str(e)), 'error')
return redirect(url_for('account.signout'))
| bsd-3-clause | Python |
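The view above now imports module-level singletons instead of fetching them from app.extensions on every request. A hypothetical sketch of how such singletons are typically initialised in the package root (module layout and class names are assumptions):

# pybossa_discourse/__init__.py -- hypothetical module layout
from .client import DiscourseClient   # assumed class names
from .sso import DiscourseSSO

discourse_client = None
discourse_sso = None

def init_app(app):
    # bind the singletons once, so views can simply import them
    global discourse_client, discourse_sso
    discourse_client = DiscourseClient(app.config['DISCOURSE_API_URL'])
    discourse_sso = DiscourseSSO(app.config['DISCOURSE_SECRET'])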
036a71a41e64f9eb08314a285e3c4895af3b410b | Update pylsy_test.py | bcho/Pylsy,muteness/Pylsy,huiyi1990/Pylsy,gnithin/Pylsy,gnithin/Pylsy,huiyi1990/Pylsy,bcho/Pylsy,muteness/Pylsy | pylsy/tests/pylsy_test.py | pylsy/tests/pylsy_test.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
sys.path.append('..')
import unittest
from pylsy.pylsy import pylsytable
class PylsyTableTests(unittest.TestCase):
def setUp(self):
attributes = ["name", "age"]
self.table = pylsytable(attributes)
def tearDown(self):
self.table = None
def testCreateTable(self):
name = ["a", "b"]
self.table.add_data("name", name)
age = [1, 2]
self.table.add_data("age", age)
correct_file = open('correct.out', 'r')
correctPrint = correct_file.read()
try:
# import io
# from contextlib import redirect_stdout
# with io.StringIO() as buf, redirect_stdout(buf):
# print(self.table,end='')
output = self.table.__str__()
self.assertEqual(output, correctPrint)
except ImportError:
import sys
f_handler = open('test.out', 'w')
sys.stdout = f_handler
self.table.create_table()
f_handler.close()
f_handler = open('test.out', 'r')
self.assertEqual(f_handler.read(), correctPrint)
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import unittest
from pylsy.pylsy import pylsytable
class PylsyTableTests(unittest.TestCase):
def setUp(self):
attributes = ["name", "age"]
self.table = pylsytable(attributes)
def tearDown(self):
self.table = None
def testCreateTable(self):
name = ["a", "b"]
self.table.add_data("name", name)
age = [1, 2]
self.table.add_data("age", age)
correct_file = open('correct.out', 'r')
correctPrint = correct_file.read()
try:
# import io
# from contextlib import redirect_stdout
# with io.StringIO() as buf, redirect_stdout(buf):
# print(self.table,end='')
output = self.table.__str__()
self.assertEqual(output, correctPrint)
except ImportError:
import sys
f_handler = open('test.out', 'w')
sys.stdout = f_handler
self.table.create_table()
f_handler.close()
f_handler = open('test.out', 'r')
self.assertEqual(f_handler.read(), correctPrint)
if __name__ == '__main__':
unittest.main()
| mit | Python |
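The commented-out capture idea in the test above works on Python 3; for reference, a self-contained sketch of that pattern:

import io
from contextlib import redirect_stdout

buf = io.StringIO()
with redirect_stdout(buf):
    print('captured')
assert buf.getvalue() == 'captured\n'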
4a8678d2b8ac5b7609244a38affaf308564a5c1f | Make SVDFile.from_builtin() handle missing or bad svd zip. | mbedmicro/pyOCD,pyocd/pyOCD,mesheven/pyOCD,flit/pyOCD,flit/pyOCD,mesheven/pyOCD,mesheven/pyOCD,mbedmicro/pyOCD,mbedmicro/pyOCD,pyocd/pyOCD | pyocd/debug/svd/loader.py | pyocd/debug/svd/loader.py | # pyOCD debugger
# Copyright (c) 2015-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import logging
import pkg_resources
import zipfile
from .parser import SVDParser
from ...utility.compatibility import FileNotFoundError_
LOG = logging.getLogger(__name__)
## Path within the pyocd package to the generated zip containing builtin SVD files.
BUILTIN_SVD_DATA_PATH = "debug/svd/svd_data.zip"
class SVDFile(object):
@classmethod
def from_builtin(cls, svd_name):
try:
zip_stream = pkg_resources.resource_stream("pyocd", BUILTIN_SVD_DATA_PATH)
zip = zipfile.ZipFile(zip_stream, 'r')
return SVDFile(zip.open(svd_name))
except (FileNotFoundError_, zipfile.BadZipFile) as err:
from ...core.session import Session
LOG.warning("unable to open builtin SVD file: %s", err, exc_info=Session.get_current().log_tracebacks)
return None
def __init__(self, filename=None):
self.filename = filename
self.device = None
def load(self):
self.device = SVDParser.for_xml_file(self.filename).get_device()
class SVDLoader(threading.Thread):
"""! @brief Thread to read an SVD file in the background."""
def __init__(self, svdFile, completionCallback):
super(SVDLoader, self).__init__(name='load-svd')
self.daemon = True
self._svd_location = svdFile
self._svd_device = None
self._callback = completionCallback
@property
def device(self):
if not self._svd_device:
self.join()
return self._svd_device
def load(self):
if not self._svd_device and self._svd_location:
self.start()
def run(self):
try:
self._svd_location.load()
self._svd_device = self._svd_location.device
if self._callback:
self._callback(self._svd_device)
except IOError:
LOG.warning("Failed to load SVD file %s", self._svd_location.filename)
| # pyOCD debugger
# Copyright (c) 2015-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import logging
import pkg_resources
from zipfile import ZipFile
from .parser import SVDParser
LOG = logging.getLogger(__name__)
## Path within the pyocd package to the generated zip containing builtin SVD files.
BUILTIN_SVD_DATA_PATH = "debug/svd/svd_data.zip"
class SVDFile(object):
@classmethod
def from_builtin(cls, svd_name):
zip_stream = pkg_resources.resource_stream("pyocd", BUILTIN_SVD_DATA_PATH)
zip = ZipFile(zip_stream, 'r')
return SVDFile(zip.open(svd_name))
def __init__(self, filename=None):
self.filename = filename
self.device = None
def load(self):
self.device = SVDParser.for_xml_file(self.filename).get_device()
class SVDLoader(threading.Thread):
"""! @brief Thread to read an SVD file in the background."""
def __init__(self, svdFile, completionCallback):
super(SVDLoader, self).__init__(name='load-svd')
self.daemon = True
self._svd_location = svdFile
self._svd_device = None
self._callback = completionCallback
@property
def device(self):
if not self._svd_device:
self.join()
return self._svd_device
def load(self):
if not self._svd_device and self._svd_location:
self.start()
def run(self):
try:
self._svd_location.load()
self._svd_device = self._svd_location.device
if self._callback:
self._callback(self._svd_device)
except IOError:
LOG.warning("Failed to load SVD file %s", self._svd_location.filename)
| apache-2.0 | Python |
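The pattern from_builtin() relies on, opening a zip shipped inside a package and pulling one member out, shown in isolation (package, resource and member names here are hypothetical):

import pkg_resources
import zipfile

stream = pkg_resources.resource_stream('mypkg', 'data/svd_data.zip')
archive = zipfile.ZipFile(stream, 'r')
member = archive.open('STM32F103xx.svd')   # file-like object for one entry
print(member.read(64))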
64d8bb7f620c8ce047a442fe8d17092428a0e33b | fix tag | PyThaiNLP/pythainlp | pythainlp/tag/__init__.py | pythainlp/tag/__init__.py | # -*- coding: utf-8 -*-
# TODO adjust the API to be like nltk
from __future__ import absolute_import,division,print_function,unicode_literals
import sys
def pos_tag(text,engine='old'):
"""
    POS tagger system
    pos_tag(text,engine='old')
    Supported engines:
    * old - a UnigramTagger
    * artagger - an RDR POS Tagger
"""
if engine=='old':
from .old import tag
elif engine=='artagger':
if sys.version_info < (3,4):
sys.exit('Sorry, Python < 3.4 is not supported')
def tag(text1):
try:
from artagger import Tagger
except ImportError:
import pip
pip.main(['install','https://github.com/wannaphongcom/artagger/archive/master.zip'])
try:
from artagger import Tagger
except ImportError:
print("Error ! using 'pip install https://github.com/wannaphongcom/artagger/archive/master.zip'")
sys.exit(0)
tagger = Tagger()
words = tagger.tag(' '.join(text1))
totag=[]
for word in words:
totag.append((word.word, word.tag))
return totag
return tag(text)
| # -*- coding: utf-8 -*-
# TODO adjust the API to be like nltk
from __future__ import absolute_import,division,print_function,unicode_literals
import sys
def pos_tag(text,engine='old'):
"""
    POS tagger system
    pos_tag(text,engine='old')
    Supported engines:
    * old - a UnigramTagger
    * artagger - an RDR POS Tagger
"""
if engine=='old':
from .old import tag
elif engine=='artagger':
if sys.version_info < (3,4):
sys.exit('Sorry, Python < 3.4 is not supported')
def tag(text1):
try:
from artagger import Tagger
except ImportError:
import pip
pip.main(['install','https://github.com/franziz/artagger/archive/master.zip'])
try:
from artagger import Tagger
except ImportError:
print("Error ! using 'pip install https://github.com/franziz/artagger/archive/master.zip'")
sys.exit(0)
tagger = Tagger()
words = tagger.tag(' '.join(text1))
totag=[]
for word in words:
totag.append((word.word, word.tag))
return totag
return tag(text)
| apache-2.0 | Python |
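A caveat on the fallback above: pip.main() was removed from pip's public API in pip 10, so a runtime install is more robustly written through a subprocess. A sketch using the same archive URL:

import subprocess
import sys

subprocess.check_call([
    sys.executable, '-m', 'pip', 'install',
    'https://github.com/wannaphongcom/artagger/archive/master.zip',
])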
75c50c31cc6f7a5075f35d4913acbeb5f41ded50 | fix for error messages | rmorlok/pytracts | pytracts/message_types.py | pytracts/message_types.py | #!/usr/bin/env python
#
# Copyright 2014 Docalytics Inc, Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple protocol message types.
Includes new message and field types that are outside what is defined by the
protocol buffers standard.
"""
__author__ = 'rafek@google.com (Rafe Kaplan)'
import datetime
from . import messages
from . import util
__all__ = [
'VoidMessage',
'ErrorMessage',
'error_message_from_exception'
]
class VoidMessage(messages.Message):
"""Empty message."""
class ErrorMessage(messages.Message):
"""
A message to accompany errors.
"""
title = messages.StringField()
message = messages.StringField()
explanation = messages.StringField()
def error_message_from_exception(exception):
"""
Create an instance from an exception object.
:param exception: the exception
:return: ErrorMessage
"""
result = ErrorMessage()
if hasattr(exception, 'message'):
result.message = exception.message
elif hasattr(exception, 'description'):
result.message = exception.description
if hasattr(exception, 'title'):
result.title = exception.title
elif hasattr(exception, 'name'):
result.title = exception.name
if hasattr(exception, 'explanation'):
result.explanation = exception.explanation
if hasattr(exception, 'detail'):
result.message = exception.detail
return result
| #!/usr/bin/env python
#
# Copyright 2014 Docalytics Inc, Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple protocol message types.
Includes new message and field types that are outside what is defined by the
protocol buffers standard.
"""
__author__ = 'rafek@google.com (Rafe Kaplan)'
import datetime
from . import messages
from . import util
__all__ = [
'VoidMessage',
'ErrorMessage',
'error_message_from_exception'
]
class VoidMessage(messages.Message):
"""Empty message."""
class ErrorMessage(messages.Message):
"""
A message to accompany errors.
"""
title = messages.StringField()
message = messages.StringField()
explanation = messages.StringField()
def error_message_from_exception(exception):
"""
Create an instance from an exception object.
:param exception: the exception
:return: ErrorMessage
"""
result = ErrorMessage()
if hasattr(exception, 'message'):
result.message = exception.message
elif hasattr(exception, 'description'):
result.message = exception.description
if hasattr(exception, 'title'):
result.title = exception.title
elif hasattr(exception, 'name'):
result.title = exception.title
if hasattr(exception, 'explanation'):
result.explanation = exception.explanation
if hasattr(exception, 'detail'):
result.message = exception.detail
return result
| apache-2.0 | Python |
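A usage sketch for the helper above. On Python 2, built-in exceptions expose .message, which is the first attribute the function checks:

from pytracts import message_types

try:
    raise ValueError('bad input')
except ValueError as exc:
    msg = message_types.error_message_from_exception(exc)
    print(msg.message)   # -> 'bad input'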
3a8e857aa12ac55a5b5e531e72356541a185da56 | Load from different file | kim135797531/opencog,Allend575/opencog,roselleebarle04/opencog,kinoc/opencog,sumitsourabh/opencog,rTreutlein/atomspace,cosmoharrigan/opencog,yantrabuddhi/atomspace,ruiting/opencog,iAMr00t/opencog,virneo/atomspace,kinoc/opencog,Allend575/opencog,tim777z/opencog,williampma/opencog,rodsol/atomspace,inflector/atomspace,tim777z/opencog,printedheart/opencog,eddiemonroe/opencog,virneo/opencog,misgeatgit/opencog,inflector/opencog,prateeksaxena2809/opencog,jswiergo/atomspace,rTreutlein/atomspace,anitzkin/opencog,sanuj/opencog,zhaozengguang/opencog,misgeatgit/atomspace,virneo/opencog,roselleebarle04/opencog,ArvinPan/opencog,ceefour/atomspace,rodsol/opencog,gaapt/opencog,rodsol/opencog,gaapt/opencog,rodsol/atomspace,inflector/atomspace,tim777z/opencog,printedheart/opencog,eddiemonroe/opencog,virneo/opencog,misgeatgit/opencog,inflector/opencog,prateeksaxena2809/opencog,jswiergo/atomspace,rTreutlein/atomspace,anitzkin/opencog,sanuj/opencog,zhaozengguang/opencog,misgeatgit/atomspace,virneo/opencog,roselleebarle04/opencog,ArvinPan/opencog,ceefour/atomspace,rodsol/opencog,gaapt/opencog,rohit12/opencog,kim135797531/opencog,Allend575/opencog,sumitsourabh/opencog,rodsol/atomspace,iAMr00t/opencog,Tiggels/opencog,gaapt/opencog,virneo/opencog,kinoc/opencog,Allend575/opencog,tim777z/opencog,williampma/opencog,printedheart/opencog,jlegendary/opencog,ruiting/opencog,gavrieltal/opencog,inflector/opencog,sanuj/opencog,prateeksaxena2809/opencog,rohit12/opencog,rohit12/atomspace,misgeatgit/atomspace,Selameab/atomspace,gavrieltal/opencog,Allend575/opencog,kim135797531/opencog,inflector/opencog,jlegendary/opencog,misgeatgit/opencog,anitzkin/opencog,Tiggels/opencog,virneo/opencog,roselleebarle04/opencog,kinoc/opencog,inflector/opencog,cosmoharrigan/opencog,yantrabuddhi/opencog,eddiemonroe/opencog,TheNameIsNigel/opencog,shujingke/opencog,sanuj/opencog,zhaozengguang/opencog,ArvinPan/opencog,Selameab/opencog,Selameab/opencog,roselleebarle04/opencog,inflector/opencog,iAMr00t/opencog,kim135797531/opencog,TheNameIsNigel/opencog,zhaozengguang/opencog,andre-senna/opencog,ArvinPan/opencog,cosmoharrigan/atomspace,sumitsourabh/opencog,gavrieltal/opencog,TheNameIsNigel/opencog,gaapt/opencog,Selameab/opencog,zhaozengguang/opencog,cosmoharrigan/opencog,andre-senna/opencog,Tiggels/opencog,cosmoharrigan/atomspace,TheNameIsNigel/opencog,shujingke/opencog,yantrabuddhi/atomspace,yantrabuddhi/opencog,eddiemonroe/atomspace,cosmoharrigan/atomspace,rodsol/atomspace,yantrabuddhi/opencog,eddiemonroe/opencog,misgeatgit/atomspace,williampma/atomspace,MarcosPividori/atomspace,eddiemonroe/opencog,eddiemonroe/opencog,williampma/opencog,MarcosPividori/atomspace,eddiemonroe/atomspace,cosmoharrigan/atomspace,rodsol/atomspace,printedheart/atomspace,sumitsourabh/opencog,AmeBel/atomspace,TheNameIsNigel/opencog,gaapt/opencog,Selameab/opencog,Selameab/opencog,zhaozengguang/opencog,printedheart/atomspace,jswiergo/atomspace,ruiting/opencog,AmeBel/atomspace,TheNameIsNigel/opencog,ruiting/opencog,rohit12/atomspace,yantrabuddhi/atomspace,rTreutlein/atomspace,andre-senna/opencog,jlegendary/opencog,rohit12/opencog,prateeksaxena2809/opencog,rohit12/opencog,rohit12/atomspace,kim135797531/opencog,ceefour/atomspace,sumitsourabh/opencog,tim777z/opencog,yantrabuddhi/atomspace,rTreutlein/atomspace,andre-senna/opencog,rohit12/atomspace,gavrieltal/opencog,yantrabuddhi/opencog,kinoc/opencog,andre-senna/opencog,jlegendary/opencog,rohit12/opencog,sanuj/opencog,rohit12/atomspace,misgeatgit/atomspace,ceefour/atomspace,sumitsourabh/opencog,iAMr00t/opencog,gavrieltal/opencog,kim135797531/opencog,TheNameIsNigel/opencog,ruiting/opencog,AmeBel/atomspace,TheNameIsNigel/opencog,ruiting/opencog,andre-senna/opencog,jlegendary/opencog,rohit12/opencog,gaapt/opencog,iAMr00t/opencog,gavrieltal/opencog,yantrabuddhi/opencog,kinoc/opencog,andre-senna/opencog,jlegendary/opencog,rohit12/opencog,ceefour/opencog,gaapt/opencog,iAMr00t/opencog,gavrieltal/opencog,yantrabuddhi/opencog,ceefour/opencog,virneo/opencog,rohit12/opencog,ceefour/opencog,gaapt/opencog,kinoc/opencog,rohit12/atomspace,rohit12/opencog,rohit12/atomspace,rohit12/opencog,ruiting/opencog,tim777z/opencog,yantrabuddhi/atomspace,ceefour/atomspace,misgeatgit/atomspace,misgeatgit/opencog,printedheart/atomspace,virneo/atomspace,UIKit0/atomspace,rodsol/opencog,misgeatgit/opencog,shujingke/opencog,zhaozengguang/opencog,kinoc/opencog,roselleebarle04/opencog,ceefour/opencog,ceefour/atomspace,roselleebarle04/opencog,shujingke/opencog,MarcosPividori/atomspace,AmeBel/opencog,misgeatgit/opencog,jswiergo/atomspace,eddiemonroe/opencog,rodsol/opencog,kinoc/opencog,rodsol/opencog,AmeBel/opencog,printedheart/opencog,prateeksaxena2809/opencog,tim777z/opencog,anitzkin/opencog,ArvinPan/opencog,inflector/atomspace,anitzkin/opencog,inflector/opencog,misgeatgit/opencog,ArvinPan/opencog,inflector/opencog,inflector/opencog,yantrabuddhi/opencog,ruiting/opencog,prateeksaxena2809/opencog,Allend575/opencog,misgeatgit/atomspace,Tiggels/opencog,TheNameIsNigel/opencog,eddiemonroe/atomspace,williampma/atomspace,AmeBel/opencog,AmeBel/atomspace,rTreutlein/atomspace,Selameab/opencog,Selameab/opencog,yantrabuddhi/atomspace,jlegendary/opencog,gavrieltal/opencog,ceefour/opencog,eddiemonroe/opencog,yantrabuddhi/opencog,Selameab/atomspace,andre-senna/opencog,eddiemonroe/opencog,shujingke/opencog,AmeBel/atomspace,ruiting/opencog,jlegendary/opencog,AmeBel/opencog,shujingke/opencog,Tiggels/opencog,printedheart/atomspace,jlegendary/opencog,AmeBel/opencog,kim135797531/opencog,printedheart/opencog,anitzkin/opencog,rodsol/opencog,cosmoharrigan/opencog,ArvinPan/atomspace,gavrieltal/opencog,cosmoharrigan/atomspace,sumitsourabh/opencog,gaapt/opencog,kinoc/opencog,ArvinPan/atomspace,iAMr00t/opencog,kim135797531/opencog,Allend575/opencog,rTreutlein/atomspace,gaapt/opencog,anitzkin/opencog,tim777z/opencog,ruiting/opencog,yantrabuddhi/opencog,sumitsourabh/opencog,williampma/opencog,sanuj/opencog,jswiergo/atomspace,shujingke/opencog,zhaozengguang/opencog,anitzkin/opencog,Selameab/atomspace,printedheart/opencog,misgeatgit/opencog,virneo/atomspace,UIKit0/atomspace,virneo/opencog,UIKit0/atomspace,williampma/atomspace,AmeBel/opencog,sumitsourabh/opencog,inflector/atomspace,williampma/opencog | opencog/python/pln/examples/relex2logic/evaluation_to_member_example.py | opencog/python/pln/examples/relex2logic/evaluation_to_member_example.py | """
For running evaluation_to_member_agent.py without the cogserver
"""
from __future__ import print_function
from pln.examples.relex2logic import evaluation_to_member_agent
from opencog.atomspace import types, AtomSpace, TruthValue
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
from pln.examples.interactive_agent import InteractiveAgent
__author__ = 'Cosmo Harrigan'
atomspace = AtomSpace()
__init__(atomspace)
coreTypes = "opencog/atomspace/core_types.scm"
utilities = "opencog/scm/utilities.scm"
#data = "opencog/python/pln/examples/relex2logic/evaluation-to-member.scm"
data = "opencog/python/pln/examples/relex2logic/r2l-outpust-test.scm"
for item in [coreTypes, utilities, data]:
load_scm(atomspace, item)
agent = InteractiveAgent(atomspace=atomspace,
agent=evaluation_to_member_agent.EvaluationToMemberAgent(),
num_steps=200,
print_starting_contents=True)
agent.run()
| """
For running evaluation_to_member_agent.py without the cogserver
"""
from __future__ import print_function
from pln.examples.relex2logic import evaluation_to_member_agent
from opencog.atomspace import types, AtomSpace, TruthValue
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
from pln.examples.interactive_agent import InteractiveAgent
__author__ = 'Cosmo Harrigan'
atomspace = AtomSpace()
__init__(atomspace)
coreTypes = "opencog/atomspace/core_types.scm"
utilities = "opencog/scm/utilities.scm"
data = "opencog/python/pln/examples/relex2logic/evaluation-to-member.scm"
for item in [coreTypes, utilities, data]:
load_scm(atomspace, item)
agent = InteractiveAgent(atomspace=atomspace,
agent=evaluation_to_member_agent.EvaluationToMemberAgent(),
num_steps=200,
print_starting_contents=True)
agent.run()
| agpl-3.0 | Python |
7b1b323da3e2fcf79c0bff36d6e0665d1b88e74e | Fix stupid typo... | editorsnotes/editorsnotes,editorsnotes/editorsnotes | editorsnotes/auth/utils.py | editorsnotes/auth/utils.py | from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.utils.http import urlsafe_base64_encode
def send_activation_email(request, user):
b64uid = urlsafe_base64_encode(str(user.id))
token_generator = PasswordResetTokenGenerator()
token = token_generator.make_token(user)
site_url = '{protocol}://{site}'.format(
protocol='https' if request.is_secure() else 'http',
        site=settings.SITE_URL
)
if user.is_active:
raise Exception('Will not send activation key to active user')
send_mail(
'Activate your Editors\' Notes account',
'This email was used to create an account at {site_url}.\n\n'
'To activate your account, visit the following link:\n\n'
'\t{site_url}{activation_url}\n\n'
'If you did not request an account, please ignore this email.'.format(
site_url=site_url,
activation_url=reverse('auth:activate_account', args=[b64uid, token]),
activation_token=token),
settings.SITE_EMAIL,
[user.email]
)
| from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.utils.http import urlsafe_base64_encode
def send_activation_email(request, user):
b64uid = urlsafe_base64_encode(str(user.id))
token_generator = PasswordResetTokenGenerator()
token = token_generator.make_token(user)
site_url = '{protocol}://{site}'.format(
protocol='https' if request.is_secure() else 'http',
        site=settings.SITE_URL
)
if user.is_active:
raise Exception('Will not send activation key to active user')
send_mail(
'Activate your Editors\' Notes account',
'This email was used to create an account at {site_url}.\n\n'
'To activate your account, visit the following link:\n\n'
'\t{site_url}{activation_url}\n\n'
'If you did not request an account, please ignore this email.'.format(
site_url=site_url,
activation_url=reverse('auth:activate_account', args=[b64uid, token]),
activation_token=token),
settings.SITE_EMAIL,
[user.email]
)
| agpl-3.0 | Python |
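The matching verification step for the link generated above, sketched as the core of a hypothetical activation view; check_token() pairs with make_token():

from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.utils.http import urlsafe_base64_decode

def activate(user_model, uidb64, token):
    uid = urlsafe_base64_decode(uidb64).decode()
    user = user_model.objects.get(id=uid)
    if PasswordResetTokenGenerator().check_token(user, token):
        user.is_active = True
        user.save()
    return user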
9359d64c56c1735544c2e2e77b160bb14048b497 | use polling_district_id | andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_salford.py | polling_stations/apps/data_collection/management/commands/import_salford.py | """
Import Salford
"""
import sys
from django.contrib.gis.geos import Point
from data_collection.management.commands import BaseShpImporter
class Command(BaseShpImporter):
"""
Imports the Polling Station data from Salford
"""
council_id = 'E08000006'
districts_name = 'Salford_Polling_Districts'
stations_name = 'Polling_Stations.csv'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[0],
}
def station_record_to_dict(self, record):
try:
location = Point(int(record.easting), int(record.northing), srid=self.srid)
except ValueError:
location = Point(float(record.easting), float(record.northing), srid=self.srid)
return {
'internal_council_id': record.id,
'postcode' : record.location.split(',')[-1],
'address' : "\n".join(record.location.split(',')[:-1]),
'location' : location,
'polling_district_id': record.polling_district_code
}
| """
Import Salford
"""
import sys
from django.contrib.gis.geos import Point
from data_collection.management.commands import BaseShpImporter
class Command(BaseShpImporter):
"""
Imports the Polling Station data from Salford
"""
council_id = 'E08000006'
districts_name = 'Salford_Polling_Districts'
stations_name = 'Polling_Stations.csv'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[0],
}
def station_record_to_dict(self, record):
try:
location = Point(int(record.easting), int(record.northing), srid=self.srid)
except ValueError:
location = Point(float(record.easting), float(record.northing), srid=self.srid)
return {
'internal_council_id': record.id,
'postcode' : record.location.split(',')[-1],
'address' : "\n".join(record.location.split(',')[:-1]),
'location' : location
}
| bsd-3-clause | Python |
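The eastings/northings consumed above are presumably British National Grid coordinates. A small GeoDjango sketch of building such a point and reprojecting it to WGS84 (the coordinates are hypothetical):

from django.contrib.gis.geos import Point

location = Point(383819, 398999, srid=27700)  # OSGB36 / British National Grid
location.transform(4326)                      # reproject to WGS84
print(location.x, location.y)                 # lon, lat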
cd48c66406c39ca6dd6bdc6ba7c2be0df623e6ae | Fix return codes for check recipient | meskio/leap_mx,meskio/leap_mx,leapcode/leap_mx,micah/leap_mx,leapcode/leap_mx,micah/leap_mx | src/leap/mx/check_recipient_access.py | src/leap/mx/check_recipient_access.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
| agpl-3.0 | Python |
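What the suggested postmap test speaks on the wire: Postfix's tcp_table protocol is line-based, "get KEY\n" in and "CODE TEXT\n" back. A tiny client sketch against the server above (address and key are hypothetical):

import socket

s = socket.create_connection(('localhost', 2244))
s.sendall(b'get foo@example.org\n')
print(s.recv(1024))   # e.g. b'200 OK\n' or b'500 REJECT\n'
s.close()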
3d53d3fe37e36a8f05a8cae27f8aa138b063fcd6 | add docstrings (#937) | TresAmigosSD/SMV,TresAmigosSD/SMV,TresAmigosSD/SMV,TresAmigosSD/SMV | src/main/python/smv/py4j_interface.py | src/main/python/smv/py4j_interface.py | # This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
def create_py4j_interface_method(interface_method_name, impl_method_name):
"""Create a method that implements a Java interface for Py4J callback
Given the name of a method which needs to be called from Scala code
(e.g. SmvModule.doRun), creates a wrapper for the method which returns
an appropriate SmvPy4JResponse object. For example, ISmvModule declares
getDoRun which should call doRun and return an SmvPy4JResponse object
indicating success or failure. SmvDataSet implements doRun, then calls
create_py4j_interface_method to create getDoRun. See SmvDataSet for more
examples.
Args:
interface_method_name (str): name of the interface method (e.g. getDoRun)
impl_method_name (str): name of the implementation of the interface method (e.g. doRun)
Returns:
(method): new interface method
"""
def interface_method(obj, *args):
impl_method = getattr(obj, impl_method_name)
try:
result = impl_method(*args)
response = SmvPy4JValidResponse(result)
except Exception as e:
error = traceback.format_exc()
response = SmvPy4JErrorResponse(error)
return response
interface_method.__name__ = interface_method_name
return interface_method
class SmvPy4JResponse(object):
"""A response to a Py4J callback from Scala code
A tiny container for results and errors from running a callback.
"""
def __init__(self, _successful, _result, _error):
self._successful = _successful
self._result = _result
self._error = _error
def successful(self):
"""True if there was no error, False otherwise
"""
return self._successful
def result(self):
"""The result of the callback (if it was successful)
"""
return self._result
def error(self):
"""The error message produced by the callback (if it wasn't successful)
"""
return self._error
class Java:
implements = ["org.tresamigos.smv.IPythonResponsePy4J"]
class SmvPy4JErrorResponse(SmvPy4JResponse):
"""An SmvPy4JResponse for error responses
"""
def __init__(self, error):
super(SmvPy4JErrorResponse, self).__init__(False, None, error)
class SmvPy4JValidResponse(SmvPy4JResponse):
"""An SmvPy4JResponse for successful responses
"""
def __init__(self, result):
super(SmvPy4JValidResponse, self).__init__(True, result, None)
| # This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
def create_py4j_interface_method(interface_method_name, impl_method_name):
def interface_method(obj, *args):
impl_method = getattr(obj, impl_method_name)
try:
result = impl_method(*args)
response = SmvPy4JValidResponse(result)
except Exception as e:
error = traceback.format_exc()
response = SmvPy4JErrorResponse(error)
return response
interface_method.__name__ = interface_method_name
return interface_method
class SmvPy4JResponse(object):
def __init__(self, _successful, _result, _error):
self._successful = _successful
self._result = _result
self._error = _error
def successful(self):
return self._successful
def result(self):
return self._result
def error(self):
return self._error
class Java:
implements = ["org.tresamigos.smv.IPythonResponsePy4J"]
class SmvPy4JErrorResponse(SmvPy4JResponse):
def __init__(self, error):
super(SmvPy4JErrorResponse, self).__init__(False, None, error)
class SmvPy4JValidResponse(SmvPy4JResponse):
def __init__(self, result):
super(SmvPy4JValidResponse, self).__init__(True, result, None)
| apache-2.0 | Python |
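A toy usage sketch for create_py4j_interface_method(), mirroring the docstring above (the Toy class is hypothetical):

from smv.py4j_interface import create_py4j_interface_method

class Toy(object):
    def run(self, x):
        return x * 2

Toy.getRun = create_py4j_interface_method('getRun', 'run')

resp = Toy().getRun(21)
assert resp.successful() and resp.result() == 42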
7f32ca34ec26174e10b5b8de65883b39293c3ed0 | Update assignment5.py | LamaHamadeh/Microsoft-DAT210x | Module-3/assignment5.py | Module-3/assignment5.py | '''
author: Lama Hamadeh
'''
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
from pandas.tools.plotting import andrews_curves
# Look pretty...
matplotlib.style.use('ggplot')
wheat_dataset=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module3/Datasets/wheat.data',index_col = 0)
'''
New_wheat_dataset=wheat_dataset.drop(['area', 'perimeter'], axis=1) #deleting columns by labels
data = load_iris()
# Parallel Coordinates Start Here:
plt.figure()
andrews_curves(New_wheat_dataset, 'wheat_type',alpha= 0.4)
plt.show()
'''
matplotlib.style.use('ggplot')
wheat_dataset=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module3/Datasets/wheat.data',index_col = 0)
# Parallel Coordinates Start Here:
plt.figure()
andrews_curves(wheat_dataset, 'wheat_type',alpha= 0.4)
plt.show()
| #
# This code is intentionally missing!
# Read the directions on the course lab page!
#
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
from pandas.tools.plotting import parallel_coordinates
# Look pretty...
matplotlib.style.use('ggplot')
wheat_dataset=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module3/Datasets/wheat.data',index_col = 0)
'''
New_wheat_dataset=wheat_dataset.drop(['area', 'perimeter'], axis=1) #deleting columns by labels
data = load_iris()
# Parallel Coordinates Start Here:
plt.figure()
andrews_curves(New_wheat_dataset, 'wheat_type',alpha= 0.4)
plt.show()
'''
matplotlib.style.use('ggplot')
wheat_dataset=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module3/Datasets/wheat.data',index_col = 0)
data = load_iris()
# Parallel Coordinates Start Here:
plt.figure()
andrews_curves(wheat_dataset, 'wheat_type',alpha= 0.4)
plt.show() | mit | Python |
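A runnable distillation of the intent above: Andrews curves over the wheat dataset, with the matching import (pandas < 0.20 module path, as used in the assignment):

import pandas as pd
import matplotlib.pyplot as plt
from pandas.tools.plotting import andrews_curves

df = pd.read_csv('Datasets/wheat.data', index_col=0)
plt.figure()
andrews_curves(df, 'wheat_type', alpha=0.4)
plt.show()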
7888242104e9f14b4e8a926e37aa793a60c36f10 | Implement files.put/files.template | Fizzadar/pyinfra,Fizzadar/pyinfra | pyinfra/modules/files.py | pyinfra/modules/files.py | # pyinfra
# File: pyinfra/modules/file.py
# Desc: manage files/templates <> server
from cStringIO import StringIO
from jinja2 import Template
from pyinfra.api import operation
@operation
def put(local_file, remote_file):
'''Copy a local file to the remote system.'''
# Just load the local file
local_file = open(local_file, 'r')
return [(local_file, remote_file)]
@operation
def template(template_file, remote_file, **data):
'''Generate a template and write it to the remote system.'''
# Load the template from file
template_file = open(template_file, 'r')
template = Template(template_file.read())
# Render and make file-like it's output
output = template.render(data)
output_file = StringIO(output)
return [(output_file, remote_file)]
| # pyinfra
# File: pyinfra/modules/file.py
# Desc: manage files/templates <> server
from pyinfra.api import operation
@operation
def put(local_file, remote_file):
'''[Not implemented] Copy a local file to the remote system.'''
pass
@operation
def template(template_name, remote_file, **data):
'''[Not implemented]Generate a template and write it to the remote system.'''
pass
| mit | Python |
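The core of files.template() above, runnable on its own: render a Jinja2 template and wrap the result as a file-like object (the template string is hypothetical):

from cStringIO import StringIO
from jinja2 import Template

template = Template('listen_port = {{ port }}')
output_file = StringIO(template.render(port=8080))
print(output_file.read())   # -> listen_port = 8080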
da24aa8c97713a1ba5b5735aa169f8cf050fe88a | add function(image to csv). | CORDEA/deeplearning-tutorials,CORDEA/deeplearning-tutorials | pylearn2/convertImage.py | pylearn2/convertImage.py | #!/usr/bin/env python
# encoding:utf-8
#
# Copyright [2015] [Yoshihiro Tanaka]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__Author__ = "Yoshihiro Tanaka"
__date__ = "2015-01-22"
import numpy
from PIL import Image, ImageOps
import os, sys
_OUTDIR = "out"
_SIZE = 128, 128
if not os.path.exists(_OUTDIR + "/"):
os.system("mkdir " + _OUTDIR)
ls = os.listdir("in/")
with open(sys.argv[1]) as f:
for filename in ls:
input_image = Image.open("in/" + filename)
resize_image = input_image.resize(_SIZE)
output_image = ImageOps.grayscale(resize_image)
# output_image.save(_OUTDIR + "/" + filename)
data = ' '.join([str(r) for r in (numpy.asarray(output_image).flatten() / 255.0).tolist()])
f.write(filename + ',' + data + '\n')
| #!/usr/bin/env python
# encoding:utf-8
#
# Copyright [2015] [Yoshihiro Tanaka]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__Author__ = "Yoshihiro Tanaka"
__date__ = "2015-01-22"
from PIL import Image
from PIL import ImageOps
import os, sys
_OUTDIR = "out"
_SIZE = 128, 128
if not os.path.exists(_OUTDIR + "/"):
os.system("mkdir " + _OUTDIR)
ls = os.listdir("in/")
for filename in ls:
input_image = Image.open("in/" + filename)
resize_image = input_image.resize(_SIZE)
output_image = ImageOps.grayscale(resize_image)
output_image.save(_OUTDIR + "/" + filename)
| apache-2.0 | Python |
368306261e2d206d4dfad8f647a7d459462631a4 | Optimize number 37: x in set() faster than x in list() | dhermes/project-euler,dhermes/project-euler,dhermes/project-euler | python/too_slow/no037.py | python/too_slow/no037.py | #!/usr/bin/env python
# The number 3797 has an interesting property. Being prime itself,
# it is possible to continuously remove digits from left to right,
# and remain prime at each stage:
# 3797, 797, 97, and 7.
# Similarly we can work from right to left: 3797, 379, 37, and 3.
# Find the sum of the only eleven primes that are both truncatable from
# left to right and right to left.
# NOTE: 2, 3, 5, and 7 are not considered to be truncatable primes.
import copy
from python.decorators import euler_timer
from python.functions import is_prime
from python.functions import sieve
def truncated_list(n, from_left):
if from_left:
digs = [dig for dig in str(n)]
return [int("".join(digs[i:])) for i in range(len(digs))]
    # If the bool from_left is false, we truncate from the right
else:
digs = [dig for dig in str(n)]
return [int("".join(digs[:i + 1])) for i in range(len(digs))]
def truncated_all(n):
return list(set(truncated_list(n, True) + truncated_list(n, False)))
def is_truncatable_prime(n, primes):
candidates = truncated_all(n)
for candidate in candidates:
if candidate in primes:
continue
elif is_prime(candidate):
primes.add(candidate)
else:
return False
return True
def find_first_n_truncatable(n, max_n):
result = []
primes = set(sieve(max_n)[4:]) # We don't include 2, 3, 5, or 7
for prime in copy.copy(primes):
if is_truncatable_prime(prime, primes):
result.append(prime)
if len(result) == n:
return result
if len(result) < n:
raise Exception("Not enough found, raise max_n")
return result
def main(verbose=False):
ans = find_first_n_truncatable(11, 10**6)
if verbose:
return "%s.\nThe primes are: %s." % (
sum(ans), ", ".join(str(prime) for prime in ans))
else:
return sum(ans)
if __name__ == '__main__':
print euler_timer(37)(main)(verbose=True)
| #!/usr/bin/env python
# The number 3797 has an interesting property. Being prime itself,
# it is possible to continuously remove digits from left to right,
# and remain prime at each stage:
# 3797, 797, 97, and 7.
# Similarly we can work from right to left: 3797, 379, 37, and 3.
# Find the sum of the only eleven primes that are both truncatable from
# left to right and right to left.
# NOTE: 2, 3, 5, and 7 are not considered to be truncatable primes.
from python.decorators import euler_timer
from python.functions import is_prime
from python.functions import sieve
def truncated_list(n, from_left):
if from_left:
digs = [dig for dig in str(n)]
return [int("".join(digs[i:])) for i in range(len(digs))]
    # Otherwise (from_left is False) truncate from the right
else:
digs = [dig for dig in str(n)]
return [int("".join(digs[:i + 1])) for i in range(len(digs))]
def truncated_all(n):
return list(set(truncated_list(n, True) + truncated_list(n, False)))
def is_truncatable_prime(n, primes):
candidates = truncated_all(n)
for candidate in candidates:
if candidate in primes:
continue
elif is_prime(candidate):
primes.append(candidate)
else:
return False
return True
def find_first_n_truncatable(n, max_n):
result = []
primes = sieve(max_n)[4:] # We don't include 2, 3, 5, or 7
for prime in primes:
if is_truncatable_prime(prime, primes):
result.append(prime)
if len(result) == n:
return result
if len(result) < n:
raise Exception("Not enough found, raise max_n")
return result
def main(verbose=False):
ans = find_first_n_truncatable(11, 10**6)
if verbose:
return "%s.\nThe primes are: %s." % (
sum(ans), ", ".join(str(prime) for prime in ans))
else:
return sum(ans)
if __name__ == '__main__':
print euler_timer(37)(main)(verbose=True)
| apache-2.0 | Python |
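A standalone timing sketch of the optimization named in the subject above; list membership is an O(n) linear scan while set membership is an average O(1) hash lookup. The container size and probe value are arbitrary.

```python
import timeit

items = list(range(100000))
as_set = set(items)

# Worst case for the list: the probed value is the last element.
print(timeit.timeit("99999 in items", globals=globals(), number=100))   # slow
print(timeit.timeit("99999 in as_set", globals=globals(), number=100))  # fast
```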
3ac8788c9090b5655676b239db826ef5da3544f4 | Update version to 0.11.0 | artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service | storage_service/storage_service/__init__.py | storage_service/storage_service/__init__.py | __version__ = '0.11.0'
| __version__ = '0.10.0'
| agpl-3.0 | Python |
0675090877483bcbbfbc7b3d7ef8ed3f97085f8c | Add python_2_unicode_compatible | zooming-tan/Project-AENEAS,zooming-tan/Project-AENEAS,zooming-tan/Project-AENEAS,zooming-tan/Project-AENEAS | Project-AENEAS/issues/models.py | Project-AENEAS/issues/models.py | """Mini Issue Tracker program. Originally taken from Paul Bissex's blog post:
http://news.e-scribe.com/230 and snippet: http://djangosnippets.org/snippets/28/
"""
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
STATUS_CODES = (
(1, _('Open')),
(2, _('Working')),
(3, _('Closed')),
)
PRIORITY_CODES = (
(1, _('Now')),
(2, _('Soon')),
(3, _('Someday')),
)
apps = [app for app in settings.INSTALLED_APPS if not app.startswith('django.')]
@python_2_unicode_compatible
class Ticket(models.Model):
"""Trouble tickets"""
title = models.CharField(_('title'), max_length=100)
project = models.CharField(_('project'), blank=True, max_length=100, choices=list(enumerate(apps)))
submitted_date = models.DateField(_('date submitted'), auto_now_add=True)
modified_date = models.DateField(_('date modified'), auto_now=True)
submitter = models.ForeignKey(User, verbose_name=_('submitter'), related_name="submitter")
assigned_to = models.ForeignKey(User, verbose_name=_('assigned to'))
description = models.TextField(_('description'), blank=True)
status = models.IntegerField(_('status'), default=1, choices=STATUS_CODES)
priority = models.IntegerField(_('priority'), default=1, choices=PRIORITY_CODES)
class Meta:
verbose_name = _('ticket')
verbose_name_plural = _('tickets')
ordering = ('status', 'priority', 'submitted_date', 'title')
def __str__(self):
return self.title
| """Mini Issue Tracker program. Originally taken from Paul Bissex's blog post:
http://news.e-scribe.com/230 and snippet: http://djangosnippets.org/snippets/28/
"""
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
STATUS_CODES = (
(1, _('Open')),
(2, _('Working')),
(3, _('Closed')),
)
PRIORITY_CODES = (
(1, _('Now')),
(2, _('Soon')),
(3, _('Someday')),
)
apps = [app for app in settings.INSTALLED_APPS if not app.startswith('django.')]
class Ticket(models.Model):
"""Trouble tickets"""
title = models.CharField(_('title'), max_length=100)
project = models.CharField(_('project'), blank=True, max_length=100, choices=list(enumerate(apps)))
submitted_date = models.DateField(_('date submitted'), auto_now_add=True)
modified_date = models.DateField(_('date modified'), auto_now=True)
submitter = models.ForeignKey(User, verbose_name=_('submitter'), related_name="submitter")
assigned_to = models.ForeignKey(User, verbose_name=_('assigned to'))
description = models.TextField(_('description'), blank=True)
status = models.IntegerField(_('status'), default=1, choices=STATUS_CODES)
priority = models.IntegerField(_('priority'), default=1, choices=PRIORITY_CODES)
class Meta:
verbose_name = _('ticket')
verbose_name_plural = _('tickets')
ordering = ('status', 'priority', 'submitted_date', 'title')
def __unicode__(self):
return self.title
| bsd-3-clause | Python |
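For context, a minimal sketch (outside the repository) of what `python_2_unicode_compatible` buys: under Python 2 the decorator renames `__str__` to `__unicode__` and installs a `__str__` that encodes to UTF-8, so a single definition serves both interpreters. It assumes an older Django release (the helper was removed in Django 3.0; `six` ships an equivalent).

```python
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class Label(object):
    def __init__(self, title):
        self.title = title

    def __str__(self):  # written once; usable on both Python 2 and 3
        return self.title


print(Label(u"ticket"))
```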
14c2019aa5bf716cab0f2ed2b97bbc32d8c74a81 | Improve parameters of sync_suricate command | GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek | geotrek/feedback/management/commands/sync_suricate.py | geotrek/feedback/management/commands/sync_suricate.py | from django.core.management.base import BaseCommand
from geotrek.feedback.parsers import SuricateParser
class Command(BaseCommand):
leave_locale_alone = True
def add_arguments(self, parser):
# parser.add_argument('-l', dest='limit', type=int, help='Limit number of lines to import')
# todo
        parser.add_argument(
            "--activities_only", dest="activities_only", action="store_true",
            help="Import activities but no statuses nor alerts", default=False
        )
        parser.add_argument(
            "--statuses_only", dest="statuses_only", action="store_true",
            help="Import statuses but no activities nor alerts", default=False
        )
        # TODO: documents
def handle(self, *args, **options):
parser = SuricateParser()
        if options["statuses_only"] and options["activities_only"]:
            parser.get_statuses()
            parser.get_activities()
        elif options["statuses_only"]:
            parser.get_statuses()
        elif options["activities_only"]:
            parser.get_activities()
else:
parser.get_statuses()
parser.get_activities()
parser.get_alerts()
| from django.core.management.base import BaseCommand
from geotrek.feedback.parsers import SuricateParser
class Command(BaseCommand):
leave_locale_alone = True
def add_arguments(self, parser):
# parser.add_argument('-l', dest='limit', type=int, help='Limit number of lines to import')
# todo
parser.add_argument(
"--activities", dest="activities", help="Import activities", default=True
)
parser.add_argument(
"--statuses", dest="statuses", help="Import statuses", default=True
)
def handle(self, *args, **options):
parser = SuricateParser()
if options["statuses"]:
parser.get_statuses()
if options["activities"]:
parser.get_activities()
| bsd-2-clause | Python |
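A standalone sketch of the flag style used above once `action="store_true"` is in place: the option takes no value on the command line, defaults to False, and flips to True when present.

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--statuses_only", action="store_true")
parser.add_argument("--activities_only", action="store_true")

opts = parser.parse_args(["--statuses_only"])  # simulate a command line
print(opts.statuses_only, opts.activities_only)  # True False
```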
e78bd17a742961368c4b026be5a447e3d234c870 | Update tensorflow10_def_add_layer.py | wangwei7175878/tutorials | tensorflowTUT/tensorflow10_def_add_layer.py | tensorflowTUT/tensorflow10_def_add_layer.py | # View more python learning tutorial on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
import tensorflow as tf
def add_layer(inputs, in_size, out_size, activation_function=None):
Weights = tf.Variable(tf.random_normal([in_size, out_size]))
biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
Wx_plus_b = tf.matmul(inputs, Weights) + biases
if activation_function is None:
outputs = Wx_plus_b
else:
outputs = activation_function(Wx_plus_b)
return outputs
| import tensorflow as tf
def add_layer(inputs, in_size, out_size, activation_function=None):
Weights = tf.Variable(tf.random_normal([in_size, out_size]))
biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
Wx_plus_b = tf.matmul(inputs, Weights) + biases
if activation_function is None:
outputs = Wx_plus_b
else:
outputs = activation_function(Wx_plus_b)
return outputs
| mit | Python |
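A hypothetical usage sketch for the `add_layer` helper above, assuming TensorFlow 1.x and that `add_layer` is in scope; the layer sizes are arbitrary.

```python
import numpy as np
import tensorflow as tf

xs = tf.placeholder(tf.float32, [None, 1])  # batch of 1-d inputs
hidden = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
prediction = add_layer(hidden, 10, 1)  # linear output layer

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    x = np.linspace(-1, 1, 5, dtype=np.float32).reshape(-1, 1)
    print(sess.run(prediction, feed_dict={xs: x}))
```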
44629d5a6d3b76e721aa0cf11041924f5a088d6e | fix output values | FRC-1123/frc2017-1123,FRC-1123/frc2017-1123,FRC-1123/frc2017-1123,FRC-1123/frc2017-1123 | robot/robot.py | robot/robot.py | #!/usr/bin/env python3
import wpilib
import ctre
class Robot(wpilib.IterativeRobot):
def robotInit(self):
"""
This function is called upon program startup and
should be used for any initialization code.
"""
left_motor = ctre.CANTalon(0)
right_motor = ctre.CANTalon(1)
self.robot_drive = wpilib.RobotDrive(left_motor, right_motor)
self.robot_drive.setMaxOutput(2)
self.stick = wpilib.Joystick(0)
self.controller = wpilib.XboxController(0)
self.printTimer = wpilib.Timer()
self.printTimer.start()
def autonomousInit(self):
"""This function is run once each time the robot enters autonomous mode."""
self.auto_loop_counter = 0
def autonomousPeriodic(self):
"""This function is called periodically during autonomous."""
# Check if we've completed 100 loops (approximately 2 seconds)
if self.auto_loop_counter < 100:
self.robot_drive.drive(-0.5, 0) # Drive forwards at half speed
self.auto_loop_counter += 1
else:
self.robot_drive.drive(0, 0) # Stop robot
def teleopPeriodic(self):
"""This function is called periodically during operator control."""
self.robot_drive.tankDrive(self.stick, 5, self.stick, 1, True) # 5 and 1 are left and right joystick axes, respectively
if self.controller.getAButton():
self.robot_drive.drive(-.5, 0) # move forward slowly
if self.controller.getXButton(): # turn in place
self.robot_drive.setLeftRightMotorOutputs(-.5, .5)
elif self.controller.getYButton(): # turn in place
self.robot_drive.setLeftRightMotorOutputs(.5, -.5)
def testPeriodic(self):
"""This function is called periodically during test mode."""
wpilib.LiveWindow.run()
if __name__ == "__main__":
wpilib.run(Robot) | #!/usr/bin/env python3
import wpilib
import ctre
class Robot(wpilib.IterativeRobot):
def robotInit(self):
"""
This function is called upon program startup and
should be used for any initialization code.
"""
left_motor = ctre.CANTalon(0)
right_motor = ctre.CANTalon(1)
self.robot_drive = wpilib.RobotDrive(left_motor, right_motor)
self.robot_drive.setMaxOutput(.5)
self.stick = wpilib.Joystick(0)
self.controller = wpilib.XboxController(0)
self.printTimer = wpilib.Timer()
self.printTimer.start()
def autonomousInit(self):
"""This function is run once each time the robot enters autonomous mode."""
self.auto_loop_counter = 0
def autonomousPeriodic(self):
"""This function is called periodically during autonomous."""
# Check if we've completed 100 loops (approximately 2 seconds)
if self.auto_loop_counter < 100:
self.robot_drive.drive(-0.5, 0) # Drive forwards at half speed
self.auto_loop_counter += 1
else:
self.robot_drive.drive(0, 0) # Stop robot
def teleopPeriodic(self):
"""This function is called periodically during operator control."""
self.robot_drive.tankDrive(self.stick, 1, self.stick, 5) # 1 and 5 are left and right joystick axes
if self.controller.getAButton():
self.robot_drive.drive(-.5, 0) # move forward slowly
if self.controller.getXButton():
self.robot_drive.setLeftRightMotorOutputs(-.5, -.5)
elif self.controller.getYButton():
self.robot_drive.setLeftRightMotorOutputs(.5, .5)
def testPeriodic(self):
"""This function is called periodically during test mode."""
wpilib.LiveWindow.run()
if __name__ == "__main__":
wpilib.run(Robot)
| mit | Python |
99553ec68046962cda61c4296e6345180f377780 | fix serialization bug | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | soil/views.py | soil/views.py | import uuid
from datetime import datetime
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.core.cache import cache
import logging
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from soil.tasks import demo_sleep
import json
from soil.heartbeat import get_file_heartbeat, get_cache_heartbeat,\
last_heartbeat
def _parse_date(string):
if isinstance(string, basestring):
return datetime.strptime(string, "%Y-%m-%d").date()
else:
return string
@login_required
def demo(request):
download_id = uuid.uuid4().hex
howlong = int(request.GET.get('secs', 5))
demo_sleep.delay(download_id, howlong)
return HttpResponseRedirect(reverse('retrieve_download', kwargs={'download_id': download_id}))
@login_required
def heartbeat_status(request):
return HttpResponse(json.dumps({"last_timestamp": str(last_heartbeat()),
"last_from_file": get_file_heartbeat(),
"last_from_cache": get_cache_heartbeat()}))
@login_required
def ajax_job_poll(request, download_id, template="soil/partials/dl_status.html"):
download_data = cache.get(download_id, None)
    if download_data is None:
        is_ready = False
    else:
        is_ready = True
context = RequestContext(request)
context['is_ready'] = is_ready
context['download_id'] = download_id
return render_to_response(template, context_instance=context)
@login_required
def retrieve_download(request, download_id, template="soil/file_download.html"):
"""
Retrieve a download that's waiting to be generated. If it is the get_file,
then download it, else, let the ajax on the page poll.
"""
context = RequestContext(request)
context['download_id'] = download_id
do_download = request.GET.has_key('get_file')
if do_download:
download = cache.get(download_id, None)
        if download is None:
logging.error("Download file request for expired/nonexistent file requested")
raise Http404
else:
return download.toHttpResponse()
return render_to_response(template, context_instance=context)
| import uuid
from datetime import datetime
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.core.cache import cache
import logging
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from soil.tasks import demo_sleep
import json
from soil.heartbeat import get_file_heartbeat, get_cache_heartbeat,\
last_heartbeat
def _parse_date(string):
if isinstance(string, basestring):
return datetime.strptime(string, "%Y-%m-%d").date()
else:
return string
@login_required
def demo(request):
download_id = uuid.uuid4().hex
howlong = int(request.GET.get('secs', 5))
demo_sleep.delay(download_id, howlong)
return HttpResponseRedirect(reverse('retrieve_download', kwargs={'download_id': download_id}))
@login_required
def heartbeat_status(request):
return HttpResponse(json.dumps({"last_timestamp": last_heartbeat(),
"last_from_file": get_file_heartbeat(),
"last_from_cache": get_cache_heartbeat()}))
@login_required
def ajax_job_poll(request, download_id, template="soil/partials/dl_status.html"):
download_data = cache.get(download_id, None)
    if download_data is None:
        is_ready = False
    else:
        is_ready = True
context = RequestContext(request)
context['is_ready'] = is_ready
context['download_id'] = download_id
return render_to_response(template, context_instance=context)
@login_required
def retrieve_download(request, download_id, template="soil/file_download.html"):
"""
Retrieve a download that's waiting to be generated. If it is the get_file,
then download it, else, let the ajax on the page poll.
"""
context = RequestContext(request)
context['download_id'] = download_id
do_download = request.GET.has_key('get_file')
if do_download:
download = cache.get(download_id, None)
        if download is None:
logging.error("Download file request for expired/nonexistent file requested")
raise Http404
else:
return download.toHttpResponse()
return render_to_response(template, context_instance=context)
| bsd-3-clause | Python |
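The bug fixed above is the classic one: `datetime` objects are not JSON serializable, so `json.dumps` raises `TypeError` until the timestamp is stringified. A standalone illustration:

```python
import json
from datetime import datetime

now = datetime.utcnow()
try:
    json.dumps({"last_timestamp": now})  # raises TypeError
except TypeError as exc:
    print(exc)

print(json.dumps({"last_timestamp": str(now)}))          # the fix applied above
print(json.dumps({"last_timestamp": now}, default=str))  # alternative: a default hook
```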
b013a84227adf2f25c09519762c2610ea8a500da | Test Unit | R3SWebDevelopment/HappyDogs,R3SWebDevelopment/HappyDogs,R3SWebDevelopment/HappyDogs,R3SWebDevelopment/HappyDogs | HappyDogs/HappyDogs/urls.py | HappyDogs/HappyDogs/urls.py | from django.conf.urls import include, url
from HappyDogs.apps.HappyDogs.views import happy_dogs_home
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^$', happy_dogs_home , name='index'),
url(r'^happy_dogs/', include('HappyDogs.apps.HappyDogs.urls')),
url(r'^rest/happy_dogs/', include('HappyDogs.apps.HappyDogs.rest_urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import include, url
urlpatterns = [
url(r'^happy_dogs/', include('HappyDogs.apps.HappyDogs.urls')),
url(r'^rest/happy_dogs/', include('HappyDogs.apps.HappyDogs.rest_urls')),
]
| mit | Python |
cf4e24ce50c535e91525465c65217f21b89448a6 | add example using unicode | kmaehashi/sensorbee-python | example/general_example.py | example/general_example.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from pysensorbee import SensorBeeAPI
class GeneralExample(object):
def main(self):
api = SensorBeeAPI()
print("Runtime Status:")
print(api.runtime_status())
print("Create Topology:")
print(api.create_topology('beepy_test'))
try:
print("List Topologies:")
print(api.topologies())
print("Show Topology:")
print(api.topology('beepy_test'))
print("Send CREATE SOURCE Query:")
print(api.query('beepy_test', 'CREATE SOURCE ns TYPE node_statuses;'))
print("List Sources:")
print(api.sources('beepy_test'))
print("List Streams:")
print(api.streams('beepy_test'))
print("List Sinks:")
print(api.sinks('beepy_test'))
print("Send EVAL Query:")
print(api.query('beepy_test', 'EVAL concat((1+2+3)::string, " -> ", "六");'))
print("Send SELECT Query:")
rs = api.query('beepy_test', 'SELECT RSTREAM * FROM ns [RANGE 1 TUPLES];')
count = 3
for r in rs:
print(r)
count -= 1
if count <= 0:
break
print("Show Source:")
print(api.source('beepy_test', 'ns'))
finally:
print("Delete Topology:")
print(api.delete_topology('beepy_test'))
print("List Topologies:")
print(api.topologies())
if __name__ == '__main__':
GeneralExample().main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from pysensorbee import SensorBeeAPI
class GeneralExample(object):
def main(self):
api = SensorBeeAPI()
print("Runtime Status:")
print(api.runtime_status())
print("Create Topology:")
print(api.create_topology('beepy_test'))
try:
print("List Topologies:")
print(api.topologies())
print("Show Topology:")
print(api.topology('beepy_test'))
print("Send CREATE SOURCE Query:")
print(api.query('beepy_test', 'CREATE SOURCE ns TYPE node_statuses;'))
print("List Sources:")
print(api.sources('beepy_test'))
print("List Streams:")
print(api.streams('beepy_test'))
print("List Sinks:")
print(api.sinks('beepy_test'))
print("Send EVAL Query:")
print(api.query('beepy_test', 'EVAL 1+2+3;'))
print("Send SELECT Query:")
rs = api.query('beepy_test', 'SELECT RSTREAM * FROM ns [RANGE 1 TUPLES];')
count = 3
for r in rs:
print(r)
count -= 1
if count <= 0:
break
print("Show Source:")
print(api.source('beepy_test', 'ns'))
finally:
print("Delete Topology:")
print(api.delete_topology('beepy_test'))
print("List Topologies:")
print(api.topologies())
if __name__ == '__main__':
GeneralExample().main()
| mit | Python |
ec082a531ad2e6dbc3d8ce4de78c76b84d615c38 | update view.py - import render from django shortcuts | django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents | example/simplecms/views.py | example/simplecms/views.py | from django.http import Http404
from django.shortcuts import render
from django.template.context import RequestContext
from simplecms.models import Page
def page_detail(request, path):
stripped = path.strip('/') if path else ''
stripped = stripped and u'/%s/' % stripped or '/'
try:
page = Page.objects.get(_cached_url=stripped)
except Page.DoesNotExist:
raise Http404("No page found for the path '%s'" % stripped)
return render(request, page.template_name, {
'simplecms_page': page,
})
| from django.http import Http404
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from simplecms.models import Page
def page_detail(request, path):
stripped = path.strip('/') if path else ''
stripped = stripped and u'/%s/' % stripped or '/'
try:
page = Page.objects.get(_cached_url=stripped)
except Page.DoesNotExist:
raise Http404("No page found for the path '%s'" % stripped)
return render(request, page.template_name, {
'simplecms_page': page,
})
| apache-2.0 | Python |
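For context, a sketch of the shortcut the commit above switches to: `render()` builds the context from the request itself (running context processors) and returns an `HttpResponse`, so no explicit `RequestContext` is needed. The template name here is hypothetical and a configured Django project is assumed.

```python
from django.shortcuts import render


def page_detail(request):
    # Equivalent to render_to_response(..., context_instance=RequestContext(request))
    # in older Django, minus the boilerplate.
    return render(request, "simplecms/page.html", {"title": "Hello"})
```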
b275b06379fea006e91f45c976d72751595031dc | Comment out string import since it is not used | mozilla-services/screenshots,mozilla-services/pageshot,mozilla-services/screenshots,mozilla-services/screenshots,mozilla-services/pageshot,mozilla-services/pageshot,mozilla-services/pageshot,mozilla-services/screenshots | test/server/test_image_validation.py | test/server/test_image_validation.py | from urlparse import urljoin
from clientlib import (
make_example_shot,
make_random_id,
screenshots_session,
example_images
)
import random
# import string
# Hack to make this predictable:
random.seed(0)
def test_invalid_image_url():
with screenshots_session() as user:
shot_id = make_random_id() + "/test.com"
shot_data = urljoin(user.backend, "data/" + shot_id)
shot_json = make_example_shot(user.deviceId)
invalid_url = "https://example.com/?aaA=bbb=\"); background-color: red;"
for clip_id in shot_json['clips']:
shot_json['clips'][clip_id]['image']['url'] = invalid_url
break
resp = user.session.put(
shot_data,
json=shot_json,
)
print(resp.text)
assert resp.status_code == 500 # assertion failure on clip image url
def test_invalid_data_image():
with screenshots_session() as user:
shot_id = make_random_id() + "/test.com"
shot_data = urljoin(user.backend, "data/" + shot_id)
shot_json = make_example_shot(user.deviceId)
for entry in example_images:
valid_data_image = entry['url']
if "iVBORw0KGgo" in valid_data_image:
invalid_data_image = valid_data_image.replace('iVBORw0KGgo', 'R0k')
for clip_id in shot_json['clips']:
shot_json['clips'][clip_id]['image'] = invalid_data_image
break
resp = user.session.put(
shot_data,
json=shot_json,
)
print(resp.text)
assert resp.status_code == 500
def test_invalid_data_image_decoded():
pass
def test_invalid_data_url():
pass
if __name__ == "__main__":
test_invalid_data_image()
test_invalid_data_image_decoded()
test_invalid_data_url()
| from urlparse import urljoin
from clientlib import (
make_example_shot,
make_random_id,
screenshots_session,
example_images
)
import random
import string
# Hack to make this predictable:
random.seed(0)
def test_invalid_image_url():
with screenshots_session() as user:
shot_id = make_random_id() + "/test.com"
shot_data = urljoin(user.backend, "data/" + shot_id)
shot_json = make_example_shot(user.deviceId)
invalid_url = "https://example.com/?aaA=bbb=\"); background-color: red;"
for clip_id in shot_json['clips']:
shot_json['clips'][clip_id]['image']['url'] = invalid_url
break
resp = user.session.put(
shot_data,
json=shot_json,
)
print(resp.text)
assert resp.status_code == 500 # assertion failure on clip image url
def test_invalid_data_image():
with screenshots_session() as user:
shot_id = make_random_id() + "/test.com"
shot_data = urljoin(user.backend, "data/" + shot_id)
shot_json = make_example_shot(user.deviceId)
for entry in example_images:
valid_data_image = entry['url']
if "iVBORw0KGgo" in valid_data_image:
invalid_data_image = valid_data_image.replace('iVBORw0KGgo', 'R0k')
for clip_id in shot_json['clips']:
shot_json['clips'][clip_id]['image'] = invalid_data_image
break
resp = user.session.put(
shot_data,
json=shot_json,
)
print(resp.text)
assert resp.status_code == 500
def test_invalid_data_image_decoded():
pass
def test_invalid_data_url():
pass
if __name__ == "__main__":
test_invalid_data_image()
test_invalid_data_image_decoded()
test_invalid_data_url()
| mpl-2.0 | Python |
aefd972c7fb423396f59da03a1d460cd3559d1e1 | Remove unnecessary comments of old function signatures | nelson-liu/paraphrase-id-tensorflow,nelson-liu/paraphrase-id-tensorflow | duplicate_questions/data/tokenizers/word_tokenizers.py | duplicate_questions/data/tokenizers/word_tokenizers.py | class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
        # Import is done here because it is slow and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
| class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
        # Import is done here because it is slow and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
| mit | Python |
586c3fb90bd765ebee1f5e63f94a86144623d9d2 | Update map-sum-pairs.py | yiwen-luo/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,yiwen-luo/LeetCode,yiwen-luo/LeetCode | Python/map-sum-pairs.py | Python/map-sum-pairs.py | # Time: O(n), n is the length of key
# Space: O(t), t is the number of nodes in trie
# Implement a MapSum class with insert, and sum methods.
#
# For the method insert, you'll be given a pair of (string, integer).
# The string represents the key and the integer represents the value.
# If the key already existed, then the original key-value pair will be overridden to the new one.
#
# For the method sum, you'll be given a string representing the prefix,
# and you need to return the sum of all the pairs' value whose key starts with the prefix.
#
# Example 1:
# Input: insert("apple", 3), Output: Null
# Input: sum("ap"), Output: 3
# Input: insert("app", 2), Output: Null
# Input: sum("ap"), Output: 5
import collections


class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
| # Time: O(n), n is the length of key
# Space: O(t), t is the number of nodes in trie
import collections


class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
| mit | Python |
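A small driver (not part of the solution file) exercising the `MapSum` class above; it shows that re-inserting an existing key overrides the old value via the delta bookkeeping rather than double-counting it.

```python
ms = MapSum()
ms.insert("apple", 3)
print(ms.sum("ap"))  # 3
ms.insert("app", 2)
print(ms.sum("ap"))  # 5
ms.insert("app", 7)  # override: a delta of +5 is propagated along the prefix
print(ms.sum("ap"))  # 10
```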
6a833e64c4fdfd321a1e031133cf22aaec2d92a6 | Test format_error | PyBossa/pbs,PyBossa/pbs,PyBossa/pbs | test/test.py | test/test.py | """Test module for pbs client."""
import pbclient
from helpers import *
from mock import patch
from nose.tools import assert_raises
from requests import exceptions
class Test(object):
"""Test class for pbs."""
error = {"action": "GET",
"exception_cls": "NotFound",
"exception_msg": "(NotFound)",
"status": "failed",
"status_code": 404,
"target": "/api/app"}
@patch('pbclient.find_app')
def test_find_app_by_short_name(self, mock):
"""Test find_app_by_short_name returns a project."""
mock.return_value = ['project']
project = find_app_by_short_name('project', pbclient)
err_msg = "It shoul return: project"
assert project == 'project', err_msg
@patch('pbclient.find_app')
def test_find_app_by_short_name_connection_error(self, mock):
"""Test find_app_by_short_name connection_error is raised."""
mock.side_effect = exceptions.ConnectionError
assert_raises(exceptions.ConnectionError,
find_app_by_short_name,
'project',
pbclient)
@patch('helpers.format_error')
@patch('pbclient.find_app')
def test_find_app_by_short_name_error(self, mock, mock2):
"""Test find_app_by_short_name error is printed."""
mock.return_value = self.error
find_app_by_short_name('project', pbclient)
mock2.assert_called_with('pbclient.find_app', self.error)
def test_check_api_error_raises_exception(self):
"""Test check_api_error raises HTTPError exception."""
assert_raises(exceptions.HTTPError, check_api_error, self.error)
def test_check_api_error_returns_none(self):
"""Test check_api_error returns none."""
error = self.error
error['status'] = 'wrong'
check_api_error(error)
error = 'not_a_dict'
check_api_error(error)
@patch('pbclient.find_app')
def test_format_error(self, mock):
"""Test format_error works."""
mock.return_value = ['project']
assert_raises(SystemExit, format_error, 'pbclient.find_app', ['error'])
assert_raises(SystemExit, format_error, 'pbclient.find_app', self.error)
| """Test module for pbs client."""
import pbclient
from helpers import *
from mock import patch
from nose.tools import assert_raises
from requests import exceptions
class Test(object):
"""Test class for pbs."""
error = {"action": "GET",
"exception_cls": "NotFound",
"exception_msg": "(NotFound)",
"status": "failed",
"status_code": 404,
"target": "/api/app"}
@patch('pbclient.find_app')
def test_find_app_by_short_name(self, mock):
"""Test find_app_by_short_name returns a project."""
mock.return_value = ['project']
project = find_app_by_short_name('project', pbclient)
err_msg = "It shoul return: project"
assert project == 'project', err_msg
@patch('pbclient.find_app')
def test_find_app_by_short_name_connection_error(self, mock):
"""Test find_app_by_short_name connection_error is raised."""
mock.side_effect = exceptions.ConnectionError
assert_raises(exceptions.ConnectionError,
find_app_by_short_name,
'project',
pbclient)
@patch('helpers.format_error')
@patch('pbclient.find_app')
def test_find_app_by_short_name_error(self, mock, mock2):
"""Test find_app_by_short_name error is printed."""
mock.return_value = self.error
find_app_by_short_name('project', pbclient)
mock2.assert_called_with('pbclient.find_app', self.error)
def test_check_api_error_raises_exception(self):
"""Test check_api_error raises HTTPError exception."""
assert_raises(exceptions.HTTPError, check_api_error, self.error)
def test_check_api_error_returns_none(self):
"""Test check_api_error returns none."""
error = self.error
error['status'] = 'wrong'
check_api_error(error)
error = 'not_a_dict'
check_api_error(error)
| agpl-3.0 | Python |
d9c677a35d18a878ef8d253a9453e93da3341e96 | Add extremely basic template for command modules | johnmarcampbell/twircBot | runTwircBot.py | runTwircBot.py | #!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
| #!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| mit | Python |
d58015f39451f4efc4a66f77f62e01009c22917b | Fix the windows red build by removing the known_host backup all the time | neo4j/neo4j-python-driver,neo4j/neo4j-python-driver | test/util.py | test/util.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2002-2016 "Neo Technology,"
# Network Engine for Objects in Lund AB [http://neotechnology.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from os import remove, rename
from os.path import isfile
from unittest import TestCase
from neo4j.util import Watcher
from neo4j.v1.constants import KNOWN_HOSTS
KNOWN_HOSTS_BACKUP = KNOWN_HOSTS + ".backup"
def watch(f):
""" Decorator to enable log watching for the lifetime of a function.
Useful for debugging unit tests, simply add `@watch` to the top of
the test function.
:param f: the function to decorate
:return: a decorated function
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
watcher = Watcher("neo4j.bolt")
watcher.watch()
f(*args, **kwargs)
watcher.stop()
return wrapper
class ServerTestCase(TestCase):
""" Base class for test cases that use a remote server.
"""
known_hosts = KNOWN_HOSTS
known_hosts_backup = known_hosts + ".backup"
def setUp(self):
if isfile(self.known_hosts):
if isfile(self.known_hosts_backup):
remove(self.known_hosts_backup)
rename(self.known_hosts, self.known_hosts_backup)
def tearDown(self):
if isfile(self.known_hosts_backup):
if isfile(self.known_hosts):
remove(self.known_hosts)
rename(self.known_hosts_backup, self.known_hosts)
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2002-2016 "Neo Technology,"
# Network Engine for Objects in Lund AB [http://neotechnology.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from os import rename
from os.path import isfile
from unittest import TestCase
from neo4j.util import Watcher
from neo4j.v1.constants import KNOWN_HOSTS
KNOWN_HOSTS_BACKUP = KNOWN_HOSTS + ".backup"
def watch(f):
""" Decorator to enable log watching for the lifetime of a function.
Useful for debugging unit tests, simply add `@watch` to the top of
the test function.
:param f: the function to decorate
:return: a decorated function
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
watcher = Watcher("neo4j.bolt")
watcher.watch()
f(*args, **kwargs)
watcher.stop()
return wrapper
class ServerTestCase(TestCase):
""" Base class for test cases that use a remote server.
"""
known_hosts = KNOWN_HOSTS
known_hosts_backup = known_hosts + ".backup"
def setUp(self):
if isfile(self.known_hosts):
rename(self.known_hosts, self.known_hosts_backup)
def tearDown(self):
if isfile(self.known_hosts_backup):
rename(self.known_hosts_backup, self.known_hosts)
| apache-2.0 | Python |
fa14d14bcd83a630eeffb0b6504612be24754c09 | add catch for windows -- clear() doesn't exist | gskielian/SimpleCV,jlegendary/SimpleCV,jayrambhia/SimpleCV2,nils-werner/SimpleCV,jyt109/SimpleCV,nikhilgk/SimpleCV,gskielian/SimpleCV,sightmachine/SimpleCV2,beni55/SimpleCV,gskielian/SimpleCV,nils-werner/SimpleCV,hayd/SimpleCV,nils-werner/SimpleCV,nikhilgk/SimpleCV,jyt109/SimpleCV,tpltnt/SimpleCV,jlegendary/SimpleCV,jyt109/SimpleCV,hurdlea/SimpleCV,nils-werner/SimpleCV,sightmachine/SimpleCV,hayd/SimpleCV,sightmachine/SimpleCV,tpltnt/SimpleCV,onaclovtech/SimpleCV,nikhilgk/SimpleCV,nikhilgk/SimpleCV,hayd/SimpleCV,jyt109/SimpleCV,jyt109/SimpleCV,gskielian/SimpleCV,hurdlea/SimpleCV,hurdlea/SimpleCV,hurdlea/SimpleCV,jayrambhia/SimpleCV2,jayrambhia/SimpleCV2,gskielian/SimpleCV,sightmachine/SimpleCV,jlegendary/SimpleCV,tpltnt/SimpleCV,nikhilgk/SimpleCV,tpltnt/SimpleCV,hayd/SimpleCV,sightmachine/SimpleCV,onaclovtech/SimpleCV,beni55/SimpleCV,sightmachine/SimpleCV,onaclovtech/SimpleCV,tpltnt/SimpleCV,jlegendary/SimpleCV,beni55/SimpleCV,sightmachine/SimpleCV2,sightmachine/SimpleCV2,jlegendary/SimpleCV,hayd/SimpleCV,hurdlea/SimpleCV,onaclovtech/SimpleCV,onaclovtech/SimpleCV,jayrambhia/SimpleCV2,beni55/SimpleCV,nils-werner/SimpleCV,sightmachine/SimpleCV2,beni55/SimpleCV | SimpleCV/Shell/Shell.py | SimpleCV/Shell/Shell.py | #!/usr/bin/python
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# SimpleCV
# a kinder, gentler machine vision python library
#-----------------------------------------------------------------------
# SimpleCV is an interface for Open Source machine
# vision libraries in Python.
# It provides a consise, readable interface for cameras,
# image manipulation, feature extraction, and format conversion.
# Our mission is to give casual users a comprehensive interface
# for basic machine vision functions and an
# elegant programming interface for advanced users.
#
# more info:
# http://sf.net/p/simplecv
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#load system libraries
from SimpleCV.__init__ import *
from subprocess import call
import platform
#Load simpleCV libraries
from SimpleCV.Shell.Tutorial import *
#libraries for the shell
from IPython.Shell import IPShellEmbed
def clear():
if platform.system() == "Windows":
return
call("clear")
"""
If you run SimpleCV directly, it will launch an ipython shell
"""
def main():
clear()
banner = '+----------------------------------------------------+\n'
banner += ' SimpleCV [interactive shell]\n'
banner += '+----------------------------------------------------+\n'
banner += '\n\n'
banner += 'Commands: \n'
banner += '\t"Exit" or press "Ctrl+ D" to exit the shell\n'
banner += '\t"clear()" to clear the shell screen\n'
banner += '\t"tutorial.start()" to begin the SimpleCV interactive tutorial\n'
banner += '\n'
banner += 'Usage:\n'
banner += '\tdot complete works to show library\n'
banner += '\tfor example: Image().save("/tmp/test.jpg") will dot complete\n'
banner += '\tjust by touching TAB after typing Image().\n'
banner += '\n'
banner += 'Help:\n'
banner += '\ttyping "help function_name" will give in depth documentation of API\n'
banner += '\t\texample:'
banner += 'help Image\n'
banner += '\t\twill give the in-depth information about that class\n'
banner += '\ttyping "?function_name" will give the quick API documentation\n'
banner += '\t\texample:'
banner += '?Image.save\n'
banner += '\t\twill give help on the image save function'
exit_msg = '\nExiting the SimpleCV interactive shell\n'
#setup terminal to show SCV prompt
argsv = ['-pi1','SimpleCV:\\#>','-pi2',' .\\D.:','-po','SimpleCV:\\#>','-nosep']
tutorial = Tutorial()
scvShell = IPShellEmbed(argsv)
scvShell.set_banner(banner)
scvShell.set_exit_msg(exit_msg)
#Note that all loaded libraries are inherited in the embedded ipython shell
sys.exit(scvShell())
| #!/usr/bin/python
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# SimpleCV
# a kinder, gentler machine vision python library
#-----------------------------------------------------------------------
# SimpleCV is an interface for Open Source machine
# vision libraries in Python.
# It provides a consise, readable interface for cameras,
# image manipulation, feature extraction, and format conversion.
# Our mission is to give casual users a comprehensive interface
# for basic machine vision functions and an
# elegant programming interface for advanced users.
#
# more info:
# http://sf.net/p/simplecv
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#load system libraries
from SimpleCV.__init__ import *
from subprocess import call
#Load simpleCV libraries
from SimpleCV.Shell.Tutorial import *
#libraries for the shell
from IPython.Shell import IPShellEmbed
def clear():
call("clear")
"""
If you run SimpleCV directly, it will launch an ipython shell
"""
def main():
clear()
banner = '+----------------------------------------------------+\n'
banner += ' SimpleCV [interactive shell]\n'
banner += '+----------------------------------------------------+\n'
banner += '\n\n'
banner += 'Commands: \n'
banner += '\t"Exit" or press "Ctrl+ D" to exit the shell\n'
banner += '\t"clear()" to clear the shell screen\n'
banner += '\t"tutorial.start()" to begin the SimpleCV interactive tutorial\n'
banner += '\n'
banner += 'Usage:\n'
banner += '\tdot complete works to show library\n'
banner += '\tfor example: Image().save("/tmp/test.jpg") will dot complete\n'
banner += '\tjust by touching TAB after typing Image().\n'
banner += '\n'
banner += 'Help:\n'
banner += '\ttyping "help function_name" will give in depth documentation of API\n'
banner += '\t\texample:'
banner += 'help Image\n'
banner += '\t\twill give the in-depth information about that class\n'
banner += '\ttyping "?function_name" will give the quick API documentation\n'
banner += '\t\texample:'
banner += '?Image.save\n'
banner += '\t\twill give help on the image save function'
exit_msg = '\nExiting the SimpleCV interactive shell\n'
#setup terminal to show SCV prompt
argsv = ['-pi1','SimpleCV:\\#>','-pi2',' .\\D.:','-po','SimpleCV:\\#>','-nosep']
tutorial = Tutorial()
scvShell = IPShellEmbed(argsv)
scvShell.set_banner(banner)
scvShell.set_exit_msg(exit_msg)
#Note that all loaded libraries are inherited in the embedded ipython shell
sys.exit(scvShell())
| bsd-3-clause | Python |
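The early return added above sidesteps the crash, but drops the feature on Windows, where the shell command is `cls` rather than `clear`. A portable sketch of the same helper:

```python
import os
import platform


def clear():
    # `clear` exists on POSIX shells, `cls` on Windows cmd.
    os.system("cls" if platform.system() == "Windows" else "clear")
```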
94c50696a9ec448766023c9575d5b73785b1afeb | print vertical | R-Mascarenhas/git-tutorial | hello.py | hello.py | x = "Hello World!"
for i in x:
print(i)
input()
| x = "Hello World!"
print(x)
input()
| apache-2.0 | Python |
cb6c1a854f64c72bc4bf809307692b801286fe95 | remove redundant and broken re-definition of programrolegrant | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/models/program_role_grant.py | accelerator/models/program_role_grant.py | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from accelerator_abstract.models.base_program_role_grant import (
BaseProgramRoleGrant
)
class ProgramRoleGrant(BaseProgramRoleGrant):
class Meta(BaseProgramRoleGrant.Meta):
swappable = swapper.swappable_setting(
BaseProgramRoleGrant.Meta.app_label, "ProgramRoleGrant")
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.conf import settings
from django.db import models
from accelerator_abstract.models.accelerator_model import AcceleratorModel
from accelerator_abstract.models.base_program_role_grant import (
BaseProgramRoleGrant
)
class ProgramRoleGrant(BaseProgramRoleGrant):
person = models.ForeignKey(settings.AUTH_USER_MODEL)
program_role = models.ForeignKey(
swapper.get_model_name(AcceleratorModel.Meta.app_label,
"ProgramRoleGrant"))
class Meta(BaseProgramRoleGrant.Meta):
swappable = swapper.swappable_setting(
BaseProgramRoleGrant.Meta.app_label, "ProgramRoleGrant")
| mit | Python |
ecb060f85c3de21b6f2e6ea94417685307252791 | Improve QtScriptEngineDebugger test behavior | IronManMark20/pyside2,BadSingleton/pyside2,gbaty/pyside2,M4rtinK/pyside-android,RobinD42/pyside,enthought/pyside,IronManMark20/pyside2,enthought/pyside,PySide/PySide,RobinD42/pyside,BadSingleton/pyside2,enthought/pyside,BadSingleton/pyside2,gbaty/pyside2,RobinD42/pyside,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,pankajp/pyside,M4rtinK/pyside-android,M4rtinK/pyside-android,IronManMark20/pyside2,enthought/pyside,M4rtinK/pyside-android,gbaty/pyside2,qtproject/pyside-pyside,qtproject/pyside-pyside,M4rtinK/pyside-bb10,pankajp/pyside,RobinD42/pyside,RobinD42/pyside,enthought/pyside,M4rtinK/pyside-android,qtproject/pyside-pyside,RobinD42/pyside,gbaty/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,enthought/pyside,BadSingleton/pyside2,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,M4rtinK/pyside-bb10,BadSingleton/pyside2,PySide/PySide,IronManMark20/pyside2,qtproject/pyside-pyside,pankajp/pyside,IronManMark20/pyside2,pankajp/pyside,M4rtinK/pyside-bb10,PySide/PySide,gbaty/pyside2,PySide/PySide,RobinD42/pyside,M4rtinK/pyside-android | tests/QtScriptTools/debugger_test.py | tests/QtScriptTools/debugger_test.py |
import unittest
from PySide.QtCore import SIGNAL, QTimer
from PySide.QtScript import QScriptEngine
from PySide.QtScriptTools import QScriptEngineDebugger
from helper import UsesQApplication
class DebuggerTest(UsesQApplication):
def setUp(self):
UsesQApplication.setUp(self)
self.engine = QScriptEngine()
self.debugger = QScriptEngineDebugger()
self.has_suspended = 0
self.has_resumed = 0
self.count = 3
def suspended(self):
self.has_suspended += 1
# Will emit evaluationResumed until there are more instructions to be run
QTimer.singleShot(100, self.debugger.action(QScriptEngineDebugger.StepIntoAction).trigger)
def resumed(self):
# Will be called when debugger.state() change from Suspended to Running
# except for the first time.
self.has_resumed += 1
def testBasic(self):
'''Interrupt and resume evaluation with QScriptEngineDebugger'''
self.debugger.attachTo(self.engine)
self.debugger.setAutoShowStandardWindow(False)
self.debugger.connect(SIGNAL('evaluationSuspended()'), self.suspended)
self.debugger.connect(SIGNAL('evaluationResumed()'), self.resumed)
self.debugger.action(QScriptEngineDebugger.InterruptAction).trigger()
self.engine.evaluate("3+4\n2+1\n5+1")
self.assertEqual(self.has_resumed, 2)
self.assertEqual(self.has_suspended, 3)
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtCore import SIGNAL
from PySide.QtScript import QScriptEngine
from PySide.QtScriptTools import QScriptEngineDebugger
from helper import UsesQApplication
class DebuggerTest(UsesQApplication):
def setUp(self):
UsesQApplication.setUp(self)
self.engine = QScriptEngine()
self.debugger = QScriptEngineDebugger()
self.has_suspended = False
self.has_resumed = False
def suspended(self):
self.has_suspended = True
self.debugger.action(QScriptEngineDebugger.ContinueAction).trigger()
def resumed(self):
self.has_resumed = True
def testBasic(self):
'''Interrupt and resume evaluation with QScriptEngineDebugger'''
self.debugger.attachTo(self.engine)
self.debugger.setAutoShowStandardWindow(False)
self.debugger.connect(SIGNAL('evaluationSuspended()'), self.suspended)
self.debugger.connect(SIGNAL('evaluationResumed()'), self.resumed)
self.debugger.action(QScriptEngineDebugger.InterruptAction).trigger()
self.engine.evaluate("3+4")
self.debugger.action(QScriptEngineDebugger.ContinueAction).trigger()
self.assert_(self.has_resumed)
self.assert_(self.has_suspended)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
8a8cc7b5f37baa315470083eedf942b004ff3662 | add more cases to member list acceptance test (#4804) | mvaled/sentry,ifduyue/sentry,looker/sentry,JackDanger/sentry,JamesMura/sentry,looker/sentry,jean/sentry,beeftornado/sentry,jean/sentry,mvaled/sentry,gencer/sentry,jean/sentry,ifduyue/sentry,JamesMura/sentry,jean/sentry,looker/sentry,mvaled/sentry,gencer/sentry,mvaled/sentry,jean/sentry,JamesMura/sentry,JackDanger/sentry,BuildingLink/sentry,looker/sentry,JackDanger/sentry,BuildingLink/sentry,ifduyue/sentry,looker/sentry,beeftornado/sentry,mvaled/sentry,BuildingLink/sentry,mvaled/sentry,ifduyue/sentry,BuildingLink/sentry,JamesMura/sentry,BuildingLink/sentry,beeftornado/sentry,JamesMura/sentry,gencer/sentry,gencer/sentry,gencer/sentry,ifduyue/sentry | tests/acceptance/test_member_list.py | tests/acceptance/test_member_list.py | from __future__ import absolute_import
from sentry.models import OrganizationMember
from sentry.testutils import AcceptanceTestCase
class ListOrganizationMembersTest(AcceptanceTestCase):
def setUp(self):
super(ListOrganizationMembersTest, self).setUp()
self.user = self.create_user('foo@example.com')
self.org = self.create_organization(
name='Rowdy Tiger',
owner=None,
)
self.team = self.create_team(
organization=self.org,
name='Mariachi Band'
)
self.create_member(
user=self.user,
organization=self.org,
role='owner',
teams=[self.team],
)
OrganizationMember.objects.create(
email='bar@example.com',
organization=self.org,
role='member'
)
self.create_member(
user=self.create_user('baz@example.com'),
organization=self.org,
role='admin',
teams=[self.team],
)
self.login_as(self.user)
def test_list(self):
self.browser.get('/organizations/{}/members/'.format(self.org.slug))
self.browser.snapshot(name='list organization members')
| from __future__ import absolute_import
from sentry.testutils import AcceptanceTestCase
class ListOrganizationMembersTest(AcceptanceTestCase):
def setUp(self):
super(ListOrganizationMembersTest, self).setUp()
self.user = self.create_user('foo@example.com')
self.org = self.create_organization(
name='Rowdy Tiger',
owner=None,
)
self.team = self.create_team(
organization=self.org,
name='Mariachi Band'
)
self.create_member(
user=self.user,
organization=self.org,
role='owner',
teams=[self.team],
)
self.login_as(self.user)
def test_list(self):
self.browser.get('/organizations/{}/members/'.format(self.org.slug))
self.browser.snapshot(name='list organization members')
| bsd-3-clause | Python |
16b3e30a88e9101db58c0549e515848df29f29b9 | Set up sample project to throw web exceptions (for request oject) | Osmose/raygun4py,MindscapeHQ/raygun4py,ferringb/raygun4py | raygun4py-sample/test.py | raygun4py-sample/test.py | import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
| import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
| mit | Python |
9d2f071c28d4f84ae57ab7b572b3c98620c820a4 | Fix sort order | adamtheturtle/vws-python,adamtheturtle/vws-python | tests/mock_vws/test_delete_target.py | tests/mock_vws/test_delete_target.py | """
Tests for deleting targets.
"""
import pytest
from requests import codes
from requests_mock import DELETE
from common.constants import ResultCodes
from tests.mock_vws.utils import assert_vws_failure
from tests.utils import VuforiaServerCredentials
from vws._request_utils import target_api_request
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestDelete:
"""
Tests for deleting targets.
"""
def test_no_wait(self,
target_id: str,
vuforia_server_credentials: VuforiaServerCredentials,
) -> None:
"""
When attempting to delete a target immediately after creating it, a
`FORBIDDEN` response is returned.
This is because the target goes into a processing state.
        There is a race condition here: the test may fail if the target
        reaches a success or failure state before the deletion attempt.
"""
request_path = '/targets/' + target_id
response = target_api_request(
access_key=vuforia_server_credentials.access_key,
secret_key=vuforia_server_credentials.secret_key,
method=DELETE,
content=b'',
request_path=request_path,
)
assert_vws_failure(
response=response,
status_code=codes.FORBIDDEN,
result_code=ResultCodes.TARGET_STATUS_PROCESSING,
)
| """
Tests for deleting targets.
"""
import pytest
from requests import codes
from requests_mock import DELETE
from common.constants import ResultCodes
from tests.mock_vws.utils import assert_vws_failure
from vws._request_utils import target_api_request
from tests.utils import VuforiaServerCredentials
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestDelete:
"""
Tests for deleting targets.
"""
def test_no_wait(self,
target_id: str,
vuforia_server_credentials: VuforiaServerCredentials,
) -> None:
"""
When attempting to delete a target immediately after creating it, a
`FORBIDDEN` response is returned.
This is because the target goes into a processing state.
        There is a race condition here: the test may fail if the target
        reaches a success or failure state before the deletion attempt.
"""
request_path = '/targets/' + target_id
response = target_api_request(
access_key=vuforia_server_credentials.access_key,
secret_key=vuforia_server_credentials.secret_key,
method=DELETE,
content=b'',
request_path=request_path,
)
assert_vws_failure(
response=response,
status_code=codes.FORBIDDEN,
result_code=ResultCodes.TARGET_STATUS_PROCESSING,
)
| mit | Python |
719bd53fe9b7346e858becce99b6f0eead40b3fe | bump to 1.1 | s0undt3ch/sorbic,thatch45/sorbic | sorbic/__init__.py | sorbic/__init__.py | # -*- coding: utf-8 -*-
version = '0.1.1'
| # -*- coding: utf-8 -*-
version = '0.1.0'
| apache-2.0 | Python |
d147832e3c6d86ec81c2ea35bde1de7358cac27e | Fix atol in test | poliastro/poliastro | tests/tests_twobody/test_sampling.py | tests/tests_twobody/test_sampling.py | from functools import partial
import hypothesis.strategies as st
import numpy as np
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from hypothesis import example, given, settings
from poliastro.twobody.sampling import sample_closed
angles = partial(st.floats, min_value=-2 * np.pi, max_value=2 * np.pi)
eccentricities = partial(st.floats, min_value=0, max_value=1, exclude_max=True)
@st.composite
def with_units(draw, elements, unit):
angle = draw(elements)
return angle * unit
angles_q = partial(with_units, elements=angles(), unit=u.rad)
eccentricities_q = partial(with_units, elements=eccentricities(), unit=u.one)
@settings(deadline=None)
@given(
min_nu=angles_q(), ecc=eccentricities_q(), max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_is_always_between_minus_pi_and_pi(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert ((-np.pi * u.rad <= result) & (result <= np.pi * u.rad)).all()
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_starts_at_min_anomaly_if_in_range(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert_quantity_allclose(result[0], min_nu, atol=1e-15 * u.rad)
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
)
@example(1e-16 * u.rad, 0 * u.one)
@example(0 * u.rad, 0 * u.one)
@example(0 * u.rad, 0.88680956 * u.one)
def test_sample_closed_starts_and_ends_at_min_anomaly_if_in_range_and_no_max_given(
min_nu, ecc
):
result = sample_closed(min_nu, ecc)
assert_quantity_allclose(result[0], min_nu, atol=1e-14 * u.rad)
assert_quantity_allclose(result[-1], min_nu, atol=1e-14 * u.rad)
| from functools import partial
import hypothesis.strategies as st
import numpy as np
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from hypothesis import example, given, settings
from poliastro.twobody.sampling import sample_closed
angles = partial(st.floats, min_value=-2 * np.pi, max_value=2 * np.pi)
eccentricities = partial(st.floats, min_value=0, max_value=1, exclude_max=True)
@st.composite
def with_units(draw, elements, unit):
angle = draw(elements)
return angle * unit
angles_q = partial(with_units, elements=angles(), unit=u.rad)
eccentricities_q = partial(with_units, elements=eccentricities(), unit=u.one)
@settings(deadline=None)
@given(
min_nu=angles_q(), ecc=eccentricities_q(), max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_is_always_between_minus_pi_and_pi(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert ((-np.pi * u.rad <= result) & (result <= np.pi * u.rad)).all()
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_starts_at_min_anomaly_if_in_range(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert_quantity_allclose(result[0], min_nu, atol=1e-15 * u.rad)
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
)
@example(0 * u.rad, 0 * u.one)
@example(0 * u.rad, 0.88680956 * u.one)
def test_sample_closed_starts_and_ends_at_min_anomaly_if_in_range_and_no_max_given(
min_nu, ecc
):
result = sample_closed(min_nu, ecc)
assert_quantity_allclose(result[0], min_nu)
assert_quantity_allclose(result[-1], min_nu, atol=1e-14 * u.rad)
| mit | Python |
6979d67e50366ec0fb1336cec8a5a69fee190433 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/7c99aeadacee949f4de11eafe7e83bae80358791. | paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow | 
third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "7c99aeadacee949f4de11eafe7e83bae80358791"
TFRT_SHA256 = "482deb3aecbaff840b9de99c5dc571a2cd1e2e7b7f7fbce6ea3f6782a02f10fc"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "a0616887359cc71195d562d2f6eb53abd7080f4a"
TFRT_SHA256 = "943724d90fecd811d6c93cb7d3f5d8a2a11f7a74f416edaeb046ad552528bd5e"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
bccbd3ec0795371653c8d0cd5cc82999715b0687 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/36bc3321fbeacbc170257d5f345f0962fde39a64. | google/tsl,google/tsl,google/tsl | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "36bc3321fbeacbc170257d5f345f0962fde39a64"
TFRT_SHA256 = "61cb6c8bf4c69b71475572c4730921a70bb6759f150a691c90d93df3919c264b"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "3d3b2e9a5cdb682c2cb7a04f80846734dafb9880"
TFRT_SHA256 = "73af62ae6962b38e1fe8d675a43a9f47e9c3df7203aea2c6b26a4172c660f7d2"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
968a1751ef6ccadc30ac6bd0f0be5056ac0e9288 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/780e7a8cc4bb4c9338ad2da7cf63c75ce491cb96. | Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "780e7a8cc4bb4c9338ad2da7cf63c75ce491cb96"
TFRT_SHA256 = "983a6740b2d2737bb2b0fc50caed7aca8a18760f2d7efde0dd0049d0cf6e1b1c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "04472136dd8959de458771bbe1127f7a860e6884"
TFRT_SHA256 = "16e69b914bb119dbe30422b86e7db2cc8c9e13b36e7b3346200f86363e6fa11c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
0f28fe59a0f166f70b7fa9e6f4b209cc7632a126 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/abc30e257aea33e6637010a90e015f018ac065ca. | gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow | 
third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "abc30e257aea33e6637010a90e015f018ac065ca"
TFRT_SHA256 = "9382080217c32d0a0029fe58e72efd711323ddebd70f20844ca8bb1d2a3d875a"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "02ce138aadc5b4a7f585085fe75d9024d921c5a5"
TFRT_SHA256 = "1abe70a9684aadf8aa5b2ce32d5b0a1ac67f4f8f7fb0fb0ee95bcea7a3036bae"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
f2ff64a2897610d8b3d629ce9a198f60fb5402ce | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/e3a3076004d39ea17f6573898033e78a6321f505. | frreiss/tensorflow-fred,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "e3a3076004d39ea17f6573898033e78a6321f505"
TFRT_SHA256 = "99b146cbac1288f104e65060219b63c26ea0d485d1de0b7da79eb165abba420c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ff325fafd1976e494831e9b3897ddf2c93235fbc"
TFRT_SHA256 = "ff221d793a6938cddb8075bf599a5060864803e0a96eef78bce0cece0dde4844"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| apache-2.0 | Python |
3dfb6078d3f22235a856183499f5c1d6581a7de7 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/7ed8de2e571d95575634f354762740aefd34e7fb. | Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer | 
third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "7ed8de2e571d95575634f354762740aefd34e7fb"
TFRT_SHA256 = "33831854b097fc733649886d74e6c10546c8aeee94e953cf3f020a05beadab69"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "433795f38edd59236fc2ce2acc2f050be4646d8f"
TFRT_SHA256 = "37beb3024c1821e93f42959dd14f845467b17a35f6e10dfe414631469d9c4465"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
45b39e8fca6341e69c3772c5914f8c78fb9545f8 | Fix bug with game screen UI being created more than once | TheCodeInside/NumberMunchersXO,TheCodeInside/NumberMunchersXO | Source/Gamestates/gamescreen.py | Source/Gamestates/gamescreen.py | import pygame, sys
from input import xo_input
from UI.uicontainer import UIContainer
class GameScreen:
def __init__(self, manager, screen):
self.stateManager = manager
self.window = screen
self.uiContainer = UIContainer(self.window)
self.uiContainer.horizontalStride = 5
self.initializedUI = False
self.screenInfo = pygame.display.Info()
pass
def start(self):
if not self.initializedUI:
# Button sizes
padding = 10
side_margin = 50
top_margin = 150
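            # Divide the remaining screen area into a 5x5 grid of equally sized buttons.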
width = (self.screenInfo.current_w - (padding * 5) - (side_margin * 2)) / 5
height = (self.screenInfo.current_h - (padding * 5) - (top_margin)) / 5
for i in range(0, 5):
for j in range(0, 5):
button = self.uiContainer.add_button()
# button.rect = pygame.Rect((padding * (i + 1)) + (width * i), (padding * (j + 1)) + (height * j), width, height)
button.rect = pygame.Rect((padding * i) + (width * i) + side_margin, (padding * j) + (height * j) + top_margin, width, height)
self.initializedUI = True
def update(self):
if (xo_input.btn_cross):
self.stateManager.switchGameState("MainScreen")
self.uiContainer.update()
def draw(self):
pygame.draw.rect(self.window, (255, 255, 255), (50, 50, 50, 50), 0)
# UI needs to be drawn LAST
self.uiContainer.draw()
def final(self):
pass | import pygame, sys
from input import xo_input
from UI.uicontainer import UIContainer
class GameScreen:
def __init__(self, manager, screen):
self.stateManager = manager
self.window = screen
self.uiContainer = UIContainer(self.window)
self.uiContainer.horizontalStride = 5
self.screenInfo = pygame.display.Info()
pass
def start(self):
# Button sizes
padding = 10
side_margin = 50
top_margin = 150
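        # Divide the remaining screen area into a 5x5 grid of equally sized buttons.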
width = (self.screenInfo.current_w - (padding * 5) - (side_margin * 2)) / 5
height = (self.screenInfo.current_h - (padding * 5) - (top_margin)) / 5
for i in range(0, 5):
for j in range(0, 5):
button = self.uiContainer.add_button()
# button.rect = pygame.Rect((padding * (i + 1)) + (width * i), (padding * (j + 1)) + (height * j), width, height)
button.rect = pygame.Rect((padding * i) + (width * i) + side_margin, (padding * j) + (height * j) + top_margin, width, height)
pass
def update(self):
if (xo_input.btn_cross):
self.stateManager.switchGameState("MainScreen")
self.uiContainer.update()
pass
def draw(self):
pygame.draw.rect(self.window, (255, 255, 255), (50, 50, 50, 50), 0)
# UI needs to be drawn LAST
self.uiContainer.draw()
def final(self):
pass | mit | Python |
ec15f264f5deec8a31b72c52edecdbe16c6c3f1f | Fix Score Keeper | andreimatei/congratboto | app_engine/plugins/score_keeper_plugin.py | app_engine/plugins/score_keeper_plugin.py | import re
from operator import attrgetter
from google.appengine.ext import db
BOT_ID = "100007101244912"
class Participant(db.Model):
"""Models an individual chat participant, with her score."""
name = db.StringProperty(indexed=False)
thread_id = db.StringProperty(indexed=True)
score = db.IntegerProperty(indexed=False)
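# TRIGGER matches increments/decrements such as "name++" or "name--" anywhere in a message.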
TRIGGER = re.compile('.*?([a-zA-Z0-9]*)((\\+\\+)|(--)).*', re.IGNORECASE)
class ScoreKeeper(object):
def GenerateKey(self, thread_id, addressee):
return "%s-%s" % (thread_id, addressee)
def IncrementScore(self, conversation, addressee, increment):
addressee = addressee.capitalize()
participant_key = self.GenerateKey(conversation.GetThreadId(), addressee)
participant = db.get(db.Key.from_path('Participant', participant_key))
if not participant:
participant = Participant(key_name = self.GenerateKey(conversation.GetThreadId(), addressee))
participant.name = addressee
participant.score = increment
participant.thread_id = conversation.GetThreadId()
participant.put()
conversation.PostMessage("Hello %s. You start at %d." % (addressee, increment))
return
participant.score += increment
participant.put()
conversation.PostMessage("%s, you're at %d." % (addressee, participant.score))
def PrintScores(self, conversation):
q = Participant.all()
q.filter("thread_id =", conversation.GetThreadId())
participants = q.run()
participants = sorted(participants, key=attrgetter('score'), reverse=True)
max_name_width = max([len(p.name) for p in participants])
message = '==== LEADERBOARD ====\n'
for p in participants:
message += '%-*s -> %d\n' % (max_name_width + 1, p.name, p.score)
conversation.PostMessage(message)
def HandleMessages(self, conversation):
messages = conversation.GetMessages()
to_score = [] # (name, increment)
scores_needed = False
for message in messages:
if not message.text: continue
if message.user and message.user.uid == BOT_ID:
# clear everything from before
del to_score[:]
scores_needed = False
continue
match = TRIGGER.match(message.text)
if match:
if match.groups()[0]:
to_score.append((match.groups()[0], 1 if match.groups()[1] == '++' else -1))
if message.text.startswith("/score"):
scores_needed = True
        # post replies, if needed
if to_score:
for (name, increment) in to_score:
self.IncrementScore(conversation, name, increment)
if scores_needed:
self.PrintScores(conversation)
| import re
from operator import attrgetter
from google.appengine.ext import db
BOT_ID = "100007101244912"
class Participant(db.Model):
"""Models an individual chat participant, with her score."""
name = db.StringProperty(indexed=False)
thread_id = db.StringProperty(indexed=True)
score = db.IntegerProperty(indexed=False)
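# TRIGGER matches increments/decrements such as "name++" or "name--" anywhere in a message.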
TRIGGER = re.compile('.*?([a-zA-Z0-9]*)((\\+\\+)|(--)).*', re.IGNORECASE)
class ScoreKeeper(object):
def GenerateKey(self, thread_id, addressee):
return "%s-%s" % (thread_id, addressee)
def IncrementScore(self, conversation, addressee, increment):
addressee = addressee.capitalize()
participant_key = self.GenerateKey(conversation.GetThreadId(), addressee)
participant = db.get(db.Key.from_path('Participant', participant_key))
if not participant:
participant = Participant(key_name = self.GenerateKey(conversation.GetThreadId(), addressee))
participant.name = addressee
participant.score = increment
participant.thread_id = conversation.GetThreadId()
participant.put()
conversation.PostMessage("Hello %s. You start at %d." % (addressee, increment))
return
participant.score += increment
participant.put()
conversation.PostMessage("%s, you're at %d." % (addressee, participant.score))
def PrintScores(self, thread_id):
q = Participant.all()
q.filter("thread_id =", thread_id)
participants = q.run()
participants = sorted(participants, key=attrgetter('score'), reverse=True)
max_name_width = max([len(p.name) for p in participants])
message = '==== LEADERBOARD ====\n'
for p in participants:
message += '%-*s -> %d\n' % (max_name_width + 1, p.name, p.score)
self.write_session.PostMessage(thread_id, message)
def HandleMessages(self, conversation):
messages = conversation.GetMessages()
to_score = [] # (name, increment)
scores_needed = False
for message in messages:
if not message.text: continue
if message.user and message.user.uid == BOT_ID:
# clear everything from before
del to_score[:]
scores_needed = False
continue
match = TRIGGER.match(message.text)
if match:
if match.groups()[0]:
to_score.append((match.groups()[0], 1 if match.groups()[1] == '++' else -1))
if message.text.startswith("/score"):
scores_needed = True
        # post replies, if needed
if to_score:
for (name, increment) in to_score:
self.IncrementScore(conversation, name, increment)
if scores_needed:
self.PrintScores(conversation.GetThreadId())
| mit | Python |
9f3356d06067dbcc77a79afee6bccf80600dab28 | Add a method to get the idle time. Also data are directly readed in /proc/uptime. | juliendelplanque/raspirestmonitor | server/systeminfo.py | server/systeminfo.py | #!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
| #!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
| mit | Python |
06196dcac0573cd853e72222bd067ec023157a6c | add rejected apps to list_packaged_apps management command | Revanth47/addons-server,andymckay/zamboni,wagnerand/zamboni,lavish205/olympia,jasonthomas/zamboni,jpetto/olympia,kmaglione/olympia,ngokevin/zamboni,diox/olympia,Joergen/zamboni,mozilla/olympia,robhudson/zamboni,psiinon/addons-server,beni55/olympia,atiqueahmedziad/addons-server,mozilla/zamboni,mudithkr/zamboni,aviarypl/mozilla-l10n-addons-server,muffinresearch/addons-server,koehlermichael/olympia,Nolski/olympia,luckylavish/zamboni,Prashant-Surya/addons-server,wagnerand/olympia,Joergen/olympia,kmaglione/olympia,ingenioustechie/zamboni,mstriemer/olympia,kumar303/addons-server,muffinresearch/addons-server,yfdyh000/olympia,tsl143/zamboni,robhudson/zamboni,wagnerand/addons-server,spasovski/zamboni,Jobava/zamboni,washort/zamboni,Hitechverma/zamboni,eviljeff/zamboni,mudithkr/zamboni,yfdyh000/olympia,harikishen/addons-server,clouserw/zamboni,andymckay/zamboni,crdoconnor/olympia,bqbn/addons-server,Joergen/olympia,elysium001/zamboni,Witia1/olympia,wagnerand/zamboni,Hitechverma/zamboni,ddurst/zamboni,mozilla/zamboni,mstriemer/addons-server,mdaif/olympia,andymckay/addons-server,harry-7/addons-server,atiqueahmedziad/addons-server,crdoconnor/olympia,Nolski/olympia,mozilla/addons-server,crdoconnor/olympia,muffinresearch/olympia,shahbaz17/zamboni,atiqueahmedziad/addons-server,yfdyh000/olympia,harikishen/addons-server,mozilla/zamboni,SuriyaaKudoIsc/olympia,aviarypl/mozilla-l10n-addons-server,andymckay/addons-server,andymckay/olympia,mdaif/olympia,clouserw/zamboni,Joergen/zamboni,mozilla/olympia,ayushagrawal288/zamboni,kmaglione/olympia,yfdyh000/olympia,mozilla/addons-server,mstriemer/zamboni,eviljeff/zamboni,ingenioustechie/zamboni,koehlermichael/olympia,Nolski/olympia,luckylavish/zamboni,mstriemer/olympia,jamesthechamp/zamboni,diox/zamboni,jasonthomas/zamboni,wagnerand/olympia,kumar303/olympia,SuriyaaKudoIsc/olympia,eviljeff/olympia,johancz/olympia,jpetto/olympia,kumar303/zamboni,Witia1/olympia,andymckay/zamboni,Joergen/zamboni,ddurst/zamboni,anaran/olympia,washort/zamboni,jasonthomas/zamboni,kumar303/zamboni,mstriemer/addons-server,mstriemer/addons-server,mozilla/zamboni,psiinon/addons-server,kumar303/zamboni,Revanth47/addons-server,clouserw/zamboni,wagnerand/addons-server,ddurst/zamboni,psiinon/addons-server,jpetto/olympia,elysium001/zamboni,lavish205/olympia,Joergen/zamboni,yfdyh000/olympia,Joergen/olympia,harry-7/addons-server,wagnerand/olympia,kumar303/olympia,Joergen/zamboni,muffinresearch/addons-server,crdoconnor/olympia,mudithkr/zamboni,diox/olympia,lavish205/olympia,SuriyaaKudoIsc/olympia,mozilla/addons-server,diox/zamboni,mrrrgn/olympia,kmaglione/olympia,washort/zamboni,mstriemer/olympia,elysium001/zamboni,mozilla/addons-server,Hitechverma/zamboni,eviljeff/olympia,wagnerand/addons-server,tsl143/addons-server,Joergen/zamboni,muffinresearch/olympia,wagnerand/zamboni,jpetto/olympia,mstriemer/zamboni,andymckay/addons-server,crdoconnor/olympia,harikishen/addons-server,bqbn/addons-server,Prashant-Surya/addons-server,bqbn/addons-server,muffinresearch/olympia,beni55/olympia,eviljeff/zamboni,jamesthechamp/zamboni,tsl143/zamboni,ayushagrawal288/zamboni,kumar303/olympia,mozilla/addons-server,andymckay/olympia,wagnerand/olympia,diox/olympia,robhudson/zamboni,johancz/olympia,SuriyaaKudoIsc/olympia,ayushagrawal288/zamboni,mstriemer/zamboni,anaran/olympia,spasovski/zamboni,tsl143/zamboni,shahbaz17/zamboni,mdaif/olympia,magopian/olympia,beni55/olympia,eviljeff/zamboni,muffinresearch/olympia,Revanth47/addons-server,mstriemer/addons-server,jasonthomas/zamboni,kmaglione/olympia,harry-7/addons-server,magopian/olympia,Joergen/olympia,ngokevin/zamboni,mdaif/olympia,harikishen/addons-server,muffinresearch/addons-server,psiinon/addons-server,tsl143/addons-server,ngokevin/zamboni | mkt/webapps/management/commands/list_packaged_apps.py | mkt/webapps/management/commands/list_packaged_apps.py | from optparse import make_option
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
import amo
from files.models import File
HELP = 'List all Marketplace packaged apps'
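# statuses maps the --status choices onto the corresponding amo file-status constants.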
statuses = {'pending': amo.STATUS_PENDING,
'public': amo.STATUS_PUBLIC,
'approved': amo.STATUS_PUBLIC_WAITING,
'rejected': amo.STATUS_DISABLED}
class Command(BaseCommand):
"""
Usage:
python manage.py list_packaged_apps --status=<status>
"""
option_list = BaseCommand.option_list + (
make_option('--status',
choices=statuses.keys(),
help='Status of packaged-app files'),
)
help = HELP
def handle(self, *args, **kwargs):
files = File.objects.filter(version__addon__type=amo.ADDON_WEBAPP,
version__addon__is_packaged=True)
if kwargs.get('status'):
files = files.filter(status=statuses[kwargs['status']])
filenames = []
for f in files:
try:
filenames.append(f.file_path)
except ObjectDoesNotExist:
pass
print '\n'.join(filenames)
| from optparse import make_option
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
import amo
from files.models import File
HELP = 'List all Marketplace packaged apps'
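# statuses maps the --status choices onto the corresponding amo file-status constants.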
statuses = {'pending': amo.STATUS_PENDING,
'public': amo.STATUS_PUBLIC,
'approved': amo.STATUS_PUBLIC_WAITING}
class Command(BaseCommand):
"""
Usage:
python manage.py list_packaged_apps --status=<status>
"""
option_list = BaseCommand.option_list + (
make_option('--status',
choices=statuses.keys(),
help='Status of packaged-app files'),
)
help = HELP
def handle(self, *args, **kwargs):
files = File.objects.filter(version__addon__type=amo.ADDON_WEBAPP,
version__addon__is_packaged=True)
if kwargs.get('status'):
files = files.filter(status=statuses[kwargs['status']])
filenames = []
for f in files:
try:
filenames.append(f.file_path)
except ObjectDoesNotExist:
pass
print '\n'.join(filenames)
| bsd-3-clause | Python |
090d7a0e877b838beda7cb7ca6955673def8c243 | Remove debug code | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/utils/tests/test_functional.py | bluebottle/utils/tests/test_functional.py | from django.core.urlresolvers import reverse
from django.contrib.auth.models import Group
from django.template.response import TemplateResponse
from django.http.response import HttpResponseForbidden
from django.test.client import RequestFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
class AdminPermissionsTest(BluebottleTestCase):
def setUp(self):
self.init_projects()
# Create staff user without superuser permission
self.user = BlueBottleUserFactory.create(password='testing')
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
# Add 'Staff' group for user
self.user.groups.add(Group.objects.get(name='Staff'))
self.user.save()
# Login user
self.client.login(
request=RequestFactory().post('/'), email=self.user.email, password='testing'
)
def tearDown(self):
self.client.logout()
self.user.delete()
def test_staff_forbidden_access(self):
response = self.client.get(reverse('admin:auth_group_changelist'))
self.assertIsInstance(response, HttpResponseForbidden)
def test_staff_create_initiative(self):
response = self.client.get(reverse('admin:initiatives_initiative_add'))
self.assertIsInstance(response, TemplateResponse)
def test_superuser_access(self):
self.client.logout()
        # Create staff user with superuser permission
self.user = BlueBottleUserFactory.create(password='testing')
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
self.assertTrue(
self.client.login(request=RequestFactory().post('/'), email=self.user.email, password='testing'))
response = self.client.get(reverse('admin:auth_group_changelist'))
self.assertIsInstance(response, TemplateResponse)
| from django.core.urlresolvers import reverse
from django.contrib.auth.models import Group
from django.template.response import TemplateResponse
from django.http.response import HttpResponseForbidden
from django.test.client import RequestFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
class AdminPermissionsTest(BluebottleTestCase):
def setUp(self):
self.init_projects()
# Create staff user without superuser permission
self.user = BlueBottleUserFactory.create(password='testing')
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
# Add 'Staff' group for user
self.user.groups.add(Group.objects.get(name='Staff'))
self.user.save()
# Login user
import ipdb
ipdb.set_trace()
self.client.login(
request=RequestFactory().post('/'), email=self.user.email, password='testing'
)
def tearDown(self):
self.client.logout()
self.user.delete()
def test_staff_forbidden_access(self):
response = self.client.get(reverse('admin:auth_group_changelist'))
self.assertIsInstance(response, HttpResponseForbidden)
def test_staff_create_initiative(self):
response = self.client.get(reverse('admin:initiatives_initiative_add'))
self.assertIsInstance(response, TemplateResponse)
def test_superuser_access(self):
self.client.logout()
        # Create staff user with superuser permission
self.user = BlueBottleUserFactory.create(password='testing')
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
self.assertTrue(
self.client.login(request=RequestFactory().post('/'), email=self.user.email, password='testing'))
response = self.client.get(reverse('admin:auth_group_changelist'))
self.assertIsInstance(response, TemplateResponse)
| bsd-3-clause | Python |
ee6273f4bf2939294f0843e77ab9d6b8e7979cda | Remove reference to nonexistent attribute from string party sequence's representation | m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps | byceps/blueprints/shop/models/sequence.py | byceps/blueprints/shop/models/sequence.py | # -*- coding: utf-8 -*-
"""
byceps.blueprints.shop.models.sequence
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from sqlalchemy.ext.hybrid import hybrid_property
from ....database import db
from ....util.instances import ReprBuilder
from ...party.models import Party
class PartySequencePrefix(db.Model):
"""A set of party-specific sequence number prefixes."""
__tablename__ = 'shop_party_sequences_prefixes'
party_id = db.Column(db.Unicode(20), db.ForeignKey('parties.id'), primary_key=True)
party = db.relationship(Party, backref=db.backref('shop_number_prefix', uselist=False))
article_number = db.Column(db.Unicode(20), unique=True, nullable=False)
order_number = db.Column(db.Unicode(20), unique=True, nullable=False)
def __init__(self, party, article_number_prefix, order_number_prefix):
self.party = party
self.article_number = article_number_prefix
self.order_number = order_number_prefix
def __repr__(self):
return ReprBuilder(self) \
.add('party', self.party_id) \
.add_with_lookup('article_number') \
.add_with_lookup('order_number') \
.build()
PartySequencePurpose = Enum('PartySequencePurpose', ['article', 'order'])
class PartySequence(db.Model):
"""A sequence for a party and a purpose."""
__tablename__ = 'shop_party_sequences'
party_id = db.Column(db.Unicode(20), db.ForeignKey('parties.id'), primary_key=True)
party = db.relationship(Party)
_purpose = db.Column('purpose', db.Unicode(20), primary_key=True)
value = db.Column(db.Integer, default=0, nullable=False)
def __init__(self, party, purpose):
self.party = party
self.purpose = purpose
@hybrid_property
def purpose(self):
return PartySequencePurpose[self._purpose]
@purpose.setter
def purpose(self, purpose):
assert purpose is not None
self._purpose = purpose.name
def __repr__(self):
return ReprBuilder(self) \
.add('party', self.party_id) \
.add('purpose', self.purpose.name) \
.add_with_lookup('value') \
.build()
| # -*- coding: utf-8 -*-
"""
byceps.blueprints.shop.models.sequence
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from sqlalchemy.ext.hybrid import hybrid_property
from ....database import db
from ....util.instances import ReprBuilder
from ...party.models import Party
class PartySequencePrefix(db.Model):
"""A set of party-specific sequence number prefixes."""
__tablename__ = 'shop_party_sequences_prefixes'
party_id = db.Column(db.Unicode(20), db.ForeignKey('parties.id'), primary_key=True)
party = db.relationship(Party, backref=db.backref('shop_number_prefix', uselist=False))
article_number = db.Column(db.Unicode(20), unique=True, nullable=False)
order_number = db.Column(db.Unicode(20), unique=True, nullable=False)
def __init__(self, party, article_number_prefix, order_number_prefix):
self.party = party
self.article_number = article_number_prefix
self.order_number = order_number_prefix
def __repr__(self):
return ReprBuilder(self) \
.add('party', self.party_id) \
.add_with_lookup('article_number') \
.add_with_lookup('order_number') \
.build()
PartySequencePurpose = Enum('PartySequencePurpose', ['article', 'order'])
class PartySequence(db.Model):
"""A sequence for a party and a purpose."""
__tablename__ = 'shop_party_sequences'
party_id = db.Column(db.Unicode(20), db.ForeignKey('parties.id'), primary_key=True)
party = db.relationship(Party)
_purpose = db.Column('purpose', db.Unicode(20), primary_key=True)
value = db.Column(db.Integer, default=0, nullable=False)
def __init__(self, party, purpose):
self.party = party
self.purpose = purpose
@hybrid_property
def purpose(self):
return PartySequencePurpose[self._purpose]
@purpose.setter
def purpose(self, purpose):
assert purpose is not None
self._purpose = purpose.name
def __repr__(self):
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('party', self.party_id) \
.add('purpose', self.purpose.name) \
.add_with_lookup('value') \
.build()
| bsd-3-clause | Python |
6588c1f953f063b120e7f096cfd1ae0dfafcd9aa | Add 'gbf.game.mbga' to gbf checker | moondropx/dogbot,moondropx/dogbot | dogbot/bot/listeners/gbf.py | dogbot/bot/listeners/gbf.py | import re
import requests
from requests.exceptions import MissingSchema
from dogbot.cqsdk.utils import reply
GBF_PATTERN = re.compile("(((https|http)?://)" # scheme
                         "?(([0-9a-z_!~*'().&=+$%-]+: )?[0-9a-z_!~*'().&=+$%-]+@)?" # ftp user:password
                         "(([0-9]{1,3}\.){3}[0-9]{1,3}" # IP URL
                         "|"
                         "([0-9a-z_!~*'()-]+\.)*" # domain name prefix
                         "([0-9a-zA-Z][A-Z0-9a-z-]{0,61})?[0-9a-zA-Z]\." # second-level domain
                         "[a-z]{2,6})" # top-level domain
                         "(:[0-9]{1,4})?" # port
                         "(/[0-9a-zA-Z_!~*'().;?:@&=+$,%#-]+)*/?)") # path
match_urls = []
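# match_urls caches links already confirmed as Granblue Fantasy, so repeat posts skip the HTTP check.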
def gbf(bot, message):
result = GBF_PATTERN.findall(message.text)
for index, match in enumerate(result):
if index > 10:
return False
url = match[0]
if url.split('.')[-1] in ['png', 'jpg', 'jpeg', 'gif']:
return False
if url not in match_urls:
try:
resp = requests.get(url)
except MissingSchema:
resp = requests.get('http://' + url)
if 'granblue' in resp.url or 'gbf.game.mbga' in resp.url:
match_urls.append(url)
reply(bot, message, '碧蓝幻想,请勿访问')
return True
else:
reply(bot, message, '碧蓝幻想,请勿访问')
return True
| import re
import requests
from requests.exceptions import MissingSchema
from dogbot.cqsdk.utils import reply
GBF_PATTERN = re.compile("(((https|http|ftp|rtsp|mms)?://)" # scheme
                         "?(([0-9a-z_!~*'().&=+$%-]+: )?[0-9a-z_!~*'().&=+$%-]+@)?" # ftp user:password
                         "(([0-9]{1,3}\.){3}[0-9]{1,3}" # IP URL
                         "|"
                         "([0-9a-z_!~*'()-]+\.)*" # domain name prefix
                         "([0-9a-zA-Z][A-Z0-9a-z-]{0,61})?[0-9a-zA-Z]\." # second-level domain
                         "[a-z]{2,6})" # top-level domain
                         "(:[0-9]{1,4})?" # port
                         "(/[0-9a-zA-Z_!~*'().;?:@&=+$,%#-]+)*/?)") # path
match_urls = []
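# match_urls caches links already confirmed as Granblue Fantasy, so repeat posts skip the HTTP check.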
def gbf(bot, message):
result = GBF_PATTERN.findall(message.text)
for index, match in enumerate(result):
if index > 10:
return False
url = match[0]
if url.split('.')[-1] in ['png', 'jpg', 'jpeg', 'gif']:
return False
if url not in match_urls:
try:
resp = requests.get(url)
except MissingSchema:
resp = requests.get('http://' + url)
if 'granblue' in resp.url:
match_urls.append(url)
reply(bot, message, '碧蓝幻想,请勿访问')
return True
else:
reply(bot, message, '碧蓝幻想,请勿访问')
return True
| apache-2.0 | Python |
5bb2ee7b2a8e0d51437d6536f6cd336ae48fa4a5 | Add ability to match 'Error:\n...', and escape error string | JasonGross/coq-tools,JasonGross/coq-tools | diagnose_error.py | diagnose_error.py | from __future__ import with_statement
import os, sys, tempfile, subprocess, re
from memoize import memoize
__all__ = ["has_error", "get_error_line_number", "make_reg_string", "get_coq_output", "get_error_string"]
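# In both patterns below, group 1 captures the line number and group 2 the error message.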
DEFAULT_ERROR_REG_STRING = 'File "[^"]+", line ([0-9]+), characters [0-9-]+:\n(Error:\s*[^\n]+|[^\n]+)'
DEFAULT_ERROR_REG_STRING_GENERIC = 'File "[^"]+", line ([0-9]+), characters [0-9-]+:\n(%s)'
@memoize
def has_error(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns True if the coq output encoded in output has an error
matching the given regular expression, False otherwise.
"""
errors = re.search(reg_string, output)
if errors:
return True
else:
return False
@memoize
def get_error_line_number(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns the line number that the error matching reg_string
    occurred on.
Precondition: has_error(output, reg_string)
"""
return int(re.search(reg_string, output).groups()[0])
@memoize
def get_error_string(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns the error string of the error matching reg_string.
Precondition: has_error(output, reg_string)
"""
return re.search(reg_string, output).groups()[1]
@memoize
def make_reg_string(output):
"""Returns a regular expression for matching the particular error
in output.
Precondition: has_error(output)
"""
return DEFAULT_ERROR_REG_STRING_GENERIC % re.escape(get_error_string(output))
@memoize
def get_coq_output(contents):
"""Returns the coqc output of running through the given
contents."""
with tempfile.NamedTemporaryFile(suffix='.v', delete=False) as f:
f.write(contents)
file_name = f.name
p = subprocess.Popen(['coqc', '-q', file_name], stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if os.path.exists(file_name):
os.remove(file_name)
return stderr
| from __future__ import with_statement
import os, sys, tempfile, subprocess, re
from memoize import memoize
__all__ = ["has_error", "get_error_line_number", "make_reg_string", "get_coq_output", "get_error_string"]
DEFAULT_ERROR_REG_STRING = 'File "[^"]+", line ([0-9]+), characters [0-9-]+:\n([^\n]+)'
DEFAULT_ERROR_REG_STRING_GENERIC = 'File "[^"]+", line ([0-9]+), characters [0-9-]+:\n(%s)'
@memoize
def has_error(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns True if the coq output encoded in output has an error
matching the given regular expression, False otherwise.
"""
errors = re.search(reg_string, output)
if errors:
return True
else:
return False
@memoize
def get_error_line_number(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns the line number that the error matching reg_string
    occurred on.
Precondition: has_error(output, reg_string)
"""
return int(re.search(reg_string, output).groups()[0])
@memoize
def get_error_string(output, reg_string=DEFAULT_ERROR_REG_STRING):
"""Returns the error string of the error matching reg_string.
Precondition: has_error(output, reg_string)
"""
return re.search(reg_string, output).groups()[1]
@memoize
def make_reg_string(output):
"""Returns a regular expression for matching the particular error
in output.
Precondition: has_error(output)
"""
return DEFAULT_ERROR_REG_STRING_GENERIC % get_error_string(output)
@memoize
def get_coq_output(contents):
"""Returns the coqc output of running through the given
contents."""
with tempfile.NamedTemporaryFile(suffix='.v', delete=False) as f:
f.write(contents)
file_name = f.name
p = subprocess.Popen(['coqc', '-q', file_name], stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if os.path.exists(file_name):
os.remove(file_name)
return stderr
| mit | Python |
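The commit above makes two changes visible in the diff: DEFAULT_ERROR_REG_STRING now also accepts a message of the form "Error: ..." on the line after the file/line header, and make_reg_string passes the extracted error text through re.escape before interpolating it into DEFAULT_ERROR_REG_STRING_GENERIC. The escaping matters because Coq error messages routinely contain regex metacharacters. A short sketch of the difference (the sample output below is made up for illustration):

import re

template = 'File "[^"]+", line ([0-9]+), characters [0-9-]+:\n(%s)'
error = 'Error: The term "(fun x => x)" has type ...'
# Made-up Coq-style output: header line, then the error message.
output = 'File "a.v", line 3, characters 0-10:\n' + error

# Unescaped, the parentheses and dots act as regex syntax: this pattern
# happens to compile but no longer matches the literal message (messages
# with unbalanced metacharacters would raise re.error instead).
print(re.search(template % error, output) is not None)             # False

# Escaped, every metacharacter matches itself.
print(re.search(template % re.escape(error), output) is not None)  # True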
5d2043d0b662f38bd51e062367c2e4b703b457a1 | Update version 0.11.4 -> 0.11.5 | dwavesystems/dimod,dwavesystems/dimod | dimod/__init__.py | dimod/__init__.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# version is used by serialization below so we need it before everything
__version__ = '0.11.5'
from dimod.constrained import *
import dimod.constrained
from dimod.core import *
import dimod.core
from dimod.cyutilities import *
from dimod.reference import *
import dimod.reference
from dimod.roof_duality import fix_variables
from dimod.binary import *
import dimod.binary
from dimod.discrete import *
import dimod.testing
from dimod.converters import *
import dimod.decorators
import dimod.generators
from dimod.exceptions import *
import dimod.exceptions
from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial
import dimod.higherorder
from dimod.package_info import __version__, __author__, __authoremail__, __description__
from dimod.quadratic import *
import dimod.quadratic
from dimod.traversal import *
from dimod.sampleset import *
from dimod.serialization.format import set_printoptions
import dimod.lp
from dimod.utilities import *
import dimod.utilities
from dimod.vartypes import *
# flags for some global features
REAL_INTERACTIONS = False
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# version is used by serialization below so we need it before everything
__version__ = '0.11.4'
from dimod.constrained import *
import dimod.constrained
from dimod.core import *
import dimod.core
from dimod.cyutilities import *
from dimod.reference import *
import dimod.reference
from dimod.roof_duality import fix_variables
from dimod.binary import *
import dimod.binary
from dimod.discrete import *
import dimod.testing
from dimod.converters import *
import dimod.decorators
import dimod.generators
from dimod.exceptions import *
import dimod.exceptions
from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial
import dimod.higherorder
from dimod.package_info import __version__, __author__, __authoremail__, __description__
from dimod.quadratic import *
import dimod.quadratic
from dimod.traversal import *
from dimod.sampleset import *
from dimod.serialization.format import set_printoptions
import dimod.lp
from dimod.utilities import *
import dimod.utilities
from dimod.vartypes import *
# flags for some global features
REAL_INTERACTIONS = False
| apache-2.0 | Python |