commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
bf73d6d050f3ebbc3f090bb7fc4925c0c8ba9d99 | Add timeout functionality to frog parsing | NLeSC/cptm,NLeSC/cptm | cptm/utils/frog.py | cptm/utils/frog.py | from pynlpl.clients.frogclient import FrogClient
import logging
import re
import sys
import signal
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.INFO)
def get_frogclient(port=8020):
try:
frogclient = FrogClient('localhost', port)
return frogclient
except:
logger.error('Cannot connect to the Frog server. '
'Is it running at port {}?'.format(port))
logger.info('Start the Frog server with "docker run -p '
'127.0.0.1:{}:{} -t -i proycon/lamachine frog '
'-S {}"'.format(port, port, port))
sys.exit(1)
def pos_and_lemmas(text, frogclient):
# add timeout functionality (so frog won't keep parsing faulty text
# forever)
signal.signal(signal.SIGALRM, timeout)
signal.alarm(300)
regex = re.compile(r'\(.*\)')
try:
for data in frogclient.process(text):
word, lemma, morph, ext_pos = data[:4]
if ext_pos: # ext_pos can be None
pos = regex.sub('', ext_pos)
yield pos, lemma
except Exception, e:
raise e
def timeout(signum, frame):
raise Exception("Frog is taking too long!")
| from pynlpl.clients.frogclient import FrogClient
import logging
import re
import sys
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.INFO)
def get_frogclient(port=8020):
try:
frogclient = FrogClient('localhost', port)
return frogclient
except:
logger.error('Cannot connect to the Frog server. '
'Is it running at port {}?'.format(port))
logger.info('Start the Frog server with "docker run -p '
'127.0.0.1:{}:{} -t -i proycon/lamachine frog '
'-S {}"'.format(port, port, port))
sys.exit(1)
def pos_and_lemmas(text, frogclient):
regex = re.compile(r'\(.*\)')
for data in frogclient.process(text):
word, lemma, morph, ext_pos = data[:4]
if ext_pos: # ext_pos can be None
pos = regex.sub('', ext_pos)
yield pos, lemma
| apache-2.0 | Python |
f663c6ad7f5aa99b0486711ffe65cf450c23121b | Update maintenance/admin.py | steingrd/django-maintenance | maintenance/admin.py | maintenance/admin.py | from django.contrib import admin
from models import Maintenance, MaintenanceFilter
class MaintenanceFilterInline(admin.TabularInline):
model = MaintenanceFilter
class MaintenanceAdmin(admin.ModelAdmin):
list_display = ('start_time', 'end_time', 'enabled')
list_filter = ('start_time', 'end_time', 'enabled')
list_editable = ('enabled',)
inlines = (MaintenanceFilterInline,)
admin.site.register(Maintenance, MaintenanceAdmin) | from django.contrib import admin
from models import Maintenance, MaintenanceFilter
class MaintenanceFilterInline(admin.StackedInline):
model = MaintenanceFilter
class MaintenanceAdmin(admin.ModelAdmin):
list_display = ('start_time', 'end_time', 'enabled')
list_filter = ('start_time', 'end_time', 'enabled')
list_editable = ('enabled',)
inlines = (MaintenanceFilterInline,)
admin.site.register(Maintenance, MaintenanceAdmin) | bsd-3-clause | Python |
f393619eaa6ee1508ed324adb1b88f421e55f92f | Bump version number | nabla-c0d3/nassl,nabla-c0d3/nassl,nabla-c0d3/nassl | nassl/__init__.py | nassl/__init__.py | __author__ = "Alban Diquet"
__version__ = "4.0.1"
| __author__ = "Alban Diquet"
__version__ = "4.0.1b0"
| agpl-3.0 | Python |
9597c8bc27be7ca44d503facb6a0cf3dfe76bcc3 | Revert prior change to request timeout | OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops | nc/prime_cache.py | nc/prime_cache.py | #!/usr/bin/env python
import argparse
import logging
import requests
import urllib
from django.core.urlresolvers import reverse
logger = logging.getLogger(__name__)
ENDPOINTS = ('stops', 'stops_by_reason', 'use_of_force', 'searches', 'contraband_hit_rate')
def run(root, host=None):
headers = dict()
if host is not None:
headers['Host'] = host
api = urllib.parse.urljoin(root, reverse('nc:agency-api-list'))
# get agencies
r = requests.get(api, headers=headers)
agencies = r.json()
for agency in agencies:
logger.info(agency['name'])
# prime each API endpoint
for endpoint in ENDPOINTS:
uri = "{}/{}/{}/".format(api.rstrip('/'), agency['id'],
endpoint)
req(uri, headers=headers)
# prime first search page
payload = {'agency': agency['name']}
search_uri = urllib.parse.urljoin(root, reverse('nc:stops-search'))
req(search_uri, headers, payload)
def req(uri, headers, payload=None):
try:
response = requests.get(uri, headers=headers, params=payload)
if response.status_code != 200:
logger.warning("Status not OK: {} ({})".format(
uri, response.status_code))
except requests.ConnectionError as err:
logger.error('Cannot load %s: %s', uri, err)
response = None
return response
def main():
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('uri', help='Root URL (e.g. http://0.0.0.0:8000/)')
args = parser.parse_args()
run(args.uri)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import argparse
import logging
import requests
import urllib
from django.core.urlresolvers import reverse
logger = logging.getLogger(__name__)
ENDPOINTS = ('stops', 'stops_by_reason', 'use_of_force', 'searches', 'contraband_hit_rate')
def run(root, host=None):
headers = dict()
if host is not None:
headers['Host'] = host
api = urllib.parse.urljoin(root, reverse('nc:agency-api-list'))
# get agencies
r = requests.get(api, headers=headers)
agencies = r.json()
for agency in agencies:
logger.info(agency['name'])
# prime each API endpoint
for endpoint in ENDPOINTS:
uri = "{}/{}/{}/".format(api.rstrip('/'), agency['id'],
endpoint)
req(uri, headers=headers)
# prime first search page
payload = {'agency': agency['name']}
search_uri = urllib.parse.urljoin(root, reverse('nc:stops-search'))
req(search_uri, headers, payload)
def req(uri, headers, payload=None):
try:
# requests doc says there's no timeout by default, but the log showed
# timeouts every 120 seconds when accessing an expensive agency.
response = requests.get(uri, headers=headers, params=payload, timeout=300)
if response.status_code != 200:
logger.warning("Status not OK: {} ({})".format(
uri, response.status_code))
except requests.ConnectionError as err:
logger.error('Cannot load %s: %s', uri, err)
response = None
return response
def main():
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('uri', help='Root URL (e.g. http://0.0.0.0:8000/)')
args = parser.parse_args()
run(args.uri)
if __name__ == "__main__":
main()
| mit | Python |
8bdb5850a5318f1f6196d4800c7c1582caa9fcf4 | Fix config | Gr1N/rpihelper,Gr1N/rpihelper | rpihelper/config.py | rpihelper/config.py | # -*- coding: utf-8 -*-
import os
import yaml
from flask import Flask as BaseFlask, Config as BaseConfig
from rpihelper.utils import make_dir, INSTANCE_FOLDER_PATH
__all__ = (
'Flask',
)
class Config(BaseConfig):
"""
Flask config enhanced with a `from_yaml` method.
"""
def __init__(self, *args, **kwargs):
super(Config, self).__init__(*args, **kwargs)
# Get app root path, also can use flask.root_path.
# ../../config.py
self['PROJECT_ROOT'] = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs')
self['LOG_FOLDER'] = LOG_FOLDER
make_dir(LOG_FOLDER)
def from_yaml(self, config_file):
env = os.environ.get('FLASK_ENV', 'development').upper()
self['ENVIRONMENT'] = env.lower()
with open(config_file) as f:
c = yaml.load(f)
c = c.get(env, c)
for key in c.keys():
if key.isupper():
self[key] = c[key]
class Flask(BaseFlask):
"""
Extended version of `Flask` that implements custom config class.
"""
def make_config(self, instance_relative=False):
root_path = self.root_path
if instance_relative:
root_path = self.instance_path
return Config(root_path, self.default_config)
| # -*- coding: utf-8 -*-
import os
import yaml
from flask import Flask as BaseFlask, Config as BaseConfig
from rpihelper.utils import make_dir, INSTANCE_FOLDER_PATH
__all__ = (
'Flask',
)
class Config(BaseConfig):
"""
Flask config enhanced with a `from_yaml` method.
"""
def __init__(self, *args, **kwargs):
super(Config, self).__init__(*args, **kwargs)
# Get app root path, also can use flask.root_path.
# ../../config.py
self['PROJECT_ROOT'] = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs')
self['LOG_FOLDER'] = LOG_FOLDER
make_dir(LOG_FOLDER)
def from_yaml(self, config_file):
env = os.environ.get('FLASK_ENV', 'development')
self['ENVIRONMENT'] = env.lower()
with open(config_file) as f:
c = yaml.load(f)
c = c.get(env, c)
for key in c.keys():
if key.isupper():
self[key] = c[key]
class Flask(BaseFlask):
"""
Extended version of `Flask` that implements custom config class.
"""
def make_config(self, instance_relative=False):
root_path = self.root_path
if instance_relative:
root_path = self.instance_path
return Config(root_path, self.default_config)
| mit | Python |
ab946a092d47fb99bfd08271cfcceed6dcc84f65 | update version | theno/ctutlz,theno/ctutlz | ctutlz/_version.py | ctutlz/_version.py | __version__ = "0.8.3"
| __version__ = "0.8.2"
| mit | Python |
ea269437fef6901920e74227e0706d10fb03e85c | bump to 1.0.5 | alfredodeza/ceph-doctor | ceph_medic/__init__.py | ceph_medic/__init__.py | from collections import namedtuple
class UnloadedConfig(object):
"""
This class is used as the default value for config.ceph so that if
a configuration file is not successfully loaded then it will give
a nice error message when values from the config are used.
"""
def __init__(self, error=None):
self.error = error
def __getattr__(self, *a):
raise RuntimeError(self.error)
config = namedtuple('config', ['verbosity', 'nodes', 'hosts_file', 'file'])
config.file = UnloadedConfig("No valid ceph-medic configuration file was loaded")
config.nodes = {}
metadata = {'failed_nodes': {}, 'rgws': {}, 'mgrs': {}, 'mdss': {}, 'clients': {}, 'osds': {}, 'mons': {}, 'nodes': {}}
daemon_types = [i for i in metadata.keys() if i not in ('nodes', 'failed_nodes')]
__version__ = '1.0.5'
| from collections import namedtuple
class UnloadedConfig(object):
"""
This class is used as the default value for config.ceph so that if
a configuration file is not successfully loaded then it will give
a nice error message when values from the config are used.
"""
def __init__(self, error=None):
self.error = error
def __getattr__(self, *a):
raise RuntimeError(self.error)
config = namedtuple('config', ['verbosity', 'nodes', 'hosts_file', 'file'])
config.file = UnloadedConfig("No valid ceph-medic configuration file was loaded")
config.nodes = {}
metadata = {'failed_nodes': {}, 'rgws': {}, 'mgrs': {}, 'mdss': {}, 'clients': {}, 'osds': {}, 'mons': {}, 'nodes': {}}
daemon_types = [i for i in metadata.keys() if i not in ('nodes', 'failed_nodes')]
__version__ = '1.0.4'
| mit | Python |
9a68fb81e252ecc852525f08b2dc4895c4bd6439 | remove a useless double quote in the div content | starikan/pandas-highcharts,albahnsen/pandas-highcharts,gtnx/pandas-highcharts,spookylukey/pandas-highcharts | pandas_highcharts/display.py | pandas_highcharts/display.py | # -*- coding: utf-8 -*-
"""Functions to quickly display charts in a Notebook.
"""
import string
import random
from IPython.core import getipython
from IPython.core.display import display, HTML
from core import serialize
HIGHCHARTS_SCRIPTS = """<script src="http://code.highcharts.com/highcharts.js"></script>
<script src="http://code.highcharts.com/modules/exporting.js"></script>
"""
# Automatically insert the script tag into your Notebook.
# Call when you import this module.
if 'IPKernelApp' in getipython.get_ipython().config:
display(HTML(HIGHCHARTS_SCRIPTS))
def load_highcharts():
return display(HTML(HIGHCHARTS_SCRIPTS))
def _generate_div_id_chart(prefix="chart_id", digits=8):
"""Generate a random id for div chart.
"""
choices = (random.randrange(0, 52) for _ in xrange(digits))
return prefix + "".join((string.ascii_letters[x] for x in choices))
def display_highcharts(df, render_to=None, **kwargs):
"""Display you DataFrame with Highcharts.
df: DataFrame
render_to: str
div id for plotting your data
"""
chart_id = render_to if render_to is not None else _generate_div_id_chart()
json_data = serialize(df, render_to=chart_id, **kwargs)
content = """<div id="{chart_id}"</div>
<script type="text/javascript">{data}</script>"""
return display(HTML(content.format(chart_id=chart_id,
data=json_data)))
| # -*- coding: utf-8 -*-
"""Functions to quickly display charts in a Notebook.
"""
import string
import random
from IPython.core import getipython
from IPython.core.display import display, HTML
from core import serialize
HIGHCHARTS_SCRIPTS = """<script src="http://code.highcharts.com/highcharts.js"></script>
<script src="http://code.highcharts.com/modules/exporting.js"></script>
"""
# Automatically insert the script tag into your Notebook.
# Call when you import this module.
if 'IPKernelApp' in getipython.get_ipython().config:
display(HTML(HIGHCHARTS_SCRIPTS))
def load_highcharts():
return display(HTML(HIGHCHARTS_SCRIPTS))
def _generate_div_id_chart(prefix="chart_id", digits=8):
"""Generate a random id for div chart.
"""
choices = (random.randrange(0, 52) for _ in xrange(digits))
return prefix + "".join((string.ascii_letters[x] for x in choices))
def display_highcharts(df, render_to=None, **kwargs):
"""Display you DataFrame with Highcharts.
df: DataFrame
render_to: str
div id for plotting your data
"""
chart_id = render_to if render_to is not None else _generate_div_id_chart()
json_data = serialize(df, render_to=chart_id, **kwargs)
content = """<div id="{chart_id}""</div>
<script type="text/javascript">{data}</script>"""
return display(HTML(content.format(chart_id=chart_id,
data=json_data)))
| mit | Python |
4616fdefc1c7df8acccdd89ea792fa24ecfa9ca6 | Check environment variables before the tests are started | tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common | perf-tests/src/perf-tests.py | perf-tests/src/perf-tests.py | import json
import time
import datetime
import subprocess
import os.path
import sys
import queue
import threading
from coreapi import *
from jobsapi import *
import benchmarks
import graph
def check_environment_variable(env_var_name):
print("Checking: {e} environment variable existence".format(
e=env_var_name))
if os.environ.get(env_var_name) is None:
print("Fatal: {e} environment variable has to be specified"
.format(e=env_var_name))
sys.exit(1)
else:
print(" ok")
def check_environment_variables():
environment_variables = [
"F8A_API_URL",
"F8A_JOB_API_URL",
"RECOMMENDER_API_TOKEN",
"JOB_API_TOKEN",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"S3_REGION_NAME"]
for environment_variable in environment_variables:
check_environment_variable(environment_variable)
def main():
check_environment_variables()
pass
if __name__ == "__main__":
# execute only if run as a script
main()
| def main():
pass
if __name__ == "__main__":
# execute only if run as a script
main()
| apache-2.0 | Python |
6d76cd9cb5e652507367c9559a9373ed7abec471 | fix all_hosts decorator | pavel-paulau/perfrunner,hsharsha/perfrunner,dkao-cb/perfrunner,hsharsha/perfrunner,EricACooper/perfrunner,vmx/perfrunner,pavel-paulau/perfrunner,EricACooper/perfrunner,PaintScratcher/perfrunner,couchbase/perfrunner,dkao-cb/perfrunner,mikewied/perfrunner,thomas-couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,vmx/perfrunner,PaintScratcher/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,couchbase/perfrunner,mikewied/perfrunner,EricACooper/perfrunner,thomas-couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner | perfrunner/helpers/remote.py | perfrunner/helpers/remote.py | from uuid import uuid4
from fabric.api import execute, get, run, parallel
from fabric import state
from logger import logger
from perfrunner.helpers import Helper
def all_hosts(task):
def wrapper(self, *args, **kargs):
return execute(parallel(task), self, *args, hosts=self.hosts, **kargs)
return wrapper
class RemoteHelper(Helper):
ARCH = {'i686': 'x86', 'i386': 'x86', 'x86_64': 'x86_64'}
def __init__(self, *args, **kwargs):
super(RemoteHelper, self).__init__(*args, **kwargs)
state.env.user = self.ssh_username
state.env.password = self.ssh_password
state.env.host_string = self.hosts[0]
state.output.running = False
state.output.stdout = False
def wget(self, url, outdir='/tmp'):
logger.info('Fetching {0}'.format(url))
run('wget -nc "{0}" -P {1}'.format(url, outdir))
def detect_pkg(self):
logger.info('Detecting package manager')
dist = run('python -c "import platform; print platform.dist()[0]"')
if dist in ('Ubuntu', 'Debian'):
return 'deb'
else:
return 'rpm'
def detect_arch(self):
logger.info('Detecting platform architecture')
arch = run('arch')
return self.ARCH[arch]
@all_hosts
def reset_swap(self):
logger.info('Resetting swap')
run('swapoff --all && swapon --all')
@all_hosts
def drop_caches(self):
logger.info('Dropping memory cache')
run('sync && echo 3 > /proc/sys/vm/drop_caches')
@all_hosts
def clean_data_path(self):
for path in (self.data_path, self.index_path):
run('rm -fr {0}/*'.format(path))
@all_hosts
def collect_info(self):
logger.info('Running cbcollect_info')
fname = '/tmp/{0}.zip'.format(uuid4().hex)
run('/opt/couchbase/bin/cbcollect_info {0}'.format(fname))
get('{0}'.format(fname))
run('rm -f {0}'.format(fname))
| from uuid import uuid4
from fabric.api import execute, get, run, parallel
from fabric import state
from logger import logger
from perfrunner.helpers import Helper
def all_hosts(task):
def wrapper(self, *args, **kargs):
return execute(parallel(task), *args, hosts=self.hosts, **kargs)
return wrapper
class RemoteHelper(Helper):
ARCH = {'i686': 'x86', 'i386': 'x86', 'x86_64': 'x86_64'}
def __init__(self, *args, **kwargs):
super(RemoteHelper, self).__init__(*args, **kwargs)
state.env.user = self.ssh_username
state.env.password = self.ssh_password
state.env.host_string = self.hosts[0]
state.output.running = False
state.output.stdout = False
def wget(self, url, outdir='/tmp'):
logger.info('Fetching {0}'.format(url))
run('wget -nc "{0}" -P {1}'.format(url, outdir))
def detect_pkg(self):
logger.info('Detecting package manager')
dist = run('python -c "import platform; print platform.dist()[0]"')
if dist in ('Ubuntu', 'Debian'):
return 'deb'
else:
return 'rpm'
def detect_arch(self):
logger.info('Detecting platform architecture')
arch = run('arch')
return self.ARCH[arch]
@all_hosts
def reset_swap(self):
logger.info('Resetting swap')
run('swapoff --all && swapon --all')
@all_hosts
def drop_caches(self):
logger.info('Dropping memory cache')
run('sync && echo 3 > /proc/sys/vm/drop_caches')
@all_hosts
def clean_data_path(self):
for path in (self.data_path, self.index_path):
run('rm -fr {0}/*'.format(path))
@all_hosts
def collect_info(self):
logger.info('Running cbcollect_info')
fname = '/tmp/{0}.zip'.format(uuid4().hex)
run('/opt/couchbase/bin/cbcollect_info {0}'.format(fname))
get('{0}'.format(fname))
run('rm -f {0}'.format(fname))
| apache-2.0 | Python |
bc84fa7ee20ffc3ec40239d645a48f79863db33d | Remove access git revision | lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview | sasview/__init__.py | sasview/__init__.py | __version__ = "3.1.0"
__build__ = "1"
| __version__ = "3.1.0"
__build__ = "1"
try:
import subprocess
d = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
__build__ = str(d).strip()
except:
import logging
import sys
logging.warning("Error while determining build number\n %s" % sys.exc_value)
| bsd-3-clause | Python |
6582bdc62891308359718de78e05066cfc0b9131 | update nodal_stress() in its2D_3.py example | lokik/sfepy,lokik/sfepy,rc/sfepy,sfepy/sfepy,BubuLK/sfepy,vlukes/sfepy,BubuLK/sfepy,lokik/sfepy,vlukes/sfepy,vlukes/sfepy,rc/sfepy,sfepy/sfepy,rc/sfepy,lokik/sfepy,sfepy/sfepy,BubuLK/sfepy | examples/linear_elasticity/its2D_3.py | examples/linear_elasticity/its2D_3.py | r"""
Diametrically point loaded 2-D disk with nodal stress calculation. See
:ref:`sec-primer`.
Find :math:`\ul{u}` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
= 0
\;, \quad \forall \ul{v} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import print_function
from __future__ import absolute_import
from examples.linear_elasticity.its2D_1 import *
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.discrete.fem.geometry_element import geometry_data
from sfepy.discrete import FieldVariable
from sfepy.discrete.fem import Field
import numpy as nm
gdata = geometry_data['2_3']
nc = len(gdata.coors)
def nodal_stress(out, pb, state, extend=False, integrals=None):
'''
Calculate stresses at nodal points.
'''
# Point load.
mat = pb.get_materials()['Load']
P = 2.0 * mat.get_data('special', 'val')[1]
# Calculate nodal stress.
pb.time_update()
if integrals is None: integrals = pb.get_integrals()
stress = pb.evaluate('ev_cauchy_stress.ivn.Omega(Asphalt.D, u)', mode='qp',
integrals=integrals, copy_materials=False)
sfield = Field.from_args('stress', nm.float64, (3,),
pb.domain.regions['Omega'])
svar = FieldVariable('sigma', 'parameter', sfield,
primary_var_name='(set-to-None)')
svar.set_data_from_qp(stress, integrals['ivn'])
print('\n==================================================================')
print('Given load = %.2f N' % -P)
print('\nAnalytical solution')
print('===================')
print('Horizontal tensile stress = %.5e MPa/mm' % (-2.*P/(nm.pi*150.)))
print('Vertical compressive stress = %.5e MPa/mm' % (-6.*P/(nm.pi*150.)))
print('\nFEM solution')
print('============')
print('Horizontal tensile stress = %.5e MPa/mm' % (svar()[0]))
print('Vertical compressive stress = %.5e MPa/mm' % (-svar()[1]))
print('==================================================================')
return out
asphalt = materials['Asphalt'][0]
asphalt.update({'D' : stiffness_from_youngpoisson(2, young, poisson)})
options.update({'post_process_hook' : 'nodal_stress',})
integrals = {
'ivn' : ('custom', gdata.coors, [gdata.volume / nc] * nc),
}
| r"""
Diametrically point loaded 2-D disk with nodal stress calculation. See
:ref:`sec-primer`.
Find :math:`\ul{u}` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
= 0
\;, \quad \forall \ul{v} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import print_function
from __future__ import absolute_import
from examples.linear_elasticity.its2D_1 import *
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.discrete.fem.geometry_element import geometry_data
from sfepy.discrete import FieldVariable
from sfepy.discrete.fem import Field
import numpy as nm
gdata = geometry_data['2_3']
nc = len(gdata.coors)
def nodal_stress(out, pb, state, extend=False, integrals=None):
'''
Calculate stresses at nodal points.
'''
# Point load.
mat = pb.get_materials()['Load']
P = 2.0 * mat.get_data('special', 'val')[1]
# Calculate nodal stress.
pb.time_update()
if integrals is None: integrals = pb.get_integrals()
stress = pb.evaluate('ev_cauchy_stress.ivn.Omega(Asphalt.D, u)', mode='qp',
integrals=integrals, copy_materials=False)
sfield = Field.from_args('stress', nm.float64, (3,),
pb.domain.regions['Omega'])
svar = FieldVariable('sigma', 'parameter', sfield,
primary_var_name='(set-to-None)')
svar.set_data_from_qp(stress, integrals['ivn'])
print('\n==================================================================')
print('Given load = %.2f N' % -P)
print('\nAnalytical solution')
print('===================')
print('Horizontal tensile stress = %.5e MPa/mm' % (-2.*P/(nm.pi*150.)))
print('Vertical compressive stress = %.5e MPa/mm' % (-6.*P/(nm.pi*150.)))
print('\nFEM solution')
print('============')
print('Horizontal tensile stress = %.5e MPa/mm' % (svar()[0][0]))
print('Vertical compressive stress = %.5e MPa/mm' % (-svar()[0][1]))
print('==================================================================')
return out
asphalt = materials['Asphalt'][0]
asphalt.update({'D' : stiffness_from_youngpoisson(2, young, poisson)})
options.update({'post_process_hook' : 'nodal_stress',})
integrals = {
'ivn' : ('custom', gdata.coors, [gdata.volume / nc] * nc),
}
| bsd-3-clause | Python |
0903a60041c9215d835f35f400e0175c43014636 | Comment on versioning method in nose2._version | ptthiem/nose2,ptthiem/nose2 | nose2/_version.py | nose2/_version.py | """version information"""
# taken from http://stackoverflow.com/a/17626524/1836144
# The following line *must* be the last in the module, exactly as formatted:
# could also use advice from
# https://packaging.python.org/guides/single-sourcing-package-version/
__version__ = '0.7.0'
| """version information"""
# taken from http://stackoverflow.com/a/17626524/1836144
# The following line *must* be the last in the module, exactly as formatted:
__version__ = '0.7.0'
| bsd-2-clause | Python |
13f7a5adf60d7e51938dc7efcfc475b9703a4859 | Add a "missing column message" | njvack/scorify | scorify/datafile.py | scorify/datafile.py | # -*- coding: utf-8 -*-
# Part of the scorify package
# Copyright 2014 Board of Regents of the University of Wisconsin System
"""
Reads data files (or CSV objects) into datafile objects.
Datafiles are iterable and indexable by column name. When reading, you pass
in a scoresheet.LayoutSection, which tells you where data and header sections
are.
"""
class Datafile(object):
def __init__(self, lines, layout_section):
self.lines = lines
self.layout_section = layout_section
self.header = []
self.data = []
super(Datafile, self).__init__()
def read(self):
self.header = []
self.data = []
for line_num, line in enumerate(self.lines):
# Since we assume layout_section is valid, we only care about
# header and skip lines -- everything else must be data.
line_type = ''
if line_num < len(self.layout_section.directives):
line_type = self.layout_section.directives[line_num].info
if line_type == 'skip':
continue
if line_type == 'header':
self.header = [h.strip() for h in line]
else:
self.append_data(line)
def append_data(self, data):
# Force lines of funny length to be the header's length
len_diff = len(self.header) - len(data)
padding = [''] * len_diff
full_line = data + padding
self.data.append(dict(zip(self.header, full_line)))
def apply_exclusions(self, exclusion_section):
new_data = []
for row in self.data:
exclude = any([e.excludes(row) for e in exclusion_section])
if not exclude:
new_data.append(row)
self.data = new_data
def missing_column_message(self, column):
return "Can't find column {0}\nKnown columns: {1}".format(
column, ", ".join(self.header))
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getitem__(self, item):
return self.data[item]
| # -*- coding: utf-8 -*-
# Part of the scorify package
# Copyright 2014 Board of Regents of the University of Wisconsin System
"""
Reads data files (or CSV objects) into datafile objects.
Datafiles are iterable and indexable by column name. When reading, you pass
in a scoresheet.LayoutSection, which tells you where data and header sections
are.
"""
class Datafile(object):
def __init__(self, lines, layout_section):
self.lines = lines
self.layout_section = layout_section
self.header = []
self.data = []
super(Datafile, self).__init__()
def read(self):
self.header = []
self.data = []
for line_num, line in enumerate(self.lines):
# Since we assume layout_section is valid, we only care about
# header and skip lines -- everything else must be data.
line_type = ''
if line_num < len(self.layout_section.directives):
line_type = self.layout_section.directives[line_num].info
if line_type == 'skip':
continue
if line_type == 'header':
self.header = [h.strip() for h in line]
else:
self.append_data(line)
def append_data(self, data):
# Force lines of funny length to be the header's length
len_diff = len(self.header) - len(data)
padding = [''] * len_diff
full_line = data + padding
self.data.append(dict(zip(self.header, full_line)))
def apply_exclusions(self, exclusion_section):
new_data = []
for row in self.data:
exclude = any([e.excludes(row) for e in exclusion_section])
if not exclude:
new_data.append(row)
self.data = new_data
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getitem__(self, item):
return self.data[item]
| mit | Python |
f654cb58a2a609e3d16a723e9114448c8a17ab62 | clean up script a little | ibrahimcesar/panda,ibrahimcesar/panda,PalmBeachPost/panda,ibrahimcesar/panda,pandaproject/panda,PalmBeachPost/panda,datadesk/panda,PalmBeachPost/panda,ibrahimcesar/panda,datadesk/panda,PalmBeachPost/panda,ibrahimcesar/panda,pandaproject/panda,PalmBeachPost/panda,pandaproject/panda,pandaproject/panda,datadesk/panda,pandaproject/panda,datadesk/panda,datadesk/panda | scripts/check_po.py | scripts/check_po.py | #!/usr/bin/env python
import os, os.path
import re
import sys
placeholder_pat = re.compile("%\((.+?)\)(\w)")
def extract_placeholders(s):
return set(placeholder_pat.findall(s))
def check_file(fn):
msgid = ''
msgstr = ''
workingon = 'msgid'
mismatches = []
for line in open(fn):
if line.startswith('#'): continue
text = ''
line = line.rstrip()
if line.startswith('msg'):
workingon, text = line.split(' ',1)
if workingon == 'msgid':
if msgid and msgstr and len(msgstr.strip()) > 0:
id_placeholders = extract_placeholders(msgid)
str_placeholders = extract_placeholders(msgstr)
if len(id_placeholders) != len(str_placeholders) or (len(id_placeholders.difference(str_placeholders)) != 0):
mismatches.append((msgid,msgstr))
msgid = msgstr = ''
else:
text = line
text = text.strip('"')
if text:
if workingon == 'msgid':
msgid += text
else:
msgstr += text
if mismatches:
print "WARNING: %i mismatches in %s" % (len(mismatches),fn)
for msgid, msgstr in mismatches:
print 'msgid:' + msgid
print 'msgstr:' + msgstr
print
if __name__ == '__main__':
try:
start_dir = sys.argv[1]
except:
start_dir = '../locale'
for path, dirs, files in os.walk(start_dir):
for f in files:
if f.endswith('.po'):
check_file(os.path.join(path,f))
| #!/usr/bin/env python
import os, os.path
import re
placeholder_pat = re.compile("%\((.+?)\)(\w)")
def extract_placeholders(s):
return set(placeholder_pat.findall(s))
def check_file(fn):
msgid = ''
msgstr = ''
workingon = 'msgid'
mismatches = []
for line in open(fn):
if line.startswith('#'): continue
text = ''
if line.startswith('msg'):
workingon, text = line.split(' ',1)
if workingon == 'msgid':
if msgid and msgstr:
id_placeholders = extract_placeholders(msgid)
str_placeholders = extract_placeholders(msgstr)
if len(id_placeholders) != len(str_placeholders) or (len(id_placeholders.difference(str_placeholders)) != 0):
mismatches.append((msgid,msgstr))
msgid = msgstr = ''
else:
text = line
text = text.strip('"')
if text:
if workingon == 'msgid':
msgid += text
else:
msgstr += text
if mismatches:
print "WARNING: %i mismatches in %s" % (len(mismatches),fn)
for msgid, msgstr in mismatches:
print 'msgid:' + msgid
print 'msgstr:' + msgstr
print
for path, dirs, files in os.walk('../locale'):
for f in files:
if f.endswith('.po'):
check_file(os.path.join(path,f))
| mit | Python |
9dc1fd590e39a49789741d0dd171e6411a865640 | adjust commands -> model where applicable | PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild | benchbuild/environments/adapters/repository.py | benchbuild/environments/adapters/repository.py | import abc
import typing as tp
import attr
from benchbuild.environments.domain import events, model
from benchbuild.environments.service_layer import buildah
@attr.s
class AbstractRegistry(abc.ABC):
    """Base class for container-image registries.

    Every image added to or fetched from the registry is remembered in
    ``seen`` so callers can later inspect which images were touched.
    """

    # Images added to or fetched through this registry instance.
    seen: tp.Set[model.Image] = attr.ib(default=attr.Factory(set))

    def add(self, image: model.Image) -> None:
        """Store *image* via the backend and record it in ``seen``."""
        self._add(image)
        self.seen.add(image)

    def get(self, tag: str) -> model.MaybeImage:
        """Look up an image by *tag*; returns None when it is unknown."""
        image = self._get(tag)
        if image:
            self.seen.add(image)
        return image

    def create(self, tag: str, layers: tp.List[model.Layer]) -> model.Image:
        """Construct an image from *layers*, register it, and return it."""
        image = self._create(tag, layers)
        if image:
            self.add(image)
        return image

    @abc.abstractmethod
    def _create(self, tag: str, layers: tp.List[model.Layer]) -> model.Image:
        raise NotImplementedError

    @abc.abstractmethod
    def _add(self, image: model.Image) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def _get(self, tag: str) -> model.MaybeImage:
        # Annotated MaybeImage (was Image) for consistency with get() and
        # with BuildahRegistry._get, both of which may yield None.
        raise NotImplementedError
@attr.s
class BuildahRegistry(AbstractRegistry):
    """Registry implementation backed by the buildah CLI."""

    # Working containers spawned while materializing images.
    containers: tp.Set[model.Container] = attr.ib(default=attr.Factory(set))

    def _add(self, image: model.Image) -> None:
        """Materialize *image*: spawn a working container from its base
        image and apply each layer in order, emitting a LayerCreated
        event per layer."""
        container = model.Container(
            buildah.create_working_container(image.from_), image)
        self.containers.add(container)
        for layer in image.layers:
            buildah.spawn_layer(container, layer)
            image.events.append(events.LayerCreated(str(layer)))

    def _get(self, tag: str) -> model.MaybeImage:
        """Return the buildah image tagged *tag*, or None if absent."""
        return buildah.find_image(tag)

    def _create(self, tag: str,
                layers: tp.List[model.Layer]) -> model.Image:
        """Build an Image value object from *layers*.

        Raises IndexError when *layers* contains no FromLayer.
        NOTE(review): assumes the FromLayer sits at index 0 -- the
        remainder of the image is taken as layers[1:]; confirm with
        callers before relying on FromLayers at other positions.
        """
        from_ = [layer for layer in layers
                 if isinstance(layer, model.FromLayer)].pop(0)
        return model.Image(tag, from_, layers[1:])
| import abc
import typing as tp
import attr
from benchbuild.environments.domain import commands, events, model
from benchbuild.environments.service_layer import buildah
@attr.s
class AbstractRegistry(abc.ABC):
    """Base class for container-image registries.

    Every image added to or fetched from the registry is remembered in
    ``seen`` so callers can later inspect which images were touched.
    """
    # Images added to or fetched through this registry instance.
    seen: tp.Set[model.Image] = attr.ib(default=attr.Factory(set))
    def add(self, image: model.Image) -> None:
        """Store *image* via the backend and record it in ``seen``."""
        self._add(image)
        self.seen.add(image)
    def get(self, tag: str) -> model.MaybeImage:
        """Look up an image by *tag*; returns None when it is unknown."""
        image = self._get(tag)
        if image:
            self.seen.add(image)
        return image
    def create(self, tag: str,
               layers: tp.List[commands.LayerCommand]) -> model.Image:
        """Construct an image from *layers*, register it, and return it."""
        image = self._create(tag, layers)
        if image:
            self.add(image)
        return image
    @abc.abstractmethod
    def _create(self, tag: str,
                layers: tp.List[commands.LayerCommand]) -> model.Image:
        raise NotImplementedError
    @abc.abstractmethod
    def _add(self, image: model.Image) -> None:
        raise NotImplementedError
    @abc.abstractmethod
    def _get(self, tag: str) -> model.MaybeImage:
        # MaybeImage (was Image): concrete implementations may return
        # None, as get() above already expects.
        raise NotImplementedError
@attr.s
class BuildahRegistry(AbstractRegistry):
    """Registry implementation backed by the buildah CLI."""
    # Working containers spawned while materializing images.
    containers: tp.Set[model.Container] = attr.ib(default=attr.Factory(set))
    def _add(self, image: model.Image) -> None:
        # Spawn a working container from the base image, then apply each
        # layer in order, emitting a LayerCreated event per layer.
        container = model.Container(
            buildah.create_working_container(image.from_), image)
        self.containers.add(container)
        for layer in image.layers:
            buildah.spawn_layer(container, layer)
            image.events.append(events.LayerCreated(str(layer)))
    def _get(self, tag: str) -> model.MaybeImage:
        """Return the buildah image tagged *tag*, or None if absent."""
        return buildah.find_image(tag)
    def _create(self, tag: str, layers: tp.List[commands.LayerCommand]) -> model.Image:
        # Raises IndexError when no FromLayer is present.
        # NOTE(review): assumes the FromLayer sits at index 0 -- the rest
        # of the image is taken as layers[1:]; confirm with callers.
        from_ = [l for l in layers if isinstance(l, model.FromLayer)].pop(0)
        return model.Image(tag, from_, layers[1:])
| mit | Python |
76f61245e4b3e6bbcac624fe436d1655384660a8 | remove .cur as image type | yanni4night/ursa2,yanni4night/ursa2 | ursa2/proxy.py | ursa2/proxy.py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
proxy.py
changelog
2013-12-11[17:23:52]:created
@info yinyong,osx-x64,Undefined,10.129.164.77,py,/Volumes/yinyong/ursa2/src
@author yanni4night@gmail.com
@version 0.0.1
@since 0.0.1
'''
import requests as R
from urlparse import urlparse
#import mimetypes
import utils
import re
from conf import C,log
#mimetypes.init()
def proxy(target_url,req,res):
    '''Forward the incoming request *req* to *target_url* and relay the
    remote response through *res*.

    Binary resources are redirected instead of being proxied.  Only GET
    and POST are supported; any other method yields a 500 response.
    '''
    if not target_url:
        return res.send(code = 500,content = 'Empty url not supported')
    # Redirect binary resources directly instead of streaming them through.
    parsed_url = urlparse(target_url)
    if utils.isBinary(parsed_url.path):
        return res.redirect(target_url)
    if 'GET' == req.method:
        request = R.get
    elif 'POST' == req.method:
        request = R.post
    else:
        # Previously fell through with `request` unbound, so other methods
        # surfaced as a confusing NameError-derived 500; fail explicitly.
        log.error('[proxy]unsupported method %s' % req.method)
        return res.send(code = 500,content = 'Method not supported')
    try:
        # Ask the remote server not to compress so the body can be
        # relayed verbatim.
        if req.headers.get('accept-encoding'):
            del req.headers['accept-encoding']
        r = request(target_url,headers = req.headers)
        # The local server supplies its own Date and Server headers.
        if r.headers.get('date'):
            del r.headers['date']
        if r.headers.get('server'):
            del r.headers['server']
        return res.send(code = r.status_code,content = r.content or '',headers = r.headers)
    except Exception as e:
        # `as` syntax is valid on Python 2.6+ and Python 3.
        log.error('[proxy]%s'%e)
        return res.send(code = 500,content = str(e))
def main():
    '''Ad-hoc smoke test: fetch a page and show its Content-Type.'''
    resp = R.get('http://www.w3.org/TR/css3-color/', data={'name': "yes"})
    # Single parenthesized argument prints identically on Python 2 and 3.
    print(resp.headers.get('Content-Type'))
if __name__ == '__main__':
main() | #!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
proxy.py
changelog
2013-12-11[17:23:52]:created
@info yinyong,osx-x64,Undefined,10.129.164.77,py,/Volumes/yinyong/ursa2/src
@author yanni4night@gmail.com
@version 0.0.1
@since 0.0.1
'''
import requests as R
from urlparse import urlparse
#import mimetypes
import utils
import re
from conf import C,log
#mimetypes.init()
def proxy(target_url,req,res):
    '''Forward the incoming request *req* to *target_url* and relay the
    remote response through *res*.  Binary resources are redirected
    instead of being proxied.
    '''
    if not target_url:
        return res.send(code = 500,content = 'Empty url not supported')
    # Redirect binary resources directly instead of streaming them through.
    parsed_url = urlparse(target_url)
    #mime = mimetypes.guess_type(parsed_url.path,False)
    #content_type = mime[0] or 'text/plain'
    if utils.isBinary(parsed_url.path):#re.match( utils.BINARY_CONTENT_TYPE_KEYWORDS , content_type,re.IGNORECASE ):
        return res.redirect(target_url)
    # NOTE(review): methods other than GET/POST leave `request` unbound;
    # the resulting NameError is swallowed by the except below and
    # surfaces as an opaque 500 response.
    if 'GET' == req.method:
        request = R.get
    elif 'POST' == req.method:
        request = R.post
    try:
        # Ask the remote server not to compress so the body can be
        # relayed verbatim.
        if req.headers.get('accept-encoding'):
            del req.headers['accept-encoding']
        r = request(target_url,headers = req.headers)
        # The local server supplies its own Date and Server headers.
        if r.headers.get('date'):
            del r.headers['date']
        if r.headers.get('server'):
            del r.headers['server']
        return res.send(code = r.status_code,content = r.content or '',headers = r.headers)
    except Exception, e:
        log.error('[proxy]%s'%e)
        return res.send(code = 500,content = str(e))
def main():
    '''Ad-hoc smoke test: fetch a page and print its Content-Type.'''
    r = R.get('http://www.w3.org/TR/css3-color/',data={'name':"yes"})
    print r.headers.get('Content-Type')
if __name__ == '__main__':
main() | mit | Python |
3fe775b6e9a3a96a0c2b71334867a4f4f34b8095 | modify first_boot to account for new massive swap on tmp partitions. | DrXyzzy/smc,DrXyzzy/smc,tscholl2/smc,sagemathinc/smc,sagemathinc/smc,tscholl2/smc,sagemathinc/smc,DrXyzzy/smc,DrXyzzy/smc,sagemathinc/smc,tscholl2/smc,tscholl2/smc,tscholl2/smc | salvus/scripts/first_boot.py | salvus/scripts/first_boot.py | #!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os, socket
# If hostname isn't "salvus-base", then setup /tmp and swap, both carved
# out of the salvus-base volume group on this machine's disk.
if socket.gethostname() != "salvus-base":
    # Enable swap
    os.system("swapon /dev/salvus-base/swap")
    # Mount tmp; sticky bit + world-writable, like a normal /tmp.
    os.system("mount /dev/salvus-base/tmp /tmp; chmod +t /tmp; chmod a+rwx /tmp/")
# The presence of /mnt/home marks a compute machine.
if os.path.exists('/mnt/home/'):
    # Compute machine
    if not os.path.exists('/mnt/home/aquota.group'):
        # Quota files are missing: first boot with this disk image.
        os.system("quotacheck -cug /mnt/home")
    os.system("quotaon -a")
    # disable quotas for now, so that students in my class can do Sage development.
    os.system('quotaoff -a')
    # Restore user accounts
    if os.path.exists('/mnt/home/etc/'):
        os.system("cp /mnt/home/etc/* /etc/")
    else:
        os.system("mkdir -p /mnt/home/etc/")
    # Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
    # os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
    # Scratch is persistent but not backed up.
    # NOTE(review): "chmod +t /mnt/home/tmp" below looks like a leftover
    # from the now-commented-out /tmp setup above -- confirm it is wanted.
    os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
| #!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
# The presence of /mnt/home marks a compute machine.
if os.path.exists('/mnt/home/'):
    # Compute machine
    if not os.path.exists('/mnt/home/aquota.group'):
        # Quota files are missing: first boot with this disk image.
        os.system("quotacheck -cug /mnt/home")
    os.system("quotaon -a")
    # disable quotas for now, so that students in my class can do Sage development.
    os.system('quotaoff -a')
    # Restore user accounts
    if os.path.exists('/mnt/home/etc/'):
        os.system("cp /mnt/home/etc/* /etc/")
    else:
        os.system("mkdir -p /mnt/home/etc/")
    # Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
    os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
    # Scratch is persistent but not backed up; the chmod on /mnt/home/tmp
    # here is redundant with the line above.
    os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
| agpl-3.0 | Python |
90480fff4e2ad72efec8f2c97f01b3793b8dde42 | Fix pylint warnings | dongjoon-hyun/electron,darwin/electron,jcblw/electron,d-salas/electron,astoilkov/electron,jsutcodes/electron,edulan/electron,webmechanicx/electron,chrisswk/electron,aaron-goshine/electron,twolfson/electron,systembugtj/electron,roadev/electron,shockone/electron,saronwei/electron,kazupon/electron,bwiggs/electron,gerhardberger/electron,jcblw/electron,fffej/electron,jaanus/electron,deepak1556/atom-shell,eric-seekas/electron,brenca/electron,MaxGraey/electron,BionicClick/electron,kazupon/electron,simonfork/electron,mhkeller/electron,yan-foto/electron,biblerule/UMCTelnetHub,michaelchiche/electron,wan-qy/electron,MaxGraey/electron,digideskio/electron,mubassirhayat/electron,setzer777/electron,zhakui/electron,takashi/electron,tylergibson/electron,fritx/electron,benweissmann/electron,sircharleswatson/electron,michaelchiche/electron,shennushi/electron,neutrous/electron,RIAEvangelist/electron,faizalpribadi/electron,lzpfmh/electron,bitemyapp/electron,pombredanne/electron,soulteary/electron,gstack/infinium-shell,Floato/electron,leethomas/electron,vHanda/electron,robinvandernoord/electron,jsutcodes/electron,RobertJGabriel/electron,Evercoder/electron,preco21/electron,christian-bromann/electron,tomashanacek/electron,oiledCode/electron,BionicClick/electron,oiledCode/electron,systembugtj/electron,dahal/electron,bbondy/electron,BionicClick/electron,brave/muon,abhishekgahlot/electron,pandoraui/electron,arturts/electron,neutrous/electron,bright-sparks/electron,coderhaoxin/electron,smczk/electron,edulan/electron,fffej/electron,bbondy/electron,the-ress/electron,Jonekee/electron,bruce/electron,aichingm/electron,greyhwndz/electron,kikong/electron,aaron-goshine/electron,trankmichael/electron,d-salas/electron,jannishuebl/electron,thomsonreuters/electron,eric-seekas/electron,rajatsingla28/electron,voidbridge/electron,bwiggs/electron,pombredanne/electron,jiaz/electron,felixrieseberg/electron,matiasinsaurralde/electron,tylergibson/el
ectron,tomashanacek/electron,noikiy/electron,baiwyc119/electron,chriskdon/electron,fffej/electron,eric-seekas/electron,Andrey-Pavlov/electron,simonfork/electron,bbondy/electron,bwiggs/electron,wolfflow/electron,trankmichael/electron,mrwizard82d1/electron,thingsinjars/electron,kokdemo/electron,John-Lin/electron,baiwyc119/electron,kazupon/electron,jtburke/electron,rajatsingla28/electron,icattlecoder/electron,sky7sea/electron,Rokt33r/electron,meowlab/electron,Evercoder/electron,pombredanne/electron,subblue/electron,DivyaKMenon/electron,christian-bromann/electron,meowlab/electron,Jacobichou/electron,Faiz7412/electron,dkfiresky/electron,sky7sea/electron,BionicClick/electron,aecca/electron,Gerhut/electron,John-Lin/electron,the-ress/electron,gbn972/electron,wan-qy/electron,JesselJohn/electron,fabien-d/electron,Floato/electron,setzer777/electron,roadev/electron,brave/electron,anko/electron,destan/electron,gabrielPeart/electron,leftstick/electron,deed02392/electron,tincan24/electron,bpasero/electron,jtburke/electron,dahal/electron,MaxGraey/electron,gstack/infinium-shell,arusakov/electron,thompsonemerson/electron,rhencke/electron,faizalpribadi/electron,coderhaoxin/electron,jaanus/electron,jonatasfreitasv/electron,bitemyapp/electron,jlhbaseball15/electron,maxogden/atom-shell,gabriel/electron,bwiggs/electron,synaptek/electron,thompsonemerson/electron,felixrieseberg/electron,meowlab/electron,bobwol/electron,cos2004/electron,John-Lin/electron,dongjoon-hyun/electron,Rokt33r/electron,destan/electron,tincan24/electron,fffej/electron,yan-foto/electron,astoilkov/electron,michaelchiche/electron,SufianHassan/electron,RIAEvangelist/electron,destan/electron,baiwyc119/electron,SufianHassan/electron,xfstudio/electron,bpasero/electron,jjz/electron,brave/muon,arusakov/electron,shiftkey/electron,simonfork/electron,Gerhut/electron,pirafrank/electron,nicholasess/electron,noikiy/electron,bwiggs/electron,synaptek/electron,kokdemo/electron,Neron-X5/electron,setzer777/electron,JesselJohn/electron,be
nweissmann/electron,xiruibing/electron,GoooIce/electron,brave/muon,cos2004/electron,darwin/electron,yalexx/electron,fffej/electron,MaxWhere/electron,gerhardberger/electron,darwin/electron,gabriel/electron,thomsonreuters/electron,jcblw/electron,takashi/electron,mhkeller/electron,RIAEvangelist/electron,greyhwndz/electron,gbn972/electron,medixdev/electron,neutrous/electron,zhakui/electron,sircharleswatson/electron,dkfiresky/electron,Faiz7412/electron,carsonmcdonald/electron,matiasinsaurralde/electron,rprichard/electron,Rokt33r/electron,pandoraui/electron,thomsonreuters/electron,tinydew4/electron,jlord/electron,bbondy/electron,thompsonemerson/electron,twolfson/electron,shiftkey/electron,jannishuebl/electron,adamjgray/electron,simongregory/electron,robinvandernoord/electron,sshiting/electron,felixrieseberg/electron,tincan24/electron,IonicaBizauKitchen/electron,voidbridge/electron,simongregory/electron,mhkeller/electron,JussMee15/electron,tomashanacek/electron,etiktin/electron,beni55/electron,beni55/electron,noikiy/electron,MaxWhere/electron,aecca/electron,bright-sparks/electron,evgenyzinoviev/electron,bruce/electron,pandoraui/electron,d-salas/electron,minggo/electron,deed02392/electron,vHanda/electron,gbn972/electron,RIAEvangelist/electron,fabien-d/electron,bitemyapp/electron,jlord/electron,voidbridge/electron,takashi/electron,leftstick/electron,edulan/electron,ianscrivener/electron,shockone/electron,stevekinney/electron,kostia/electron,etiktin/electron,rsvip/electron,trankmichael/electron,the-ress/electron,bright-sparks/electron,biblerule/UMCTelnetHub,jiaz/electron,hokein/atom-shell,mubassirhayat/electron,aaron-goshine/electron,abhishekgahlot/electron,shiftkey/electron,mrwizard82d1/electron,timruffles/electron,cqqccqc/electron,meowlab/electron,ervinb/electron,voidbridge/electron,natgolov/electron,deepak1556/atom-shell,Faiz7412/electron,wan-qy/electron,joneit/electron,jannishuebl/electron,hokein/atom-shell,stevekinney/electron,farmisen/electron,LadyNaggaga/electron,vHand
a/electron,kikong/electron,benweissmann/electron,RIAEvangelist/electron,howmuchcomputer/electron,seanchas116/electron,biblerule/UMCTelnetHub,fritx/electron,robinvandernoord/electron,matiasinsaurralde/electron,GoooIce/electron,preco21/electron,leolujuyi/electron,aecca/electron,jaanus/electron,shockone/electron,micalan/electron,nekuz0r/electron,fireball-x/atom-shell,xfstudio/electron,brave/muon,Jonekee/electron,abhishekgahlot/electron,rreimann/electron,mattdesl/electron,icattlecoder/electron,trigrass2/electron,mattdesl/electron,Jacobichou/electron,posix4e/electron,sky7sea/electron,subblue/electron,bright-sparks/electron,roadev/electron,preco21/electron,noikiy/electron,tonyganch/electron,tonyganch/electron,Evercoder/electron,twolfson/electron,shennushi/electron,minggo/electron,joneit/electron,Andrey-Pavlov/electron,digideskio/electron,nagyistoce/electron-atom-shell,thomsonreuters/electron,rprichard/electron,oiledCode/electron,preco21/electron,bright-sparks/electron,coderhaoxin/electron,Floato/electron,jlhbaseball15/electron,bpasero/electron,carsonmcdonald/electron,dkfiresky/electron,xiruibing/electron,lrlna/electron,sky7sea/electron,seanchas116/electron,sshiting/electron,noikiy/electron,synaptek/electron,fffej/electron,aichingm/electron,bruce/electron,kcrt/electron,bobwol/electron,jaanus/electron,stevekinney/electron,electron/electron,Neron-X5/electron,ankitaggarwal011/electron,fireball-x/atom-shell,Jonekee/electron,shennushi/electron,shiftkey/electron,tonyganch/electron,jlord/electron,posix4e/electron,gamedevsam/electron,mattotodd/electron,jsutcodes/electron,systembugtj/electron,leolujuyi/electron,fritx/electron,JussMee15/electron,tonyganch/electron,howmuchcomputer/electron,MaxGraey/electron,Floato/electron,kcrt/electron,brenca/electron,pombredanne/electron,jiaz/electron,pandoraui/electron,baiwyc119/electron,vipulroxx/electron,fireball-x/atom-shell,vipulroxx/electron,roadev/electron,christian-bromann/electron,posix4e/electron,mattotodd/electron,miniak/electron,brave/e
lectron,shaundunne/electron,astoilkov/electron,IonicaBizauKitchen/electron,lzpfmh/electron,renaesop/electron,bpasero/electron,joneit/electron,noikiy/electron,RobertJGabriel/electron,fireball-x/atom-shell,hokein/atom-shell,iftekeriba/electron,deepak1556/atom-shell,pirafrank/electron,setzer777/electron,sky7sea/electron,egoist/electron,brenca/electron,vaginessa/electron,simonfork/electron,yan-foto/electron,jhen0409/electron,RIAEvangelist/electron,thomsonreuters/electron,subblue/electron,kcrt/electron,MaxGraey/electron,trankmichael/electron,roadev/electron,the-ress/electron,joneit/electron,lzpfmh/electron,Ivshti/electron,smczk/electron,kenmozi/electron,jacksondc/electron,yan-foto/electron,minggo/electron,renaesop/electron,mubassirhayat/electron,wan-qy/electron,aecca/electron,leftstick/electron,kostia/electron,xiruibing/electron,JussMee15/electron,brave/electron,jsutcodes/electron,miniak/electron,baiwyc119/electron,kikong/electron,coderhaoxin/electron,shaundunne/electron,JesselJohn/electron,howmuchcomputer/electron,shiftkey/electron,adamjgray/electron,fomojola/electron,egoist/electron,cos2004/electron,cqqccqc/electron,trigrass2/electron,bbondy/electron,mhkeller/electron,mattdesl/electron,MaxWhere/electron,vHanda/electron,smczk/electron,vipulroxx/electron,bitemyapp/electron,JussMee15/electron,adamjgray/electron,SufianHassan/electron,shennushi/electron,Neron-X5/electron,Andrey-Pavlov/electron,nagyistoce/electron-atom-shell,deed02392/electron,ankitaggarwal011/electron,xfstudio/electron,micalan/electron,felixrieseberg/electron,eriser/electron,tinydew4/electron,adcentury/electron,tomashanacek/electron,bitemyapp/electron,anko/electron,beni55/electron,bpasero/electron,aichingm/electron,mirrh/electron,digideskio/electron,micalan/electron,stevemao/electron,nagyistoce/electron-atom-shell,mirrh/electron,stevemao/electron,stevemao/electron,synaptek/electron,dkfiresky/electron,shockone/electron,rsvip/electron,jhen0409/electron,joaomoreno/atom-shell,webmechanicx/electron,roadev/electr
on,JesselJohn/electron,tinydew4/electron,posix4e/electron,kazupon/electron,tincan24/electron,Evercoder/electron,etiktin/electron,mjaniszew/electron,matiasinsaurralde/electron,micalan/electron,rhencke/electron,gerhardberger/electron,chrisswk/electron,gamedevsam/electron,nagyistoce/electron-atom-shell,smczk/electron,trigrass2/electron,gstack/infinium-shell,sircharleswatson/electron,Andrey-Pavlov/electron,leolujuyi/electron,SufianHassan/electron,posix4e/electron,soulteary/electron,leolujuyi/electron,gbn972/electron,soulteary/electron,greyhwndz/electron,shaundunne/electron,stevekinney/electron,stevekinney/electron,ankitaggarwal011/electron,neutrous/electron,kostia/electron,bwiggs/electron,mjaniszew/electron,Rokt33r/electron,subblue/electron,setzer777/electron,soulteary/electron,MaxWhere/electron,jsutcodes/electron,mirrh/electron,webmechanicx/electron,jonatasfreitasv/electron,seanchas116/electron,maxogden/atom-shell,smczk/electron,sshiting/electron,LadyNaggaga/electron,deepak1556/atom-shell,leethomas/electron,xiruibing/electron,jhen0409/electron,kokdemo/electron,Zagorakiss/electron,ianscrivener/electron,mattotodd/electron,greyhwndz/electron,timruffles/electron,vaginessa/electron,arusakov/electron,wolfflow/electron,kenmozi/electron,darwin/electron,saronwei/electron,tincan24/electron,DivyaKMenon/electron,MaxWhere/electron,saronwei/electron,arusakov/electron,maxogden/atom-shell,timruffles/electron,twolfson/electron,faizalpribadi/electron,ankitaggarwal011/electron,farmisen/electron,vHanda/electron,fritx/electron,mjaniszew/electron,wolfflow/electron,electron/electron,astoilkov/electron,sircharleswatson/electron,iftekeriba/electron,jcblw/electron,simonfork/electron,medixdev/electron,digideskio/electron,ianscrivener/electron,jtburke/electron,egoist/electron,preco21/electron,RobertJGabriel/electron,greyhwndz/electron,natgolov/electron,Floato/electron,bitemyapp/electron,vipulroxx/electron,DivyaKMenon/electron,dkfiresky/electron,systembugtj/electron,yalexx/electron,ianscrivener/el
ectron,chriskdon/electron,RobertJGabriel/electron,simongregory/electron,smczk/electron,icattlecoder/electron,Ivshti/electron,systembugtj/electron,ervinb/electron,Evercoder/electron,JussMee15/electron,aaron-goshine/electron,joneit/electron,meowlab/electron,rhencke/electron,JesselJohn/electron,wolfflow/electron,leftstick/electron,kenmozi/electron,Evercoder/electron,bobwol/electron,wan-qy/electron,d-salas/electron,iftekeriba/electron,ervinb/electron,thompsonemerson/electron,simongregory/electron,edulan/electron,tomashanacek/electron,tylergibson/electron,ianscrivener/electron,trigrass2/electron,gerhardberger/electron,Floato/electron,kenmozi/electron,benweissmann/electron,rprichard/electron,jtburke/electron,meowlab/electron,pirafrank/electron,jannishuebl/electron,pirafrank/electron,kokdemo/electron,jiaz/electron,evgenyzinoviev/electron,mattotodd/electron,nicobot/electron,Rokt33r/electron,aichingm/electron,evgenyzinoviev/electron,eriser/electron,icattlecoder/electron,rprichard/electron,dongjoon-hyun/electron,stevemao/electron,ervinb/electron,jtburke/electron,oiledCode/electron,seanchas116/electron,dahal/electron,davazp/electron,sshiting/electron,pombredanne/electron,digideskio/electron,thomsonreuters/electron,aliib/electron,trigrass2/electron,arturts/electron,matiasinsaurralde/electron,fritx/electron,thingsinjars/electron,timruffles/electron,kazupon/electron,jonatasfreitasv/electron,evgenyzinoviev/electron,electron/electron,SufianHassan/electron,kostia/electron,zhakui/electron,arusakov/electron,chriskdon/electron,miniak/electron,webmechanicx/electron,greyhwndz/electron,gerhardberger/electron,jaanus/electron,electron/electron,eriser/electron,nicholasess/electron,kcrt/electron,evgenyzinoviev/electron,twolfson/electron,hokein/atom-shell,bruce/electron,voidbridge/electron,takashi/electron,lrlna/electron,jcblw/electron,kokdemo/electron,zhakui/electron,gamedevsam/electron,adcentury/electron,aecca/electron,nicobot/electron,adcentury/electron,xfstudio/electron,arusakov/electron,k
ostia/electron,thompsonemerson/electron,John-Lin/electron,brave/muon,gabriel/electron,coderhaoxin/electron,kcrt/electron,jlhbaseball15/electron,aliib/electron,adamjgray/electron,saronwei/electron,trankmichael/electron,lrlna/electron,DivyaKMenon/electron,BionicClick/electron,pirafrank/electron,John-Lin/electron,cqqccqc/electron,fomojola/electron,shockone/electron,jhen0409/electron,d-salas/electron,nicobot/electron,astoilkov/electron,Neron-X5/electron,carsonmcdonald/electron,jjz/electron,sircharleswatson/electron,leolujuyi/electron,benweissmann/electron,jtburke/electron,rreimann/electron,lzpfmh/electron,dahal/electron,gbn972/electron,synaptek/electron,jacksondc/electron,brenca/electron,davazp/electron,rreimann/electron,mjaniszew/electron,Faiz7412/electron,eriser/electron,bobwol/electron,benweissmann/electron,etiktin/electron,leolujuyi/electron,egoist/electron,GoooIce/electron,vipulroxx/electron,mhkeller/electron,Ivshti/electron,mrwizard82d1/electron,nicobot/electron,deed02392/electron,thingsinjars/electron,gerhardberger/electron,fabien-d/electron,tylergibson/electron,eric-seekas/electron,mattdesl/electron,natgolov/electron,pirafrank/electron,Neron-X5/electron,joaomoreno/atom-shell,miniak/electron,kikong/electron,mrwizard82d1/electron,electron/electron,fomojola/electron,jacksondc/electron,renaesop/electron,lrlna/electron,nekuz0r/electron,John-Lin/electron,xiruibing/electron,gerhardberger/electron,Gerhut/electron,Andrey-Pavlov/electron,Zagorakiss/electron,darwin/electron,michaelchiche/electron,webmechanicx/electron,LadyNaggaga/electron,jacksondc/electron,sshiting/electron,nekuz0r/electron,Gerhut/electron,chrisswk/electron,cqqccqc/electron,aichingm/electron,dkfiresky/electron,iftekeriba/electron,renaesop/electron,Jonekee/electron,trigrass2/electron,nicobot/electron,tonyganch/electron,LadyNaggaga/electron,nekuz0r/electron,faizalpribadi/electron,rhencke/electron,bpasero/electron,rajatsingla28/electron,fabien-d/electron,leethomas/electron,LadyNaggaga/electron,voidbridge/ele
ctron,aliib/electron,aaron-goshine/electron,rsvip/electron,gbn972/electron,michaelchiche/electron,mirrh/electron,christian-bromann/electron,DivyaKMenon/electron,RobertJGabriel/electron,preco21/electron,christian-bromann/electron,thingsinjars/electron,hokein/atom-shell,iftekeriba/electron,felixrieseberg/electron,egoist/electron,simongregory/electron,deed02392/electron,takashi/electron,Faiz7412/electron,yalexx/electron,pombredanne/electron,soulteary/electron,aichingm/electron,adamjgray/electron,arturts/electron,howmuchcomputer/electron,minggo/electron,deed02392/electron,gabriel/electron,beni55/electron,jannishuebl/electron,bruce/electron,minggo/electron,posix4e/electron,the-ress/electron,rsvip/electron,miniak/electron,Ivshti/electron,joaomoreno/atom-shell,arturts/electron,mrwizard82d1/electron,jiaz/electron,ervinb/electron,soulteary/electron,electron/electron,nicobot/electron,bright-sparks/electron,rsvip/electron,vHanda/electron,gamedevsam/electron,gstack/infinium-shell,yan-foto/electron,jjz/electron,wolfflow/electron,seanchas116/electron,ankitaggarwal011/electron,tylergibson/electron,natgolov/electron,gabrielPeart/electron,iftekeriba/electron,dongjoon-hyun/electron,rreimann/electron,yalexx/electron,cos2004/electron,robinvandernoord/electron,biblerule/UMCTelnetHub,icattlecoder/electron,cos2004/electron,bbondy/electron,ervinb/electron,pandoraui/electron,ankitaggarwal011/electron,neutrous/electron,medixdev/electron,mrwizard82d1/electron,egoist/electron,Jacobichou/electron,farmisen/electron,cqqccqc/electron,Zagorakiss/electron,wan-qy/electron,natgolov/electron,mattdesl/electron,davazp/electron,renaesop/electron,mattotodd/electron,joaomoreno/atom-shell,felixrieseberg/electron,thingsinjars/electron,brave/electron,medixdev/electron,chriskdon/electron,robinvandernoord/electron,GoooIce/electron,jcblw/electron,kostia/electron,aaron-goshine/electron,leftstick/electron,fireball-x/atom-shell,jonatasfreitasv/electron,micalan/electron,tinydew4/electron,vipulroxx/electron,vaginessa/
electron,davazp/electron,cqqccqc/electron,kenmozi/electron,beni55/electron,GoooIce/electron,xfstudio/electron,kcrt/electron,dongjoon-hyun/electron,carsonmcdonald/electron,takashi/electron,maxogden/atom-shell,minggo/electron,leethomas/electron,nekuz0r/electron,cos2004/electron,matiasinsaurralde/electron,dahal/electron,the-ress/electron,jonatasfreitasv/electron,tomashanacek/electron,vaginessa/electron,dongjoon-hyun/electron,JussMee15/electron,micalan/electron,rreimann/electron,mjaniszew/electron,kenmozi/electron,joneit/electron,shaundunne/electron,the-ress/electron,farmisen/electron,xiruibing/electron,farmisen/electron,setzer777/electron,IonicaBizauKitchen/electron,tinydew4/electron,Andrey-Pavlov/electron,oiledCode/electron,davazp/electron,arturts/electron,adamjgray/electron,gabrielPeart/electron,anko/electron,brave/muon,aliib/electron,eric-seekas/electron,deepak1556/atom-shell,subblue/electron,beni55/electron,jjz/electron,jlhbaseball15/electron,natgolov/electron,Zagorakiss/electron,xfstudio/electron,brave/electron,IonicaBizauKitchen/electron,aecca/electron,leethomas/electron,pandoraui/electron,brenca/electron,maxogden/atom-shell,vaginessa/electron,subblue/electron,fomojola/electron,kokdemo/electron,seanchas116/electron,Jacobichou/electron,brenca/electron,IonicaBizauKitchen/electron,RobertJGabriel/electron,astoilkov/electron,fomojola/electron,yalexx/electron,Jacobichou/electron,MaxWhere/electron,jaanus/electron,SufianHassan/electron,etiktin/electron,mubassirhayat/electron,stevemao/electron,leftstick/electron,gabriel/electron,Jonekee/electron,rajatsingla28/electron,shennushi/electron,abhishekgahlot/electron,etiktin/electron,Gerhut/electron,gabrielPeart/electron,dahal/electron,lrlna/electron,destan/electron,eriser/electron,aliib/electron,lzpfmh/electron,jlhbaseball15/electron,tylergibson/electron,oiledCode/electron,arturts/electron,robinvandernoord/electron,trankmichael/electron,gamedevsam/electron,carsonmcdonald/electron,mattdesl/electron,gamedevsam/electron,DivyaKMeno
n/electron,bpasero/electron,shaundunne/electron,kazupon/electron,mhkeller/electron,rajatsingla28/electron,icattlecoder/electron,edulan/electron,jlord/electron,destan/electron,adcentury/electron,renaesop/electron,neutrous/electron,zhakui/electron,gabriel/electron,jlord/electron,GoooIce/electron,joaomoreno/atom-shell,michaelchiche/electron,shockone/electron,davazp/electron,kikong/electron,eric-seekas/electron,anko/electron,nicholasess/electron,jhen0409/electron,zhakui/electron,vaginessa/electron,evgenyzinoviev/electron,joaomoreno/atom-shell,Jonekee/electron,lzpfmh/electron,howmuchcomputer/electron,anko/electron,Rokt33r/electron,rhencke/electron,nekuz0r/electron,mubassirhayat/electron,sky7sea/electron,adcentury/electron,bobwol/electron,mjaniszew/electron,jjz/electron,chriskdon/electron,stevemao/electron,d-salas/electron,saronwei/electron,tonyganch/electron,carsonmcdonald/electron,medixdev/electron,shaundunne/electron,rhencke/electron,rajatsingla28/electron,christian-bromann/electron,nagyistoce/electron-atom-shell,tincan24/electron,IonicaBizauKitchen/electron,Jacobichou/electron,digideskio/electron,Ivshti/electron,thingsinjars/electron,chriskdon/electron,fomojola/electron,baiwyc119/electron,timruffles/electron,synaptek/electron,edulan/electron,BionicClick/electron,jsutcodes/electron,simongregory/electron,yan-foto/electron,twolfson/electron,nicholasess/electron,eriser/electron,stevekinney/electron,simonfork/electron,abhishekgahlot/electron,chrisswk/electron,coderhaoxin/electron,electron/electron,shennushi/electron,Neron-X5/electron,bobwol/electron,faizalpribadi/electron,thompsonemerson/electron,nicholasess/electron,gabrielPeart/electron,gabrielPeart/electron,Zagorakiss/electron,jacksondc/electron,jlhbaseball15/electron,adcentury/electron,fritx/electron,abhishekgahlot/electron,gstack/infinium-shell,shiftkey/electron,faizalpribadi/electron,chrisswk/electron,howmuchcomputer/electron,mirrh/electron,Zagorakiss/electron,wolfflow/electron,medixdev/electron,leethomas/electron,jo
natasfreitasv/electron,ianscrivener/electron,tinydew4/electron,biblerule/UMCTelnetHub,mattotodd/electron,biblerule/UMCTelnetHub,sircharleswatson/electron,yalexx/electron,jhen0409/electron,rreimann/electron,bruce/electron,Gerhut/electron,jacksondc/electron,miniak/electron,saronwei/electron,brave/electron,destan/electron,jannishuebl/electron,sshiting/electron,lrlna/electron,JesselJohn/electron,anko/electron,systembugtj/electron,jjz/electron,fabien-d/electron,mirrh/electron,webmechanicx/electron,nicholasess/electron,aliib/electron,jiaz/electron,LadyNaggaga/electron,farmisen/electron | script/upload-windows-pdb.py | script/upload-windows-pdb.py | #!/usr/bin/env python
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
# Repository root; this script lives in <root>/script/.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# Where symstore assembles the symbol tree before upload.
SYMBOLS_DIR = 'dist\\symbols'
# Location of the prebuilt libchromiumcontent download.
DOWNLOAD_DIR = 'vendor\\brightray\\vendor\\download\\libchromiumcontent'
# PDB files to collect into the symbol store.
PDB_LIST = [
  'out\\Release\\atom.exe.pdb',
  DOWNLOAD_DIR + '\\Release\\chromiumcontent.dll.pdb',
]
def main():
  """Collect the PDBs into a fresh symbol store and push it to S3."""
  os.chdir(SOURCE_ROOT)
  # Start from a clean symbol tree every run.
  rm_rf(SYMBOLS_DIR)
  safe_mkdir(SYMBOLS_DIR)
  for pdb_path in PDB_LIST:
    run_symstore(pdb_path, SYMBOLS_DIR, 'AtomShell')
  bucket, access_key, secret_key = s3_config()
  # S3 keys are case-sensitive; normalize everything to lower case.
  files = [name.lower() for name in glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')]
  upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols',
files)
if __name__ == '__main__':
import sys
sys.exit(main())
| #!/usr/bin/env python
import os
import glob

from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config


# Repository root (this script lives one directory below it).
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
# Windows debug-symbol files to publish.
PDB_LIST = [
  'out\\Release\\atom.exe.pdb',
  'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]


def main():
    """Collect the PDBs into a local symbol store and upload it to S3."""
    os.chdir(SOURCE_ROOT)

    rm_rf(SYMBOLS_DIR)
    safe_mkdir(SYMBOLS_DIR)
    for pdb in PDB_LIST:
        run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')

    bucket, access_key, secret_key = s3_config()
    files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
    # Lowercase the paths before upload -- presumably to normalize
    # symstore's mixed-case output for the S3 key space; confirm.
    files = [f.lower() for f in files]
    upload_symbols(bucket, access_key, secret_key, files)


def run_symstore(pdb, dest, product):
    # `symstore` ships with the Windows Debugging Tools.
    execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])


def upload_symbols(bucket, access_key, secret_key, files):
    s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)


if __name__ == '__main__':
    import sys
    sys.exit(main())
| mit | Python |
0968180fdbb91708115fcbc3187eef93943a912d | Fix crasher | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin | astrobin_apps_platesolving/templatetags/astrobin_apps_platesolving_tags.py | astrobin_apps_platesolving/templatetags/astrobin_apps_platesolving_tags.py | # Django
from django.contrib.contenttypes.models import ContentType
from django.template import Library, Node
# This app
from astrobin_apps_platesolving.solver import Solver
register = Library()
@register.inclusion_tag(
    'astrobin_apps_platesolving/inclusion_tags/platesolving_machinery.html',
    takes_context = True)
def platesolving_machinery(context, target):
    """Build the template context for the plate-solving widget of *target*."""
    content_type = ContentType.objects.get_for_model(target)
    solution = target.solution
    return {
        'object_id': target.pk,
        'content_type_id': content_type.pk,
        'solution_id': solution.pk if solution else 0,
        'solution_status': solution.status if solution else 0,
    }
| # Django
from django.contrib.contenttypes.models import ContentType
from django.template import Library, Node
# This app
from astrobin_apps_platesolving.solver import Solver
register = Library()
@register.inclusion_tag(
    'astrobin_apps_platesolving/inclusion_tags/platesolving_machinery.html',
    takes_context = True)
def platesolving_machinery(context, target):
    """Build the template context for the plate-solving widget of *target*.

    Guards against targets without a solution: previously
    ``target.solution.status`` raised AttributeError whenever
    ``target.solution`` was None, while ``solution_id`` already
    handled that case.
    """
    content_type = ContentType.objects.get_for_model(target)
    return {
        'object_id': target.pk,
        'content_type_id': content_type.pk,
        'solution_id': target.solution.pk if target.solution else 0,
        'solution_status': target.solution.status if target.solution else 0,
    }
| agpl-3.0 | Python |
a2de3371b63654ea205af35314c46b5eb48921a9 | update rules_proto digest to 5c8e443 (#1235) | googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript | repositories.bzl | repositories.bzl | load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def gapic_generator_typescript_repositories():
    # Declares the external repositories this workspace depends on.
    # `maybe` makes each declaration a no-op if the repo already exists.

    # Node.js rules for building/running the TypeScript generator.
    maybe(
        http_archive,
        name = "build_bazel_rules_nodejs",
        sha256 = "c911b5bd8aee8b0498cc387cacdb5f917098ce477fb4182db07b0ef8a9e045c0",
        urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.7.1/rules_nodejs-4.7.1.tar.gz"],
    )

    # Protobuf build rules, pinned to commit 5c8e443.
    maybe(
        http_archive,
        name = "rules_proto",
        sha256 = "0e97e44782814000c3942645174d5b081dacfa3e70d51886a0c21f7a47992e89",
        strip_prefix = "rules_proto-5c8e443b53d02012375585cabb85fd940c78cfca",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/5c8e443b53d02012375585cabb85fd940c78cfca.tar.gz",
            "https://github.com/bazelbuild/rules_proto/archive/5c8e443b53d02012375585cabb85fd940c78cfca.tar.gz",
        ],
    )

    _rules_gapic_version = "0.9.0"

    # GAPIC build rules, fetched by release tag (no sha256 pin).
    maybe(
        http_archive,
        name = "rules_gapic",
        strip_prefix = "rules_gapic-%s" % _rules_gapic_version,
        urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version],
    )

    # Protobuf compiler/runtime.
    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "d7d204a59fd0d2d2387bd362c2155289d5060f32122c4d1d922041b61191d522",
        strip_prefix = "protobuf-3.21.5",
        urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.5.tar.gz"],
    )
| load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def gapic_generator_typescript_repositories():
    # Declares the external repositories this workspace depends on.
    # `maybe` makes each declaration a no-op if the repo already exists.

    # Node.js rules for building/running the TypeScript generator.
    maybe(
        http_archive,
        name = "build_bazel_rules_nodejs",
        sha256 = "c911b5bd8aee8b0498cc387cacdb5f917098ce477fb4182db07b0ef8a9e045c0",
        urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.7.1/rules_nodejs-4.7.1.tar.gz"],
    )

    # Protobuf build rules, pinned to commit 066581b.
    maybe(
        http_archive,
        name = "rules_proto",
        sha256 = "08af5f34b61c12feae75e6d8b54e961953d057da462620cf629d67ddbe4ef06e",
        strip_prefix = "rules_proto-066581b14bcbf87206c89ff5fcdd6f9915fd30fb",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/066581b14bcbf87206c89ff5fcdd6f9915fd30fb.tar.gz",
            "https://github.com/bazelbuild/rules_proto/archive/066581b14bcbf87206c89ff5fcdd6f9915fd30fb.tar.gz",
        ],
    )

    _rules_gapic_version = "0.9.0"

    # GAPIC build rules, fetched by release tag (no sha256 pin).
    maybe(
        http_archive,
        name = "rules_gapic",
        strip_prefix = "rules_gapic-%s" % _rules_gapic_version,
        urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version],
    )

    # Protobuf compiler/runtime.
    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "d7d204a59fd0d2d2387bd362c2155289d5060f32122c4d1d922041b61191d522",
        strip_prefix = "protobuf-3.21.5",
        urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.5.tar.gz"],
    )
| apache-2.0 | Python |
407784fd00cdab3b9c09e56f89406955110438a9 | solve the dependency in datasets and embedding | faneshion/MatchZoo,faneshion/MatchZoo | matchzoo/__init__.py | matchzoo/__init__.py | from pathlib import Path
# Per-user cache directory (~/.matchzoo), created on first import.
USER_DIR = Path.expanduser(Path('~')).joinpath('.matchzoo')
# exist_ok avoids the check-then-create race when several processes
# import the package concurrently (the old `if not exists(): mkdir()`
# could raise FileExistsError).
USER_DIR.mkdir(parents=True, exist_ok=True)

# Downloaded datasets live under ~/.matchzoo/datasets.
USER_DATA_DIR = USER_DIR.joinpath('datasets')
USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
from .logger import logger
from .version import __version__
from . import processor_units
from .processor_units import chain_transform, ProcessorUnit
from .data_pack import DataPack, pack, build_vocab_unit, \
build_unit_from_data_pack, load_data_pack
from .data_generator import DataGenerator
from .data_generator import PairDataGenerator
from .data_generator import DynamicDataGenerator
from . import tasks
from . import metrics
from . import losses
from . import engine
from . import preprocessors
from . import models
from . import embedding
from . import datasets
from .engine import load_model, load_preprocessor
from .auto import Director
| from pathlib import Path
# Per-user cache directory (~/.matchzoo), created on first import.
USER_DIR = Path.expanduser(Path('~')).joinpath('.matchzoo')
# exist_ok avoids the check-then-create race when several processes
# import the package concurrently (the old `if not exists(): mkdir()`
# could raise FileExistsError).
USER_DIR.mkdir(parents=True, exist_ok=True)

# Downloaded datasets live under ~/.matchzoo/datasets.
USER_DATA_DIR = USER_DIR.joinpath('datasets')
USER_DATA_DIR.mkdir(parents=True, exist_ok=True)
from .logger import logger
from .version import __version__
from . import processor_units
from .processor_units import chain_transform, ProcessorUnit
from .data_pack import DataPack, pack, build_vocab_unit, \
build_unit_from_data_pack, load_data_pack
from .data_generator import DataGenerator
from .data_generator import PairDataGenerator
from .data_generator import DynamicDataGenerator
from . import tasks
from . import metrics
from . import losses
from . import engine
from . import preprocessors
from . import models
from . import datasets
from . import embedding
from .engine import load_model, load_preprocessor
from .auto import Director
| apache-2.0 | Python |
84ad932d4f7536dce6618de8a034880da389a3c0 | Add missing docstring | juampi/algo1 | pa1/inversions.py | pa1/inversions.py | #!/usr/bin/env python3
"""
Implementation of the Merge Sort algorithm in order to
count the number of inversions in a list of integers.
"""
import argparse
def read_file(filename):
with open(filename) as f:
return [int(line) for line in f]
def _merge_sort(A, B, begin, end):
if end - begin < 2:
return
middle = begin + (end - begin)//2
_merge_sort(A, B, begin, middle)
_merge_sort(A, B, middle, end)
merge(A, B, begin, middle, end)
A[begin:end] = B[begin:end]
def merge(A, B, begin, middle, end):
global inversions
i, j = begin, middle
for k in range(begin, end):
if i < middle and (j >= end or A[i] <= A[j]):
B[k] = A[i]
i += 1
else:
inversions += middle - i
B[k] = A[j]
j += 1
def merge_sort(A):
_merge_sort(A, A[:], 0, len(A))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Compute number of inversions.")
parser.add_argument('filename',
type=str,
help="file containing list of integers")
args = parser.parse_args()
integer_list = read_file(args.filename)
inversions = 0
merge_sort(integer_list)
print(inversions)
| #!/usr/bin/env python3
"""
"""
import argparse
def read_file(filename):
with open(filename) as f:
return [int(line) for line in f]
def _merge_sort(A, B, begin, end):
if end - begin < 2:
return
middle = begin + (end - begin)//2
_merge_sort(A, B, begin, middle)
_merge_sort(A, B, middle, end)
merge(A, B, begin, middle, end)
A[begin:end] = B[begin:end]
def merge(A, B, begin, middle, end):
global inversions
i, j = begin, middle
for k in range(begin, end):
if i < middle and (j >= end or A[i] <= A[j]):
B[k] = A[i]
i += 1
else:
inversions += middle - i
B[k] = A[j]
j += 1
def merge_sort(A):
_merge_sort(A, A[:], 0, len(A))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Compute number of inversions.")
parser.add_argument('filename', type=str, help="file containing list of integers")
args = parser.parse_args()
integer_list = read_file(args.filename)
inversions = 0
merge_sort(integer_list)
print(inversions) | mit | Python |
9d39802ab6db8d4d65688aff7ed6af01ceb0cb07 | Make pydocstyle happy | SublimeLinter/SublimeLinter-hlint | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):

    """Provides an interface to hlint."""

    syntax = ('haskell', 'haskell-sublimehaskell', 'literate haskell')
    cmd = 'hlint'
    # All fragments are raw strings now: previously only the first was,
    # so '\d' and '\s' in the others relied on Python passing unknown
    # escapes through (a DeprecationWarning today, a SyntaxError in
    # future Python versions). The matched pattern is unchanged.
    regex = (
        r'^.+:(?P<line>\d+):'
        r'(?P<col>\d+):\s*'
        r'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
        r'(?P<message>.+)$'
    )
    multiline = True
    tempfile_suffix = {
        'haskell': 'hs',
        'haskell-sublimehaskell': 'hs',
        'literate haskell': 'lhs'
    }
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):

    """Provides an interface to hlint."""

    syntax = ('haskell', 'haskell-sublimehaskell', 'literate haskell')
    cmd = 'hlint'
    # All fragments are raw strings now: previously only the first was,
    # so '\d' and '\s' in the others relied on Python passing unknown
    # escapes through (a DeprecationWarning today, a SyntaxError in
    # future Python versions). The matched pattern is unchanged.
    regex = (
        r'^.+:(?P<line>\d+):'
        r'(?P<col>\d+):\s*'
        r'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
        r'(?P<message>.+)$'
    )
    multiline = True
    tempfile_suffix = {
        'haskell': 'hs',
        'haskell-sublimehaskell': 'hs',
        'literate haskell': 'lhs'
    }
aaaaa6858485962f1cfd52357b2d5f3964a368ad | Refactor tests with setUp | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | falcom/decorators/test_try_forever.py | falcom/decorators/test_try_forever.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..test.hamcrest import ComposedMatcher, evaluates_to, a_method
from .try_forever import try_forever, TryForever
class FailThenSucceed:
    """Callable that raises `error` a fixed number of times, then succeeds."""

    def __init__ (self, number_of_failures, error = RuntimeError):
        self.countdown = number_of_failures
        self.error = error

    def __call__ (self):
        # While the countdown is positive we are still in the failing
        # phase: consume one failure and raise.
        if self.countdown > 0:
            self.countdown -= 1
            raise self.error
class DecoratorTest (unittest.TestCase):
    """Smoke-test that `try_forever` works as a bare decorator."""

    def test_can_set_decorator (self):
        # Decorating and calling must not raise.
        @try_forever
        def method():
            pass

        method()
class GivenDefaultTryForeverDecorator (unittest.TestCase):
    """Behavior of a TryForever instance built with default arguments."""

    def setUp (self):
        self.decorator = TryForever()

    def test_try_forever_returns_object (self):
        assert_that(self.decorator, is_(a_method()))

    def test_waits_one_minute_by_default (self):
        # Default retry delay is 60 seconds.
        assert_that(self.decorator.seconds_between_attempts,
                    is_(equal_to(60)))
class FailThenSucceedTest (unittest.TestCase):
    """Sanity checks for the FailThenSucceed test helper itself."""

    def test_we_can_fail_then_succeed (self):
        method = FailThenSucceed(5)
        for i in range(5):
            assert_that(calling(method), raises(RuntimeError))

        method() # raises no exception this time

    def test_we_can_use_any_error (self):
        method = FailThenSucceed(1, KeyError)
        assert_that(calling(method), raises(KeyError))
        method() # raises no exception this time
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..test.hamcrest import ComposedMatcher, evaluates_to, a_method
from .try_forever import try_forever, TryForever
class FailThenSucceed:
    """Callable that raises `error` the first `number_of_failures` times."""

    def __init__ (self, number_of_failures, error = RuntimeError):
        self.countdown = number_of_failures
        self.error = error

    def __call__ (self):
        if self.__we_need_to_raise_an_error():
            self.__decrement_counter_and_raise_the_error()

    def __we_need_to_raise_an_error (self):
        # Still in the failing phase while the countdown is positive.
        return self.countdown > 0

    def __decrement_counter_and_raise_the_error (self):
        self.countdown -= 1
        raise self.error
class DecoratorTest (unittest.TestCase):
    """Smoke-test that `try_forever` works as a bare decorator."""

    def test_can_set_decorator (self):
        # Decorating and calling must not raise.
        @try_forever
        def method():
            pass

        method()
class TryForeverClassTest (unittest.TestCase):
    """Behavior of TryForever instances built with default arguments."""

    def test_try_forever_returns_object (self):
        obj = TryForever()
        assert_that(obj, is_(a_method()))

    def test_waits_one_minute_by_default (self):
        # Default retry delay is 60 seconds.
        obj = TryForever()
        assert_that(obj.seconds_between_attempts, is_(equal_to(60)))
class FailThenSucceedTest (unittest.TestCase):
    """Sanity checks for the FailThenSucceed test helper itself."""

    def test_we_can_fail_then_succeed (self):
        method = FailThenSucceed(5)
        for i in range(5):
            assert_that(calling(method), raises(RuntimeError))

        method() # raises no exception this time

    def test_we_can_use_any_error (self):
        method = FailThenSucceed(1, KeyError)
        assert_that(calling(method), raises(KeyError))
        method() # raises no exception this time
| bsd-3-clause | Python |
f86f0ce876c40e28480baf1da1f51c5f7cc5c74d | apply sorting before serialization of mscale collection | schocco/mds-web,schocco/mds-web | mds_website/views.py | mds_website/views.py | from django.views.generic.base import TemplateView
from apps.muni_scales.api import MscaleResource
from apps.auth.api import UserResource
class HomeView(TemplateView):
    """Landing page: renders index.html with embedded JSON bootstrap data."""

    template_name = "index.html"

    def get_context_data(self, **kwargs):
        # Extend the default template context with serialized API payloads.
        context = super(HomeView, self).get_context_data(**kwargs)
        context['current_user'] = self.user_detail()
        context['mscale_collection'] = self.get_mscale_collection()
        return context

    def get_mscale_collection(self):
        '''
        Returns all MscaleObjects as json string.
        '''
        res = MscaleResource()
        request_bundle = res.build_bundle(request=self.request)
        obj_lst = res.obj_get_list(request_bundle)
        # Apply the resource's sorting before serializing, so the embedded
        # collection has the same order as the API endpoint would return.
        obj_lst = res.apply_sorting(obj_lst)
        bundles = []
        for obj in obj_lst:
            bundle = res.build_bundle(obj=obj, request=self.request)
            bundles.append(res.full_dehydrate(bundle, for_list=True))
        json = res.serialize(None, bundles, "application/json")
        return json

    def user_detail(self):
        '''
        Return the request user as json string
        '''
        ur = UserResource()
        ur_bundle = ur.build_bundle(obj=self.request.user, request=self.request)
        json = ur.serialize(None, ur.full_dehydrate(ur_bundle), 'application/json')
        return json
| from django.views.generic.base import TemplateView
from apps.muni_scales.api import MscaleResource
from apps.auth.api import UserResource
class HomeView(TemplateView):
    """Landing page: renders index.html with embedded JSON bootstrap data."""

    template_name = "index.html"

    def get_context_data(self, **kwargs):
        # Extend the default template context with serialized API payloads.
        context = super(HomeView, self).get_context_data(**kwargs)
        context['current_user'] = self.user_detail()
        context['mscale_collection'] = self.get_mscale_collection()
        return context

    def get_mscale_collection(self):
        '''
        Returns all MscaleObjects as json string.
        '''
        res = MscaleResource()
        request_bundle = res.build_bundle(request=self.request)
        queryset = res.obj_get_list(request_bundle)
        # NOTE(review): no apply_sorting() here -- the embedded order may
        # differ from the API endpoint's; confirm this is intentional.
        bundles = []
        for obj in queryset:
            bundle = res.build_bundle(obj=obj, request=self.request)
            bundles.append(res.full_dehydrate(bundle, for_list=True))
        json = res.serialize(None, bundles, "application/json")
        return json

    def user_detail(self):
        '''
        Return the request user as json string
        '''
        ur = UserResource()
        ur_bundle = ur.build_bundle(obj=self.request.user, request=self.request)
        json = ur.serialize(None, ur.full_dehydrate(ur_bundle), 'application/json')
        return json
| mit | Python |
c6e392b724853c59684ec317da7063486a209c94 | Change 'tempfile_suffix' and remove filename check. | likewise/SublimeLinter-contrib-xsvlog,BrunoJJE/SublimeLinter-contrib-xvhdl | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bruno JJE
# Copyright (c) 2015 Bruno JJE
#
# License: MIT
#
"""This module exports the Xvhdl plugin class."""
from SublimeLinter.lint import Linter
class Xvhdl(Linter):

    """Provides an interface to xvhdl (from Xilinx Vivado Simulator)."""

    syntax = 'vhdl'
    cmd = 'xvhdl @'
    version_args = '--version --nolog'
    version_re = r'Vivado Simulator (?P<version>\d+\.\d+)'
    version_requirement = '>= 2014.4'
    tempfile_suffix = 'vhd'

    # Here is a sample xvhdl error output:
    # ----8<------------
    # ERROR: [VRFC 10-91] td_logic is not declared [/home/BrunoJJE/src/filtre8.vhd:35]
    # ----8<------------
    regex = (
        r"^(?P<error>ERROR: )(?P<message>\[.*\].*)"
        r"\[(?P<path>.*):(?P<line>[0-9]+)\]"
    )

    def split_match(self, match):
        """
        Extract and return values from match.

        We override this method to prefix the error message with the
        linter name.
        """
        fields = super().split_match(match)
        match, line, col, error, warning, message, near = fields

        if not match:
            return fields

        return match, line, col, error, warning, '[xvhdl] ' + message, near
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bruno JJE
# Copyright (c) 2015 Bruno JJE
#
# License: MIT
#
"""This module exports the Xvhdl plugin class."""
from SublimeLinter.lint import Linter
class Xvhdl(Linter):

    """Provides an interface to xvhdl (from Xilinx Vivado Simulator)."""

    syntax = 'vhdl'
    cmd = 'xvhdl @'
    version_args = '--version --nolog'
    version_re = r'Vivado Simulator (?P<version>\d+\.\d+)'
    version_requirement = '>= 2014.4'
    # NOTE(review): '-' appears to mean "lint the file on disk, no temp
    # copy" -- confirm against the SublimeLinter tempfile_suffix docs.
    tempfile_suffix = '-'

    # Here is a sample xvhdl error output:
    # ----8<------------
    # ERROR: [VRFC 10-91] td_logic is not declared [/home/BrunoJJE/src/filtre8.vhd:35]
    # ----8<------------
    regex = (
        r"^(?P<error>ERROR: )(?P<message>\[.*\].*)"
        r"\[(?P<path>.*):(?P<line>[0-9]+)\]"
    )

    def split_match(self, match):
        """
        Extract and return values from match.

        We override this method to prefix the error message with the
        linter name.
        """
        match, line, col, error, warning, message, near = super().split_match(match)

        # Not sure the filename check is required, but we do it
        # anyway just in case...
        if match and match.group('path') != self.filename:
            match = None

        if match:
            message = '[xvhdl] ' + message

        return match, line, col, error, warning, message, near
| mit | Python |
cb6a340ba2d0385ae3c406540eaf934c2cd6f454 | Format string. | ryanc/mmmpaste,ryanc/mmmpaste | mmmpaste/filters.py | mmmpaste/filters.py | from flask import make_response
from functools import update_wrapper, wraps
from time import time as now
from mmmpaste import app
def runtime(f):
    """
    Add a header that shows the runtime of the route.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        started = now()
        response = make_response(f(*args, **kwargs))
        elapsed = now() - started
        response.headers["X-Runtime"] = "{0}s".format(elapsed)
        return response
    return wrapper
def no_cache(f):
    """
    Add "Cache-Control: no-cache" header.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        resp = make_response(f(*args, **kwargs))
        resp.cache_control.no_cache = True
        return resp
    return wrapper
def cache(f):
    """
    Add "Cache-Control: s-maxage" header.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        resp = make_response(f(*args, **kwargs))
        # Shared-cache TTL comes from app config (CACHE_S_MAXAGE).
        resp.cache_control.s_maxage = app.config.get('CACHE_S_MAXAGE')
        return resp
    return wrapper
| from flask import make_response
from functools import update_wrapper, wraps
from time import time as now
from mmmpaste import app
def runtime(f):
    """
    Add a header that shows the runtime of the route.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        start = now()
        response = make_response(f(*args, **kwargs))
        end = now()
        # Seconds spent building the response, e.g. "0.0123s".
        response.headers["X-Runtime"] = "%ss" % str(end - start)
        return response
    return wrapper
def no_cache(f):
    """
    Add "Cache-Control: no-cache" header.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        response = make_response(f(*args, **kwargs))
        response.cache_control.no_cache = True
        return response
    return wrapper
def cache(f):
    """
    Add "Cache-Control: s-maxage" header.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        response = make_response(f(*args, **kwargs))
        # Shared-cache TTL comes from app config (CACHE_S_MAXAGE).
        response.cache_control.s_maxage = app.config.get('CACHE_S_MAXAGE')
        return response
    return wrapper
| bsd-2-clause | Python |
49df4cc72a5e424d3fad45b1679d4519b04e5916 | Update models.py | Beeblio/django-vote | vote/models.py | vote/models.py | from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
try:
from django.contrib.contenttypes.fields import GenericForeignKey
except ImportError:
from django.contrib.contenttypes.generic import GenericForeignKey
from .compat import AUTH_USER_MODEL
class VoteManger(models.Manager):
    """Manager that supports `filter(content_object=<instance>)` lookups.

    (Name typo is preserved for backward compatibility.)
    """

    def filter(self, *args, **kwargs):
        if 'content_object' in kwargs:
            # Translate the generic-FK shortcut into its two real columns.
            target = kwargs.pop('content_object')
            kwargs['content_type'] = ContentType.objects.get_for_model(target)
            kwargs['object_id'] = target.pk
        return super(VoteManger, self).filter(*args, **kwargs)
class Vote(models.Model):
    """One user's vote on any model instance, via a generic foreign key."""

    user = models.ForeignKey(AUTH_USER_MODEL)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()
    create_at = models.DateTimeField(auto_now_add=True)

    objects = VoteManger()

    class Meta:
        # At most one vote per user per object.
        unique_together = ('user', 'content_type', 'object_id')

    @classmethod
    def votes_for(cls, model, instance=None):
        """Return votes for a model class, or for one instance of it."""
        ct = ContentType.objects.get_for_model(model)
        kwargs = {
            "content_type": ct
        }
        if instance is not None:
            kwargs["object_id"] = instance.pk

        return cls.objects.filter(**kwargs)
| from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
try:
from django.contrib.contenttypes.fields import GenericForeignKey
except ImportError:
from django.contrib.contenttypes.generic import GenericForeignKey
from .compat import AUTH_USER_MODEL
class VoteManger(models.Manager):
    """Manager that supports `filter(content_object=<instance>)` lookups.

    (Name typo is preserved for backward compatibility.)
    """

    def filter(self, *args, **kwargs):
        # dict.has_key() was removed in Python 3; use the `in` operator,
        # which works on both Python 2 and 3.
        if 'content_object' in kwargs:
            content_object = kwargs.pop('content_object')
            content_type = ContentType.objects.get_for_model(content_object)
            kwargs.update({
                'content_type': content_type,
                'object_id': content_object.pk
            })
        return super(VoteManger, self).filter(*args, **kwargs)
class Vote(models.Model):
    """One user's vote on any model instance, via a generic foreign key."""

    user = models.ForeignKey(AUTH_USER_MODEL)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()
    create_at = models.DateTimeField(auto_now_add=True)

    objects = VoteManger()

    class Meta:
        # At most one vote per user per object.
        unique_together = ('user', 'content_type', 'object_id')

    @classmethod
    def votes_for(cls, model, instance=None):
        """Return votes for a model class, or for one instance of it."""
        ct = ContentType.objects.get_for_model(model)
        kwargs = {
            "content_type": ct
        }
        if instance is not None:
            kwargs["object_id"] = instance.pk

        return cls.objects.filter(**kwargs)
| bsd-3-clause | Python |
6bccabc3840438b6d9524a3bcbd097459171cc1b | Handle KeyboardInterrupt as nose does. | olivierverdier/nose-progressive,veo-labs/nose-progressive,erikrose/nose-progressive,pmclanahan/pytest-progressive | noseprogressive/runner.py | noseprogressive/runner.py | from time import time
import nose.core
from noseprogressive.result import ProgressiveResult
class ProgressiveRunner(nose.core.TextTestRunner):
    """Test runner that makes a lot less noise than TextTestRunner"""

    def __init__(self, cwd, totalTests, stream, **kwargs):
        super(ProgressiveRunner, self).__init__(stream, **kwargs)
        self._cwd = cwd  # working dir, forwarded to the result object
        self._totalTests = totalTests  # known up front, for progress display

    def _makeResult(self):
        """Return a Result that doesn't print dots.

        Nose's ResultProxy will wrap it, and other plugins can still print
        stuff---but without smashing into our progress bar, care of
        ProgressivePlugin's stderr/out wrapping.

        """
        return ProgressiveResult(self._cwd,
                                 self._totalTests,
                                 self.stream,
                                 config=self.config)

    def run(self, test):
        "Run the given test case or test suite...quietly."
        # These parts of Nose's pluggability are baked into
        # nose.core.TextTestRunner. Reproduce them:
        wrapper = self.config.plugins.prepareTest(test)
        if wrapper is not None:
            test = wrapper
        wrapped = self.config.plugins.setOutputStream(self.stream)
        if wrapped is not None:
            self.stream = wrapped

        result = self._makeResult()
        startTime = time()
        try:
            test(result)
        except KeyboardInterrupt:
            # Ctrl-C: stop running tests but still fall through so the
            # summary and plugin reports below are printed.
            pass
        stopTime = time()

        # We don't care to hear about errors again at the end; we take care of
        # that in result.addError(), while the tests run.
        # result.printErrors()
        #
        # However, we do need to call this one useful line from
        # nose.result.TextTestResult's implementation of printErrors() to make
        # sure other plugins get a chance to report:
        self.config.plugins.report(self.stream)

        result.printSummary(startTime, stopTime)
        self.config.plugins.finalize(result)
        return result
| from time import time
import nose.core
from noseprogressive.result import ProgressiveResult
class ProgressiveRunner(nose.core.TextTestRunner):
    """Test runner that makes a lot less noise than TextTestRunner"""

    def __init__(self, cwd, totalTests, stream, **kwargs):
        super(ProgressiveRunner, self).__init__(stream, **kwargs)
        self._cwd = cwd  # working dir, forwarded to the result object
        self._totalTests = totalTests  # known up front, for progress display

    def _makeResult(self):
        """Return a Result that doesn't print dots.

        Nose's ResultProxy will wrap it, and other plugins can still print
        stuff---but without smashing into our progress bar, care of
        ProgressivePlugin's stderr/out wrapping.

        """
        return ProgressiveResult(self._cwd,
                                 self._totalTests,
                                 self.stream,
                                 config=self.config)

    def run(self, test):
        "Run the given test case or test suite...quietly."
        # These parts of Nose's pluggability are baked into
        # nose.core.TextTestRunner. Reproduce them:
        wrapper = self.config.plugins.prepareTest(test)
        if wrapper is not None:
            test = wrapper
        wrapped = self.config.plugins.setOutputStream(self.stream)
        if wrapped is not None:
            self.stream = wrapped

        result = self._makeResult()
        startTime = time()
        # NOTE(review): a KeyboardInterrupt here aborts before the summary
        # below is printed -- nose itself catches it; confirm intent.
        test(result)
        stopTime = time()

        # We don't care to hear about errors again at the end; we take care of
        # that in result.addError(), while the tests run.
        # result.printErrors()
        #
        # However, we do need to call this one useful line from
        # nose.result.TextTestResult's implementation of printErrors() to make
        # sure other plugins get a chance to report:
        self.config.plugins.report(self.stream)

        result.printSummary(startTime, stopTime)
        self.config.plugins.finalize(result)
        return result
| mit | Python |
e6b80e478eb137f824c9bbc050ba363e24b48707 | Improve twitter URL detection | JohnMaguire/Cardinal | plugins/twitter/plugin.py | plugins/twitter/plugin.py | import re
from urllib.parse import urlparse
import requests
import twitter
from twisted.internet import defer
from twisted.internet.threads import deferToThread
from cardinal.decorators import event
from cardinal.exceptions import EventRejectedMessage
class TwitterPlugin:
    """Expands tweet URLs seen in channel into "Tweet from @user: ..." messages."""

    def __init__(self, config):
        consumer_key = config['consumer_key']
        consumer_secret = config['consumer_secret']
        if not all([consumer_key, consumer_secret]):
            raise Exception(
                "Twitter plugin requires consumer_key and consumer_secret"
            )

        # App-only auth: no user context is needed to read public statuses.
        self.api = twitter.Api(
            consumer_key=consumer_key,
            consumer_secret=consumer_secret,
            application_only_auth=True,
        )

    @defer.inlineCallbacks
    def get_tweet(self, tweet_id):
        # Run the blocking API call in a thread so the reactor isn't blocked.
        tweet = yield deferToThread(self.api.GetStatus,
                                    tweet_id)
        return tweet

    @defer.inlineCallbacks
    def follow_short_link(self, url):
        # Resolve a t.co redirect to the final tweet URL.
        r = yield deferToThread(requests.get,
                                url)

        # Twitter returns 400 in normal operation
        if not r.ok and r.status_code != 400:
            r.raise_for_status()

        return r.url

    @event('urls.detection')
    @defer.inlineCallbacks
    def handle_tweet(self, cardinal, channel, url):
        o = urlparse(url)

        # handle t.co short links
        if o.netloc == 't.co':
            url = yield self.follow_short_link(url)
            o = urlparse(url)

        # Desktop and mobile status URLs both look like /<user>/status/<id>.
        if o.netloc in ('twitter.com', 'mobile.twitter.com') \
                and (match := re.match(r'^/.*/status/(\d+)$', o.path)):
            tweet_id = match.group(1)
            t = yield self.get_tweet(tweet_id)
            cardinal.sendMsg(channel, "Tweet from @{}: {}".format(
                t.user.screen_name,
                t.text,
            ))
        else:
            # Let another URL handler take a shot at this link.
            raise EventRejectedMessage


# Cardinal instantiates the plugin through this module-level name.
entrypoint = TwitterPlugin
| import re
from urllib.parse import urlparse
import requests
import twitter
from twisted.internet import defer
from twisted.internet.threads import deferToThread
from cardinal.decorators import event
from cardinal.exceptions import EventRejectedMessage
class TwitterPlugin:
    """Expands tweet URLs seen in channel into "Tweet from @user: ..." messages."""

    def __init__(self, config):
        consumer_key = config['consumer_key']
        consumer_secret = config['consumer_secret']
        if not all([consumer_key, consumer_secret]):
            raise Exception(
                "Twitter plugin requires consumer_key and consumer_secret"
            )

        # App-only auth: no user context is needed to read public statuses.
        self.api = twitter.Api(
            consumer_key=consumer_key,
            consumer_secret=consumer_secret,
            application_only_auth=True,
        )

    @defer.inlineCallbacks
    def get_tweet(self, tweet_id):
        # Run the blocking API call in a thread so the reactor isn't blocked.
        tweet = yield deferToThread(self.api.GetStatus,
                                    tweet_id)
        return tweet

    @defer.inlineCallbacks
    def follow_short_link(self, url):
        # Resolve a t.co redirect to the final tweet URL.
        r = yield deferToThread(requests.get,
                                url)

        # Twitter returns 400 in normal operation
        if not r.ok and r.status_code != 400:
            r.raise_for_status()

        return r.url

    @event('urls.detection')
    @defer.inlineCallbacks
    def handle_tweet(self, cardinal, channel, url):
        o = urlparse(url)

        # handle t.co short links
        if o.netloc == 't.co':
            url = yield self.follow_short_link(url)
            o = urlparse(url)

        # Accept both desktop and mobile status URLs; previously
        # mobile.twitter.com links fell through and were rejected.
        if o.netloc in ('twitter.com', 'mobile.twitter.com') \
                and (match := re.match(r'^/.*/status/(\d+)$', o.path)):
            tweet_id = match.group(1)
            t = yield self.get_tweet(tweet_id)
            cardinal.sendMsg(channel, "Tweet from @{}: {}".format(
                t.user.screen_name,
                t.text,
            ))
        else:
            # Let another URL handler take a shot at this link.
            raise EventRejectedMessage


# Cardinal instantiates the plugin through this module-level name.
entrypoint = TwitterPlugin
| mit | Python |
08bd685fd7174c6e96f10eb517b9aa1e8916385a | add u24 keyword to rammbocklibrary | robotframework/Rammbock,samratashok87/Rammbock,robotframework/Rammbock,WamanAvadhani/Rammbock,WamanAvadhani/Rammbock,samratashok87/Rammbock | proto/src/RammbockLibrary.py | proto/src/RammbockLibrary.py | from Rammbock import Rammbock
from robot.libraries.BuiltIn import BuiltIn
class RammbockLibrary(Rammbock):
    """Convenience keywords layered on Rammbock's uint/list/struct primitives."""

    def u8(self, name, value=None, align=None):
        # 1-byte unsigned integer field.
        self.uint(1, name, value, align)

    def u16(self, name, value=None, align=None):
        # 2-byte unsigned integer field.
        self.uint(2, name, value, align)

    def u24(self, name, value=None, align=None):
        # 3-byte unsigned integer field.
        self.uint(3, name, value, align)

    def u32(self, name, value=None, align=None):
        # 4-byte unsigned integer field.
        self.uint(4, name, value, align)

    def u64(self, name, value=None, align=None):
        # 8-byte unsigned integer field.
        self.uint(8, name, value, align)

    def u128(self, name, value=None, align=None):
        # 16-byte unsigned integer field.
        self.uint(16, name, value, align)

    def array(self, size, type, name, *params):
        """Define a list of `size` elements produced by keyword `type`."""
        self.new_list(size, name)
        BuiltIn().run_keyword(type, '', *params)
        self.end_list()

    def container(self, name, length, type, *params):
        """Define a length-prefixed struct wrapping keyword `type`."""
        self.struct('Container', name, 'length=%s' % length)
        BuiltIn().run_keyword(type, *params)
        self.end_struct()
| from Rammbock import Rammbock
from robot.libraries.BuiltIn import BuiltIn
class RammbockLibrary(Rammbock):
    """Convenience keywords layered on Rammbock's uint/list/struct primitives."""

    def u8(self, name, value=None, align=None):
        # 1-byte unsigned integer field.
        self.uint(1, name, value, align)

    def u16(self, name, value=None, align=None):
        # 2-byte unsigned integer field.
        self.uint(2, name, value, align)

    def u24(self, name, value=None, align=None):
        # 3-byte unsigned integer field; added for consistency with the
        # other fixed-width helpers (backward-compatible addition).
        self.uint(3, name, value, align)

    def u32(self, name, value=None, align=None):
        # 4-byte unsigned integer field.
        self.uint(4, name, value, align)

    def u64(self, name, value=None, align=None):
        # 8-byte unsigned integer field.
        self.uint(8, name, value, align)

    def u128(self, name, value=None, align=None):
        # 16-byte unsigned integer field.
        self.uint(16, name, value, align)

    def array(self, size, type, name, *params):
        """Define a list of `size` elements produced by keyword `type`."""
        self.new_list(size, name)
        BuiltIn().run_keyword(type, '', *params)
        self.end_list()

    def container(self, name, length, type, *params):
        """Define a length-prefixed struct wrapping keyword `type`."""
        self.struct('Container', name, 'length=%s' % length)
        BuiltIn().run_keyword(type, *params)
        self.end_struct()
| apache-2.0 | Python |
7f1933f9cae7cbb9b0be140671046c0b8919bd10 | fix type | w495/python-video-shot-detector,w495/python-video-shot-detector | shot_detector/filters/slice_filter.py | shot_detector/filters/slice_filter.py | # -*- coding: utf8 -*-
from __future__ import absolute_import, division, print_function
import itertools
import logging
from shot_detector.filters import Filter
class SliceFilter(Filter):
"""
Slice filter.
"""
__logger = logging.getLogger(__name__)
def filter_objects(self,
sequence,
start=0,
stop=None,
step=None,
**kwargs):
"""
:param sequence:
:param start:
:param stop:
:param step:
:param kwargs:
:return:
"""
i_seq = iter(sequence)
sliced_sequence = itertools.islice(
i_seq,
start=start,
stop=start,
step=step,
)
return sliced_sequence
| # -*- coding: utf8 -*-
from __future__ import absolute_import, division, print_function
import itertools
import logging
from shot_detector.filters import Filter
class SliceFilter(Filter):
"""
Slice filter.
"""
__logger = logging.getLogger(__name__)
def filter_objects(self,
sequence,
start=0,
stop=None,
step=None,
**kwargs):
"""
:param sequence:
:param start:
:param stop:
:param step:
:param kwargs:
:return:
"""
i_seq = iter(sequence)
isliced_sequence = itertools.islice(
i_seq,
start=start,
stop=start,
step=step,
)
return isliced_sequence
| bsd-3-clause | Python |
afc51d93271e48ed817400b03f43ae5a55ed7334 | Delete excessive table counting | thombashi/SimpleSQLite,thombashi/SimpleSQLite | simplesqlite/loader/html/formatter.py | simplesqlite/loader/html/formatter.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import bs4
import dataproperty
from ..constant import TableNameTemplate as tnt
from ..data import TableData
from ..error import InvalidDataError
from ..formatter import TableFormatter
class HtmlTableFormatter(TableFormatter):
def __init__(self, source_data):
super(HtmlTableFormatter, self).__init__(source_data)
try:
self.__soup = bs4.BeautifulSoup(self._source_data, "lxml")
except bs4.FeatureNotFound:
self.__soup = bs4.BeautifulSoup(self._source_data, "html.parser")
def to_table_data(self):
self._validate_source_data()
for table in self.__soup.find_all("table"):
tabledata = self.__parse_html(table)
yield tabledata
def _validate_source_data(self):
if len(self._source_data) == 0:
raise InvalidDataError("html data is empty")
def _make_table_name(self):
table_name = self._loader.make_table_name()
key = self.__table_id
if dataproperty.is_empty_string(key):
key = "{:s}{:d}".format(
self._loader.format_name,
self._loader.get_format_table_count())
return table_name.replace(tnt.KEY, key)
def __parse_html(self, table):
header_list = []
data_matrix = []
self.__table_id = table.get("id")
row_list = table.find_all("tr")
for row in row_list:
col_list = row.find_all("td")
if dataproperty.is_empty_sequence(col_list):
th_list = row.find_all("th")
if dataproperty.is_empty_sequence(th_list):
continue
header_list = [row.text.strip() for row in th_list]
continue
data_list = [value.text.strip() for value in col_list]
data_matrix.append(data_list)
self._loader.inc_table_count()
return TableData(
self._make_table_name(), header_list, data_matrix)
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import bs4
import dataproperty
from ..constant import TableNameTemplate as tnt
from ..data import TableData
from ..error import InvalidDataError
from ..formatter import TableFormatter
class HtmlTableFormatter(TableFormatter):
def __init__(self, source_data):
super(HtmlTableFormatter, self).__init__(source_data)
try:
self.__soup = bs4.BeautifulSoup(self._source_data, "lxml")
except bs4.FeatureNotFound:
self.__soup = bs4.BeautifulSoup(self._source_data, "html.parser")
def to_table_data(self):
self._validate_source_data()
for table in self.__soup.find_all("table"):
tabledata = self.__parse_html(table)
self._loader.inc_table_count()
yield tabledata
def _validate_source_data(self):
if len(self._source_data) == 0:
raise InvalidDataError("html data is empty")
def _make_table_name(self):
table_name = self._loader.make_table_name()
key = self.__table_id
if dataproperty.is_empty_string(key):
key = "{:s}{:d}".format(
self._loader.format_name,
self._loader.get_format_table_count())
return table_name.replace(tnt.KEY, key)
def __parse_html(self, table):
header_list = []
data_matrix = []
self.__table_id = table.get("id")
row_list = table.find_all("tr")
for row in row_list:
col_list = row.find_all("td")
if dataproperty.is_empty_sequence(col_list):
th_list = row.find_all("th")
if dataproperty.is_empty_sequence(th_list):
continue
header_list = [row.text.strip() for row in th_list]
continue
data_list = [value.text.strip() for value in col_list]
data_matrix.append(data_list)
self._loader.inc_table_count()
return TableData(
self._make_table_name(), header_list, data_matrix)
| mit | Python |
df6de0eb5185ec88b828c5f08a93c697525a2c6a | add pin numbers for motor | banchee/pirobot,banchee/pirobot | TankController/tank.py | TankController/tank.py | import motor
class tank(object):
def __init__(self):
self.left_motor = motor.motor(7,11)
self.right_motor = motor.motor(12,13)
self.actions = {'forward':False, 'reverse':False, 'left':False, 'right':False, 'stop':True}
def forward(self):
if not self.actions['forward']:
self.left_motor.positive()
self.right_motor.positive()
for key, value in self.actions.items():
if key == 'forward':
self.actions[key] = True
else:
self.actions[key] = False
def reverse(self):
if not self.actions['reverse']:
self.left_motor.negative()
self.right_motor.negative()
for key, value in self.actions.items():
if key == 'reverse':
self.actions[key] = True
else:
self.actions[key] = False
def left(self):
if not self.actions['left']:
self.left_motor.positive()
self.right_motor.negative()
for key, value in self.actions.items():
if key == 'left':
self.actions[key] = True
else:
self.actions[key] = False
def right(self):
if not self.actions['right']:
self.left_motor.negative()
self.right_motor.positive()
for key, value in self.actions.items():
if key == 'right':
self.actions[key] = True
else:
self.actions[key] = False
def stop(self):
if not self.actions['stop']:
self.left_motor.stop()
self.left_motor.stop()
for key, value in self.actions.items():
if key == 'static':
self.actions[key] = True
else:
self.actions[key] = False
| import motor
class tank(object):
def __init__(self):
self.left_motor = motor.motor(11,15)
self.right_motor = motor.motor(13,12)
self.actions = {'forward':False, 'reverse':False, 'left':False, 'right':False, 'stop':True}
def forward(self):
if not self.actions['forward']:
self.left_motor.positive()
self.right_motor.positive()
for key, value in self.actions.items():
if key == 'forward':
self.actions[key] = True
else:
self.actions[key] = False
def reverse(self):
if not self.actions['reverse']:
self.left_motor.negative()
self.right_motor.negative()
for key, value in self.actions.items():
if key == 'reverse':
self.actions[key] = True
else:
self.actions[key] = False
def left(self):
if not self.actions['left']:
self.left_motor.positive()
self.right_motor.negative()
for key, value in self.actions.items():
if key == 'left':
self.actions[key] = True
else:
self.actions[key] = False
def right(self):
if not self.actions['right']:
self.left_motor.negative()
self.right_motor.positive()
for key, value in self.actions.items():
if key == 'right':
self.actions[key] = True
else:
self.actions[key] = False
def stop(self):
if not self.actions['stop']:
self.left_motor.stop()
self.left_motor.stop()
for key, value in self.actions.items():
if key == 'static':
self.actions[key] = True
else:
self.actions[key] = False
| mit | Python |
7e49448019f8e47d773a86a5ad8b6d3e04a73884 | Fix for sessions getting used up | rootio/rootio_web,rootio/rootio_web,rootio/rootio_web,rootio/rootio_web | telephony/podcast/rss_agent.py | telephony/podcast/rss_agent.py | import threading
from time import sleep
from rootio.config import DefaultConfig
from rootio.content.models import ContentPodcast
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from rss_downloader import RSSDownloader
class RSSAgent:
def __init__(self, logger):
self.__logger = logger
self.__engine = create_engine(DefaultConfig.SQLALCHEMY_DATABASE_URI)
self.__session = sessionmaker(bind=self.__engine)()
def __get_podcast_tracks(self):
#session = None
try:
#session = sessionmaker(bind=self.__engine)()
return session.query(ContentPodcast).filter(ContentPodcast.deleted == False).all()
except Exception as e:
self.__logger.error("error in __get_podcast_tracks: {0}".format(e.message))
return []
finally:
try:
if session is not None:
pass
#session.close()
except Exception as e: # some other error:
self.__logger.error("error in __get_podcast_tracks(finally): {0}".format(e.message))
pass # log this
def run(self):
while True:
try:
podcast_tracks = self.__get_podcast_tracks()
self.__logger.info("Checking for new podcasts in: {0}".format(podcast_tracks))
for podcast_track in podcast_tracks:
try:
pd = RSSDownloader(podcast_track, self.__logger, self.__engine)
thr = threading.Thread(target=pd.download)
thr.daemon = True
thr.start()
except Exception as e:
self.__logger.error("error(1) in run: {0}".format(e.message))
sleep(300) # 5 minutes
except Exception as e:
self.__logger.error("error(2) in run: {0}".format(e.message))
| import threading
from time import sleep
from rootio.config import DefaultConfig
from rootio.content.models import ContentPodcast
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from rss_downloader import RSSDownloader
class RSSAgent:
def __init__(self, logger):
self.__logger = logger
self.__engine = create_engine(DefaultConfig.SQLALCHEMY_DATABASE_URI)
def __get_podcast_tracks(self):
session = None
try:
session = sessionmaker(bind=self.__engine)()
return session.query(ContentPodcast).filter(ContentPodcast.deleted == False).all()
except Exception as e:
self.__logger.error("error in __get_podcast_tracks: {0}".format(e.message))
return []
finally:
try:
if session is not None:
session.close()
except Exception as e: # some other error:
self.__logger.error("error in __get_podcast_tracks(finally): {0}".format(e.message))
pass # log this
def run(self):
while True:
try:
podcast_tracks = self.__get_podcast_tracks()
self.__logger.info("Checking for new podcasts in: {0}".format(podcast_tracks))
for podcast_track in podcast_tracks:
try:
pd = RSSDownloader(podcast_track, self.__logger, self.__engine)
thr = threading.Thread(target=pd.download)
thr.daemon = True
thr.start()
except Exception as e:
self.__logger.error("error(1) in run: {0}".format(e.message))
sleep(300) # 5 minutes
except Exception as e:
self.__logger.error("error(2) in run: {0}".format(e.message))
| agpl-3.0 | Python |
b4c8544617528576dfa1eae3c045735b0bbccfad | Fix darwin detecting | itaymendel/taurus,greyfenrir/taurus,Blazemeter/taurus,Blazemeter/taurus,itaymendel/taurus,Blazemeter/taurus,Blazemeter/taurus,Blazemeter/taurus,greyfenrir/taurus,Blazemeter/taurus,Blazemeter/taurus,Blazemeter/taurus,greyfenrir/taurus,greyfenrir/taurus,greyfenrir/taurus,greyfenrir/taurus,itaymendel/taurus,Blazemeter/taurus,greyfenrir/taurus,itaymendel/taurus,greyfenrir/taurus,greyfenrir/taurus,itaymendel/taurus | bzt/__init__.py | bzt/__init__.py | """
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys, platform
from abc import abstractmethod
VERSION = "1.7.2"
class RCProvider(object):
"""
Abstract return code provider
"""
@abstractmethod
def get_rc(self):
"""
Must be implemented in subclasses
"""
pass
class TaurusException(BaseException):
pass
class TaurusConfigException(TaurusException):
pass
class TaurusInternalException(TaurusException):
pass
class NormalShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns normal rc
:return: int
"""
return 0
class ManualShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns manual shutdown rc
:return: int
"""
return 2
class AutomatedShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns automated shutdown rc
:return: int
"""
return 3
def get_configs_dir():
"""
Generate configs dir path on install, moved from utils due to import error
:return: str
"""
# detect virtualenv or pyenv usage
if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
path = sys.prefix
else:
if platform.system() == 'Darwin':
path = "/usr/local"
else:
path = os.path.splitdrive(sys.executable)[0]
path += os.path.sep + os.path.join("etc", "bzt.d") # os.path.join does not work for some reason
return path
| """
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys, platform
from abc import abstractmethod
VERSION = "1.7.2"
class RCProvider(object):
"""
Abstract return code provider
"""
@abstractmethod
def get_rc(self):
"""
Must be implemented in subclasses
"""
pass
class TaurusException(BaseException):
pass
class TaurusConfigException(TaurusException):
pass
class TaurusInternalException(TaurusException):
pass
class NormalShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns normal rc
:return: int
"""
return 0
class ManualShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns manual shutdown rc
:return: int
"""
return 2
class AutomatedShutdown(KeyboardInterrupt, RCProvider):
def get_rc(self):
"""
Returns automated shutdown rc
:return: int
"""
return 3
def get_configs_dir():
"""
Generate configs dir path on install, moved from utils due to import error
:return: str
"""
# detect virtualenv or pyenv usage
if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
path = sys.prefix
else:
if platform.system == 'Darwin':
path = "/usr/local"
else:
path = os.path.splitdrive(sys.executable)[0]
path += os.path.sep + os.path.join("etc", "bzt.d") # os.path.join does not work for some reason
return path
| apache-2.0 | Python |
c3d0577b4b6a486d6cc416d362787a836d5ddc08 | fix case where song has no metadata | mutantmonkey/mmbar | mpdstatus.py | mpdstatus.py | import mpd
import os.path
class MpdStatusWidget(object):
def __init__(self, server, timeout=10):
self.server = server
self.client = mpd.MPDClient()
self.client.timeout = timeout
self.client.connect(server, 6600)
def output(self):
try:
song = self.client.currentsong()
except:
pass
if song:
if 'artist' in song and 'title' in song:
text = "{artist} - {title}".format(**song)
else:
text = os.path.basename(song['file'])
return {
'name': "mpdstatus",
'instance': self.server,
'full_text': ' ' + text,
'color': '#8cd0d3',
'icon': 'mmbar/icons/note.xbm',
}
else:
pass
| import mpd
class MpdStatusWidget(object):
def __init__(self, server, timeout=10):
self.server = server
self.client = mpd.MPDClient()
self.client.timeout = timeout
self.client.connect(server, 6600)
def output(self):
song = self.client.currentsong()
if song:
return {
'name': "mpdstatus",
'instance': self.server,
'full_text': ' {artist} - {title}'.format(**song),
'color': '#8cd0d3',
'icon': 'mmbar/icons/note.xbm',
}
else:
pass
| isc | Python |
78c85e07f50b990b90e93cf8280ee47671d2f7e0 | configure cronjob to run on the hour every hour | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | celeryconfig.py | celeryconfig.py | from datetime import timedelta
from celery.schedules import crontab
from django.conf import settings
CELERY_ANNOTATIONS = {
'bluebottle.analytics.tasks.queue_analytics_record': {'rate_limit': '50/s'}
}
CELERYBEAT_SCHEDULE = {
'set_status_realised': {
'task': 'bluebottle.projects.tasks.set_status_realised',
'schedule': crontab(minute=0, hour=0)
},
'update_popularity': {
'task': 'bluebottle.projects.tasks.update_popularity',
'schedule': timedelta(hours=1),
},
'update_exchange_rates': {
'task': 'bluebottle.projects.tasks.update_exchange_rates',
'schedule': crontab(minute=1, hour=3),
},
'update_project_status_stats': {
'task': 'bluebottle.projects.tasks.update_project_status_stats',
'schedule': crontab(hour=0, minute=0),
},
'sync_surveys': {
'task': 'bluebottle.surveys.tasks.sync_surveys',
'schedule': timedelta(hours=1),
},
'update_salesforce_30': {
'task': 'bluebottle.common.tasks.update_salesforce',
'schedule': crontab(minute='*/30'),
'kwargs': {
'tenant': 'onepercent',
'synchronize': True,
'updated': 60,
'log_to_salesforce': True
}
},
'update_salesforce_week': {
'task': 'bluebottle.common.tasks.update_salesforce',
'schedule': crontab(minute=0, hour=12, day_of_week='sun'),
'kwargs': {
'tenant': 'onepercent',
'csv_export': True,
'log_to_salesforce': True
}
},
}
CELERY_TIMEZONE = 'Europe/Amsterdam'
CELERY_ENABLE_UTC = True
CELERY_RESULT_BACKEND = getattr(settings, 'CELERY_RESULT_BACKEND', 'amqp')
CELERY_TASK_RESULT_EXPIRES = 18000 # 5 hours.
if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
CELERY_ALWAYS_EAGER = True
| from datetime import timedelta
from celery.schedules import crontab
from django.conf import settings
CELERY_ANNOTATIONS = {
'bluebottle.analytics.tasks.queue_analytics_record': {'rate_limit': '50/s'}
}
CELERYBEAT_SCHEDULE = {
'set_status_realised': {
'task': 'bluebottle.projects.tasks.set_status_realised',
'schedule': crontab(minute=0, hour=0)
},
'update_popularity': {
'task': 'bluebottle.projects.tasks.update_popularity',
'schedule': timedelta(hours=1),
},
'update_exchange_rates': {
'task': 'bluebottle.projects.tasks.update_exchange_rates',
'schedule': crontab(minute=1, hour=3),
},
'update_project_status_stats': {
'task': 'bluebottle.projects.tasks.update_project_status_stats',
'schedule': crontab(hour=0, minute=7),
},
'sync_surveys': {
'task': 'bluebottle.surveys.tasks.sync_surveys',
'schedule': timedelta(hours=1),
},
'update_salesforce_30': {
'task': 'bluebottle.common.tasks.update_salesforce',
'schedule': crontab(minute='*/30'),
'kwargs': {
'tenant': 'onepercent',
'synchronize': True,
'updated': 60,
'log_to_salesforce': True
}
},
'update_salesforce_week': {
'task': 'bluebottle.common.tasks.update_salesforce',
'schedule': crontab(minute=0, hour=12, day_of_week='sun'),
'kwargs': {
'tenant': 'onepercent',
'csv_export': True,
'log_to_salesforce': True
}
},
}
CELERY_TIMEZONE = 'Europe/Amsterdam'
CELERY_ENABLE_UTC = True
CELERY_RESULT_BACKEND = getattr(settings, 'CELERY_RESULT_BACKEND', 'amqp')
CELERY_TASK_RESULT_EXPIRES = 18000 # 5 hours.
if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
CELERY_ALWAYS_EAGER = True
| bsd-3-clause | Python |
f509034ffbd4c218afbf62df9ec894bd01a20964 | Remove commented pdb | praekelt/jmbo-chart,praekelt/jmbo-chart | chart/models.py | chart/models.py | from datetime import datetime
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from preferences.models import Preferences
from jmbo.models import ModelBase
from music.models import Track
class Chart(ModelBase):
@property
def chartentries_permitted(self):
return self.chartentries.filter(
track__in=Track.permitted.all()
).order_by('current_position')
class ChartEntry(models.Model):
created = models.DateTimeField(auto_now_add=True)
chart = models.ForeignKey(
Chart,
related_name='chartentries'
)
track = models.ForeignKey(Track)
previous_position = models.IntegerField(
blank=True,
null=True
)
current_position = models.IntegerField(
blank=True,
null=True
)
next_position = models.IntegerField(
blank=True,
null=True
)
remove = models.BooleanField(
help_text="On the next update this entry will be removed completely."
)
class Meta:
verbose_name = 'Chart entry'
verbose_name_plural = 'Chart entries'
ordering = ['current_position']
def get_duration_on_chart(self):
now = timezone.now()
if not timezone.is_aware(self.created):
now = datetime.now()
return now - (now - self.created)
def __unicode__(self):
return '%s Entry %s' % (self.chart.title, self.current_position)
class ChartPreferences(Preferences):
__module__ = 'preferences.models'
primary_chart = models.ForeignKey(
'chart.Chart',
null=True,
help_text="Select the primary chart link from the navigation.",
related_name='chartoptions_primary_chart',
limit_choices_to={'state': 'published'}
)
class Meta:
verbose_name = 'Chart preferences'
verbose_name_plural = 'Chart preferences'
| from datetime import datetime
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from preferences.models import Preferences
from jmbo.models import ModelBase
from music.models import Track
class Chart(ModelBase):
@property
def chartentries_permitted(self):
#import pdb;pdb.set_trace()
return self.chartentries.filter(
track__in=Track.permitted.all()
).order_by('current_position')
class ChartEntry(models.Model):
created = models.DateTimeField(auto_now_add=True)
chart = models.ForeignKey(
Chart,
related_name='chartentries'
)
track = models.ForeignKey(Track)
previous_position = models.IntegerField(
blank=True,
null=True
)
current_position = models.IntegerField(
blank=True,
null=True
)
next_position = models.IntegerField(
blank=True,
null=True
)
remove = models.BooleanField(
help_text="On the next update this entry will be removed completely."
)
class Meta:
verbose_name = 'Chart entry'
verbose_name_plural = 'Chart entries'
ordering = ['current_position']
def get_duration_on_chart(self):
now = timezone.now()
if not timezone.is_aware(self.created):
now = datetime.now()
return now - (now - self.created)
def __unicode__(self):
return '%s Entry %s' % (self.chart.title, self.current_position)
class ChartPreferences(Preferences):
__module__ = 'preferences.models'
primary_chart = models.ForeignKey(
'chart.Chart',
null=True,
help_text="Select the primary chart link from the navigation.",
related_name='chartoptions_primary_chart',
limit_choices_to={'state': 'published'}
)
class Meta:
verbose_name = 'Chart preferences'
verbose_name_plural = 'Chart preferences'
| bsd-3-clause | Python |
7883b95433a35b0b4bfa3354454626473986b2a5 | bump version number | CloverHealth/temporal-sqlalchemy | temporal_sqlalchemy/version.py | temporal_sqlalchemy/version.py | """Version information."""
__version__ = '0.2.0'
| """Version information."""
__version__ = '0.1.0'
| bsd-3-clause | Python |
2ecaf44c0bc5117744ecda7bd0f8df2627e9a527 | Change order of the tests | mcolom/ipolDevel,mcolom/ipolDevel,mcolom/ipolDevel,mcolom/ipolDevel | ci_tests/all.py | ci_tests/all.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import random
from subprocess import Popen, PIPE
import os
system_test = '/home/ipol/ipolDevel/ci_tests/system.py'
archive_test = '/home/ipol/ipolDevel/ipol_demo/modules/archive/test.py'
blobs_test = '/home/ipol/ipolDevel/ipol_demo/modules/blobs/test.py'
demoinfo_test = '/home/ipol/ipolDevel/ipol_demo/modules/demoinfo/test.py'
dispatcher_test = '/home/ipol/ipolDevel/ipol_demo/modules/dispatcher/test.py'
demorunner_test = '/home/ipol/ipolDevel/ipol_demo/modules/demorunner/test.py'
resources = '/home/ipol/ipolDevel/ci_tests/resources'
demorunners = '/home/ipol/ipolDevel/ipol_demo/modules/config_common/demorunners.xml'
shared_folder = '/home/ipol/ipolDevel/shared_folder'
tests = [demoinfo_test, blobs_test, archive_test, dispatcher_test, demorunner_test, system_test]
def start():
"""
Start the script
"""
try:
while not can_execute():
# Wait random time between 5 and 10 sec to try to execute the test again
time.sleep(5 + random.random() * 5)
run_tests()
finally:
os.remove('test.lock')
def can_execute():
"""
Check if the test can be executed. Only 1 test can be executed simultaneously
"""
if os.path.isfile('test.lock'):
return False
open('test.lock', 'w')
return True
def run_tests():
"""
Execute all the tests
"""
for test in tests:
# Print the tested module
module_name = os.path.basename(os.path.split(test)[0]).title()
if module_name == '': module_name = 'System'
# Execute test
process = Popen(['python', test, resources, demorunners, shared_folder], stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print "{} test failed:".format(module_name)
print stderr
print stdout
exit(process.returncode)
start()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import random
from subprocess import Popen, PIPE
import os
system_test = '/home/ipol/ipolDevel/ci_tests/system.py'
archive_test = '/home/ipol/ipolDevel/ipol_demo/modules/archive/test.py'
blobs_test = '/home/ipol/ipolDevel/ipol_demo/modules/blobs/test.py'
demoinfo_test = '/home/ipol/ipolDevel/ipol_demo/modules/demoinfo/test.py'
dispatcher_test = '/home/ipol/ipolDevel/ipol_demo/modules/dispatcher/test.py'
demorunner_test = '/home/ipol/ipolDevel/ipol_demo/modules/demorunner/test.py'
resources = '/home/ipol/ipolDevel/ci_tests/resources'
demorunners = '/home/ipol/ipolDevel/ipol_demo/modules/config_common/demorunners.xml'
shared_folder = '/home/ipol/ipolDevel/shared_folder'
tests = [system_test, demoinfo_test, blobs_test, archive_test, dispatcher_test, demorunner_test]
def start():
"""
Start the script
"""
try:
while not can_execute():
# Wait random time between 5 and 10 sec to try to execute the test again
time.sleep(5 + random.random() * 5)
run_tests()
finally:
os.remove('test.lock')
def can_execute():
"""
Check if the test can be executed. Only 1 test can be executed simultaneously
"""
if os.path.isfile('test.lock'):
return False
open('test.lock', 'w')
return True
def run_tests():
"""
Execute all the tests
"""
for test in tests:
# Print the tested module
module_name = os.path.basename(os.path.split(test)[0]).title()
if module_name == '': module_name = 'System'
# Execute test
process = Popen(['python', test, resources, demorunners, shared_folder], stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print "{} test failed:".format(module_name)
print stderr
print stdout
exit(process.returncode)
start()
| agpl-3.0 | Python |
76b047fcb2c80d8abad0b275710b8e01459daadd | Bump version to 0.20.4 | thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader | pytablereader/__version__.py | pytablereader/__version__.py | # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.20.4"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.20.3"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
948ccdf2864988f1463f901c6024a5a3df09fdba | Bump version to 0.30.0 | thombashi/pytablewriter | pytablewriter/__version__.py | pytablewriter/__version__.py | # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.30.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.29.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
3c2a426553072149710d976938918d837cd6c9ff | fix pos | PyThaiNLP/pythainlp | pythainlp/postaggers/text.py | pythainlp/postaggers/text.py | from __future__ import absolute_import,division,print_function
from nine import nimport,str
from pythainlp.segment import segment
import pythainlp
import os
import nltk.tag, nltk.data
json= nimport('json')
codecs= nimport('codecs')
reader = codecs.getreader("utf-8")
templates_dir = os.path.join(os.path.dirname(pythainlp.__file__), 'corpus')
template_file = os.path.join(templates_dir, 'thaipos.json')
data1 = json.load(reader(open(template_file).read()))
#Postaggers ภาษาไทย
def tag(text):
"""รับค่าเป็นข้อความ ''str'' คืนค่าเป็น ''list'' เช่น [('ข้อความ', 'ชนิดคำ')]"""
text= segment(text)
tagger = nltk.tag.UnigramTagger(model=data1)# backoff=default_tagger)
return tagger.tag(text) | from __future__ import absolute_import,division,print_function
from nine import nimport,str
from pythainlp.segment import segment
import pythainlp
import os
json= nimport('json')
import nltk.tag, nltk.data
templates_dir = os.path.join(os.path.dirname(pythainlp.__file__), 'corpus')
template_file = os.path.join(templates_dir, 'thaipos.json')
#default_tagger = nltk.data.load(nltk.tag._POS_TAGGER)
def data():
return json.load(open(template_file).read())
data1 =data()
#Postaggers ภาษาไทย
def tag(text):
"""รับค่าเป็นข้อความ ''str'' คืนค่าเป็น ''list'' เช่น [('ข้อความ', 'ชนิดคำ')]"""
text= segment(text)
tagger = nltk.tag.UnigramTagger(model=data1)# backoff=default_tagger)
return tagger.tag(text) | apache-2.0 | Python |
40adc641638e3efcf8e4e5af898deda9e40da161 | Use NTP terms. Add comments. | rsmith-nl/scripts,rsmith-nl/scripts | ntpclient.py | ntpclient.py | #!/usr/bin/env python3
# file: ntpclient.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2017-11-16 19:33:50 +0100
# Last modified: 2018-09-09T14:10:52+0200
"""Simple NTP query program."""
from contextlib import closing
from datetime import datetime
from socket import socket, AF_INET, SOCK_DGRAM
import os
import struct
import time
# See e.g. # https://www.cisco.com/c/en/us/about/press/internet-protocol-journal/back-issues/table-contents-58/154-ntp.html
# From left to right:
# * No leap second adjustment = 0 (2 bits)
# * protocol version 3 (3 bits)
# * client packet = 3 (3 bits)
# In [1]: hex((0 & 0b11) << 6 | (3 & 0b111) << 3 | (3 & 0b111))
# Out[1]: '0x1b'
_query = b'\x1b' + 47 * b'\0'
def get_ntp_time(host="pool.ntp.org", port=123):
fmt = "!12I"
with closing(socket(AF_INET, SOCK_DGRAM)) as s:
s.sendto(_query, (host, port))
msg, address = s.recvfrom(1024)
unpacked = struct.unpack(fmt, msg[0:struct.calcsize(fmt)])
# Return the average of receive and transmit timestamps.
# Note that 2208988800 is the difference in seconds between the
# UNIX epoch 1970-1-1 and the NTP epoch 1900-1-1.
# See: (datetime.datetime(1970,1,1) - datetime.datetim:::::e(1900,1,1)).total_seconds()
t2 = unpacked[8] + float(unpacked[9]) / 2**32 - 2208988800
t3 = unpacked[10] + float(unpacked[11]) / 2**32 - 2208988800
return (t2 + t3) / 2
if __name__ == "__main__":
res = None
t1 = time.clock_gettime(time.CLOCK_REALTIME)
ntptime = get_ntp_time('nl.pool.ntp.org')
t4 = time.clock_gettime(time.CLOCK_REALTIME)
# It is not guaranteed that the NTP time is *exactly* in the middle of both
# local times. But it is a reasonable simplification.
localtime = (t1 + t4) / 2
if os.geteuid() == 0:
time.clock_settime(time.CLOCK_REALTIME, ntptime)
res = 'Time set to NTP time.'
diff = localtime - ntptime
localtime = datetime.fromtimestamp(localtime)
ntptime = datetime.fromtimestamp(ntptime)
print('Local time value:', localtime.strftime('%a %b %d %H:%M:%S.%f %Y'))
print('NTP time value:', ntptime.strftime('%a %b %d %H:%M:%S.%f %Y'))
print('Local time - ntp time: {:.6f} s'.format(diff))
if res:
print(res)
| #!/usr/bin/env python3
# file: ntpclient.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2017-11-16 19:33:50 +0100
# Last modified: 2018-05-13T12:15:28+0200
"""Simple NTP query program."""
from contextlib import closing
from datetime import datetime
from socket import socket, AF_INET, SOCK_DGRAM
import os
import struct
import time
_query = b'\x1b' + 47 * b'\0'
def get_ntp_time(host="pool.ntp.org", port=123):
fmt = "!12I"
with closing(socket(AF_INET, SOCK_DGRAM)) as s:
s.sendto(_query, (host, port))
msg, address = s.recvfrom(1024)
unpacked = struct.unpack(fmt, msg[0:struct.calcsize(fmt)])
return unpacked[10] + float(unpacked[11]) / 2**32 - 2208988800
if __name__ == "__main__":
beforetime = time.clock_gettime(time.CLOCK_REALTIME)
ntptime = get_ntp_time('nl.pool.ntp.org')
aftertime = time.clock_gettime(time.CLOCK_REALTIME)
# It is not guaranteed that the NTP time is *exactly* in the middle of both
# local times. But it is a reasonable simplification.
localtime = (beforetime + aftertime) / 2
if os.geteuid() == 0:
time.clock_settime(time.CLOCK_REALTIME, ntptime)
res = 'Time set to NTP time.'
else:
res = 'Can not set time: not superuser.'
diff = localtime - ntptime
localtime = datetime.fromtimestamp(localtime)
ntptime = datetime.fromtimestamp(ntptime)
print('Local time value:', localtime.strftime('%a %b %d %H:%M:%S.%f %Y'))
print('NTP time value:', ntptime.strftime('%a %b %d %H:%M:%S.%f %Y'))
print('Local time - ntp time: {:.6f} s'.format(diff))
print(res)
| mit | Python |
2a4d78be3df2d068431fe007b6f2d73956dc23d4 | Use feeds instead of rss | vjousse/viserlalune,vjousse/viserlalune,vjousse/viserlalune,vjousse/viserlalune | sitecontent/urls.py | sitecontent/urls.py | from django.conf.urls import patterns, url
from sitecontent import views
urlpatterns = patterns('',
url("^feeds/(?P<format>.*)$",
"sitecontent.views.blog_post_feed_richtext_filters", name="blog_post_feed_richtext_filters"),
)
| from django.conf.urls import patterns, url
from sitecontent import views
urlpatterns = patterns('',
url("^rss/(?P<format>.*)$",
"sitecontent.views.blog_post_feed_richtext_filters", name="blog_post_feed_richtext_filters"),
)
| mit | Python |
097a057d289fc6a00c6841f26c5d4776b4a6ea48 | Fix import. | rezoo/chainer,okuta/chainer,hvy/chainer,cupy/cupy,hvy/chainer,niboshi/chainer,keisuke-umezawa/chainer,jnishi/chainer,jnishi/chainer,niboshi/chainer,keisuke-umezawa/chainer,chainer/chainer,okuta/chainer,cupy/cupy,hvy/chainer,anaruse/chainer,pfnet/chainer,ysekky/chainer,niboshi/chainer,jnishi/chainer,wkentaro/chainer,chainer/chainer,aonotas/chainer,keisuke-umezawa/chainer,cupy/cupy,wkentaro/chainer,ronekko/chainer,kiyukuta/chainer,kashif/chainer,tkerola/chainer,wkentaro/chainer,okuta/chainer,jnishi/chainer,chainer/chainer,delta2323/chainer,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,hvy/chainer,wkentaro/chainer,cupy/cupy,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer | tests/chainer_tests/functions_tests/pooling_tests/pooling_nd_helper.py | tests/chainer_tests/functions_tests/pooling_tests/pooling_nd_helper.py | import itertools
import nose.tools
import six
from chainer import testing
@nose.tools.nottest
def pooling_patches(dims, ksize, stride, pad, cover_all):
"""Return tuples of slices that indicate pooling patches."""
# Left-top indexes of each pooling patch.
if cover_all:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + s, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
else:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + 1, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
# Tuples of slices for pooling patches.
return [tuple(slice(max(x, 0), min(x + k, d))
for (x, d, k) in six.moves.zip(xs, dims, ksize))
for xs in xss]
testing.run_module(__name__, __file__)
| import itertools
import nose.tools
import six
import testing
@nose.tools.nottest
def pooling_patches(dims, ksize, stride, pad, cover_all):
"""Return tuples of slices that indicate pooling patches."""
# Left-top indexes of each pooling patch.
if cover_all:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + s, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
else:
xss = itertools.product(
*[six.moves.range(-p, d + p - k + 1, s)
for (d, k, s, p) in six.moves.zip(dims, ksize, stride, pad)])
# Tuples of slices for pooling patches.
return [tuple(slice(max(x, 0), min(x + k, d))
for (x, d, k) in six.moves.zip(xs, dims, ksize))
for xs in xss]
testing.run_module(__name__, __file__)
| mit | Python |
72f763d9759438abd731585a1b5ef67e62e27181 | Use version gathering logic from hydrachain | gsalgado/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp,changwu-tw/pyethapp,changwu-tw/pyethapp,gsalgado/pyethapp | pyethapp/__init__.py | pyethapp/__init__.py | # -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
import re
GIT_DESCRIBE_RE = re.compile('^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')
__version__ = None
try:
_dist = get_distribution('pyethapp')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'pyethapp')):
# not installed, but there is another version that *is*
raise DistributionNotFound
__version__ = _dist.version
except DistributionNotFound:
pass
if not __version__:
try:
rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'],
stderr=subprocess.STDOUT)
match = GIT_DESCRIBE_RE.match(rev)
if match:
__version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
except:
pass
if not __version__:
__version__ = 'undefined'
# ########### endversion ##################
| # -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
try:
_dist = get_distribution('pyethapp')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'pyethapp')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = None
else:
__version__ = _dist.version
if not __version__:
try:
# try to parse from setup.py
for l in open(os.path.join(__path__[0], '..', 'setup.py')):
if l.startswith("version = '"):
__version__ = l.split("'")[1]
break
except:
pass
finally:
if not __version__:
__version__ = 'undefined'
# add git revision and commit status
try:
rev = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
is_dirty = len(subprocess.check_output(['git', 'diff', '--shortstat']).strip())
__version__ += '-' + rev[:4] + '-dirty' if is_dirty else ''
except:
pass
# ########### endversion ##################
| mit | Python |
bc823de2236c02b0a5179b34134d712e176e39ae | Update test | BakeCode/performance-testing,BakeCode/performance-testing | tests/routine/test_tool.py | tests/routine/test_tool.py | import unittest
from performance.routine import Tool, Config
class ToolTestCase(unittest.TestCase):
def setUp(self):
self.host = 'http://www.google.com'
self.config = Config(host=self.host)
def test_init(self):
tool = Tool(config=self.config)
self.assertEqual(self.config, tool.config)
with self.assertRaises(TypeError) as error:
tool = Tool(config='invalid_config')
self.assertEqual('No performance.routine.Config object', error.exception.__str__())
| import unittest
from performance.routine import Tool, Config
class ToolTestCase(unittest.TestCase):
def setUp(self):
self.host = 'http://www.google.com'
self.config = Config(host=self.host)
def test_init(self):
tool = Tool(config=self.config)
self.assertEqual(self.config, tool.config)
with self.assertRaises(TypeError) as error:
tool = Tool(config='invalid_config')
self.assertEqual('No performance.routine.Config object', error.exception.message)
| mit | Python |
faaa3898a0baf6b15696e651e2589cb60c032bd4 | Improve docs. | yanikou19/pymatgen,migueldiascosta/pymatgen,sonium0/pymatgen,yanikou19/pymatgen,ctoher/pymatgen,rousseab/pymatgen,sonium0/pymatgen,migueldiascosta/pymatgen,Dioptas/pymatgen,Bismarrck/pymatgen,rousseab/pymatgen,ctoher/pymatgen,ctoher/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,migueldiascosta/pymatgen,Dioptas/pymatgen,sonium0/pymatgen,yanikou19/pymatgen,Bismarrck/pymatgen,rousseab/pymatgen | pymatgen/__init__.py | pymatgen/__init__.py | __author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Michael Kocher", "Dan Gunter", "Shreyas Cholia",
"Vincent L Chevrier", "Rickard Armiento"])
__date__ = "May 12 2013"
__version__ = "2.7.2b"
import json
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
def pmg_load(filename, **kwargs):
"""
Loads a json file and deserialize it with PMGJSONDecoder.
Args:
filename:
Filename of file to open. Can be gzipped or bzipped.
**kwargs:
Any of the keyword arguments supported by the json.load method.
Returns:
Deserialized pymatgen object. Note that these objects can be lists,
dicts or otherwise nested pymatgen objects that support the to_dict
and from_dict MSONAble protocol.
"""
return json.load(zopen(filename), cls=PMGJSONDecoder, **kwargs)
def pmg_dump(obj, filename, **kwargs):
"""
Dump an object to a json file using PMGJSONEncoder. Note that these
objects can be lists, dicts or otherwise nested pymatgen objects that
support the to_dict and from_dict MSONAble protocol.
Args:
obj:
Object to dump.
filename:
Filename of file to open. Can be gzipped or bzipped.
**kwargs:
Any of the keyword arguments supported by the json.load method.
"""
return json.dump(obj, zopen(filename, "w"), cls=PMGJSONEncoder, **kwargs)
| __author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Michael Kocher", "Dan Gunter", "Shreyas Cholia",
"Vincent L Chevrier", "Rickard Armiento"])
__date__ = "May 12 2013"
__version__ = "2.7.2b"
import json
#Useful aliases for commonly used objects and modules.
from .core import *
from .serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from .electronic_structure.core import Spin, Orbital
from .util.io_utils import zopen
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
def pmg_load(filename, **kwargs):
"""
Loads a json file and deserialize it with PMGJSONDecoder.
Args:
filename:
Filename of file to open. Can be gzipped or bzipped.
**kwargs:
Any of the keyword arguments supported by the json.load method.
Returns:
Deserialized pymatgen object.
"""
return json.load(zopen(filename), cls=PMGJSONDecoder, **kwargs)
def pmg_dump(obj, filename, **kwargs):
"""
Dump an object to a json file using PMGJSONEncoder.
Args:
obj:
Object to dump.
filename:
Filename of file to open. Can be gzipped or bzipped.
**kwargs:
Any of the keyword arguments supported by the json.load method.
"""
return json.dump(obj, zopen(filename, "w"), cls=PMGJSONEncoder, **kwargs)
| mit | Python |
739aaf65d1ee45392c682a06958e7036fbcfda08 | Fix RecTimer migration. | EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base | calvin/actorstore/systemactors/std/RecTimer.py | calvin/actorstore/systemactors/std/RecTimer.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, manage, condition, stateguard
class RecTimer(Actor):
"""
Pass input after a given delay
Input :
token : anything
Outputs:
token : anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.setup()
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.use('calvinsys.events.timer', shorthand='timer')
self.setup()
@stateguard(lambda self: self.timer and self.timer.triggered)
@condition(['token'], ['token'])
def flush(self, input):
return (input, )
@stateguard(lambda self: self.timer and self.timer.triggered)
@condition()
def clear(self):
self.timer.ack()
action_priority = (flush, clear)
requires = ['calvinsys.events.timer']
test_args = [1]
# Trigger a timer then add tokens. The tokens shall wait for the next trigger.
test_set = [
{
'setup': [lambda self: self.timer.trigger()],
'in': {'token': []}, 'out': {'token': []}
}
]
# Add tokens, nothing returned since timer not triggered above shall have cleared.
test_set += [
{'in': {'token': [r]}, 'out': {'token': []}} for r in range(3)
]
# Trigger the timer once then fetch three tokens.
# All tokens shall be flushed.
test_set += [
{
'setup': [lambda self: self.timer.trigger()],
'in': {'token': []}, 'out': {'token': [0]}
},
{'in': {'token': []}, 'out': {'token': [1]}},
{'in': {'token': []}, 'out': {'token': [2]}}
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, manage, condition, stateguard
class RecTimer(Actor):
"""
Pass input after a given delay
Input :
token : anything
Outputs:
token : anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.setup()
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@stateguard(lambda self: self.timer and self.timer.triggered)
@condition(['token'], ['token'])
def flush(self, input):
return (input, )
@stateguard(lambda self: self.timer and self.timer.triggered)
@condition()
def clear(self):
self.timer.ack()
action_priority = (flush, clear)
requires = ['calvinsys.events.timer']
test_args = [1]
# Trigger a timer then add tokens. The tokens shall wait for the next trigger.
test_set = [
{
'setup': [lambda self: self.timer.trigger()],
'in': {'token': []}, 'out': {'token': []}
}
]
# Add tokens, nothing returned since timer not triggered above shall have cleared.
test_set += [
{'in': {'token': [r]}, 'out': {'token': []}} for r in range(3)
]
# Trigger the timer once then fetch three tokens.
# All tokens shall be flushed.
test_set += [
{
'setup': [lambda self: self.timer.trigger()],
'in': {'token': []}, 'out': {'token': [0]}
},
{'in': {'token': []}, 'out': {'token': [1]}},
{'in': {'token': []}, 'out': {'token': [2]}}
]
| apache-2.0 | Python |
acde8d6665f298627d0fee5e2e275fa786eb6bdb | fix manifest writing under py2 | ownaginatious/fbchat-archive-parser,ownaginatious/fbchat-archive-parser | fbchat_archive_parser/writers/__init__.py | fbchat_archive_parser/writers/__init__.py | from __future__ import unicode_literals
from datetime import datetime
import io
import os
import shutil
import six
if six.PY2:
FileNotFoundError = OSError
from .json import JsonWriter
from .pretty_json import PrettyJsonWriter
from .csv import CsvWriter
from .text import TextWriter
from .yaml import YamlWriter
_BUILTIN_WRITERS = {
"json": JsonWriter,
"pretty-json": PrettyJsonWriter,
"csv": CsvWriter,
"text": TextWriter,
"yaml": YamlWriter,
}
BUILTIN_WRITERS = tuple(sorted(list(_BUILTIN_WRITERS.keys())))
class SerializerDoesNotExist(KeyError):
"""The requested serializer was not found."""
pass
def write(fmt, data, stream_or_dir):
if fmt not in _BUILTIN_WRITERS:
raise SerializerDoesNotExist("No such serializer '%s'" % fmt)
selected_writer = _BUILTIN_WRITERS[fmt]
if isinstance(stream_or_dir, six.string_types):
write_to_dir(selected_writer(), stream_or_dir, data)
else:
selected_writer().write(data, stream_or_dir)
def write_to_dir(writer, directory, data):
output_dir = datetime.now().strftime("fbchat_dump_%Y%m%d%H%M")
directory = '%s/%s' % (directory, output_dir)
try:
shutil.rmtree(directory)
except FileNotFoundError:
pass
os.makedirs(directory)
ordered_threads = [data.threads[k] for k in sorted(list(data.threads.keys()))]
# Write the manifest
with io.open("%s/manifest.txt" % directory, 'w', encoding='utf-8') as manifest:
manifest.write("Chat history manifest for: %s\n\n" % data.user)
for i, thread in enumerate(ordered_threads, start=1):
manifest.write(" %s. %s\n" % (i, ", ".join(thread.participants)))
# Write each thread.
for i, thread in enumerate(ordered_threads, start=1):
thread_file_str = "%s/thread_%s.%s" % (directory, i, writer.extension)
with io.open(thread_file_str, 'w', encoding='utf-8') as thread_file:
writer.write_thread(thread, stream=thread_file)
print("Thread content written to [%s]" % directory)
| from datetime import datetime
import io
import os
import shutil
import six
if six.PY2:
FileNotFoundError = OSError
from .json import JsonWriter
from .pretty_json import PrettyJsonWriter
from .csv import CsvWriter
from .text import TextWriter
from .yaml import YamlWriter
_BUILTIN_WRITERS = {
"json": JsonWriter,
"pretty-json": PrettyJsonWriter,
"csv": CsvWriter,
"text": TextWriter,
"yaml": YamlWriter,
}
BUILTIN_WRITERS = tuple(sorted(list(_BUILTIN_WRITERS.keys())))
class SerializerDoesNotExist(KeyError):
"""The requested serializer was not found."""
pass
def write(fmt, data, stream_or_dir):
if fmt not in _BUILTIN_WRITERS:
raise SerializerDoesNotExist("No such serializer '%s'" % fmt)
selected_writer = _BUILTIN_WRITERS[fmt]
if isinstance(stream_or_dir, six.string_types):
write_to_dir(selected_writer(), stream_or_dir, data)
else:
selected_writer().write(data, stream_or_dir)
def write_to_dir(writer, directory, data):
output_dir = datetime.now().strftime("fbchat_dump_%Y%m%d%H%M")
directory = '%s/%s' % (directory, output_dir)
try:
shutil.rmtree(directory)
except FileNotFoundError:
pass
os.makedirs(directory)
ordered_threads = [data.threads[k] for k in sorted(list(data.threads.keys()))]
# Write the manifest
with io.open("%s/manifest.txt" % directory, 'w', encoding='utf-8') as manifest:
manifest.write("Chat history manifest for: %s\n\n" % data.user)
for i, thread in enumerate(ordered_threads, start=1):
manifest.write(" %s. %s\n" % (i, ", ".join(thread.participants)))
# Write each thread.
for i, thread in enumerate(ordered_threads, start=1):
thread_file_str = "%s/thread_%s.%s" % (directory, i, writer.extension)
with io.open(thread_file_str, 'w', encoding='utf-8') as thread_file:
writer.write_thread(thread, stream=thread_file)
print("Thread content written to [%s]" % directory)
| mit | Python |
4541605e27c9fef6cc23b245de50867ff22ea6aa | Test Case for accounting dimension | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext | erpnext/accounts/doctype/accounting_dimension/test_accounting_dimension.py | erpnext/accounts/doctype/accounting_dimension/test_accounting_dimension.py | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
class TestAccountingDimension(unittest.TestCase):
def setUp(self):
frappe.set_user("Administrator")
if not frappe.db.exists("Accounting Dimension", {"document_type": "Department"}):
dimension = frappe.get_doc({
"doctype": "Accounting Dimension",
"document_type": "Department",
}).insert()
def test_dimension_against_sales_invoice(self):
si = create_sales_invoice(do_not_save=1)
si.append("items", {
"item_code": "_Test Item",
"warehouse": "_Test Warehouse - _TC",
"qty": 1,
"rate": 100,
"income_account": "Sales - _TC",
"expense_account": "Cost of Goods Sold - _TC",
"cost_center": "_Test Cost Center - _TC",
"department": "_Test Department - _TC"
})
si.save()
si.submit()
gle = frappe.get_doc("GL Entry", {"voucher_no": si.name, "account": "Sales - _TC"})
self.assertEqual(gle.department, "_Test Department - _TC")
def test_dimension_against_journal_entry(self):
je = make_journal_entry("Sales - _TC", "Sales Expenses - _TC", 500, save=False)
je.accounts[0].update({"department": "_Test Department - _TC"})
je.accounts[1].update({"department": "_Test Department - _TC"})
je.save()
je.submit()
gle = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales - _TC"})
gle1 = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales Expenses - _TC"})
self.assertEqual(gle.department, "_Test Department - _TC")
self.assertEqual(gle1.department, "_Test Department - _TC")
| # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestAccountingDimension(unittest.TestCase):
pass
| agpl-3.0 | Python |
27112881583e53d790e66d31a2bb4d2a996ee405 | Move import to top level to avoid import fail after fist time on sys.modules hack | JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp | python/sparknlp/functions.py | python/sparknlp/functions.py | from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
| from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
| apache-2.0 | Python |
1d1854040751a67aa04399f13243299d3dd065b4 | fix tabs->spaces | kived/python-for-android,kived/python-for-android,rnixx/python-for-android,wexi/python-for-android,germn/python-for-android,ibobalo/python-for-android,rnixx/python-for-android,kivy/python-for-android,kivy/python-for-android,cbenhagen/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,inclement/python-for-android,kivy/python-for-android,germn/python-for-android,bob-the-hamster/python-for-android,rnixx/python-for-android,kived/python-for-android,inclement/python-for-android,kived/python-for-android,ibobalo/python-for-android,ibobalo/python-for-android,kronenpj/python-for-android,cbenhagen/python-for-android,bob-the-hamster/python-for-android,bob-the-hamster/python-for-android,cbenhagen/python-for-android,inclement/python-for-android,kronenpj/python-for-android,kived/python-for-android,rnixx/python-for-android,wexi/python-for-android,cbenhagen/python-for-android,wexi/python-for-android,PKRoma/python-for-android,bob-the-hamster/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,germn/python-for-android,wexi/python-for-android,rnixx/python-for-android,germn/python-for-android,inclement/python-for-android,ibobalo/python-for-android,ibobalo/python-for-android,germn/python-for-android,wexi/python-for-android,wexi/python-for-android,bob-the-hamster/python-for-android,cbenhagen/python-for-android,PKRoma/python-for-android,kivy/python-for-android,PKRoma/python-for-android,germn/python-for-android,inclement/python-for-android,kivy/python-for-android,cbenhagen/python-for-android,kived/python-for-android,inclement/python-for-android,ibobalo/python-for-android,bob-the-hamster/python-for-android | pythonforandroid/patching.py | pythonforandroid/patching.py | from os import uname
def check_all(*callables):
def check(**kwargs):
return all(c(**kwargs) for c in callables)
return check
def check_any(*callables):
def check(**kwargs):
return any(c(**kwargs) for c in callables)
return check
def is_platform(platform):
def is_x(**kwargs):
return uname()[0] == platform
return is_x
is_linux = is_platform('Linux')
is_darwin = is_platform('Darwin')
def is_arch(xarch):
def is_x(arch, **kwargs):
return arch.arch == xarch
return is_x
def is_api_gt(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api > apiver
return is_x
def is_api_gte(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api >= apiver
return is_x
def is_api_lt(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api < apiver
return is_x
def is_api_lte(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api <= apiver
return is_x
def is_api(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api == apiver
return is_x
def will_build(recipe_name):
def will(recipe, **kwargs):
return recipe_name in recipe.ctx.recipe_build_order
return will
| from os import uname
def check_all(*callables):
def check(**kwargs):
return all(c(**kwargs) for c in callables)
return check
def check_any(*callables):
def check(**kwargs):
return any(c(**kwargs) for c in callables)
return check
def is_platform(platform):
def is_x(**kwargs):
return uname()[0] == platform
return is_x
is_linux = is_platform('Linux')
is_darwin = is_platform('Darwin')
def is_arch(xarch):
def is_x(arch, **kwargs):
return arch.arch == xarch
return is_x
def is_api_gt(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api > apiver
return is_x
def is_api_gte(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api >= apiver
return is_x
def is_api_lt(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api < apiver
return is_x
def is_api_lte(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api <= apiver
return is_x
def is_api(apiver):
def is_x(recipe, **kwargs):
return recipe.ctx.android_api == apiver
return is_x
def will_build(recipe_name):
def will(recipe, **kwargs):
return recipe_name in recipe.ctx.recipe_build_order
return will
| mit | Python |
62e0ebb0eddf97437c41c9cfe4d8487236f141b4 | Stop using 'with', which is illegal in Python < 2.5. | peterldowns/python-mustache,peterldowns/python-mustache | mustache/loading.py | mustache/loading.py | # coding: utf-8
from os.path import split, splitext, extsep, join, abspath, exists
from utils import make_unicode
DEFAULT_EXTENSION = 'html'
DEFAULT_DIRECTORY = 'static/templates'
DEFAULT_ENCODING = 'utf-8'
DEFAULT_ERRORS = 'xmlcharrefreplace'
def read(path):
""" Return the contents of a file as a byte string. """
try:
f = open(path, 'rb')
return f.read()
finally:
f.close()
def read_unicode(path, encoding, encoding_errors):
""" Return the contents of a file as a unicode string. """
try:
f = open(path, 'rb')
return make_unicode(f.read(), encoding, encoding_errors)
finally:
f.close()
def get_abs_template_path(template_name, directory, extension):
""" Given a template name, a directory, and an extension, return the
absolute path to the template. """
# Get the relative path
relative_path = join(directory, template_name)
file_with_ext = template_name
if extension:
# If there is a default extension, but no file extension, then add it
file_name, file_ext = splitext(file_with_ext)
if not file_ext:
file_with_ext = extsep.join(
(file_name, extension.replace(extsep, '')))
# Rebuild the relative path
relative_path = join(directory, file_with_ext)
return abspath(relative_path)
def load_file(path, encoding, encoding_errors):
""" Given an existing path, attempt to load it as a unicode string. """
abs_path = abspath(path)
if exists(abs_path):
return read_unicode(abs_path, encoding, encoding_errors)
raise IOError("File {0} does not exist".format(abs_path))
def load_template(name, directory, extension, encoding, encoding_errors):
""" Load a template and return its contents as a unicode string. """
abs_path = get_abs_template_path(name, directory, extension)
return load_file(abs_path, encoding, encoding_errors)
| # coding: utf-8
from os.path import split, splitext, extsep, join, abspath, exists
from utils import make_unicode
DEFAULT_EXTENSION = 'html'
DEFAULT_DIRECTORY = 'static/templates'
DEFAULT_ENCODING = 'utf-8'
DEFAULT_ERRORS = 'xmlcharrefreplace'
def read(path):
""" Return the contents of a file as a byte string. """
with open(path, 'rb') as f:
return f.read()
def read_unicode(path, encoding, encoding_errors):
""" Return the contents of a file as a unicode string. """
with open(path, 'rb') as f:
return make_unicode(f.read(), encoding, encoding_errors)
def get_abs_template_path(template_name, directory, extension):
""" Given a template name, a directory, and an extension, return the
absolute path to the template. """
# Get the relative path
relative_path = join(directory, template_name)
file_with_ext = template_name
if extension:
# If there is a default extension, but no file extension, then add it
file_name, file_ext = splitext(file_with_ext)
if not file_ext:
file_with_ext = extsep.join(
(file_name, extension.replace(extsep, '')))
# Rebuild the relative path
relative_path = join(directory, file_with_ext)
return abspath(relative_path)
def load_file(path, encoding, encoding_errors):
""" Given an existing path, attempt to load it as a unicode string. """
abs_path = abspath(path)
if exists(abs_path):
return read_unicode(abs_path, encoding, encoding_errors)
raise IOError("File {0} does not exist".format(abs_path))
def load_template(name, directory, extension, encoding, encoding_errors):
""" Load a template and return its contents as a unicode string. """
abs_path = get_abs_template_path(name, directory, extension)
return load_file(abs_path, encoding, encoding_errors)
| mit | Python |
4ccdb3d5c4158db90445b2b767ce00ba6755635c | Add check for FORCE_LOCAL flag, similarly to FORCE_NOT_LOCAL. (#81) | google/fuzzbench,google/fuzzbench,google/fuzzbench,google/fuzzbench,google/fuzzbench | common/utils.py | common/utils.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities."""
import hashlib
import os
import urllib.request
import urllib.error
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# pylint: disable=invalid-name
_is_local = None
if os.getenv('FORCE_NOT_LOCAL'):
# Allow local users to force is_local to return False. This allows things
# like stackdriver logging to happen when running code locally.
_is_local = False
if os.getenv('FORCE_LOCAL'):
_is_local = True
def is_local():
    """Returns True if called on a local development machine.
    Returns False if called on Google Cloud."""
    global _is_local  # pylint: disable=invalid-name
    # A cached result — or a FORCE_LOCAL / FORCE_NOT_LOCAL override set at
    # module import time — short-circuits the network probe below.
    if _is_local is not None:
        return _is_local
    try:
        # TODO(github.com/google/fuzzbench/issues/82): Get rid of this.
        # The GCE metadata endpoint is only reachable from inside Google
        # Cloud, so a URLError implies we are running locally.
        urllib.request.urlopen('http://metadata.google.internal')
        _is_local = False
    except urllib.error.URLError:
        _is_local = True
    return _is_local
def string_hash(obj):
    """Returns a SHA-1 hash of the object. Not used for security purposes."""
    # Any object is acceptable: it is stringified, then UTF-8 encoded.
    encoded = str(obj).encode('utf-8')
    return hashlib.sha1(encoded).hexdigest()
def file_hash(file_path):
    """Returns the SHA-1 hash of |file_path| contents."""
    read_size = 51200  # Read in 50 KB chunks.
    sha = hashlib.sha1()
    with open(file_path, 'rb') as fp:
        # iter() with a b'' sentinel yields chunks until EOF.
        for block in iter(lambda: fp.read(read_size), b''):
            sha.update(block)
    return sha.hexdigest()
| # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities."""
import hashlib
import os
import urllib.request
import urllib.error
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# pylint: disable=invalid-name
_is_local = None
if os.getenv('FORCE_NOT_LOCAL'):
# Allow local users to force is_local to return False. This allows things
# like stackdriver logging to happen when running code locally.
_is_local = False
def is_local():
"""Returns True if called on a local development machine.
Returns False if called on Google Cloud."""
global _is_local # pylint: disable=invalid-name
if _is_local is not None:
return _is_local
try:
urllib.request.urlopen('http://metadata.google.internal')
_is_local = False
except urllib.error.URLError:
_is_local = True
return _is_local
def string_hash(obj):
"""Returns a SHA-1 hash of the object. Not used for security purposes."""
return hashlib.sha1(str(obj).encode('utf-8')).hexdigest()
def file_hash(file_path):
"""Returns the SHA-1 hash of |file_path| contents."""
chunk_size = 51200 # Read in 50 KB chunks.
digest = hashlib.sha1()
with open(file_path, 'rb') as file_handle:
chunk = file_handle.read(chunk_size)
while chunk:
digest.update(chunk)
chunk = file_handle.read(chunk_size)
return digest.hexdigest()
| apache-2.0 | Python |
d4188ffb4b98d78ae9d2512b611e9ff88a8c14be | Update to python3 | chin8628/Reg2Calendar,chin8628/Reg2Calendar | gcalendar_gen_class/genclass/views.py | gcalendar_gen_class/genclass/views.py | from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from .reg2cal_func import UploadText, convert2calendar, get_time, create_ical_download
import csv
import base64
def index(request):
    """Render the upload form; on a valid POST, convert the submitted
    registration HTML into a downloadable iCalendar (.ics) file."""
    if request.method == 'POST':
        form = UploadText(request.POST)
        if form.is_valid():
            # convert2calendar returns 0 when the pasted HTML cannot be parsed.
            data = convert2calendar(form.cleaned_data['regHtml'])
            if data == 0 :
                error = '<div class="alert alert-danger" role="alert"><b>Error :</b> Wrong html source code!</div>'
                return render(request, 'genclass/index.html', {'form': form, 'error': error})
            open_day = form.cleaned_data['open_date_semester']
            end_day = form.cleaned_data['end_date_semester']
            # assumes convert2calendar yields UTF-8 byte strings -- TODO confirm
            data = [i.decode('utf-8') for i in data]
            content = create_ical_download(open_day, end_day, data)
            # Embed the .ics payload as a base64 data: URI so the browser
            # can download it without a second round trip to the server.
            content = base64.b64encode(content.encode()).decode('utf-8')
            download_data = '<a id="ical_link" href="data:text/calendar;charset=utf-8;base64,' + content + '" download="export.ics" style="display: hidden">A</a>'
            # Client-side: auto-click the hidden link, remove it, then
            # redirect to the success page.
            download_script = "$(document).ready(function(){ $('#ical_link')[0].click(); $('#ical_link').remove(); window.location.href = '/success'; });"
            return render(request, 'genclass/index.html', {'form': form, 'download_script': download_script, 'download_data': download_data})
    else:
        form = UploadText()
    # Invalid POSTs fall through here too, re-rendering with form errors.
    return render(request, 'genclass/index.html', {'form': form})
def success(request):
    """Render the post-download confirmation page."""
    return render(request, 'genclass/success.html')
def help(request):
    """Render the usage-instructions page."""
    # NOTE: shadows the builtin help(); renaming would require touching the
    # URLconf, so it is only flagged here.
    return render(request, 'genclass/help.html')
from django.shortcuts import render
from .reg2cal_func import UploadText, convert2calendar, get_time, create_ical_download
import csv
import base64
def index(request):
if request.method == 'POST':
form = UploadText(request.POST)
if form.is_valid():
data = convert2calendar(form.cleaned_data['regHtml'])
if data == 0 :
error = '<div class="alert alert-danger" role="alert"><b>Error :</b> Wrong html source code!</div>'
return render(request, 'genclass/index.html', {'form': form, 'error': error})
open_day = form.cleaned_data['open_date_semester']
end_day = form.cleaned_data['end_date_semester']
content = create_ical_download(open_day, end_day, data)
content = base64.b64encode(str(content.encode('utf-8')))
download_data = '<a id="ical_link" href="data:text/calendar;charset=utf-8;base64,' + content + '" download="export.ics" style="display: hidden">A</a>'
download_script = "$(document).ready(function(){ $('#ical_link')[0].click(); $('#ical_link').remove(); window.location.href = '/success'; });"
return render(request, 'genclass/index.html', {'form': form, 'download_script': download_script, 'download_data': download_data})
else:
form = UploadText()
return render(request, 'genclass/index.html', {'form': form})
def success(request):
return render(request, 'genclass/success.html')
def help(request):
return render(request, 'genclass/help.html') | mit | Python |
4c8d04fc39f8e15f88d69a1063313783e0a131b4 | Change return to string | louismerlin/keyboard-games,louismerlin/keyboard-games | server.py | server.py | from flask import Flask
from flask import request
from tictactoe import *
app = Flask(__name__)
game = TicTacToe()
@app.route("/")
def hello():
    """Report the current game status at the site root.

    someone_won is cast to str because a Flask view must return a
    string/response object, not a bare value.
    """
    return str(game.someone_won)
@app.route("/key", methods=['POST'])
def key():
    """Receive a key press as a JSON POST and advance the game."""
    # silent=True makes get_json return None instead of raising on bad JSON.
    print(request.get_json(silent=True))
    # incYo comes from the star import of the tictactoe module -- presumably
    # advances the turn state; verify against that module.
    return incYo()
if __name__ == "__main__":
app.run()
| from flask import Flask
from flask import request
from tictactoe import *
app = Flask(__name__)
game = TicTacToe()
@app.route("/")
def hello():
return game.someone_won
@app.route("/key", methods=['POST'])
def key():
print(request.get_json(silent=True))
return incYo()
if __name__ == "__main__":
app.run()
| mit | Python |
7503367828da4f3fa1d6e5035261cf05773b6949 | update version | romonzaman/newfies-dialer,Star2Billing/newfies-dialer,Star2Billing/newfies-dialer,Star2Billing/newfies-dialer,newfies-dialer/newfies-dialer,berinhard/newfies-dialer,saydulk/newfies-dialer,romonzaman/newfies-dialer,emartonline/newfies-dialer,saydulk/newfies-dialer,saydulk/newfies-dialer,emartonline/newfies-dialer,laprice/newfies-dialer,laprice/newfies-dialer,newfies-dialer/newfies-dialer,berinhard/newfies-dialer,newfies-dialer/newfies-dialer,newfies-dialer/newfies-dialer,laprice/newfies-dialer,romonzaman/newfies-dialer,berinhard/newfies-dialer,emartonline/newfies-dialer,Star2Billing/newfies-dialer,saydulk/newfies-dialer,romonzaman/newfies-dialer | newfies/__init__.py | newfies/__init__.py | # -*- coding: utf-8 -*-
"""Voice Broadcast Application"""
# :copyright: (c) 2010 - 2011 by Arezqui Belaid.
# :license: AGPL, see COPYING for more details.
VERSION = (1, 1, 0, "b1")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Arezqui Belaid"
__contact__ = "info@star2billing.com"
__homepage__ = "http://www.newfies-dialer.org"
__docformat__ = "restructuredtext"
| # -*- coding: utf-8 -*-
"""Voice Broadcast Application"""
# :copyright: (c) 2010 - 2011 by Arezqui Belaid.
# :license: AGPL, see COPYING for more details.
VERSION = (1, 1, 0, "a")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Arezqui Belaid"
__contact__ = "info@star2billing.com"
__homepage__ = "http://www.newfies-dialer.org"
__docformat__ = "restructuredtext"
| mpl-2.0 | Python |
e726a91c2b8686a96f5d7155234a0eca3bbb72eb | Rename attachment file based on date. | phani00/tovp,mayapurmedia/tovp,mayapurmedia/tovp,mayapurmedia/tovp,phani00/tovp,phani00/tovp | tovp/attachments/models.py | tovp/attachments/models.py | import os
from datetime import datetime
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from audit_log.models import AuthStampedModel
class AttachmentManager(models.Manager):
    """Manager adding generic-relation lookups for attachments."""

    def attachments_for_object(self, obj):
        """Return all attachments whose generic FK points at *obj*."""
        object_type = ContentType.objects.get_for_model(obj)
        return self.filter(content_type__pk=object_type.id,
                           object_id=obj.id)
class Attachment(TimeStampedModel, AuthStampedModel):
    """A file attached to an arbitrary model instance via a generic FK.

    Creation/modification timestamps come from TimeStampedModel; the
    acting user is recorded by AuthStampedModel.
    """

    def attachment_upload(instance, filename):
        """Stores the attachment in a "per module/appname/primary key" folder"""
        # Path shape: attachments/<app>_<model>/<pk>/<timestamp><ext>.
        # The upload is renamed to a microsecond timestamp so repeated
        # uploads for the same object cannot collide.
        return 'attachments/%s/%s/%s' % (
            '%s_%s' % (instance.content_object._meta.app_label,
                       instance.content_object._meta.object_name.lower()),
            instance.content_object.pk,
            datetime.now().strftime("%Y%m%d%H%M-%f") + os.path.splitext(filename)[1])

    objects = AttachmentManager()

    # Generic foreign key: an attachment can point at any model instance.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey('content_type', 'object_id')

    attachment_file = models.FileField('attachment', upload_to=attachment_upload)
    description = models.CharField(max_length=255, blank=True)

    # Closed set of document kinds accepted by the UI.
    ATTACHMENT_TYPE_CHOICES = (
        ('passport', _('Passport')),
        ('cheque', _('Cheque')),
        ('other', _('Other Document')),
    )
    attachment_type = models.CharField(
        "Attachment Type", max_length=50, choices=ATTACHMENT_TYPE_CHOICES)

    class Meta:
        # ordering = ['-created']
        permissions = (
            ('delete_foreign_attachments', 'Can delete foreign attachments'),
        )

    def __str__(self):
        return 'Attached file: %s' % self.attachment_type

    @property
    def filename(self):
        """Base name of the stored file, with directory components stripped."""
        return os.path.split(self.attachment_file.name)[1]
| import os
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from audit_log.models import AuthStampedModel
class AttachmentManager(models.Manager):
def attachments_for_object(self, obj):
object_type = ContentType.objects.get_for_model(obj)
return self.filter(content_type__pk=object_type.id,
object_id=obj.id)
class Attachment(TimeStampedModel, AuthStampedModel):
def attachment_upload(instance, filename):
"""Stores the attachment in a "per module/appname/primary key" folder"""
return 'attachments/%s/%s/%s' % (
'%s_%s' % (instance.content_object._meta.app_label,
instance.content_object._meta.object_name.lower()),
instance.content_object.pk,
filename)
objects = AttachmentManager()
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
attachment_file = models.FileField('attachment', upload_to=attachment_upload)
description = models.CharField(max_length=255, blank=True)
ATTACHMENT_TYPE_CHOICES = (
('passport', _('Passport')),
('cheque', _('Cheque')),
('other', _('Other Document')),
)
attachment_type = models.CharField(
"Attachment Type", max_length=100, choices=ATTACHMENT_TYPE_CHOICES)
class Meta:
# ordering = ['-created']
permissions = (
('delete_foreign_attachments', 'Can delete foreign attachments'),
)
def __str__(self):
return 'Attached file: %s' % self.attachment_type
@property
def filename(self):
return os.path.split(self.attachment_file.name)[1]
| mit | Python |
c266f5171e875d8dc3abe924e4b6c9ed2a486422 | Add test for non-member access | drcapulet/sentry,daevaorn/sentry,ifduyue/sentry,hongliang5623/sentry,Natim/sentry,wujuguang/sentry,jokey2k/sentry,korealerts1/sentry,llonchj/sentry,songyi199111/sentry,vperron/sentry,BayanGroup/sentry,BuildingLink/sentry,kevinlondon/sentry,fotinakis/sentry,imankulov/sentry,korealerts1/sentry,boneyao/sentry,jean/sentry,hongliang5623/sentry,kevinastone/sentry,fuziontech/sentry,fotinakis/sentry,korealerts1/sentry,pauloschilling/sentry,ifduyue/sentry,camilonova/sentry,argonemyth/sentry,argonemyth/sentry,daevaorn/sentry,jean/sentry,wujuguang/sentry,fotinakis/sentry,looker/sentry,mvaled/sentry,mitsuhiko/sentry,ngonzalvez/sentry,JTCunning/sentry,kevinastone/sentry,TedaLIEz/sentry,camilonova/sentry,jean/sentry,boneyao/sentry,fotinakis/sentry,wong2/sentry,Kryz/sentry,llonchj/sentry,Kryz/sentry,jokey2k/sentry,drcapulet/sentry,songyi199111/sentry,JackDanger/sentry,mvaled/sentry,kevinlondon/sentry,zenefits/sentry,ngonzalvez/sentry,kevinastone/sentry,wong2/sentry,alexm92/sentry,ifduyue/sentry,BayanGroup/sentry,hongliang5623/sentry,JTCunning/sentry,ifduyue/sentry,mitsuhiko/sentry,fuziontech/sentry,beeftornado/sentry,mvaled/sentry,daevaorn/sentry,1tush/sentry,nicholasserra/sentry,JackDanger/sentry,1tush/sentry,zenefits/sentry,Natim/sentry,ewdurbin/sentry,beeftornado/sentry,nicholasserra/sentry,looker/sentry,looker/sentry,wujuguang/sentry,daevaorn/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,camilonova/sentry,gg7/sentry,felixbuenemann/sentry,argonemyth/sentry,imankulov/sentry,ewdurbin/sentry,JamesMura/sentry,BuildingLink/sentry,pauloschilling/sentry,gencer/sentry,boneyao/sentry,nicholasserra/sentry,ifduyue/sentry,gg7/sentry,JamesMura/sentry,llonchj/sentry,TedaLIEz/sentry,mvaled/sentry,BuildingLink/sentry,drcapulet/sentry,BuildingLink/sentry,alexm92/sentry,JackDanger/sentry,BayanGroup/sentry,zenefits/sentry,gg7/sentry,songyi199111/sentry,mvaled/sentry,vperron/sentry,looker/sentry,gencer/sentr
y,kevinlondon/sentry,mvaled/sentry,alexm92/sentry,gencer/sentry,looker/sentry,ewdurbin/sentry,vperron/sentry,1tush/sentry,pauloschilling/sentry,beeftornado/sentry,Kryz/sentry,Natim/sentry,gencer/sentry,felixbuenemann/sentry,JamesMura/sentry,jean/sentry,JTCunning/sentry,JamesMura/sentry,imankulov/sentry,wong2/sentry,JamesMura/sentry,TedaLIEz/sentry,gencer/sentry,jokey2k/sentry,felixbuenemann/sentry,fuziontech/sentry,zenefits/sentry,jean/sentry | tests/sentry/web/frontend/test_organization_home.py | tests/sentry/web/frontend/test_organization_home.py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationHomePermissionTest(PermissionTestCase):
    """Access-control checks for the organization home view."""

    def setUp(self):
        super(OrganizationHomePermissionTest, self).setUp()
        self.path = reverse('sentry-organization-home', args=[self.organization.slug])

    def test_teamless_member_can_load(self):
        # Organization membership alone (no team) is enough to view the page.
        self.assert_teamless_member_can_access(self.path)

    def test_org_member_can_load(self):
        self.assert_org_member_can_access(self.path)

    def test_non_member_cannot_load(self):
        # Users outside the organization must be denied.
        self.assert_non_member_cannot_access(self.path)
class OrganizationHomeTest(TestCase):
    """Rendering checks for the organization home view."""

    def test_renders_with_context(self):
        organization = self.create_organization(name='foo', owner=self.user)
        team = self.create_team(organization=organization)
        project = self.create_project(team=team)
        path = reverse('sentry-organization-home', args=[organization.slug])
        self.login_as(self.user)
        resp = self.client.get(path)
        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/organization-home.html')
        assert resp.context['organization'] == organization
        # team_list pairs each team with the list of its projects.
        assert resp.context['team_list'] == [(team, [project])]
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationHomePermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationHomePermissionTest, self).setUp()
self.path = reverse('sentry-organization-home', args=[self.organization.slug])
def test_teamless_member_can_load(self):
self.assert_teamless_member_can_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationHomeTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-home', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-home.html')
assert resp.context['organization'] == organization
assert resp.context['team_list'] == [(team, [project])]
| bsd-3-clause | Python |
86fa5519164b2e70a7072ffaec0a394b07177631 | make stopword removal optional | juanshishido/okcupid,juanshishido/okcupid | utils/textnormalization.py | utils/textnormalization.py | import re
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import regexp_tokenize
def split_on_sentence(text):
    """Tokenize the text on sentences.

    Returns a list of strings (sentences).
    """
    # Punkt is NLTK's pre-trained, unsupervised sentence-boundary model;
    # note it is re-loaded from disk on every call.
    sent_tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
    return sent_tokenizer.tokenize(text)
def re_punc(text):
    """Remove all punctuation. Keep apostrophes."""
    # One-or-more runs of these characters collapse to a single space;
    # apostrophes are deliberately absent from the class.
    punct_run = r'[!"#$%&()*+,\-\./:;<=>?@\[\]^_`\\{\|}]+'
    return re.sub(punct_run, ' ', text)
def remove_punctuation(sentences):
    """Remove punctuation based on `re_punc`.

    Returns either a list of strings or a single string, mirroring the
    input type.
    """
    # Exact list check kept on purpose: only a plain list gets the
    # element-wise treatment.
    if type(sentences) is not list:
        return re_punc(sentences).strip()
    return [re_punc(item).strip() for item in sentences]
def split_on_word(text):
    """Use regular expression tokenizer.

    Keep apostrophes.
    Returns a list of lists, one list for each sentence:
    [[word, word], [word, word, ..., word], ...].
    """
    # The pattern keeps internal hyphens/apostrophes ("don't", "re-do")
    # attached to the word.
    if type(text) is list:
        return [regexp_tokenize(sentence, pattern="\w+(?:[-']\w+)*")
                for sentence in text]
    else:
        return regexp_tokenize(text, pattern="\w+(?:[-']\w+)*")
def normalize(tokenized_words, remove_stopwords=True):
    """Lowercase words and drop numeric tokens; when *remove_stopwords*
    is true, also drop English stop words and words shorter than 3
    characters.

    Returns a list of lists, one list for each sentence:
    [[word, word], [word, word, ..., word], ...].
    """
    if not remove_stopwords:
        # Light normalization only: lowercase and drop numbers.
        return [[w.lower() for w in sent if not w.lower().isnumeric()]
                for sent in tokenized_words]
    # stopwords.words() returns a list; a set makes each membership test
    # O(1) instead of a linear scan per word.
    stop_words = set(stopwords.words('english'))
    return [[w.lower() for w in sent
             if w.lower() not in stop_words
             and not w.lower().isnumeric()
             and len(w) > 2]
            for sent in tokenized_words]
| import re
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import regexp_tokenize
def split_on_sentence(text):
"""Tokenize the text on sentences.
Returns a list of strings (sentences).
"""
sent_tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
return sent_tokenizer.tokenize(text)
def re_punc(text):
"""Remove all punctuation. Keep apostrophes."""
return re.sub(r'[!"#$%&()*+,\-\./:;<=>?@\[\]^_`\\{\|}]+', ' ', text)
def remove_punctuation(sentences):
"""Remove punctuation based on `re_punc`.
Returns either a list of string or a single string,
based on the input type.
"""
if type(sentences) is list:
return [re_punc(sentence).strip() for sentence in sentences]
else:
return re_punc(sentences).strip()
def split_on_word(text):
"""Use regular expression tokenizer.
Keep apostrophes.
Returns a list of lists, one list for each sentence:
[[word, word], [word, word, ..., word], ...].
"""
if type(text) is list:
return [regexp_tokenize(sentence, pattern="\w+(?:[-']\w+)*")
for sentence in text]
else:
return regexp_tokenize(text, pattern="\w+(?:[-']\w+)*")
def normalize(tokenized_words):
"""Removes stop words, numbers, short words, and lowercases text.
Returns a list of lists, one list for each sentence:
[[word, word], [word, word, ..., word], ...].
"""
stop_words = stopwords.words('english')
return [[w.lower() for w in sent
if (w.lower() not in stop_words) and\
(not(w.lower().isnumeric())) and\
(len(w) > 2)]
for sent in tokenized_words]
| mit | Python |
c12691a20370b0d30b0c84926694a26fc266ea11 | Allow proper throw in compiler (#39) | tqchen/tvm,sxjscience/tvm,Laurawly/tvm-1,dmlc/tvm,Huyuwei/tvm,tqchen/tvm,Laurawly/tvm-1,tqchen/tvm,sxjscience/tvm,Laurawly/tvm-1,dmlc/tvm,tqchen/tvm,Laurawly/tvm-1,dmlc/tvm,Laurawly/tvm-1,dmlc/tvm,Huyuwei/tvm,Huyuwei/tvm,Laurawly/tvm-1,sxjscience/tvm,Laurawly/tvm-1,Huyuwei/tvm,dmlc/tvm,sxjscience/tvm,sxjscience/tvm,Huyuwei/tvm,tqchen/tvm,sxjscience/tvm,dmlc/tvm,Laurawly/tvm-1,sxjscience/tvm,tqchen/tvm,tqchen/tvm,dmlc/tvm,Laurawly/tvm-1,Huyuwei/tvm,Huyuwei/tvm,sxjscience/tvm,Laurawly/tvm-1,Huyuwei/tvm,tqchen/tvm,dmlc/tvm,tqchen/tvm,sxjscience/tvm,dmlc/tvm,Huyuwei/tvm,tqchen/tvm | vta/python/vta/__init__.py | vta/python/vta/__init__.py | """VTA Package is a TVM backend extension to support VTA hardwares
Besides the compiler toolchain.
It also include utility functions to
configure the hardware Environment and access remote through RPC
"""
from __future__ import absolute_import as _abs
import sys
from .bitstream import get_bitstream_path, download_bitstream
from .environment import get_env, Environment
from .rpc_client import reconfig_runtime, program_fpga
__version__ = "0.1.0"
# do not import nnvm/topi when running vta.exec.rpc_server
# to maintain minimum dependency on the board
if sys.argv[0] not in ("-c", "-m"):
from . import top
from .build_module import build_config, lower, build
from . import graph
| """VTA Package is a TVM backend extension to support VTA hardwares
Besides the compiler toolchain.
It also include utility functions to
configure the hardware Environment and access remote through RPC
"""
from __future__ import absolute_import as _abs
__version__ = "0.1.0"
from .bitstream import get_bitstream_path, download_bitstream
from .environment import get_env, Environment
from .rpc_client import reconfig_runtime, program_fpga
try:
from . import top
from .build_module import build_config, lower, build
from . import graph
except (ImportError, RuntimeError):
pass
| apache-2.0 | Python |
9ddb5f7a31776b9e7e978d2a4fdc015acdc4670f | update dev version after 0.46.0 tag [ci skip] | desihub/desitarget,desihub/desitarget | py/desitarget/_version.py | py/desitarget/_version.py | __version__ = '0.46.0.dev4345'
| __version__ = '0.46.0'
| bsd-3-clause | Python |
7aa47042d22ef1c51b9a545b6cdf6363b5d30df3 | Correct author email | jlaska/pytest-github | pytest_github/__init__.py | pytest_github/__init__.py | """Plugin for py.test that associates tests with github issues using a marker."""
__version__ = "0.3.0"
__author__ = "James Laska"
__author_email__ = "<jlaska@redhat.com>"
| """Plugin for py.test that associates tests with github issues using a marker."""
__version__ = "0.3.0"
__author__ = "James Laska"
__author_email__ = "<jlaska AT redhat.com>"
| mit | Python |
5ae24ac9ca357e34d652a926588a10dedff4d4da | fix to build | weinbe58/QuSpin,weinbe58/QuSpin,weinbe58/QuSpin,weinbe58/QuSpin | quspin/operators/setup.py | quspin/operators/setup.py |
def cython_files():
    """Regenerate C++ sources from every .pyx file in this package."""
    import os,glob,numpy
    from Cython.Build import cythonize
    package_dir = os.path.dirname(os.path.realpath(__file__))
    # Expand any environment variables embedded in the resolved path.
    package_dir = os.path.expandvars(package_dir)
    cython_src = glob.glob(os.path.join(package_dir,"*.pyx"))
    # Headers live next to the sources in _oputils; numpy headers are
    # needed for the ndarray C API.
    include_dirs = [numpy.get_include()]
    include_dirs.append(os.path.join(package_dir,"_oputils"))
    cythonize(cython_src,include_path=include_dirs)
def configuration(parent_package='', top_path=None):
    """numpy.distutils hook describing the `operators` subpackage and its
    `_oputils` C++ extension."""
    from numpy.distutils.misc_util import Configuration
    import os,numpy,sys
    config = Configuration('operators', parent_package, top_path)
    # Render the Cython templates before declaring the extension so the
    # generated _oputils.cpp exists.
    cython_files()
    extra_compile_args=["-fno-strict-aliasing"]
    extra_link_args=[]
    if sys.platform == "darwin":
        # clang on macOS needs C++11 requested explicitly.
        extra_compile_args.append("-std=c++11")
    package_dir = os.path.dirname(os.path.realpath(__file__))
    package_dir = os.path.expandvars(package_dir)
    include_dirs = [numpy.get_include()]
    include_dirs.append(os.path.join(package_dir,"_oputils"))
    # Header dependencies: touching any of these triggers a rebuild.
    depends =[
        os.path.join(package_dir,"_oputils","matvec.h"),
        os.path.join(package_dir,"_oputils","matvecs.h"),
        os.path.join(package_dir,"_oputils","csrmv_merge.h"),
    ]
    src = os.path.join(package_dir,"_oputils.cpp")
    config.add_extension('_oputils',sources=src,include_dirs=include_dirs,
                         extra_compile_args=extra_compile_args,
                         extra_link_args=extra_link_args,
                         depends=depends,
                         language="c++")
    return config
if __name__ == '__main__':
from numpy.distutils.core import setup
import sys
try:
instr = sys.argv[1]
if instr == "build_templates":
cython_files()
else:
setup(**configuration(top_path='').todict())
except IndexError: pass
|
def cython_files():
import os,glob,numpy
from Cython.Build import cythonize
package_dir = os.path.dirname(os.path.realpath(__file__))
package_dir = os.path.expandvars(package_dir)
cython_src = glob.glob(os.path.join(package_dir,"*.pyx"))
include_dirs = [numpy.get_include()]
include_dirs.append(os.path.join(package_dir,"_oputils"))
cythonize(cython_src,include_path=include_dirs)
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
import os,numpy,sys
config = Configuration('operators', parent_package, top_path)
cython_files()
extra_compile_args=["-fno-strict-aliasing"]
extra_link_args=[]
if sys.platform == "darwin":
extra_compile_args.append(["-std=c++11"])
package_dir = os.path.dirname(os.path.realpath(__file__))
package_dir = os.path.expandvars(package_dir)
include_dirs = [numpy.get_include()]
include_dirs.append(os.path.join(package_dir,"_oputils"))
depends =[
os.path.join(package_dir,"_oputils","matvec.h"),
os.path.join(package_dir,"_oputils","matvecs.h"),
os.path.join(package_dir,"_oputils","csrmv_merge.h"),
]
src = os.path.join(package_dir,"_oputils.cpp")
config.add_extension('_oputils',sources=src,include_dirs=include_dirs,
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args,
depends=depends,
language="c++")
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
import sys
try:
instr = sys.argv[1]
if instr == "build_templates":
cython_files()
else:
setup(**configuration(top_path='').todict())
except IndexError: pass
| bsd-3-clause | Python |
b87cfe7ee0c4318eaeeb36030c3500e614cd735e | Remove duplicate config registration | Plexxi/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2 | st2reactor/st2reactor/timer/config.py | st2reactor/st2reactor/timer/config.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from oslo_config import cfg
import st2common.config as common_config
from st2common.constants.system import VERSION_STRING
from st2common.constants.system import DEFAULT_CONFIG_FILE_PATH
CONF = cfg.CONF
def parse_args(args=None):
cfg.CONF(args=args, version=VERSION_STRING,
default_config_files=[DEFAULT_CONFIG_FILE_PATH])
def register_opts():
_register_common_opts()
_register_rules_engine_opts()
def get_logging_config_path():
return cfg.CONF.timersengine.logging
def _register_common_opts():
common_config.register_opts()
def _register_rules_engine_opts():
    """Register timer logging/behavior options with oslo.config."""
    # We want backward compatibility with configuration. So register logging
    # configuration options under ``timer`` section as well as
    # ``timersengine``.
    logging_opts = [
        cfg.StrOpt(
            'logging', default='conf/logging.timersengine.conf',
            help='Location of the logging configuration file.')
    ]
    CONF.register_opts(logging_opts, group='timer')
    CONF.register_opts(logging_opts, group='timersengine')
    timer_opts = [
        cfg.StrOpt(
            'local_timezone', default='America/Los_Angeles',
            help='Timezone pertaining to the location where st2 is run.'),
        cfg.BoolOpt(
            'enable', default=True,
            help='Specify to enable Timer.')
    ]
    CONF.register_opts(timer_opts, group='timer')
    # NOTE(review): logging_opts was already registered for 'timersengine'
    # above, so this re-registration is redundant; it looks like timer_opts
    # was intended here -- confirm before changing.
    CONF.register_opts(logging_opts, group='timersengine')
register_opts()
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from oslo_config import cfg
import st2common.config as common_config
from st2common.constants.system import VERSION_STRING
from st2common.constants.system import DEFAULT_CONFIG_FILE_PATH
common_config.register_opts()
CONF = cfg.CONF
def parse_args(args=None):
cfg.CONF(args=args, version=VERSION_STRING,
default_config_files=[DEFAULT_CONFIG_FILE_PATH])
def register_opts():
_register_common_opts()
_register_rules_engine_opts()
def get_logging_config_path():
return cfg.CONF.timersengine.logging
def _register_common_opts():
common_config.register_opts()
def _register_rules_engine_opts():
# We want backward compatibility with configuration. So register logging configuration options
# under ``timer`` section as well as ``timersengine``.
logging_opts = [
cfg.StrOpt(
'logging', default='conf/logging.timersengine.conf',
help='Location of the logging configuration file.')
]
CONF.register_opts(logging_opts, group='timer')
CONF.register_opts(logging_opts, group='timersengine')
timer_opts = [
cfg.StrOpt(
'local_timezone', default='America/Los_Angeles',
help='Timezone pertaining to the location where st2 is run.'),
cfg.BoolOpt(
'enable', default=True,
help='Specify to enable Timer.')
]
CONF.register_opts(timer_opts, group='timer')
CONF.register_opts(logging_opts, group='timersengine')
register_opts()
| apache-2.0 | Python |
cd081f9ee5677b1e9031e1edfa8e4ef735017f24 | Bump version 0.1.1 | kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting | accounting/__init__.py | accounting/__init__.py | import os
# Use 'final' as the 4th element to indicate
# a full release
VERSION = (0, 1, 1)
def get_short_version():
return '%s.%s' % (VERSION[0], VERSION[1])
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
# Append 3rd digit if > 0
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'accounting/templates/accounting/base.html' can be accessed
# via both 'base.html' and 'accounting/base.html'. This allows Accounting's
# templates to be extended by templates with the same filename
ACCOUNTING_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/accounting')
ACCOUNTING_APPS = (
'accounting',
'accounting.libs',
'accounting.apps.connect',
'accounting.apps.people',
'accounting.apps.books',
'accounting.apps.reports',
# Third party apps that accounting depends on
'bootstrap3',
'django_select2',
)
ACCOUNTING_TEMPLATE_CONTEXT_PROCESSORS = (
'accounting.apps.context_processors.metadata',
'accounting.apps.books.context_processors.organizations',
)
def get_apps():
return ACCOUNTING_APPS
| import os
# Use 'final' as the 4th element to indicate
# a full release
VERSION = (0, 1, 0, 'alpha', 1)
def get_short_version():
return '%s.%s' % (VERSION[0], VERSION[1])
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
# Append 3rd digit if > 0
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
elif VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[3])
if len(VERSION) == 5:
version = '%s %s' % (version, VERSION[4])
return version
# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'accounting/templates/accounting/base.html' can be accessed
# via both 'base.html' and 'accounting/base.html'. This allows Accounting's
# templates to be extended by templates with the same filename
ACCOUNTING_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/accounting')
ACCOUNTING_APPS = (
'accounting',
'accounting.libs',
'accounting.apps.connect',
'accounting.apps.people',
'accounting.apps.books',
'accounting.apps.reports',
# Third party apps that accounting depends on
'bootstrap3',
'django_select2',
)
ACCOUNTING_TEMPLATE_CONTEXT_PROCESSORS = (
'accounting.apps.context_processors.metadata',
'accounting.apps.books.context_processors.organizations',
)
def get_apps():
return ACCOUNTING_APPS
| mit | Python |
7790b8b57aba48127c76bcf2f6ed0176b9b3bd16 | Update __init__.py | markovmodel/adaptivemd,jrossyra/adaptivemd,jrossyra/adaptivemd,markovmodel/adaptivemd,jrossyra/adaptivemd,markovmodel/adaptivemd | adaptivemd/__init__.py | adaptivemd/__init__.py | ##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from __future__ import absolute_import
#from .brain import Brain
# from event import StopEvent, Event, TasksFinished
from .plan import ExecutionPlan
# from condition import Condition, Now, Never
from .file import (File, Directory, Location, JSONFile, MakeDir, Copy,
Transfer, Link, Move, Remove, Action, AddPathAction, FileAction,
FileTransaction, Touch)
from .bundle import (Bundle, SortedBundle, ViewBundle, AndBundle,
BaseBundle, BundleDelegator, FunctionDelegator, LogicBundle,
OrBundle, StoredBundle)
#from .resource import LocalResource
from .configuration import Configuration
from .task import Task, PythonTask, DummyTask
from .project import Project
from .scheduler import Scheduler
from .model import Model
from .generator import TaskGenerator
from .worker import WorkerScheduler, Worker
from .logentry import LogEntry
from .reducer import (ActionParser, BashParser, ChainedParser,
DictFilterParser, PrefixParser, StageParser, StrFilterParser,
StageInParser)
from .engine import (Engine, Trajectory, Frame,
TrajectoryGenerationTask, TrajectoryExtensionTask)
from .analysis import Analysis, DoAnalysis
# specific generators that should be available to the general user
# this simplifies loading objects. Otherwise you need to import them
# manually before they can be loaded
from .engine.openmm import OpenMMEngine
from .analysis.pyemma import PyEMMAAnalysis
from . import util
from .util import DT
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
from .rp.client import Client
| ##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from __future__ import absolute_import
#from .brain import Brain
# from event import StopEvent, Event, TasksFinished
from .plan import ExecutionPlan
# from condition import Condition, Now, Never
from .file import (File, Directory, Location, JSONFile, MakeDir, Copy,
Transfer, Link, Move, Remove, Action, AddPathAction, FileAction,
FileTransaction, Touch)
from .bundle import (Bundle, SortedBundle, ViewBundle, AndBundle,
BaseBundle, BundleDelegator, FunctionDelegator, LogicBundle,
OrBundle, StoredBundle)
#from .resource import LocalResource
from .configuration import Configuration
from .task import Task, PythonTask, DummyTask
from .project import Project
from .scheduler import Scheduler
from .model import Model
from .generator import TaskGenerator
from .worker import WorkerScheduler, Worker
from .logentry import LogEntry
from .reducer import (ActionParser, BashParser, ChainedParser,
DictFilterParser, PrefixParser, StageParser, StrFilterParser,
StageInParser)
from .engine import (Engine, Trajectory, Frame,
TrajectoryGenerationTask, TrajectoryExtensionTask)
from .analysis import Analysis, DoAnalysis
# specific generators that should be available to the general user
# this simplifies loading objects. Otherwise you need to import them
# manually before they can be loaded
from .engine.openmm import OpenMMEngine
from .analysis.pyemma import PyEMMAAnalysis
from . import util
from .util import DT
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
#from .rp.client import Client
| lgpl-2.1 | Python |
783f247ba398395c8d885cd74a339f433b467339 | remove unused import | chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/uk_geo_utils/management/commands/import_cleaned_addresses.py | polling_stations/apps/uk_geo_utils/management/commands/import_cleaned_addresses.py | import os
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
parser.add_argument(
'-t',
'--table',
help='If you have extended the AbstractAddress model, use this flag to specify the table name for your child table',
default='uk_geo_utils_address',
required=False,
)
def handle(self, *args, **kwargs):
self.table_name = kwargs['table']
cursor = connection.cursor()
self.stdout.write("clearing existing data..")
cursor.execute("TRUNCATE TABLE %s;" % (self.table_name))
cleaned_file_path = os.path.abspath(os.path.join(
kwargs['cleaned_ab_path'],
"addressbase_cleaned.csv"
))
self.stdout.write("importing from %s.." % (cleaned_file_path))
cursor.execute("""
COPY {0} (UPRN,address,postcode,location)
FROM '{1}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(self.table_name, cleaned_file_path))
self.stdout.write("...done")
| import os
import glob
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
parser.add_argument(
'-t',
'--table',
help='If you have extended the AbstractAddress model, use this flag to specify the table name for your child table',
default='uk_geo_utils_address',
required=False,
)
def handle(self, *args, **kwargs):
self.table_name = kwargs['table']
cursor = connection.cursor()
self.stdout.write("clearing existing data..")
cursor.execute("TRUNCATE TABLE %s;" % (self.table_name))
cleaned_file_path = os.path.abspath(os.path.join(
kwargs['cleaned_ab_path'],
"addressbase_cleaned.csv"
))
self.stdout.write("importing from %s.." % (cleaned_file_path))
cursor.execute("""
COPY {0} (UPRN,address,postcode,location)
FROM '{1}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(self.table_name, cleaned_file_path))
self.stdout.write("...done")
| bsd-3-clause | Python |
5e2aae6070d60f2149c49e1137ab2a99f3966b3a | Add specific colors for heights | DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix | python/volumeBars.py | python/volumeBars.py | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 4
pi = numpy.pi
barHeights = numpy.array([0, pi / 4, pi / 2, pi * 3 / 4])
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 4
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y > 14
nextFrame.SetPixel(x, y, 255, 0, 0)
else if y > 10
nextFrame.SetPixel(x, y, 200, 200, 0)
else
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2) | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 4
pi = numpy.pi
barHeights = numpy.array([0, pi / 4, pi / 2, pi * 3 / 4])
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 4
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
nextFrame.SetPixel(x, y, randint(0, 255), randint(0, 255), randint(0, 255))
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
| mit | Python |
214d5f7e09e9b5e854e7471c6dc337456f428647 | Add missing ensure_str for PY2 | ChrisRx/quickavro,ChrisRx/quickavro | quickavro/_compat.py | quickavro/_compat.py | # -*- coding: utf-8 -*-
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
default_encoding = "UTF-8"
def with_metaclass(meta, *bases):
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
if PY3:
basestring = (str, bytes)
def ensure_bytes(s):
if type(s) == str:
return bytes(s, default_encoding)
else:
return bytes(s)
def ensure_str(s):
if type(s) == bytes:
return s.decode(default_encoding)
else:
return s
else:
range = xrange
ensure_bytes = lambda s: s
ensure_str = lambda s: s
| # -*- coding: utf-8 -*-
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
default_encoding = "UTF-8"
def with_metaclass(meta, *bases):
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
if PY3:
basestring = (str, bytes)
def ensure_bytes(s):
if type(s) == str:
return bytes(s, default_encoding)
else:
return bytes(s)
def ensure_str(s):
if type(s) == bytes:
return s.decode(default_encoding)
else:
return s
else:
range = xrange
ensure_bytes = lambda s: s
| apache-2.0 | Python |
9e286b72d5ceb9b7242f0bf659bc8f6680d31d3a | Revert default value on the rating field in the OverallRating class. | GeoNode/geonode-ratings,GeoNode/geonode-ratings,GeoNode/geonode-ratings | agon_ratings/models.py | agon_ratings/models.py | import datetime
from decimal import Decimal
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.generic import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from agon_ratings.categories import RATING_CATEGORY_CHOICES
from agon_ratings.managers import OverallRatingManager
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', User)
class OverallRating(models.Model):
object_id = models.IntegerField(db_index=True)
content_type = models.ForeignKey(ContentType)
content_object = GenericForeignKey()
rating = models.DecimalField(decimal_places=1, max_digits=6, null=True)
category = models.IntegerField(null=True, choices=RATING_CATEGORY_CHOICES)
objects = OverallRatingManager()
class Meta:
unique_together = [
("object_id", "content_type", "category"),
]
def update(self):
self.rating = Rating.objects.filter(
overall_rating = self
).aggregate(r = models.Avg("rating"))["r"]
self.rating = Decimal(str(self.rating or "0"))
self.save()
class Rating(models.Model):
overall_rating = models.ForeignKey(OverallRating, null = True, related_name = "ratings")
object_id = models.IntegerField(db_index=True)
content_type = models.ForeignKey(ContentType)
content_object = GenericForeignKey()
user = models.ForeignKey(USER_MODEL)
rating = models.IntegerField()
timestamp = models.DateTimeField(default=datetime.datetime.now)
category = models.IntegerField(null=True, choices=RATING_CATEGORY_CHOICES)
class Meta:
unique_together = [
("object_id", "content_type", "user", "category"),
]
def __unicode__(self):
return unicode(self.rating)
| import datetime
from decimal import Decimal
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.generic import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from agon_ratings.categories import RATING_CATEGORY_CHOICES
from agon_ratings.managers import OverallRatingManager
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', User)
class OverallRating(models.Model):
object_id = models.IntegerField(db_index=True)
content_type = models.ForeignKey(ContentType)
content_object = GenericForeignKey()
rating = models.DecimalField(decimal_places=1, max_digits=6, null=True, default=Decimal(0.00))
category = models.IntegerField(null=True, choices=RATING_CATEGORY_CHOICES)
objects = OverallRatingManager()
class Meta:
unique_together = [
("object_id", "content_type", "category"),
]
def update(self):
self.rating = Rating.objects.filter(
overall_rating = self
).aggregate(r = models.Avg("rating"))["r"]
self.rating = Decimal(str(self.rating or "0"))
self.save()
class Rating(models.Model):
overall_rating = models.ForeignKey(OverallRating, null = True, related_name = "ratings")
object_id = models.IntegerField(db_index=True)
content_type = models.ForeignKey(ContentType)
content_object = GenericForeignKey()
user = models.ForeignKey(USER_MODEL)
rating = models.IntegerField()
timestamp = models.DateTimeField(default=datetime.datetime.now)
category = models.IntegerField(null=True, choices=RATING_CATEGORY_CHOICES)
class Meta:
unique_together = [
("object_id", "content_type", "user", "category"),
]
def __unicode__(self):
return unicode(self.rating)
| bsd-3-clause | Python |
d21b8e40c21cb701aada3b0e8e39bd583589f71c | update version to 0.2.2-dev (#129) | GoogleCloudPlatform/cloud-sql-python-connector,GoogleCloudPlatform/cloud-sql-python-connector | google/cloud/sql/connector/version.py | google/cloud/sql/connector/version.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.2.2-dev"
| # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.2.1"
| apache-2.0 | Python |
9804fbb527fb053296533c5cd4604afcb7043a28 | Add unit_of_measurement to various Transmission sensors (#30037) | soldag/home-assistant,balloob/home-assistant,kennedyshead/home-assistant,FreekingDean/home-assistant,robbiet480/home-assistant,leppa/home-assistant,pschmitt/home-assistant,sdague/home-assistant,sdague/home-assistant,tboyce021/home-assistant,jawilson/home-assistant,balloob/home-assistant,lukas-hetzenecker/home-assistant,titilambert/home-assistant,toddeye/home-assistant,turbokongen/home-assistant,nkgilley/home-assistant,Danielhiversen/home-assistant,partofthething/home-assistant,tboyce1/home-assistant,leppa/home-assistant,lukas-hetzenecker/home-assistant,tchellomello/home-assistant,pschmitt/home-assistant,aronsky/home-assistant,rohitranjan1991/home-assistant,tchellomello/home-assistant,FreekingDean/home-assistant,mKeRix/home-assistant,balloob/home-assistant,mezz64/home-assistant,rohitranjan1991/home-assistant,adrienbrault/home-assistant,partofthething/home-assistant,postlund/home-assistant,Teagan42/home-assistant,tboyce1/home-assistant,turbokongen/home-assistant,Teagan42/home-assistant,aronsky/home-assistant,sander76/home-assistant,mKeRix/home-assistant,w1ll1am23/home-assistant,rohitranjan1991/home-assistant,mKeRix/home-assistant,Danielhiversen/home-assistant,mezz64/home-assistant,adrienbrault/home-assistant,jawilson/home-assistant,tboyce1/home-assistant,home-assistant/home-assistant,kennedyshead/home-assistant,toddeye/home-assistant,w1ll1am23/home-assistant,home-assistant/home-assistant,robbiet480/home-assistant,tboyce1/home-assistant,GenericStudent/home-assistant,titilambert/home-assistant,soldag/home-assistant,postlund/home-assistant,GenericStudent/home-assistant,mKeRix/home-assistant,nkgilley/home-assistant,sander76/home-assistant,tboyce021/home-assistant | homeassistant/components/transmission/const.py | homeassistant/components/transmission/const.py | """Constants for the Transmission Bittorent Client component."""
DOMAIN = "transmission"
SENSOR_TYPES = {
"active_torrents": ["Active Torrents", "Torrents"],
"current_status": ["Status", None],
"download_speed": ["Down Speed", "MB/s"],
"paused_torrents": ["Paused Torrents", "Torrents"],
"total_torrents": ["Total Torrents", "Torrents"],
"upload_speed": ["Up Speed", "MB/s"],
"completed_torrents": ["Completed Torrents", "Torrents"],
"started_torrents": ["Started Torrents", "Torrents"],
}
SWITCH_TYPES = {"on_off": "Switch", "turtle_mode": "Turtle Mode"}
DEFAULT_NAME = "Transmission"
DEFAULT_PORT = 9091
DEFAULT_SCAN_INTERVAL = 120
STATE_ATTR_TORRENT_INFO = "torrent_info"
ATTR_TORRENT = "torrent"
SERVICE_ADD_TORRENT = "add_torrent"
DATA_UPDATED = "transmission_data_updated"
| """Constants for the Transmission Bittorent Client component."""
DOMAIN = "transmission"
SENSOR_TYPES = {
"active_torrents": ["Active Torrents", None],
"current_status": ["Status", None],
"download_speed": ["Down Speed", "MB/s"],
"paused_torrents": ["Paused Torrents", None],
"total_torrents": ["Total Torrents", None],
"upload_speed": ["Up Speed", "MB/s"],
"completed_torrents": ["Completed Torrents", None],
"started_torrents": ["Started Torrents", None],
}
SWITCH_TYPES = {"on_off": "Switch", "turtle_mode": "Turtle Mode"}
DEFAULT_NAME = "Transmission"
DEFAULT_PORT = 9091
DEFAULT_SCAN_INTERVAL = 120
STATE_ATTR_TORRENT_INFO = "torrent_info"
ATTR_TORRENT = "torrent"
SERVICE_ADD_TORRENT = "add_torrent"
DATA_UPDATED = "transmission_data_updated"
| apache-2.0 | Python |
f8be56c94159814f9261bcafb6a75e8b2e2153f2 | add 2.4.2 (#27094) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/r-withr/package.py | var/spack/repos/builtin/packages/r-withr/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RWithr(RPackage):
"""Run Code 'With' Temporarily Modified Global State
A set of functions to run code 'with' safely and temporarily modified
global state. Many of these functions were originally a part of the
'devtools' package, this provides a simple package with limited
dependencies to provide access to these functions."""
homepage = "https://github.com/jimhester/withr"
cran = "withr"
version('2.4.2', sha256='48f96a4cb780cf6fd5fbbea1f1eb04ea3102d7a4a644cae1ed1e91139dcbbac8')
version('2.4.0', sha256='ede4cdc7e4d17e0ad24afc9fb940cba46fac4421d3a39281e9918377d73714f8')
version('2.2.0', sha256='4c21e51cf48f8c281ddd5f5ec358ac446df3c982104fd00bfe62d9259d73b582')
version('2.1.2', sha256='41366f777d8adb83d0bdbac1392a1ab118b36217ca648d3bb9db763aa7ff4686')
version('1.0.2', sha256='2391545020adc4256ee7c2e31c30ff6f688f0b6032e355e1ce8f468cab455f10')
version('1.0.1', sha256='7e245fdd17d290ff9e7c237159804dd06e1c6a3efe7855ed641eb0765a1e727d')
depends_on('r@3.0.2:', type=('build', 'run'))
depends_on('r@3.2:', when='@2.2:', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RWithr(RPackage):
"""Run Code 'With' Temporarily Modified Global State
A set of functions to run code 'with' safely and temporarily modified
global state. Many of these functions were originally a part of the
'devtools' package, this provides a simple package with limited
dependencies to provide access to these functions."""
homepage = "https://github.com/jimhester/withr"
url = "https://cloud.r-project.org/src/contrib/withr_1.0.2.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/withr"
version('2.4.0', sha256='ede4cdc7e4d17e0ad24afc9fb940cba46fac4421d3a39281e9918377d73714f8')
version('2.2.0', sha256='4c21e51cf48f8c281ddd5f5ec358ac446df3c982104fd00bfe62d9259d73b582')
version('2.1.2', sha256='41366f777d8adb83d0bdbac1392a1ab118b36217ca648d3bb9db763aa7ff4686')
version('1.0.2', sha256='2391545020adc4256ee7c2e31c30ff6f688f0b6032e355e1ce8f468cab455f10')
version('1.0.1', sha256='7e245fdd17d290ff9e7c237159804dd06e1c6a3efe7855ed641eb0765a1e727d')
depends_on('r@3.0.2:', type=('build', 'run'))
depends_on('r@3.2:', when='@2.2:', type=('build', 'run'))
| lgpl-2.1 | Python |
041f96b349b3291cce412a855b9cefe5cdffc76b | Update the texlive distro digest. (#2679) | krafczyk/spack,mfherbst/spack,lgarren/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,iulian787/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,mfherbst/spack,TheTimmy/spack,mfherbst/spack,skosukhin/spack,tmerrick1/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,lgarren/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,matthiasdiener/spack,krafczyk/spack,LLNL/spack,skosukhin/spack,matthiasdiener/spack,skosukhin/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,TheTimmy/spack,LLNL/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,mfherbst/spack,tmerrick1/spack,EmreAtes/spack,tmerrick1/spack,EmreAtes/spack,lgarren/spack,EmreAtes/spack,mfherbst/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/texlive/package.py | var/spack/repos/builtin/packages/texlive/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class Texlive(Package):
"""TeX Live is a free software distribution for the TeX typesetting
system"""
homepage = "http://www.tug.org/texlive"
# Pull from specific site because the texlive mirrors do not all
# update in synchrony.
#
# BEWARE: TexLive updates their installs frequently (probably why
# they call it *Live*...). There is no good way to provide a
# repeatable install of the package. We try to keep up with the
# digest values, but don't be surprised if this package is
# briefly unbuildable.
#
version('live', '01461ec2cc49fe0b14812eb67abbea46',
url="http://ctan.math.utah.edu/ctan/tex-archive/systems/texlive/tlnet/install-tl-unx.tar.gz")
# There does not seem to be a complete list of schemes.
# Examples include:
# full scheme (everything)
# medium scheme (small + more packages and languages)
# small scheme (basic + xetex, metapost, a few languages)
# basic scheme (plain and latex)
# minimal scheme (plain only)
# See:
# https://www.tug.org/texlive/doc/texlive-en/texlive-en.html#x1-25025r6
variant('scheme', default="small",
description='Package subset to install (e.g. full, small, basic)')
depends_on('perl', type='build')
def install(self, spec, prefix):
env = os.environ
env['TEXLIVE_INSTALL_PREFIX'] = prefix
perl = which('perl')
scheme = spec.variants['scheme'].value
perl('./install-tl', '-scheme', scheme,
'-portable', '-profile', '/dev/null')
| ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class Texlive(Package):
"""TeX Live is a free software distribution for the TeX typesetting
system"""
homepage = "http://www.tug.org/texlive"
# Pull from specific site because the texlive mirrors do not all
# update in synchrony.
#
# BEWARE: TexLive updates their installs frequently (probably why
# they call it *Live*...). There is no good way to provide a
# repeatable install of the package. We try to keep up with the
# digest values, but don't be surprised if this package is
# briefly unbuildable.
#
version('live', '1962b756794827467b50ed4da94d8ee8',
url="http://ctan.math.utah.edu/ctan/tex-archive/systems/texlive/tlnet/install-tl-unx.tar.gz")
# There does not seem to be a complete list of schemes.
# Examples include:
# full scheme (everything)
# medium scheme (small + more packages and languages)
# small scheme (basic + xetex, metapost, a few languages)
# basic scheme (plain and latex)
# minimal scheme (plain only)
# See:
# https://www.tug.org/texlive/doc/texlive-en/texlive-en.html#x1-25025r6
variant('scheme', default="small",
description='Package subset to install (e.g. full, small, basic)')
depends_on('perl', type='build')
def install(self, spec, prefix):
env = os.environ
env['TEXLIVE_INSTALL_PREFIX'] = prefix
perl = which('perl')
scheme = spec.variants['scheme'].value
perl('./install-tl', '-scheme', scheme,
'-portable', '-profile', '/dev/null')
| lgpl-2.1 | Python |
046861c4ef2825cebc2aacb82343537a5cc11208 | add v 1.4.4 (#21981) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/xkbcomp/package.py | var/spack/repos/builtin/packages/xkbcomp/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xkbcomp(AutotoolsPackage, XorgPackage):
"""The X Keyboard (XKB) Extension essentially replaces the core protocol
definition of a keyboard. The extension makes it possible to specify
clearly and explicitly most aspects of keyboard behaviour on a per-key
basis, and to track more closely the logical and physical state of a
keyboard. It also includes a number of keyboard controls designed to
make keyboards more accessible to people with physical impairments."""
homepage = "https://www.x.org/wiki/XKB/"
xorg_mirror_path = "app/xkbcomp-1.3.1.tar.gz"
version('1.4.4', sha256='159fba6b62ef4a3fb16ef7fc4eb4fc26f3888652471ceb604c495783dda020bc')
version('1.3.1', sha256='018e83a922430652d4bc3f3db610d2296e618c76c9b3fbcdccde975aeb655749')
depends_on('libx11')
depends_on('libxkbfile')
depends_on('xproto@7.0.17:')
depends_on('bison', type='build')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xkbcomp(AutotoolsPackage, XorgPackage):
"""The X Keyboard (XKB) Extension essentially replaces the core protocol
definition of a keyboard. The extension makes it possible to specify
clearly and explicitly most aspects of keyboard behaviour on a per-key
basis, and to track more closely the logical and physical state of a
keyboard. It also includes a number of keyboard controls designed to
make keyboards more accessible to people with physical impairments."""
homepage = "https://www.x.org/wiki/XKB/"
xorg_mirror_path = "app/xkbcomp-1.3.1.tar.gz"
version('1.3.1', sha256='018e83a922430652d4bc3f3db610d2296e618c76c9b3fbcdccde975aeb655749')
depends_on('libx11')
depends_on('libxkbfile')
depends_on('xproto@7.0.17:')
depends_on('bison', type='build')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
| lgpl-2.1 | Python |
74948ad0998a61e6d0d87614d902b13442ebc9f9 | Update makeid_file.py | nausheenfatma/embeddings,nausheenfatma/embeddings | gsoc2017-nausheen/HOLE/makeid_file.py | gsoc2017-nausheen/HOLE/makeid_file.py | f=open("standard_data/train.txt","r")
id_dict={}
relation_dict={}
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
f=open("standard_data/test.txt","r")
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
f=open("standard_data/valid.txt","r")
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
id_count=0
fw=open("standard_data/entity2id.txt","w")
for key in id_dict:
fw.write(key+"\t"+str(id_count)+"\n")
id_count=id_count+1
fw.close()
id_count=0
fw=open("standard_data/relation2id.txt","w")
for key in relation_dict:
fw.write(key+"\t"+str(id_count)+"\n")
id_count=id_count+1
fw.close()
print "Done"
| f=open("train.txt","r")
id_dict={}
relation_dict={}
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
f=open("test.txt","r")
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
f=open("valid.txt","r")
for line in f:
line=line.strip()
line_tokens=line.split()
try:
a=id_dict[line_tokens[0]]
except:
id_dict[line_tokens[0]]=0
try:
a=id_dict[line_tokens[2]]
except:
id_dict[line_tokens[2]]=0
try:
a=relation_dict[line_tokens[1]]
except:
relation_dict[line_tokens[1]]=0
id_count=0
fw=open("entity2id.txt","w")
for key in id_dict:
fw.write(key+"\t"+str(id_count)+"\n")
id_count=id_count+1
fw.close()
id_count=0
fw=open("relation2id.txt","w")
for key in relation_dict:
fw.write(key+"\t"+str(id_count)+"\n")
id_count=id_count+1
fw.close()
print "Done"
| apache-2.0 | Python |
ac492068d99f52ec5f0852b31dd7156876d4e6a2 | make imports more convenient | aliutkus/commonfate | commonfate/__init__.py | commonfate/__init__.py | from commonfate import decompose, model, transform
__all__ = ["decompose", "model", "transform"]
short_version = '0.1'
version = '0.1.0'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Version info"""
short_version = '0.1'
version = '0.1.0'
| bsd-3-clause | Python |
45c4efcd229c94603e287cb242ddb04c9963d758 | use 2.0 api | fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools | databricks/databricks-jobs.py | databricks/databricks-jobs.py | import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
API_VERSION = '/api/2.0'
print("----------RUNNING JOB " + JOB_NAME )
# Run Get request with api_command param
# /jobs/list/ with api 2.0 returns all jobs, 2.1 does not
def getRequest(api_command, params={}):
if api_command == '/jobs/list':
url = "https://{}{}{}".format(INSTANCE_ID, '/api/2.0', api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
)
return response
# Start a job run
def postRequest(api_command, params):
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.post(
url = url,
json = params,
)
return response
# Get all job names and jobID's and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()['jobs']:
tempDict[job['settings']['name']] = job['job_id']
return tempDict
jobs = getJobIds(getRequest('/jobs/list'))
if( JOB_NAME in jobs ):
print("JOB ID: " + str(jobs[JOB_NAME]))
job_params = {'job_id': jobs[JOB_NAME]}
startJob = postRequest('/jobs/run-now', job_params)
run_id = startJob.json()['run_id']
run_params = { 'run_id' : run_id }
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
#Wait for job to finish running
while(job_status == "RUNNING" or job_status == "PENDING"):
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
finishedJob = getRequest('/jobs/runs/get-output', run_params)
print(json.dumps(json.loads(finishedJob.text), indent = 2))
run_url = finishedJob.json()["metadata"]["run_page_url"].replace("webapp", INSTANCE_ID+"/")
print("---------------SEE JOB RUN HERE: " + run_url)
else:
raise ValueError(sys.argv[2] + " is not a job in databricks")
| import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
API_VERSION = '/api/2.1'
print("----------RUNNING JOB " + JOB_NAME )
# Run Get request with api_command param
# /jobs/list/ with api 2.0 returns all jobs, 2.1 does not
def getRequest(api_command, params={}):
if api_command == '/jobs/list':
url = "https://{}{}{}".format(INSTANCE_ID, '/api/2.0', api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
)
return response
# Start a job run
def postRequest(api_command, params):
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.post(
url = url,
json = params,
)
return response
# Get all job names and jobID's and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()['jobs']:
tempDict[job['settings']['name']] = job['job_id']
return tempDict
jobs = getJobIds(getRequest('/jobs/list'))
if( JOB_NAME in jobs ):
print("JOB ID: " + str(jobs[JOB_NAME]))
job_params = {'job_id': jobs[JOB_NAME]}
startJob = postRequest('/jobs/run-now', job_params)
run_id = startJob.json()['run_id']
run_params = { 'run_id' : run_id }
tasks = getRequest('/jobs/runs/get', run_params).json()["tasks"]
print(tasks)
job_status = ""
for x in tasks:
if x["run_id"] == run_id:
job_status = x.json()["state"]["life_cycle_state"]
#Wait for job to finish running
while(job_status == "RUNNING" or job_status == "PENDING"):
job_status = getRequest('/jobs/runs/get-output', run_params).json()["metadata"]["state"]["life_cycle_state"]
finishedJob = getRequest('/jobs/runs/get-output', run_params)
print(json.dumps(json.loads(finishedJob.text), indent = 2))
run_url = finishedJob.json()["metadata"]["run_page_url"].replace("webapp", INSTANCE_ID+"/")
print("---------------SEE JOB RUN HERE: " + run_url)
else:
raise ValueError(sys.argv[2] + " is not a job in databricks")
| cc0-1.0 | Python |
49ad9f30982b2b7afcdc8704f317de2e329f594e | Hide paramiko logs | danielfrg/datasciencebox,danielfrg/datasciencebox,danielfrg/datasciencebox,danielfrg/datasciencebox | datasciencebox/core/logger.py | datasciencebox/core/logger.py | from __future__ import absolute_import, division, print_function
import sys
import logging
import logging.handlers
# Hide messages if we log before setting up handler
logging.root.manager.emittedNoHandlerWarning = True
logging.getLogger("paramiko").setLevel(logging.WARNING)
def getLogger():
return logging.getLogger("datasciencebox")
def setup_logging(log_level=logging.DEBUG):
logger = logging.getLogger("datasciencebox")
logger.setLevel(log_level)
logger.propagate = False
console_handler = logging.StreamHandler(sys.stdout)
console_formatter = logging.Formatter("%(levelname)s: %(message)s")
console_handler.setFormatter(console_formatter)
console_handler.setLevel(log_level)
add_handler = True
for handle in logger.handlers:
if getattr(handle, "stream", None) == sys.stdout:
add_handler = False
break
if add_handler:
logger.addHandler(console_handler)
| from __future__ import absolute_import, division, print_function
import sys
import logging
import logging.handlers
# Hide messages if we log before setting up handler
logging.root.manager.emittedNoHandlerWarning = True
def getLogger():
return logging.getLogger("datasciencebox")
def setup_logging(log_level=logging.DEBUG):
logger = logging.getLogger("datasciencebox")
logger.setLevel(log_level)
logger.propagate = False
console_handler = logging.StreamHandler(sys.stdout)
console_formatter = logging.Formatter("%(levelname)s: %(message)s")
console_handler.setFormatter(console_formatter)
console_handler.setLevel(log_level)
add_handler = True
for handle in logger.handlers:
if getattr(handle, "stream", None) == sys.stdout:
add_handler = False
break
if add_handler:
logger.addHandler(console_handler)
| apache-2.0 | Python |
38f7074e4eaa6d87cbcd433c044a57408c34c182 | Update request.py | slawek87/yql-finance | yql/request.py | yql/request.py | import requests
class Request(object):
"""Class is responsible for prepare request query and sends it to YQL Yahoo API."""
parameters = {
'q': '',
'format': 'json',
'diagnostics': 'false',
'env': 'store://datatables.org/alltableswithkeys',
'callback': ''
}
api = 'https://query.yahooapis.com/v1/public/yql'
def prepare_query(self, symbol, start_date, end_date):
"""Method returns prepared request query for Yahoo YQL API."""
query = \
'select * from yahoo.finance.historicaldata where symbol = "%s" and startDate = "%s" and endDate = "%s"' \
% (symbol, start_date, end_date)
return query
def send(self, symbol, start_date, end_date):
"""Method sends request to Yahoo YQL API."""
query = self.prepare_query(symbol, start_date, end_date)
self.parameters['q'] = query
response = requests.get(self.api, params=self.parameters).json()
results = response['query']['results']['quote']
return results
| import requests
class Request(object):
"""Class is responsible for prepare request query and send reqest to YQL Yahoo API."""
parameters = {
'q': '',
'format': 'json',
'diagnostics': 'false',
'env': 'store://datatables.org/alltableswithkeys',
'callback': ''
}
api = 'https://query.yahooapis.com/v1/public/yql'
def prepare_query(self, symbol, start_date, end_date):
"""Method returns prepared request query for Yahoo YQL API."""
query = \
'select * from yahoo.finance.historicaldata where symbol = "%s" and startDate = "%s" and endDate = "%s"' \
% (symbol, start_date, end_date)
return query
def send(self, symbol, start_date, end_date):
"""Method sends request to Yahoo YQL API."""
query = self.prepare_query(symbol, start_date, end_date)
self.parameters['q'] = query
response = requests.get(self.api, params=self.parameters).json()
results = response['query']['results']['quote']
return results
| bsd-3-clause | Python |
e6303ec411181a6f593caa56de2d8dca643a9f0d | Update denorm command | barberscore/barberscore-api,dbinetti/barberscore,dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore | project/apps/api/management/commands/denormalize.py | project/apps/api/management/commands/denormalize.py | from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
Group,
Person,
Singer,
Director,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
gs = Group.objects.all()
for g in gs:
g.save()
rs = Person.objects.all()
for r in rs:
r.save()
ss = Singer.objects.all()
for s in ss:
s.save()
ds = Director.objects.all()
for d in ds:
d.save()
return "Done"
| from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Group,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
cs = Contestant.objects.all()
for c in cs:
c.save()
ts = Contest.objects.all()
for t in ts:
t.save()
vs = Convention.objects.all()
for v in vs:
v.save()
gs = Group.objects.all()
for g in gs:
g.save()
return "Done"
| bsd-2-clause | Python |
87537f552eadda62839db4a1cf3220575a568db2 | test against the return value of get_holiday instead of the function itself | bmintz/reactor-bot | reactor_bot/emoji.py | reactor_bot/emoji.py | #!/usr/bin/env python3
# encoding: utf-8
import re
import string
from datetime import date
def get_poll_emoji(message):
"""generate the proper emoji to react to any poll message"""
if message.count('\n') > 0:
# ignore the first line, which is the command line
for line in message.split('\n')[1:]:
if not line:
continue
yield parse_starting_emoji(line)
else:
yield from ('👍', '👎')
# no matter what, not knowing is always an option
# TODO make this configurable anyway
yield get_shrug_emoji()
def parse_starting_emoji(line):
"""find text/emoji at the beginning of a line
and convert it to proper emoji"""
return parse_emoji(extract_emoji(line))
def extract_emoji(line):
"""extract *unparsed* emoji from the beginning of a line
the emoji may be separated by either ')' or whitespace"""
return line.split(')')[0].split()[0].strip()
def parse_emoji(text):
"""convert text to a corresponding similar emoji
text should be a single character, unless it's a server custom emoji
or a flag emoji
parse_emoji is undefined if text does not meet these conditions
"""
# match server emoji
custom_emoji_match = re.search(r'^<(:[\w_]*:\d*)>', text)
if custom_emoji_match:
# ignore the <> on either side
return custom_emoji_match.group(1)
elif text in string.ascii_letters:
return get_letter_emoji(text.upper())
elif text in string.digits:
return get_digit_emoji(text)
else:
# if not letters or digits, it's probably an emoji anyway
return text
def get_letter_emoji(letter: str):
if letter == 'B' and _get_holiday() == 'April Fools':
return '🅱'
start = ord('🇦')
# position in alphabet
letter_index = ord(letter) - ord('A')
return chr(start + letter_index)
def get_digit_emoji(digit: str):
return digit + '\N{combining enclosing keycap}'
def get_shrug_emoji():
shrug_emoji = {
'April Fools': '🦑',
'Halloween': '\N{jack-o-lantern}',
}
return shrug_emoji.get(_get_holiday(), '🤷')
def _get_holiday():
today = date.today()
holidays = {
(4, 1): 'April Fools',
(10, 31): 'Halloween',
}
return holidays.get((today.month, today.day))
| #!/usr/bin/env python3
# encoding: utf-8
import re
import string
from datetime import date
def get_poll_emoji(message):
"""generate the proper emoji to react to any poll message"""
if message.count('\n') > 0:
# ignore the first line, which is the command line
for line in message.split('\n')[1:]:
if not line:
continue
yield parse_starting_emoji(line)
else:
yield from ('👍', '👎')
# no matter what, not knowing is always an option
# TODO make this configurable anyway
yield get_shrug_emoji()
def parse_starting_emoji(line):
"""find text/emoji at the beginning of a line
and convert it to proper emoji"""
return parse_emoji(extract_emoji(line))
def extract_emoji(line):
"""extract *unparsed* emoji from the beginning of a line
the emoji may be separated by either ')' or whitespace"""
return line.split(')')[0].split()[0].strip()
def parse_emoji(text):
"""convert text to a corresponding similar emoji
text should be a single character, unless it's a server custom emoji
or a flag emoji
parse_emoji is undefined if text does not meet these conditions
"""
# match server emoji
custom_emoji_match = re.search(r'^<(:[\w_]*:\d*)>', text)
if custom_emoji_match:
# ignore the <> on either side
return custom_emoji_match.group(1)
elif text in string.ascii_letters:
return get_letter_emoji(text.upper())
elif text in string.digits:
return get_digit_emoji(text)
else:
# if not letters or digits, it's probably an emoji anyway
return text
def get_letter_emoji(letter: str):
if letter == 'B' and _get_holiday == 'April Fools':
return '🅱'
start = ord('🇦')
# position in alphabet
letter_index = ord(letter) - ord('A')
return chr(start + letter_index)
def get_digit_emoji(digit: str):
return digit + '\N{combining enclosing keycap}'
def get_shrug_emoji():
shrug_emoji = {
'April Fools': '🦑',
'Halloween': '\N{jack-o-lantern}',
}
return shrug_emoji.get(_get_holiday(), '🤷')
def _get_holiday():
today = date.today()
holidays = {
(4, 1): 'April Fools',
(10, 31): 'Halloween',
}
return holidays.get((today.month, today.day))
| mit | Python |
7d177e9d8dbb90226174cf2b433ffd31faff79b5 | Update Customer to inherit AbstractResource | aroncds/pagarme-python,pbassut/pagarme-python,mbodock/pagarme-python,pagarme/pagarme-python | pagarme/customer.py | pagarme/customer.py | # encoding: utf-8
from __future__ import unicode_literals
from .resource import AbstractResource
class Customer(AbstractResource):
def __init__(self, api_key=None, name=None, document_number=None, email=None,
address_street=None, address_neighborhood=None,
address_zipcode=None, address_street_number=None,
address_complementary=None, phone_ddd=None, phone_number=None, **kwargs):
address_zipcode = address_zipcode.replace('.', '').replace('-', '') if address_zipcode else None
document_number = document_number.replace('.', '').replace('-', '') if document_number else None
self.data = {
'api_key': api_key,
'name': name,
'document_number': document_number,
'email': email,
'address_street': address_street,
'address_neighborhood': address_neighborhood,
'address_zipcode': address_zipcode,
'address_street_number': address_street_number,
'address_complementary': address_complementary,
'phone_ddd': phone_ddd,
'phone_number': phone_number,
}
self.data.update(kwargs)
def get_anti_fraud_data(self):
d = {}
for key, value in self.data.items():
if value is None:
continue
elif 'address' in key:
new_key = 'customer[address][{key}]'.format(key=key.replace('address_', ''))
elif 'phone' in key:
new_key = 'customer[phone][{key}]'.format(key=key.replace('phone_', ''))
else:
new_key = 'customer[{key}]'.format(key=key)
d[new_key] = value
return d
| # encoding: utf-8
from __future__ import unicode_literals
class Customer(object):
def __init__(self, name=None, document_number=None, email=None,
address_street=None, address_neighborhood=None,
address_zipcode=None, address_street_number=None,
address_complementary=None, phone_ddd=None, phone_number=None):
address_zipcode = address_zipcode.replace('.', '').replace('-', '') if address_zipcode else None
document_number = document_number.replace('.', '').replace('-', '') if document_number else None
self.data = {
'name': name,
'document_number': document_number,
'email': email,
'address_street': address_street,
'address_neighborhood': address_neighborhood,
'address_zipcode': address_zipcode,
'address_street_number': address_street_number,
'address_complementary': address_complementary,
'phone_ddd': phone_ddd,
'phone_number': phone_number,
}
def get_anti_fraud_data(self):
d = {}
for key, value in self.data.items():
if value is None:
continue
elif 'address' in key:
new_key = 'customer[address][{key}]'.format(key=key.replace('address_', ''))
elif 'phone' in key:
new_key = 'customer[phone][{key}]'.format(key=key.replace('phone_', ''))
else:
new_key = 'customer[{key}]'.format(key=key)
d[new_key] = value
return d
| mit | Python |
62a49d1f05b335779fd2aad2413cf84d656a869a | add Session error handle -> re_start session | DongjunLee/kino-bot | kino/bot.py | kino/bot.py | #coding: UTF-8
import asyncio
import websockets
import slack
import utils
# Send a message to channel (init)
slackbot = slack.SlackerAdapter()
config = utils.Config()
MASTER_NAME = config.kino["MASTER_NAME"]
BOT_NAME = config.kino["BOT_NAME"]
hello_text = "{}님 안녕하세요! \n저는 개인비서 {} 라고 합니다.\n반갑습니다.".format(MASTER_NAME, BOT_NAME)
slackbot.send_message(text=hello_text)
# Start RTM
endpoint = slackbot.start_real_time_messaging_session()
listener = slack.MsgListener()
logger = utils.Logger().get_logger()
logger.info('start real time messaging session!')
def start_session():
async def execute_bot():
ws = await websockets.connect(endpoint)
while True:
message_json = await ws.recv()
listener.handle_only_message(message_json)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
asyncio.get_event_loop().run_until_complete(execute_bot())
asyncio.get_event_loop().run_forever()
try:
start_session()
except Exception as e:
logger.error("Session Error: ", e)
start_session()
| #coding: UTF-8
import asyncio
import websockets
import slack
import utils
# Send a message to channel (init)
slackbot = slack.SlackerAdapter()
config = utils.Config()
MASTER_NAME = config.kino["MASTER_NAME"]
BOT_NAME = config.kino["BOT_NAME"]
hello_text = "{}님 안녕하세요! \n저는 {}님의 개인비서 {}입니다.\n반갑습니다.".format(MASTER_NAME, MASTER_NAME, BOT_NAME)
slackbot.send_message(text=hello_text)
# Start RTM
endpoint = slackbot.start_real_time_messaging_session()
listener = slack.MsgListener()
logger = utils.Logger().get_logger()
logger.info('start real time messaging session!')
async def execute_bot():
ws = await websockets.connect(endpoint)
while True:
message_json = await ws.recv()
listener.handle_only_message(message_json)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
asyncio.get_event_loop().run_until_complete(execute_bot())
asyncio.get_event_loop().run_forever()
| mit | Python |
a88eb2c7fc2c2d875836f0a4c201ede0c082aceb | Update the test model definitions. | affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable,mlavin/django-selectable | selectable/tests/__init__.py | selectable/tests/__init__.py | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from ..base import ModelLookup
from ..registry import registry
@python_2_unicode_compatible
class Thing(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class OtherThing(models.Model):
name = models.CharField(max_length=100)
thing = models.ForeignKey(Thing)
def __str__(self):
return self.name
@python_2_unicode_compatible
class ManyThing(models.Model):
name = models.CharField(max_length=100)
things = models.ManyToManyField(Thing)
def __str__(self):
return self.name
class ThingLookup(ModelLookup):
model = Thing
search_fields = ('name__icontains', )
registry.register(ThingLookup)
from .test_base import *
from .test_decorators import *
from .test_fields import *
from .test_functional import *
from .test_forms import *
from .test_templatetags import *
from .test_views import *
from .test_widgets import *
| from django.db import models
from ..base import ModelLookup
from ..registry import registry
class Thing(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class OtherThing(models.Model):
name = models.CharField(max_length=100)
thing = models.ForeignKey(Thing)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class ManyThing(models.Model):
name = models.CharField(max_length=100)
things = models.ManyToManyField(Thing)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class ThingLookup(ModelLookup):
model = Thing
search_fields = ('name__icontains', )
registry.register(ThingLookup)
from .test_base import *
from .test_decorators import *
from .test_fields import *
from .test_functional import *
from .test_forms import *
from .test_templatetags import *
from .test_views import *
from .test_widgets import *
| bsd-2-clause | Python |
2578241996f76eda87a769586fcbeab9e32dfda7 | fix default value. test=develop | baidu/Paddle,tensor-tang/Paddle,baidu/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,chengduoZH/Paddle,luotao1/Paddle,luotao1/Paddle,baidu/Paddle,tensor-tang/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,tensor-tang/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,chengduoZH/Paddle,chengduoZH/Paddle,luotao1/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,baidu/Paddle,chengduoZH/Paddle,tensor-tang/Paddle,chengduoZH/Paddle,baidu/Paddle,luotao1/Paddle,PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/test_ir_memory_optimize_transformer.py | python/paddle/fluid/tests/unittests/test_ir_memory_optimize_transformer.py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from timeit import default_timer as timer
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.dataset.wmt16 as wmt16
os.environ['FLAGS_eager_delete_tensor_gb'] = "0.0"
os.environ[
'RECORDIO_FILENAME'] = '/tmp/ir_memory_optimize_transformer.wmt16.recordio'
from test_parallel_executor_transformer import transformer, ModelHyperParams, transformer_model, transformer, prepare_batch_input
from parallel_executor_test_base import TestParallelExecutorBase
# disable temporarily because of timeout.
sys.exit(0)
# NOTE(dzhwinter): test diferent strategy colisions.
# open the eager delete tensor strategy by default.
class TestTransformerWithIR(TestParallelExecutorBase):
@classmethod
def setUpClass(cls):
os.environ['CPU_NUM'] = str(4)
reader = paddle.batch(
wmt16.train(ModelHyperParams.src_vocab_size,
ModelHyperParams.trg_vocab_size),
batch_size=transformer_model.batch_size)
with fluid.recordio_writer.create_recordio_writer(
os.environ.get("RECORDIO_FILENAME")) as writer:
for batch in reader():
for tensor in prepare_batch_input(
batch, ModelHyperParams.src_pad_idx,
ModelHyperParams.trg_pad_idx, ModelHyperParams.n_head):
t = fluid.LoDTensor()
t.set(tensor, fluid.CPUPlace())
writer.append_tensor(t)
writer.complete_append_tensor()
def test_main(self):
if core.is_compiled_with_cuda():
# check python transpiler
self.check_network_convergence(
transformer,
use_cuda=True,
memory_opt=True,
use_ir_memory_optimize=False,
iter=2)
# check IR memory optimize
self.check_network_convergence(
transformer,
use_cuda=True,
memory_opt=False,
use_ir_memory_optimize=True,
iter=2)
if __name__ == '__main__':
unittest.main()
| # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import paddle.fluid as fluid
import paddle.fluid.core as core
os.environ['FLAGS_eager_delete_tensor_gb'] = "0.0"
os.environ[
'RECORDIO_FILENAME'] = '/tmp/ir_memory_optimize_transformer.wmt16.recordio'
from test_parallel_executor_transformer import TestTransformer
from test_parallel_executor_transformer import transformer
# NOTE(dzhwinter): test diferent strategy colisions.
# open the eager delete tensor strategy by default.
class TestTransformerWithIR(TestTransformer):
def test_main(self):
if core.is_compiled_with_cuda():
# check python transpiler
self.check_network_convergence(
transformer,
use_cuda=True,
memory_opt=True,
use_ir_memory_optimize=False)
# check IR memory optimize
self.check_network_convergence(
transformer,
use_cuda=True,
memory_opt=False,
use_ir_memory_optimize=True)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
e3fcead75fc273fe7f69b2d3db7bbf2bc4ba722c | Fix dependency regression (fixes #194). (#196) | SergeyPirogov/testcontainers-python | testcontainers/core/generic.py | testcontainers/core/generic.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_container_is_ready
from deprecation import deprecated
ADDITIONAL_TRANSIENT_ERRORS = []
try:
from sqlalchemy.exc import OperationalError
ADDITIONAL_TRANSIENT_ERRORS.append(OperationalError)
except ImportError:
pass
class DbContainer(DockerContainer):
    """Abstract database container.

    Subclasses implement ``get_connection_url`` and ``_configure``; ``start``
    blocks until a real SQLAlchemy connection can be established.
    """

    def __init__(self, image, **kwargs):
        # CONSISTENCY FIX: ``start`` below already uses the py3-only
        # zero-argument ``super()``; use the same form here instead of the
        # legacy ``super(DbContainer, self)`` spelling.
        super().__init__(image, **kwargs)

    @wait_container_is_ready(*ADDITIONAL_TRANSIENT_ERRORS)
    def _connect(self):
        # Imported lazily so this module stays importable without sqlalchemy
        # installed; only database containers actually need it.
        import sqlalchemy
        engine = sqlalchemy.create_engine(self.get_connection_url())
        engine.connect()

    def get_connection_url(self):
        """Return the SQLAlchemy URL for the running container."""
        raise NotImplementedError

    def _create_connection_url(self, dialect, username, password,
                               host=None, port=None, db_name=None):
        """Build a ``dialect://user:pass@host:port[/db]`` URL.

        When ``host`` is not given, the container's mapped host ip and
        exposed port are used, so the container must already be started.
        """
        if self._container is None:
            raise RuntimeError("container has not been started")
        if not host:
            host = self.get_container_host_ip()
            port = self.get_exposed_port(port)
        url = "{dialect}://{username}:{password}@{host}:{port}".format(
            dialect=dialect, username=username, password=password, host=host, port=port
        )
        if db_name:
            url += '/' + db_name
        return url

    def start(self):
        """Configure, start the container, then wait until it accepts
        connections; returns self for chaining."""
        self._configure()
        super().start()
        self._connect()
        return self

    def _configure(self):
        """Apply image-specific environment/configuration before start."""
        raise NotImplementedError
class GenericContainer(DockerContainer):
    """Deprecated alias kept only for backwards compatibility; use
    ``DockerContainer`` directly."""

    @deprecated(details="Use `DockerContainer`.")
    def __init__(self, image):
        super(GenericContainer, self).__init__(image)
| #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_container_is_ready
from deprecation import deprecated
from sqlalchemy.exc import OperationalError
class DbContainer(DockerContainer):
    """Abstract database container; subclasses supply the connection URL and
    configuration, and ``start`` waits for a working connection."""

    def __init__(self, image, **kwargs):
        super(DbContainer, self).__init__(image, **kwargs)

    # Retries while sqlalchemy raises OperationalError (DB still booting).
    @wait_container_is_ready(OperationalError)
    def _connect(self):
        # Lazy import keeps sqlalchemy optional at module import time.
        import sqlalchemy
        engine = sqlalchemy.create_engine(self.get_connection_url())
        engine.connect()

    def get_connection_url(self):
        """Return the SQLAlchemy URL of the running database container."""
        raise NotImplementedError

    def _create_connection_url(self, dialect, username, password,
                               host=None, port=None, db_name=None):
        """Build a ``dialect://user:pass@host:port[/db]`` URL; defaults to
        the container's mapped host/port, so it must be started first."""
        if self._container is None:
            raise RuntimeError("container has not been started")
        if not host:
            host = self.get_container_host_ip()
            port = self.get_exposed_port(port)
        url = "{dialect}://{username}:{password}@{host}:{port}".format(
            dialect=dialect, username=username, password=password, host=host, port=port
        )
        if db_name:
            url += '/' + db_name
        return url

    def start(self):
        """Configure, start, then block until the DB accepts connections."""
        self._configure()
        super().start()
        self._connect()
        return self

    def _configure(self):
        """Apply image-specific settings before the container starts."""
        raise NotImplementedError
class GenericContainer(DockerContainer):
    """Deprecated alias kept only for backwards compatibility; use
    ``DockerContainer`` directly."""

    @deprecated(details="Use `DockerContainer`.")
    def __init__(self, image):
        super(GenericContainer, self).__init__(image)
| apache-2.0 | Python |
181b54b0303e38803b47696fbceb0ff1f9e0a0b5 | change config tpl file name | echinopsii/net.echinopsii.ariane.community.core.mapping,echinopsii/net.echinopsii.ariane.community.core.mapping,echinopsii/net.echinopsii.ariane.community.core.mapping,echinopsii/net.echinopsii.ariane.community.core.mapping | distrib/installer/components/mapping/cuMappingRimManagedServiceProcessor.py | distrib/installer/components/mapping/cuMappingRimManagedServiceProcessor.py | # CC installer mapping rim managed service configuration unit
#
# Copyright (C) 2014 Mathilde Ffrench
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from tools.AConfParamNotNone import AConfParamNotNone
from tools.AConfUnit import AConfUnit
__author__ = 'mffrench'
class cpMappingDirectory(AConfParamNotNone):
    """Configuration parameter for the CC mapping Neo4j DB directory.

    The value is valid when it names an existing directory that is both
    readable and writable.
    """
    name = "##mappingDirectory"
    description = "CC mapping Neo4j DB path definition"
    hide = False

    def __init__(self):
        self.value = None

    def isValid(self):
        # BUG FIX: ``super().isValid`` was referenced without being called;
        # a bound method is always truthy, so the parent's not-None check
        # never fired. (Assumes the parent exposes isValid as a method,
        # matching this override.)
        if not super().isValid():
            return False
        # BUG FIX: os.W_OK was tested twice; the second check was presumably
        # intended to be os.R_OK (read access).
        if (os.path.exists(self.value) and os.path.isdir(self.value)
                and os.access(self.value, os.R_OK)
                and os.access(self.value, os.W_OK)):
            return True
        print(self.description + " (" + self.value + ") is not valid. Check if it exists and it has good rights.")
        return False
class cuMappingRimManagedServiceProcessor(AConfUnit):
    """Configuration unit that renders the mapping RIM managed service
    properties file from its bundled template."""

    def __init__(self, targetConfDir):
        # NOTE(review): targetConfDir is concatenated directly, so it is
        # expected to end with a path separator — confirm against callers.
        self.confUnitName = "CC mapping RIM managed service"
        self.confTemplatePath = os.path.abspath("resources/templates/components/com.spectral.cc.core.MappingRimManagedService.properties.tpl")
        self.confFinalPath = targetConfDir + "com.spectral.cc.core.MappingRimManagedService.properties"
        # Single template parameter: the mapping DB directory.
        mapDir = cpMappingDirectory()
        self.paramsDictionary = {mapDir.name: mapDir}
#
# Copyright (C) 2014 Mathilde Ffrench
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from tools.AConfParamNotNone import AConfParamNotNone
from tools.AConfUnit import AConfUnit
__author__ = 'mffrench'
class cpMappingDirectory(AConfParamNotNone):
    """Configuration parameter for the CC mapping Neo4j DB directory."""
    name = "##mappingDirectory"
    description = "CC mapping Neo4j DB path definition"
    hide = False
    def __init__(self):
        self.value = None
    def isValid(self):
        # NOTE(review): ``super().isValid`` is referenced without calling it;
        # a bound method is always truthy so this guard can never fire.
        # Likely should be ``super().isValid()`` — confirm against the parent.
        if not super().isValid:
            return False
        else:
            # NOTE(review): os.W_OK is checked twice; the second check was
            # probably intended to be os.R_OK.
            if os.path.exists(self.value) and os.path.isdir(self.value) and os.access(self.value, os.W_OK) and os.access(self.value, os.W_OK):
                return True
            else:
                print(self.description + " (" + self.value + ") is not valid. Check if it exists and it has good rights.")
                return False
class cuMappingRimManagedServiceProcessor(AConfUnit):
    """Configuration unit that renders the mapping RIM managed service
    properties file from its bundled template."""

    def __init__(self, targetConfDir):
        # NOTE(review): targetConfDir is concatenated directly, so it is
        # expected to end with a path separator — confirm against callers.
        self.confUnitName = "CC mapping RIM managed service"
        self.confTemplatePath = os.path.abspath("resources/templates/components/com.spectral.cc.core.mapping.ds.rim.runtime.MappingRimManagedService.properties.tpl")
        self.confFinalPath = targetConfDir + "com.spectral.cc.core.mapping.ds.rim.runtime.MappingRimManagedService.properties"
        # Single template parameter: the mapping DB directory.
        mapDir = cpMappingDirectory()
        self.paramsDictionary = {mapDir.name: mapDir}
self.paramsDictionary = {mapDir.name: mapDir} | agpl-3.0 | Python |
658f76ee6257cce8ab6f12d85a9a9472f925cc9d | print in a new location | natejgreene/pyTwinkle | pyTwinkle/light_strand.py | pyTwinkle/light_strand.py | import bluetooth
import sys
LIGHTS_NAME = "00651 36L RGB"
class LightStrand:
address = None
port = None
socket = None
connected = False
def __init__(self, address, port):
self.address = address
self.port = port
def __del__(self):
if self.socket:
self.socket.close()
def __connect(self):
try:
self.socket=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.socket.connect((self.address, self.port))
self.connected = True
print "Connected to:", self.address
sys.stdout.flush()
except:
self.connected = False
print "Could not connect to:", self.address
sys.stdout.flush()
def send(self,command):
try:
if self.connected:
self.socket.send(command)
except:
print "Error sending command to ", self.address
sys.stdout.flush()
self.connected = false
@staticmethod
def connect_all_strands():
nearby_devices = bluetooth.discover_devices()
strands = []
for bdaddr in nearby_devices:
if LIGHTS_NAME == bluetooth.lookup_name( bdaddr ):
strand = LightStrand(bdaddr[0], bdaddr[1])
strand.__connect()
strands.append(strand)
return strands | import bluetooth
import sys
LIGHTS_NAME = "00651 36L RGB"
class LightStrand:
    """A single Bluetooth (RFCOMM) connected RGB light strand."""
    address = None
    port = None
    socket = None
    connected = False
    def __init__(self, address, port):
        self.address = address
        self.port = port
    def __del__(self):
        # Best-effort cleanup of the RFCOMM socket on garbage collection.
        if self.socket:
            self.socket.close()
    def __connect(self):
        # Open an RFCOMM socket to the strand and record success/failure.
        try:
            self.socket=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
            self.socket.connect((self.address, self.port))
            self.connected = True
        except:
            self.connected = False
    def send(self,command):
        # Send a raw command to the strand if a connection is up.
        try:
            if self.connected:
                self.socket.send(command)
        except:
            print "Error sending command to ", self.address
            sys.stdout.flush()
            # NOTE(review): ``false`` is a NameError here; almost certainly
            # meant to be ``False`` to mark the strand disconnected.
            self.connected = false
    @staticmethod
    def connect_all_strands():
        # Discover nearby strands by advertised name and connect to each.
        nearby_devices = bluetooth.discover_devices()
        strands = []
        for bdaddr in nearby_devices:
            if LIGHTS_NAME == bluetooth.lookup_name( bdaddr ):
                # NOTE(review): discover_devices() normally yields plain
                # address strings, so bdaddr[0]/bdaddr[1] would be single
                # characters — verify the expected return shape.
                strand = LightStrand(bdaddr[0], bdaddr[1])
                strand.__connect()
                strands.append(strand)
                if strand.connected:
                    print "Connected to:", bdaddr
                    sys.stdout.flush()
        return strands
c123e0b14dba7e893471a82e9c52b970b9884843 | Fix py2.6 unicode output test cases | smalyshev/pywikibot-core,icyflame/batman,valhallasw/pywikibot-core,h4ck3rm1k3/pywikibot-core,jayvdb/pywikibot-core,magul/pywikibot-core,xZise/pywikibot-core,h4ck3rm1k3/pywikibot-core,PersianWikipedia/pywikibot-core,Darkdadaah/pywikibot-core,Darkdadaah/pywikibot-core,hasteur/g13bot_tools_new,VcamX/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,TridevGuha/pywikibot-core,emijrp/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,npdoty/pywikibot,happy5214/pywikibot-core,magul/pywikibot-core,jayvdb/pywikibot-core,wikimedia/pywikibot-core,npdoty/pywikibot,darthbhyrava/pywikibot-local,trishnaguha/pywikibot-core | pywikibot/userinterfaces/terminal_interface_unix.py | pywikibot/userinterfaces/terminal_interface_unix.py | # -*- coding: utf-8 -*-
#
# (C) Pywikibot team, 2003-2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
import sys
from . import terminal_interface_base
unixColors = {
'default': chr(27) + '[0m', # Unix end tag to switch back to default
'black': chr(27) + '[30m', # Black start tag
'red': chr(27) + '[31m', # Red start tag
'green': chr(27) + '[32m', # Green start tag
'yellow': chr(27) + '[33m', # Yellow start tag
'blue': chr(27) + '[34m', # Blue start tag
'purple': chr(27) + '[35m', # Purple start tag (Magenta)
'aqua': chr(27) + '[36m', # Aqua start tag (Cyan)
'lightgray': chr(27) + '[37m', # Light gray start tag (White)
'gray': chr(27) + '[90m', # Gray start tag
'lightred': chr(27) + '[91m', # Light Red tag
'lightgreen': chr(27) + '[92m', # Light Green tag
'lightyellow': chr(27) + '[93m', # Light Yellow tag
'lightblue': chr(27) + '[94m', # Light Blue tag
'lightpurple': chr(27) + '[95m', # Light Purple tag (Magenta)
'lightaqua': chr(27) + '[96m', # Light Aqua tag (Cyan)
'white': chr(27) + '[97m', # White start tag (Bright White)
}
class UnixUI(terminal_interface_base.UI):
    """Terminal UI that renders color tags as ANSI escape sequences."""

    def printColorized(self, text, targetStream):
        # Substitute every known color tag and remember whether any matched.
        matched = 0
        for name, ansi_code in unixColors.items():
            tag = '\03{%s}' % name
            matched += text.count(tag)
            text = text.replace(tag, ansi_code)
        # If any color was emitted, reset the terminal at the end.
        if matched:
            text += unixColors['default']
        stream_encoding = getattr(targetStream, 'encoding', None)
        # A stream .encoding only guarantees unicode writes on Python >= 2.7.
        if sys.version_info >= (2, 7) and stream_encoding:
            safe_text = text.encode(stream_encoding, 'replace').decode(stream_encoding)
            targetStream.write(safe_text)
        else:
            targetStream.write(text.encode(self.encoding, 'replace'))
| # -*- coding: utf-8 -*-
#
# (C) Pywikibot team, 2003-2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
from . import terminal_interface_base
unixColors = {
'default': chr(27) + '[0m', # Unix end tag to switch back to default
'black': chr(27) + '[30m', # Black start tag
'red': chr(27) + '[31m', # Red start tag
'green': chr(27) + '[32m', # Green start tag
'yellow': chr(27) + '[33m', # Yellow start tag
'blue': chr(27) + '[34m', # Blue start tag
'purple': chr(27) + '[35m', # Purple start tag (Magenta)
'aqua': chr(27) + '[36m', # Aqua start tag (Cyan)
'lightgray': chr(27) + '[37m', # Light gray start tag (White)
'gray': chr(27) + '[90m', # Gray start tag
'lightred': chr(27) + '[91m', # Light Red tag
'lightgreen': chr(27) + '[92m', # Light Green tag
'lightyellow': chr(27) + '[93m', # Light Yellow tag
'lightblue': chr(27) + '[94m', # Light Blue tag
'lightpurple': chr(27) + '[95m', # Light Purple tag (Magenta)
'lightaqua': chr(27) + '[96m', # Light Aqua tag (Cyan)
'white': chr(27) + '[97m', # White start tag (Bright White)
}
class UnixUI(terminal_interface_base.UI):
    """Terminal UI that renders color tags as ANSI escape sequences."""

    def printColorized(self, text, targetStream):
        # Replace every known color tag with its ANSI code, counting matches.
        totalcount = 0
        for key, value in unixColors.items():
            ckey = '\03{%s}' % key
            totalcount += text.count(ckey)
            text = text.replace(ckey, value)
        if totalcount > 0:
            # just to be sure, reset the color
            text += unixColors['default']
        # Streams that advertise an encoding get a round-tripped unicode
        # write; otherwise fall back to the UI's own encoding as bytes.
        if hasattr(targetStream, 'encoding') and targetStream.encoding:
            text = text.encode(targetStream.encoding, 'replace').decode(targetStream.encoding)
            targetStream.write(text)
        else:
            targetStream.write(text.encode(self.encoding, 'replace'))
| mit | Python |
0835e52fc950bd58be066c179f2bdf1f74da3c71 | move to cloudstorage api | dart-lang/api.dart.dev,dart-lang/api.dart.dev | server/scripts/insertname.py | server/scripts/insertname.py | # Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import logging
import re
from webapp2 import *
from google.appengine.api import memcache
from redirector import redir_pkgs
import cloudstorage
# When we get a request for dart: libraries, before serving the static HTML,
# rewrite it to include
# a reference to the dynamic contents, changing the title and adding a mention
# at the bottom of the page. If we don't do this, search engine indexers
# treat all of our pages as duplicates and throw them away.
#
# If the request is for a package, redirect it to dartdocs.org instead.
class ApiDocs(RequestHandler):
    """Serve dart: library docs with a page-specific title and marker, or
    redirect package requests to dartdocs.org.

    The rewrite exists so search-engine indexers do not treat every page as
    a duplicate of the same static index.html.
    """

    def get(self, *args, **kwargs):
        prefix = 'dartdoc-viewer/'
        title = '<title>Dart API Reference</title>'
        nameMarker = '<p class="nameMarker">Dart API Documentation</p>'
        path = self.request.path
        # Everything after the viewer prefix names the library or package.
        myPath = path[path.index(prefix) + len(prefix):]
        if not myPath.startswith("dart"):
            # TODO(alanknight): Once dartdocs.org supports something after /latest
            # make use of the rest of the URL to go to the right place in the package.
            packageName = myPath.split("/")[0]
            self.redirect(redir_pkgs(self, pkg = packageName))
        else:
            indexFilePath = os.path.join(os.path.dirname(__file__), '../index.html')
            # BUG FIX: close the template file deterministically instead of
            # leaking the handle until garbage collection.
            with open(indexFilePath, 'r') as index_file:
                indexFile = index_file.read()
            substituted = indexFile.replace(title,
                '<title>%s API Docs</title>' % myPath)
            substituted = substituted.replace(nameMarker,
                '<p class="nameMarker">Dart API Documentation for ' + myPath + '</p>\n')
            self.response.out.write(substituted)
application = WSGIApplication(
    [
        ('.*', ApiDocs),
    ],
    debug=True)
| # Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import logging
import re
from webapp2 import *
from google.appengine.api import files, memcache
from redirector import redir_pkgs
# When we get a request for dart: libraries, before serving the static HTML,
# rewrite it to include
# a reference to the dynamic contents, changing the title and adding a mention
# at the bottom of the page. If we don't do this, search engine indexers
# treat all of our pages as duplicates and throw them away.
#
# If the request is for a package, redirect it to dartdocs.org instead.
class ApiDocs(RequestHandler):
    """Serve dart: library docs with a page-specific title and marker, or
    redirect package requests to dartdocs.org (avoids search engines
    treating every page as a duplicate of the static index.html)."""

    def get(self, *args, **kwargs):
        prefix = 'dartdoc-viewer/'
        title = '<title>Dart API Reference</title>'
        nameMarker = '<p class="nameMarker">Dart API Documentation</p>'
        path = self.request.path
        # Everything after the viewer prefix names the library or package.
        myPath = path[path.index(prefix) + len(prefix):]
        if not myPath.startswith("dart"):
            # TODO(alanknight): Once dartdocs.org supports something after /latest
            # make use of the rest of the URL to go to the right place in the package.
            packageName = myPath.split("/")[0]
            self.redirect(redir_pkgs(self, pkg = packageName))
        else:
            indexFilePath = os.path.join(os.path.dirname(__file__), '../index.html')
            # NOTE(review): this file handle is never explicitly closed.
            indexFile = open(indexFilePath, 'r').read()
            substituted = indexFile.replace(title,
                '<title>%s API Docs</title>' % myPath)
            substituted = substituted.replace(nameMarker,
                '<p class="nameMarker">Dart API Documentation for ' + myPath + '</p>\n')
            self.response.out.write(substituted)
application = WSGIApplication(
    [
        ('.*', ApiDocs),
    ],
    debug=True)
| bsd-3-clause | Python |
dc8bed4e91b53d1dcae87125e7c1c766da5ba3f6 | Add some new InvItem types. | richardkiss/pycoin,richardkiss/pycoin | pycoin/message/InvItem.py | pycoin/message/InvItem.py | import functools
from pycoin.serialize import b2h_rev
from pycoin.serialize.bitcoin_streamer import parse_struct, stream_struct
ITEM_TYPE_TX = 1
ITEM_TYPE_BLOCK = 2
ITEM_TYPE_MERKLEBLOCK = 3
INV_CMPCT_BLOCK = 4
INV_WITNESS_FLAG = 1 << 30
INV_TYPE_MASK = 0xffffffff >> 2
@functools.total_ordering
class InvItem(object):
    """An inventory item: an (item_type, 32-byte hash) pair used in
    bitcoin "inv" protocol messages."""

    def __init__(self, item_type, data, dont_check=False):
        # Reject unknown item types unless the caller explicitly opts out.
        if not dont_check:
            assert item_type in (ITEM_TYPE_TX, ITEM_TYPE_BLOCK, ITEM_TYPE_MERKLEBLOCK)
        self.item_type = item_type
        assert isinstance(data, bytes)
        assert len(data) == 32
        self.data = data

    def __str__(self):
        labels = ["?", "Tx", "Block", "Merkle"]
        # Types outside 1..3 fall back to the "?" label.
        label_index = self.item_type if 0 < self.item_type < 4 else 0
        return "InvItem %s [%s]" % (labels[label_index], b2h_rev(self.data))

    def __repr__(self):
        return str(self)

    def __hash__(self):
        return hash((self.item_type, self.data))

    def __eq__(self, other):
        # Only other InvItem instances can compare equal.
        if not isinstance(other, self.__class__):
            return False
        return (self.item_type, self.data) == (other.item_type, other.data)

    def __lt__(self, other):
        # Order by type first, then by hash bytes.
        return (self.item_type, self.data) < (other.item_type, other.data)

    def stream(self, f):
        """Serialize as a 4-byte little-endian type plus the 32-byte hash."""
        stream_struct("L#", f, self.item_type, self.data)

    @classmethod
    def parse(cls, f):
        """Deserialize an InvItem; skips the item-type sanity check."""
        return cls(*parse_struct("L#", f), dont_check=True)
| import functools
from pycoin.serialize import b2h_rev
from pycoin.serialize.bitcoin_streamer import parse_struct, stream_struct
ITEM_TYPE_TX, ITEM_TYPE_BLOCK, ITEM_TYPE_MERKLEBLOCK = (1, 2, 3)
@functools.total_ordering
class InvItem(object):
    """An inventory item: an (item_type, 32-byte hash) pair used in
    bitcoin "inv" protocol messages."""

    def __init__(self, item_type, data, dont_check=False):
        # Unknown item types are rejected unless the caller opts out.
        if not dont_check:
            assert item_type in (ITEM_TYPE_TX, ITEM_TYPE_BLOCK, ITEM_TYPE_MERKLEBLOCK)
        self.item_type = item_type
        assert isinstance(data, bytes)
        assert len(data) == 32
        self.data = data
    def __str__(self):
        INV_TYPES = ["?", "Tx", "Block", "Merkle"]
        idx = self.item_type
        # Types outside 1..3 fall back to the "?" label.
        if not 0 < idx < 4:
            idx = 0
        return "InvItem %s [%s]" % (INV_TYPES[idx], b2h_rev(self.data))
    def __repr__(self):
        return str(self)
    def __hash__(self):
        return hash((self.item_type, self.data))
    def __eq__(self, other):
        # Only other InvItem instances can compare equal.
        if isinstance(other, self.__class__):
            return self.item_type == other.item_type and self.data == other.data
        return False
    def __lt__(self, other):
        # Order by type first, then by hash bytes (total_ordering fills in
        # the remaining comparison operators).
        return (self.item_type, self.data) < (other.item_type, other.data)
    def stream(self, f):
        # Serialize as a 4-byte little-endian type plus the 32-byte hash.
        stream_struct("L#", f, self.item_type, self.data)
    @classmethod
    def parse(self, f):
        # Deserialize an InvItem; skips the item-type sanity check.
        return self(*parse_struct("L#", f), dont_check=True)
| mit | Python |
59e51a64ef4cc361e76f5146ce1052b80fc6b2a7 | Bump version to 15.0.0a3 | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | resolwe_bio/__about__.py | resolwe_bio/__about__.py | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '15.0.0a3'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '15.0.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| apache-2.0 | Python |
5bffdf7ec68934595c394910aafc25fe634b10b2 | Update pylsy_test.py | gnithin/Pylsy,muteness/Pylsy,huiyi1990/Pylsy,gnithin/Pylsy,bcho/Pylsy,muteness/Pylsy,bcho/Pylsy,huiyi1990/Pylsy | pylsy/tests/pylsy_test.py | pylsy/tests/pylsy_test.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import unittest
from pylsy import PylsyTable
class PylsyTableTests(unittest.TestCase):
    """Compare PylsyTable rendering against the recorded correct.out file."""

    def setUp(self):
        attributes = ["name", "age"]
        self.table = PylsyTable(attributes)

    def tearDown(self):
        self.table = None

    def testCreateTable(self):
        self.table.add_data("name", ["a", "b"])
        self.table.add_data("age", [1, 2])
        # BUG FIX: close the fixture file deterministically instead of
        # leaking the handle.
        with open('correct.out', 'r') as correct_file:
            correctPrint = correct_file.read()
        try:
            output = self.table.__str__()
            self.assertEqual(output, correctPrint)
        except ImportError:
            # Fallback path: capture create_table()'s stdout into a file.
            # BUG FIX: restore sys.stdout afterwards so later tests are not
            # redirected, and close both file handles.
            import sys
            original_stdout = sys.stdout
            try:
                with open('test.out', 'w') as f_handler:
                    sys.stdout = f_handler
                    self.table.create_table()
            finally:
                sys.stdout = original_stdout
            with open('test.out', 'r') as f_handler:
                self.assertEqual(f_handler.read(), correctPrint)
if __name__ == '__main__':
    unittest.main()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import unittest
from pylsy import PylsyTable
class PylsyTableTests(unittest.TestCase):
    """Compare PylsyTable rendering against the recorded correct.out file."""

    def setUp(self):
        attributes = ["name", "age"]
        self.table = PylsyTable(attributes)
    def tearDown(self):
        self.table = None
    def testCreateTable(self):
        name = ["a", "b"]
        self.table.add_data("name", name)
        age = [1, 2]
        self.table.add_data("age", age)
        # NOTE(review): this handle is never closed.
        correct_file = open('correct.out', 'r')
        correctPrint = correct_file.read()
        try:
            import io
            from contextlib import redirect_stdout
            with io.StringIO() as buf, redirect_stdout(buf):
                print(self.table)
            # NOTE(review): buf is already closed when the with-block exits,
            # so getvalue() raises ValueError here — verify intent.
            output = buf.getvalue()
            self.assertEqual(output, correctPrint)
        except ImportError:
            # Fallback for interpreters without redirect_stdout.
            # NOTE(review): sys.stdout is never restored and the read handle
            # is never closed.
            import sys
            f_handler = open('test.out', 'w')
            sys.stdout = f_handler
            self.table.create_table()
            f_handler.close()
            f_handler = open('test.out', 'r')
            self.assertEqual(f_handler.read(), correctPrint)
if __name__ == '__main__':
    unittest.main()
| mit | Python |
a9a98603d0170128dab627a17a6d525083f48c6a | change forever semantics; instead make it something run by crontab, so is much more robust than a daemon can possibly be. | sagemathinc/smc,tscholl2/smc,sagemathinc/smc,sagemathinc/smc,tscholl2/smc,tscholl2/smc,tscholl2/smc,DrXyzzy/smc,tscholl2/smc,DrXyzzy/smc,sagemathinc/smc,DrXyzzy/smc,DrXyzzy/smc | salvus/scripts/forever.py | salvus/scripts/forever.py | #!/usr/bin/env python
# Ensure that system-wide daemons are running.
# If <service> is not running, do "service <service> restart".
# We do NOT just do "service <service> start" since that does
# not work (it's just an observerable fact).
#
# Put this in a crontab, e.g.,
#
# * * * * * /home/salvus/forever.py rethinkdb >> /home/salvus/.forever_rethinkdb.log 2>> /home/salvus/.forever_rethinkdb.err
#
import os, sys, time
# Require at least one service name; reject anything that looks like a flag.
SERVICES = sys.argv[1:]
if len(SERVICES) == 0 or len([x for x in SERVICES if x.startswith('-')]):
    sys.stderr.write("usage: %s <service> <service> ...\n"%sys.argv[0])
    sys.exit(1)
def is_running(service):
    """Return True if pidof reports at least one pid for *service*."""
    return bool(os.popen("pidof %s"%service).read())
def test(service):
    """Restart *service* if it is not currently running."""
    # "service X restart" is used because plain "start" is known not to
    # work here (see the header comment).
    if not is_running(service):
        print("%s: %s not running so restarting"%(service, time.asctime()))
        os.system("sudo service %s restart"%service)
# One pass per invocation; intended to be driven by cron (see header).
for service in SERVICES:
    test(service)
| #!/usr/bin/env python
# Ensure that system-wide daemons stays running.
# If <service> is not running, do "service <service> restart".
# We do NOT just do "service <service> start" since that does
# not work (it's just an observerable fact).
import os, sys, time
# Require at least one service name; reject anything that looks like a flag.
SERVICES = sys.argv[1:]
if len(SERVICES) == 0 or len([x for x in SERVICES if x.startswith('-')]):
    sys.stderr.write("usage: %s <service> <service> ...\n"%sys.argv[0])
    sys.exit(1)
def is_running(service):
    """Return True if pidof reports at least one pid for *service*."""
    return bool(os.popen("pidof %s"%service).read())
def test(service):
    """Restart *service* if it is not currently running."""
    # "service X restart" is used because plain "start" is known not to
    # work here (see the header comment).
    if not is_running(service):
        print("%s: %s not running so restarting"%(service, time.asctime()))
        os.system("sudo service %s restart"%service)
        time.sleep(15)  # wait extra time for service to start up
# Daemon mode: poll every 15 seconds forever.
while True:
    for service in SERVICES:
        test(service)
    time.sleep(15)
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.