commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
643dddf3118af4675954848671e38b20cd8234fb | use config_location instead of hardwired default | BBN-Q/QGL,BBN-Q/QGL | QGL/config.py | QGL/config.py | #Package configuration information
import json
import os.path
import sys
from . import config_location
# Load the configuration from the json file
# and populate the global configuration dictionary
QGLCfgFile = config_location.get_config_path()
if not os.path.isfile(QGLCfgFile):
rootFolder = os.path.dirname(os.path.abspath(__file__))
rootFolder = rootFolder.replace('\\', '/') # use unix-like convention
# build a config file from the template
templateFile = os.path.join(rootFolder, 'config.example.json')
ifid = open(templateFile, 'r')
ofid = open(QGLCfgFile, 'w')
for line in ifid:
ofid.write(line.replace('/my/path/to', rootFolder))
ifid.close()
ofid.close()
with open(QGLCfgFile, 'r') as f:
QGLCfg = json.load(f)
#pull out the variables
#abspath allows the use of relative file names in the config file
AWGDir = os.path.abspath(QGLCfg['AWGDir'])
channelLibFile = os.path.abspath(QGLCfg['ChannelLibraryFile'])
# plotting options
plotBackground = QGLCfg.get('PlotBackground', '#EAEAF2')
gridColor = QGLCfg.get('GridColor', None)
# select pulse library (standard or all90)
pulse_primitives_lib = QGLCfg.get('PulsePrimitivesLibrary', 'standard')
# select a CNOT implementation (a name of a Pulse function that implements
# CNOT in your gate set, e.g. CNOT_simple or CNOT_CR)
cnot_implementation = QGLCfg.get('cnot_implementation', 'CNOT_simple')
| #Package configuration information
import json
import os.path
import sys
#Load the configuration from the json file and populate the global configuration dictionary
rootFolder = os.path.dirname(os.path.abspath(__file__))
rootFolder = rootFolder.replace('\\', '/') # use unix-like convention
QGLCfgFile = os.path.join(rootFolder, 'config.json')
if not os.path.isfile(QGLCfgFile):
# build a config file from the template
templateFile = os.path.join(rootFolder, 'config.example.json')
ifid = open(templateFile, 'r')
ofid = open(QGLCfgFile, 'w')
for line in ifid:
ofid.write(line.replace('/my/path/to', rootFolder))
ifid.close()
ofid.close()
with open(QGLCfgFile, 'r') as f:
QGLCfg = json.load(f)
#pull out the variables
#abspath allows the use of relative file names in the config file
AWGDir = os.path.abspath(QGLCfg['AWGDir'])
channelLibFile = os.path.abspath(QGLCfg['ChannelLibraryFile'])
# plotting options
plotBackground = QGLCfg.get('PlotBackground', '#EAEAF2')
gridColor = QGLCfg.get('GridColor', None)
# select pulse library (standard or all90)
pulse_primitives_lib = QGLCfg.get('PulsePrimitivesLibrary', 'standard')
# select a CNOT implementation (a name of a Pulse function that implements
# CNOT in your gate set, e.g. CNOT_simple or CNOT_CR)
cnot_implementation = QGLCfg.get('cnot_implementation', 'CNOT_simple')
| apache-2.0 | Python |
9ca92efe4b9ed99018bc2eeadca17ae71f8ac60b | Update pocs/mount/__init__.py | panoptes/POCS,panoptes/POCS,panoptes/POCS,panoptes/POCS | pocs/mount/__init__.py | pocs/mount/__init__.py | from glob import glob
from pocs.mount.mount import AbstractMount # pragma: no flakes
from pocs.utils import error
from pocs.utils import load_module
from pocs.utils.location import create_location_from_config
from pocs.utils.logger import get_root_logger
def create_mount_from_config(config, mount_info=None, earth_location=None, *args, **kwargs):
""" Sets up the mount that will be used by the observatory """
logger = get_root_logger()
if 'mount' not in config:
logger.info("No mount information in config, cannot create.")
return None
if earth_location is None:
logger.debug(f'No location provided, using values from config.')
site_details = create_location_from_config(config)
earth_location = site_details['earth_location']
if mount_info is None:
logger.debug(f'No mount info provided, using values from config.')
mount_info = config.get('mount')
model = mount_info.get('model')
driver = mount_info.get('driver')
# See if we have a serial connection
try:
port = mount_info['serial']['port']
if port is None or len(glob(port)) == 0:
msg = f"Mount port ({port}) not available. Use simulator = mount for simulator."
raise error.MountNotFound(msg=msg)
except KeyError:
# TODO(jamessynge): We should move the driver specific validation into the driver
# module (e.g. module.create_mount_from_config). This means we have to adjust the
# definition of this method to return a validated but not fully initialized mount
# driver.
if model != 'bisque':
msg = "No port specified for mount in config file. Use simulator = mount for simulator. Exiting."
raise error.MountNotFound(msg=msg)
logger.debug('Creating mount: {}'.format(model))
module = load_module('pocs.mount.{}'.format(driver))
# Make the mount include site information
mount = module.Mount(location=earth_location, *args, **kwags)
logger.debug('Mount created from config')
return mount
| from glob import glob
from pocs.mount.mount import AbstractMount # pragma: no flakes
from pocs.utils import error
from pocs.utils import load_module
from pocs.utils.location import create_location_from_config
from pocs.utils.logger import get_root_logger
def create_mount_from_config(config, mount_info=None, earth_location=None, *args, **kwargs):
""" Sets up the mount that will be used by the observatory """
logger = get_root_logger()
if 'mount' not in config:
logger.info("No mount information in config, cannot create.")
return None
if earth_location is None:
logger.debug(f'No location provided, using values from config.')
site_details = create_location_from_config(config)
earth_location = site_details['earth_location']
if mount_info is None:
logger.debug(f'No mount info provided, using values from config.')
mount_info = config.get('mount')
model = mount_info.get('model')
driver = mount_info.get('driver')
# See if we have a serial connection
try:
port = mount_info['serial']['port']
if port is None or len(glob(port)) == 0:
msg = f"Mount port ({port}) not available. Use simulator = mount for simulator."
raise error.MountNotFound(msg=msg)
except KeyError:
# TODO(jamessynge): We should move the driver specific validation into the driver
# module (e.g. module.create_mount_from_config). This means we have to adjust the
# definition of this method to return a validated but not fully initialized mount
# driver.
if model != 'bisque':
msg = "No port specified for mount in config file. Use simulator = mount for simulator. Exiting."
raise error.MountNotFound(msg=msg)
logger.debug('Creating mount: {}'.format(model))
module = load_module('pocs.mount.{}'.format(driver))
# Make the mount include site information
mount = module.Mount(location=earth_location, *args, **kwags)
logger.debug('Mount created')
return mount
| mit | Python |
7ae97b8619e78da2d818991c03b0fd9e0e330c85 | Fix python3 compat issue in propdict | theeternalsw0rd/xmms2,xmms2/xmms2-stable,six600110/xmms2,theefer/xmms2,theeternalsw0rd/xmms2,theeternalsw0rd/xmms2,chrippa/xmms2,six600110/xmms2,krad-radio/xmms2-krad,theeternalsw0rd/xmms2,chrippa/xmms2,chrippa/xmms2,chrippa/xmms2,theeternalsw0rd/xmms2,six600110/xmms2,chrippa/xmms2,theefer/xmms2,xmms2/xmms2-stable,chrippa/xmms2,six600110/xmms2,theefer/xmms2,theefer/xmms2,xmms2/xmms2-stable,theeternalsw0rd/xmms2,theefer/xmms2,xmms2/xmms2-stable,theefer/xmms2,krad-radio/xmms2-krad,xmms2/xmms2-stable,krad-radio/xmms2-krad,six600110/xmms2,six600110/xmms2,krad-radio/xmms2-krad,krad-radio/xmms2-krad,krad-radio/xmms2-krad,xmms2/xmms2-stable,theefer/xmms2 | src/clients/lib/python/xmmsclient/propdict.py | src/clients/lib/python/xmmsclient/propdict.py | #Py3k compat
try:
a = basestring
del a
except NameError:
basestring = str
class PropDict(dict):
def __init__(self, srcs):
dict.__init__(self)
self._sources = srcs
def set_source_preference(self, sources):
"""
Change list of source preference
This method has been deprecated and should no longer be used.
"""
raise DeprecationWarning("This method has been deprecated and should no longer be used. Set the sources list using the 'sources' property.")
self._set_sources(sources)
def has_key(self, item):
try:
self.__getitem__(item)
return True
except KeyError:
return False
def __contains__(self, item):
return self.has_key(item)
def __getitem__(self, item):
if isinstance(item, basestring):
for src in self._sources:
if src.endswith('*'):
for k in self:
if k[0].startswith(src[:-1]) and k[1] == item:
return dict.__getitem__(self, k)
try:
t = dict.__getitem__(self, (src, item))
return t
except KeyError:
pass
raise KeyError(item)
return dict.__getitem__(self, item)
def get(self, item, default=None):
try:
return self[item]
except KeyError:
return default
def _get_sources(self):
return self._sources
def _set_sources(self, val):
if isinstance(val, basestring):
raise TypeError("Need a sequence of sources")
for i in val:
if not isinstance(i, basestring):
raise TypeError("Sources need to be strings")
self._sources = val
sources = property(_get_sources, _set_sources)
| #Py3k compat
try:
a = basestring
del a
except NameError:
basestring = str
class PropDict(dict):
def __init__(self, srcs):
dict.__init__(self)
self._sources = srcs
def set_source_preference(self, sources):
"""
Change list of source preference
This method has been deprecated and should no longer be used.
"""
raise DeprecationWarning("This method has been deprecated and should no longer be used. Set the sources list using the 'sources' property.")
self._set_sources(sources)
def has_key(self, item):
try:
self.__getitem__(item)
return True
except KeyError:
return False
def __contains__(self, item):
return self.has_key(item)
def __getitem__(self, item):
if isinstance(item, basestring):
for src in self._sources:
if src.endswith('*'):
for k,v in self.iteritems():
if k[0].startswith(src[:-1]) and k[1] == item:
return v
try:
t = dict.__getitem__(self, (src, item))
return t
except KeyError:
pass
raise KeyError(item)
return dict.__getitem__(self, item)
def get(self, item, default=None):
try:
return self[item]
except KeyError:
return default
def _get_sources(self):
return self._sources
def _set_sources(self, val):
if isinstance(val, basestring):
raise TypeError("Need a sequence of sources")
for i in val:
if not isinstance(i, basestring):
raise TypeError("Sources need to be strings")
self._sources = val
sources = property(_get_sources, _set_sources)
| lgpl-2.1 | Python |
47b8f63d318e0007abc979884f6096221775843f | Implement near returns. | haowu4682/gem5,haowu4682/gem5,haowu4682/gem5,LingxiaoJIA/gem5,LingxiaoJIA/gem5,haowu4682/gem5,haowu4682/gem5,LingxiaoJIA/gem5,LingxiaoJIA/gem5,LingxiaoJIA/gem5,haowu4682/gem5,LingxiaoJIA/gem5,haowu4682/gem5,haowu4682/gem5,LingxiaoJIA/gem5,haowu4682/gem5 | src/arch/x86/isa/insts/control_transfer/xreturn.py | src/arch/x86/isa/insts/control_transfer/xreturn.py | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the
# following conditions are met:
#
# The software must be used only for Non-Commercial Use which means any
# use which is NOT directed to receiving any direct monetary
# compensation for, or commercial advantage from such use. Illustrative
# examples of non-commercial use are academic research, personal study,
# teaching, education and corporate research & development.
# Illustrative examples of commercial use are distributing products for
# commercial advantage and providing services using the software for
# commercial advantage.
#
# If you wish to use this software or functionality therein that may be
# covered by patents for commercial use, please contact:
# Director of Intellectual Property Licensing
# Office of Strategy and Technology
# Hewlett-Packard Company
# 1501 Page Mill Road
# Palo Alto, California 94304
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer. Redistributions
# in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution. Neither the name of
# the COPYRIGHT HOLDER(s), HEWLETT-PACKARD COMPANY, nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. No right of
# sublicense is granted herewith. Derivatives of the software and
# output created using the software may be prepared, but only for
# Non-Commercial Uses. Derivatives of the software may be shared with
# others provided: (i) the others agree to abide by the list of
# conditions herein which includes the Non-Commercial Use restrictions;
# and (ii) such Derivatives of the software include the above copyright
# notice to acknowledge the contribution from this software where
# applicable, this list of conditions and the disclaimer below.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop RET
{
# Make the default data size of rets 64 bits in 64 bit mode
.adjust_env oszIn64Override
ld t1, ss, [0, t0, rsp]
addi rsp, rsp, dsz
wripi t1, 0
};
'''
| # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the
# following conditions are met:
#
# The software must be used only for Non-Commercial Use which means any
# use which is NOT directed to receiving any direct monetary
# compensation for, or commercial advantage from such use. Illustrative
# examples of non-commercial use are academic research, personal study,
# teaching, education and corporate research & development.
# Illustrative examples of commercial use are distributing products for
# commercial advantage and providing services using the software for
# commercial advantage.
#
# If you wish to use this software or functionality therein that may be
# covered by patents for commercial use, please contact:
# Director of Intellectual Property Licensing
# Office of Strategy and Technology
# Hewlett-Packard Company
# 1501 Page Mill Road
# Palo Alto, California 94304
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer. Redistributions
# in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution. Neither the name of
# the COPYRIGHT HOLDER(s), HEWLETT-PACKARD COMPANY, nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. No right of
# sublicense is granted herewith. Derivatives of the software and
# output created using the software may be prepared, but only for
# Non-Commercial Uses. Derivatives of the software may be shared with
# others provided: (i) the others agree to abide by the list of
# conditions herein which includes the Non-Commercial Use restrictions;
# and (ii) such Derivatives of the software include the above copyright
# notice to acknowledge the contribution from this software where
# applicable, this list of conditions and the disclaimer below.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = ""
#let {{
# class RET(Inst):
# "GenFault ${new UnimpInstFault}"
#}};
| bsd-3-clause | Python |
f31934d9317bb9f50d75a34bba2b0b16ce545a8f | Bump version | datashaman/wifidog-auth-flask,datashaman/wifidog-auth-flask,datashaman/wifidog-auth-flask,datashaman/wifidog-auth-flask | config.py | config.py | import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
APP_VERSION = '0.5.0'
CSRF_SESSION_KEY = 'ABigSecretIsHardToFind'
DATABASE_CONNECTION_OPTIONS = {}
DEBUG = False
FACEBOOK_APP_ID= '89526572170'
GOOGLE_ANALYTICS_TRACKING_ID = os.environ.get('GOOGLE_ANALYTICS_TRACKING_ID', '')
HOST = '0.0.0.0'
PORT = 8080
PUSH_ENABLED = False
SECRET_KEY = 'AnotherBigSecretIsAlsoHardToFind'
SECURITY_PASSWORD_HASH = 'sha512_crypt'
SECURITY_PASSWORD_SALT = 'ThisIsNotALoveSong'
SECURITY_POST_LOGIN_VIEW = 'app.vouchers_index'
SECURITY_POST_LOGOUT_VIEW = 'login'
SECURITY_REGISTERABLE=False
SECURITY_REGISTER_EMAIL=False
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'data/local.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
TESTING = False
THREADS_PER_PAGE = 8
UPLOADS_DEFAULT_DEST = os.path.join(BASE_DIR, 'uploads')
UPLOADS_DEFAULT_URL = '/static'
VOUCHER_DEFAULT_MINUTES = 90
VOUCHER_MAXAGE = 60 * 24
WTF_CSRF_ENABLED = True
| import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
APP_VERSION = '0.4.0'
CSRF_SESSION_KEY = 'ABigSecretIsHardToFind'
DATABASE_CONNECTION_OPTIONS = {}
DEBUG = False
FACEBOOK_APP_ID= '89526572170'
GOOGLE_ANALYTICS_TRACKING_ID = os.environ.get('GOOGLE_ANALYTICS_TRACKING_ID', '')
HOST = '0.0.0.0'
PORT = 8080
PUSH_ENABLED = False
SECRET_KEY = 'AnotherBigSecretIsAlsoHardToFind'
SECURITY_PASSWORD_HASH = 'sha512_crypt'
SECURITY_PASSWORD_SALT = 'ThisIsNotALoveSong'
SECURITY_POST_LOGIN_VIEW = 'app.vouchers_index'
SECURITY_POST_LOGOUT_VIEW = 'login'
SECURITY_REGISTERABLE=False
SECURITY_REGISTER_EMAIL=False
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(BASE_DIR, 'data/local.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
TESTING = False
THREADS_PER_PAGE = 8
UPLOADS_DEFAULT_DEST = os.path.join(BASE_DIR, 'uploads')
UPLOADS_DEFAULT_URL = '/static'
VOUCHER_DEFAULT_MINUTES = 90
VOUCHER_MAXAGE = 60 * 24
WTF_CSRF_ENABLED = True
| mit | Python |
f7d2b4d773636a3f858e082e011e2069a064a5e4 | Add __str__ on models | SaturDJang/warp,SaturDJang/warp,SaturDJang/warp,SaturDJang/warp | presentation/models.py | presentation/models.py | from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
is_public = models.BooleanField(default=True)
def __str__(self):
return self.subject
class Slide(TimeStampedModel):
presentation = models.ForeignKey(Presentation, on_delete=models.CASCADE)
slide_order = models.PositiveSmallIntegerField()
markdown = models.TextField()
def __str__(self):
return self.markdown
| from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
is_public = models.BooleanField(default=True)
class Slide(TimeStampedModel):
presentation = models.ForeignKey(Presentation, on_delete=models.CASCADE)
slide_order = models.PositiveSmallIntegerField()
markdown = models.TextField()
html = models.TextField()
| mit | Python |
8c706f4f7be18c25a1209365fa780edb76341b3a | Change make_healpixdb.py to work with eg. opsim DB | rbiswas4/simlib | scripts/make_healpixdb.py | scripts/make_healpixdb.py | from __future__ import division
import numpy as np
import time
import sqlite3
import healpy as hp
from healpy import query_disc, query_polygon
import opsimsummary as oss
import pandas as pd
from itertools import repeat
import os
from sqlalchemy import create_engine
pkgDir = os.path.split(oss.__file__)[0]
dbname = os.path.join(pkgDir, 'example_data', 'enigma_1189_micro.db')
engineFile = 'sqlite:///' + dbname
engine = create_engine(engineFile)
# opsim_hdf = '/Users/rbiswas/data/LSST/OpSimData/minion_1016.hdf'
OpSim_combined = pd.read_sql_query('SELECT * FROM Summary WHERE PropID is 364',
con=engine, index_col='obsHistID')
def addVec(df, raCol='ditheredRA', decCol='ditheredDec'):
thetas = - df[decCol] + np.pi /2.
phis = df[raCol]
df['vec'] = list(hp.ang2vec(thetas, phis))
addVec(OpSim_combined)
NSIDE = 1
OpSim_combined['hids'] = [query_disc(NSIDE, vec, np.radians(1.75), inclusive=True, nest=True) for vec in OpSim_combined.vec]
# Note this is the less efficient scheme, but the nest scheme is useful later.
lens = map(len, OpSim_combined.hids.values)
rowdata = []
_ = list(rowdata.extend(repeat(i, lens[i])) for i in xrange(len(OpSim_combined)))
coldata = np.concatenate(OpSim_combined.hids.values)
conn = sqlite3.Connection('healpixels_micro.db')
cur = conn.cursor()
cur.execute('CREATE TABLE simlib (ipix int, obsHistId int)')
tstart = time.time()
told = tstart
for i in range(len(rowdata)):
cur.execute('INSERT INTO simlib VALUES ({0}, {1})'.format(rowdata[i], coldata[i]))
if i % 10000000 == 0:
conn.commit()
tat = time.time()
print('committed at {0} taking time {1}'.format(i, tat-told))
told = tat
conn.commit()
print('Committed the table to disk\n')
# create index
print('Creteing ipix index\n')
cur.execute('CREATE INDEX {ix} on {tn}({cn})'\
.format(ix='ipix_ind', tn='simlib', cn='ipix'))
print('Creteing obsHistID index\n')
cur.execute('CREATE INDEX {ix} on {tn}({cn})'\
.format(ix='obshistid_ind', tn='simlib', cn='obsHistId'))
conn.close()
| from __future__ import division
import numpy as np
import time
import sqlite3
import healpy as hp
from healpy import query_disc, query_polygon
import opsimsummary as oss
import pandas as pd
from itertools import repeat
opsim_hdf = '/Users/rbiswas/data/LSST/OpSimData/minion_1016.hdf'
OpSim_combined = pd.read_hdf(opsim_hdf, 'Table')
def addVec(df, raCol='ditheredRA', decCol='ditheredDec'):
thetas = - df[decCol] + np.pi /2.
phis = df[raCol]
df['vec'] = list(hp.ang2vec(thetas, phis))
addVec(OpSim_combined)
NSIDE = 256
OpSim_combined['hids'] = [query_disc(NSIDE, vec, np.radians(1.75), inclusive=True, nest=True) for vec in OpSim_combined.vec]
# Note this is the less efficient scheme, but the nest scheme is useful later.
lens = map(len, OpSim_combined.hids.values)
rowdata = []
_ = list(rowdata.extend(repeat(i, lens[i])) for i in xrange(len(OpSim_combined)))
coldata = np.concatenate(OpSim_combined.hids.values)
conn = sqlite3.Connection('healpixels.db')
cur = conn.cursor()
cur.execute('CREATE TABLE simlib (ipix int, obsHistId int)')
tstart = time.time()
told = tstart
for i in range(len(rowdata)):
cur.execute('INSERT INTO simlib VALUES ({0}, {1})'.format(rowdata[i], coldata[i]))
if i % 10000000 == 0:
conn.commit()
tat = time.time()
print('committed at {0} taking time {1}'.format(i, tat-told))
told = tat
conn.commit()
print('Committed the table to disk\n')
# create index
print('Creteing ipix index\n')
cur.execute('CREATE INDEX {ix} on {tn}({cn})'\
.format(ix='ipix_ind', tn='simlib', cn='ipix'))
print('Creteing obsHistID index\n')
cur.execute('CREATE INDEX {ix} on {tn}({cn})'\
.format(ix='obshistid_ind', tn='simlib', cn='obsHistId'))
conn.close()
| mit | Python |
66b217efddf8ad8a2a8e4cd2384d2d994155cd7b | Fix a bug | ryuichiueda/raspimouse_ros,ryuichiueda/raspimouse_ros | scripts/rtlightsensors.py | scripts/rtlightsensors.py | #!/usr/bin/env python
import sys, rospy
from raspimouse_ros.msg import LightSensorValues
def talker():
devfile = '/dev/rtlightsensor0'
rospy.init_node('lightsensors')
pub = rospy.Publisher('lightsensors', LightSensorValues, queue_size=1)
rate = rospy.Rate(10)
while not rospy.is_shutdown():
try:
with open(devfile,'r') as f:
data = f.readline().split()
d = LightSensorValues()
d.right_forward = int(data[0])
d.right_side = int(data[1])
d.left_side = int(data[2])
d.left_forward = int(data[3])
pub.publish(d)
except:
rospy.logerr("cannot open " + devfile)
rate.sleep()
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| #!/usr/bin/env python
import sys, rospy
from raspimouse_ros.msg import LightSensorValues
def talker():
devfile = '/dev/rtlightsensor0'
rospy.init_node('lightsensors')
pub = rospy.Publisher('lightsensors', LightSensorValues, queue_size=1)
rate = rospy.Rate(10)
while not rospy.is_shutdown():
try:
with open(devfile,'r') as f:
data = f.readline().split()
d = LightSensorValues()
d.right_forward = int(data[0])
d.right_side = int(data[1])
d.left_side = int(data[2])
d.left_forward = int(data[3])
pub.publish(d)
rate.sleep()
except:
rospy.logerr("cannot open " + devfile)
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| mit | Python |
0f452e9ae1cb3216337d062def1d3a68d8a3c16d | Update Qt.py | csparkresearch/ExpEYES17-Qt,csparkresearch/ExpEYES17-Qt,csparkresearch/ExpEYES17-Qt,csparkresearch/ExpEYES17-Qt,csparkresearch/ExpEYES17-Qt | SPARK17/Qt.py | SPARK17/Qt.py | import os
if os.environ['SPARK17_QT_LIB'] == 'PyQt5':
from PyQt5 import QtGui,QtCore,QtWidgets
else:
print ('using PyQt4')
from PyQt4 import QtGui,QtCore
from PyQt4 import QtGui as QtWidgets
| from PyQt5 import QtGui,QtCore,QtWidgets
| mit | Python |
87194047d01a7321a3729e1de67a59336ae7d9cf | Add dbus_init to the public API | mitya57/secretstorage | secretstorage/__init__.py | secretstorage/__init__.py | # SecretStorage module for Python
# Access passwords using the SecretService DBus API
# Author: Dmitry Shachnev, 2013-2018
# License: 3-clause BSD, see LICENSE file
"""This file provides quick access to all SecretStorage API. Please
refer to documentation of individual modules for API details.
"""
from jeepney.integrate.blocking import DBusConnection, connect_and_authenticate
from secretstorage.collection import Collection, create_collection, \
get_all_collections, get_default_collection, get_any_collection, \
get_collection_by_alias, search_items
from secretstorage.item import Item
from secretstorage.exceptions import SecretStorageException, \
SecretServiceNotAvailableException, LockedException, \
ItemNotFoundException, PromptDismissedException
from secretstorage.util import add_match_rules
__version_tuple__ = (3, 1, 0)
__version__ = '.'.join(map(str, __version_tuple__))
__all__ = [
'Collection',
'Item',
'ItemNotFoundException',
'LockedException',
'PromptDismissedException',
'SecretServiceNotAvailableException',
'SecretStorageException',
'create_collection',
'dbus_init',
'get_all_collections',
'get_any_collection',
'get_collection_by_alias',
'get_default_collection',
'search_items',
]
def dbus_init() -> DBusConnection:
"""Returns a new connection to the session bus, instance of
jeepney's :class:`DBusConnection` class. This connection can
then be passed to various SecretStorage functions, such as
:func:`~secretstorage.collection.get_default_collection`.
.. warning::
The D-Bus socket will not be closed automatically. You can
close it manually using the :meth:`DBusConnection.close` method,
or you can use the :class:`contextlib.closing` context manager:
.. code-block:: python
from contextlib import closing
with closing(dbus_init()) as conn:
collection = secretstorage.get_default_collection(conn)
items = collection.search_items({'application': 'myapp'})
However, you will not be able to call any methods on the objects
created within the context after you leave it.
.. versionchanged:: 3.0
Before the port to Jeepney, this function returned an
instance of :class:`dbus.SessionBus` class.
.. versionchanged:: 3.1
This function no longer accepts any arguments.
"""
try:
connection = connect_and_authenticate()
add_match_rules(connection)
return connection
except KeyError as ex:
# os.environ['DBUS_SESSION_BUS_ADDRESS'] may raise it
reason = "Environment variable {} is unset".format(ex.args[0])
raise SecretServiceNotAvailableException(reason) from ex
except (ConnectionError, ValueError) as ex:
raise SecretServiceNotAvailableException(str(ex)) from ex
| # SecretStorage module for Python
# Access passwords using the SecretService DBus API
# Author: Dmitry Shachnev, 2013-2018
# License: 3-clause BSD, see LICENSE file
"""This file provides quick access to all SecretStorage API. Please
refer to documentation of individual modules for API details.
"""
from jeepney.integrate.blocking import DBusConnection, connect_and_authenticate
from secretstorage.collection import Collection, create_collection, \
get_all_collections, get_default_collection, get_any_collection, \
get_collection_by_alias, search_items
from secretstorage.item import Item
from secretstorage.exceptions import SecretStorageException, \
SecretServiceNotAvailableException, LockedException, \
ItemNotFoundException, PromptDismissedException
from secretstorage.util import add_match_rules
__version_tuple__ = (3, 1, 0)
__version__ = '.'.join(map(str, __version_tuple__))
__all__ = [
'Collection',
'Item',
'ItemNotFoundException',
'LockedException',
'PromptDismissedException',
'SecretServiceNotAvailableException',
'SecretStorageException',
'create_collection',
'get_all_collections',
'get_any_collection',
'get_collection_by_alias',
'get_default_collection',
'search_items',
]
def dbus_init() -> DBusConnection:
"""Returns a new connection to the session bus, instance of
jeepney's :class:`DBusConnection` class. This connection can
then be passed to various SecretStorage functions, such as
:func:`~secretstorage.collection.get_default_collection`.
.. warning::
The D-Bus socket will not be closed automatically. You can
close it manually using the :meth:`DBusConnection.close` method,
or you can use the :class:`contextlib.closing` context manager:
.. code-block:: python
from contextlib import closing
with closing(dbus_init()) as conn:
collection = secretstorage.get_default_collection(conn)
items = collection.search_items({'application': 'myapp'})
However, you will not be able to call any methods on the objects
created within the context after you leave it.
.. versionchanged:: 3.0
Before the port to Jeepney, this function returned an
instance of :class:`dbus.SessionBus` class.
.. versionchanged:: 3.1
This function no longer accepts any arguments.
"""
try:
connection = connect_and_authenticate()
add_match_rules(connection)
return connection
except KeyError as ex:
# os.environ['DBUS_SESSION_BUS_ADDRESS'] may raise it
reason = "Environment variable {} is unset".format(ex.args[0])
raise SecretServiceNotAvailableException(reason) from ex
except (ConnectionError, ValueError) as ex:
raise SecretServiceNotAvailableException(str(ex)) from ex
| bsd-3-clause | Python |
d342fd7a23542e7c968dca3af76281b1d35ba352 | use a better name for the test directory name in tests | geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/osmaxx-frontend | osmaxx-py/osmaxx/tests/test_utils.py | osmaxx-py/osmaxx/tests/test_utils.py | import os
from django.test import TestCase
from osmaxx.utils import PrivateSystemStorage
class PrivateSystemStorageTestCase(TestCase):
def setUp(self):
self.directory_name = 'OSMAXX_private-storage-directory-for-tests'
def test_creates_a_new_directory_if_it_does_not_exist(self):
directory_path = os.path.join('/tmp/', self.directory_name)
self.assertFalse(os.path.exists(directory_path))
PrivateSystemStorage(location=directory_path)
self.assertTrue(os.path.exists(directory_path))
os.rmdir(directory_path)
def xtest_raises_when_missing_permissions(self):
# FIXME: this isn't testable as it stands, because we are root on the docker instance
# and it will overwrite the access rights to create a directory.
read_only_directory = os.path.join('/tmp/', 'read_only_dir')
os.mkdir(read_only_directory, mode=0o000)
directory_path = os.path.join(read_only_directory, self.directory_name)
self.assertFalse(os.path.exists(directory_path))
with self.assertRaises(OSError):
PrivateSystemStorage(location=directory_path)
self.assertFalse(os.path.exists(directory_path))
| import os
from django.test import TestCase
from osmaxx.utils import PrivateSystemStorage
class PrivateSystemStorageTestCase(TestCase):
def setUp(self):
self.directory_name = 'OSMAXX_OjM3cRB2xgSuDXr5yBxxzds9mO8gmP'
def test_creates_a_new_directory_if_it_does_not_exist(self):
directory_path = os.path.join('/tmp/', self.directory_name)
self.assertFalse(os.path.exists(directory_path))
PrivateSystemStorage(location=directory_path)
self.assertTrue(os.path.exists(directory_path))
os.rmdir(directory_path)
def xtest_raises_when_missing_permissions(self):
# FIXME: this isn't testable as it stands, because we are root on the docker instance
# and it will overwrite the access rights to create a directory.
read_only_directory = os.path.join('/tmp/', 'read_only_dir')
os.mkdir(read_only_directory, mode=0o000)
directory_path = os.path.join(read_only_directory, self.directory_name)
self.assertFalse(os.path.exists(directory_path))
with self.assertRaises(OSError):
PrivateSystemStorage(location=directory_path)
self.assertFalse(os.path.exists(directory_path))
| mit | Python |
b587f7833a57d1f589e14aa33a7e761552c40a5a | bump to 0.19.6 | dataversioncontrol/dvc,dataversioncontrol/dvc,efiop/dvc,efiop/dvc,dmpetrov/dataversioncontrol,dmpetrov/dataversioncontrol | dvc/__init__.py | dvc/__init__.py | """
DVC
----
Make your data science projects reproducible and shareable.
"""
import os
import warnings
VERSION_BASE = '0.19.6'
__version__ = VERSION_BASE
PACKAGEPATH = os.path.abspath(os.path.dirname(__file__))
HOMEPATH = os.path.dirname(PACKAGEPATH)
VERSIONPATH = os.path.join(PACKAGEPATH, 'version.py')
if os.path.exists(os.path.join(HOMEPATH, 'setup.py')):
# dvc is run directly from source without installation or
# __version__ is called from setup.py
if os.getenv('APPVEYOR_REPO_TAG', '').lower() != 'true' \
and os.getenv('TRAVIS_TAG', '') == '':
# Dynamically update version
try:
import git
repo = git.Repo(HOMEPATH)
sha = repo.head.object.hexsha
short_sha = repo.git.rev_parse(sha, short=6)
dirty = '.mod' if repo.is_dirty() else ''
__version__ = '{}+{}{}'.format(__version__, short_sha, dirty)
# Write a helper file, that will be installed with the package
# and will provide a true version of the installed dvc
with open(VERSIONPATH, 'w+') as fd:
fd.write('# AUTOGENERATED by dvc/__init__.py\n')
fd.write('version = "{}"\n'.format(__version__))
except Exception:
pass
else:
# Remove version.py so that it doesn't get into the release
if os.path.exists(VERSIONPATH):
os.unlink(VERSIONPATH)
else:
# dvc was installed with pip or something. Hopefully we have our
# auto-generated version.py to help us provide a true version
try:
from dvc.version import version
__version__ = version
except Exception:
pass
VERSION = __version__
# Ignore numpy's runtime warnings: https://github.com/numpy/numpy/pull/432.
# We don't directly import numpy, but our dependency networkx does, causing
# these warnings in some environments. Luckily these warnings are benign and
# we can simply ignore them so that they don't show up when you are using dvc.
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| """
DVC
----
Make your data science projects reproducible and shareable.
"""
import os
import warnings
VERSION_BASE = '0.19.5'
__version__ = VERSION_BASE
PACKAGEPATH = os.path.abspath(os.path.dirname(__file__))
HOMEPATH = os.path.dirname(PACKAGEPATH)
VERSIONPATH = os.path.join(PACKAGEPATH, 'version.py')
if os.path.exists(os.path.join(HOMEPATH, 'setup.py')):
# dvc is run directly from source without installation or
# __version__ is called from setup.py
if os.getenv('APPVEYOR_REPO_TAG', '').lower() != 'true' \
and os.getenv('TRAVIS_TAG', '') == '':
# Dynamically update version
try:
import git
repo = git.Repo(HOMEPATH)
sha = repo.head.object.hexsha
short_sha = repo.git.rev_parse(sha, short=6)
dirty = '.mod' if repo.is_dirty() else ''
__version__ = '{}+{}{}'.format(__version__, short_sha, dirty)
# Write a helper file, that will be installed with the package
# and will provide a true version of the installed dvc
with open(VERSIONPATH, 'w+') as fd:
fd.write('# AUTOGENERATED by dvc/__init__.py\n')
fd.write('version = "{}"\n'.format(__version__))
except Exception:
pass
else:
# Remove version.py so that it doesn't get into the release
if os.path.exists(VERSIONPATH):
os.unlink(VERSIONPATH)
else:
# dvc was installed with pip or something. Hopefully we have our
# auto-generated version.py to help us provide a true version
try:
from dvc.version import version
__version__ = version
except Exception:
pass
VERSION = __version__
# Ignore numpy's runtime warnings: https://github.com/numpy/numpy/pull/432.
# We don't directly import numpy, but our dependency networkx does, causing
# these warnings in some environments. Luckily these warnings are benign and
# we can simply ignore them so that they don't show up when you are using dvc.
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
| apache-2.0 | Python |
ca32f3f69db49312d65330758ccbea039937885d | append to a copy of list; var-name change | dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | corehq/apps/userreports/expressions/__init__.py | corehq/apps/userreports/expressions/__init__.py | import copy
from django.conf import settings
from django.utils.module_loading import import_string
from corehq.apps.userreports.expressions.factory import ExpressionFactory
def get_custom_ucr_expressions():
custom_ucr_expressions = copy.copy(settings.CUSTOM_UCR_EXPRESSIONS)
for path_to_expression_lists in settings.CUSTOM_UCR_EXPRESSION_LISTS:
custom_ucr_expressions += import_string(path_to_expression_lists)
return custom_ucr_expressions
# Bootstrap plugin expressions
for type_name, factory_function_path in get_custom_ucr_expressions():
ExpressionFactory.register(type_name, import_string(factory_function_path))
| from django.conf import settings
from django.utils.module_loading import import_string
from corehq.apps.userreports.expressions.factory import ExpressionFactory
def get_custom_ucr_expressions():
custom_ucr_expressions = settings.CUSTOM_UCR_EXPRESSIONS
for expression_list in settings.CUSTOM_UCR_EXPRESSION_LISTS:
custom_ucr_expressions += import_string(expression_list)
return custom_ucr_expressions
# Bootstrap plugin expressions
for type_name, factory_function_path in get_custom_ucr_expressions():
ExpressionFactory.register(type_name, import_string(factory_function_path))
| bsd-3-clause | Python |
c8d382db5b9edd60cc98a765ca902b2365c8aee4 | Revert "Add failed tests" | nvbn/coviolations_web,nvbn/coviolations_web | projects/tests/base.py | projects/tests/base.py | from mock import MagicMock
from .. import models
class MockGithubMixin(object):
"""Mock github calls mixin"""
def setUp(self):
self._mock_github_call()
def _mock_github_call(self):
"""Mock github call"""
self._orig_get_remote_projects =\
models.ProjectManager._get_remote_projects
models.ProjectManager._get_remote_projects = MagicMock()
def tearDown(self):
models.ProjectManager._get_remote_projects =\
self._orig_get_remote_projects
def _create_repo(self, n):
"""Create repo"""
repo = MagicMock(
url='http://test{}.com'.format(n),
organization=None,
private=False,
)
repo.full_name = 'project {}'.format(n)
return repo
| from mock import MagicMock
from .. import models
class MockGithubMixin(object):
"""Mock github calls mixin"""
def setUp(self):
self._mock_github_call()
def _mock_github_call(self):
"""Mock github call"""
self._orig_get_remote_projects =\
models.ProjectManager._get_remote_projects
models.ProjectManager._get_remote_projects = MagicMock()
def tearDown(self):
models.ProjectManager._get_remote_projects =\
self._orig_get_remote_projects
def _create_repo(self, n):
"""Create repo"""
self.assertTrue(False)
repo = MagicMock(
url='http://test{}.com'.format(n),
organization=None,
private=False,
)
repo.full_name = 'project {}'.format(n)
return repo
| mit | Python |
4e414e763c5afbd5095729b2c85a91f0ae85f375 | Remove an extra comma, which breaks the following assertion | openstack/neutron-fwaas,openstack/neutron-fwaas | neutron_fwaas/tests/tempest_plugin/tests/scenario/base.py | neutron_fwaas/tests/tempest_plugin/tests/scenario/base.py | # Copyright (c) 2015 Midokura SARL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common import ssh
from tempest_lib import exceptions as lib_exc
from tempest import config
from tempest.scenario import manager
from neutron_fwaas.tests.tempest_plugin.tests import fwaas_client
CONF = config.CONF
class FWaaSScenarioTest(fwaas_client.FWaaSClientMixin,
manager.NetworkScenarioTest):
_delete_wrapper = manager.NetworkScenarioTest.delete_wrapper
def check_connectivity(self, ip_address, username=None, private_key=None,
should_connect=True,
check_icmp=True, check_ssh=True):
if should_connect:
msg = "Timed out waiting for %s to become reachable" % ip_address
else:
msg = "ip address %s is reachable" % ip_address
if check_icmp:
ok = self.ping_ip_address(ip_address,
should_succeed=should_connect)
self.assertTrue(ok, msg=msg)
if check_ssh:
connect_timeout = CONF.validation.connect_timeout
kwargs = {}
if not should_connect:
# Use a shorter timeout for negative case
kwargs['timeout'] = 1
try:
client = ssh.Client(ip_address, username, pkey=private_key,
channel_timeout=connect_timeout,
**kwargs)
client.test_connection_auth()
except lib_exc.SSHTimeout:
if should_connect:
raise
| # Copyright (c) 2015 Midokura SARL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common import ssh
from tempest_lib import exceptions as lib_exc
from tempest import config
from tempest.scenario import manager
from neutron_fwaas.tests.tempest_plugin.tests import fwaas_client
CONF = config.CONF
class FWaaSScenarioTest(fwaas_client.FWaaSClientMixin,
manager.NetworkScenarioTest):
_delete_wrapper = manager.NetworkScenarioTest.delete_wrapper
def check_connectivity(self, ip_address, username=None, private_key=None,
should_connect=True,
check_icmp=True, check_ssh=True):
if should_connect:
msg = "Timed out waiting for %s to become reachable" % ip_address
else:
msg = "ip address %s is reachable" % ip_address
if check_icmp:
ok = self.ping_ip_address(ip_address,
should_succeed=should_connect),
self.assertTrue(ok, msg=msg)
if check_ssh:
connect_timeout = CONF.validation.connect_timeout
kwargs = {}
if not should_connect:
# Use a shorter timeout for negative case
kwargs['timeout'] = 1
try:
client = ssh.Client(ip_address, username, pkey=private_key,
channel_timeout=connect_timeout,
**kwargs)
client.test_connection_auth()
except lib_exc.SSHTimeout:
if should_connect:
raise
| apache-2.0 | Python |
e744f098506b2289b4e1891dc4c510327f75f0af | Add yscale (optional) | openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica | exotica_python/src/pyexotica/publish_trajectory.py | exotica_python/src/pyexotica/publish_trajectory.py | from __future__ import print_function, division
from time import sleep
import matplotlib.pyplot as plt
import signal
__all__ = ["sig_int_handler", "publish_pose", "publish_trajectory",
"publish_time_indexed_trajectory", "plot"]
def sig_int_handler(signal, frame):
raise KeyboardInterrupt
def publish_pose(q, problem, t=0.0):
problem.get_scene().update(q, t)
problem.get_scene().get_kinematic_tree().publish_frames()
def publish_trajectory(traj, T, problem, once=False):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sig_int_handler)
print('Playing back trajectory ' + str(T) + 's')
dt = float(T) / float(len(traj))
t = 0
while True:
try:
publish_pose(traj[t], problem, float(t) * dt)
sleep(dt)
if t >= len(traj) - 1 and once:
return
t = (t + 1) % len(traj)
except KeyboardInterrupt:
return False
def publish_time_indexed_trajectory(traj, Ts, problem, once=False):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sig_int_handler)
print('Playing back trajectory ' + str(len(Ts)) +
' states in ' + str(Ts[len(Ts) - 1]))
while True:
try:
for i in range(1, len(Ts) - 1):
publish_pose(traj[i], problem, Ts[i])
sleep(Ts[i] - Ts[i-1])
if once:
break
except KeyboardInterrupt:
return False
return True
def plot(solution, labels=None, yscale=None):
print('Plotting the solution')
plt.plot(solution, '.-')
if labels is not None:
plt.legend(labels)
if yscale is not None:
plt.yscale(yscale)
plt.show()
| from __future__ import print_function, division
from time import sleep
import matplotlib.pyplot as plt
import signal
__all__ = ["sig_int_handler", "publish_pose", "publish_trajectory",
"publish_time_indexed_trajectory", "plot"]
def sig_int_handler(signal, frame):
raise KeyboardInterrupt
def publish_pose(q, problem, t=0.0):
problem.get_scene().update(q, t)
problem.get_scene().get_kinematic_tree().publish_frames()
def publish_trajectory(traj, T, problem, once=False):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sig_int_handler)
print('Playing back trajectory ' + str(T) + 's')
dt = float(T) / float(len(traj))
t = 0
while True:
try:
publish_pose(traj[t], problem, float(t) * dt)
sleep(dt)
if t >= len(traj) - 1 and once:
return
t = (t + 1) % len(traj)
except KeyboardInterrupt:
return False
def publish_time_indexed_trajectory(traj, Ts, problem, once=False):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sig_int_handler)
print('Playing back trajectory ' + str(len(Ts)) +
' states in ' + str(Ts[len(Ts) - 1]))
while True:
try:
for i in range(1, len(Ts) - 1):
publish_pose(traj[i], problem, Ts[i])
sleep(Ts[i] - Ts[i-1])
if once:
break
except KeyboardInterrupt:
return False
return True
def plot(solution, labels=None):
print('Plotting the solution')
plt.plot(solution, '.-')
if labels is not None:
plt.legend(labels)
plt.show()
| bsd-3-clause | Python |
91c8af7f2fcfbee6a63f360a6b29c8398bd71ac0 | Fix missing `g++` package in pip example. | Fizzadar/pyinfra,Fizzadar/pyinfra | examples/pip.py | examples/pip.py | from pyinfra import host
from pyinfra.operations import apk, apt, files, pip, python, yum
SUDO = True
if host.fact.linux_name in ['Alpine']:
apk.packages(
name='Install packages for python virtual environments',
packages=[
'gcc',
'g++',
'libffi-dev',
'make',
'musl-dev',
'openssl-dev',
'py3-pynacl',
'py3-virtualenv',
'python3-dev',
],
)
if host.fact.linux_name in ['CentOS']:
yum.packages(
name='Install pip3 so you can install virtualenv',
packages=['python3-pip', 'python3-devel', 'gcc-c++', 'make'],
)
if host.fact.linux_name in ['Ubuntu']:
apt.packages(
name='Install pip3 so you can install virtualenv',
packages='python3-pip',
update=True,
)
if not host.fact.file('/usr/bin/pip'):
files.link(
name='Create link /usr/bin/pip that points to /usr/bin/pip3',
path='/usr/bin/pip',
target='/usr/bin/pip3',
)
pip.packages(
name='Install virtualenv using pip',
packages='virtualenv',
)
pip.virtualenv(
name='Create a virtualenv',
path='/usr/local/bin/venv',
)
# use that virtualenv to install pyinfra
pip.packages(
name='Install pyinfra into a virtualenv',
packages='pyinfra',
virtualenv='/usr/local/bin/venv',
)
# Show that we can actually run the pyinfra command from that virtualenv
def run_pyinfra_version(state, host):
status, stdout, stderr = host.run_shell_command(
'/usr/local/bin/venv/bin/pyinfra --version',
env={'LC_ALL': 'C.UTF-8', 'LANG': 'C.UTF-8,'},
)
assert status, 'pyinfra command failed: {0}'.format((stdout, stderr))
assert 'pyinfra: ' in stdout[0]
python.call(run_pyinfra_version) # noqa: E305
| from pyinfra import host
from pyinfra.operations import apk, apt, files, pip, python, yum
SUDO = True
if host.fact.linux_name in ['Alpine']:
apk.packages(
name='Install packages for python virtual environments',
packages=[
'gcc',
'libffi-dev',
'make',
'musl-dev',
'openssl-dev',
'py3-pynacl',
'py3-virtualenv',
'python3-dev',
],
)
if host.fact.linux_name in ['CentOS']:
yum.packages(
name='Install pip3 so you can install virtualenv',
packages=['python3-pip', 'python3-devel', 'gcc-c++', 'make'],
)
if host.fact.linux_name in ['Ubuntu']:
apt.packages(
name='Install pip3 so you can install virtualenv',
packages='python3-pip',
update=True,
)
if not host.fact.file('/usr/bin/pip'):
files.link(
name='Create link /usr/bin/pip that points to /usr/bin/pip3',
path='/usr/bin/pip',
target='/usr/bin/pip3',
)
pip.packages(
name='Install virtualenv using pip',
packages='virtualenv',
)
pip.virtualenv(
name='Create a virtualenv',
path='/usr/local/bin/venv',
)
# use that virtualenv to install pyinfra
pip.packages(
name='Install pyinfra into a virtualenv',
packages='pyinfra',
virtualenv='/usr/local/bin/venv',
)
# Show that we can actually run the pyinfra command from that virtualenv
def run_pyinfra_version(state, host):
status, stdout, stderr = host.run_shell_command(
'/usr/local/bin/venv/bin/pyinfra --version',
env={'LC_ALL': 'C.UTF-8', 'LANG': 'C.UTF-8,'},
)
assert status, 'pyinfra command failed: {0}'.format((stdout, stderr))
assert 'pyinfra: ' in stdout[0]
python.call(run_pyinfra_version) # noqa: E305
| mit | Python |
cba136dff9ba3ec5074fb9a2a4082c6e7430e3d1 | Add debug output for read_email. | benigls/spam,benigls/spam | spam/preprocess/preprocess.py | spam/preprocess/preprocess.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A set of function that cleans the dataset
for machine learning process.
"""
import io
import sys
import re
from nltk import tokenize
from nltk.corpus import stopwords
def regex(text):
"""
A function that removes non-alphanumeric, -, _ characters
and the word `Subject:`, and `re:` in text.
"""
clean_text = re.sub('Subject:|re:', '', text)
clean_text = re.sub('[^\w]+', ' ', clean_text)
return clean_text
def tokenizer(text):
"""
A function that splits a text.
"""
return tokenize.word_tokenize(text)
def remove_stopwords(word_list):
"""
A function that remove stopwords from a list of words.
"""
return [word for word in word_list
if word not in stopwords.words('english')]
def clean_text(text):
"""
A function that cleans text (regex, token, stop).
"""
word_list = remove_stopwords(tokenizer(regex(text)))
return ' '.join(word_list)
def static_vars(**kwargs):
def decorate(func):
for k in kwargs:
setattr(func, k, kwargs[k])
return func
return decorate
@static_vars(success=0, fail=0)
def read_email(path, clean=True):
"""
A function that accepts file paths and return it's contents.
"""
with io.open(path, 'r', encoding='cp1252') as file:
try:
content = ''.join(file.readlines())
read_email.success += 1
except UnicodeDecodeError:
content = ''
read_email.fail += 1
sys.stdout.write("Success: {} \t".format(read_email.success))
sys.stdout.write("Fail: {} \r".format(read_email.fail))
sys.stdout.flush()
file.close()
return clean_text(content) if clean else content
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A set of function that cleans the dataset
for machine learning process.
"""
import re
from nltk import tokenize
from nltk.corpus import stopwords
def regex(text):
"""
A function that removes non-alphanumeric, -, _ characters
and the word `Subject:`, and `re:` in text.
"""
clean_text = re.sub('Subject:|re:', '', text)
clean_text = re.sub('[^\w]+', ' ', clean_text)
return clean_text
def tokenizer(text):
"""
A function that splits a text.
"""
return tokenize.word_tokenize(text)
def remove_stopwords(word_list):
"""
A function that remove stopwords from a list of words.
"""
return [word for word in word_list
if word not in stopwords.words('english')]
def clean_text(text):
"""
A function that cleans text (regex, token, stop).
"""
word_list = remove_stopwords(tokenizer(regex(text)))
return ' '.join(word_list)
def read_email(path, clean=True):
"""
A function that accepts file paths and return it's contents.
"""
with open(path, 'r') as file:
content = ''.join(file.readlines())
file.close()
return clean_text(content) if clean else content
| mit | Python |
78c52615d763a77d0347b0d001df21f4b23095b8 | fix bug in Response.read() | aliyun/aliyun-oss-python-sdk | oss/http.py | oss/http.py | import requests
import platform
from . import __version__
from requests.structures import CaseInsensitiveDict
from .compat import to_bytes
_USER_AGENT = 'aliyun-sdk-python/{0} ({1}/{2}/{3};{4})'.format(
__version__, platform.system(), platform.release(), platform.machine(), platform.python_version())
class Session(object):
def __init__(self):
self.session = requests.Session()
def do_request(self, req):
return Response(self.session.request(req.method, req.url,
data=req.data,
params=req.params,
headers=req.headers,
stream=True))
class Request(object):
def __init__(self, method, url,
data=None,
params=None,
headers=None):
self.method = method
self.url = url
self.data = to_bytes(data)
self.params = params or {}
if not isinstance(headers, CaseInsensitiveDict):
self.headers = CaseInsensitiveDict(headers)
else:
self.headers = headers
# tell requests not to add 'Accept-Encoding: gzip, deflate' by default
if 'Accept-Encoding' not in self.headers:
self.headers['Accept-Encoding'] = None
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = _USER_AGENT
class Response(object):
def __init__(self, response):
self.response = response
self.status = response.status_code
self.headers = response.headers
def read(self, amt=None):
if amt is None:
content = b''
for chunk in self.response.iter_content(512 * 1024):
content += chunk
return content
else:
try:
return next(self.response.iter_content(amt))
except StopIteration:
return b''
| import requests
import platform
from . import __version__
from requests.structures import CaseInsensitiveDict
from .compat import to_bytes
_USER_AGENT = 'aliyun-sdk-python/{0} ({1}/{2}/{3};{4})'.format(
__version__, platform.system(), platform.release(), platform.machine(), platform.python_version())
class Session(object):
def __init__(self):
self.session = requests.Session()
def do_request(self, req):
return Response(self.session.request(req.method, req.url,
data=req.data,
params=req.params,
headers=req.headers,
stream=False))
class Request(object):
def __init__(self, method, url,
data=None,
params=None,
headers=None):
self.method = method
self.url = url
self.data = to_bytes(data)
self.params = params or {}
if not isinstance(headers, CaseInsensitiveDict):
self.headers = CaseInsensitiveDict(headers)
else:
self.headers = headers
# tell requests not to add 'Accept-Encoding: gzip, deflate' by default
if 'Accept-Encoding' not in self.headers:
self.headers['Accept-Encoding'] = None
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = _USER_AGENT
class Response(object):
def __init__(self, response):
self.response = response
self.status = response.status_code
self.headers = response.headers
def read(self, amt=None):
if amt is None:
content = b''
for chunk in self.response.iter_content(512 * 1024):
content += chunk
return content
else:
return next(self.response.iter_content(amt))
| mit | Python |
5c5740e1ac07303a83e509fa34175218bb2b3c96 | insert missing conversion from modes to states | qutip/qutip,qutip/qutip | doc/guide/scripts/floquet_ex3.py | doc/guide/scripts/floquet_ex3.py | import numpy as np
from matplotlib import pyplot
import qutip
delta = 0.0 * 2*np.pi
eps0 = 1.0 * 2*np.pi
A = 0.25 * 2*np.pi
omega = 1.0 * 2*np.pi
T = 2*np.pi / omega
tlist = np.linspace(0.0, 20 * T, 101)
psi0 = qutip.basis(2,0)
H0 = - delta/2.0 * qutip.sigmax() - eps0/2.0 * qutip.sigmaz()
H1 = A/2.0 * qutip.sigmax()
args = {'w': omega}
H = [H0, [H1, lambda t,args: np.sin(args['w'] * t)]]
# noise power spectrum
gamma1 = 0.1
def noise_spectrum(omega):
return 0.5 * gamma1 * omega/(2*np.pi)
# find the floquet modes for the time-dependent hamiltonian
f_modes_0, f_energies = qutip.floquet_modes(H, T, args)
# precalculate mode table
f_modes_table_t = qutip.floquet_modes_table(
f_modes_0, f_energies, np.linspace(0, T, 500 + 1), H, T, args,
)
# solve the floquet-markov master equation
output = qutip.fmmesolve(H, psi0, tlist, [qutip.sigmax()], [], [noise_spectrum], T, args)
# calculate expectation values in the computational basis
p_ex = np.zeros(tlist.shape, dtype=np.complex128)
for idx, t in enumerate(tlist):
f_modes_t = qutip.floquet_modes_t_lookup(f_modes_table_t, t, T)
f_states_t = qutip.floquet_states(f_modes_t, f_energies, t)
p_ex[idx] = qutip.expect(qutip.num(2), output.states[idx].transform(f_states_t, True))
# For reference: calculate the same thing with mesolve
output = qutip.mesolve(H, psi0, tlist,
[np.sqrt(gamma1) * qutip.sigmax()], [qutip.num(2)],
args)
p_ex_ref = output.expect[0]
# plot the results
pyplot.plot(tlist, np.real(p_ex), 'r--', tlist, 1-np.real(p_ex), 'b--')
pyplot.plot(tlist, np.real(p_ex_ref), 'r', tlist, 1-np.real(p_ex_ref), 'b')
pyplot.xlabel('Time')
pyplot.ylabel('Occupation probability')
pyplot.legend(("Floquet $P_1$", "Floquet $P_0$", "Lindblad $P_1$", "Lindblad $P_0$"))
pyplot.show()
| import numpy as np
from matplotlib import pyplot
import qutip
delta = 0.0 * 2*np.pi
eps0 = 1.0 * 2*np.pi
A = 0.25 * 2*np.pi
omega = 1.0 * 2*np.pi
T = 2*np.pi / omega
tlist = np.linspace(0.0, 20 * T, 101)
psi0 = qutip.basis(2,0)
H0 = - delta/2.0 * qutip.sigmax() - eps0/2.0 * qutip.sigmaz()
H1 = A/2.0 * qutip.sigmax()
args = {'w': omega}
H = [H0, [H1, lambda t,args: np.sin(args['w'] * t)]]
# noise power spectrum
gamma1 = 0.1
def noise_spectrum(omega):
return 0.5 * gamma1 * omega/(2*np.pi)
# find the floquet modes for the time-dependent hamiltonian
f_modes_0, f_energies = qutip.floquet_modes(H, T, args)
# precalculate mode table
f_modes_table_t = qutip.floquet_modes_table(
f_modes_0, f_energies, np.linspace(0, T, 500 + 1), H, T, args,
)
# solve the floquet-markov master equation
output = qutip.fmmesolve(H, psi0, tlist, [qutip.sigmax()], [], [noise_spectrum], T, args)
# calculate expectation values in the computational basis
p_ex = np.zeros(tlist.shape, dtype=np.complex128)
for idx, t in enumerate(tlist):
f_modes_t = qutip.floquet_modes_t_lookup(f_modes_table_t, t, T)
p_ex[idx] = qutip.expect(qutip.num(2), output.states[idx].transform(f_modes_t, True))
# For reference: calculate the same thing with mesolve
output = qutip.mesolve(H, psi0, tlist,
[np.sqrt(gamma1) * qutip.sigmax()], [qutip.num(2)],
args)
p_ex_ref = output.expect[0]
# plot the results
pyplot.plot(tlist, np.real(p_ex), 'r--', tlist, 1-np.real(p_ex), 'b--')
pyplot.plot(tlist, np.real(p_ex_ref), 'r', tlist, 1-np.real(p_ex_ref), 'b')
pyplot.xlabel('Time')
pyplot.ylabel('Occupation probability')
pyplot.legend(("Floquet $P_1$", "Floquet $P_0$", "Lindblad $P_1$", "Lindblad $P_0$"))
pyplot.show()
| bsd-3-clause | Python |
0da80053f6c5fa41d33b692e6abc7067ed100bb4 | bump version | sagasurvey/saga,sagasurvey/saga | SAGA/version.py | SAGA/version.py | """
SAGA package version
"""
__version__ = "0.40.0a9"
| """
SAGA package version
"""
__version__ = "0.40.0a8"
| mit | Python |
c694aefd2a555e0fb7e11212bfb4c412c226ea89 | Fix Codacy | adityahase/frappe,frappe/frappe,vjFaLk/frappe,adityahase/frappe,mhbu50/frappe,mhbu50/frappe,StrellaGroup/frappe,RicardoJohann/frappe,RicardoJohann/frappe,almeidapaulopt/frappe,frappe/frappe,RicardoJohann/frappe,mhbu50/frappe,yashodhank/frappe,StrellaGroup/frappe,vjFaLk/frappe,adityahase/frappe,yashodhank/frappe,frappe/frappe,yashodhank/frappe,yashodhank/frappe,almeidapaulopt/frappe,vjFaLk/frappe,RicardoJohann/frappe,saurabh6790/frappe,vjFaLk/frappe,almeidapaulopt/frappe,saurabh6790/frappe,saurabh6790/frappe,mhbu50/frappe,adityahase/frappe,saurabh6790/frappe,StrellaGroup/frappe,almeidapaulopt/frappe | frappe/desk/doctype/route_history/route_history.py | frappe/desk/doctype/route_history/route_history.py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class RouteHistory(Document):
pass
def flush_old_route_records():
"""Deletes all route records except last 500 records per user"""
records_to_keep_limit = 500
users = frappe.db.sql('''
SELECT `user`
FROM `tabRoute History`
GROUP BY `user`
HAVING count(`name`) > {limit}
'''.format(limit=records_to_keep_limit))
for user in users:
user = user[0]
last_record_to_keep = frappe.db.get_all('Route History',
filters={
'user': user,
},
limit=1,
limit_start=500,
fields=['modified'],
order_by='modified desc')
frappe.db.sql('''
DELETE
FROM `tabRoute History`
WHERE `modified` <= '{modified}' and `user`='{user}'
'''.format(modified=last_record_to_keep[0].modified, user=user)) | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
import json
class RouteHistory(Document):
pass
def flush_old_route_records():
"""Deletes all route records except last 500 records per user"""
records_to_keep_limit = 500
users = frappe.db.sql('''
SELECT `user`
FROM `tabRoute History`
GROUP BY `user`
HAVING count(`name`) > {limit}
'''.format(limit=records_to_keep_limit))
for user in users:
user = user[0]
last_record_to_keep = frappe.db.get_all('Route History',
filters={
'user': user,
},
limit=1,
limit_start=500,
fields=['modified'],
order_by='modified desc')
frappe.db.sql('''
DELETE
FROM `tabRoute History`
WHERE `modified` <= '{modified}' and `user`='{user}'
'''.format(modified=last_record_to_keep[0].modified, user=user)) | mit | Python |
c1ea7a007d70c2b815b4879d383af3825c74e7e8 | fix flake8 violation | BrianHicks/tinyobj,BrianHicks/tinyobj | tinyobj/fields.py | tinyobj/fields.py | """**tinyobj** implements a number of fields to do validation, etc."""
from . import _compat
class Field(object):
"""base for other fields"""
def __init__(self):
self.default = None
def initialize(self, value=()):
"""\
initialize returns a cleaned value or the default, raising ValueErrors
as necessary.
"""
if value == ():
try:
return self.default()
except TypeError:
return self.default
else:
return self.clean(value)
def clean(self, value):
"""clean a value, returning the cleaned value"""
raise NotImplementedError
class NumberField(Field):
"""accept and validate numbers
takes a type to convert values to, can be (EG) ``float``, ``int``,
``long``, or ``complex``.
"""
def __init__(self, t=float, allow_negative=True, allow_positive=True):
self.t = t
self.default = t()
self.allow_negative = allow_negative
self.allow_positive = allow_positive
def clean(self, value):
"""clean a value, converting and performing bounds checking"""
if not isinstance(value, self.t):
value = self.t(value)
if not self.allow_negative and value < 0:
raise ValueError('value was negative')
if not self.allow_positive and value > 0:
raise ValueError('values was positive')
return value
class BoolField(Field):
"""accept and validate boolean values
note that this field will just call ``bool`` on values, this may not be
your desired behavior so you might want to implement a subclass that parses
truthy/falsey values in a way specific to your application
"""
def __init__(self, default=False):
self.clean = bool
self.default = bool(default)
class TextField(Field):
"""accept and validate text.
Uses the Python implementation's appropriate unicode value (IE ``unicode``
on 2.x and ``str`` on 3.x)
"""
def __init__(self):
self.default = _compat.text_type
self.clean = _compat.text_type
class NoValidationField(Field):
"""\
doesn't validate at all, but returns the value passed (defaulting to None)
"""
def initialize(self, value=None):
return value
class DefaultField(Field):
"""\
a field which sets a default but allows anything else
"""
def __init__(self, default):
self.default = default
def initialize(self, value=None):
return value if value is not None else self.default
| """**tinyobj** implements a number of fields to do validation, etc."""
from . import _compat
class Field(object):
"""base for other fields"""
def __init__(self):
self.default = None
def initialize(self, value=()):
"""\
initialize returns a cleaned value or the default, raising ValueErrors
as necessary.
"""
if value == ():
try:
return self.default()
except TypeError:
return self.default
else:
return self.clean(value)
def clean(self, value):
"""clean a value, returning the cleaned value"""
raise NotImplementedError
class NumberField(Field):
"""accept and validate numbers
takes a type to convert values to, can be (EG) ``float``, ``int``,
``long``, or ``complex``.
"""
def __init__(self, t=float, allow_negative=True, allow_positive=True):
self.t = t
self.default = t()
self.allow_negative = allow_negative
self.allow_positive = allow_positive
def clean(self, value):
"""clean a value, converting and performing bounds checking"""
if not isinstance(value, self.t):
value = self.t(value)
if not self.allow_negative and value < 0:
raise ValueError('value was negative')
if not self.allow_positive and value > 0:
raise ValueError('values was positive')
return value
class BoolField(Field):
"""accept and validate boolean values
note that this field will just call ``bool`` on values, this may not be
your desired behavior so you might want to implement a subclass that parses
truthy/falsey values in a way specific to your application
"""
def __init__(self, default=False):
self.clean = bool
self.default = bool(default)
class TextField(Field):
"""accept and validate text.
Uses the Python implementation's appropriate unicode value (IE ``unicode``
on 2.x and ``str`` on 3.x)
"""
def __init__(self):
self.default = _compat.text_type
self.clean = _compat.text_type
class NoValidationField(Field):
"""\
doesn't validate at all, but returns the value passed (defaulting to None)
"""
def initialize(self, value=None):
return value
class DefaultField(Field):
"""\
a field which sets a default but allows anything else
"""
def __init__(self, default):
self.default = default
def initialize(self, value=None):
return value if value is not None else self.default
| mit | Python |
e15f97713aac0459dc0cd553cf36658506c47367 | Make copyright perpetual | tony/tmuxp | tmuxp/__init__.py | tmuxp/__init__.py | # -*- coding: utf-8 -*-
# flake8: NOQA
"""tmux session manager.
tmuxp
~~~~~
:copyright: Copyright 2013- Tony Narlock.
:license: MIT, see LICENSE for details
"""
from __future__ import absolute_import, unicode_literals
from . import cli, config, util
from .__about__ import (
__author__,
__copyright__,
__description__,
__email__,
__license__,
__package_name__,
__title__,
__version__,
)
| # -*- coding: utf-8 -*-
# flake8: NOQA
"""tmux session manager.
tmuxp
~~~~~
:copyright: Copyright 2013-2018 Tony Narlock.
:license: MIT, see LICENSE for details
"""
from __future__ import absolute_import, unicode_literals
from . import cli, config, util
from .__about__ import (
__author__,
__copyright__,
__description__,
__email__,
__license__,
__package_name__,
__title__,
__version__,
)
| bsd-3-clause | Python |
61255b73b93309e360f890df4057dd2bc66e3e7a | modify demo | yinkaisheng/Python-UIAutomation-for-Windows | demos/hide_window_by_hotkey.py | demos/hide_window_by_hotkey.py | #!python3
# -*- coding: utf-8 -*-
# hide windows with hotkey Ctrl+1, show the hidden windows with hotkey Ctrl+2
import os
import sys
import time
import subprocess
from typing import List
from threading import Event
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # not required after 'pip install uiautomation'
import uiautomation as auto
WindowsWantToHide = ('Warcraft III', 'Valve001', 'Counter-Strike', 'Notepad')
def hide(stopEvent: Event, handles: List[int]):
#_uiobj = auto.UIAutomationInitializerInThread()
# Hide doesn't call any COM methods, so it doesn't need an UIAutomationInitializerInThread
for handle in handles:
win = auto.ControlFromHandle(handle)
win.Hide(0)
def show(stopEvent: Event, handle: List[int]):
#_uiobj = auto.UIAutomationInitializerInThread()
# Show doesn't call any COM methods, so it doesn't need an UIAutomationInitializerInThread
for handle in handles:
win = auto.ControlFromHandle(handle)
win.Show(0)
if auto.IsIconic(handle):
win.ShowWindow(auto.SW.Restore, 0)
if __name__ == '__main__':
for i in range(2):
subprocess.Popen('notepad.exe')
time.sleep(1)
notepad = auto.WindowControl(searchDepth=1, ClassName='Notepad')
notepad.MoveWindow(i * 400, 0, 400, 300)
notepad.SendKeys('notepad {}'.format(i + 1))
auto.SetConsoleTitle('Hide: Ctrl+1, Show: Ctrl+2, Exit: Ctrl+D')
cmdWindow = auto.GetConsoleWindow()
if cmdWindow:
cmdWindow.GetTransformPattern().Move(0, 300)
auto.Logger.ColorfullyWriteLine('Press <Color=Green>Ctr+1</Color> to hide the windows\nPress <Color=Green>Ctr+2</Color> to show the windows\n')
handles = [win.NativeWindowHandle for win in auto.GetRootControl().GetChildren() if win.ClassName in WindowsWantToHide]
auto.RunByHotKey({(auto.ModifierKey.Control, auto.Keys.VK_1): lambda event: hide(event, handles),
(auto.ModifierKey.Control, auto.Keys.VK_2): lambda event: show(event, handles),
}, waitHotKeyReleased=False)
| #!python3
# -*- coding: utf-8 -*-
# hide windows with hotkey Ctrl+1, show the hidden windows with hotkey Ctrl+2
import os
import sys
import time
import subprocess
from typing import List
from threading import Event
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # not required after 'pip install uiautomation'
import uiautomation as auto
WindowsWantToHide = ('Warcraft III', 'Valve001', 'Counter-Strike', 'Notepad')
def hide(stopEvent: Event, handles: List[int]):
#_uiobj = auto.UIAutomationInitializerInThread()
# Hide doesn't call any COM methods, so it doesn't need an UIAutomationInitializerInThread
for handle in handles:
win = auto.ControlFromHandle(handle)
win.Hide(0)
def show(stopEvent: Event, handle: List[int]):
#_uiobj = auto.UIAutomationInitializerInThread()
# Show doesn't call any COM methods, so it doesn't need an UIAutomationInitializerInThread
for handle in handles:
win = auto.ControlFromHandle(handle)
win.Show(0)
if auto.IsIconic(handle):
win.ShowWindow(auto.SW.Restore, 0)
if __name__ == '__main__':
for i in range(2):
subprocess.Popen('notepad.exe')
time.sleep(1)
notepad = auto.WindowControl(searchDepth=1, ClassName='Notepad')
notepad.MoveWindow(i * 400, 0, 400, 300)
notepad.SendKeys('notepad {}'.format(i + 1))
auto.SetConsoleTitle('Hide: Ctrl+1, Show: Ctrl+2, Exit: Ctrl+D')
auto.Logger.ColorfullyWriteLine('Press <Color=Green>Ctr+1</Color> to hide the windows\nPress <Color=Green>Ctr+2</Color> to show the windows\n')
handles = [win.NativeWindowHandle for win in auto.GetRootControl().GetChildren() if win.ClassName in WindowsWantToHide]
auto.RunByHotKey({(auto.ModifierKey.Control, auto.Keys.VK_1): lambda event: hide(event, handles),
(auto.ModifierKey.Control, auto.Keys.VK_2): lambda event: show(event, handles),
})
| apache-2.0 | Python |
e908a2c62be1d937a68b5c602b8cae02633685f7 | Load at a distance content in updatadata command | uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged | csunplugged/general/management/commands/updatedata.py | csunplugged/general/management/commands/updatedata.py | """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("load_at_a_distance_data", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
| """Module for the custom Django updatedata command."""
from django.core import management
class Command(management.base.BaseCommand):
"""Required command class for the custom Django updatedata command."""
help = "Update all data from content folders for all applications"
def add_arguments(self, parser):
"""Add optional parameter to updatedata command."""
parser.add_argument(
"--lite-load",
action="store_true",
dest="lite_load",
help="Perform lite load (only load key content)",
)
def handle(self, *args, **options):
"""Automatically called when the updatedata command is given."""
lite_load = options.get("lite_load")
management.call_command("flush", interactive=False)
management.call_command("loadresources", lite_load=lite_load)
management.call_command("loadtopics", lite_load=lite_load)
management.call_command("loadgeneralpages", lite_load=lite_load)
management.call_command("loadclassicpages", lite_load=lite_load)
management.call_command("loadactivities", lite_load=lite_load)
management.call_command("rebuild_search_indexes")
| mit | Python |
cc16dc7d90457b045e8c5806a09b82e25ecc72d8 | Create move from zero | dvitme/odoomrp-wip,xpansa/odoomrp-wip,alfredoavanzosc/odoomrp-wip-1,diagramsoftware/odoomrp-wip,agaldona/odoomrp-wip-1,jobiols/odoomrp-wip,sergiocorato/odoomrp-wip,agaldona/odoomrp-wip-1,diagramsoftware/odoomrp-wip,factorlibre/odoomrp-wip,oihane/odoomrp-wip,oihane/odoomrp-wip,Eficent/odoomrp-wip,alhashash/odoomrp-wip,invitu/odoomrp-wip,InakiZabala/odoomrp-wip,Eficent/odoomrp-wip,Antiun/odoomrp-wip,windedge/odoomrp-wip,odoomrp/odoomrp-wip,jorsea/odoomrp-wip,raycarnes/odoomrp-wip,Endika/odoomrp-wip,odoomrp/odoomrp-wip,odoocn/odoomrp-wip,esthermm/odoomrp-wip,jobiols/odoomrp-wip,esthermm/odoomrp-wip,michaeljohn32/odoomrp-wip,Daniel-CA/odoomrp-wip-public,ddico/odoomrp-wip,factorlibre/odoomrp-wip,Daniel-CA/odoomrp-wip-public,maljac/odoomrp-wip,slevenhagen/odoomrp-wip-npg,sergiocorato/odoomrp-wip | stock_quant_packages_moving_wizard/models/stock.py | stock_quant_packages_moving_wizard/models/stock.py | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.one
def move_to(self, dest_location):
move_obj = self.env['stock.move']
new_move = move_obj.create({
'name': 'Move %s to %s' % (self.product_id.name,
dest_location.name),
'product_id': self.product_id.id,
'location_id': self.location_id.id,
'location_dest_id': dest_location.id,
'product_uom_qty': self.qty,
'product_uom': self.product_id.uom_id.id,
'date_expected': fields.Datetime.now(),
'date': fields.Datetime.now(),
'quant_ids': [(4, self.id)]
})
new_move.action_done()
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.one
def move_to(self, dest_location):
last_move = self._get_latest_move(self)
new_move = last_move.copy()
new_move.location_id = new_move.location_dest_id
new_move.location_dest_id = dest_location
new_move.date_expected = fields.Datetime.now()
new_move.date = new_move.date_expected
new_move.product_uom_qty = self.qty
new_move.action_done()
| agpl-3.0 | Python |
707a1bdf98a0e0ece5afe83897901643282401c4 | switch to xkcd code names | GabeIsman/securedrop,jrosco/securedrop,ehartsuyker/securedrop,jaseg/securedrop,heartsucker/securedrop,ageis/securedrop,mark-in/securedrop-prov-upstream,GabeIsman/securedrop,harlo/securedrop,jeann2013/securedrop,jrosco/securedrop,chadmiller/securedrop,ageis/securedrop,conorsch/securedrop,jaseg/securedrop,jrosco/securedrop,jrosco/securedrop,GabeIsman/securedrop,jrosco/securedrop,jaseg/securedrop,GabeIsman/securedrop,harlo/securedrop,micahflee/securedrop,kelcecil/securedrop,ehartsuyker/securedrop,jeann2013/securedrop,chadmiller/securedrop,ehartsuyker/securedrop,chadmiller/securedrop,kelcecil/securedrop,jeann2013/securedrop,heartsucker/securedrop,ageis/securedrop,kelcecil/securedrop,conorsch/securedrop,heartsucker/securedrop,harlo/securedrop,micahflee/securedrop,jeann2013/securedrop,chadmiller/securedrop,ehartsuyker/securedrop,micahflee/securedrop,kelcecil/securedrop,mark-in/securedrop-prov-upstream,pwplus/securedrop,ageis/securedrop,conorsch/securedrop,jeann2013/securedrop,ehartsuyker/securedrop,pwplus/securedrop,jrosco/securedrop,GabeIsman/securedrop,jaseg/securedrop,conorsch/securedrop,micahflee/securedrop,jeann2013/securedrop,mark-in/securedrop-prov-upstream,garrettr/securedrop,heartsucker/securedrop,chadmiller/securedrop,garrettr/securedrop,conorsch/securedrop,kelcecil/securedrop,jaseg/securedrop,garrettr/securedrop,heartsucker/securedrop,pwplus/securedrop,GabeIsman/securedrop,jaseg/securedrop,pwplus/securedrop,ehartsuyker/securedrop,pwplus/securedrop,pwplus/securedrop,mark-in/securedrop-prov-upstream,harlo/securedrop,harlo/securedrop,harlo/securedrop,garrettr/securedrop,kelcecil/securedrop,chadmiller/securedrop | crypto.py | crypto.py | import hmac, hashlib, subprocess, random
import gnupg
import config
WORDS_IN_RANDOM_ID = 2
WORD_LIST = 'wordlist'
HASH_FUNCTION = hashlib.sha256
GPG_KEY_TYPE = "RSA"
GPG_KEY_LENGTH = "4096"
class CryptoException(Exception): pass
words = file(WORD_LIST).read().split('\n')
def genrandomid():
return ' '.join(random.choice(words) for x in range(WORDS_IN_RANDOM_ID))
def shash(s):
"""
>>> shash('Hello, world!')
'98015b0fbf815a630cbcda94b809d207490d7cc2c5c02cb33a242acfd5b73cc1'
"""
return hmac.HMAC(config.HMAC_SECRET, s, HASH_FUNCTION).hexdigest()
gpg = gnupg.GPG(gnupghome=config.GPG_KEY_DIR)
def genkeypair(name, secret):
"""
>>> if not gpg.list_keys(shash('randomid')):
... genkeypair(shash('randomid'), 'randomid').type
... else:
... u'P'
u'P'
"""
return gpg.gen_key(gpg.gen_key_input(
key_type=GPG_KEY_TYPE, key_length=GPG_KEY_LENGTH,
passphrase=secret,
name_email="%s@wireleaks.example.com" % name
))
def getkey(name):
for key in gpg.list_keys():
for uid in key['uids']:
if ' <%s@' % name in uid: return key['fingerprint']
return None
def encrypt(fp, s, output=None):
"""
>>> encrypt(shash('randomid'), "Goodbye, cruel world!")[:75]
'-----BEGIN PGP MESSAGE-----\\nVersion: GnuPG v1.4.9 (Darwin)\\n\\nhQIMA3rf0hDNFTT'
"""
fp = fp.replace(' ', '')
if isinstance(s, unicode):
s = s.encode('utf8')
if isinstance(s, str):
out = gpg.encrypt(s, [fp], output=output)
else:
out = gpg.encrypt_file(s, [fp], output=output)
if out.ok:
return out.data
else:
raise CryptoException(out.stderr)
def decrypt(name, secret, s):
"""
>>> decrypt(shash('randomid'), 'randomid',
... encrypt(shash('randomid'), 'Goodbye, cruel world!')
... )
'Goodbye, cruel world!'
"""
return gpg.decrypt(s, passphrase=secret).data
def secureunlink(fn):
return subprocess.check_call(['srm', fn])
if __name__ == "__main__":
import doctest
doctest.testmod()
| import hmac, hashlib, subprocess, random
import gnupg
import config
BITS_IN_RANDOM_ID = 256
HASH_FUNCTION = hashlib.sha256
GPG_KEY_TYPE = "RSA"
GPG_KEY_LENGTH = "4096"
class CryptoException(Exception): pass
def genrandomid():
return hex(random.getrandbits(BITS_IN_RANDOM_ID))[2:-1]
def shash(s):
"""
>>> shash('Hello, world!')
'98015b0fbf815a630cbcda94b809d207490d7cc2c5c02cb33a242acfd5b73cc1'
"""
return hmac.HMAC(config.HMAC_SECRET, s, HASH_FUNCTION).hexdigest()
gpg = gnupg.GPG(gnupghome=config.GPG_KEY_DIR)
def genkeypair(name, secret):
"""
>>> if not gpg.list_keys(shash('randomid')):
... genkeypair(shash('randomid'), 'randomid').type
... else:
... u'P'
u'P'
"""
return gpg.gen_key(gpg.gen_key_input(
key_type=GPG_KEY_TYPE, key_length=GPG_KEY_LENGTH,
passphrase=secret,
name_email="%s@wireleaks.example.com" % name
))
def getkey(name):
for key in gpg.list_keys():
for uid in key['uids']:
if ' <%s@' % name in uid: return key['fingerprint']
return None
def encrypt(fp, s, output=None):
"""
>>> encrypt(shash('randomid'), "Goodbye, cruel world!")[:75]
'-----BEGIN PGP MESSAGE-----\\nVersion: GnuPG v1.4.9 (Darwin)\\n\\nhQIMA3rf0hDNFTT'
"""
fp = fp.replace(' ', '')
if isinstance(s, unicode):
s = s.encode('utf8')
if isinstance(s, str):
out = gpg.encrypt(s, [fp], output=output)
else:
out = gpg.encrypt_file(s, [fp], output=output)
if out.ok:
return out.data
else:
raise CryptoException(out.stderr)
def decrypt(name, secret, s):
"""
>>> decrypt(shash('randomid'), 'randomid',
... encrypt(shash('randomid'), 'Goodbye, cruel world!')
... )
'Goodbye, cruel world!'
"""
return gpg.decrypt(s, passphrase=secret).data
def secureunlink(fn):
return subprocess.check_call(['srm', fn])
if __name__ == "__main__":
import doctest
doctest.testmod()
| agpl-3.0 | Python |
fd9dc5337587831b16e51598295c2e659ee4c824 | Fix test-url | GetStream/stream-django,GetStream/stream-django | stream_django/tests/test_app/tests/test_manager.py | stream_django/tests/test_app/tests/test_manager.py | import httpretty
import re
from stream_django.feed_manager import feed_manager
from stream_django.tests import Tweet
import unittest
api_url = re.compile(r'(us-east-api.)?stream-io-api.com(/api)?/*.')
class ManagerTestCase(unittest.TestCase):
def setUp(self):
feed_manager.enable_model_tracking()
def test_get_user_feed(self):
feed = feed_manager.get_user_feed(42)
self.assertEqual(feed.id, 'user:42')
def test_get_user_feed_by_feed_type(self):
feed = feed_manager.get_user_feed(42, 'personal')
self.assertEqual(feed.id, 'personal:42')
def test_get_notification_feed(self):
feed = feed_manager.get_notification_feed(42)
self.assertEqual(feed.id, 'notification:42')
def test_get_actor_feed(self):
tweet = Tweet()
tweet.actor = 42
feed = feed_manager.get_actor_feed(tweet)
self.assertEqual(feed, 'user')
@httpretty.activate
def test_follow_user(self):
httpretty.register_uri(httpretty.POST, api_url,
body='{}', status=200,
content_type='application/json')
feed_manager.follow_user(1, 2)
last_req = httpretty.last_request()
self.assertTrue(last_req.path.split('?')[0].endswith('1/follows/'))
@httpretty.activate
def test_unfollow_user(self):
httpretty.register_uri(httpretty.DELETE, api_url,
body='{}', status=200,
content_type='application/json')
feed_manager.unfollow_user(1, 2)
last_req = httpretty.last_request()
self.assertEqual(last_req.method, 'DELETE')
self.assertTrue(last_req.path.split('?')[0].endswith('1/follows/user:2/'))
def test_get_feed(self):
feed = feed_manager.get_feed('timeline', 42)
self.assertEqual(feed.id, 'timeline:42')
def test_get_news_feeds(self):
feeds = feed_manager.get_news_feeds(42)
self.assertIn('timeline', feeds)
self.assertIn('timeline_aggregated', feeds)
self.assertEqual(feeds['timeline'].id, 'timeline:42')
self.assertEqual(feeds['timeline_aggregated'].id, 'timeline_aggregated:42')
| import httpretty
import re
from stream_django.feed_manager import feed_manager
from stream_django.tests import Tweet
import unittest
api_url = re.compile(r'(us-east-api.)?stream-io-api.com/*.')
class ManagerTestCase(unittest.TestCase):
def setUp(self):
feed_manager.enable_model_tracking()
def test_get_user_feed(self):
feed = feed_manager.get_user_feed(42)
self.assertEqual(feed.id, 'user:42')
def test_get_user_feed_by_feed_type(self):
feed = feed_manager.get_user_feed(42, 'personal')
self.assertEqual(feed.id, 'personal:42')
def test_get_notification_feed(self):
feed = feed_manager.get_notification_feed(42)
self.assertEqual(feed.id, 'notification:42')
def test_get_actor_feed(self):
tweet = Tweet()
tweet.actor = 42
feed = feed_manager.get_actor_feed(tweet)
self.assertEqual(feed, 'user')
@httpretty.activate
def test_follow_user(self):
httpretty.register_uri(httpretty.POST, api_url,
body='{}', status=200,
content_type='application/json')
feed_manager.follow_user(1, 2)
last_req = httpretty.last_request()
self.assertTrue(last_req.path.split('?')[0].endswith('1/follows/'))
@httpretty.activate
def test_unfollow_user(self):
httpretty.register_uri(httpretty.DELETE, api_url,
body='{}', status=200,
content_type='application/json')
feed_manager.unfollow_user(1, 2)
last_req = httpretty.last_request()
self.assertEqual(last_req.method, 'DELETE')
self.assertTrue(last_req.path.split('?')[0].endswith('1/follows/user:2/'))
def test_get_feed(self):
feed = feed_manager.get_feed('timeline', 42)
self.assertEqual(feed.id, 'timeline:42')
def test_get_news_feeds(self):
feeds = feed_manager.get_news_feeds(42)
self.assertIn('timeline', feeds)
self.assertIn('timeline_aggregated', feeds)
self.assertEqual(feeds['timeline'].id, 'timeline:42')
self.assertEqual(feeds['timeline_aggregated'].id, 'timeline_aggregated:42')
| bsd-3-clause | Python |
51e882394a73493aae873671b0287e6f4a873884 | Add an example | jochasinga/pluto | examples/led.py | examples/led.py | '''
Blink
Turns on an on-board LED on for one second, and then off.
Most Arduinos have an on-board LED you can control. On the Uno and Leonardo,
it is attached to digital pin 13. If you're unsure what pin the on-board LED
is connected to on your Arduino model, check the doc at http://arduino.cc
This example code is in the public domain.
modified July 12, 2015
by Joe Chasinga
'''
#!/usr/bin/env python
import sys, os
sys.path.append('../pluto')
from pluto import *
import time
def main():
# Invoke a general board
board = Board()
board.led(13).on()
time.sleep(1)
# the board remembers the on-board led
board.led.off()
if __name__ == '__main__':
main()
| '''
LED
Turns on and off an on-board LED
Pluto has collect some number of Arduino boards with on-board LED attached to pin 13. For these boards, Pluto can recognize automatically through the use of the board's class. If unsure, consult the doc at http://arduino.cc and use general Board class, then supply the pin number as the argument to led callable.
This example code is in the public domain.
modified June 22, 2015
by Joe Chasinga
'''
#!/usr/bin/env python
import sys, os
sys.path.append('../pluto')
from pluto import *
import time
def main():
board = Board()
board.led(13).on()
time.sleep(5)
board.led(13).off()
if __name__ == '__main__':
main()
| mit | Python |
047c95e255d6aac31651e3a95e2045de0b4888e2 | Make a real json response. | talavis/kimenu | flask_app.py | flask_app.py | from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return jsonify(main.list_restaurants())
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return jsonify(data)
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| import json
from flask import abort
from flask import Flask
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return json.dumps(main.list_restaurants())
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return json.dumps(data)
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| bsd-3-clause | Python |
4a07285b55afebd30310af3445795490ba06d56b | bump version to 0.0.9 | byteweaver/django-posts,byteweaver/django-posts | posts/__init__.py | posts/__init__.py | __version__ = '0.0.9'
| __version__ = '0.0.8'
| bsd-3-clause | Python |
5e3fb540186c9c01105809f660491a60a8c907d6 | Fix choropleth_states.py example | python-visualization/folium,QuLogic/folium,QuLogic/folium,ocefpaf/folium,shankari/folium,shankari/folium,ocefpaf/folium,QuLogic/folium,shankari/folium,python-visualization/folium | examples/choropleth_states.py | examples/choropleth_states.py | '''
Choropleth map of US states
'''
import folium
import pandas as pd
state_geo = r'us-states.json'
state_unemployment = r'US_Unemployment_Oct2012.csv'
state_data = pd.read_csv(state_unemployment)
# Let Folium determine the scale.
states = folium.Map(location=[48, -102], zoom_start=3)
states.choropleth(geo_path=state_geo, data=state_data,
columns=['State', 'Unemployment'],
key_on='feature.id',
fill_color='YlGn', fill_opacity=0.7, line_opacity=0.2,
legend_name='Unemployment Rate (%)')
states.save(outfile='us_state_map.html')
# Let's define our own scale and change the line opacity.
states2 = folium.Map(location=[48, -102], zoom_start=3)
states2.choropleth(geo_path=state_geo, data=state_data,
columns=['State', 'Unemployment'],
threshold_scale=[5, 6, 7, 8, 9, 10],
key_on='feature.id',
fill_color='BuPu', fill_opacity=0.7, line_opacity=0.5,
legend_name='Unemployment Rate (%)',
reset=True)
states2.save(outfile='us_state_map_2.html')
| '''
Choropleth map of US states
'''
import folium
import pandas as pd
state_geo = r'us-states.json'
state_unemployment = r'US_Unemployment_Oct2012.csv'
state_data = pd.read_csv(state_unemployment)
# Let Folium determine the scale.
states = folium.Map(location=[48, -102], zoom_start=3)
states.geo_json(geo_path=state_geo, data=state_data,
columns=['State', 'Unemployment'],
key_on='feature.id',
fill_color='YlGn', fill_opacity=0.7, line_opacity=0.2,
legend_name='Unemployment Rate (%)')
states.save(outfile='us_state_map.html')
# Let's define our own scale and change the line opacity.
states2 = folium.Map(location=[48, -102], zoom_start=3)
states2.geo_json(geo_path=state_geo, data=state_data,
columns=['State', 'Unemployment'],
threshold_scale=[5, 6, 7, 8, 9, 10],
key_on='feature.id',
fill_color='BuPu', fill_opacity=0.7, line_opacity=0.5,
legend_name='Unemployment Rate (%)',
reset=True)
states2.save(outfile='us_state_map_2.html')
| mit | Python |
84c9076a6bccfa4556be262d5bd5405a30d78268 | Revise to clarified comments | bowen0701/algorithms_data_structures | lc0448_find_all_numbers_disappeared_in_an_array.py | lc0448_find_all_numbers_disappeared_in_an_array.py | """Leetcode 448. Find All Numbers Disappeared in an Array
Easy
URL: https://leetcode.com/problems/find-all-numbers-disappeared-in-an-array/
Given an array of integers where 1 <= a[i] <= n (n = size of array),
some elements appear twice and others appear once.
Find all the elements of [1, n] inclusive that do not appear in this array.
Could you do it without extra space and in O(n) runtime?
You may assume the returned list does not count as extra space.
Example:
Input:
[4,3,2,7,8,2,3,1]
Output:
[5,6]
"""
class SolutionSetDiff(object):
def findDisappearedNumbers(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
if not nums:
return []
# Create the complete set of 1 to n.
n = len(nums)
complete_set = set([i for i in range(1, n + 1)])
# Use a set to collect distinct numbers in nums.
nums_set = set()
for num in nums:
nums_set.add(num)
# Compute difference set.
diff_set = complete_set - nums_set
return list(diff_set)
class SolutionNegMark(object):
def findDisappearedNumbers(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(1).
"""
if not nums:
return []
for num in nums:
# Use idx=num-1 to mark appeared by updating num[idx]=-num[idx].
idx = abs(num) - 1
nums[idx] = -abs(nums[idx])
print num, idx, nums
return [i + 1 for i in range(len(nums)) if nums[i] > 0]
def main():
# Output: [5,6]
nums = [4,3,2,7,8,2,3,1]
print nums
print SolutionSetDiff().findDisappearedNumbers(nums)
print SolutionNegMark().findDisappearedNumbers(nums)
if __name__ == '__main__':
main()
| """Leetcode 448. Find All Numbers Disappeared in an Array
Easy
URL: https://leetcode.com/problems/find-all-numbers-disappeared-in-an-array/
Given an array of integers where 1 <= a[i] <= n (n = size of array),
some elements appear twice and others appear once.
Find all the elements of [1, n] inclusive that do not appear in this array.
Could you do it without extra space and in O(n) runtime?
You may assume the returned list does not count as extra space.
Example:
Input:
[4,3,2,7,8,2,3,1]
Output:
[5,6]
"""
class SolutionSetDiff(object):
def findDisappearedNumbers(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
if not nums:
return []
# Create the complete set of 1 to n.
n = len(nums)
complete_set = set([i for i in range(1, n + 1)])
# Use a set to collect distinct numbers in nums.
nums_set = set()
for num in nums:
nums_set.add(num)
# Compute difference set.
diff_set = complete_set - nums_set
return list(diff_set)
class SolutionNegMark(object):
def findDisappearedNumbers(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(1).
"""
if not nums:
return []
for num in nums:
# At idx=num-1, if num is appeared, mark nums[idx] by -num[idx].
idx = abs(num) - 1
nums[idx] = -abs(nums[idx])
print num, idx, nums
return [i + 1 for i in range(len(nums)) if nums[i] > 0]
def main():
# Output: [5,6]
nums = [4,3,2,7,8,2,3,1]
print nums
print SolutionSetDiff().findDisappearedNumbers(nums)
print SolutionNegMark().findDisappearedNumbers(nums)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
8b51e515062627f344a6a6241cf5e18a103edcbe | use default address | dashford/sentinel | src/Devices/Sensors/LTR559.py | src/Devices/Sensors/LTR559.py | import logging
import ltr559
from blinker import signal
class LTR559:
def __init__(self, address):
logging.info('Initialising LTR559 sensor with address {}'.format(address))
self._sensor = ltr559.LTR559()
def get_lux(self, mqtt_details):
"""
Return measured lux from the sensor.
:param dict mqtt_details: Relevant details for publishing to the MQTT broker
:return:
"""
logging.debug('Measuring lux')
lux = self._sensor.get_lux()
logging.info('Broadcasting lux: {}'.format(lux))
lux_signal = signal('lux')
lux_signal.send(self, lux=lux, mqtt_topic=mqtt_details['topic'])
| import logging
import ltr559
from blinker import signal
class LTR559:
def __init__(self, address):
logging.info('Initialising LTR559 sensor with address {}'.format(address))
self._sensor = ltr559.LTR559(i2c_dev=address)
def get_lux(self, mqtt_details):
"""
Return measured lux from the sensor.
:param dict mqtt_details: Relevant details for publishing to the MQTT broker
:return:
"""
logging.debug('Measuring lux')
lux = self._sensor.get_lux()
logging.info('Broadcasting lux: {}'.format(lux))
lux_signal = signal('lux')
lux_signal.send(self, lux=lux, mqtt_topic=mqtt_details['topic'])
| mit | Python |
09560bcf4ded4f9beffefbfc45e40a795d6f3883 | Create a new version | emuus/hammr,segalaj/hammr,segalaj/hammr,usharesoft/hammr,MaxTakahashi/hammr,emuus/hammr,usharesoft/hammr,MaxTakahashi/hammr | src/hammr/utils/constants.py | src/hammr/utils/constants.py | # To change this template, choose Tools | Templates
# and open the template in the editor.
__author__="UShareSoft"
import os
import tempfile
VERSION="0.2.4"
TMP_WORKING_DIR=tempfile.gettempdir() + os.sep + "hammr-" + str(os.getpid())
HTTP_TIMEOUT=10
TEMPLATE_JSON_FILE_NAME="template.json"
TEMPLATE_JSON_NEW_FILE_NAME="template.json"
FOLDER_BUNDLES = "bundles";
FOLDER_CONFIGS = "config";
FOLDER_DEPLOYMENT_SCENARIO = "deploymentScenario";
FOLDER_LOGO = "logo";
URI_SCAN_BINARY="/uforge-scan.bin"
SCAN_BINARY_NAME="uforge-scan.bin"
QUOTAS_SCAN="scan"
QUOTAS_TEMPLATE="appliance"
QUOTAS_GENERATION="generation"
QUOTAS_DISK_USAGE="diskusage" | # To change this template, choose Tools | Templates
# and open the template in the editor.
__author__="UShareSoft"
import os
import tempfile
VERSION="0.2.3"
TMP_WORKING_DIR=tempfile.gettempdir() + os.sep + "hammr-" + str(os.getpid())
HTTP_TIMEOUT=10
TEMPLATE_JSON_FILE_NAME="template.json"
TEMPLATE_JSON_NEW_FILE_NAME="template.json"
FOLDER_BUNDLES = "bundles";
FOLDER_CONFIGS = "config";
FOLDER_DEPLOYMENT_SCENARIO = "deploymentScenario";
FOLDER_LOGO = "logo";
URI_SCAN_BINARY="/uforge-scan.bin"
SCAN_BINARY_NAME="uforge-scan.bin"
QUOTAS_SCAN="scan"
QUOTAS_TEMPLATE="appliance"
QUOTAS_GENERATION="generation"
QUOTAS_DISK_USAGE="diskusage" | apache-2.0 | Python |
c27ca7239280ec9f2e68c5778db1668db6e0d0c8 | Fix import error | hack4impact/flask-base,hack4impact/flask-base,hack4impact/flask-base | app/main/errors.py | app/main/errors.py | from flask import render_template
from app.main.views import main
@main.app_errorhandler(403)
def forbidden(_):
return render_template('errors/403.html'), 403
@main.app_errorhandler(404)
def page_not_found(_):
return render_template('errors/404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(_):
return render_template('errors/500.html'), 500
| from flask import render_template
from app.main import main
@main.app_errorhandler(403)
def forbidden(_):
return render_template('errors/403.html'), 403
@main.app_errorhandler(404)
def page_not_found(_):
return render_template('errors/404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(_):
return render_template('errors/500.html'), 500
| mit | Python |
8217aa21db2b29389a9b4bd110158f41b2c11a0b | write header for new csv | hatnote/montage,hatnote/montage,hatnote/montage | tools/trim_csv.py | tools/trim_csv.py |
import os.path
import argparse
from unicodecsv import DictReader, DictWriter
def main():
prs = argparse.ArgumentParser()
prs.add_argument('--count', type=int, default=100)
prs.add_argument('file', type=file)
args = prs.parse_args()
count = args.count
assert count > 0
path = os.path.abspath(args.file.name)
root, ext = os.path.splitext(path)
new_path = '%s_trimmed_%s%s' % (root, count, ext)
reader = DictReader(open(path))
new_entries = []
for i in range(count):
new_entries.append(next(reader))
with open(new_path, 'w') as new_file:
writer = DictWriter(new_file, reader.unicode_fieldnames)
writer.writeheader()
writer.writerows(new_entries)
print open(new_path).read()
if __name__ == '__main__':
main()
|
import os.path
import argparse
from unicodecsv import DictReader, DictWriter
def main():
prs = argparse.ArgumentParser()
prs.add_argument('--count', type=int, default=100)
prs.add_argument('file', type=file)
args = prs.parse_args()
count = args.count
assert count > 0
path = os.path.abspath(args.file.name)
root, ext = os.path.splitext(path)
new_path = '%s_trimmed_%s%s' % (root, count, ext)
reader = DictReader(open(path))
new_entries = []
for i in range(count):
new_entries.append(next(reader))
with open(new_path, 'w') as new_file:
writer = DictWriter(new_file, reader.unicode_fieldnames)
writer.writerows(new_entries)
print open(new_path).read()
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
51655f84e4b8a6cfafa4e62421cdd6b4d6fd48e4 | rework python3-workers for parallel execution of check coroutines | telminov/django-park-keeper | parkkeeper/task_generator.py | parkkeeper/task_generator.py | # coding: utf-8
import multiprocessing
from time import sleep
import zmq
from django.conf import settings
from django.utils.timezone import now
from parkkeeper.event import emit_event
from parkkeeper import models
from parkworker.const import MONIT_TASK_EVENT
class TaskGenerator(multiprocessing.Process):
context = None
socket_pool = None
def run(self):
self.cancel_not_started_tasks()
self.context = zmq.Context()
self._create_pool()
print('TaskGenerator started.')
try:
while True:
tasks = models.MonitSchedule.create_tasks()
for task in tasks:
task_json = task.to_json()
# task created event
emit_event(MONIT_TASK_EVENT, task_json)
# send monit tasks for workers
monit = models.Monit.objects.get(name=task.monit_name)
socket = self._get_socket(monit.worker_type)
# print('Send task', task.monit_name, 'on port', monit.worker_type.port)
socket.send_string(task_json)
sleep(1)
finally:
for socket in self.socket_pool.values():
socket.close()
@staticmethod
def cancel_not_started_tasks():
models.MonitTask.objects\
.filter(start_dt=None)\
.update(cancel_dt=now(), cancel_reason='restart monit scheduler')
def _create_pool(self):
self.socket_pool = {}
for worker_type in models.WorkerType.objects.all():
self._create_socket(worker_type)
def _get_socket(self, worker_type: models.WorkerType):
if worker_type.name in self.socket_pool:
return self.socket_pool[worker_type.name]
else:
return self._create_socket(worker_type)
def _create_socket(self, worker_type: models.WorkerType):
socket = self.context.socket(zmq.PUSH)
socket.bind("tcp://*:%s" % worker_type.port)
self.socket_pool[worker_type.name] = socket
return socket
| # coding: utf-8
import multiprocessing
from time import sleep
import zmq
from django.conf import settings
from django.utils.timezone import now
from parkkeeper.event import emit_event
from parkkeeper import models
from parkworker.const import MONIT_TASK_EVENT
class TaskGenerator(multiprocessing.Process):
context = None
socket_pool = None
def run(self):
self.cancel_not_started_tasks()
self.context = zmq.Context()
self._create_pool()
print('TaskGenerator started.')
try:
while True:
tasks = models.MonitSchedule.create_tasks()
for task in tasks:
task_json = task.to_json()
# task created event
emit_event(MONIT_TASK_EVENT, task_json)
# send monit tasks for workers
monit = models.Monit.objects.get(name=task.monit_name)
socket = self._get_socket(monit.worker_type)
# print('Send task', task.monit_name, 'on port', monit.worker_type.port)
socket.send_json(task_json)
sleep(1)
finally:
for socket in self.socket_pool.values():
socket.close()
@staticmethod
def cancel_not_started_tasks():
models.MonitTask.objects\
.filter(start_dt=None)\
.update(cancel_dt=now(), cancel_reason='restart monit scheduler')
def _create_pool(self):
self.socket_pool = {}
for worker_type in models.WorkerType.objects.all():
self._create_socket(worker_type)
def _get_socket(self, worker_type: models.WorkerType):
if worker_type.name in self.socket_pool:
return self.socket_pool[worker_type.name]
else:
return self._create_socket(worker_type)
def _create_socket(self, worker_type: models.WorkerType):
socket = self.context.socket(zmq.PUSH)
socket.bind("tcp://*:%s" % worker_type.port)
self.socket_pool[worker_type.name] = socket
return socket
| mit | Python |
4c0a9db0f635e304650ff1e572f3e6766ae61434 | remove log | banbanchs/pan-baidu-download,kelwang/pan-baidu-download | panbaidu.py | panbaidu.py | #!/usr/bin/env python2
#!coding=utf-8
import sys
import os
import re
import urllib2
def getDownloadPage(url):
header = {
'User-Agent':'Mozilla/5.0 (X11; Linux x86_64)\
AppleWebKit/537.36 (KHTML, like Gecko)\
Chrome/28.0.1500.95 Safari/537.36'
}
request = urllib2.Request(url = url, headers = header)
data = urllib2.urlopen(request).read()
script_pattern = re.compile(r'<script type="text/javascript">(.+?)</script>', re.DOTALL)
script = re.findall(script_pattern, data)[2]
return script
def getFileName(data):
pattern = re.compile(r'var\sserver_filename="(.+?)"')
filename = re.search(pattern, data).group(1)
return filename
def getDownloadLink(data):
pattern = re.compile(r'dlink\\.+?(http.+?)\\"')
link = re.search(pattern, data).group(1).replace('\\', '')
return link
def download(link, filename):
cmd = "aria2c -c -o '%s' -s5 -x5 '%s'" % (filename, link)
os.system(cmd)
def main(urls):
for url in urls:
script = getDownloadPage(url)
filename = getFileName(script)
link = getDownloadLink(script)
download(link, filename)
print "%s complete\n" % filename
sys.exit()
if __name__ == '__main__':
if len(sys.argv) < 2:
print "No action specified."
sys.exit()
if sys.argv[1].startswith('--'):
option = sys.argv[1][2:]
if option == 'version':
print 'V0.3'
elif option == 'help':
print '''\
Default aria2c -c -s5 -x5
--version: Print the version
--help : Display this help'''
else:
print 'Unknow option'
sys.exit()
else:
main(sys.argv[1:])
| #!/usr/bin/env python2
#!coding=utf-8
import sys
import os
import re
import urllib2
import json
import pdb
def getDownloadPage(url):
header = {
'User-Agent':'Mozilla/5.0 (X11; Linux x86_64)\
AppleWebKit/537.36 (KHTML, like Gecko)\
Chrome/28.0.1500.95 Safari/537.36'
}
request = urllib2.Request(url = url, headers = header)
data = urllib2.urlopen(request).read()
script_pattern = re.compile(r'<script type="text/javascript">(.+?)</script>', re.DOTALL)
script = re.findall(script_pattern, data)[2]
pdb.set_trace()
return script
def getFileName(data):
pattern = re.compile(r'var\sserver_filename="(.+?)"')
filename = re.search(pattern, data).group(1)
pdb.set_trace()
return filename
def getDownloadLink(data):
pattern = re.compile(r'dlink\\.+?(http.+?)\\"')
link = re.search(pattern, data).group(1).replace('\\', '')
return link
def download(link, filename):
cmd = "aria2c -c -o '%s' -s5 -x5 '%s'" % (filename, link)
os.system(cmd)
def main(urls):
for url in urls:
script = getDownloadPage(url)
filename = getFileName(script)
link = getDownloadLink(script)
download(link, filename)
print "%s complete" % filename
sys.exit()
if __name__ == '__main__':
if len(sys.argv) < 2:
print "No action specified."
sys.exit()
if sys.argv[1].startswith('--'):
option = sys.argv[1][2:]
if option == 'version':
print 'V0.2'
elif option == 'help':
print '''\
Default aria2c -c -s5 -x5
--version: Print the version
--help : Display this help'''
else:
print 'Unknow option'
sys.exit()
else:
main(sys.argv[1:])
| mit | Python |
26bc3a761d5b513513773700b2167a0fd5b58102 | Add a note about [#76]. | peplin/astral | astral/api/handlers/ticket.py | astral/api/handlers/ticket.py | from astral.api.handlers.base import BaseHandler
from astral.models import Ticket, Node, Stream, session
import logging
log = logging.getLogger(__name__)
class TicketHandler(BaseHandler):
def _load_ticket(self, stream_slug, destination_uuid):
stream = Stream.get_by(slug=stream_slug)
if not destination_uuid:
return Ticket.get_by(stream=stream, destination=Node.me())
node = Node.get_by(uuid=destination_uuid)
return Ticket.query.filter_by(stream=stream, destination=node).first()
def delete(self, stream_slug, destination_uuid=None):
"""Stop forwarding the stream to the requesting node."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
ticket.delete()
# TODO if we were the destination, need to find another ticket
# TODO if we were forwarding this to someone else, need to propagate the
# delete to them if we can't find another
def get(self, stream_slug, destination_uuid=None):
# TODO could require target nodes to hit this every so often as a
# heartbeat
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
# In case we lost the tunnel, just make sure it exists
ticket.queue_tunnel_creation()
# TODO this is unideal, but we need to get the new port if it
# changed. combination of sleep and db flush seems to do it somewhat
# reliably, but it's still a race condition.
import time
time.sleep(1)
session.commit()
ticket = self._load_ticket(stream_slug, destination_uuid)
self.write({'ticket': ticket.to_dict()})
return ticket
def put(self, stream_slug, destination_uuid=None):
"""Edit tickets, most likely just confirming them."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
ticket.confirmed = self.get_json_argument('confirmed')
if ticket.confirmed:
log.info("Confirmed %s", ticket)
| from astral.api.handlers.base import BaseHandler
from astral.models import Ticket, Node, Stream, session
import logging
log = logging.getLogger(__name__)
class TicketHandler(BaseHandler):
def _load_ticket(self, stream_slug, destination_uuid):
stream = Stream.get_by(slug=stream_slug)
if not destination_uuid:
return Ticket.get_by(stream=stream, destination=Node.me())
node = Node.get_by(uuid=destination_uuid)
return Ticket.query.filter_by(stream=stream, destination=node).first()
def delete(self, stream_slug, destination_uuid=None):
"""Stop forwarding the stream to the requesting node."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
ticket.delete()
def get(self, stream_slug, destination_uuid=None):
# TODO could require target nodes to hit this every so often as a
# heartbeat
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
# In case we lost the tunnel, just make sure it exists
ticket.queue_tunnel_creation()
# TODO this is unideal, but we need to get the new port if it
# changed. combination of sleep and db flush seems to do it somewhat
# reliably, but it's still a race condition.
import time
time.sleep(1)
session.commit()
ticket = self._load_ticket(stream_slug, destination_uuid)
self.write({'ticket': ticket.to_dict()})
return ticket
def put(self, stream_slug, destination_uuid=None):
"""Edit tickets, most likely just confirming them."""
ticket = self._load_ticket(stream_slug, destination_uuid)
if ticket:
ticket.confirmed = self.get_json_argument('confirmed')
if ticket.confirmed:
log.info("Confirmed %s", ticket)
| mit | Python |
36c891f4f11a7780a444894042cfd603c8fd4300 | Update __init__.py | numenta/nupic.research,numenta/nupic.research | src/nupic/research/frameworks/htm/__init__.py | src/nupic/research/frameworks/htm/__init__.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .spatial_pooler import SpatialPooler
from .temporal_memory import (
TemporalMemoryApicalTiebreak,
SequenceMemoryApicalTiebreak,
# PairMemoryApicalTiebreak
)
| # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .spatial_pooler import SpatialPooler
#from temporal_memory.temporal_memory_apical_tiebreak import TemporalMemoryApicalTiebreak
#from temporal_memory.sequence_memory_apical_tiebreak import SequenceMemoryApicalTiebreak
#from temporal_memory.pair_memory_apical_tiebreak import PairMemoryApicalTiebreak
from .temporal_memory import (
TemporalMemoryApicalTiebreak,
SequenceMemoryApicalTiebreak,
# PairMemoryApicalTiebreak
)
| agpl-3.0 | Python |
7958acd24d3bc3c6f91ab6ef946dd7750d119569 | Reduce order to 4 from 5. | memmett/PyWENO,memmett/PyWENO,memmett/PyWENO | examples/step.py | examples/step.py | """PyWENO smooth reconstruction example."""
import math
import numpy
import pyweno.grid
import pyweno.weno
# explicitly define the function f that we will reconstruct ...
def f(x):
if x <= 0.0:
return 1.0
return 0.0
# build weno reconstructor
x = numpy.linspace(-1.0, 1.0, 21)
grid = pyweno.grid.Grid(x)
weno = pyweno.weno.WENO(order=4, grid=grid)
weno.precompute_reconstruction('left')
weno.precompute_reconstruction('right')
# average f
f_avg = grid.average(f)
# allocate arrays for reconstruction
f_left = numpy.zeros(grid.N)
f_right = numpy.zeros(grid.N)
# compute smoothness indicators
weno.smoothness(f_avg)
# reconstruct!
weno.reconstruct(f_avg, 'left', f_left)
weno.reconstruct(f_avg, 'right', f_right)
# plot results
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.subplot(2,1,1)
x = numpy.linspace(-1.0, 1.0, 1001);
uf = numpy.frompyfunc(f, 1, 1)
plt.plot(x, uf(x), '-k')
plt.plot(grid.x[:-1], f_left, 'or')
plt.plot(grid.x[1:], f_right, 'ob')
plt.title('PyWENO reconstruction and smoothness indicators')
plt.ylabel('f')
plt.xlabel('x')
plt.legend(['actual', 'left', 'right'])
plt.subplot(2,1,2)
plt.plot(grid.centres(), weno.sigma[:,0], 'o')
plt.plot(grid.centres(), weno.sigma[:,1], 'o')
plt.plot(grid.centres(), weno.sigma[:,2], 'o')
plt.plot(grid.centres(), weno.sigma[:,3], 'o')
plt.ylabel('sigma')
plt.xlabel('x')
plt.legend(['r=0', 'r=1', 'r=2', 'r=3'])
plt.savefig('step.png', format='png')
| """PyWENO smooth reconstruction example."""
import math
import numpy
import pyweno.grid
import pyweno.weno
# explicitly define the function f that we will reconstruct ...
def f(x):
if x <= 0.0:
return 1.0
return 0.0
# build weno reconstructor
x = numpy.linspace(-1.0, 1.0, 21)
grid = pyweno.grid.Grid(x)
weno = pyweno.weno.WENO(order=5, grid=grid)
weno.precompute_reconstruction('left')
weno.precompute_reconstruction('right')
# average f
f_avg = grid.average(f)
# allocate arrays for reconstruction
f_left = numpy.zeros(grid.N)
f_right = numpy.zeros(grid.N)
# compute smoothness indicators
weno.smoothness(f_avg)
# reconstruct!
weno.reconstruct(f_avg, 'left', f_left)
weno.reconstruct(f_avg, 'right', f_right)
# plot results
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.subplot(2,1,1)
x = numpy.linspace(-1.0, 1.0, 1001);
uf = numpy.frompyfunc(f, 1, 1)
plt.plot(x, uf(x), '-k')
plt.plot(grid.x[:-1], f_left, 'or')
plt.plot(grid.x[1:], f_right, 'ob')
plt.title('PyWENO reconstruction and smoothness indicators')
plt.ylabel('f')
plt.xlabel('x')
plt.legend(['actual', 'left', 'right'])
plt.subplot(2,1,2)
plt.plot(grid.centres(), weno.sigma[:,0], 'o')
plt.plot(grid.centres(), weno.sigma[:,1], 'o')
plt.plot(grid.centres(), weno.sigma[:,2], 'o')
plt.plot(grid.centres(), weno.sigma[:,3], 'o')
plt.plot(grid.centres(), weno.sigma[:,4], 'o')
plt.ylabel('sigma')
plt.xlabel('x')
plt.legend(['r=0', 'r=1', 'r=2', 'r=3', 'r=4'])
plt.savefig('step.png', format='png')
| bsd-3-clause | Python |
69ba9731261f79ee6ce8d44a2def2bc0e5d2809d | Set trunk RELEASE_TAG back to None after creation of 0.0a20081123rc release candidate branch. | SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange | app/soc/release.py | app/soc/release.py | # Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Release tag string for display in templates (and possibly other uses).
Steps (currently done by hand, but too be scripted in the future) to make
a release:
1) set RELEASE_TAG in this file to a "release candidate" release string that
ends in "rc"
2) commit this file *by itself* in its own commit
3) use 'svn cp' to create a release branch in /branches/ with exactly the
same name as the contents of the RELEASE_TAG string
4) set RELEASE_TAG back to None in /trunk/
To finalize a release candidate in a release branch for a push to the live
web site:
1) in the release branch, change RELEASE_TAG to remove the trailing "rc"
2) commit this file in the release branch *by itself* in its own commit
3) use 'svn cp' to create a tag in /tags/ with exactly the same name as the
contents of the RELEASE_TAG string
4) put the release branch in a state where it is ready for additional patches
after the tag by setting the end of the RELEASE_TAG string to "p0"
To re-release a previously-tagged release branch after a patch for a push to
the live web site:
1) increment the "patch suffix" of the RELEASE_TAG string to the next integer
(for example, "p0" becomes "p1", so the first tagged patch release will
always be "p1", not "p0", which is just a placeholder)
2) (same as #2 for a release candidate)
3) (same as #3 for a release candidate)
4) (there is no step 4)
"""
__authors__ = [
'"Todd Larsen" <tlarsen@google.com>',
]
RELEASE_TAG = None
| # Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Release tag string for display in templates (and possibly other uses).
Steps (currently done by hand, but too be scripted in the future) to make
a release:
1) set RELEASE_TAG in this file to a "release candidate" release string that
ends in "rc"
2) commit this file *by itself* in its own commit
3) use 'svn cp' to create a release branch in /branches/ with exactly the
same name as the contents of the RELEASE_TAG string
4) set RELEASE_TAG back to None in /trunk/
To finalize a release candidate in a release branch for a push to the live
web site:
1) in the release branch, change RELEASE_TAG to remove the trailing "rc"
2) commit this file in the release branch *by itself* in its own commit
3) use 'svn cp' to create a tag in /tags/ with exactly the same name as the
contents of the RELEASE_TAG string
4) put the release branch in a state where it is ready for additional patches
after the tag by setting the end of the RELEASE_TAG string to "p0"
To re-release a previously-tagged release branch after a patch for a push to
the live web site:
1) increment the "patch suffix" of the RELEASE_TAG string to the next integer
(for example, "p0" becomes "p1", so the first tagged patch release will
always be "p1", not "p0", which is just a placeholder)
2) (same as #2 for a release candidate)
3) (same as #3 for a release candidate)
4) (there is no step 4)
"""
__authors__ = [
'"Todd Larsen" <tlarsen@google.com>',
]
RELEASE_TAG = '0.0a20081123rc'
| apache-2.0 | Python |
ea30119d1a46d863688aa9092d316efdfd193552 | Change make update to make production | webkom/chewie,webkom/chewie | deploy.py | deploy.py | """
Takes a project name as the first argument
and a git-branch as the second (optional).
Finds the hostname from a config file, then
uses ssh to deploy the project.
"""
import os
import sys
import json
from fabric.api import env, run, cd
class MissingProjectNameError(Exception):
def __init__(self):
Exception.__init__(self, 'Missing project name argument.')
class MissingProjectError(Exception):
def __init__(self, project_name):
Exception.__init__(self, 'Cant\'t find a project with the project name %s' % project_name)
class MissingConfigFileVariableError(Exception):
def __init__(self, project_name):
Exception.__init__(self, 'Can\'t find the environment variable SERVER_CONFIG_FILE.')
try:
conf_file = os.path.dirname(os.path.realpath(__file__)) + '/' + os.environ['SERVER_CONFIG_FILE']
except KeyError as e:
raise MissingConfigFileVariableError()
def get_project(project_name):
with open(conf_file) as f:
projects = json.load(f)
try:
project = projects[project_name]
return project
except KeyError:
raise MissingProjectError(project_name)
def deploy_project(project_name, branch='master'):
project = get_project(project_name)
env.host_string = project['hostname']
env.user = project.get('user') or 'root'
with cd(project['path']):
run('git fetch && git reset --hard origin/%s' % branch)
run('make production')
if __name__ == '__main__':
try:
if len(sys.argv):
deploy_project(*sys.argv[1:])
else:
raise MissingProjectNameError
except Exception as e:
sys.stderr.write(e.message)
| """
Takes a project name as the first argument
and a git-branch as the second (optional).
Finds the hostname from a config file, then
uses ssh to deploy the project.
"""
import os
import sys
import json
from fabric.api import env, run, cd
class MissingProjectNameError(Exception):
def __init__(self):
Exception.__init__(self, 'Missing project name argument.')
class MissingProjectError(Exception):
def __init__(self, project_name):
Exception.__init__(self, 'Cant\'t find a project with the project name %s' % project_name)
class MissingConfigFileVariableError(Exception):
def __init__(self, project_name):
Exception.__init__(self, 'Can\'t find the environment variable SERVER_CONFIG_FILE.')
try:
conf_file = os.path.dirname(os.path.realpath(__file__)) + '/' + os.environ['SERVER_CONFIG_FILE']
except KeyError as e:
raise MissingConfigFileVariableError()
def get_project(project_name):
with open(conf_file) as f:
projects = json.load(f)
try:
project = projects[project_name]
return project
except KeyError:
raise MissingProjectError(project_name)
def deploy_project(project_name, branch='master'):
project = get_project(project_name)
env.host_string = project['hostname']
env.user = project.get('user') or 'root'
with cd(project['path']):
run('git fetch && git reset --hard origin/%s' % branch)
run('make update')
if __name__ == '__main__':
try:
if len(sys.argv):
deploy_project(*sys.argv[1:])
else:
raise MissingProjectNameError
except Exception as e:
sys.stderr.write(e.message)
| mit | Python |
59f66f642281daa89c347c4ce9ed97eff921c77b | Add implementation | jcollado/ftps | ftps/ftps.py | ftps/ftps.py | # -*- coding: utf-8 -*-
"""Python interface to FTPS using pycurl."""
import logging
import os
from six import BytesIO
import pycurl
LOGGER = logging.getLogger('ftps')
class FTPS(object):
"""FTPS client based on pycurl.
:param url: Server URL including authorization
:type url: str
:param connect_timeout: Connection timeout in seconds
:type connect_timeout: int
:param max_retries: Maximum number of retry attempts
:type max_retries: int
"""
def __init__(self, url, connect_timeout=5, max_retries=5):
"""Create pycurl client."""
assert url.startswith('ftps://'), 'Expected URL scheme is ftps'
self.base_url = url
self.connect_timeout = connect_timeout
self.max_retries = max_retries
self.client = pycurl.Curl()
self.reset()
def reset(self):
"""Reset client.
This is useful after each operation to make sure the client options are
set to its default state.
"""
self.client.reset()
self.client.setopt(pycurl.SSL_VERIFYPEER, False)
self.client.setopt(pycurl.SSL_VERIFYHOST, False)
self.client.setopt(pycurl.CONNECTTIMEOUT, self.connect_timeout)
def perform(self):
"""Perform operation with retries."""
retries = 0
while retries < self.max_retries:
try:
self.client.perform()
LOGGER.debug('Operation performed successfully')
return
except pycurl.error as exc:
LOGGER.warning(exc)
LOGGER.debug('Retrying (%d)...', retries)
retries += 1
LOGGER.error('Failed to perform operation')
def list(self, remote_dir=None):
"""List files in remote directory.
:param remote_dir: Path to remote directory to get the file list.
:type remote_dir: str
"""
if remote_dir is None:
# List root directory by default
remote_dir = ''
elif not remote_dir.endswith('/'):
# Make sure that directory ends with a forward slash character
remote_dir += '/'
url = '/'.join((self.base_url, remote_dir))
self.client.setopt(pycurl.URL, url)
output_buffer = BytesIO()
self.client.setopt(pycurl.WRITEDATA, output_buffer)
LOGGER.debug('Listing directory: %s', url)
self.perform()
self.reset()
output = output_buffer.getvalue().decode('utf-8')
files = [
line.split()[-1]
for line in output.split('\n')
if line
]
return files
def download(self, remote_filename, local_filename):
"""Download remote file and save it locally.
:param remote_filename: Path to remote file in server to download.
:type remote_filename: str
:param local_filename: Path to local file to create to.
:type local_filename: str
"""
url = '/'.join((self.base_url, remote_filename))
self.client.setopt(pycurl.URL, url)
with open(local_filename, 'wb') as output_file:
self.client.setopt(pycurl.WRITEDATA, output_file)
LOGGER.debug('Downloading file: %s -> %s', url, local_filename)
self.perform()
self.reset()
def upload(self, local_filename, remote_filename):
    """Send *local_filename* to the server as *remote_filename*.

    :param local_filename: Path to local file to upload.
    :type local_filename: str
    :param remote_filename: Path to remote file to create.
    :type remote_filename: str
    """
    url = '{0}/{1}'.format(self.base_url, remote_filename)
    self.client.setopt(pycurl.URL, url)
    with open(local_filename, 'rb') as source:
        self.client.setopt(pycurl.UPLOAD, True)
        self.client.setopt(pycurl.READDATA, source)
        # curl needs the exact upload size up front.
        self.client.setopt(
            pycurl.INFILESIZE_LARGE,
            os.path.getsize(local_filename),
        )
        LOGGER.debug('Uploading file: %s -> %s', local_filename, url)
        self.perform()
        self.reset()
| # -*- coding: utf-8 -*-
| mit | Python |
6673faab453ccfc1f9f2aae67c1e99433ee0ee5e | Resolve #21 | ChameleonTartu/neurotolge,ChameleonTartu/neurotolge,ChameleonTartu/neurotolge | translators/ut.py | translators/ut.py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import time
import socket
import sys
def ut_translation(queue, text, translate_from='et', translate_to='en'):
    """Request a translation from the UT MT server and queue the result.

    Sends ``text|||translate_from|||translate_to`` over a raw TCP socket
    and puts ``{'translation_ut': <translation>}`` on *queue*.  On any
    failure the queued translation is the empty string.

    :param queue: queue-like object with a ``put`` method.
    :param text: source text to translate.
    :param translate_from: source language code (default ``'et'``).
    :param translate_to: target language code (default ``'en'``).
    :returns: None
    """
    translation = ""
    try:
        # Hard-coded translation service endpoint.
        __HOST__ = "booster2.hpc.ut.ee"
        __PORT__ = 50007
        __BUFFER_SIZE__ = 4096
        delimiter = "|||"
        # Wire format: source text and language codes joined by '|||'.
        text_for_translation = u"{source}{delimiter}" \
                               u"{lang_from}{delimiter}{lang_to}".format(source=text,
                                                                         delimiter=delimiter,
                                                                         lang_from=translate_from,
                                                                         lang_to=translate_to)
        begin = time.time()
        s = socket.socket()
        try:
            s.connect((__HOST__, __PORT__))
            s.send(text_for_translation.encode('utf-8'))
            # BUGFIX: recv() returns bytes on Python 3; decode before
            # stripping the delimiter so str.replace works everywhere.
            translation = s.recv(__BUFFER_SIZE__).decode('utf-8').replace("|||", "")
        finally:
            # BUGFIX: close the socket even when connect/send/recv fail.
            s.close()
        end = time.time()
        print("ut", translation, text_for_translation)
        print("ut/time : ", end - begin)
    except Exception as e:
        # BUGFIX: Exception.message was removed in Python 3; use str(e).
        print("ut exception", str(e))
        translation = ""
    queue.put({'translation_ut': translation})
    return None
| #!/usr/bin/python
# -*- encoding: utf-8 -*-
import time
import socket
import sys
# NOTE(review): Python 2 only — uses the ``print >>`` statement and
# sends the un-encoded unicode string directly over the socket.
def ut_translation(queue, text, translate_from='et', translate_to='en'):
    """Request a translation from the UT MT server and queue the result.

    Sends ``text|||translate_from|||translate_to`` over a raw TCP socket
    and puts ``{'translation_ut': <translation>}`` on *queue*; on any
    failure the queued translation is the empty string.
    """
    try:
        # Hard-coded translation service endpoint.
        __HOST__ = "booster2.hpc.ut.ee"
        __PORT__ = 50007
        __BUFFER_SIZE__ = 4096
        delimiter = "|||"
        # Wire format: source text and language codes joined by '|||'.
        text_for_translation = u"{source}{delimiter}" \
                               u"{lang_from}{delimiter}{lang_to}".format(source=text,
                                                                         delimiter=delimiter,
                                                                         lang_from=translate_from,
                                                                         lang_to=translate_to)
        begin = time.time()
        s = socket.socket()
        s.connect((__HOST__, __PORT__))
        # Python-2 print-statement redirection to stderr.
        print >> sys.stderr, "text-for-translation", text_for_translation
        s.send(text_for_translation)
        # Strip the delimiter markers from the server reply.
        translation = s.recv(__BUFFER_SIZE__).replace("|||", "")
        s.close()
        end = time.time()
        print("ut", translation, text_for_translation)
        print("ut/time : ", end - begin)
    except Exception as e:
        # NOTE(review): e.message exists only on Python 2 exceptions.
        print("ut exception", e.message)
        translation = ""
    queue.put({'translation_ut': translation})
    return None
| mit | Python |
e374abeba61df8290f3634146014ac726d8185de | handle attachments of the template. | Maspear/odoo,shingonoide/odoo,KontorConsulting/odoo,slevenhagen/odoo-npg,ygol/odoo,waytai/odoo,mlaitinen/odoo,RafaelTorrealba/odoo,cloud9UG/odoo,hassoon3/odoo,bkirui/odoo,elmerdpadilla/iv,avoinsystems/odoo,OpusVL/odoo,demon-ru/iml-crm,CopeX/odoo,rgeleta/odoo,virgree/odoo,hopeall/odoo,slevenhagen/odoo-npg,blaggacao/OpenUpgrade,kittiu/odoo,lsinfo/odoo,matrixise/odoo,synconics/odoo,gsmartway/odoo,erkrishna9/odoo,JonathanStein/odoo,cdrooom/odoo,alexcuellar/odoo,guewen/OpenUpgrade,Drooids/odoo,lsinfo/odoo,KontorConsulting/odoo,srimai/odoo,mkieszek/odoo,tinkhaven-organization/odoo,minhtuancn/odoo,nuuuboo/odoo,oasiswork/odoo,wangjun/odoo,nhomar/odoo-mirror,ClearCorp-dev/odoo,alqfahad/odoo,Nowheresly/odoo,aviciimaxwell/odoo,jeasoft/odoo,Gitlab11/odoo,odoousers2014/odoo,vnsofthe/odoo,lgscofield/odoo,havt/odoo,sysadminmatmoz/OCB,Endika/odoo,datenbetrieb/odoo,Nowheresly/odoo,Antiun/odoo,hoatle/odoo,osvalr/odoo,rgeleta/odoo,provaleks/o8,bplancher/odoo,mvaled/OpenUpgrade,xujb/odoo,nuuuboo/odoo,sysadminmatmoz/OCB,doomsterinc/odoo,tvtsoft/odoo8,jiachenning/odoo,nexiles/odoo,florian-dacosta/OpenUpgrade,ecosoft-odoo/odoo,mszewczy/odoo,Endika/odoo,simongoffin/website_version,rdeheele/odoo,xzYue/odoo,janocat/odoo,fuselock/odoo,waytai/odoo,Ichag/odoo,SerpentCS/odoo,dariemp/odoo,bealdav/OpenUpgrade,patmcb/odoo,csrocha/OpenUpgrade,shivam1111/odoo,Noviat/odoo,nuuuboo/odoo,elmerdpadilla/iv,tinkerthaler/odoo,leorochael/odoo,Nick-OpusVL/odoo,srsman/odoo,abenzbiria/clients_odoo,sadleader/odoo,OpenUpgrade/OpenUpgrade,massot/odoo,spadae22/odoo,apanju/odoo,agrista/odoo-saas,srsman/odoo,Noviat/odoo,agrista/odoo-saas,apanju/GMIO_Odoo,ovnicraft/odoo,oihane/odoo,mkieszek/odoo,dkubiak789/odoo,osvalr/odoo,sv-dev1/odoo,shingonoide/odoo,ramitalat/odoo,apocalypsebg/odoo,stonegithubs/odoo,bakhtout/odoo-educ,csrocha/OpenUpgrade,BT-rmartin/odoo,RafaelTorrealba/odoo,Noviat/odoo,mkieszek/odoo,abdellatifkarroum/odoo,savoirfairelin
ux/OpenUpgrade,sinbazhou/odoo,funkring/fdoo,ubic135/odoo-design,slevenhagen/odoo-npg,waytai/odoo,NeovaHealth/odoo,andreparames/odoo,aviciimaxwell/odoo,SerpentCS/odoo,collex100/odoo,thanhacun/odoo,jusdng/odoo,ujjwalwahi/odoo,JGarcia-Panach/odoo,Adel-Magebinary/odoo,sebalix/OpenUpgrade,luiseduardohdbackup/odoo,aviciimaxwell/odoo,mlaitinen/odoo,hmen89/odoo,Drooids/odoo,bkirui/odoo,oasiswork/odoo,nexiles/odoo,odooindia/odoo,virgree/odoo,blaggacao/OpenUpgrade,dariemp/odoo,mkieszek/odoo,x111ong/odoo,tangyiyong/odoo,cysnake4713/odoo,ThinkOpen-Solutions/odoo,RafaelTorrealba/odoo,SAM-IT-SA/odoo,charbeljc/OCB,realsaiko/odoo,sv-dev1/odoo,nitinitprof/odoo,Codefans-fan/odoo,csrocha/OpenUpgrade,hifly/OpenUpgrade,dkubiak789/odoo,elmerdpadilla/iv,nuncjo/odoo,Ernesto99/odoo,Ernesto99/odoo,nitinitprof/odoo,BT-astauder/odoo,Maspear/odoo,zchking/odoo,jusdng/odoo,fevxie/odoo,Ichag/odoo,ehirt/odoo,lsinfo/odoo,JGarcia-Panach/odoo,eino-makitalo/odoo,goliveirab/odoo,ThinkOpen-Solutions/odoo,mmbtba/odoo,fuhongliang/odoo,laslabs/odoo,nuncjo/odoo,ccomb/OpenUpgrade,microcom/odoo,mszewczy/odoo,Bachaco-ve/odoo,grap/OpenUpgrade,gvb/odoo,juanalfonsopr/odoo,ubic135/odoo-design,minhtuancn/odoo,eino-makitalo/odoo,sinbazhou/odoo,PongPi/isl-odoo,ojengwa/odoo,lsinfo/odoo,hbrunn/OpenUpgrade,mustafat/odoo-1,hip-odoo/odoo,draugiskisprendimai/odoo,Eric-Zhong/odoo,grap/OpenUpgrade,simongoffin/website_version,Ichag/odoo,takis/odoo,OpenUpgrade-dev/OpenUpgrade,aviciimaxwell/odoo,jfpla/odoo,ApuliaSoftware/odoo,sadleader/odoo,Adel-Magebinary/odoo,goliveirab/odoo,fdvarela/odoo8,mustafat/odoo-1,fuhongliang/odoo,ChanduERP/odoo,Eric-Zhong/odoo,jusdng/odoo,gorjuce/odoo,juanalfonsopr/odoo,hifly/OpenUpgrade,tinkerthaler/odoo,0k/OpenUpgrade,jeasoft/odoo,virgree/odoo,ecosoft-odoo/odoo,ubic135/odoo-design,csrocha/OpenUpgrade,shaufi10/odoo,salaria/odoo,Daniel-CA/odoo,mvaled/OpenUpgrade,luistorresm/odoo,naousse/odoo,Drooids/odoo,Gitlab11/odoo,glovebx/odoo,salaria/odoo,makinacorpus/odoo,sinbazhou/odoo,glovebx/odoo,ccomb/OpenUp
grade,thanhacun/odoo,oliverhr/odoo,javierTerry/odoo,thanhacun/odoo,osvalr/odoo,cloud9UG/odoo,hmen89/odoo,jiachenning/odoo,RafaelTorrealba/odoo,FlorianLudwig/odoo,rgeleta/odoo,Codefans-fan/odoo,pplatek/odoo,agrista/odoo-saas,doomsterinc/odoo,ujjwalwahi/odoo,JonathanStein/odoo,pedrobaeza/OpenUpgrade,hopeall/odoo,Endika/odoo,slevenhagen/odoo,MarcosCommunity/odoo,kirca/OpenUpgrade,CatsAndDogsbvba/odoo,Danisan/odoo-1,ygol/odoo,mustafat/odoo-1,nexiles/odoo,gavin-feng/odoo,idncom/odoo,jusdng/odoo,stephen144/odoo,zchking/odoo,Elico-Corp/odoo_OCB,draugiskisprendimai/odoo,CatsAndDogsbvba/odoo,oihane/odoo,CopeX/odoo,shaufi/odoo,PongPi/isl-odoo,matrixise/odoo,OpenUpgrade/OpenUpgrade,funkring/fdoo,joshuajan/odoo,alqfahad/odoo,collex100/odoo,shivam1111/odoo,charbeljc/OCB,xujb/odoo,hoatle/odoo,takis/odoo,savoirfairelinux/OpenUpgrade,fuselock/odoo,arthru/OpenUpgrade,storm-computers/odoo,x111ong/odoo,Gitlab11/odoo,Maspear/odoo,erkrishna9/odoo,tinkhaven-organization/odoo,prospwro/odoo,Drooids/odoo,waytai/odoo,tvtsoft/odoo8,fossoult/odoo,nagyistoce/odoo-dev-odoo,syci/OCB,hubsaysnuaa/odoo,jpshort/odoo,tinkerthaler/odoo,ShineFan/odoo,andreparames/odoo,Endika/OpenUpgrade,hoatle/odoo,jusdng/odoo,Antiun/odoo,RafaelTorrealba/odoo,Noviat/odoo,feroda/odoo,guewen/OpenUpgrade,jeasoft/odoo,charbeljc/OCB,provaleks/o8,Noviat/odoo,fjbatresv/odoo,hmen89/odoo,CubicERP/odoo,QianBIG/odoo,abenzbiria/clients_odoo,syci/OCB,dariemp/odoo,virgree/odoo,CubicERP/odoo,mmbtba/odoo,ojengwa/odoo,pplatek/odoo,bakhtout/odoo-educ,Kilhog/odoo,slevenhagen/odoo,simongoffin/website_version,BT-astauder/odoo,fuselock/odoo,ShineFan/odoo,cloud9UG/odoo,rubencabrera/odoo,fevxie/odoo,poljeff/odoo,NL66278/OCB,simongoffin/website_version,waytai/odoo,odootr/odoo,ingadhoc/odoo,spadae22/odoo,shingonoide/odoo,gavin-feng/odoo,guerrerocarlos/odoo,juanalfonsopr/odoo,makinacorpus/odoo,odootr/odoo,leorochael/odoo,nitinitprof/odoo,PongPi/isl-odoo,bplancher/odoo,tarzan0820/odoo,pedrobaeza/OpenUpgrade,nhomar/odoo,numerigraphe/odoo,diagramsof
tware/odoo,ChanduERP/odoo,javierTerry/odoo,datenbetrieb/odoo,florentx/OpenUpgrade,abdellatifkarroum/odoo,florian-dacosta/OpenUpgrade,kittiu/odoo,JonathanStein/odoo,pedrobaeza/odoo,gorjuce/odoo,ThinkOpen-Solutions/odoo,oliverhr/odoo,dfang/odoo,deKupini/erp,Ichag/odoo,fjbatresv/odoo,0k/odoo,javierTerry/odoo,eino-makitalo/odoo,tvibliani/odoo,mszewczy/odoo,ojengwa/odoo,sysadminmatmoz/OCB,lombritz/odoo,rowemoore/odoo,fdvarela/odoo8,Grirrane/odoo,juanalfonsopr/odoo,havt/odoo,dariemp/odoo,virgree/odoo,jaxkodex/odoo,dgzurita/odoo,ClearCorp-dev/odoo,OpusVL/odoo,nitinitprof/odoo,xzYue/odoo,JonathanStein/odoo,AuyaJackie/odoo,lsinfo/odoo,oliverhr/odoo,FlorianLudwig/odoo,dariemp/odoo,KontorConsulting/odoo,windedge/odoo,wangjun/odoo,oasiswork/odoo,prospwro/odoo,blaggacao/OpenUpgrade,rowemoore/odoo,fuselock/odoo,pedrobaeza/odoo,rdeheele/odoo,omprakasha/odoo,ChanduERP/odoo,windedge/odoo,sinbazhou/odoo,KontorConsulting/odoo,rahuldhote/odoo,Noviat/odoo,Endika/OpenUpgrade,apanju/GMIO_Odoo,savoirfairelinux/OpenUpgrade,florentx/OpenUpgrade,highco-groupe/odoo,grap/OpenUpgrade,Codefans-fan/odoo,0k/odoo,Danisan/odoo-1,sysadminmatmoz/OCB,colinnewell/odoo,FlorianLudwig/odoo,takis/odoo,OpenUpgrade/OpenUpgrade,andreparames/odoo,OpenUpgrade/OpenUpgrade,inspyration/odoo,apanju/GMIO_Odoo,fuhongliang/odoo,brijeshkesariya/odoo,apanju/odoo,n0m4dz/odoo,janocat/odoo,Antiun/odoo,hoatle/odoo,cpyou/odoo,makinacorpus/odoo,0k/OpenUpgrade,Kilhog/odoo,BT-astauder/odoo,ThinkOpen-Solutions/odoo,CubicERP/odoo,mmbtba/odoo,matrixise/odoo,OpusVL/odoo,dkubiak789/odoo,x111ong/odoo,ehirt/odoo,acshan/odoo,realsaiko/odoo,massot/odoo,chiragjogi/odoo,lgscofield/odoo,mszewczy/odoo,Kilhog/odoo,mmbtba/odoo,tinkerthaler/odoo,glovebx/odoo,Elico-Corp/odoo_OCB,ChanduERP/odoo,salaria/odoo,gsmartway/odoo,fuhongliang/odoo,CubicERP/odoo,ThinkOpen-Solutions/odoo,poljeff/odoo,idncom/odoo,ygol/odoo,ClearCorp-dev/odoo,srsman/odoo,oasiswork/odoo,apanju/GMIO_Odoo,rgeleta/odoo,bwrsandman/OpenUpgrade,FlorianLudwig/odoo,glovebx/odoo,ccomb/O
penUpgrade,bobisme/odoo,optima-ict/odoo,OpenUpgrade-dev/OpenUpgrade,dkubiak789/odoo,kirca/OpenUpgrade,synconics/odoo,ecosoft-odoo/odoo,vnsofthe/odoo,BT-rmartin/odoo,ujjwalwahi/odoo,inspyration/odoo,dariemp/odoo,nagyistoce/odoo-dev-odoo,AuyaJackie/odoo,ingadhoc/odoo,leoliujie/odoo,n0m4dz/odoo,kifcaliph/odoo,dalegregory/odoo,hassoon3/odoo,hmen89/odoo,ubic135/odoo-design,hopeall/odoo,kybriainfotech/iSocioCRM,MarcosCommunity/odoo,pedrobaeza/OpenUpgrade,nhomar/odoo,tarzan0820/odoo,vnsofthe/odoo,deKupini/erp,sebalix/OpenUpgrade,draugiskisprendimai/odoo,NL66278/OCB,virgree/odoo,odootr/odoo,wangjun/odoo,guewen/OpenUpgrade,SAM-IT-SA/odoo,gvb/odoo,tangyiyong/odoo,pplatek/odoo,0k/OpenUpgrade,rahuldhote/odoo,ingadhoc/odoo,bkirui/odoo,VielSoft/odoo,demon-ru/iml-crm,tvibliani/odoo,srimai/odoo,chiragjogi/odoo,andreparames/odoo,colinnewell/odoo,patmcb/odoo,bwrsandman/OpenUpgrade,collex100/odoo,diagramsoftware/odoo,odootr/odoo,poljeff/odoo,glovebx/odoo,Danisan/odoo-1,wangjun/odoo,blaggacao/OpenUpgrade,guewen/OpenUpgrade,odootr/odoo,luiseduardohdbackup/odoo,sebalix/OpenUpgrade,tinkerthaler/odoo,klunwebale/odoo,dsfsdgsbngfggb/odoo,BT-ojossen/odoo,mmbtba/odoo,makinacorpus/odoo,cdrooom/odoo,takis/odoo,guerrerocarlos/odoo,Danisan/odoo-1,highco-groupe/odoo,glovebx/odoo,havt/odoo,rdeheele/odoo,synconics/odoo,shaufi/odoo,jeasoft/odoo,Gitlab11/odoo,vnsofthe/odoo,CopeX/odoo,alhashash/odoo,microcom/odoo,gavin-feng/odoo,damdam-s/OpenUpgrade,abenzbiria/clients_odoo,brijeshkesariya/odoo,ygol/odoo,bobisme/odoo,cpyou/odoo,shaufi10/odoo,javierTerry/odoo,jaxkodex/odoo,ingadhoc/odoo,realsaiko/odoo,BT-ojossen/odoo,jiangzhixiao/odoo,fevxie/odoo,Ernesto99/odoo,TRESCLOUD/odoopub,BT-rmartin/odoo,storm-computers/odoo,syci/OCB,cloud9UG/odoo,QianBIG/odoo,nagyistoce/odoo-dev-odoo,pedrobaeza/odoo,doomsterinc/odoo,QianBIG/odoo,apanju/odoo,salaria/odoo,papouso/odoo,Drooids/odoo,arthru/OpenUpgrade,ShineFan/odoo,lgscofield/odoo,dsfsdgsbngfggb/odoo,elmerdpadilla/iv,hifly/OpenUpgrade,BT-astauder/odoo,Daniel-CA/odoo,g
uerrerocarlos/odoo,eino-makitalo/odoo,ojengwa/odoo,apocalypsebg/odoo,GauravSahu/odoo,sysadminmatmoz/OCB,fossoult/odoo,csrocha/OpenUpgrade,ramadhane/odoo,kybriainfotech/iSocioCRM,ygol/odoo,ihsanudin/odoo,alexcuellar/odoo,klunwebale/odoo,avoinsystems/odoo,bealdav/OpenUpgrade,acshan/odoo,NL66278/OCB,gorjuce/odoo,alhashash/odoo,lsinfo/odoo,ingadhoc/odoo,agrista/odoo-saas,cedk/odoo,tinkhaven-organization/odoo,JonathanStein/odoo,Antiun/odoo,kifcaliph/odoo,ShineFan/odoo,goliveirab/odoo,FlorianLudwig/odoo,jpshort/odoo,jaxkodex/odoo,hopeall/odoo,odoo-turkiye/odoo,grap/OpenUpgrade,hopeall/odoo,oasiswork/odoo,cpyou/odoo,apanju/GMIO_Odoo,incaser/odoo-odoo,jiangzhixiao/odoo,AuyaJackie/odoo,savoirfairelinux/OpenUpgrade,dkubiak789/odoo,rdeheele/odoo,christophlsa/odoo,Grirrane/odoo,bealdav/OpenUpgrade,NeovaHealth/odoo,cysnake4713/odoo,fdvarela/odoo8,odoo-turkiye/odoo,mvaled/OpenUpgrade,dezynetechnologies/odoo,nhomar/odoo,ojengwa/odoo,PongPi/isl-odoo,brijeshkesariya/odoo,oasiswork/odoo,shivam1111/odoo,feroda/odoo,sadleader/odoo,spadae22/odoo,OpenUpgrade-dev/OpenUpgrade,numerigraphe/odoo,slevenhagen/odoo,hanicker/odoo,thanhacun/odoo,hubsaysnuaa/odoo,havt/odoo,gavin-feng/odoo,ecosoft-odoo/odoo,shingonoide/odoo,wangjun/odoo,bobisme/odoo,0k/odoo,0k/odoo,ovnicraft/odoo,Gitlab11/odoo,tarzan0820/odoo,funkring/fdoo,doomsterinc/odoo,Grirrane/odoo,abdellatifkarroum/odoo,jiangzhixiao/odoo,guewen/OpenUpgrade,hubsaysnuaa/odoo,lgscofield/odoo,hanicker/odoo,ihsanudin/odoo,JCA-Developpement/Odoo,datenbetrieb/odoo,JGarcia-Panach/odoo,guerrerocarlos/odoo,dgzurita/odoo,Kilhog/odoo,Bachaco-ve/odoo,ApuliaSoftware/odoo,provaleks/o8,oliverhr/odoo,acshan/odoo,bobisme/odoo,fossoult/odoo,apocalypsebg/odoo,storm-computers/odoo,mvaled/OpenUpgrade,xzYue/odoo,dgzurita/odoo,xzYue/odoo,avoinsystems/odoo,TRESCLOUD/odoopub,GauravSahu/odoo,Nick-OpusVL/odoo,apanju/odoo,acshan/odoo,hopeall/odoo,JonathanStein/odoo,nuuuboo/odoo,BT-fgarbely/odoo,Nick-OpusVL/odoo,kybriainfotech/iSocioCRM,hifly/OpenUpgrade,Eric-Zhong/odoo,s
torm-computers/odoo,inspyration/odoo,dsfsdgsbngfggb/odoo,ubic135/odoo-design,numerigraphe/odoo,jfpla/odoo,alhashash/odoo,dalegregory/odoo,ihsanudin/odoo,BT-rmartin/odoo,Elico-Corp/odoo_OCB,rubencabrera/odoo,VielSoft/odoo,MarcosCommunity/odoo,dkubiak789/odoo,cpyou/odoo,diagramsoftware/odoo,lombritz/odoo,PongPi/isl-odoo,sv-dev1/odoo,gsmartway/odoo,leoliujie/odoo,lsinfo/odoo,apanju/odoo,fuselock/odoo,salaria/odoo,zchking/odoo,x111ong/odoo,javierTerry/odoo,OpenUpgrade/OpenUpgrade,draugiskisprendimai/odoo,synconics/odoo,optima-ict/odoo,goliveirab/odoo,hifly/OpenUpgrade,microcom/odoo,bguillot/OpenUpgrade,collex100/odoo,andreparames/odoo,rahuldhote/odoo,VielSoft/odoo,bobisme/odoo,apanju/GMIO_Odoo,gavin-feng/odoo,x111ong/odoo,rubencabrera/odoo,joariasl/odoo,patmcb/odoo,odoo-turkiye/odoo,BT-rmartin/odoo,Codefans-fan/odoo,sinbazhou/odoo,Adel-Magebinary/odoo,zchking/odoo,dsfsdgsbngfggb/odoo,damdam-s/OpenUpgrade,srsman/odoo,ramadhane/odoo,arthru/OpenUpgrade,mlaitinen/odoo,tinkhaven-organization/odoo,idncom/odoo,jiangzhixiao/odoo,rubencabrera/odoo,csrocha/OpenUpgrade,eino-makitalo/odoo,dgzurita/odoo,OpenUpgrade-dev/OpenUpgrade,papouso/odoo,shingonoide/odoo,Grirrane/odoo,spadae22/odoo,odootr/odoo,alexteodor/odoo,tangyiyong/odoo,Adel-Magebinary/odoo,Elico-Corp/odoo_OCB,alhashash/odoo,ihsanudin/odoo,joariasl/odoo,CatsAndDogsbvba/odoo,jeasoft/odoo,sebalix/OpenUpgrade,nhomar/odoo,ramadhane/odoo,Endika/odoo,odoo-turkiye/odoo,bplancher/odoo,QianBIG/odoo,slevenhagen/odoo-npg,inspyration/odoo,dfang/odoo,JCA-Developpement/Odoo,stephen144/odoo,TRESCLOUD/odoopub,ShineFan/odoo,savoirfairelinux/odoo,hubsaysnuaa/odoo,sinbazhou/odoo,lightcn/odoo,goliveirab/odoo,minhtuancn/odoo,srimai/odoo,fuselock/odoo,guewen/OpenUpgrade,eino-makitalo/odoo,bplancher/odoo,abstract-open-solutions/OCB,gvb/odoo,feroda/odoo,ccomb/OpenUpgrade,kifcaliph/odoo,gorjuce/odoo,lombritz/odoo,bguillot/OpenUpgrade,fgesora/odoo,microcom/odoo,markeTIC/OCB,luiseduardohdbackup/odoo,luistorresm/odoo,dezynetechnologies/odoo,x111ong/
odoo,xujb/odoo,Grirrane/odoo,numerigraphe/odoo,sergio-incaser/odoo,gvb/odoo,apocalypsebg/odoo,cedk/odoo,gvb/odoo,dgzurita/odoo,odoousers2014/odoo,fevxie/odoo,cysnake4713/odoo,TRESCLOUD/odoopub,mustafat/odoo-1,provaleks/o8,QianBIG/odoo,BT-rmartin/odoo,kybriainfotech/iSocioCRM,AuyaJackie/odoo,BT-fgarbely/odoo,cedk/odoo,oihane/odoo,zchking/odoo,incaser/odoo-odoo,windedge/odoo,shivam1111/odoo,florentx/OpenUpgrade,prospwro/odoo,joariasl/odoo,MarcosCommunity/odoo,mvaled/OpenUpgrade,bguillot/OpenUpgrade,chiragjogi/odoo,dalegregory/odoo,naousse/odoo,jolevq/odoopub,diagramsoftware/odoo,leorochael/odoo,Endika/OpenUpgrade,ThinkOpen-Solutions/odoo,guerrerocarlos/odoo,kittiu/odoo,jolevq/odoopub,Bachaco-ve/odoo,luistorresm/odoo,abstract-open-solutions/OCB,rdeheele/odoo,hip-odoo/odoo,SerpentCS/odoo,markeTIC/OCB,laslabs/odoo,nhomar/odoo-mirror,bkirui/odoo,ecosoft-odoo/odoo,JGarcia-Panach/odoo,damdam-s/OpenUpgrade,mszewczy/odoo,Endika/odoo,jfpla/odoo,janocat/odoo,odoo-turkiye/odoo,luiseduardohdbackup/odoo,cedk/odoo,janocat/odoo,sergio-incaser/odoo,andreparames/odoo,mkieszek/odoo,wangjun/odoo,OpenUpgrade-dev/OpenUpgrade,kirca/OpenUpgrade,blaggacao/OpenUpgrade,agrista/odoo-saas,feroda/odoo,poljeff/odoo,leorochael/odoo,Codefans-fan/odoo,joshuajan/odoo,hbrunn/OpenUpgrade,papouso/odoo,ovnicraft/odoo,odoousers2014/odoo,factorlibre/OCB,mszewczy/odoo,matrixise/odoo,colinnewell/odoo,ihsanudin/odoo,rowemoore/odoo,jfpla/odoo,charbeljc/OCB,xzYue/odoo,gsmartway/odoo,ramadhane/odoo,collex100/odoo,jiachenning/odoo,shaufi10/odoo,NeovaHealth/odoo,Ernesto99/odoo,florentx/OpenUpgrade,funkring/fdoo,collex100/odoo,provaleks/o8,damdam-s/OpenUpgrade,nagyistoce/odoo-dev-odoo,steedos/odoo,leoliujie/odoo,hanicker/odoo,highco-groupe/odoo,mlaitinen/odoo,Endika/OpenUpgrade,lombritz/odoo,microcom/odoo,nuncjo/odoo,brijeshkesariya/odoo,shaufi/odoo,jfpla/odoo,dfang/odoo,funkring/fdoo,glovebx/odoo,shivam1111/odoo,prospwro/odoo,ramitalat/odoo,ApuliaSoftware/odoo,Ernesto99/odoo,CopeX/odoo,makinacorpus/odoo,mmbtba/odoo
,matrixise/odoo,spadae22/odoo,Ichag/odoo,deKupini/erp,rubencabrera/odoo,optima-ict/odoo,Nick-OpusVL/odoo,tvibliani/odoo,ThinkOpen-Solutions/odoo,minhtuancn/odoo,gorjuce/odoo,xujb/odoo,takis/odoo,javierTerry/odoo,jaxkodex/odoo,abdellatifkarroum/odoo,waytai/odoo,nhomar/odoo-mirror,FlorianLudwig/odoo,papouso/odoo,apocalypsebg/odoo,tarzan0820/odoo,mvaled/OpenUpgrade,hanicker/odoo,alexcuellar/odoo,abdellatifkarroum/odoo,osvalr/odoo,nuncjo/odoo,arthru/OpenUpgrade,fgesora/odoo,dfang/odoo,slevenhagen/odoo-npg,JGarcia-Panach/odoo,fjbatresv/odoo,dalegregory/odoo,hbrunn/OpenUpgrade,dalegregory/odoo,Nick-OpusVL/odoo,eino-makitalo/odoo,laslabs/odoo,sebalix/OpenUpgrade,syci/OCB,shaufi10/odoo,Eric-Zhong/odoo,ramitalat/odoo,ygol/odoo,x111ong/odoo,Maspear/odoo,alexteodor/odoo,NeovaHealth/odoo,juanalfonsopr/odoo,abdellatifkarroum/odoo,oliverhr/odoo,stonegithubs/odoo,damdam-s/OpenUpgrade,jesramirez/odoo,dfang/odoo,xzYue/odoo,bakhtout/odoo-educ,lombritz/odoo,demon-ru/iml-crm,Danisan/odoo-1,grap/OpenUpgrade,massot/odoo,ApuliaSoftware/odoo,naousse/odoo,ovnicraft/odoo,shingonoide/odoo,optima-ict/odoo,pplatek/odoo,odooindia/odoo,klunwebale/odoo,oihane/odoo,alqfahad/odoo,hassoon3/odoo,ramitalat/odoo,realsaiko/odoo,hubsaysnuaa/odoo,leoliujie/odoo,rahuldhote/odoo,optima-ict/odoo,Bachaco-ve/odoo,tarzan0820/odoo,fevxie/odoo,tvtsoft/odoo8,christophlsa/odoo,odoousers2014/odoo,provaleks/o8,hip-odoo/odoo,addition-it-solutions/project-all,idncom/odoo,juanalfonsopr/odoo,draugiskisprendimai/odoo,bobisme/odoo,Nowheresly/odoo,Kilhog/odoo,ihsanudin/odoo,janocat/odoo,funkring/fdoo,mkieszek/odoo,nuuuboo/odoo,jeasoft/odoo,BT-ojossen/odoo,christophlsa/odoo,abstract-open-solutions/OCB,Bachaco-ve/odoo,srimai/odoo,savoirfairelinux/odoo,rowemoore/odoo,VielSoft/odoo,VielSoft/odoo,fjbatresv/odoo,papouso/odoo,fuhongliang/odoo,hbrunn/OpenUpgrade,stephen144/odoo,GauravSahu/odoo,diagramsoftware/odoo,sysadminmatmoz/OCB,shaufi/odoo,ovnicraft/odoo,simongoffin/website_version,cdrooom/odoo,ramadhane/odoo,osvalr/odoo,poljef
f/odoo,bwrsandman/OpenUpgrade,salaria/odoo,SAM-IT-SA/odoo,acshan/odoo,dezynetechnologies/odoo,odoo-turkiye/odoo,joariasl/odoo,lightcn/odoo,CubicERP/odoo,CatsAndDogsbvba/odoo,alexcuellar/odoo,brijeshkesariya/odoo,bealdav/OpenUpgrade,MarcosCommunity/odoo,numerigraphe/odoo,ShineFan/odoo,ramitalat/odoo,stephen144/odoo,bguillot/OpenUpgrade,florentx/OpenUpgrade,ovnicraft/odoo,luiseduardohdbackup/odoo,chiragjogi/odoo,makinacorpus/odoo,jiangzhixiao/odoo,stonegithubs/odoo,doomsterinc/odoo,Danisan/odoo-1,bplancher/odoo,christophlsa/odoo,rowemoore/odoo,colinnewell/odoo,steedos/odoo,tarzan0820/odoo,fuhongliang/odoo,jeasoft/odoo,sv-dev1/odoo,patmcb/odoo,n0m4dz/odoo,cloud9UG/odoo,Adel-Magebinary/odoo,mlaitinen/odoo,leoliujie/odoo,BT-ojossen/odoo,dsfsdgsbngfggb/odoo,PongPi/isl-odoo,Codefans-fan/odoo,avoinsystems/odoo,hanicker/odoo,lightcn/odoo,ecosoft-odoo/odoo,havt/odoo,ecosoft-odoo/odoo,ehirt/odoo,Bachaco-ve/odoo,Eric-Zhong/odoo,joariasl/odoo,hip-odoo/odoo,alexteodor/odoo,havt/odoo,minhtuancn/odoo,xzYue/odoo,gorjuce/odoo,factorlibre/OCB,apocalypsebg/odoo,pedrobaeza/OpenUpgrade,Ichag/odoo,tvibliani/odoo,massot/odoo,draugiskisprendimai/odoo,acshan/odoo,syci/OCB,omprakasha/odoo,markeTIC/OCB,Elico-Corp/odoo_OCB,collex100/odoo,patmcb/odoo,JGarcia-Panach/odoo,datenbetrieb/odoo,makinacorpus/odoo,shivam1111/odoo,oihane/odoo,apanju/GMIO_Odoo,factorlibre/OCB,factorlibre/OCB,pplatek/odoo,realsaiko/odoo,gvb/odoo,jpshort/odoo,diagramsoftware/odoo,SAM-IT-SA/odoo,cedk/odoo,dalegregory/odoo,n0m4dz/odoo,rubencabrera/odoo,hubsaysnuaa/odoo,BT-ojossen/odoo,jfpla/odoo,shaufi/odoo,pedrobaeza/odoo,jpshort/odoo,TRESCLOUD/odoopub,sve-odoo/odoo,dgzurita/odoo,fdvarela/odoo8,NeovaHealth/odoo,jiangzhixiao/odoo,vnsofthe/odoo,ApuliaSoftware/odoo,fgesora/odoo,ojengwa/odoo,Endika/OpenUpgrade,joshuajan/odoo,odooindia/odoo,odootr/odoo,rowemoore/odoo,aviciimaxwell/odoo,kittiu/odoo,CopeX/odoo,laslabs/odoo,sve-odoo/odoo,AuyaJackie/odoo,dsfsdgsbngfggb/odoo,hoatle/odoo,naousse/odoo,dezynetechnologies/odoo,factorlibre/
OCB,Bachaco-ve/odoo,takis/odoo,lgscofield/odoo,dezynetechnologies/odoo,tvibliani/odoo,ihsanudin/odoo,sadleader/odoo,nexiles/odoo,ovnicraft/odoo,Codefans-fan/odoo,cpyou/odoo,omprakasha/odoo,factorlibre/OCB,bwrsandman/OpenUpgrade,tvtsoft/odoo8,stonegithubs/odoo,takis/odoo,fjbatresv/odoo,lightcn/odoo,synconics/odoo,synconics/odoo,odoousers2014/odoo,bplancher/odoo,storm-computers/odoo,windedge/odoo,ujjwalwahi/odoo,stonegithubs/odoo,OpusVL/odoo,leorochael/odoo,ramitalat/odoo,tinkhaven-organization/odoo,leorochael/odoo,naousse/odoo,datenbetrieb/odoo,nagyistoce/odoo-dev-odoo,hopeall/odoo,papouso/odoo,dgzurita/odoo,sv-dev1/odoo,xujb/odoo,alqfahad/odoo,Eric-Zhong/odoo,omprakasha/odoo,ujjwalwahi/odoo,sadleader/odoo,shingonoide/odoo,Adel-Magebinary/odoo,addition-it-solutions/project-all,markeTIC/OCB,nitinitprof/odoo,gavin-feng/odoo,tangyiyong/odoo,Endika/odoo,microcom/odoo,srimai/odoo,optima-ict/odoo,zchking/odoo,alexteodor/odoo,numerigraphe/odoo,OpenUpgrade-dev/OpenUpgrade,kirca/OpenUpgrade,datenbetrieb/odoo,incaser/odoo-odoo,kybriainfotech/iSocioCRM,elmerdpadilla/iv,ingadhoc/odoo,jesramirez/odoo,hbrunn/OpenUpgrade,damdam-s/OpenUpgrade,shaufi10/odoo,minhtuancn/odoo,windedge/odoo,sebalix/OpenUpgrade,hanicker/odoo,sve-odoo/odoo,nexiles/odoo,sergio-incaser/odoo,kittiu/odoo,hifly/OpenUpgrade,Antiun/odoo,ccomb/OpenUpgrade,steedos/odoo,Nowheresly/odoo,luistorresm/odoo,numerigraphe/odoo,naousse/odoo,dsfsdgsbngfggb/odoo,CubicERP/odoo,pedrobaeza/OpenUpgrade,chiragjogi/odoo,jesramirez/odoo,ojengwa/odoo,klunwebale/odoo,waytai/odoo,nitinitprof/odoo,Elico-Corp/odoo_OCB,JCA-Developpement/Odoo,Endika/OpenUpgrade,vnsofthe/odoo,xujb/odoo,odooindia/odoo,pedrobaeza/OpenUpgrade,brijeshkesariya/odoo,florian-dacosta/OpenUpgrade,patmcb/odoo,tvibliani/odoo,klunwebale/odoo,NeovaHealth/odoo,omprakasha/odoo,lombritz/odoo,alqfahad/odoo,NL66278/OCB,markeTIC/OCB,papouso/odoo,Ernesto99/odoo,ApuliaSoftware/odoo,CubicERP/odoo,guerrerocarlos/odoo,idncom/odoo,Nick-OpusVL/odoo,Gitlab11/odoo,hbrunn/OpenUpgrade,f
evxie/odoo,rahuldhote/odoo,avoinsystems/odoo,BT-fgarbely/odoo,christophlsa/odoo,sve-odoo/odoo,SerpentCS/odoo,dllsf/odootest,cysnake4713/odoo,Endika/odoo,kirca/OpenUpgrade,alexcuellar/odoo,klunwebale/odoo,jaxkodex/odoo,aviciimaxwell/odoo,fjbatresv/odoo,savoirfairelinux/odoo,aviciimaxwell/odoo,sergio-incaser/odoo,goliveirab/odoo,gsmartway/odoo,SAM-IT-SA/odoo,jpshort/odoo,fdvarela/odoo8,addition-it-solutions/project-all,dezynetechnologies/odoo,jiachenning/odoo,markeTIC/OCB,KontorConsulting/odoo,ChanduERP/odoo,mmbtba/odoo,salaria/odoo,jolevq/odoopub,luistorresm/odoo,janocat/odoo,joshuajan/odoo,hifly/OpenUpgrade,mszewczy/odoo,ramadhane/odoo,highco-groupe/odoo,kybriainfotech/iSocioCRM,erkrishna9/odoo,nagyistoce/odoo-dev-odoo,vnsofthe/odoo,bwrsandman/OpenUpgrade,srimai/odoo,hoatle/odoo,CopeX/odoo,dfang/odoo,ClearCorp-dev/odoo,bakhtout/odoo-educ,nexiles/odoo,alhashash/odoo,pplatek/odoo,VielSoft/odoo,Maspear/odoo,colinnewell/odoo,CatsAndDogsbvba/odoo,bkirui/odoo,tinkerthaler/odoo,RafaelTorrealba/odoo,oihane/odoo,kybriainfotech/iSocioCRM,JGarcia-Panach/odoo,ujjwalwahi/odoo,BT-fgarbely/odoo,feroda/odoo,Kilhog/odoo,thanhacun/odoo,kifcaliph/odoo,florian-dacosta/OpenUpgrade,GauravSahu/odoo,mlaitinen/odoo,SerpentCS/odoo,janocat/odoo,apocalypsebg/odoo,windedge/odoo,KontorConsulting/odoo,kirca/OpenUpgrade,alhashash/odoo,fossoult/odoo,ingadhoc/odoo,BT-fgarbely/odoo,feroda/odoo,bguillot/OpenUpgrade,leorochael/odoo,zchking/odoo,deKupini/erp,bakhtout/odoo-educ,tangyiyong/odoo,avoinsystems/odoo,joshuajan/odoo,fossoult/odoo,arthru/OpenUpgrade,kifcaliph/odoo,alexteodor/odoo,pedrobaeza/odoo,mustafat/odoo-1,CatsAndDogsbvba/odoo,dllsf/odootest,srsman/odoo,alqfahad/odoo,doomsterinc/odoo,charbeljc/OCB,ChanduERP/odoo,jesramirez/odoo,sebalix/OpenUpgrade,Maspear/odoo,ehirt/odoo,nuncjo/odoo,RafaelTorrealba/odoo,naousse/odoo,oasiswork/odoo,lombritz/odoo,xujb/odoo,laslabs/odoo,osvalr/odoo,arthru/OpenUpgrade,savoirfairelinux/odoo,Eric-Zhong/odoo,dariemp/odoo,lightcn/odoo,leoliujie/odoo,steedos/odoo,Au
yaJackie/odoo,ShineFan/odoo,SAM-IT-SA/odoo,nitinitprof/odoo,abdellatifkarroum/odoo,nuncjo/odoo,jaxkodex/odoo,abstract-open-solutions/OCB,jfpla/odoo,srsman/odoo,OpenUpgrade/OpenUpgrade,klunwebale/odoo,tinkerthaler/odoo,NL66278/OCB,Ernesto99/odoo,javierTerry/odoo,draugiskisprendimai/odoo,rubencabrera/odoo,MarcosCommunity/odoo,florian-dacosta/OpenUpgrade,ClearCorp-dev/odoo,hip-odoo/odoo,jolevq/odoopub,tvtsoft/odoo8,KontorConsulting/odoo,Grirrane/odoo,mvaled/OpenUpgrade,bkirui/odoo,BT-rmartin/odoo,sve-odoo/odoo,BT-ojossen/odoo,florian-dacosta/OpenUpgrade,dkubiak789/odoo,steedos/odoo,gvb/odoo,colinnewell/odoo,slevenhagen/odoo-npg,brijeshkesariya/odoo,bkirui/odoo,SerpentCS/odoo,n0m4dz/odoo,NeovaHealth/odoo,hanicker/odoo,incaser/odoo-odoo,luiseduardohdbackup/odoo,sv-dev1/odoo,grap/OpenUpgrade,hassoon3/odoo,Noviat/odoo,odooindia/odoo,pedrobaeza/odoo,thanhacun/odoo,slevenhagen/odoo-npg,sysadminmatmoz/OCB,laslabs/odoo,OpenUpgrade/OpenUpgrade,SAM-IT-SA/odoo,tinkhaven-organization/odoo,srsman/odoo,luiseduardohdbackup/odoo,prospwro/odoo,mlaitinen/odoo,rgeleta/odoo,gavin-feng/odoo,BT-ojossen/odoo,bakhtout/odoo-educ,rowemoore/odoo,sv-dev1/odoo,alexcuellar/odoo,nuncjo/odoo,shaufi/odoo,hassoon3/odoo,florentx/OpenUpgrade,juanalfonsopr/odoo,omprakasha/odoo,kirca/OpenUpgrade,fgesora/odoo,hubsaysnuaa/odoo,jusdng/odoo,provaleks/o8,incaser/odoo-odoo,erkrishna9/odoo,sergio-incaser/odoo,savoirfairelinux/OpenUpgrade,AuyaJackie/odoo,omprakasha/odoo,SerpentCS/odoo,MarcosCommunity/odoo,hip-odoo/odoo,stonegithubs/odoo,charbeljc/OCB,fgesora/odoo,joariasl/odoo,tangyiyong/odoo,nuuuboo/odoo,funkring/fdoo,jusdng/odoo,jpshort/odoo,cedk/odoo,rahuldhote/odoo,dezynetechnologies/odoo,lgscofield/odoo,spadae22/odoo,grap/OpenUpgrade,PongPi/isl-odoo,rgeleta/odoo,dllsf/odootest,mustafat/odoo-1,n0m4dz/odoo,sinbazhou/odoo,idncom/odoo,incaser/odoo-odoo,prospwro/odoo,diagramsoftware/odoo,shaufi/odoo,tvtsoft/odoo8,bealdav/OpenUpgrade,fgesora/odoo,erkrishna9/odoo,gsmartway/odoo,hmen89/odoo,leoliujie/odoo,GauravSahu/
odoo,srimai/odoo,andreparames/odoo,abstract-open-solutions/OCB,windedge/odoo,GauravSahu/odoo,fossoult/odoo,nagyistoce/odoo-dev-odoo,csrocha/OpenUpgrade,CopeX/odoo,hoatle/odoo,guewen/OpenUpgrade,jiachenning/odoo,datenbetrieb/odoo,JCA-Developpement/Odoo,shivam1111/odoo,gsmartway/odoo,JonathanStein/odoo,fossoult/odoo,colinnewell/odoo,savoirfairelinux/OpenUpgrade,blaggacao/OpenUpgrade,BT-fgarbely/odoo,ehirt/odoo,0k/odoo,acshan/odoo,joshuajan/odoo,luistorresm/odoo,nuuuboo/odoo,thanhacun/odoo,jaxkodex/odoo,deKupini/erp,slevenhagen/odoo,dalegregory/odoo,ccomb/OpenUpgrade,pplatek/odoo,minhtuancn/odoo,abstract-open-solutions/OCB,joariasl/odoo,fuselock/odoo,abenzbiria/clients_odoo,tarzan0820/odoo,ygol/odoo,stonegithubs/odoo,bguillot/OpenUpgrade,Endika/OpenUpgrade,avoinsystems/odoo,cloud9UG/odoo,cdrooom/odoo,ehirt/odoo,wangjun/odoo,odoo-turkiye/odoo,bwrsandman/OpenUpgrade,patmcb/odoo,0k/OpenUpgrade,stephen144/odoo,doomsterinc/odoo,ehirt/odoo,0k/OpenUpgrade,chiragjogi/odoo,goliveirab/odoo,abstract-open-solutions/OCB,bobisme/odoo,bwrsandman/OpenUpgrade,osvalr/odoo,Antiun/odoo,abenzbiria/clients_odoo,incaser/odoo-odoo,lightcn/odoo,lightcn/odoo,poljeff/odoo,apanju/odoo,mustafat/odoo-1,n0m4dz/odoo,slevenhagen/odoo,CatsAndDogsbvba/odoo,tvibliani/odoo,jiangzhixiao/odoo,QianBIG/odoo,GauravSahu/odoo,ramadhane/odoo,feroda/odoo,bakhtout/odoo-educ,jesramirez/odoo,slevenhagen/odoo,FlorianLudwig/odoo,jeasoft/odoo,bealdav/OpenUpgrade,oliverhr/odoo,steedos/odoo,synconics/odoo,rgeleta/odoo,damdam-s/OpenUpgrade,pedrobaeza/OpenUpgrade,shaufi10/odoo,alqfahad/odoo,christophlsa/odoo,sergio-incaser/odoo,Nowheresly/odoo,chiragjogi/odoo,Danisan/odoo-1,rahuldhote/odoo,charbeljc/OCB,addition-it-solutions/project-all,fgesora/odoo,fuhongliang/odoo,nhomar/odoo-mirror,oliverhr/odoo,tinkhaven-organization/odoo,apanju/odoo,highco-groupe/odoo,idncom/odoo,Daniel-CA/odoo,markeTIC/OCB,virgree/odoo,fjbatresv/odoo,ccomb/OpenUpgrade,0k/OpenUpgrade,kittiu/odoo,tangyiyong/odoo,ujjwalwahi/odoo,Daniel-CA/odoo,Maspear/od
oo,savoirfairelinux/odoo,lgscofield/odoo,factorlibre/OCB,Nowheresly/odoo,BT-fgarbely/odoo,Daniel-CA/odoo,fevxie/odoo,MarcosCommunity/odoo,demon-ru/iml-crm,jpshort/odoo,dllsf/odootest,Drooids/odoo,Antiun/odoo,JCA-Developpement/Odoo,bguillot/OpenUpgrade,demon-ru/iml-crm,VielSoft/odoo,storm-computers/odoo,Nick-OpusVL/odoo,slevenhagen/odoo,cysnake4713/odoo,ChanduERP/odoo,shaufi10/odoo,cloud9UG/odoo,ApuliaSoftware/odoo,nhomar/odoo,addition-it-solutions/project-all,savoirfairelinux/odoo,steedos/odoo,havt/odoo,stephen144/odoo,addition-it-solutions/project-all,nhomar/odoo,blaggacao/OpenUpgrade,Daniel-CA/odoo,Gitlab11/odoo,odoousers2014/odoo,cedk/odoo,hassoon3/odoo,Ichag/odoo,oihane/odoo,dllsf/odootest,jiachenning/odoo,luistorresm/odoo,massot/odoo,BT-astauder/odoo,Kilhog/odoo,nhomar/odoo-mirror,jolevq/odoopub,spadae22/odoo,poljeff/odoo,Nowheresly/odoo,syci/OCB,christophlsa/odoo,Drooids/odoo,guerrerocarlos/odoo,prospwro/odoo,nexiles/odoo,Adel-Magebinary/odoo,kittiu/odoo,alexcuellar/odoo,Daniel-CA/odoo,gorjuce/odoo | addons/account_product_template/models/invoice.py | addons/account_product_template/models/invoice.py | # -*- coding: utf-8 -*-
from openerp.osv import osv
class account_invoice(osv.Model):
    """Customer invoices: send product e-mail templates on validation."""
    _inherit = 'account.invoice'

    def invoice_validate_send_email(self, cr, uid, ids, context=None):
        """Send the e-mail template attached to each invoiced product.

        Only customer invoices (``out_invoice``) are processed.  The
        invoice partner is first subscribed as a follower so it receives
        the posted message.  Returns True.
        """
        Composer = self.pool['mail.compose.message']
        for invoice in self.browse(cr, uid, ids, context=context):
            # send template only on customer invoice
            if invoice.type != 'out_invoice':
                continue
            # subscribe the partner to the invoice
            if invoice.partner_id.id not in invoice.message_follower_ids:
                self.message_subscribe(cr, uid, [invoice.id], [invoice.partner_id.id], context=context)
            for line in invoice.invoice_line:
                if line.product_id.email_template_id:
                    # CLEANME: should define and use a clean API: message_post with a template
                    composer_id = Composer.create(cr, uid, {
                        'model': 'account.invoice',
                        'res_id': invoice.id,
                        'template_id': line.product_id.email_template_id.id,
                        'composition_mode': 'comment',
                    }, context=context)
                    template_values = Composer.onchange_template_id(
                        cr, uid, composer_id, line.product_id.email_template_id.id, 'comment', 'account.invoice', invoice.id
                    )['value']
                    # Link the template's attachments to the composer.
                    # BUGFIX: the fallback default must be an empty LIST,
                    # not the string '[]' — iterating the string yields the
                    # characters '[' and ']' and builds bogus (4, '[') tuples.
                    template_values['attachment_ids'] = [
                        (4, attachment_id)
                        for attachment_id in template_values.get('attachment_ids', [])
                    ]
                    Composer.write(cr, uid, [composer_id], template_values, context=context)
                    Composer.send_mail(cr, uid, [composer_id], context=context)
        return True

    def invoice_validate(self, cr, uid, ids, context=None):
        """Validate the invoice, then trigger the per-product template e-mails."""
        res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context)
        self.invoice_validate_send_email(cr, uid, ids, context=context)
        return res
| # -*- coding: utf-8 -*-
from openerp.osv import osv
class account_invoice(osv.Model):
    """Send product-level email templates when a customer invoice is validated."""
    _inherit = 'account.invoice'

    def invoice_validate_send_email(self, cr, uid, ids, context=None):
        """Email the invoice partner one message per invoice line whose
        product defines an email template; only customer invoices are
        handled. Returns True.
        """
        Composer = self.pool['mail.compose.message']
        for invoice in self.browse(cr, uid, ids, context=context):
            # send template only on customer invoice
            if invoice.type != 'out_invoice':
                continue
            # subscribe the partner to the invoice
            if invoice.partner_id.id not in invoice.message_follower_ids:
                self.message_subscribe(cr, uid, [invoice.id], [invoice.partner_id.id], context=context)
            for line in invoice.invoice_line:
                if line.product_id.email_template_id:
                    # CLEANME: should define and use a clean API: message_post with a template
                    composer_id = Composer.create(cr, uid, {
                        'model': 'account.invoice',
                        'res_id': invoice.id,
                        'template_id': line.product_id.email_template_id.id,
                        'composition_mode': 'comment',
                    }, context=context)
                    # NOTE(review): context is not forwarded to write(), and the
                    # template's attachments are not re-linked here -- confirm
                    # whether that is intended.
                    Composer.write(
                        cr, uid, [composer_id],
                        Composer.onchange_template_id(cr, uid, composer_id, line.product_id.email_template_id.id, 'comment', 'account.invoice', invoice.id)['value']
                    )
                    Composer.send_mail(cr, uid, [composer_id], context=context)
        return True

    def invoice_validate(self, cr, uid, ids, context=None):
        """Trigger the template emails after standard validation succeeds."""
        res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context)
        self.invoice_validate_send_email(cr, uid, ids, context=context)
        return res
| agpl-3.0 | Python |
2537029f18df951b9234953452290a5172f5886f | fix send_mail() when no HTML content was given In commit "remove print from subscription" the EMAIL_DEBUG setting was removed from send_mail(). The msg.send() was mistakingly moved inside the if html: body. | saifrahmed/DjangoBB,hsoft/DjangoBB,agepoly/DjangoBB,saifrahmed/DjangoBB,slav0nic/DjangoBB,slav0nic/DjangoBB,slav0nic/DjangoBB,hsoft/slimbb,saifrahmed/DjangoBB,hsoft/slimbb,hsoft/DjangoBB,agepoly/DjangoBB,agepoly/DjangoBB,hsoft/slimbb,hsoft/DjangoBB | djangobb_forum/subscription.py | djangobb_forum/subscription.py | from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.html import strip_tags
from djangobb_forum import settings as forum_settings
from djangobb_forum.util import absolute_url
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail
else:
from django.core.mail import send_mail
def send_mail(subject, text, from_email, rec_list, html=None):
    """
    Shortcut for sending email.

    Builds a multipart message with *text* as the plain-text body and, when
    *html* is given, attaches it as a "text/html" alternative.  Delivery
    failures are swallowed (fail_silently=True).

    NOTE(review): this definition shadows the ``send_mail`` imported above
    (django-mailer or django core), making that conditional import dead
    code -- confirm whether the import block is still wanted.
    """
    msg = EmailMultiAlternatives(subject, text, from_email, rec_list)
    if html:
        msg.attach_alternative(html, "text/html")
    msg.send(fail_silently=True)
# TODO: move to txt template
TOPIC_SUBSCRIPTION_TEXT_TEMPLATE = (u"""New reply from %(username)s to topic that you have subscribed on.
---
%(message)s
---
See topic: %(post_url)s
Unsubscribe %(unsubscribe_url)s""")
def email_topic_subscribers(post):
    """Email every subscriber of the post's topic about the new reply.

    The opening post of a topic never triggers notifications, and the
    reply's author is never notified about their own post.
    """
    topic = post.topic
    post_body_text = strip_tags(post.body_html)
    if post != topic.head:
        for user in topic.subscribers.all():
            if user != post.user:
                subject = u'RE: %s' % topic.name
                to_email = user.email
                text_content = TOPIC_SUBSCRIPTION_TEXT_TEMPLATE % {
                    'username': post.user.username,
                    'message': post_body_text,
                    'post_url': absolute_url(post.get_absolute_url()),
                    'unsubscribe_url': absolute_url(reverse('djangobb:forum_delete_subscription', args=[post.topic.id])),
                }
                #html_content = html_version(post)
                send_mail(subject, text_content, settings.DEFAULT_FROM_EMAIL, [to_email])
def notify_topic_subscribers(post):
    """Resolve the dotted-path handler configured in the forum settings and
    invoke it with *post*."""
    dotted = forum_settings.NOTIFICATION_HANDLER
    module_name, _, func_name = dotted.rpartition('.')
    # level=-1 keeps Python 2 implicit-relative-import semantics
    module = __import__(module_name, globals(), locals(), [func_name], -1)
    getattr(module, func_name)(post)
| from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.html import strip_tags
from djangobb_forum import settings as forum_settings
from djangobb_forum.util import absolute_url
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail
else:
from django.core.mail import send_mail
def send_mail(subject, text, from_email, rec_list, html=None):
    """
    Shortcut for sending email.

    Builds a multipart message with *text* as the plain-text body and, when
    *html* is given, attaches it as a "text/html" alternative.  Delivery
    failures are swallowed (fail_silently=True).
    """
    msg = EmailMultiAlternatives(subject, text, from_email, rec_list)
    if html:
        msg.attach_alternative(html, "text/html")
    # BUGFIX: send() must run unconditionally; when it sat inside the
    # ``if html:`` branch, plain-text-only messages were silently dropped.
    msg.send(fail_silently=True)
# TODO: move to txt template
TOPIC_SUBSCRIPTION_TEXT_TEMPLATE = (u"""New reply from %(username)s to topic that you have subscribed on.
---
%(message)s
---
See topic: %(post_url)s
Unsubscribe %(unsubscribe_url)s""")
def email_topic_subscribers(post):
    """Email every subscriber of the post's topic about the new reply.

    The opening post of a topic never triggers notifications, and the
    reply's author is never notified about their own post.
    """
    topic = post.topic
    post_body_text = strip_tags(post.body_html)
    if post != topic.head:
        for user in topic.subscribers.all():
            if user != post.user:
                subject = u'RE: %s' % topic.name
                to_email = user.email
                text_content = TOPIC_SUBSCRIPTION_TEXT_TEMPLATE % {
                    'username': post.user.username,
                    'message': post_body_text,
                    'post_url': absolute_url(post.get_absolute_url()),
                    'unsubscribe_url': absolute_url(reverse('djangobb:forum_delete_subscription', args=[post.topic.id])),
                }
                #html_content = html_version(post)
                send_mail(subject, text_content, settings.DEFAULT_FROM_EMAIL, [to_email])


def notify_topic_subscribers(post):
    """Dispatch *post* to the notification handler configured in the forum
    settings (a dotted "package.module.function" path)."""
    path = forum_settings.NOTIFICATION_HANDLER.split('.')
    module = '.'.join(path[:-1])
    func = path[-1]
    # level=-1 (implicit relative imports) is Python 2 only
    module = __import__(module, globals(), locals(), [func], -1)
    handler = getattr(module, func)
    handler(post)
| bsd-3-clause | Python |
13dac91fa6025e41e5ce56aebfde59be20fddf4a | remove blank line | jamespcole/home-assistant,Zac-HD/home-assistant,sdague/home-assistant,morphis/home-assistant,rohitranjan1991/home-assistant,sffjunkie/home-assistant,ct-23/home-assistant,keerts/home-assistant,jabesq/home-assistant,nevercast/home-assistant,oandrew/home-assistant,sanmiguel/home-assistant,betrisey/home-assistant,miniconfig/home-assistant,mKeRix/home-assistant,balloob/home-assistant,bencmbrook/home-assistant,PetePriority/home-assistant,devdelay/home-assistant,bencmbrook/home-assistant,pottzer/home-assistant,sander76/home-assistant,miniconfig/home-assistant,kyvinh/home-assistant,tinloaf/home-assistant,sfam/home-assistant,leoc/home-assistant,FreekingDean/home-assistant,nugget/home-assistant,xifle/home-assistant,EricRho/home-assistant,hmronline/home-assistant,partofthething/home-assistant,emilhetty/home-assistant,CCOSTAN/home-assistant,sfam/home-assistant,qedi-r/home-assistant,nugget/home-assistant,happyleavesaoc/home-assistant,joopert/home-assistant,sanmiguel/home-assistant,aronsky/home-assistant,open-homeautomation/home-assistant,jabesq/home-assistant,soldag/home-assistant,maddox/home-assistant,nnic/home-assistant,sffjunkie/home-assistant,LinuxChristian/home-assistant,sander76/home-assistant,jaharkes/home-assistant,bdfoster/blumate,EricRho/home-assistant,MungoRae/home-assistant,mKeRix/home-assistant,balloob/home-assistant,alanbowman/home-assistant,tchellomello/home-assistant,nugget/home-assistant,HydrelioxGitHub/home-assistant,pottzer/home-assistant,aoakeson/home-assistant,molobrakos/home-assistant,theolind/home-assistant,kennedyshead/home-assistant,DavidLP/home-assistant,jnewland/home-assistant,DavidLP/home-assistant,Julian/home-assistant,dorant/home-assistant,ma314smith/home-assistant,emilhetty/home-assistant,kyvinh/home-assistant,betrisey/home-assistant,shaftoe/home-assistant,Theb-1/home-assistant,bdfoster/blumate,auduny/home-assistant,alexmogavero/home-assistant,hexxter/home-assistant,FreekingDean/home-as
sistant,philipbl/home-assistant,instantchow/home-assistant,Smart-Torvy/torvy-home-assistant,stefan-jonasson/home-assistant,jamespcole/home-assistant,Nzaga/home-assistant,florianholzapfel/home-assistant,mahendra-r/home-assistant,HydrelioxGitHub/home-assistant,varunr047/homefile,persandstrom/home-assistant,philipbl/home-assistant,coteyr/home-assistant,Zyell/home-assistant,leoc/home-assistant,robbiet480/home-assistant,tomduijf/home-assistant,aoakeson/home-assistant,alanbowman/home-assistant,mikaelboman/home-assistant,caiuspb/home-assistant,theolind/home-assistant,Zyell/home-assistant,alexkolar/home-assistant,turbokongen/home-assistant,shaftoe/home-assistant,maddox/home-assistant,sffjunkie/home-assistant,alexkolar/home-assistant,kyvinh/home-assistant,hexxter/home-assistant,tmm1/home-assistant,Julian/home-assistant,leoc/home-assistant,morphis/home-assistant,deisi/home-assistant,tboyce1/home-assistant,shaftoe/home-assistant,xifle/home-assistant,balloob/home-assistant,molobrakos/home-assistant,keerts/home-assistant,sffjunkie/home-assistant,Danielhiversen/home-assistant,fbradyirl/home-assistant,mikaelboman/home-assistant,bdfoster/blumate,florianholzapfel/home-assistant,HydrelioxGitHub/home-assistant,ewandor/home-assistant,Cinntax/home-assistant,tmm1/home-assistant,Teagan42/home-assistant,nevercast/home-assistant,justyns/home-assistant,Nzaga/home-assistant,ErykB2000/home-assistant,Smart-Torvy/torvy-home-assistant,tomduijf/home-assistant,Zac-HD/home-assistant,aronsky/home-assistant,alexkolar/home-assistant,dmeulen/home-assistant,betrisey/home-assistant,badele/home-assistant,SEJeff/home-assistant,auduny/home-assistant,bdfoster/blumate,miniconfig/home-assistant,eagleamon/home-assistant,qedi-r/home-assistant,pottzer/home-assistant,persandstrom/home-assistant,fbradyirl/home-assistant,xifle/home-assistant,dmeulen/home-assistant,robjohnson189/home-assistant,alexmogavero/home-assistant,eagleamon/home-assistant,varunr047/homefile,sffjunkie/home-assistant,ma314smith/home-assistant,luk
as-hetzenecker/home-assistant,michaelarnauts/home-assistant,teodoc/home-assistant,happyleavesaoc/home-assistant,happyleavesaoc/home-assistant,sfam/home-assistant,jawilson/home-assistant,MungoRae/home-assistant,MartinHjelmare/home-assistant,stefan-jonasson/home-assistant,deisi/home-assistant,mikaelboman/home-assistant,postlund/home-assistant,toddeye/home-assistant,mKeRix/home-assistant,adrienbrault/home-assistant,dorant/home-assistant,tboyce021/home-assistant,tboyce1/home-assistant,sanmiguel/home-assistant,michaelarnauts/home-assistant,nevercast/home-assistant,jamespcole/home-assistant,turbokongen/home-assistant,hmronline/home-assistant,tomduijf/home-assistant,varunr047/homefile,MungoRae/home-assistant,emilhetty/home-assistant,ewandor/home-assistant,pschmitt/home-assistant,Smart-Torvy/torvy-home-assistant,nkgilley/home-assistant,partofthething/home-assistant,devdelay/home-assistant,rohitranjan1991/home-assistant,badele/home-assistant,srcLurker/home-assistant,teodoc/home-assistant,joopert/home-assistant,caiuspb/home-assistant,open-homeautomation/home-assistant,alexmogavero/home-assistant,hexxter/home-assistant,home-assistant/home-assistant,bencmbrook/home-assistant,jaharkes/home-assistant,devdelay/home-assistant,SEJeff/home-assistant,robjohnson189/home-assistant,CCOSTAN/home-assistant,vitorespindola/home-assistant,justyns/home-assistant,tinloaf/home-assistant,titilambert/home-assistant,tboyce1/home-assistant,LinuxChristian/home-assistant,philipbl/home-assistant,ma314smith/home-assistant,Danielhiversen/home-assistant,ewandor/home-assistant,Cinntax/home-assistant,ErykB2000/home-assistant,morphis/home-assistant,Zac-HD/home-assistant,leppa/home-assistant,mezz64/home-assistant,keerts/home-assistant,lukas-hetzenecker/home-assistant,bdfoster/blumate,Julian/home-assistant,vitorespindola/home-assistant,florianholzapfel/home-assistant,auduny/home-assistant,mikaelboman/home-assistant,miniconfig/home-assistant,coteyr/home-assistant,g12mcgov/home-assistant,hmronline/home-assistant
,florianholzapfel/home-assistant,deisi/home-assistant,robjohnson189/home-assistant,hmronline/home-assistant,home-assistant/home-assistant,maddox/home-assistant,PetePriority/home-assistant,mahendra-r/home-assistant,stefan-jonasson/home-assistant,emilhetty/home-assistant,alanbowman/home-assistant,JshWright/home-assistant,LinuxChristian/home-assistant,sdague/home-assistant,Duoxilian/home-assistant,keerts/home-assistant,open-homeautomation/home-assistant,eagleamon/home-assistant,oandrew/home-assistant,Julian/home-assistant,ma314smith/home-assistant,aequitas/home-assistant,pschmitt/home-assistant,jnewland/home-assistant,persandstrom/home-assistant,tboyce021/home-assistant,jabesq/home-assistant,theolind/home-assistant,aoakeson/home-assistant,ct-23/home-assistant,JshWright/home-assistant,DavidLP/home-assistant,fbradyirl/home-assistant,toddeye/home-assistant,g12mcgov/home-assistant,rohitranjan1991/home-assistant,coteyr/home-assistant,mikaelboman/home-assistant,betrisey/home-assistant,Duoxilian/home-assistant,srcLurker/home-assistant,alexmogavero/home-assistant,deisi/home-assistant,molobrakos/home-assistant,MungoRae/home-assistant,w1ll1am23/home-assistant,JshWright/home-assistant,oandrew/home-assistant,w1ll1am23/home-assistant,MartinHjelmare/home-assistant,leppa/home-assistant,xifle/home-assistant,justyns/home-assistant,happyleavesaoc/home-assistant,nkgilley/home-assistant,deisi/home-assistant,dmeulen/home-assistant,SEJeff/home-assistant,tinloaf/home-assistant,Duoxilian/home-assistant,instantchow/home-assistant,ErykB2000/home-assistant,caiuspb/home-assistant,Zac-HD/home-assistant,luxus/home-assistant,srcLurker/home-assistant,postlund/home-assistant,teodoc/home-assistant,dmeulen/home-assistant,instantchow/home-assistant,ct-23/home-assistant,kennedyshead/home-assistant,Zyell/home-assistant,eagleamon/home-assistant,tboyce1/home-assistant,nnic/home-assistant,Duoxilian/home-assistant,jawilson/home-assistant,hexxter/home-assistant,MungoRae/home-assistant,kyvinh/home-assistant,CCOS
TAN/home-assistant,g12mcgov/home-assistant,dorant/home-assistant,varunr047/homefile,tchellomello/home-assistant,luxus/home-assistant,stefan-jonasson/home-assistant,ct-23/home-assistant,JshWright/home-assistant,Smart-Torvy/torvy-home-assistant,Teagan42/home-assistant,aequitas/home-assistant,jnewland/home-assistant,vitorespindola/home-assistant,GenericStudent/home-assistant,robjohnson189/home-assistant,emilhetty/home-assistant,philipbl/home-assistant,varunr047/homefile,morphis/home-assistant,ct-23/home-assistant,jaharkes/home-assistant,badele/home-assistant,leoc/home-assistant,titilambert/home-assistant,srcLurker/home-assistant,open-homeautomation/home-assistant,LinuxChristian/home-assistant,michaelarnauts/home-assistant,tmm1/home-assistant,LinuxChristian/home-assistant,Nzaga/home-assistant,mahendra-r/home-assistant,Theb-1/home-assistant,Theb-1/home-assistant,MartinHjelmare/home-assistant,nnic/home-assistant,devdelay/home-assistant,PetePriority/home-assistant,soldag/home-assistant,GenericStudent/home-assistant,robbiet480/home-assistant,jaharkes/home-assistant,luxus/home-assistant,adrienbrault/home-assistant,aequitas/home-assistant,hmronline/home-assistant,EricRho/home-assistant,mKeRix/home-assistant,mezz64/home-assistant,shaftoe/home-assistant,oandrew/home-assistant | homeassistant/components/notify/file.py | homeassistant/components/notify/file.py | """
homeassistant.components.notify.file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
File notification service.
Configuration:
To use the File notifier you will need to add something like the following
to your config/configuration.yaml
notify:
platform: file
filename: FILENAME
timestamp: 1 or 0
Variables:
filename
*Required
Name of the file to use. The file will be created if it doesn't exist and saved
in your config/ folder.
timestamp
*Required
Add a timestamp to the entry, valid entries are 1 or 0.
"""
import logging
import os
import homeassistant.util.dt as dt_util
from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
DOMAIN, ATTR_TITLE, BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config):
    """ Get the file notification service. """
    required = {DOMAIN: ['filename', 'timestamp']}
    if not validate_config(config, required, _LOGGER):
        return None
    conf = config[DOMAIN]
    return FileNotificationService(hass, conf['filename'], conf['timestamp'])
# pylint: disable=too-few-public-methods
class FileNotificationService(BaseNotificationService):
    """ Implements notification service for the File service. """

    def __init__(self, hass, filename, add_timestamp):
        # Log file lives inside the user's configuration directory.
        self.filepath = os.path.join(hass.config.config_dir, filename)
        self.add_timestamp = add_timestamp

    def send_message(self, message="", **kwargs):
        """ Send a message to a file. """
        with open(self.filepath, 'a') as log_file:
            # An empty file first gets a header with the title and start time.
            if os.stat(self.filepath).st_size == 0:
                header = '{} notifications (Log started: {})\n{}\n'.format(
                    kwargs.get(ATTR_TITLE),
                    dt_util.strip_microseconds(dt_util.utcnow()),
                    '-'*80)
                log_file.write(header)
            if self.add_timestamp == 1:
                entry = '{} {}\n'.format(dt_util.utcnow(), message)
            else:
                entry = '{}\n'.format(message)
            log_file.write(entry)
| """
homeassistant.components.notify.file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
File notification service.
Configuration:
To use the File notifier you will need to add something like the following
to your config/configuration.yaml
notify:
platform: file
filename: FILENAME
timestamp: 1 or 0
Variables:
filename
*Required
Name of the file to use. The file will be created if it doesn't exist and saved
in your config/ folder.
timestamp
*Required
Add a timestamp to the entry, valid entries are 1 or 0.
"""
import logging
import os
import homeassistant.util.dt as dt_util
from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
DOMAIN, ATTR_TITLE, BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config):
    """ Get the file notification service. """
    # Both 'filename' and 'timestamp' keys are mandatory in the config.
    if not validate_config(config,
                           {DOMAIN: ['filename',
                                     'timestamp']},
                           _LOGGER):
        return None
    filename = config[DOMAIN]['filename']
    timestamp = config[DOMAIN]['timestamp']
    return FileNotificationService(hass, filename, timestamp)


# pylint: disable=too-few-public-methods
class FileNotificationService(BaseNotificationService):
    """ Implements notification service for the File service. """

    def __init__(self, hass, filename, add_timestamp):
        # Log file lives inside the user's configuration directory.
        self.filepath = os.path.join(hass.config.config_dir, filename)
        # 1 -> prefix every entry with a UTC timestamp
        self.add_timestamp = add_timestamp

    def send_message(self, message="", **kwargs):
        """ Send a message to a file. """
        with open(self.filepath, 'a') as file:
            # First ever write: emit a header line with title and start time.
            if os.stat(self.filepath).st_size == 0:
                title = '{} notifications (Log started: {})\n{}\n'.format(
                    kwargs.get(ATTR_TITLE),
                    dt_util.strip_microseconds(dt_util.utcnow()),
                    '-'*80)
                file.write(title)
            if self.add_timestamp == 1:
                text = '{} {}\n'.format(dt_util.utcnow(), message)
                file.write(text)
            else:
                text = '{}\n'.format(message)
                file.write(text)
| apache-2.0 | Python |
df2bf7cc95f38d9e6605dcc91e56b28502063b6a | Fix usage of `url_title` in CategoryAdmin. | onespacemedia/cms-faqs,onespacemedia/cms-faqs | apps/faqs/admin.py | apps/faqs/admin.py | from cms.admin import PageBaseAdmin, SearchMetaBaseAdmin
from django.contrib import admin
from .models import Category, Faq
@admin.register(Faq)
class FaqAdmin(SearchMetaBaseAdmin):
    """ Admin settings for the Faq model """
    # Slug is auto-filled from the question text in the admin form.
    prepopulated_fields = {"url_title": ("question",)}
    filter_horizontal = ("categories",)
    fieldsets = (
        (None, {
            "fields": ["page", "question", "url_title", "answer", "categories", "order"]
        }),
        SearchMetaBaseAdmin.PUBLICATION_FIELDS,
        SearchMetaBaseAdmin.SEO_FIELDS,
    )


@admin.register(Category)
class CategoryAdmin(PageBaseAdmin):
    """ Admin settings for the FAQ Category model. """
    # Slug is auto-filled from the category title.
    prepopulated_fields = {
        "slug": ("title",)
    }
    fieldsets = (
        PageBaseAdmin.TITLE_FIELDS,
        ("Content", {
            "fields": ("content_primary",),
        }),
        PageBaseAdmin.PUBLICATION_FIELDS,
        PageBaseAdmin.NAVIGATION_FIELDS,
        PageBaseAdmin.SEO_FIELDS,
    )
| from cms.admin import PageBaseAdmin, SearchMetaBaseAdmin
from django.contrib import admin
from .models import Category, Faq
@admin.register(Faq)
class FaqAdmin(SearchMetaBaseAdmin):
    """ Admin settings for the Faq model """
    # Slug is auto-filled from the question text in the admin form.
    prepopulated_fields = {"url_title": ("question",)}
    filter_horizontal = ("categories",)
    fieldsets = (
        (None, {
            "fields": (
                "page",
                "question",
                "url_title",
                "answer",
                "categories",
                "order",
            )
        }),
        SearchMetaBaseAdmin.PUBLICATION_FIELDS,
        SearchMetaBaseAdmin.SEO_FIELDS,
    )


@admin.register(Category)
class CategoryAdmin(PageBaseAdmin):
    """ Admin settings for the FAQ Category model. """
    # Slug is auto-filled from the category title.
    prepopulated_fields = {"url_title": ("title",)}
    fieldsets = (
        PageBaseAdmin.TITLE_FIELDS,
        ("Content", {
            "fields": ("content_primary",),
        }),
        PageBaseAdmin.PUBLICATION_FIELDS,
        PageBaseAdmin.NAVIGATION_FIELDS,
        PageBaseAdmin.SEO_FIELDS,
    )
| mit | Python |
8f02e80df367ccd3870110c069898969f20924da | Fix stupid bug | ehabkost/busmap,ehabkost/busmap,ehabkost/busmap | python/busmap/fetch.py | python/busmap/fetch.py | # -*- coding: utf-8 -*
import horarios, linhas, env, dias
def get_linha_hor(idhor, nome):
    """Return the database id of the bus line identified by *idhor*.

    Lookup order:
      1. a 'linhas' row whose ``idhor`` already matches;
      2. a row with the same name but no ``idhor`` yet -- it is claimed by
         filling in ``idhor``;
      3. otherwise a fresh row is inserted.
    """
    c = env.db.cursor()
    # look for id horario
    r = c.select_onerow('linhas', ['id'], 'idhor=%s', [idhor])
    if r:
        c.close()
        return r[0]
    # not found. look for a similar name, but with no idhor set
    r = c.select_onerow('linhas', ['id'], 'idhor is null and nome=%s', [nome])
    if r:
        id = r[0]
        # found. set idhor
        c.execute('update linhas set idhor=%s where id=%s',
                  [idhor, id])
        c.close()
        return id
    # not found. insert a new record
    c.insert_one('linhas', idhor=idhor, nome=nome)
    id = c.lastrowid
    c.close()
    return id


def get_ponto_hor(nome):
    """Return the id of the stop ('ponto') named *nome*, inserting a new
    row if it does not exist yet.

    NOTE(review): the condition parameter is passed as a bare string
    (``nome``) rather than a list as in get_linha_hor -- confirm that
    select_onerow accepts both forms.
    """
    c = env.db.cursor()
    r = c.select_onerow('pontos', ['id'], 'nome=%s', nome)
    if r:
        c.close()
        return r[0]
    # not found
    c.insert_one('pontos', nome=nome)
    id = c.lastrowid
    c.close()
    return id
def fetch_horarios(idhor, nome):
    """Refresh the stored timetable for one bus line.

    Deletes any previously stored timetable sets and entries for the line,
    downloads the timetable page again and inserts the parsed data.
    """
    c = env.db.cursor()
    idlinha = get_linha_hor(idhor, nome)
    # Drop the line's old timetable before re-inserting it.
    c.execute('delete from hs, h \
          using horsets hs, horarios h \
          where hs.idlinha=%s and h.idset=hs.id',
          [idlinha])
    html = horarios.get_horarios_html(idhor)
    for pto,dia,apartir,horas in horarios.parse_hor_html(html):
        print 'ponto: %s, dias: %s' % (pto, dia)
        idponto = get_ponto_hor(pto)
        # Map the day-description string to its numeric id.
        d = dias.id_dias(dia)
        c.insert_one('horsets', idlinha=idlinha, idponto=idponto,
                     dia=d, apartir=apartir)
        idset = c.lastrowid
        for sp,h in horas:
            c.insert_one('horarios',
                         idset=idset, hora=h, special=sp)
    c.close()
def fetch_hor_all():
for cod,nome in horarios.lista_linhas():
print 'Fetching %s:%s' % (cod, nome)
fetch_horarios(cod, nome)
if __name__ == '__main__':
    # One-off manual test kept for reference:
    #fetch_horarios('022', u'INTER 2 (Horário)')
    fetch_hor_all()
| # -*- coding: utf-8 -*
import horarios, linhas, env, dias
def get_linha_hor(idhor, nome):
    """Return the database id of the bus line identified by *idhor*.

    Lookup order:
      1. a 'linhas' row whose ``idhor`` already matches;
      2. a row with the same name but no ``idhor`` yet -- it is claimed by
         filling in ``idhor``;
      3. otherwise a fresh row is inserted.
    """
    c = env.db.cursor()
    # look for id horario
    r = c.select_onerow('linhas', ['id'], 'idhor=%s', [idhor])
    if r:
        c.close()
        return r[0]
    # not found. look for a similar name, but with no idhor set
    r = c.select_onerow('linhas', ['id'], 'idhor is null and nome=%s', [nome])
    if r:
        id = r[0]
        # found. set idhor
        c.execute('update linhas set idhor=%s where id=%s',
                  [idhor, id])
        c.close()
        return id
    # not found. insert a new record
    c.insert_one('linhas', idhor=idhor, nome=nome)
    id = c.lastrowid
    c.close()
    return id


def get_ponto_hor(nome):
    """Return the id of the stop ('ponto') named *nome*, inserting a new
    row if it does not exist yet.

    NOTE(review): the condition parameter is passed as a bare string
    (``nome``) rather than a list as in get_linha_hor -- confirm that
    select_onerow accepts both forms.
    """
    c = env.db.cursor()
    r = c.select_onerow('pontos', ['id'], 'nome=%s', nome)
    if r:
        c.close()
        return r[0]
    # not found
    c.insert_one('pontos', nome=nome)
    id = c.lastrowid
    c.close()
    return id
def fetch_horarios(idhor, nome):
c = env.db.cursor()
idlinha = get_linha_hor(idhor, nome)
c.execute('delete from hs, h \
using horsets hs, horarios h \
where hs.idlinha=%s and h.idset=hs.id',
[idlinha])
html = horarios.get_horarios_html(idhor)
for pto,dias,apartir,horas in horarios.parse_hor_html(html):
print 'ponto: %s, dias: %s' % (pto, dias)
idponto = get_ponto_hor(pto)
d = dias.id_dias(dias)
c.insert_one('horsets', idlinha=idlinha, idponto=idponto,
dia=d, apartir=apartir)
idset = c.lastrowid
for sp,h in horas:
c.insert_one('horarios',
idset=idset, hora=h, special=sp)
c.close()
def fetch_hor_all():
    """Fetch and store the timetable of every known bus line."""
    for cod,nome in horarios.lista_linhas():
        print 'Fetching %s:%s' % (cod, nome)
        fetch_horarios(cod, nome)


if __name__ == '__main__':
    # One-off manual test kept for reference:
    #fetch_horarios('022', u'INTER 2 (Horário)')
    fetch_hor_all()
| mit | Python |
bbc3bc25be1d2d19e6cd0a72dba6e7e5b821cd41 | Bump version | OCA/geospatial,OCA/geospatial,OCA/geospatial | base_geoengine/__openerp__.py | base_geoengine/__openerp__.py | # -*- coding: utf-8 -*-
# © 2011-2015 Nicolas Bessi (Camptocamp SA)
# © 2016 Yannick Vaucher (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
# Odoo addon manifest: the whole file is one top-level dict literal.
{'name': 'Geospatial support for Odoo',
 'version': '9.0.1.2.5',
 'category': 'GeoBI',
 'author': "Camptocamp,ACSONE SA/NV,Odoo Community Association (OCA)",
 'license': 'AGPL-3',
 'website': 'http://openerp.camptocamp.com',
 'depends': [
     'base',
     'web'
 ],
 'init_xml': [],
 # XML/CSV data files loaded at module install/update time, in order.
 'data': [
     'security/data.xml',
     'data/geo_raster_layer_type.xml',
     'views/base_geoengine_view.xml',
     'geo_ir/ir_model_view.xml',
     'geo_view/ir_view_view.xml',
     'geo_view/geo_raster_layer_view.xml',
     'geo_view/geo_vector_layer_view.xml',
     'security/ir.model.access.csv',
 ],
 # Python packages checked for availability at install time.
 'external_dependencies': {
     'python': ['shapely',
                'geojson'],
 },
 'qweb': ["static/src/xml/geoengine.xml"],
 'installable': True,
 # Runs before table creation (per the hook name, sets up PostGIS).
 'pre_init_hook': 'init_postgis',
 }
| # -*- coding: utf-8 -*-
# © 2011-2015 Nicolas Bessi (Camptocamp SA)
# © 2016 Yannick Vaucher (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
# Odoo addon manifest: the whole file is one top-level dict literal.
{'name': 'Geospatial support for Odoo',
 'version': '9.0.1.2.2',
 'category': 'GeoBI',
 'author': "Camptocamp,ACSONE SA/NV,Odoo Community Association (OCA)",
 'license': 'AGPL-3',
 'website': 'http://openerp.camptocamp.com',
 'depends': [
     'base',
     'web'
 ],
 'init_xml': [],
 # XML/CSV data files loaded at module install/update time, in order.
 'data': [
     'security/data.xml',
     'data/geo_raster_layer_type.xml',
     'views/base_geoengine_view.xml',
     'geo_ir/ir_model_view.xml',
     'geo_view/ir_view_view.xml',
     'geo_view/geo_raster_layer_view.xml',
     'geo_view/geo_vector_layer_view.xml',
     'security/ir.model.access.csv',
 ],
 # Python packages checked for availability at install time.
 'external_dependencies': {
     'python': ['shapely',
                'geojson'],
 },
 'qweb': ["static/src/xml/geoengine.xml"],
 'installable': True,
 # Runs before table creation (per the hook name, sets up PostGIS).
 'pre_init_hook': 'init_postgis',
 }
| agpl-3.0 | Python |
77ac5ccab368f9389cea2efd3cda6191d0e8d482 | Remove unused import. | mlavin/fileapi,mlavin/fileapi,mlavin/fileapi | fileapi/views.py | fileapi/views.py | from django import forms
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse
from django.http import JsonResponse, HttpResponseNotFound, HttpResponse
from django.views.generic import View, TemplateView
from jwt_auth.mixins import JSONWebTokenAuthMixin
# Module-level storage backend; all views read/write through it.
storage = FileSystemStorage()


class UploadForm(forms.Form):
    """Validation form for the upload endpoint; expects one file field."""
    name = forms.FileField()


def file_info(name):
    """Build the JSON-serializable metadata dict for one stored file."""
    return {
        'name': name,
        'size': storage.size(name),
        'created': storage.created_time(name),
        'links': {
            # URL of this file's detail endpoint.
            'self': reverse('upload-detail', kwargs={'name': name})
        }
    }
class FileListView(JSONWebTokenAuthMixin, View):
    """Get a list of all available files or create a new file."""

    def get(self, request):
        """List all files with their metadata and a count."""
        # listdir() returns (directories, files); directories are ignored.
        _, files = storage.listdir('')
        info = [file_info(name) for name in files]
        result = {
            'files': info,
            'count': len(info),
        }
        return JsonResponse(result)

    def post(self, request):
        """Add a new file.

        Returns 201 with the stored file's metadata, or 400 with the
        form errors rendered as JSON.
        """
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            upload = form.cleaned_data['name']
            # BUGFIX: Storage.save() may alter the name to avoid collisions
            # and returns the name actually used; report metadata for that
            # name rather than the requested one.
            saved_name = storage.save(upload.name, upload)
            result = file_info(saved_name)
            return JsonResponse(result, status=201)
        else:
            return HttpResponse(form.errors.as_json(), status=400, content_type='application/json')
class FileDetailView(JSONWebTokenAuthMixin, View):
    """Get details for a single file or delete the file."""

    def get(self, request, name):
        """Get details for a file; 404 when it does not exist."""
        if storage.exists(name):
            result = file_info(name)
            return JsonResponse(result)
        else:
            return HttpResponseNotFound()

    def delete(self, request, name):
        """Delete a file; 204 on success, 410 (Gone) when already absent."""
        if storage.exists(name):
            storage.delete(name)
            return HttpResponse(status=204)
        else:
            return HttpResponse(status=410)
class IndexView(TemplateView):
    """Serve the single-page frontend."""
    template_name = 'index.html'
| import json
from django import forms
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse
from django.http import JsonResponse, HttpResponseNotFound, HttpResponse
from django.views.generic import View, TemplateView
from jwt_auth.mixins import JSONWebTokenAuthMixin
# Module-level storage backend; all views read/write through it.
storage = FileSystemStorage()


class UploadForm(forms.Form):
    """Validation form for the upload endpoint; expects one file field."""
    name = forms.FileField()


def file_info(name):
    """Build the JSON-serializable metadata dict for one stored file."""
    return {
        'name': name,
        'size': storage.size(name),
        'created': storage.created_time(name),
        'links': {
            # URL of this file's detail endpoint.
            'self': reverse('upload-detail', kwargs={'name': name})
        }
    }


class FileListView(JSONWebTokenAuthMixin, View):
    """Get a list of all available files or create a new file."""

    def get(self, request):
        """List all files."""
        # listdir() returns (directories, files); directories are ignored.
        _, files = storage.listdir('')
        info = []
        for name in files:
            info.append(file_info(name))
        result = {
            'files': info,
            'count': len(info),
        }
        return JsonResponse(result)

    def post(self, request):
        """Add a new file."""
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            upload = form.cleaned_data['name']
            # NOTE(review): Storage.save() may rename on collision; the
            # response reports the requested name -- confirm intended.
            storage.save(upload.name, upload)
            result = file_info(upload.name)
            return JsonResponse(result, status=201)
        else:
            return HttpResponse(form.errors.as_json(), status=400, content_type='application/json')


class FileDetailView(JSONWebTokenAuthMixin, View):
    """Get details for a single file or delete the file."""

    def get(self, request, name):
        """Get details for a file; 404 when it does not exist."""
        if storage.exists(name):
            result = file_info(name)
            return JsonResponse(result)
        else:
            return HttpResponseNotFound()

    def delete(self, request, name):
        """Delete a file; 204 on success, 410 (Gone) when already absent."""
        if storage.exists(name):
            storage.delete(name)
            return HttpResponse(status=204)
        else:
            return HttpResponse(status=410)


class IndexView(TemplateView):
    """Serve the single-page frontend."""
    template_name = 'index.html'
| bsd-2-clause | Python |
96a46473b3c060075826c1c72e2bec1eb62d8655 | update docstring of commit | evernym/plenum,evernym/zeno | plenum/server/req_handler.py | plenum/server/req_handler.py | from binascii import unhexlify
from plenum.common.types import f
from plenum.common.request import Request
from typing import List
from plenum.common.ledger import Ledger
from plenum.common.state import PruningState
class RequestHandler:
    """
    Base class for request handlers
    Declares methods for validation, application of requests and
    state control
    """

    def __init__(self, ledger: Ledger, state: PruningState):
        # TODO: continue using PruningState until State hierarchy fixed
        self.ledger = ledger
        self.state = state

    def validate(self, req: Request, config = None):
        """
        Validates request. Raises exception if request is invalid.
        """
        pass

    def apply(self, req: Request):
        """
        Applies request
        """
        pass

    def updateState(self, txns, isCommitted=False):
        """
        Updates current state with a number of committed or
        not committed transactions
        """
        pass

    def commit(self, txnCount, stateRoot, txnRoot) -> List:
        """
        :param txnCount: The number of requests to commit (The actual requests are
        picked up from the uncommitted list from the ledger)
        :param stateRoot: The state trie root after the txns are committed
        :param txnRoot: The txn merkle root after the txns are committed
        :return: list of committed transactions
        """
        (seqNoStart, seqNoEnd), committedTxns = \
            self.ledger.commitTxns(txnCount)
        stateRoot = unhexlify(stateRoot.encode())
        txnRoot = self.ledger.hashToStr(unhexlify(txnRoot.encode()))
        # Sanity check: the ledger's new root must equal the expected txn root.
        assert self.ledger.root_hash == txnRoot
        self.state.commit(rootHash=stateRoot)
        # Stamp each committed txn with its ledger sequence number.
        seqNos = range(seqNoStart, seqNoEnd + 1)
        for txn, seqNo in zip(committedTxns, seqNos):
            txn[f.SEQ_NO.nm] = seqNo
        return committedTxns
return committedTxns | from binascii import unhexlify
from plenum.common.types import f
from plenum.common.request import Request
from typing import List
from plenum.common.ledger import Ledger
from plenum.common.state import PruningState
class RequestHandler:
"""
Base class for request handlers
Declares methods for validation, application of requests and
state control
"""
def __init__(self, ledger: Ledger, state: PruningState):
# TODO: continue using PruningState until State hierarchy fixed
self.ledger = ledger
self.state = state
def validate(self, req: Request, config = None):
"""
Validates request. Raises exception if requiest is invalid.
"""
pass
def apply(self, req: Request):
"""
Applies request
"""
pass
def updateState(self, txns, isCommitted=False):
"""
Updates current state with a number of committed or
not committed transactions
"""
pass
def commit(self, txnCount, stateRoot, txnRoot) -> List:
"""
Commit a number of transactions
:param txnCount:
:param stateRoot: expected state root
:param txnRoot:
:return: list of committed transactions
"""
(seqNoStart, seqNoEnd), committedTxns = \
self.ledger.commitTxns(txnCount)
stateRoot = unhexlify(stateRoot.encode())
txnRoot = self.ledger.hashToStr(unhexlify(txnRoot.encode()))
assert self.ledger.root_hash == txnRoot
self.state.commit(rootHash=stateRoot)
seqNos = range(seqNoStart, seqNoEnd + 1)
for txn, seqNo in zip(committedTxns, seqNos):
txn[f.SEQ_NO.nm] = seqNo
return committedTxns | apache-2.0 | Python |
6050b32ddb812e32da08fd15f210d9d9ee794a42 | Print Hello World in Python | rahulbohra/Python-Basic | first-program.py | first-program.py | # Python program for Programming for Everybody (Getting Started with Python) by University of Michigan - Charles Severance
# Task 1 Python code with single print statement but not print hello world
print "It is a great feeling to code in Python"
print "Hello World!"
| # Python program for Programming for Everybody (Getting Started with Python) by University of Michigan - Charles Severance
# Task 1 Python code with single print statement but not print hello world
print "It is a great feeling to code in Python"
| mit | Python |
b203e94205eb7d614d5133f937da302c941a2b4d | check 'NotFreezed' on all transform nodes | sol-ansano-kim/medic,sol-ansano-kim/medic,sol-ansano-kim/medic | plugins/Tester/notFreezed.py | plugins/Tester/notFreezed.py | import medic
from maya import OpenMaya
class NotFreezed(medic.PyTester):
Identity = OpenMaya.MMatrix()
def __init__(self):
super(NotFreezed, self).__init__()
def Name(self):
return "NotFreezed"
def Description(self):
return "Not freezed trasnform(s)"
def Match(self, node):
return node.object().hasFn(OpenMaya.MFn.kTransform)
def test(self, node):
if node.dag().isInstanced():
return None
transform = node.dag().transformationMatrix()
if NotFreezed.Identity == transform:
return None
return medic.PyReport(node)
def Create():
return NotFreezed()
| import medic
from maya import OpenMaya
class NotFreezed(medic.PyTester):
Identity = OpenMaya.MMatrix()
def __init__(self):
super(NotFreezed, self).__init__()
def Name(self):
return "NotFreezed"
def Description(self):
return "Not freezed mesh(s)"
def Match(self, node):
return node.object().hasFn(OpenMaya.MFn.kMesh)
def test(self, node):
if node.dag().isInstanced():
return None
iden = OpenMaya.MMatrix()
for p in node.parents():
transform = p.dag().transformationMatrix()
if not NotFreezed.Identity == transform:
return medic.PyReport(node)
return None
def Create():
return NotFreezed()
| mit | Python |
7182a635d270d9b816dc3f09ba6c541cc7a88b8a | Fix typo | faheempatel/aac_to_mp3 | aac_to_mp3.py | aac_to_mp3.py | #!/usr/bin/env python
import os
import os.path
import sys
import subprocess
#OUTPUT_DIR = '/Users/matt/Desktop/mp3/'
OUTPUT_DIR = 'c:/test/'
def convert_and_save(path):
filenames = [
filename
for filename
in os.listdir(path)
if filename.endswith('.m4a')
]
for filename in filenames:
source = os.path.join(path, filename)
destination = os.path.join(OUTPUT_DIR, '%s.mp3' % filename[:-4])
subprocess.call([
"ffmpeg", "-i",
source,
"-acodec", "libmp3lame", "-ab", "256k",
destination)
])
def flat_file(text_file):
for dir in text_file:
convert_and_save(dir)
return 0
def main():
path = os.getcwd()
convert_and_save(path)
return 0
if __name__ == '__main__':
status = main()
sys.exit(status) | #!/usr/bin/env python
import os
import os.path
import sys
import subprocess
#OUTPUT_DIR = '/Users/matt/Desktop/mp3/'
OUTPUT_DIR = 'c:/test/'
def covert_and_save(path):
filenames = [
filename
for filename
in os.listdir(path)
if filename.endswith('.m4a')
]
for filename in filenames:
source = os.path.join(path, filename)
destination = os.path.join(OUTPUT_DIR, '%s.mp3' % filename[:-4])
subprocess.call([
"ffmpeg", "-i",
source,
"-acodec", "libmp3lame", "-ab", "256k",
destination)
])
def flat_file(text_file):
for dir in text_file:
convert_and_save(dir)
return 0
def main():
path = os.getcwd()
convert_and_save(path)
return 0
if __name__ == '__main__':
status = main()
sys.exit(status) | mit | Python |
9404fd886c3fbdb180083b55cdd69788b24049e4 | Add new value_types to VALUE_TYPE_CHOICES | rdmorganiser/rdmo,rdmorganiser/rdmo,rdmorganiser/rdmo | rdmo/core/constants.py | rdmo/core/constants.py | from django.utils.translation import gettext_lazy as _
VALUE_TYPE_TEXT = 'text'
VALUE_TYPE_URL = 'url'
VALUE_TYPE_INTEGER = 'integer'
VALUE_TYPE_FLOAT = 'float'
VALUE_TYPE_BOOLEAN = 'boolean'
VALUE_TYPE_DATETIME = 'datetime'
VALUE_TYPE_OPTIONS = 'option'
VALUE_TYPE_EMAIL = 'email'
VALUE_TYPE_PHONE = 'phone'
VALUE_TYPE_FILE = 'file'
VALUE_TYPE_CHOICES = (
(VALUE_TYPE_TEXT, _('Text')),
(VALUE_TYPE_URL, _('URL')),
(VALUE_TYPE_INTEGER, _('Integer')),
(VALUE_TYPE_FLOAT, _('Float')),
(VALUE_TYPE_BOOLEAN, _('Boolean')),
(VALUE_TYPE_DATETIME, _('Datetime')),
(VALUE_TYPE_EMAIL, _('Email')),
(VALUE_TYPE_PHONE, _('Phone')),
(VALUE_TYPE_OPTIONS, _('Option')),
(VALUE_TYPE_FILE, _('File'))
)
PERMISSIONS = {
'condition': (
'conditions.add_condition', 'conditions.change_condition', 'conditions.delete_condition'
),
'attribute': (
'domain.add_attribute', 'domain.change_attribute', 'domain.delete_attribute'
),
'optionset': (
'options.add_optionset', 'options.change_optionset', 'options.delete_optionset'
),
'option': (
'options.add_option', 'options.change_option', 'options.delete_option'
),
'catalog': (
'questions.add_catalog', 'questions.change_catalog', 'questions.delete_catalog'
),
'section': (
'questions.add_section', 'questions.change_section', 'questions.delete_section'
),
'questionset': (
'questions.add_questionset', 'questions.change_questionset', 'questions.delete_questionset'
),
'question': (
'questions.add_question', 'questions.change_question', 'questions.delete_question'
),
'task': (
'tasks.add_task', 'tasks.change_task', 'tasks.delete_task'
),
'view': (
'views.add_view', 'views.change_view', 'views.delete_view'
)
}
| from django.utils.translation import gettext_lazy as _
VALUE_TYPE_TEXT = 'text'
VALUE_TYPE_URL = 'url'
VALUE_TYPE_INTEGER = 'integer'
VALUE_TYPE_FLOAT = 'float'
VALUE_TYPE_BOOLEAN = 'boolean'
VALUE_TYPE_DATETIME = 'datetime'
VALUE_TYPE_OPTIONS = 'option'
VALUE_TYPE_EMAIL = 'email'
VALUE_TYPE_PHONE = 'phone'
VALUE_TYPE_FILE = 'file'
VALUE_TYPE_CHOICES = (
(VALUE_TYPE_TEXT, _('Text')),
(VALUE_TYPE_URL, _('URL')),
(VALUE_TYPE_INTEGER, _('Integer')),
(VALUE_TYPE_FLOAT, _('Float')),
(VALUE_TYPE_BOOLEAN, _('Boolean')),
(VALUE_TYPE_DATETIME, _('Datetime')),
(VALUE_TYPE_OPTIONS, _('Option')),
(VALUE_TYPE_FILE, _('File'))
)
PERMISSIONS = {
'condition': (
'conditions.add_condition', 'conditions.change_condition', 'conditions.delete_condition'
),
'attribute': (
'domain.add_attribute', 'domain.change_attribute', 'domain.delete_attribute'
),
'optionset': (
'options.add_optionset', 'options.change_optionset', 'options.delete_optionset'
),
'option': (
'options.add_option', 'options.change_option', 'options.delete_option'
),
'catalog': (
'questions.add_catalog', 'questions.change_catalog', 'questions.delete_catalog'
),
'section': (
'questions.add_section', 'questions.change_section', 'questions.delete_section'
),
'questionset': (
'questions.add_questionset', 'questions.change_questionset', 'questions.delete_questionset'
),
'question': (
'questions.add_question', 'questions.change_question', 'questions.delete_question'
),
'task': (
'tasks.add_task', 'tasks.change_task', 'tasks.delete_task'
),
'view': (
'views.add_view', 'views.change_view', 'views.delete_view'
)
}
| apache-2.0 | Python |
e11169bf85d752054563f22cfe9659b19b76299b | test for Store.get_file | startling/fsstore | fsstore/tests.py | fsstore/tests.py | # -*- coding: utf-8 -*-
import unittest
from tempfile import mkdtemp
from fsstore.core import Store
class TestInterface(unittest.TestCase):
def setUp(self):
"Initialize a Store with a temporary directory."
self.tempdir = mkdtemp()
self.fs = Store(self.tempdir)
def test_save_string(self):
"Test that simple string-assignment works."
self.fs["hello"] = "world"
self.assertEquals(self.fs["hello"], "world")
self.assertEquals(self.fs.get("hello"), "world")
def test_save_dict(self):
"Test that we can save a dict and get it again."
self.fs["dir"] = {"a": "b"}
self.assertEquals(self.fs["dir"]["a"], "b")
def test_nested_dicts(self):
"Test that we can save nested dicts and retrieve their contents."
self.fs["dir"] = {"x": {"y": "z"}}
self.assertEquals(self.fs["dir"]["x"]["y"], "z")
def test_reassignment_dict_to_string(self):
"""Test that nothing breaks if we assign something that used to be a
dict to a string.
"""
self.fs["dir"] = {"x": {"y": "z"}}
self.fs["dir"] = "a new thing."
self.assertEquals(self.fs["dir"], "a new thing.")
def test_reassignment_string_to_dict(self):
"""Test that nothing breaks if we assign something that used to be a
string to a dict.
"""
self.fs["dir"] = "an old thing."
self.fs["dir"] = {"x": {"y": "z"}}
self.assertEquals(self.fs["dir"]["x"]["y"], "z")
def test_resume(self):
"Test that results are constant across Stores."
second = Store(self.tempdir)
self.fs["constant"] = "5"
self.assertEquals(self.fs["constant"], second["constant"])
def test_get_file(self):
self.fs["x"] = "yz"
with self.fs.get_file("x") as f:
self.assertEquals(f.read(), self.fs["x"], "yz")
| # -*- coding: utf-8 -*-
import unittest
from tempfile import mkdtemp
from fsstore.core import Store
class TestInterface(unittest.TestCase):
def setUp(self):
"Initialize a Store with a temporary directory."
self.tempdir = mkdtemp()
self.fs = Store(self.tempdir)
def test_save_string(self):
"Test that simple string-assignment works."
self.fs["hello"] = "world"
self.assertEquals(self.fs["hello"], "world")
self.assertEquals(self.fs.get("hello"), "world")
def test_save_dict(self):
"Test that we can save a dict and get it again."
self.fs["dir"] = {"a": "b"}
self.assertEquals(self.fs["dir"]["a"], "b")
def test_nested_dicts(self):
"Test that we can save nested dicts and retrieve their contents."
self.fs["dir"] = {"x": {"y": "z"}}
self.assertEquals(self.fs["dir"]["x"]["y"], "z")
def test_reassignment_dict_to_string(self):
"""Test that nothing breaks if we assign something that used to be a
dict to a string.
"""
self.fs["dir"] = {"x": {"y": "z"}}
self.fs["dir"] = "a new thing."
self.assertEquals(self.fs["dir"], "a new thing.")
def test_reassignment_string_to_dict(self):
"""Test that nothing breaks if we assign something that used to be a
string to a dict.
"""
self.fs["dir"] = "an old thing."
self.fs["dir"] = {"x": {"y": "z"}}
self.assertEquals(self.fs["dir"]["x"]["y"], "z")
def test_resume(self):
"Test that results are constant across Stores."
second = Store(self.tempdir)
self.fs["constant"] = "5"
self.assertEquals(self.fs["constant"], second["constant"])
| mit | Python |
6ddaac15ddb94821d12a1dd73b2a0ec3f9b8a884 | Fix minor bug in gammcat/info.py | gammapy/gamma-cat | gammacat/info.py | gammacat/info.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import subprocess
import os
from pathlib import Path
import urllib.parse
__all__ = [
'gammacat_info',
'gammacat_tag',
]
class GammaCatInfo:
"""Gather basic info about gammacat.
"""
def __init__(self):
# Git version: http://stackoverflow.com/a/21901260/498873
git_version = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
self.git_version = git_version.decode('ascii').strip()
# TODO: implement stable versions
self.version = self.git_version
# Git repository base directory
self.base_dir = Path(__file__).parent.parent
# Internal gammcat vesion path
hgps_analysis_dir = os.environ.get('HGPS_ANALYSIS')
if hgps_analysis_dir:
self.internal_dir = Path(hgps_analysis_dir) / 'data/catalogs/gammacat-hess-internal/'
def __str__(self):
ss = 'GammaCatInfo:\n'
ss += 'version: {}\n'.format(self.version)
ss += 'git_version: {}\n'.format(self.git_version)
ss += 'base_dir: {}\n'.format(self.base_dir)
return ss
class GammaCatTag:
"""Make and parse string tags.
"""
def source_dataset_filename(self, meta):
return 'gammacat_' + self.source_dataset_str(meta)
def source_dataset_str(self, meta):
return self.source_str(meta) + '_' + self.dataset_str(meta)
def source_str(self, meta):
return '{source_id:06d}'.format_map(meta)
def dataset_str(self, meta):
return urllib.parse.quote(meta['reference_id'])
# return '{reference_id}'.format_map(meta)
gammacat_info = GammaCatInfo()
gammacat_tag = GammaCatTag()
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import subprocess
import os
from pathlib import Path
import urllib.parse
__all__ = [
'gammacat_info',
'gammacat_tag',
]
class GammaCatInfo:
"""Gather basic info about gammacat.
"""
def __init__(self):
# Git version: http://stackoverflow.com/a/21901260/498873
git_version = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
self.git_version = git_version.decode('ascii').strip()
# TODO: implement stable versions
self.version = self.git_version
# Git repository base directory
self.base_dir = Path(__file__).parent.parent
# Internal gammcat vesion path
self.internal_dir = Path(os.environ.get('HGPS_ANALYSIS')) / 'data/catalogs/gammacat-hess-internal/'
def __str__(self):
ss = 'GammaCatInfo:\n'
ss += 'version: {}\n'.format(self.version)
ss += 'git_version: {}\n'.format(self.git_version)
ss += 'base_dir: {}\n'.format(self.base_dir)
return ss
class GammaCatTag:
"""Make and parse string tags.
"""
def source_dataset_filename(self, meta):
return 'gammacat_' + self.source_dataset_str(meta)
def source_dataset_str(self, meta):
return self.source_str(meta) + '_' + self.dataset_str(meta)
def source_str(self, meta):
return '{source_id:06d}'.format_map(meta)
def dataset_str(self, meta):
return urllib.parse.quote(meta['reference_id'])
# return '{reference_id}'.format_map(meta)
gammacat_info = GammaCatInfo()
gammacat_tag = GammaCatTag()
| bsd-3-clause | Python |
477c02946b05c80a11bad6c9c20464ee2e82eab4 | Add constants | ianfieldhouse/number_to_words | number_to_words.py | number_to_words.py | class NumberToWords(object):
"""
Class for converting positive integer values to a textual representation
of the submitted number for value of 0 up to 999999999.
"""
MAX = 999999999
SMALL_NUMBERS = ['', 'one', 'two', 'three', 'four', 'five', 'six',
'seven', 'eight', 'nine', 'ten', 'eleven',
'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen',
'seventeen', 'eighteen', 'nineteen']
TENS = ['', '', 'twenty', 'thirty', 'fourty', 'fifty', 'sixty', 'seventy',
'eighty', 'ninety']
LARGE_NUMBERS = ['', 'thousand', 'million']
| class NumberToWords(object):
"""
Class for converting positive integer values to a textual representation
of the submitted number for value of 0 up to 999999999.
"""
| mit | Python |
c9a5f5a542712fdc3ef41dd84889af9619f93822 | print original filename in flask example | siddhantgoel/streaming-form-data | examples/flask/upload-test.py | examples/flask/upload-test.py | #!/usr/bin/python3
from flask import Flask, request
import time
import os
import tempfile
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import FileTarget
app = Flask(__name__)
page = '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data id="upload-file">
<input type=file name=file>
<input type=submit value=Upload>
</form><br>
'''
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
file = FileTarget(os.path.join(tempfile.gettempdir(), "test"))
hdict = {}
for h in request.headers:
hdict[h[0]] = h[1]
parser = StreamingFormDataParser(headers=hdict)
parser.register('file', file)
timeA = time.perf_counter()
while True:
chunk = request.stream.read(8192)
if not chunk:
break
parser.data_received(chunk)
timeB = time.perf_counter()
print("time spent on file reception: %fs" % (timeB-timeA))
return file.multipart_filename + ": upload done"
return page
if __name__ == '__main__':
app.run(host='0.0.0.0')
| #!/usr/bin/python3
from flask import Flask, request
import time
import os
import tempfile
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import FileTarget
app = Flask(__name__)
page = '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data id="upload-file">
<input type=file name=file>
<input type=submit value=Upload>
</form><br>
'''
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
file = FileTarget(os.path.join(tempfile.gettempdir(), "test"))
hdict = {}
for h in request.headers:
hdict[h[0]] = h[1]
parser = StreamingFormDataParser(headers=hdict)
parser.register('file', file)
timeA = time.perf_counter()
while True:
chunk = request.stream.read(8192)
if not chunk:
break
parser.data_received(chunk)
timeB = time.perf_counter()
print("time spent on file reception: %fs" % (timeB-timeA))
return "upload done"
return page
if __name__ == '__main__':
app.run(host='0.0.0.0')
| mit | Python |
0487f13c765020195384ad2ca737a8142970cca9 | update border_fill property setting to border_fill_color | dennisobrien/bokeh,schoolie/bokeh,timsnyder/bokeh,htygithub/bokeh,htygithub/bokeh,clairetang6/bokeh,KasperPRasmussen/bokeh,ptitjano/bokeh,ericmjl/bokeh,stonebig/bokeh,justacec/bokeh,rs2/bokeh,maxalbert/bokeh,maxalbert/bokeh,aavanian/bokeh,clairetang6/bokeh,aavanian/bokeh,draperjames/bokeh,rs2/bokeh,bokeh/bokeh,ericmjl/bokeh,bokeh/bokeh,jakirkham/bokeh,schoolie/bokeh,percyfal/bokeh,justacec/bokeh,aavanian/bokeh,ptitjano/bokeh,phobson/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,rs2/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,aiguofer/bokeh,maxalbert/bokeh,DuCorey/bokeh,azjps/bokeh,clairetang6/bokeh,schoolie/bokeh,dennisobrien/bokeh,bokeh/bokeh,msarahan/bokeh,mindriot101/bokeh,rs2/bokeh,aiguofer/bokeh,mindriot101/bokeh,ptitjano/bokeh,msarahan/bokeh,ptitjano/bokeh,percyfal/bokeh,mindriot101/bokeh,draperjames/bokeh,htygithub/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,dennisobrien/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,draperjames/bokeh,timsnyder/bokeh,msarahan/bokeh,quasiben/bokeh,DuCorey/bokeh,phobson/bokeh,justacec/bokeh,bokeh/bokeh,DuCorey/bokeh,stonebig/bokeh,Karel-van-de-Plassche/bokeh,philippjfr/bokeh,schoolie/bokeh,quasiben/bokeh,quasiben/bokeh,philippjfr/bokeh,percyfal/bokeh,azjps/bokeh,DuCorey/bokeh,maxalbert/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,timsnyder/bokeh,ptitjano/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,philippjfr/bokeh,jakirkham/bokeh,draperjames/bokeh,rs2/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,timsnyder/bokeh,percyfal/bokeh,draperjames/bokeh,schoolie/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,aavanian/bokeh,azjps/bokeh,timsnyder/bokeh,aavanian/bokeh,percyfal/bokeh,jakirkham/bokeh,clairetang6/bokeh,phobson/bokeh,ericmjl/bokeh,mindriot101/bokeh,philippjfr/bokeh,aiguofer/bokeh,ericmjl/bokeh,azjps/bokeh,bokeh/bokeh,eric
mjl/bokeh,philippjfr/bokeh,azjps/bokeh | examples/glyphs/choropleth.py | examples/glyphs/choropleth.py | from __future__ import print_function
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.models.glyphs import Patches
from bokeh.models import (
Plot, DataRange1d, ColumnDataSource, ResizeTool
)
from bokeh.resources import INLINE
from bokeh.sampledata import us_states, us_counties, unemployment
us_states = us_states.data.copy()
us_counties = us_counties.data
unemployment = unemployment.data
del us_states["HI"]
del us_states["AK"]
state_source = ColumnDataSource(
data=dict(
state_xs=[us_states[code]["lons"] for code in us_states],
state_ys=[us_states[code]["lats"] for code in us_states],
)
)
colors = ["#F1EEF6", "#D4B9DA", "#C994C7", "#DF65B0", "#DD1C77", "#980043"]
county_colors = []
for county_id in us_counties:
if us_counties[county_id]["state"] in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]:
continue
try:
rate = unemployment[county_id]
idx = min(int(rate/2), 5)
county_colors.append(colors[idx])
except KeyError:
county_colors.append("black")
county_source = ColumnDataSource(
data=dict(
county_xs=[us_counties[code]["lons"] for code in us_counties if us_counties[code]["state"] not in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]],
county_ys=[us_counties[code]["lats"] for code in us_counties if us_counties[code]["state"] not in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]],
county_colors=county_colors
)
)
xdr = DataRange1d()
ydr = DataRange1d()
plot = Plot(x_range=xdr, y_range=ydr, min_border=0, border_fill_color="white",
title="2009 Unemployment Data", plot_width=1300, plot_height=800, toolbar_location="left")
county_patches = Patches(xs="county_xs", ys="county_ys", fill_color="county_colors", fill_alpha=0.7, line_color="white", line_width=0.5)
plot.add_glyph(county_source, county_patches)
state_patches = Patches(xs="state_xs", ys="state_ys", fill_alpha=0.0, line_color="#884444", line_width=2)
plot.add_glyph(state_source, state_patches)
plot.add_tools(ResizeTool())
doc = Document()
doc.add(plot)
if __name__ == "__main__":
filename = "choropleth.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Choropleth of all US counties, Unemployment 2009"))
print("Wrote %s" % filename)
view(filename)
| from __future__ import print_function
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.models.glyphs import Patches
from bokeh.models import (
Plot, DataRange1d, ColumnDataSource, ResizeTool
)
from bokeh.resources import INLINE
from bokeh.sampledata import us_states, us_counties, unemployment
us_states = us_states.data.copy()
us_counties = us_counties.data
unemployment = unemployment.data
del us_states["HI"]
del us_states["AK"]
state_source = ColumnDataSource(
data=dict(
state_xs=[us_states[code]["lons"] for code in us_states],
state_ys=[us_states[code]["lats"] for code in us_states],
)
)
colors = ["#F1EEF6", "#D4B9DA", "#C994C7", "#DF65B0", "#DD1C77", "#980043"]
county_colors = []
for county_id in us_counties:
if us_counties[county_id]["state"] in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]:
continue
try:
rate = unemployment[county_id]
idx = min(int(rate/2), 5)
county_colors.append(colors[idx])
except KeyError:
county_colors.append("black")
county_source = ColumnDataSource(
data=dict(
county_xs=[us_counties[code]["lons"] for code in us_counties if us_counties[code]["state"] not in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]],
county_ys=[us_counties[code]["lats"] for code in us_counties if us_counties[code]["state"] not in ["ak", "hi", "pr", "gu", "vi", "mp", "as"]],
county_colors=county_colors
)
)
xdr = DataRange1d()
ydr = DataRange1d()
plot = Plot(x_range=xdr, y_range=ydr, min_border=0, border_fill="white",
title="2009 Unemployment Data", plot_width=1300, plot_height=800, toolbar_location="left")
county_patches = Patches(xs="county_xs", ys="county_ys", fill_color="county_colors", fill_alpha=0.7, line_color="white", line_width=0.5)
plot.add_glyph(county_source, county_patches)
state_patches = Patches(xs="state_xs", ys="state_ys", fill_alpha=0.0, line_color="#884444", line_width=2)
plot.add_glyph(state_source, state_patches)
plot.add_tools(ResizeTool())
doc = Document()
doc.add(plot)
if __name__ == "__main__":
filename = "choropleth.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Choropleth of all US counties, Unemployment 2009"))
print("Wrote %s" % filename)
view(filename)
| bsd-3-clause | Python |
88abdf5365977a47abaa0d0a8f3275e4635c8378 | Fix OAuth integration for all wiki families | yuvipanda/paws,yuvipanda/paws | singleuser/user-config.py | singleuser/user-config.py | import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
for fam in (
'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
usernames[fam]['*'] = os.environ['JPY_USER']
del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
| import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
| mit | Python |
81323aa798770577fede8627591757aa1cee37e9 | add entities (hashtags, ...) | EUMSSI/EUMSSI-platform,EUMSSI/EUMSSI-platform,EUMSSI/EUMSSI-platform | preprocess/twitter2eumssi.py | preprocess/twitter2eumssi.py | #!/usr/bin/env python
import datetime
from eumssi_converter import EumssiConverter
import click
def transf_date(x):
if x.__class__ == datetime.datetime:
return x
else:
# Twitter's weird date format
return datetime.datetime.strptime(x, '%a %b %d %X +0000 %Y')
def transf_coordinates(x):
return x # TODO: figure out how to represent this correctly
def transf_hashtag(tag_list):
return [tag['text'] for tag in tag_list]
def transf_url(url_list):
return [url['expanded_url'] for url in url_list]
def transf_user(user_list):
return [user['screen_name'] for user in user_list]
'''
mapping in the form [<original_fieldname>, <eumssi_fieldname>, <transform_function>, [<available_data>,..]}
'''
twitter_map = [
['created_at', 'datePublished', transf_date, []],
['lang', 'inLanguage', None, []],
['text', 'text', None, ['text']],
['user.screen_name', 'author', None, []],
['id_str', 'tweetId', None, []],
['entities.hashtags', 'hashtags', transf_hashtag, []],
['entities.user_mentions', 'userMentions', transf_user, []],
['entities.urls', 'urlMentions', transf_url, []],
['coordinates.coordinates', 'contentLocation', transf_coordinates, []]
]
@click.command()
@click.option('--reset', is_flag=True, help="reset data_available")
@click.option('--clean', is_flag=True, help="reset data_available and remove existing meta.source")
def convert(reset, clean):
conv = EumssiConverter('twitter-api-1.1', twitter_map, coll_name="tweets")
if reset:
conv.reset()
if clean:
conv.clean()
conv.run()
if __name__ == '__main__':
convert()
| #!/usr/bin/env python
import datetime
from eumssi_converter import EumssiConverter
import click
def transf_date(x):
if x.__class__ == datetime.datetime:
return x
else:
# Twitter's weird date format
return datetime.datetime.strptime(x, '%a %b %d %X +0000 %Y')
def transf_coordinates(x):
return x # TODO: figure out how to represent this correctly
'''
mapping in the form [<original_fieldname>, <eumssi_fieldname>, <transform_function>, [<available_data>,..]}
'''
twitter_map = [
['created_at', 'datePublished', transf_date, []],
['lang', 'inLanguage', None, []],
['text', 'text', None, ['text']],
['user.id_str', 'author', None, []],
['id_str', 'tweetId', None, []],
['coordinates.coordinates', 'contentLocation', transf_coordinates, []]
]
@click.command()
@click.option('--reset', is_flag=True, help="reset data_available")
@click.option('--clean', is_flag=True, help="reset data_available and remove existing meta.source")
def convert(reset, clean):
conv = EumssiConverter('twitter-api-1.1', twitter_map, coll_name="tweets")
if reset:
conv.reset()
if clean:
conv.clean()
conv.run()
if __name__ == '__main__':
convert()
| apache-2.0 | Python |
725a82a56bd6fca6942f5d5148bf1fd2572131ef | Refactor the main module | Flavien/script.buildinstaller | addon.py | addon.py | # Copyright 2016 Flavien Charlon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import xbmcaddon
import xbmcgui
import xml.etree.ElementTree
def run():
file_path = xbmc.translatePath("special://profile/build.xml")
root = xml.etree.ElementTree.parse(file_path).getroot()
for addon in root.findall("./addon"):
command = "InstallAddon({id})".format(id = addon.attrib["id"])
xbmc.executebuiltin(command, True)
xbmcgui.Dialog().ok(
xbmcaddon.Addon().getAddonInfo("name"),
"All the add-ons have been installed successfully.")
if __name__ == '__main__':
run()
| # Copyright 2016 Flavien Charlon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import xbmcaddon
import xbmcgui
import os
import xml.etree.ElementTree
# Install all required addons
def execute():
file_path = xbmc.translatePath(os.path.join("special://profile/", "build.xml"))
root = xml.etree.ElementTree.parse(file_path).getroot()
for addon in root.findall("./addon"):
xbmc.executebuiltin("InstallAddon({id})".format(id = addon.attrib["id"]), True)
# Main script
addon = xbmcaddon.Addon()
addonname = addon.getAddonInfo("name")
execute()
xbmcgui.Dialog().ok(addonname, "The build has been restored successfully.") | apache-2.0 | Python |
5f3c4e6bd9a35f029e9e3241e241466cff843d6c | fix Point lookup | openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org | geo/views.py | geo/views.py | import datetime
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.db.models import Q
from django.contrib.gis.geos import Point
from opencivicdata.core.models import Division
DATE_FORMAT = "%Y-%m-%d"
def division_list(request):
    """Return a JSON list of OCD divisions whose boundary contains a point.

    Query params: ``lat`` and ``lon`` (required), ``date`` (optional,
    YYYY-MM-DD, defaults to today) used to filter boundary-set validity.
    """
    today = datetime.datetime.strftime(datetime.datetime.now(), DATE_FORMAT)
    lat = request.GET.get('lat')
    lon = request.GET.get('lon')
    date = datetime.datetime.strptime(
        request.GET.get('date', today), DATE_FORMAT
    ).date()
    if not (lat and lon):
        # BUG FIX: JsonResponse forwards kwargs to HttpResponse, which takes
        # ``status`` -- the previous ``status_code`` keyword raised TypeError.
        return JsonResponse({"error": "Must provide lat & lon"}, status=400)
    divisions = Division.objects.filter(
        # A boundary set matches when the requested date falls inside its
        # [start_date, end_date] range; NULL bounds are open-ended.
        Q(geometries__boundary__set__start_date__lte=date) |
        Q(geometries__boundary__set__start_date=None),
        Q(geometries__boundary__set__end_date__gte=date) |
        Q(geometries__boundary__set__end_date=None),
        geometries__boundary__shape__contains=Point(float(lon), float(lat))
    )
    return JsonResponse({
        'results': [
            {'id': d.id,
             'name': d.name,
             } for d in divisions
        ]
    })
def serialize_geometry(geometry):
    """Flatten a Geometry ORM object into a JSON-serializable dict."""
    boundary = geometry.boundary
    boundary_set = boundary.set
    return {
        'boundary_set': {
            'name': boundary_set.name,
            'start_date': boundary_set.start_date,
            'end_date': boundary_set.end_date,
        },
        'metadata': boundary.metadata,
        'extent': boundary.extent,
        'external_id': boundary.external_id,
        'name': boundary.name,
        # tuple() turns the centroid coordinate sequence into a JSON array
        'centroid': tuple(boundary.centroid),
    }
def division_detail(request, pk):
    """Return JSON detail (including all geometries) for one Division, or 404."""
    division = get_object_or_404(Division, pk=pk)
    geometries = [serialize_geometry(geom) for geom in division.geometries.all()]
    payload = {
        'id': division.id,
        'country': division.country,
        'name': division.name,
        'geometries': geometries,
    }
    return JsonResponse(payload)
| import datetime
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.db.models import Q
from opencivicdata.core.models import Division
DATE_FORMAT = "%Y-%m-%d"
def division_list(request):
    # Look up OCD divisions whose boundary contains the given lat/lon point,
    # restricted to boundary sets valid on the (optional) requested date.
    today = datetime.datetime.strftime(datetime.datetime.now(), DATE_FORMAT)
    lat = request.GET.get('lat')
    lon = request.GET.get('lon')
    date = datetime.datetime.strptime(
        request.GET.get('date', today), DATE_FORMAT
    ).date()
    if not (lat and lon):
        # NOTE(review): JsonResponse/HttpResponse accept ``status``, not
        # ``status_code`` -- this keyword raises TypeError when hit; confirm.
        return JsonResponse({"error": "Must provide lat & lon"}, status_code=400)
    divisions = Division.objects.filter(
        # NULL start/end dates are treated as open-ended validity ranges.
        Q(geometries__boundary__set__start_date__lte=date) |
        Q(geometries__boundary__set__start_date=None),
        Q(geometries__boundary__set__end_date__gte=date) |
        Q(geometries__boundary__set__end_date=None),
        geometries__boundary__shape__contains='POINT({} {})'.format(lon, lat)
    )
    # NOTE(review): the result set is truncated to two divisions here --
    # looks like leftover debugging; confirm intent.
    divisions = divisions[:2]
    return JsonResponse({
        'results': [
            {'id': d.id,
             'name': d.name,
             } for d in divisions
        ]
    })
def serialize_geometry(geometry):
    """Flatten a Geometry ORM object into a JSON-serializable dict."""
    return {
        'boundary_set': {
            'name': geometry.boundary.set.name,
            'start_date': geometry.boundary.set.start_date,
            'end_date': geometry.boundary.set.end_date,
        },
        'metadata': geometry.boundary.metadata,
        'extent': geometry.boundary.extent,
        'external_id': geometry.boundary.external_id,
        'name': geometry.boundary.name,
        # tuple() turns the centroid coordinate sequence into a JSON array
        'centroid': tuple(geometry.boundary.centroid),
    }
def division_detail(request, pk):
    """Return JSON detail (with all geometries) for one Division, or 404."""
    division = get_object_or_404(Division, pk=pk)
    data = {
        'id': division.id,
        'country': division.country,
        'name': division.name,
        'geometries': [
            serialize_geometry(g) for g in
            division.geometries.all()
        ],
    }
    return JsonResponse(data)
| mit | Python |
e1af389a28b5c6a7aca2766418ab14d044596b05 | add base_vat to depends | OCA/l10n-belgium,OCA/l10n-belgium | l10n_be_partner_kbo_bce/__manifest__.py | l10n_be_partner_kbo_bce/__manifest__.py | # Copyright 2009-2020 Noviat.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
    # Odoo addon manifest: Belgian KBO/BCE enterprise-number support.
    "name": "Belgium - KBO/BCE numbers",
    "category": "Localization",
    "version": "13.0.1.0.1",
    "license": "AGPL-3",
    "author": "Noviat,Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/l10n-belgium",
    # NOTE(review): base_vat presumably provides VAT-number validation used
    # alongside the KBO/BCE identifiers -- confirm why it was added.
    "depends": ["partner_identification", "base_vat"],
    "data": ["data/res_partner_id_category.xml", "views/res_partner.xml"],
    "demo": ["demo/res_partner.xml"],
    "installable": True,
}
| # Copyright 2009-2020 Noviat.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
    # Odoo addon manifest (earlier revision: no base_vat dependency yet).
    "name": "Belgium - KBO/BCE numbers",
    "category": "Localization",
    "version": "13.0.1.0.1",
    "license": "AGPL-3",
    "author": "Noviat,Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/l10n-belgium",
    "depends": ["partner_identification"],
    "data": ["data/res_partner_id_category.xml", "views/res_partner.xml"],
    "demo": ["demo/res_partner.xml"],
    "installable": True,
}
| agpl-3.0 | Python |
4dedebe76d2ec112013595dd8f72b83b3ba28abb | update to fit utf8 | instagrambot/instabot,Diapostrofo/instabot,instagrambot/instabot,sudoguy/instabot,instagrambot/instapro,AlexBGoode/instabot,vkgrd/instabot,rasperepodvipodvert/instabot,misisnik/testinsta,ohld/instabot,misisnik/testinsta | examples/black-whitelist/whitelist_generator.py | examples/black-whitelist/whitelist_generator.py | """
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print ("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
# Everyone you follow, minus ids already saved in whitelist.txt.
your_following = bot.get_user_following(bot.user_id)
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
# Shuffle so repeated runs present accounts in a different order.
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
    for user_id in rest_users:
        user_info = bot.get_user_info(user_id)
        print(user_info["username"])
        print(user_info["full_name"])
        # Read one line from stdin; any answer containing "y" whitelists.
        input_line = sys.stdin.readline().lower()
        if "y" in input_line:
            f.write(str(user_id) + "\n")
            print("ADDED.\r")
| """
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print ("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.get_user_following(bot.user_id)
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print (user_info["username"] + " - " + user_info["full_name"] + "\r")
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
| apache-2.0 | Python |
bc43d899487bc5e872884c020b17da87ce01418a | Use cache in celery task | openmaraude/APITaxi,odtvince/APITaxi,openmaraude/APITaxi,odtvince/APITaxi,l-vincent-l/APITaxi,odtvince/APITaxi,odtvince/APITaxi,l-vincent-l/APITaxi | APITaxi/tasks/send_request_operator.py | APITaxi/tasks/send_request_operator.py | #coding: utf-8
from flask import current_app
from flask.ext.restplus import marshal
from ..models.hail import Hail
from ..models.security import User
from ..descriptors.hail import hail_model
from ..extensions import db, celery
import requests, json
@celery.task()
def send_request_operator(hail_id, operateur_id, env):
    """Celery task: POST a hail to its operator's HTTP endpoint.

    Returns True when the operator answered 2xx (status becomes
    'received_by_operator'), False on any failure (status 'failure').
    """
    # Hail is read through the model cache rather than a plain query.
    hail = Hail.cache.get(hail_id)
    if not hail:
        current_app.logger.error('Unable to find hail: {}'.format(hail_id))
        return False
    operateur = User.query.get(operateur_id)
    if not operateur:
        current_app.logger.error('Unable to find operateur: {}'.format(operateur_id))
        return False
    r = None
    # The hail endpoint depends on the deployment environment (env).
    endpoint = operateur.hail_endpoint(env)
    try:
        headers = {'Content-Type': 'application/json',
                   'Accept': 'application/json'}
        # Optional per-operator auth header (custom name + API key).
        if operateur.operator_header_name is not None and operateur.operator_header_name != '':
            headers[operateur.operator_header_name] = operateur.operator_api_key
        r = requests.post(endpoint, data=json.dumps(marshal({"data": [hail]}, hail_model)),
                          headers=headers)
    except requests.exceptions.MissingSchema:
        # Malformed endpoint URL: fall through with r == None -> failure path.
        pass
    if not r or r.status_code < 200 or r.status_code >= 300:
        hail.status = 'failure'
        db.session.commit()
        current_app.logger.error("Unable to reach hail's endpoint {} of operator {}"\
                                 .format(endpoint, operateur.email))
        return False
    r_json = None
    try:
        r_json = r.json()
    except ValueError:
        # Non-JSON body is tolerated; the phone number is simply not recorded.
        pass
    if r_json and 'data' in r_json and len(r_json['data']) == 1\
            and 'taxi_phone_number' in r_json['data'][0]:
        hail.taxi_phone_number = r_json['data'][0]['taxi_phone_number']
    hail.status = 'received_by_operator'
    db.session.commit()
    return True
| #coding: utf-8
from flask import current_app
from flask.ext.restplus import marshal
from ..models.hail import Hail
from ..models.security import User
from ..descriptors.hail import hail_model
from ..extensions import db, celery
import requests, json
@celery.task()
def send_request_operator(hail_id, operateur_id, env):
    """Celery task: POST a hail to its operator's endpoint and record the outcome."""
    # Earlier revision: plain DB lookup (no model-cache layer here).
    hail = Hail.query.get(hail_id)
    if not hail:
        current_app.logger.error('Unable to find hail: {}'.format(hail_id))
        return False
    operateur = User.query.get(operateur_id)
    if not operateur:
        current_app.logger.error('Unable to find operateur: {}'.format(operateur_id))
        return False
    r = None
    endpoint = operateur.hail_endpoint(env)
    try:
        headers = {'Content-Type': 'application/json',
                   'Accept': 'application/json'}
        # Optional per-operator auth header (custom name + API key).
        if operateur.operator_header_name is not None and operateur.operator_header_name != '':
            headers[operateur.operator_header_name] = operateur.operator_api_key
        r = requests.post(endpoint, data=json.dumps(marshal({"data": [hail]}, hail_model)),
                          headers=headers)
    except requests.exceptions.MissingSchema:
        # Malformed endpoint URL: fall through with r == None -> failure path.
        pass
    if not r or r.status_code < 200 or r.status_code >= 300:
        hail.status = 'failure'
        db.session.commit()
        current_app.logger.error("Unable to reach hail's endpoint {} of operator {}"\
                                 .format(endpoint, operateur.email))
        return False
    r_json = None
    try:
        r_json = r.json()
    except ValueError:
        pass
    if r_json and 'data' in r_json and len(r_json['data']) == 1\
            and 'taxi_phone_number' in r_json['data'][0]:
        hail.taxi_phone_number = r_json['data'][0]['taxi_phone_number']
    hail.status = 'received_by_operator'
    db.session.commit()
    return True
| agpl-3.0 | Python |
294dabd8cc6bfc7e004a1a0dde9b40e9535d4b19 | Raise 404 Error if no Tag exists. | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | organizer/views.py | organizer/views.py | from django.http.response import (
Http404, HttpResponse)
from django.template import Context, loader
from .models import Tag
def homepage(request):
    """Render the tag list template with every Tag in the database."""
    tag_list = Tag.objects.all()
    template = loader.get_template(
        'organizer/tag_list.html')
    context = Context({'tag_list': tag_list})
    output = template.render(context)
    return HttpResponse(output)
def tag_detail(request, slug):
    """Render the detail page for the Tag matching *slug*, or raise 404."""
    try:
        # iexact: slugs are matched case-insensitively.
        tag = Tag.objects.get(slug__iexact=slug)
    except Tag.DoesNotExist:
        raise Http404
    template = loader.get_template(
        'organizer/tag_detail.html')
    context = Context({'tag': tag})
    return HttpResponse(template.render(context))
| from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
    """Render the tag list template with every Tag in the database."""
    tag_list = Tag.objects.all()
    template = loader.get_template(
        'organizer/tag_list.html')
    context = Context({'tag_list': tag_list})
    output = template.render(context)
    return HttpResponse(output)
def tag_detail(request, slug):
    """Render the detail page for the Tag matching *slug* (case-insensitive)."""
    # NOTE(review): Tag.DoesNotExist is not caught here, so a missing slug
    # produces a 500 instead of a 404.
    tag = Tag.objects.get(slug__iexact=slug)
    template = loader.get_template(
        'organizer/tag_detail.html')
    context = Context({'tag': tag})
    return HttpResponse(template.render(context))
| bsd-2-clause | Python |
741b0e2ebad363097473c7f3750b2b852a61dcff | bump version to v3.7.3 | geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx | osmaxx/__init__.py | osmaxx/__init__.py | __version__ = 'v3.7.3'
__all__ = [
'__version__',
]
| __version__ = 'v3.7.2'
__all__ = [
'__version__',
]
| mit | Python |
8045c0016e6607d80687613535b5036cb826b711 | Add group_hidden | chadyred/odoo_addons,odoocn/odoo_addons,chadyred/odoo_addons,odoocn/odoo_addons,tiexinliu/odoo_addons,tiexinliu/odoo_addons,bmya/odoo_addons,bmya/odoo_addons,odoocn/odoo_addons,bmya/odoo_addons,ovnicraft/odoo_addons,ovnicraft/odoo_addons,ovnicraft/odoo_addons,chadyred/odoo_addons,tiexinliu/odoo_addons | smile_base/__openerp__.py | smile_base/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Smile Base",
"version": "0.2.3",
"depends": ["mail"],
"author": "Smile",
"description": """Smile Base
* Install and make French the default language
* Remove the scheduled action "Update Notification" which sends companies and users info to OpenERP S.A.
* Activate access logs for ir.translation object
* Correct date and time format for French language
* Review the menu "Applications"
* Remove the menu "Update modules" from apps.openerp.com
* Add sequence and display window actions in IrValues
* Force to call unlink method at removal of remote object linked by a fields.many2one with ondelete='cascade'
* Deduplicate pool._store_function
* Add BaseModel.bulk_create, BaseModel.store_set_values and BaseModel._compute_store_set
* Improve BaseModel.load method performance
Suggestions & Feedback to: corentin.pouhet-brunerie@smile.fr
""",
"summary": "",
"website": "http://www.smile.fr",
"category": 'Tools',
"sequence": 20,
"data": [
"security/base_security.xml",
"security/res_users.yml",
"data/mail_data.xml",
"data/ir_lang.yml",
"view/ir_values_view.xml",
"view/module_view.xml",
],
"js": [
"static/src/js/disable_openerp_online.js",
],
"auto_install": True,
"installable": True,
"application": False,
}
| # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Smile Base",
"version": "0.2.3",
"depends": ["mail"],
"author": "Smile",
"description": """Smile Base
* Install and make French the default language
* Remove the scheduled action "Update Notification" which sends companies and users info to OpenERP S.A.
* Activate access logs for ir.translation object
* Correct date and time format for French language
* Review the menu "Applications"
* Remove the menu "Update modules" from apps.openerp.com
* Add sequence and display window actions in IrValues
* Force to call unlink method at removal of remote object linked by a fields.many2one with ondelete='cascade'
* Deduplicate pool._store_function
* Add BaseModel.bulk_create, BaseModel.store_set_values and BaseModel._compute_store_set
* Improve BaseModel.load method performance
Suggestions & Feedback to: corentin.pouhet-brunerie@smile.fr
""",
"summary": "",
"website": "http://www.smile.fr",
"category": 'Tools',
"sequence": 20,
"data": [
"security/res_users.yml",
"data/mail_data.xml",
"data/ir_lang.yml",
"view/ir_values_view.xml",
"view/module_view.xml",
],
"js": [
"static/src/js/disable_openerp_online.js",
],
"auto_install": True,
"installable": True,
"application": False,
}
| agpl-3.0 | Python |
3c0a181ac54f5017ace8c02ea8a3982f4e62bed4 | read correct env variables | cgoldberg/githubtakeout | githubtakeout.py | githubtakeout.py | import logging
import os
import shutil
import tarfile
import git
from github import Github
logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(__name__)
try:
USER = os.environ['GITHUBUSER']
PASSWORD = os.environ['GITHUBPASSWORD']
except KeyError as e:
raise SystemExit('GITHUBUSER and GITHUBPASSWORD environment'
' variables are required.')
def make_gzip_tarball(source_dir, output_dir, tarball_filename):
    """Compress *source_dir* into ``output_dir/tarball_filename`` (gzip).

    The directory is stored under its own base name inside the archive.
    Returns the path of the tarball that was written.
    """
    archive_path = os.path.join(output_dir, tarball_filename)
    archive_root = os.path.basename(source_dir)
    tar = tarfile.open(archive_path, 'w:gz')
    try:
        tar.add(source_dir, arcname=archive_root)
    finally:
        tar.close()
    return archive_path
def clone_repo(repo_url, repo_path):
    """Clone *repo_url* into *repo_path*, logging (not raising) git failures."""
    logger.info('cloning: {}'.format(repo_url))
    try:
        git.Repo.clone_from(repo_url, repo_path)
    except git.GitCommandError as e:
        # e.g. destination already exists or the remote is unreachable;
        # log the error and let the caller continue with the next repo.
        logger.error(e)
def archive_repo(repo_name, repos_dir, repo_path):
    """Tar+gzip a cloned repo into *repos_dir*, then delete the working clone."""
    tarball_filename = '{}.tar.gz'.format(repo_name)
    logger.info('creating archive: {}'.format(tarball_filename))
    make_gzip_tarball(repo_path, repos_dir, tarball_filename)
    # The clone is only needed to build the tarball; reclaim the disk space.
    logger.info('deleting repo: {}\n'.format(repo_name))
    shutil.rmtree(repo_path)
def export_repos(user_name, include_gists=True):
    """Clone and archive every non-fork repo (and optionally gists) of *user_name*."""
    repos_dir = 'github_backup'
    github = Github(USER, PASSWORD)
    user = github.get_user(user_name)
    for repo in user.get_repos():
        repo_path = os.path.join(repos_dir, repo.name)
        # don't include forked repos
        if repo.source is None:
            clone_repo(repo.git_url, repo_path)
            archive_repo(repo.name, repos_dir, repo_path)
    if include_gists:
        for gist in user.get_gists():
            gist_path = os.path.join(repos_dir, gist.id)
            clone_repo(gist.git_pull_url, gist_path)
            # NOTE(review): the gist is cloned under gist.id but archived
            # under gist.name -- confirm PyGithub Gist objects expose
            # ``name``; this may need to be gist.id instead.
            archive_repo(gist.name, repos_dir, gist_path)
if __name__ == '__main__':
export_repos(USER)
| import logging
import os
import shutil
import tarfile
import git
from github import Github
logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(__name__)
try:
USER = os.environ['USER']
PASSWORD = os.environ['PASSWORD']
except KeyError as e:
raise SystemExit('USER and PASSWORD environment variables are required.')
def make_gzip_tarball(source_dir, output_dir, tarball_filename):
output_path = os.path.join(output_dir, tarball_filename)
with tarfile.open(output_path, 'w:gz') as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
return output_path
def clone_repo(repo_url, repo_path):
logger.info('cloning: {}'.format(repo_url))
try:
git.Repo.clone_from(repo_url, repo_path)
except git.GitCommandError as e:
logger.error(e)
def archive_repo(repo_name, repos_dir, repo_path):
tarball_filename = '{}.tar.gz'.format(repo_name)
logger.info('creating archive: {}'.format(tarball_filename))
make_gzip_tarball(repo_path, repos_dir, tarball_filename)
logger.info('deleting repo: {}\n'.format(repo_name))
shutil.rmtree(repo_path)
def export_repos(user_name, include_gists=True):
    """Clone and archive every non-fork repo (and optionally gists) of *user_name*."""
    repos_dir = 'github_backup'
    github = Github(USER, PASSWORD)
    user = github.get_user(user_name)
    for repo in user.get_repos():
        repo_path = os.path.join(repos_dir, repo.name)
        # don't include forked repos
        if repo.source is None:
            clone_repo(repo.git_url, repo_path)
            archive_repo(repo.name, repos_dir, repo_path)
    if include_gists:
        for gist in user.get_gists():
            # NOTE(review): ``gists_dir`` is never defined anywhere in this
            # file, so this branch raises NameError as soon as it runs.
            gist_path = os.path.join(gists_dir, gist.id)
            clone_repo(gist.git_pull_url, gist_path)
            archive_repo(gist.name, gists_dir, gist_path)
if __name__ == '__main__':
user = 'cgoldberg'
export_repos(user)
| mit | Python |
a65b385769d33b606ea4b7c11c5542ea7d9394b9 | Disable redirect_state in strava backend. Fixes #259 | contracode/python-social-auth,mchdks/python-social-auth,henocdz/python-social-auth,alrusdi/python-social-auth,python-social-auth/social-core,firstjob/python-social-auth,msampathkumar/python-social-auth,mark-adams/python-social-auth,python-social-auth/social-storage-sqlalchemy,muhammad-ammar/python-social-auth,rsalmaso/python-social-auth,msampathkumar/python-social-auth,ononeor12/python-social-auth,bjorand/python-social-auth,firstjob/python-social-auth,tkajtoch/python-social-auth,ariestiyansyah/python-social-auth,joelstanner/python-social-auth,JJediny/python-social-auth,mark-adams/python-social-auth,duoduo369/python-social-auth,JJediny/python-social-auth,rsteca/python-social-auth,jneves/python-social-auth,nirmalvp/python-social-auth,falcon1kr/python-social-auth,JerzySpendel/python-social-auth,degs098/python-social-auth,ByteInternet/python-social-auth,webjunkie/python-social-auth,lneoe/python-social-auth,yprez/python-social-auth,falcon1kr/python-social-auth,contracode/python-social-auth,S01780/python-social-auth,nirmalvp/python-social-auth,garrett-schlesinger/python-social-auth,lawrence34/python-social-auth,iruga090/python-social-auth,yprez/python-social-auth,wildtetris/python-social-auth,noodle-learns-programming/python-social-auth,muhammad-ammar/python-social-auth,rsteca/python-social-auth,jeyraof/python-social-auth,S01780/python-social-auth,daniula/python-social-auth,barseghyanartur/python-social-auth,ononeor12/python-social-auth,msampathkumar/python-social-auth,muhammad-ammar/python-social-auth,lamby/python-social-auth,MSOpenTech/python-social-auth,chandolia/python-social-auth,garrett-schlesinger/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,michael-borisov/python-social-auth,lawrence34/python-social-auth,python-social-auth/social-docs,MSOpenTech/python-social-auth,firstjob/python-social-auth,rsteca/python-social-auth,lneoe/python-s
ocial-auth,alrusdi/python-social-auth,hsr-ba-fs15-dat/python-social-auth,ariestiyansyah/python-social-auth,S01780/python-social-auth,mathspace/python-social-auth,san-mate/python-social-auth,fearlessspider/python-social-auth,fearlessspider/python-social-auth,hsr-ba-fs15-dat/python-social-auth,lneoe/python-social-auth,merutak/python-social-auth,robbiet480/python-social-auth,joelstanner/python-social-auth,daniula/python-social-auth,yprez/python-social-auth,rsalmaso/python-social-auth,tutumcloud/python-social-auth,JerzySpendel/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,python-social-auth/social-app-django,mrwags/python-social-auth,duoduo369/python-social-auth,merutak/python-social-auth,hsr-ba-fs15-dat/python-social-auth,lamby/python-social-auth,frankier/python-social-auth,degs098/python-social-auth,JJediny/python-social-auth,noodle-learns-programming/python-social-auth,drxos/python-social-auth,lawrence34/python-social-auth,MSOpenTech/python-social-auth,VishvajitP/python-social-auth,clef/python-social-auth,merutak/python-social-auth,jameslittle/python-social-auth,JerzySpendel/python-social-auth,VishvajitP/python-social-auth,frankier/python-social-auth,alrusdi/python-social-auth,Andygmb/python-social-auth,degs098/python-social-auth,Andygmb/python-social-auth,robbiet480/python-social-auth,clef/python-social-auth,cjltsod/python-social-auth,SeanHayes/python-social-auth,wildtetris/python-social-auth,henocdz/python-social-auth,mathspace/python-social-auth,cmichal/python-social-auth,DhiaEddineSaidi/python-social-auth,joelstanner/python-social-auth,jameslittle/python-social-auth,jneves/python-social-auth,robbiet480/python-social-auth,tobias47n9e/social-core,contracode/python-social-auth,iruga090/python-social-auth,jameslittle/python-social-auth,mchdks/python-social-auth,mrwags/python-social-auth,python-social-auth/social-core,wildtetris/python-social-auth,lamby/python-social-auth,jeyraof/python-social-auth,python-social-auth/social-app-djan
go,cmichal/python-social-auth,mrwags/python-social-auth,drxos/python-social-auth,webjunkie/python-social-auth,iruga090/python-social-auth,nirmalvp/python-social-auth,noodle-learns-programming/python-social-auth,falcon1kr/python-social-auth,ByteInternet/python-social-auth,tutumcloud/python-social-auth,daniula/python-social-auth,mathspace/python-social-auth,DhiaEddineSaidi/python-social-auth,jneves/python-social-auth,drxos/python-social-auth,VishvajitP/python-social-auth,jeyraof/python-social-auth,ByteInternet/python-social-auth,python-social-auth/social-app-django,san-mate/python-social-auth,cmichal/python-social-auth,san-mate/python-social-auth,henocdz/python-social-auth,Andygmb/python-social-auth,chandolia/python-social-auth,bjorand/python-social-auth,cjltsod/python-social-auth,DhiaEddineSaidi/python-social-auth,mark-adams/python-social-auth,ariestiyansyah/python-social-auth,michael-borisov/python-social-auth,michael-borisov/python-social-auth,python-social-auth/social-app-cherrypy,bjorand/python-social-auth,clef/python-social-auth,webjunkie/python-social-auth,mchdks/python-social-auth,fearlessspider/python-social-auth,chandolia/python-social-auth,SeanHayes/python-social-auth,ononeor12/python-social-auth | social/backends/strava.py | social/backends/strava.py | """
Strava OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/strava.html
"""
from social.backends.oauth import BaseOAuth2
class StravaOAuth(BaseOAuth2):
    """Strava OAuth2 authentication backend."""
    name = 'strava'
    AUTHORIZATION_URL = 'https://www.strava.com/oauth/authorize'
    ACCESS_TOKEN_URL = 'https://www.strava.com/oauth/token'
    ACCESS_TOKEN_METHOD = 'POST'
    # Strava doesn't check for parameters in redirect_uri and directly appends
    # the auth parameters to it, ending with an URL like:
    # http://example.com/complete/strava?redirect_state=xxx?code=xxx&state=xxx
    # Check issue #259 for details.
    REDIRECT_STATE = False
    def get_user_id(self, details, response):
        # Strava has no usernames; the numeric athlete id is the stable key.
        return response['athlete']['id']
    def get_user_details(self, response):
        """Return user details from Strava account"""
        # because there is no usernames on strava
        username = response['athlete']['id']
        email = response['athlete'].get('email', '')
        fullname, first_name, last_name = self.get_user_names(
            first_name=response['athlete'].get('first_name', '')
        )
        return {'username': str(username),
                'fullname': fullname,
                'first_name': first_name,
                'last_name': last_name,
                'email': email}
    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        return self.get_json('https://www.strava.com/api/v3/athlete',
                             params={'access_token': access_token})
| """
Strava OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/strava.html
"""
from social.backends.oauth import BaseOAuth2
class StravaOAuth(BaseOAuth2):
    """Strava OAuth2 authentication backend (earlier revision)."""
    name = 'strava'
    AUTHORIZATION_URL = 'https://www.strava.com/oauth/authorize'
    ACCESS_TOKEN_URL = 'https://www.strava.com/oauth/token'
    ACCESS_TOKEN_METHOD = 'POST'
    # NOTE(review): REDIRECT_STATE is not disabled here; per the upstream fix
    # for issue #259, Strava appends auth params directly to redirect_uri,
    # producing malformed callback URLs -- confirm.
    def get_user_id(self, details, response):
        return response['athlete']['id']
    def get_user_details(self, response):
        """Return user details from Strava account"""
        # because there is no usernames on strava
        username = response['athlete']['id']
        email = response['athlete'].get('email', '')
        fullname, first_name, last_name = self.get_user_names(
            first_name=response['athlete'].get('first_name', '')
        )
        return {'username': str(username),
                'fullname': fullname,
                'first_name': first_name,
                'last_name': last_name,
                'email': email}
    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        return self.get_json('https://www.strava.com/api/v3/athlete',
                             params={'access_token': access_token})
| bsd-3-clause | Python |
a95c3bff0065ed5612a0786e7d8fd3e43fe71ff7 | Declare immutable fields in SuperMessageNode | SOM-st/PySOM,SOM-st/PySOM,smarr/PySOM,smarr/PySOM | src/som/interpreter/ast/nodes/message/super_node.py | src/som/interpreter/ast/nodes/message/super_node.py | from .abstract_node import AbstractMessageNode
class SuperMessageNode(AbstractMessageNode):
    """AST node for a super-send: the lookup class is known statically.

    The target method is resolved lazily on first execution and cached in
    ``_method`` for all subsequent sends.
    """
    # RPython JIT hint: listed fields may be constant-folded; the trailing
    # '?' marks _method as quasi-immutable (written once when resolved).
    _immutable_fields_ = ['_method?', '_super_class', '_selector']
    def __init__(self, selector, receiver, args, super_class, source_section = None):
        AbstractMessageNode.__init__(self, selector, None, receiver, args, source_section)
        self._method = None
        self._super_class = super_class
        self._selector = selector
    def execute(self, frame):
        # One-time lookup in the statically known superclass.
        if self._method is None:
            method = self._super_class.lookup_invokable(self._selector)
            if not method:
                # NOTE(review): missing-method handling for super sends
                # (likely #doesNotUnderstand:) is unimplemented.
                raise Exception("Not yet implemented")
            self._method = method
        rcvr, args = self._evaluate_rcvr_and_args(frame)
        return self._method.invoke(rcvr, args)
| from .abstract_node import AbstractMessageNode
class SuperMessageNode(AbstractMessageNode):
    """AST node for a super-send (earlier revision, no JIT immutability hints)."""
    def __init__(self, selector, receiver, args, super_class, source_section = None):
        AbstractMessageNode.__init__(self, selector, None, receiver, args, source_section)
        self._method = None
        self._super_class = super_class
        self._selector = selector
    def execute(self, frame):
        # One-time lazy lookup in the statically known superclass.
        if self._method is None:
            method = self._super_class.lookup_invokable(self._selector)
            if not method:
                raise Exception("Not yet implemented")
            self._method = method
        rcvr, args = self._evaluate_rcvr_and_args(frame)
        return self._method.invoke(rcvr, args)
| mit | Python |
116f74ec0bfd574d13837ce6831bde91e8504562 | simplify option handling | OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server | lava_scheduler_app/management/commands/__init__.py | lava_scheduler_app/management/commands/__init__.py | import logging
from optparse import make_option
import sys
from django.core.management.base import BaseCommand
class SchedulerCommand(BaseCommand):
    """Base class for scheduler management commands with shared logging setup."""
    option_list = BaseCommand.option_list + (
        make_option('-l', '--loglevel',
                    action='store',
                    default=None,
                    help="Log level, default is taken from settings."),
        make_option('-f', '--logfile',
                    action='store',
                    default=None,
                    help="Path to log file, default is taken from settings."),
    )
    # Prepended to every log line's format string by subclasses that set it.
    log_prefix = ''
    def _configure(self, options):
        """Merge CLI logging options over settings, configure the root logger.

        Returns the merged SCHEDULER_DAEMON_OPTIONS dict.
        """
        from django.conf import settings
        daemon_options = settings.SCHEDULER_DAEMON_OPTIONS.copy()
        # None means "not given on the command line": keep the settings value.
        if options['logfile'] is not None:
            daemon_options['LOG_FILE_PATH'] = options['logfile']
        if options['loglevel'] is not None:
            daemon_options['LOG_LEVEL'] = options['loglevel']
        logger = logging.getLogger('')
        # '-' (or unset) logs to stderr instead of a file.
        if daemon_options['LOG_FILE_PATH'] in [None, '-']:
            handler = logging.StreamHandler(sys.stderr)
        else:
            handler = logging.FileHandler(daemon_options['LOG_FILE_PATH'])
        fmt = "%(asctime)s [%(levelname)s] [%(name)s] %(message)s"
        if self.log_prefix:
            fmt = self.log_prefix + ' ' + fmt
        handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(handler)
        logger.setLevel(getattr(logging, daemon_options['LOG_LEVEL'].upper()))
        return daemon_options
| import logging
from optparse import make_option
import sys
from django.core.management.base import BaseCommand
NOTSET = object()
class SchedulerCommand(BaseCommand):
    """Base class for scheduler management commands (earlier revision).

    Uses the NOTSET sentinel so a user can explicitly pass None-like values
    on the command line and still be distinguished from "option omitted".
    """
    option_list = BaseCommand.option_list + (
        make_option('-l', '--loglevel',
                    action='store',
                    default=NOTSET,
                    help="Log level, default is taken from settings."),
        make_option('-f', '--logfile',
                    action='store',
                    default=NOTSET,
                    help="Path to log file, default is taken from settings."),
    )
    # Prepended to every log line's format string by subclasses that set it.
    log_prefix = ''
    def _configure(self, options):
        """Merge CLI logging options over settings, configure the root logger."""
        from django.conf import settings
        daemon_options = settings.SCHEDULER_DAEMON_OPTIONS.copy()
        if options['logfile'] is not NOTSET:
            daemon_options['LOG_FILE_PATH'] = options['logfile']
        if options['loglevel'] is not NOTSET:
            daemon_options['LOG_LEVEL'] = options['loglevel']
        logger = logging.getLogger('')
        # A None log file path means: log to stderr.
        if daemon_options['LOG_FILE_PATH'] is None:
            handler = logging.StreamHandler(sys.stderr)
        else:
            handler = logging.FileHandler(daemon_options['LOG_FILE_PATH'])
        fmt = "%(asctime)s [%(levelname)s] [%(name)s] %(message)s"
        if self.log_prefix:
            fmt = self.log_prefix + ' ' + fmt
        handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(handler)
        logger.setLevel(getattr(logging, daemon_options['LOG_LEVEL'].upper()))
        return daemon_options
| agpl-3.0 | Python |
cb94433a39be091c387bf4bc6e96a5c94e77a2c4 | Fix downloading STM32 CubeMX | modm-io/modm-devices | tools/generator/raw-data-extractor/extract-stm32.py | tools/generator/raw-data-extractor/extract-stm32.py |
from pathlib import Path
from multiprocessing import Pool
import urllib.request
import zipfile
import shutil
import re
import io
import os
cubeurl = "https://www.st.com/content/st_com/en/products/development-tools/"\
"software-development-tools/stm32-software-development-tools/"\
"stm32-configurators-and-code-generators/stm32cubemx.html"
# Set the right headers
hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
with urllib.request.urlopen(urllib.request.Request(cubeurl, headers=hdr)) as response:
html = response.read().decode("utf-8")
dlurl = re.search(r'data-download-path="(/content/ccc/resource/.*?\.zip)"', html).group(1)
dlurl = "https://www.st.com" + dlurl
print("Downloading CubeMX...")
print(dlurl)
shutil.rmtree("temp-stm32", ignore_errors=True)
with urllib.request.urlopen(urllib.request.Request(dlurl, headers=hdr)) as content:
z = zipfile.ZipFile(io.BytesIO(content.read()))
item = [n for n in z.namelist() if ".exe" in n][0]
print("Extracting SetupSTM32CubeMX.exe...")
z = zipfile.ZipFile(io.BytesIO(z.read(item)))
print("Extracting Core-Pack...")
z.extract("resources/packs/pack-Core", "temp-stm32/")
print("Compiling IzPackDeserializer...")
Path("temp-stm32/bin/izpack_deserializer").mkdir(exist_ok=True, parents=True)
Path("temp-stm32/bin/com/izforge/izpack/api/data").mkdir(exist_ok=True, parents=True)
os.system("javac izpack/*.java")
shutil.move("izpack/IzPackDeserializer.class", "temp-stm32/bin/izpack_deserializer/")
for f in Path("izpack/").glob("*.class"):
shutil.move(str(f), "temp-stm32/bin/com/izforge/izpack/api/data/")
print("Extracting Database...")
os.system("(cd temp-stm32/bin; java izpack_deserializer.IzPackDeserializer > /dev/null)")
print("Moving Database...")
shutil.rmtree("../raw-device-data/stm32-devices", ignore_errors=True)
Path("../raw-device-data/stm32-devices").mkdir(exist_ok=True, parents=True)
shutil.move("temp-stm32/output/db/mcu", "../raw-device-data/stm32-devices/")
shutil.move("temp-stm32/output/db/plugins", "../raw-device-data/stm32-devices/")
|
from pathlib import Path
from multiprocessing import Pool
import urllib.request
import zipfile
import shutil
import re
import io
import os
cubeurl = "https://www.st.com/content/st_com/en/products/development-tools/"\
"software-development-tools/stm32-software-development-tools/"\
"stm32-configurators-and-code-generators/stm32cubemx.html"
with urllib.request.urlopen(cubeurl) as response:
html = response.read().decode("utf-8")
dlurl = re.search(r'data-download-path="(/content/ccc/resource/.*?\.zip)"', html).group(1)
dlurl = "https://www.st.com" + dlurl
print("Downloading CubeMX...")
print(dlurl)
shutil.rmtree("temp-stm32", ignore_errors=True)
with urllib.request.urlopen(dlurl) as content:
z = zipfile.ZipFile(io.BytesIO(content.read()))
item = [n for n in z.namelist() if ".exe" in n][0]
print("Extracting SetupSTM32CubeMX.exe...")
z = zipfile.ZipFile(io.BytesIO(z.read(item)))
print("Extracting Core-Pack...")
z.extract("resources/packs/pack-Core", "temp-stm32/")
print("Compiling IzPackDeserializer...")
Path("temp-stm32/bin/izpack_deserializer").mkdir(exist_ok=True, parents=True)
Path("temp-stm32/bin/com/izforge/izpack/api/data").mkdir(exist_ok=True, parents=True)
os.system("javac izpack/*.java")
shutil.move("izpack/IzPackDeserializer.class", "temp-stm32/bin/izpack_deserializer/")
for f in Path("izpack/").glob("*.class"):
shutil.move(str(f), "temp-stm32/bin/com/izforge/izpack/api/data/")
print("Extracting Database...")
os.system("(cd temp-stm32/bin; java izpack_deserializer.IzPackDeserializer > /dev/null)")
print("Moving Database...")
shutil.rmtree("../raw-device-data/stm32-devices", ignore_errors=True)
Path("../raw-device-data/stm32-devices").mkdir(exist_ok=True, parents=True)
shutil.move("temp-stm32/output/db/mcu", "../raw-device-data/stm32-devices/")
shutil.move("temp-stm32/output/db/plugins", "../raw-device-data/stm32-devices/")
| mpl-2.0 | Python |
2cd6a49c268e1c56f819fef5f838b2e0dfafb96b | Complete extended binary search sol | bowen0701/algorithms_data_structures | lc033_search_in_rotated_sorted_array.py | lc033_search_in_rotated_sorted_array.py | """Leetcode 33. Search in Rotated Sorted Array
Medium
URL: https://leetcode.com/problems/search-in-rotated-sorted-array/
Suppose an array sorted in ascending order is rotated at some pivot unknown
to you beforehand.
(i.e., [0,1,2,4,5,6,7] might become [4,5,6,7,0,1,2]).
You are given a target value to search. If found in the array return its index,
otherwise return -1.
You may assume no duplicate exists in the array.
Your algorithm's runtime complexity must be in the order of O(log n).
Example 1:
Input: nums = [4,5,6,7,0,1,2], target = 0
Output: 4
Example 2:
Input: nums = [4,5,6,7,0,1,2], target = 3
Output: -1
"""
class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
if not nums:
return -1
# Note that pivot is nums[0].
pivot = nums[0]
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] == target:
return mid
# Check target and middle are in the same side or splitted.
if (target >= pivot) == (nums[mid] >= pivot):
split_bool = False
else:
split_bool = True
if split_bool:
# If splitted, apply binary search based on pivot.
if target < pivot:
first = mid + 1
else:
last = mid - 1
else:
# If not splitted, apply normal binary search.
if target < nums[mid]:
last = mid - 1
else:
first = mid + 1
if nums[first] == target:
return first
else:
return -1
def main():
# Ans: 4
nums = [4,5,6,7,0,1,2]
target = 0
print Solution().search(nums, target)
# Ans: -1
nums = [4,5,6,7,0,1,2]
target = 3
print Solution().search(nums, target)
# Ans: 1
nums = [1,3]
target = 3
print Solution().search(nums, target)
if __name__ == '__main__':
main()
| """Leetcode 33. Search in Rotated Sorted Array
Medium
URL: https://leetcode.com/problems/search-in-rotated-sorted-array/
Suppose an array sorted in ascending order is rotated at some pivot unknown
to you beforehand.
(i.e., [0,1,2,4,5,6,7] might become [4,5,6,7,0,1,2]).
You are given a target value to search. If found in the array return its index,
otherwise return -1.
You may assume no duplicate exists in the array.
Your algorithm's runtime complexity must be in the order of O(log n).
Example 1:
Input: nums = [4,5,6,7,0,1,2], target = 0
Output: 4
Example 2:
Input: nums = [4,5,6,7,0,1,2], target = 3
Output: -1
"""
class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
fc5b84b56a9ee2f1a8690dad77a25d462b6e46ee | Use realpath instead of abspath for extension id calculation | catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult | telemetry/telemetry/core/extension_to_load.py | telemetry/telemetry/core/extension_to_load.py | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.core.chrome import crx_id
class ExtensionPathNonExistentException(Exception):
pass
class MissingPublicKeyException(Exception):
pass
class ExtensionToLoad(object):
def __init__(self, path, is_component=False):
if not os.path.isdir(path):
raise ExtensionPathNonExistentException(
'Extension path not a directory %s' % path)
self._path = path
self._local_path = path
self._is_component = is_component
if is_component and not crx_id.HasPublicKey(path):
raise MissingPublicKeyException(
'Component extension %s must have a public key' % path)
@property
def extension_id(self):
"""Unique extension id of this extension."""
if crx_id.HasPublicKey(self._path):
# Calculate extension id from the public key.
return crx_id.GetCRXAppID(os.path.realpath(self._path))
else:
# Calculate extension id based on the path on the device.
return crx_id.GetCRXAppID(os.path.realpath(self._local_path),
from_file_path=True)
@property
def path(self):
"""Path to extension source directory."""
return self._path
@property
def local_path(self):
"""Path to extension destination directory, for remote instances of
chrome"""
return self._local_path
@local_path.setter
def local_path(self, local_path):
self._local_path = local_path
@property
def is_component(self):
"""Whether this extension should be loaded as a component extension."""
return self._is_component
| # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.core.chrome import crx_id
class ExtensionPathNonExistentException(Exception):
pass
class MissingPublicKeyException(Exception):
pass
class ExtensionToLoad(object):
def __init__(self, path, is_component=False):
if not os.path.isdir(path):
raise ExtensionPathNonExistentException(
'Extension path not a directory %s' % path)
self._path = path
self._local_path = path
self._is_component = is_component
if is_component and not crx_id.HasPublicKey(path):
raise MissingPublicKeyException(
'Component extension %s must have a public key' % path)
@property
def extension_id(self):
"""Unique extension id of this extension."""
if crx_id.HasPublicKey(self._path):
# Calculate extension id from the public key.
return crx_id.GetCRXAppID(os.path.abspath(self._path))
else:
# Calculate extension id based on the path on the device.
return crx_id.GetCRXAppID(os.path.abspath(self._local_path),
from_file_path=True)
@property
def path(self):
"""Path to extension source directory."""
return self._path
@property
def local_path(self):
"""Path to extension destination directory, for remote instances of
chrome"""
return self._local_path
@local_path.setter
def local_path(self, local_path):
self._local_path = local_path
@property
def is_component(self):
"""Whether this extension should be loaded as a component extension."""
return self._is_component
| bsd-3-clause | Python |
96d444d8ee07a6004b6b96eece65835a4ea9b218 | Set `client_settings` in `/compare` sandbox | kdeloach/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,lliss/model-my-watershed,lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,WikiWatershed/model-my-watershed,project-icp/bee-pollinator-app,project-icp/bee-pollinator-app,project-icp/bee-pollinator-app | src/mmw/apps/home/views.py | src/mmw/apps/home/views.py | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from django.http import Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context_processors import csrf
from django.conf import settings
from apps.modeling.models import Project
def get_client_settings():
client_settings = {
'client_settings': json.dumps({
'base_layers': settings.BASE_LAYERS
})
}
return client_settings
def get_context(request):
context = {}
context.update(csrf(request))
context.update(get_client_settings())
return context
def home_page(request):
return render_to_response('home/home.html', get_context(request))
def project(request, proj_id=None, scenario_id=None):
"""
If proj_id was specified, check that the user owns
the project or if the project is public.
If not, return a 404. Otherwise, just load the index
template and the let the front-end handle the route
and request the project through the API.
"""
if proj_id:
project = get_object_or_404(Project, id=proj_id)
if project.user != request.user and project.is_private:
raise Http404
return render_to_response('home/home.html', get_context(request))
def compare(request):
return render_to_response('home/compare.html', get_context(request))
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from django.http import Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context_processors import csrf
from django.conf import settings
from apps.modeling.models import Project
def get_client_settings():
client_settings = {
'client_settings': json.dumps({
'base_layers': settings.BASE_LAYERS
})
}
return client_settings
def get_context(request):
context = {}
context.update(csrf(request))
context.update(get_client_settings())
return context
def home_page(request):
return render_to_response('home/home.html', get_context(request))
def project(request, proj_id=None, scenario_id=None):
"""
If proj_id was specified, check that the user owns
the project or if the project is public.
If not, return a 404. Otherwise, just load the index
template and the let the front-end handle the route
and request the project through the API.
"""
if proj_id:
project = get_object_or_404(Project, id=proj_id)
if project.user != request.user and project.is_private:
raise Http404
return render_to_response('home/home.html', get_context(request))
def compare(request):
return render_to_response('home/compare.html')
| apache-2.0 | Python |
d05473c99eaf89a42df4813a50c144859bc563fe | copy html template to the right place for gitchangelog | guardian/scala-automation | docs/buildChangelog.py | docs/buildChangelog.py | #!/usr/bin/python
import os
import tarfile
import urllib
import subprocess
import shutil
if not os.path.exists("local.tools/gitchangelog-2.1.3"):
os.mkdir("local.tools")
urllib.URLopener().retrieve("https://pypi.python.org/packages/source/g/gitchangelog/gitchangelog-2.1.3.tar.gz","local.tools/gitchangelog-2.1.3.tar.gz")
tar = tarfile.open("local.tools/gitchangelog-2.1.3.tar.gz")
tar.extractall("local.tools")
tar.close()
shutil.copyfile("html.tpl", "local.tools/gitchangelog-2.1.3/templates/mustache/html.tpl")
newEnv = os.environ.copy()
newEnv["GITCHANGELOG_CONFIG_FILENAME"] = "gitchangelog.rc"
with open("local.changelog2.html", "wb") as f:
subprocess.check_call(["python", "local.tools/gitchangelog-2.1.3/gitchangelog.py"], stdout=f, env=newEnv)
| #!/usr/bin/python
import os
import tarfile
import urllib
import subprocess
if not os.path.exists("local.tools/gitchangelog-2.1.3"):
os.mkdir("local.tools")
urllib.URLopener().retrieve("https://pypi.python.org/packages/source/g/gitchangelog/gitchangelog-2.1.3.tar.gz","local.tools/gitchangelog-2.1.3.tar.gz")
tar = tarfile.open("local.tools/gitchangelog-2.1.3.tar.gz")
tar.extractall("local.tools")
tar.close()
newEnv = os.environ.copy()
newEnv["GITCHANGELOG_CONFIG_FILENAME"] = "gitchangelog.rc"
with open("local.changelog2.html", "wb") as f:
subprocess.check_call(["python", "local.tools/gitchangelog-2.1.3/gitchangelog.py"], stdout=f, env=newEnv)
| apache-2.0 | Python |
58f1c75e052f8eb540b2dd4d01297cf826ebe798 | Make Searcher object template-renderable | pivotal-energy-solutions/django-appsearch,pivotal-energy-solutions/django-appsearch,pivotal-energy-solutions/django-appsearch,pivotal-energy-solutions/django-appsearch | appsearch/utils.py | appsearch/utils.py | from django.forms.formsets import formset_factory
from django.core.urlresolvers import reverse
from django.utils.encoding import StrAndUnicode
from django.template.loader import render_to_string
from appsearch.registry import search, SearchRegistry
from appsearch.forms import ModelSelectionForm, ConstraintForm, ConstraintFormset
class Searcher(StrAndUnicode):
""" Template helper, wrapping all the necessary components to render an appsearch page. """
# Methods and fields not meant to be accessed from the template should start with an underscore
# to let the template variable name resolution block access.
model_selection_form = None
constraint_formset = None
field_data_url = None
operator_data_url = None
string = None
results = None
def __init__(self, url, querydict, registry=search, **kwargs):
self.kwargs = kwargs
self._set_up_forms(querydict, registry)
self.url = url
self._determine_urls(kwargs)
self.form_template = kwargs.get('form_template', 'appsearch/default_form.html')
def __unicode__(self):
return render_to_string(self.form_template, {
self.kwargs.get('context_object_name', 'search'): self,
})
def _set_up_forms(self, querydict, registry):
self.model_selection_form = ModelSelectionForm(registry, querydict)
constraint_formset_class = formset_factory(ConstraintForm, formset=ConstraintFormset)
if self.model_selection_form.is_valid():
model_configuration = self.model_selection_form.get_model_configuration()
self.constraint_formset = constraint_formset_class(model_configuration, querydict)
if self.constraint_formset.is_valid():
# TODO: Execute search machinery
pass
else:
self.model_selection_form = ModelSelectionForm(registry)
self.constraint_formset = constraint_formset_class(configuration=None)
def _determine_urls(self, kwargs):
# If a URL is not customized, this namespace will be used to search out the default URL
url_namespace = kwargs.get('url_namespace')
# Check custom data URLs
field_url = kwargs.get('field_url')
operator_url = kwargs.get('operator_url')
if field_url is None:
url_name = 'appsearch:constraint-fields'
if url_namespace is not None:
url_name = url_namespace + ':' + url_name
field_url = reverse(url_name, kwargs=kwargs.get('field_url_kwargs', {}))
if operator_url is None:
url_name = 'appsearch:constraint-operators'
if url_namespace is not None:
url_name = url_namespace + ':' + url_name
operator_url = reverse(url_name, kwargs=kwargs.get('operator_url_kwargs', {}))
self.field_data_url = field_url
self.operator_data_url = operator_url
| from django.forms.formsets import formset_factory
from appsearch.registry import search, SearchRegistry
from appsearch.forms import ModelSelectionForm, ConstraintForm, ConstraintFormset
class Searcher(object):
model_selection_form = None
constraint_formset = None
string = None
results = None
def __init__(self, params, registry=search):
self.set_up_forms(params, registry)
def set_up_forms(self, params, registry):
self.model_selection_form = ModelSelectionForm(registry, params)
constraint_formset_class = formset_factory(ConstraintForm, formset=ConstraintFormset)
if self.model_selection_form.is_valid():
model_configuration = self.model_selection_form.get_model_configuration()
self.constraint_formset = constraint_formset_class(model_configuration, params)
if self.constraint_formset.is_valid():
# TODO: Execute search machinery
pass
else:
self.model_selection_form = ModelSelectionForm(registry)
self.constraint_formset = constraint_formset_class(configuration=None)
| apache-2.0 | Python |
6a85b89d75cbac1d408ef06cf716117e9a23f89e | Add logout view | hreeder/WHAuth,hreeder/WHAuth,hreeder/WHAuth | auth/core/views.py | auth/core/views.py | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, login_user, logout_user
from auth import db
from auth.utils import send_email
from auth.core import core
from auth.core.forms import LoginForm, RegistrationForm
from auth.core.models.user import User
@core.route("/")
@login_required
def home():
return render_template("core_home.html")
@core.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(
username=form.username.data,
active=True
).first()
if not user:
flash("User account not found!", "danger")
return redirect(url_for("core.login"))
if user.validate_password(form.password.data):
login_user(user)
return redirect(request.args.get("next") or url_for("core.home"))
else:
flash("Your password was incorrect!", "danger")
return render_template("core_login.html", form=form)
@core.route("/logout")
@login_required
def logout():
logout_user()
return redirect(url_for("core.home"))
@core.route("/register", methods=["GET", "POST"])
def register():
form = RegistrationForm()
if form.validate_on_submit():
# Create user model
new_user = User(
username=form.username.data,
email=form.email.data,
password=User.generate_password_hash(form.password.data)
)
# Set activation key
new_user.generate_activation_key()
print(new_user.activation_key)
print(url_for('core.validate_registration', username=new_user.username, key=new_user.activation_key))
# Save user
db.session.add(new_user)
db.session.commit()
# Send the new user their activation code
# send_email()
return redirect(url_for('core.post_register'))
return render_template("core_register.html", form=form)
@core.route("/register/validating")
def post_register():
return render_template("core_post_register.html")
@core.route("/register/validate/<username>/<key>")
def validate_registration(username, key):
user = User.query.filter_by(username=username, activation_key=key, active=False).first_or_404()
user.activate()
db.session.add(user)
db.session.commit()
return redirect(url_for('core.login')) | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, login_user
from auth import db
from auth.utils import send_email
from auth.core import core
from auth.core.forms import LoginForm, RegistrationForm
from auth.core.models.user import User
@core.route("/")
@login_required
def home():
return render_template("core_home.html")
@core.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(
username=form.username.data,
active=True
).first()
if not user:
flash("User account not found!", "danger")
return redirect(url_for("core.login"))
if user.validate_password(form.password.data):
login_user(user)
return redirect(request.args.get("next") or url_for("core.home"))
else:
flash("Your password was incorrect!", "danger")
return render_template("core_login.html", form=form)
@core.route("/register", methods=["GET", "POST"])
def register():
form = RegistrationForm()
if form.validate_on_submit():
# Create user model
new_user = User(
username=form.username.data,
email=form.email.data,
password=User.generate_password_hash(form.password.data)
)
# Set activation key
new_user.generate_activation_key()
print(new_user.activation_key)
print(url_for('core.validate_registration', username=new_user.username, key=new_user.activation_key))
# Save user
db.session.add(new_user)
db.session.commit()
# Send the new user their activation code
# send_email()
return redirect(url_for('core.post_register'))
return render_template("core_register.html", form=form)
@core.route("/register/validating")
def post_register():
return render_template("core_post_register.html")
@core.route("/register/validate/<username>/<key>")
def validate_registration(username, key):
user = User.query.filter_by(username=username, activation_key=key, active=False).first_or_404()
user.activate()
db.session.add(user)
db.session.commit()
return redirect(url_for('core.login')) | mit | Python |
3f2778ebac1ecd2587d12ee2256db8068816d6c0 | refactor files | N402/NoahsArk,N402/NoahsArk | ark/goal/models.py | ark/goal/models.py | from datetime import datetime
from ark.exts import db
class Goal(db.Model):
__tablename__ = 'goal'
GOAL_STATES = {
'ready': 'ready',
'doing': 'Doing',
'canceled': 'Canceled',
'finished': 'Finished',
'expired': 'Expired',
}
id = db.Column(db.Integer, primary_key=True)
account_id = db.Column(db.Integer, db.ForeignKey('account.id'))
title = db.Column(db.String(100), nullable=False)
description = db.Column(db.String(300))
image_file_id = db.Column(db.Integer, db.ForeignKey('goal_file.id'))
created = db.Column(db.DateTime, default=datetime.utcnow)
operate_at = db.Column(db.DateTime)
state = db.Column(db.Enum(*(GOAL_STATES.keys())), default='ready')
image = db.relationship('GoalFile', uselist=False)
class GoalActivity(db.Model):
__tablename__ = 'goal_activity'
id = db.Column(db.Integer, primary_key=True)
goal_id = db.Column(db.Integer, db.ForeignKey('goal.id'))
image_file_id = db.Column(db.Integer, db.ForeignKey('goal_file.id'))
content = db.Column(db.String(300))
created = db.Column(db.DateTime, default=datetime.utcnow)
image = db.relationship('GoalFile', uselist=False)
class GoalLikeLog(db.Model):
__tablename__ = 'goal_like_log'
id = db.Column(db.Integer, primary_key=True)
goal_id = db.Column(db.Integer, db.ForeignKey('goal.id'))
account_id = db.Column(db.Integer, db.ForeignKey('account.id'))
created = db.Column(db.DateTime, default=datetime.utcnow)
is_deleted = db.Column(db.Boolean, default=False)
class GoalFile(db.Model):
__tablename__ = 'goal_file'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80))
file_url = db.Column(db.String(500))
account_id = db.Column(db.Integer, db.ForeignKey('account.id'))
created = db.Column(db.DateTime, default=datetime.utcnow)
| from datetime import datetime
from ark.exts import db
class Goal(db.Model):
__tablename__ = 'goal'
GOAL_STATES = {
'ready': 'ready',
'doing': 'Doing',
'canceled': 'Canceled',
'finished': 'Finished',
'expired': 'Expired',
}
id = db.Column(db.Integer, primary_key=True)
account_id = db.Column(db.Integer, db.ForeignKey('account.id'))
title = db.Column(db.String(100), nullable=False)
description = db.Column(db.String(300))
image_url = db.Column(db.String(300))
created = db.Column(db.DateTime, default=datetime.utcnow)
operate_at = db.Column(db.DateTime)
state = db.Column(db.Enum(*(GOAL_STATES.keys())), default='ready')
class GoalActivity(db.Model):
__tablename__ = 'goal_activity'
id = db.Column(db.Integer, primary_key=True)
goal_id = db.Column(db.Integer, db.ForeignKey('goal.id'))
content = db.Column(db.String(300))
image_url = db.Column(db.String(300))
created = db.Column(db.DateTime, default=datetime.utcnow)
class GoalLikeLog(db.Model):
__tablename__ = 'goal_like_log'
id = db.Column(db.Integer, primary_key=True)
goal_id = db.Column(db.Integer, db.ForeignKey('goal.id'))
account_id = db.Column(db.Integer, db.ForeignKey('account.id'))
created = db.Column(db.DateTime, default=datetime.utcnow)
is_deleted = db.Column(db.Boolean, default=False)
| mit | Python |
677b9bdfa4bf0c2541bea04bf44c6a0ca4ab90c9 | mark log jid test as flaky | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/logging/test_jid_logging.py | tests/integration/logging/test_jid_logging.py | # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import TestsLoggingHandler, flaky
import logging
import salt.ext.six as six
@skipIf(six.PY3, 'Runtest Log Hander Disabled for PY3, #41836')
class LoggingJIDsTest(ModuleCase):
'''
Validate that JIDs appear in LOGs
'''
def setUp(self):
'''
Set up
'''
log_format = '[%(levelname)-8s] %(jid)s %(message)s'
self.handler = TestsLoggingHandler(format=log_format,
level=logging.DEBUG)
@flaky
def test_jid_in_logs(self):
'''
Test JID in log_format
'''
with self.handler:
self.run_function('test.ping')
assert any('JID' in s for s in self.handler.messages) is True, 'JID not found in log messages'
| # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import TestsLoggingHandler
import logging
import salt.ext.six as six
@skipIf(six.PY3, 'Runtest Log Hander Disabled for PY3, #41836')
class LoggingJIDsTest(ModuleCase):
'''
Validate that JIDs appear in LOGs
'''
def setUp(self):
'''
Set up
'''
log_format = '[%(levelname)-8s] %(jid)s %(message)s'
self.handler = TestsLoggingHandler(format=log_format,
level=logging.DEBUG)
def test_jid_in_logs(self):
'''
Test JID in log_format
'''
with self.handler:
self.run_function('test.ping')
assert any('JID' in s for s in self.handler.messages) is True, 'JID not found in log messages'
| apache-2.0 | Python |
102267acafeb66b417e852f2e04a345b6d77504e | Refactor web scraping. OOP coming. | NathanMH/scripts,NathanMH/scripts,NathanMH/scripts | bc_bounty_check.py | bc_bounty_check.py | """####################
Author: Nathan Mador-House
####################"""
#######################
"""####################
Index:
1. Imports and Readme
2. Functions
3. Main
4. Testing
####################"""
#######################
###################################################################
# 1. IMPORTS AND README
###################################################################
from bs4 import BeautifulSoup
import urllib.request
import re
# Searches for new open issues from BC open government pay for pull program.
###################################################################
# 2. FUNCTIONS
###################################################################
BOUNTIES_URL = "https://github.com/search?utf8=%E2%9C%93&q=org%3Abcgov+%241000&type=Issues&ref=searchresults"
def make_html(url):
""" Make soup object """
html = urllib.request.Request(url)
response = urllib.request.urlopen(html)
html_response = response.read().decode('utf-8')
the_soup = BeautifulSoup(html_response, 'html.parser')
return the_soup
def previous_bounties(bounties_list):
""" Get list of previous bounties """
previous_bounties = []
with open(bounties_list, 'r') as text_file:
previous_bounties = text_file.readlines()
number_of_previous_bounties = len(previous_bounties)
print("Previous Bounties: " + str(number_of_previous_bounties))
for i in previous_bounties:
print(str(i).strip())
def current_bounties(soup):
""" Get list of current bounties """
# Active Bounties
active_bounties = soup.find_all("svg", class_="octicon octicon-issue-opened open")
total_active_bounties = len(active_bounties)
print("Active Bounties: " + str(total_active_bounties))
# Bounties listed
bounties = soup.find_all(class_="issue-list-item public")
total_current_bounties = len(bounties)
print("Total bounties: " + str(total_current_bounties))
# Bounty links
bounty_links = soup.find_all("a", href=re.compile("/bcgov/"))
print(bounty_links[0])
# Tuple of [Key, Title, Link]
tuple
def save_current_bounties(bounties, bounties_file):
for i in bounties:
with open(bounties_file, 'w') as text_file:
text_file.write(i)
###################################################################
# 3. MAIN
###################################################################
def main(url):
soup = make_html(url)
# previous_bounties('previous_bounties.txt')
current_bounties(soup)
main(BOUNTIES_URL)
###################################################################
# 4. TESTING
###################################################################
| #!/bin/sh
# Searches for new open issues from BC open government pay for pull program.
from bs4 import BeautifulSoup
import urllib.request
url = "https://github.com/search?utf8=%E2%9C%93&q=org%3Abcgov+%241000&type=Issues&ref=searchresults"
def make_html(url):
html = urllib.request.Request(url)
response = urllib.request.urlopen(html)
return response.read().decode('utf-8')
def make_soup(html):
soup = BeautifulSoup(html, 'html.parser')
return soup
def check_number_of_bounties(soup, filename):
with open(filename, 'r') as text_file:
bounties = text_file.readline()
number_active_bounties = len(soup.find_all("svg", class_="octicon octicon-issue-opened open"))
print("Previous Bounties: " + bounties.rstrip())
print("Current Bounties: " + str(number_active_bounties))
print("---------------")
all_bounties = soup.find_all("p", class_="title")
for i in all_bounties:
print(i)
print()
# active_bounties = []
# for i in range(1, number_active_bounties):
# print(all_bounties[i])
# active_bounties[i] = all_bounties[i]
# print(len(active_bounties)ne)
# for i in active_bounties:
# print(i)
# def __main__(url):
html = make_html(url)
soup = make_soup(html)
check_number_of_bounties(soup, 'previous_bounties.txt')
| mit | Python |
305fb2b631bf9ede152995d1ba264ac58d39cea9 | Improve display of exceptions | jacquev6/MockMockMock | MockMockMock/_Details/MockException.py | MockMockMock/_Details/MockException.py | # -*- coding: utf-8 -*-
# Copyright 2013 Vincent Jacques
# vincent@vincent-jacques.net
# This file is part of MockMockMock. http://jacquev6.github.com/MockMockMock
# MockMockMock is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
# MockMockMock is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License along with MockMockMock. If not, see <http://www.gnu.org/licenses/>.
class MockException(Exception):
pass
| # -*- coding: utf-8 -*-
# Copyright 2013 Vincent Jacques
# vincent@vincent-jacques.net
# This file is part of MockMockMock. http://jacquev6.github.com/MockMockMock
# MockMockMock is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
# MockMockMock is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License along with MockMockMock. If not, see <http://www.gnu.org/licenses/>.
class MockException(Exception):
def __init__(self, message):
Exception.__init__(self)
self.message = message
| mit | Python |
71214cca77137e23b279a5495ded1fce6f37aa11 | Set version to 0.2 | carlospalol/django-moneyfield,generalov/django-moneyfield | moneyfield/__init__.py | moneyfield/__init__.py | from .fields import MoneyField, MoneyModelForm
from .exceptions import *
__version__ = '0.2' | from .fields import MoneyField, MoneyModelForm
from .exceptions import *
__version__ = 'experimental' | mit | Python |
a61e63be1b1c6e31fd0d469962277f05db644bc4 | Add dashboard URLs | PanDAWMS/panda-bigmon-lsst,kiae-grid/panda-bigmon-lsst,PanDAWMS/panda-bigmon-lsst | lsst/urls.py | lsst/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
#from core.common.urls import *
import lsst.settings
import lsst.views as lsstmon_views
urlpatterns = patterns('',
### url(r'^$', lsstmon_views.mainPage),
### url(r'^lsst/$', lsstmon_views.mainPage),
### url(r'^lsst/jobs/$', lsstmon_views.jobList),
### url(r'^lsst/jobs/(.*)/$', lsstmon_views.jobList),
### url(r'^lsst/jobs/(.*)/(.*)/$', lsstmon_views.jobList),
### url(r'^lsst/job$', lsstmon_views.jobInfo),
### url(r'^lsst/job/(.*)/$', lsstmon_views.jobInfo),
### url(r'^lsst/job/(.*)/(.*)/$', lsstmon_views.jobInfo),
### url(r'^lsst/users/$', lsstmon_views.userList),
### url(r'^lsst/user/(?P<user>.*)/$', lsstmon_views.userInfo),
### url(r'^lsst/sites/$', lsstmon_views.siteList),
### url(r'^lsst/site/(?P<site>.*)/$', lsstmon_views.siteInfo),
# url(r'^$', lsstmon_views.mainPage),
url(r'^$', lsstmon_views.mainPage),
url(r'^jobs/$', lsstmon_views.jobList),
url(r'^jobs/(.*)/$', lsstmon_views.jobList),
url(r'^jobs/(.*)/(.*)/$', lsstmon_views.jobList),
url(r'^job$', lsstmon_views.jobInfo),
url(r'^job/(.*)/$', lsstmon_views.jobInfo),
url(r'^job/(.*)/(.*)/$', lsstmon_views.jobInfo),
url(r'^users/$', lsstmon_views.userList),
url(r'^user/(?P<user>.*)/$', lsstmon_views.userInfo),
url(r'^sites/$', lsstmon_views.siteList),
url(r'^site/(?P<site>.*)/$', lsstmon_views.siteInfo),
url(r'^dash/$', lsstmon_views.dashAnalysis),
url(r'^dash/analysis/$', lsstmon_views.dashAnalysis),
url(r'^dash/production/$', lsstmon_views.dashProduction),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
#urlpatterns += common_patterns
#urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
#from core.common.urls import *
import lsst.settings
import lsst.views as lsstmon_views
urlpatterns = patterns('',
### url(r'^$', lsstmon_views.mainPage),
### url(r'^lsst/$', lsstmon_views.mainPage),
### url(r'^lsst/jobs/$', lsstmon_views.jobList),
### url(r'^lsst/jobs/(.*)/$', lsstmon_views.jobList),
### url(r'^lsst/jobs/(.*)/(.*)/$', lsstmon_views.jobList),
### url(r'^lsst/job$', lsstmon_views.jobInfo),
### url(r'^lsst/job/(.*)/$', lsstmon_views.jobInfo),
### url(r'^lsst/job/(.*)/(.*)/$', lsstmon_views.jobInfo),
### url(r'^lsst/users/$', lsstmon_views.userList),
### url(r'^lsst/user/(?P<user>.*)/$', lsstmon_views.userInfo),
### url(r'^lsst/sites/$', lsstmon_views.siteList),
### url(r'^lsst/site/(?P<site>.*)/$', lsstmon_views.siteInfo),
# url(r'^$', lsstmon_views.mainPage),
url(r'^$', lsstmon_views.mainPage),
url(r'^jobs/$', lsstmon_views.jobList),
url(r'^jobs/(.*)/$', lsstmon_views.jobList),
url(r'^jobs/(.*)/(.*)/$', lsstmon_views.jobList),
url(r'^job$', lsstmon_views.jobInfo),
url(r'^job/(.*)/$', lsstmon_views.jobInfo),
url(r'^job/(.*)/(.*)/$', lsstmon_views.jobInfo),
url(r'^users/$', lsstmon_views.userList),
url(r'^user/(?P<user>.*)/$', lsstmon_views.userInfo),
url(r'^sites/$', lsstmon_views.siteList),
url(r'^site/(?P<site>.*)/$', lsstmon_views.siteInfo),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
#urlpatterns += common_patterns
#urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| apache-2.0 | Python |
088c5bad2845d9f82fd712af7c4737c7fc45d0bc | Update get-relation-foreign-objects.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | Django/get-relation-foreign-objects.py | Django/get-relation-foreign-objects.py | #https://docs.djangoproject.com/en/dev/topics/db/queries/#caching-and-querysets
class Data_Toko(models.Model):
....
class Pengiriman(models.Model):
property = models.ForeignKey(Data_Toko, related_name='pengiriman')
metode_pengiriman = models.CharField(max_length=200, default='JNE', blank=True, null=True, help_text='Isikan Pengiriman yang tersedia, contoh: JNE')
>>> a = Pengiriman.objects.all()
>>> a
[<Pengiriman: Pengiriman object>, <Pengiriman: Pengiriman object>]
>>>
>>> from appearance.models import Pengiriman
>>> a = Pengiriman.objects.all()
>>> print ([p.metode_pengiriman for p in a])
[u'Tiki', u'Pos Indonesia']
>>>
>>> c = [str(p.metode_pengiriman) for p in a]
>>> c
['Tiki', 'Pos Indonesia']
>>>
>>> for i in c:
... print i
...
Tiki
Pos Indonesia
>>>
| #https://docs.djangoproject.com/en/dev/topics/db/queries/#caching-and-querysets
class Data_Toko(models.Model):
....
class Pengiriman(models.Model):
property = models.ForeignKey(Data_Toko, related_name='pengiriman')
metode_pengiriman = models.CharField(max_length=200, default='JNE', blank=True, null=True, help_text='Isikan Pengiriman yang tersedia, contoh: JNE')
>>> a = Pengiriman.objects.all()
>>> a
[<Pengiriman: Pengiriman object>, <Pengiriman: Pengiriman object>]
>>>
>>> from appearance.models import Pengiriman
>>> a = Pengiriman.objects.all()
>>> print ([p.metode_pengiriman for p in a])
[u'Tiki', u'Pos Indonesia']
>>>
>>> c = [str(p.metode_pengiriman) for p in a]
>>> c
['Tiki', 'Pos Indonesia']
>>>
| agpl-3.0 | Python |
0766e7fbbb1ef5315b20814d277f44c8ec8b82fb | add 1.3-9 (#10732) | LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/r-rgdal/package.py | var/spack/repos/builtin/packages/r-rgdal/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRgdal(RPackage):
"""Provides bindings to the 'Geospatial' Data Abstraction Library
('GDAL') (>= 1.6.3) and access to projection/transformation operations
from the 'PROJ.4' library. The 'GDAL' and 'PROJ.4' libraries are
external to the package, and, when installing the package from source,
must be correctly installed first. Both 'GDAL' raster and 'OGR' vector
map data can be imported into R, and 'GDAL' raster data and 'OGR'
vector data exported. Use is made of classes defined in the 'sp' package.
Windows and Mac Intel OS X binaries (including 'GDAL', 'PROJ.4' and
'Expat') are provided on 'CRAN'."""
homepage = "https://cran.r-project.org/package=rgdal"
url = "https://cran.rstudio.com/src/contrib/rgdal_1.3-9.tar.gz"
list_url = "https://cran.rstudio.com/src/contrib/Archive/rgdal"
version('1.3-9', sha256='3e44f88d09894be4c0abd8874d00b40a4a5f4542b75250d098ffbb3ba41e2654')
version('1.2-16', 'de83bf08519a53de68a7632ecb7f2dc9')
depends_on('r-sp', type=('build', 'run'))
depends_on('gdal')
depends_on('proj')
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRgdal(RPackage):
"""Provides bindings to the 'Geospatial' Data Abstraction Library
('GDAL') (>= 1.6.3) and access to projection/transformation operations
from the 'PROJ.4' library. The 'GDAL' and 'PROJ.4' libraries are
external to the package, and, when installing the package from source,
must be correctly installed first. Both 'GDAL' raster and 'OGR' vector
map data can be imported into R, and 'GDAL' raster data and 'OGR'
vector data exported. Use is made of classes defined in the 'sp' package.
Windows and Mac Intel OS X binaries (including 'GDAL', 'PROJ.4' and
'Expat') are provided on 'CRAN'."""
homepage = "https://cran.r-project.org/package=rgdal"
url = "https://cran.rstudio.com/src/contrib/rgdal_1.2-16.tar.gz"
list_url = "https://cran.rstudio.com/src/contrib/Archive/rgdal"
version('1.2-16', 'de83bf08519a53de68a7632ecb7f2dc9')
depends_on('r-sp', type=('build', 'run'))
depends_on('gdal')
depends_on('proj')
| lgpl-2.1 | Python |
93767e220919ef53d011e6930d66eadf4773d779 | Update MotorsControlFile.py | VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot | ProBot_BeagleBone/MotorsControlFile.py | ProBot_BeagleBone/MotorsControlFile.py | #!/usr/bin/python
# Python Standart Library Imports
import SabertoothFile
import PWMFile
import ProBotConstantsFile
# Initialization of classes from local files
Sabertooth = SabertoothFile.SabertoothClass()
PWM = PWMFile.PWMClass()
Pconst = ProBotConstantsFile.Constants()
class MotorsControlClass():
def MotorsControl(self,rightMotor, leftMotor, userChoice):
if userChoice=='1':
# Sending the values to the Sabertooth that is connected to the motors
Sabertooth.drive(Pconst.addr, 1, int(rightMotor))
Sabertooth.drive(Pconst.addr, 2, int(leftMotor))
if userChoice=='2':
# Sending the values to the pwm controller that is connected to the motors
PWM.PWM_Signals(round(rightMotor, 2), round(leftMotor, 2))
| #!/usr/bin/python
# Python Standart Library Imports
import SabertoothFile
import PWMFile
import ProBotConstantsFile
# Initialization of classes from local files
Sabertooth = SabertoothFile.SabertoothClass()
PWM = PWMFile.PWMClass()
Pconst = ProBotConstantsFile.Constants()
class MotorsControlClass():
def MotorsControl(self,rightMotor, leftMotor, userChoice):
if userChoice=='1':
# Sending the values to the Sabertooth that is connected to the motors
Sabertooth.drive(Pconst.addr, 1, int(rightMotor))
Sabertooth.drive(Pconst.addr, 2, int(leftMotor))
if userChoice=='2':
# Sending the values to the pwm controller that is connected to the motors
PWM.PWM_Signals(round(rightMotor, 2), round(leftMotor, 2))
| agpl-3.0 | Python |
6f9e935c01c77a440d1190c6148e567f1694797b | Update docker_settings_secret.py | openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,neurodata/ndstore,openconnectome/open-connectome | setup/docker_config/django/docker_settings_secret.py | setup/docker_config/django/docker_settings_secret.py | # Secret Settings for NeuroData
USER = 'neurodata'
PASSWORD = 'neur0data'
HOST = 'localhost'
SECRET_KEY = 'nothing_as_such'
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
SHARED_SECRET = ''
| # Secret Settings for NeuroData
USER = 'neurodata'
PASSWORD = 'neur0data'
HOST = 'localhost'
SECRET_KEY = 'nothing_as_such'
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
| apache-2.0 | Python |
fff8576444498fbb264c441c98302f9e45275270 | Add line SUBSTITUTE | abcdw/direlog,abcdw/direlog | patterns.py | patterns.py | # -*- coding: utf-8 -*-
import re
pre_patterns = [
(
r'(\d{16}-[-\w]*\b)',
r'REQUEST_ID_SUBSTITUTE',
),
(
# r'([\dA-F]){8}-[\dA-F]{4}-4[\dA-F]{3}-[89AB][\dA-F]{3}-[\dA-F]{12}',
r'([0-9A-F]){8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}',
# r'[0-9A-F-]{36}',
# r'ACE088EB-ECA6-4348-905A-041EF10DBD53',
r'UUID_SUBSTITUTE',
),
(
# r"""
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
# """,
# r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
r'\b(\d{1,3}\.){3}\d{1,3}\b',
r'IP_ADDRESS_SUBSTITUTE',
),
(
r'js:\d+:\d+',
r'js:POSITION_SUBSTITUTE',
),
(
r'line \d+',
r'LINE_SUBSTITUTE',
),
]
| # -*- coding: utf-8 -*-
import re
pre_patterns = [
(
r'(\d{16}-[-\w]*\b)',
r'REQUEST_ID_SUBSTITUTE',
),
(
# r'([\dA-F]){8}-[\dA-F]{4}-4[\dA-F]{3}-[89AB][\dA-F]{3}-[\dA-F]{12}',
r'([0-9A-F]){8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}',
# r'[0-9A-F-]{36}',
# r'ACE088EB-ECA6-4348-905A-041EF10DBD53',
r'UUID_SUBSTITUTE',
),
(
# r"""
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.
# (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
# """,
# r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
r'\b(\d{1,3}\.){3}\d{1,3}\b',
r'IP_ADDRESS_SUBSTITUTE',
),
(
r'js:\d+:\d+',
r'js:POSITION_SUBSTITUTE',
),
]
| mit | Python |
5bcc4ae60f89fbcadad234e0d6b9a755d28aab5d | Handle ctrl-C-ing out of palm-log | markpasc/paperplain,markpasc/paperplain | pavement.py | pavement.py | import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
try:
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
except KeyboardInterrupt:
print
| import subprocess
from paver.easy import *
def call(*args, **kwargs):
return subprocess.call(args, **kwargs)
@task
def build():
"""Package up the app."""
call('palm-package', '.')
@task
def halt():
call('palm-launch', '--device=emulator', '-c', 'org.markpasc.paperplain')
@task
@needs('halt')
def uninstall():
call('palm-install', '--device=emulator', '-r', 'org.markpasc.paperplain')
@task
@needs('build', 'uninstall')
def push():
"""Reinstall the app and start it."""
call('palm-install', '--device=emulator', 'org.markpasc.paperplain_1.0.0_all.ipk')
call('palm-launch', '--device=emulator', 'org.markpasc.paperplain')
@task
def tail():
"""Follow the device's log."""
call('palm-log', '--device=emulator', '--system-log-level', 'info')
call('palm-log', '--device=emulator', '-f', 'org.markpasc.paperplain')
| mit | Python |
85eda1d8dc0774d90cc6ff0410c36c3f1119fbd0 | Update calc figures | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | cla_backend/libs/eligibility_calculator/constants/disposable_income.py | cla_backend/libs/eligibility_calculator/constants/disposable_income.py | LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| LIMIT = 73300
PARTNER_ALLOWANCE = 17946
CHILD_ALLOWANCE = 28822
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| mit | Python |
1493513ffa056c399f92ab1db70ba0bd81e3b642 | move bba fields to easy_my_coop_be | OCA/l10n-belgium,OCA/l10n-belgium | easy_my_coop_be/models/coop.py | easy_my_coop_be/models/coop.py | from odoo import fields, models
class SubscriptionRequest(models.Model):
_inherit = 'subscription.request'
company_type = fields.Selection([('scrl', 'SCRL'),
('asbl', 'ASBL'),
('sprl', 'SPRL'),
('sa', 'SA')])
def get_partner_company_vals(self):
vals = super(SubscriptionRequest).get_partner_company_vals()
vals['out_inv_comm_algorithm'] = 'random'
return vals
def get_partner_vals(self):
vals = super(SubscriptionRequest).get_partner_vals()
vals['out_inv_comm_type'] = 'bba'
vals['out_inv_comm_algorithm'] = 'random'
return vals
def get_representative_valst(self):
vals = super(SubscriptionRequest).get_representative_vals()
vals['out_inv_comm_type'] = 'bba'
vals['out_inv_comm_algorithm'] = 'random'
return vals
| from odoo import fields, models
class SubscriptionRequest(models.Model):
_inherit = 'subscription.request'
company_type = fields.Selection([('scrl', 'SCRL'),
('asbl', 'ASBL'),
('sprl', 'SPRL'),
('sa', 'SA')])
| agpl-3.0 | Python |
692532c762d18310ad338132a001a557b64f22d3 | Remove useless test | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/monitoring/tests/zabbix/test_db_client.py | nodeconductor/monitoring/tests/zabbix/test_db_client.py | from __future__ import unicode_literals
import unittest
from django.db import DatabaseError
from mock import Mock
from nodeconductor.monitoring.zabbix.db_client import ZabbixDBClient
class ZabbixPublicApiTest(unittest.TestCase):
def setUp(self):
self.client = ZabbixDBClient()
def test_get_item_stats_returns_empty_list_on_db_error(self):
self.client.zabbix_api_client.get_host_ids = Mock(return_value=[])
self.client.get_item_time_and_value_list = Mock(side_effect=DatabaseError)
self.assertEqual(self.client.get_item_stats([], 'cpu', 1, 10, 2), [])
| from __future__ import unicode_literals
import unittest
from django.db import DatabaseError
from mock import Mock
from nodeconductor.monitoring.zabbix.db_client import ZabbixDBClient
class ZabbixPublicApiTest(unittest.TestCase):
def setUp(self):
self.client = ZabbixDBClient()
def test_get_item_stats_returns_time_segments(self):
self.client.zabbix_api_client.get_host_ids = Mock(return_value=[1])
start_timestamp = 1415912624L
end_timestamp = 1415912630L
time_and_value_list = Mock()
time_and_value_list.fetchone = Mock(return_value=(1415912630L,1))
self.client.get_item_time_and_value_list = Mock(return_value=time_and_value_list)
segments_count = 3
instance = object
item_key = 'cpu'
segment_list = self.client.get_item_stats([instance], item_key, start_timestamp, end_timestamp, segments_count)
expected_segment_list = [
{'from': 1415912624L, 'to': 1415912626L, 'value': 1},
{'from': 1415912626L, 'to': 1415912628L, 'value': 1},
{'from': 1415912628L, 'to': 1415912630L, 'value': 1},
]
self.assertEquals(segment_list, expected_segment_list)
self.client.zabbix_api_client.get_host_ids.assert_called_once_with([instance])
def test_get_item_stats_returns_empty_list_on_db_error(self):
self.client.zabbix_api_client.get_host_ids = Mock(return_value=[])
self.client.get_item_time_and_value_list = Mock(side_effect=DatabaseError)
self.assertEqual(self.client.get_item_stats([], 'cpu', 1, 10, 2), [])
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.