commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
fc6a9dfebc8bc016155c791c3325cdbb257dc192 | Move redis import | OParl/validator,OParl/validator | src/cache.py | src/cache.py | # The MIT License (MIT)
#
# Copyright (c) 2017 Stefan Graupner
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class Cache:
basekey = ""
def __init__(self, basekey=""):
self.basekey = basekey
def has(self, key):
return False
def get(self, key):
return ""
def set(self, key, value, ttl=0):
pass
def fullkey(self, key):
if len(self.basekey) > 0:
return "{}:{}".format(self.basekey, key)
else:
return key
class RedisCache(Cache):
redis = None
def __init__(self, basekey="", redis_server='localhost', redis_port=6379):
import redis
Cache.__init__(self, basekey)
self.redis = redis.Redis(host=redis_server, port=redis_port, db=0)
def has(self, key):
return self.redis.exists(self.fullkey(key))
def get(self, key):
return self.redis.get(self.fullkey(key))
def set(self, key, value, ttl=600):
return self.redis.set(self.fullkey(key), value, ttl)
| # The MIT License (MIT)
#
# Copyright (c) 2017 Stefan Graupner
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import redis
class Cache:
basekey = ""
def __init__(self, basekey=""):
self.basekey = basekey
def has(self, key):
return False
def get(self, key):
return ""
def set(self, key, value, ttl=0):
pass
def fullkey(self, key):
if len(self.basekey) > 0:
return "{}:{}".format(self.basekey, key)
else:
return key
class RedisCache(Cache):
redis = None
def __init__(self, basekey="", redis_server='localhost', redis_port=6379):
Cache.__init__(self, basekey)
self.redis = redis.Redis(host=redis_server, port=redis_port, db=0)
def has(self, key):
return self.redis.exists(self.fullkey(key))
def get(self, key):
return self.redis.get(self.fullkey(key))
def set(self, key, value, ttl=600):
return self.redis.set(self.fullkey(key), value, ttl)
| mit | Python |
4f5419fdec8dd1face4e3185d7146c1705b4de0f | add _t_r_a_k to list of available tables | fonttools/fonttools,googlefonts/fonttools | Lib/fontTools/ttLib/tables/__init__.py | Lib/fontTools/ttLib/tables/__init__.py |
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
# DON'T EDIT! This file is generated by MetaTools/buildTableList.py.
def _moduleFinderHint():
"""Dummy function to let modulefinder know what tables may be
dynamically imported. Generated by MetaTools/buildTableList.py.
>>> _moduleFinderHint()
"""
from . import B_A_S_E_
from . import C_B_D_T_
from . import C_B_L_C_
from . import C_F_F_
from . import C_O_L_R_
from . import C_P_A_L_
from . import D_S_I_G_
from . import E_B_D_T_
from . import E_B_L_C_
from . import F_F_T_M_
from . import G_D_E_F_
from . import G_M_A_P_
from . import G_P_K_G_
from . import G_P_O_S_
from . import G_S_U_B_
from . import J_S_T_F_
from . import L_T_S_H_
from . import M_A_T_H_
from . import M_E_T_A_
from . import O_S_2f_2
from . import S_I_N_G_
from . import S_V_G_
from . import T_S_I_B_
from . import T_S_I_D_
from . import T_S_I_J_
from . import T_S_I_P_
from . import T_S_I_S_
from . import T_S_I_V_
from . import T_S_I__0
from . import T_S_I__1
from . import T_S_I__2
from . import T_S_I__3
from . import T_S_I__5
from . import V_D_M_X_
from . import V_O_R_G_
from . import _a_v_a_r
from . import _c_m_a_p
from . import _c_v_t
from . import _f_e_a_t
from . import _f_p_g_m
from . import _f_v_a_r
from . import _g_a_s_p
from . import _g_l_y_f
from . import _g_v_a_r
from . import _h_d_m_x
from . import _h_e_a_d
from . import _h_h_e_a
from . import _h_m_t_x
from . import _k_e_r_n
from . import _l_o_c_a
from . import _l_t_a_g
from . import _m_a_x_p
from . import _m_e_t_a
from . import _n_a_m_e
from . import _p_o_s_t
from . import _p_r_e_p
from . import _s_b_i_x
from . import _t_r_a_k
from . import _v_h_e_a
from . import _v_m_t_x
if __name__ == "__main__":
import doctest, sys
sys.exit(doctest.testmod().failed)
|
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
# DON'T EDIT! This file is generated by MetaTools/buildTableList.py.
def _moduleFinderHint():
"""Dummy function to let modulefinder know what tables may be
dynamically imported. Generated by MetaTools/buildTableList.py.
>>> _moduleFinderHint()
"""
from . import B_A_S_E_
from . import C_B_D_T_
from . import C_B_L_C_
from . import C_F_F_
from . import C_O_L_R_
from . import C_P_A_L_
from . import D_S_I_G_
from . import E_B_D_T_
from . import E_B_L_C_
from . import F_F_T_M_
from . import G_D_E_F_
from . import G_M_A_P_
from . import G_P_K_G_
from . import G_P_O_S_
from . import G_S_U_B_
from . import J_S_T_F_
from . import L_T_S_H_
from . import M_A_T_H_
from . import M_E_T_A_
from . import O_S_2f_2
from . import S_I_N_G_
from . import S_V_G_
from . import T_S_I_B_
from . import T_S_I_D_
from . import T_S_I_J_
from . import T_S_I_P_
from . import T_S_I_S_
from . import T_S_I_V_
from . import T_S_I__0
from . import T_S_I__1
from . import T_S_I__2
from . import T_S_I__3
from . import T_S_I__5
from . import V_D_M_X_
from . import V_O_R_G_
from . import _a_v_a_r
from . import _c_m_a_p
from . import _c_v_t
from . import _f_e_a_t
from . import _f_p_g_m
from . import _f_v_a_r
from . import _g_a_s_p
from . import _g_l_y_f
from . import _g_v_a_r
from . import _h_d_m_x
from . import _h_e_a_d
from . import _h_h_e_a
from . import _h_m_t_x
from . import _k_e_r_n
from . import _l_o_c_a
from . import _l_t_a_g
from . import _m_a_x_p
from . import _m_e_t_a
from . import _n_a_m_e
from . import _p_o_s_t
from . import _p_r_e_p
from . import _s_b_i_x
from . import _v_h_e_a
from . import _v_m_t_x
if __name__ == "__main__":
import doctest, sys
sys.exit(doctest.testmod().failed)
| mit | Python |
b5068d644ffde56f302e9aee5b77e837a1d3e181 | Add some logging to generic error handler. | not-nexus/shelf,kyle-long/pyshelf,not-nexus/shelf,kyle-long/pyshelf | pyshelf/app.py | pyshelf/app.py | import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
| import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
| mit | Python |
7f0853522424ceefc44d57f7c597dee2df6f2fe7 | include the revisions in the generated diffs | alex/pyvcs | pyvcs/utils.py | pyvcs/utils.py | from difflib import unified_diff
from pyvcs.exceptions import FileDoesNotExist
def generate_unified_diff(repository, changed_files, commit1, commit2):
diffs = []
for file_name in changed_files:
try:
file1 = repository.file_contents(file_name, commit1)
except FileDoesNotExist:
file1 = ''
try:
file2 = repository.file_contents(file_name, commit2)
except FileDoesNotExist:
file2 = ''
diffs.append(unified_diff(
file1.splitlines(), file2.splitlines(), fromfile=file_name,
tofile=file_name, fromfiledate=commit1, tofiledate=commit2
))
return '\n'.join('\n'.join(map(lambda s: s.rstrip('\n'), diff)) for diff in diffs)
| from difflib import unified_diff
from pyvcs.exceptions import FileDoesNotExist
def generate_unified_diff(repository, changed_files, commit1, commit2):
diffs = []
for file_name in changed_files:
try:
file1 = repository.file_contents(file_name, commit1)
except FileDoesNotExist:
file1 = ''
try:
file2 = repository.file_contents(file_name, commit2)
except FileDoesNotExist:
file2 = ''
diffs.append(unified_diff(
file1.splitlines(), file2.splitlines(), fromfile=file_name,
tofile=file_name
))
return '\n'.join('\n'.join(map(lambda s: s.rstrip('\n'), diff)) for diff in diffs)
| bsd-3-clause | Python |
e503ef58e801cfbc3ba72ba84bc2150c79a401d3 | Save creatorId as well for geometries | OpenChemistry/mongochemserver | girder/molecules/molecules/models/geometry.py | girder/molecules/molecules/models/geometry.py | from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
| from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
| bsd-3-clause | Python |
05f4c0ae11879afecfc9cb9d155905ad4f3bf5fe | change json to find | awangga/signapp | signapp.py | signapp.py | #!/usr/bin/env python
"""
signapp.py
created by Rolly Maulana Awangga
"""
import config
import pymongo
import urllib
from Crypto.Cipher import AES
class Signapp(object):
def __init__(self):
self.key = config.key
self.iv = config.iv
self.opendb()
def opendb(self):
self.conn = pymongo.MongoClient(config.mongohost, config.mongoport)
self.db = self.conn.signapp
def getAllSign(self,NPM):
self.db.sign
return self.db.sign.find({"NPM":NPM})
def getLastSign(self,NPM):
self.db.sign
return self.db.sign.find_one({"NPM":NPM})
def insertSign(self,NPM,Nilai,rcvdate):
self.db.sign
data = {"NPM":NPM,"Nilai":Nilai,"waktu":rcvdate}
def encodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
cp = obj.encrypt(msg)
return cp.encode("hex")
def decodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
dec = msg.decode("hex")
return obj.decrypt(dec)
def getHtmlBegin(self):
return config.html_begin
def getHtmlEnd(self):
return config.html_end
def getMenu(self,uri):
if uri == config.keyuri:
opsi = "key"
elif uri == config.tokenuri:
opsi = "token"
else:
opsi = "other"
return opsi
def tokenValidation(self,token):
url = config.tokenurl+token
response = urllib.urlopen(url)
html = response.read()
if html.find(config.aud) and html.find(config.iss):
ret = "valid"
else:
ret = "invalid"
return ret
| #!/usr/bin/env python
"""
signapp.py
created by Rolly Maulana Awangga
"""
import config
import pymongo
import urllib
import json
from Crypto.Cipher import AES
class Signapp(object):
def __init__(self):
self.key = config.key
self.iv = config.iv
self.opendb()
def opendb(self):
self.conn = pymongo.MongoClient(config.mongohost, config.mongoport)
self.db = self.conn.signapp
def getAllSign(self,NPM):
self.db.sign
return self.db.sign.find({"NPM":NPM})
def getLastSign(self,NPM):
self.db.sign
return self.db.sign.find_one({"NPM":NPM})
def insertSign(self,NPM,Nilai,rcvdate):
self.db.sign
data = {"NPM":NPM,"Nilai":Nilai,"waktu":rcvdate}
def encodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
cp = obj.encrypt(msg)
return cp.encode("hex")
def decodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
dec = msg.decode("hex")
return obj.decrypt(dec)
def getHtmlBegin(self):
return config.html_begin
def getHtmlEnd(self):
return config.html_end
def getMenu(self,uri):
if uri == config.keyuri:
opsi = "key"
elif uri == config.tokenuri:
opsi = "token"
else:
opsi = "other"
return opsi
def tokenValidation(self,token):
url = config.tokenurl+token
response = urllib.urlopen(url)
html = response.read()
data = json.loads(html)
if (data['aud'] == config.aud) and (data['iss'] == config.iss):
ret = data['email']
else:
ret = ""
return ret
| agpl-3.0 | Python |
b90394696d8668891cef8ae218e9170be5bf86dd | Fix - Include isBinary flag | SArnab/JHU-605.401.82-SupaFly,SArnab/JHU-605.401.82-SupaFly,SArnab/JHU-605.401.82-SupaFly,SArnab/JHU-605.401.82-SupaFly,SArnab/JHU-605.401.82-SupaFly | clue/websocket/protocol.py | clue/websocket/protocol.py | from autobahn.twisted.websocket import WebSocketServerProtocol
from twisted.python import log
from clue import errors
from uuid import uuid1
from txaio import create_future
class ClueServerProtocol(WebSocketServerProtocol):
def __init__(self):
self.id = uuid1()
self.is_closed = create_future()
'''
Called when a connection is being established between the client
and server. Each instance of this class is a new connection.
'''
def onConnect(self, request):
pass
'''
Called when a connection is opened between the client
and server. This means the handshake has completed.
'''
def onOpen(self):
log.msg("New Connection", self.id)
# If we fail to track the connection, close it.
try:
self.factory.track_connection(self)
except Exception:
log.err("Failed to track connection")
self.sendClose(errors.DUPLICATE_CONNECTION_ID)
'''
Called when a message has been received on this connection.
'''
def onMessage(self, payload, isBinary):
# Drop binary messages
if isBinary: return
# Decode to a utf8 string
message = payload.decode("utf-8")
log.msg("Received message [%s]" % message)
if (message == "PING"):
# PING/PONG
self.sendMessage(u"PONG", False)
else:
# Echo
self.sendMessage(payload, isBinary)
'''
Called when this connection is closed.
'''
def onClose(self, wasClean, code, reason):
log.msg("Connection [%s] closed [wasClean: %d]" % (self.id, wasClean))
self.factory.untrack_connection(self) | from autobahn.twisted.websocket import WebSocketServerProtocol
from twisted.python import log
from clue import errors
from uuid import uuid1
from txaio import create_future
class ClueServerProtocol(WebSocketServerProtocol):
def __init__(self):
self.id = uuid1()
self.is_closed = create_future()
'''
Called when a connection is being established between the client
and server. Each instance of this class is a new connection.
'''
def onConnect(self, request):
pass
'''
Called when a connection is opened between the client
and server. This means the handshake has completed.
'''
def onOpen(self):
log.msg("New Connection", self.id)
# If we fail to track the connection, close it.
try:
self.factory.track_connection(self)
except Exception:
log.err("Failed to track connection")
self.sendClose(errors.DUPLICATE_CONNECTION_ID)
'''
Called when a message has been received on this connection.
'''
def onMessage(self, payload, isBinary):
# Drop binary messages
if isBinary: return
# Decode to a utf8 string
message = payload.decode("utf-8")
log.msg("Received message [%s]" % message)
if (message == "PING"):
self.sendMessage("PONG")
else:
self.sendMessage(message)
'''
Called when this connection is closed.
'''
def onClose(self, wasClean, code, reason):
log.msg("Connection [%s] closed [wasClean: %d]" % (self.id, wasClean))
self.factory.untrack_connection(self) | mit | Python |
00ffe6505273ca2717d4df8f2b947e1a577829bd | Remove extra annotations | nlpub/mnogoznal,nlpub/mnogoznal,nlpub/mnogoznal | watset_wsd.py | watset_wsd.py | #!/usr/bin/env python
from flask import Flask, render_template, url_for, redirect, request
from flask_misaka import Misaka
from wsd.models import RequestWSD
app = Flask(__name__)
Misaka(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/wsd')
def wsd_redirect():
return redirect(url_for('.index'), code=302)
@app.route('/wsd', methods=['POST'])
def wsd():
text_box_value = request.form["input-text-name"]
result = RequestWSD.wsd_func(text_box_value)
return render_template('wsd.html', output=result)
@app.route('/about')
def about():
return render_template('about.html')
if __name__ == '__main__':
app.run()
| #!/usr/bin/env python
from flask import Flask, render_template, url_for, redirect, request
from flask_misaka import Misaka
from wsd.models import RequestWSD
app = Flask(__name__)
Misaka(app)
@app.route('/', methods=['GET'])
def index():
return render_template('index.html')
@app.route('/wsd', methods=['GET'])
def wsd_redirect():
return redirect(url_for('.index'), code=302)
@app.route('/wsd', methods=['POST'])
def wsd():
text_box_value = request.form["input-text-name"]
result = RequestWSD.wsd_func(text_box_value)
return render_template('wsd.html', output=result)
@app.route('/about', methods=['GET'])
def about():
return render_template('about.html')
if __name__ == '__main__':
app.run() | mit | Python |
7e7bdd474cde49757bff4357e76ae9f72bbefac1 | increase position zoom of the camera flyer | openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro,openhumanoids/oh-distro | software/ddapp/src/python/ddapp/cameracontrol.py | software/ddapp/src/python/ddapp/cameracontrol.py | import vtk
import time
import numpy as np
from ddapp.timercallback import TimerCallback
class OrbitController(TimerCallback):
def __init__(self, view):
TimerCallback.__init__(self)
self.view = view
self.orbitTime = 20.0
def tick(self):
speed = 360.0 / self.orbitTime
degrees = self.elapsed * speed
self.view.camera().Azimuth(degrees)
self.view.render()
class Flyer(TimerCallback):
def __init__(self, view):
TimerCallback.__init__(self)
self.view = view
self.flyTime = 0.5
self.startTime = 0.0
self.maintainViewDirection = False
self.positionZoom = 0.7
def getCameraCopy(self):
camera = vtk.vtkCamera()
camera.DeepCopy(self.view.camera())
return camera
def zoomTo(self, newFocalPoint, newPosition=None):
self.interp = vtk.vtkCameraInterpolator()
self.interp.AddCamera(0.0, self.getCameraCopy())
c = self.getCameraCopy()
newFocalPoint = np.array(newFocalPoint)
oldFocalPoint = np.array(c.GetFocalPoint())
oldPosition = np.array(c.GetPosition())
if newPosition is None:
if self.maintainViewDirection:
newPosition = oldPosition + (newFocalPoint - oldFocalPoint)
else:
newPosition = oldPosition
newPosition += self.positionZoom*(newFocalPoint - newPosition)
#newPosition = newFocalPoint - self.positionZoom*(newFocalPoint - newPosition)
c.SetFocalPoint(newFocalPoint)
c.SetPosition(newPosition)
c.SetViewUp([0.0, 0.0, 1.0])
self.interp.AddCamera(1.0, c)
self.startTime = time.time()
self.start()
def tick(self):
elapsed = time.time() - self.startTime
t = elapsed / float(self.flyTime)
self.interp.InterpolateCamera(t, self.view.camera())
self.view.render()
if t > 1.0:
return False
| import vtk
import time
import numpy as np
from ddapp.timercallback import TimerCallback
class OrbitController(TimerCallback):
def __init__(self, view):
TimerCallback.__init__(self)
self.view = view
self.orbitTime = 20.0
def tick(self):
speed = 360.0 / self.orbitTime
degrees = self.elapsed * speed
self.view.camera().Azimuth(degrees)
self.view.render()
class Flyer(TimerCallback):
def __init__(self, view):
TimerCallback.__init__(self)
self.view = view
self.flyTime = 0.5
self.startTime = 0.0
self.maintainViewDirection = False
self.positionZoom = 0.2
def getCameraCopy(self):
camera = vtk.vtkCamera()
camera.DeepCopy(self.view.camera())
return camera
def zoomTo(self, newFocalPoint, newPosition=None):
self.interp = vtk.vtkCameraInterpolator()
self.interp.AddCamera(0.0, self.getCameraCopy())
c = self.getCameraCopy()
newFocalPoint = np.array(newFocalPoint)
oldFocalPoint = np.array(c.GetFocalPoint())
oldPosition = np.array(c.GetPosition())
if newPosition is None:
if self.maintainViewDirection:
newPosition = oldPosition + (newFocalPoint - oldFocalPoint)
else:
newPosition = oldPosition
newPosition += self.positionZoom*(newFocalPoint - newPosition)
c.SetFocalPoint(newFocalPoint)
c.SetPosition(newPosition)
c.SetViewUp([0.0, 0.0, 1.0])
self.interp.AddCamera(1.0, c)
self.startTime = time.time()
self.start()
def tick(self):
elapsed = time.time() - self.startTime
t = elapsed / float(self.flyTime)
self.interp.InterpolateCamera(t, self.view.camera())
self.view.render()
if t > 1.0:
return False
| bsd-3-clause | Python |
63736a2958aae03749560dd3109fb4fea3b3ca7a | Switch calendar labels to be in Japanese | kfdm/wanikani,kfdm/wanikani | wanikani/django.py | wanikani/django.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
if counts[Radical] and counts[Kanji]:
event.add('summary', u'部首: {0} 漢字: {1}'.format(
counts[Radical], counts[Kanji]
))
elif counts[Radical]:
event.add('summary', u'部首: {0}'.format(
counts[Radical]
))
else:
event.add('summary', u'漢字: {0}'.format(
counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
|
from __future__ import absolute_import
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
| mit | Python |
1c59f6522072a3a1846a9e8124a359d0651b00d7 | Change dependency as well | rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo | dojo/db_migrations/0019_system_settings_allow_anonymous_survey_repsonse.py | dojo/db_migrations/0019_system_settings_allow_anonymous_survey_repsonse.py | # Generated by Django 2.2.1 on 2019-08-21 19:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dojo', '0018_sonarqube_api_integration'),
]
operations = [
migrations.AddField(
model_name='system_settings',
name='allow_anonymous_survey_repsonse',
field=models.BooleanField(default=False, help_text='Enable anyone with a link to the survey to answer a survey', verbose_name='Allow Anonymous Survey Responses'),
),
]
| # Generated by Django 2.2.1 on 2019-08-21 19:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dojo', '0014_jira_conf_resolution_mappings'),
]
operations = [
migrations.AddField(
model_name='system_settings',
name='allow_anonymous_survey_repsonse',
field=models.BooleanField(default=False, help_text='Enable anyone with a link to the survey to answer a survey', verbose_name='Allow Anonymous Survey Responses'),
),
]
| bsd-3-clause | Python |
c77fca2f41570a558719e0a9c1ce3efd1a5b206c | Add tests for patient filtering | darkfeline/drchrono-birthday,darkfeline/drchrono-birthday | drchrono_birthday/tests.py | drchrono_birthday/tests.py | # Copyright (C) 2016 Allen Li
#
# This file is part of drchrono Birthday.
#
# drchrono Birthday is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# drchrono Birthday is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with drchrono Birthday. If not, see <http://www.gnu.org/licenses/>.
import datetime
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils import timezone
from drchrono_birthday import models
from drchrono_birthday.management.commands.sendbirthdaymessages \
import Command
class GetPatientsTest(TestCase):
def setUp(self):
bob = User.objects.create_user('bob', 'bob@example.com', 'bob')
bob.save()
doctor = models.Doctor(user=bob, name='Bob',
last_updated=timezone.now())
doctor.save()
self.doctor = doctor
def make_patient(self, name, dob):
patient = models.Patient(doctor=self.doctor, name=name,
date_of_birth=dob)
patient.save()
def test_day(self):
"""Test when range is one day long."""
self.make_patient('A', datetime.date(1990, 12, 31))
self.make_patient('B', datetime.date(1990, 1, 1))
self.make_patient('C', datetime.date(1990, 1, 2))
patients = Command._get_patients(datetime.date(2000, 1, 1),
datetime.date(2000, 1, 1))
patients = [x.name for x in patients]
self.assertNotIn('A', patients)
self.assertIn('B', patients)
self.assertNotIn('C', patients)
def test_week_in_month(self):
"""Test when range is one week long within a month."""
self.make_patient('A', datetime.date(1990, 6, 5))
self.make_patient('B', datetime.date(1990, 6, 6))
self.make_patient('C', datetime.date(1990, 6, 12))
self.make_patient('D', datetime.date(1990, 6, 13))
start = datetime.date(2000, 6, 6)
end = start + datetime.timedelta(days=6) # inclusive
patients = Command._get_patients(start, end)
patients = [x.name for x in patients]
self.assertNotIn('A', patients)
self.assertIn('B', patients)
self.assertIn('C', patients)
self.assertNotIn('D', patients)
def test_week_split(self):
"""Test when range is one week long split across months."""
self.make_patient('A', datetime.date(1990, 6, 27))
self.make_patient('B', datetime.date(1990, 6, 28))
self.make_patient('C', datetime.date(1990, 6, 30))
self.make_patient('D', datetime.date(1990, 7, 1))
self.make_patient('E', datetime.date(1990, 7, 4))
self.make_patient('F', datetime.date(1990, 7, 5))
start = datetime.date(2000, 6, 28)
end = start + datetime.timedelta(days=6) # inclusive
patients = Command._get_patients(start, end)
patients = [x.name for x in patients]
self.assertNotIn('A', patients)
self.assertIn('B', patients)
self.assertIn('C', patients)
self.assertIn('D', patients)
self.assertIn('E', patients)
self.assertNotIn('F', patients)
| from django.test import TestCase
# Create your tests here.
| agpl-3.0 | Python |
884208477f0556cbe05c2ed965e6a42f57969fd9 | Remove unused import | yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core | yunity/groups/models.py | yunity/groups/models.py | from django.db.models import TextField, ManyToManyField, CharField
from yunity.base.base_models import BaseModel, LocationModel
from config import settings
class Group(BaseModel, LocationModel):
name = CharField(max_length=settings.NAME_MAX_LENGTH)
description = TextField(blank=True)
members = ManyToManyField(settings.AUTH_USER_MODEL, related_name='groups')
password = CharField(max_length=255, blank=True)
| from django.db.models import Model
from django.db.models import TextField, ManyToManyField, CharField
from yunity.base.base_models import BaseModel, LocationModel
from config import settings
class Group(BaseModel, LocationModel):
name = CharField(max_length=settings.NAME_MAX_LENGTH)
description = TextField(blank=True)
members = ManyToManyField(settings.AUTH_USER_MODEL, related_name='groups')
password = CharField(max_length=255, blank=True)
| agpl-3.0 | Python |
ab8ea5329bc4566b877e2f2991f096909842f9d9 | Bump version to 0.1.4 | totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator | orchestrator/__init__.py | orchestrator/__init__.py | import logging
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL
__version__ = '0.1.4'
__author__ = 'sukrit'
logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=LOG_ROOT_LEVEL)
logging.getLogger('boto').setLevel(logging.INFO)
| import logging
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL
__version__ = '0.1.0'
__author__ = 'sukrit'
logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=LOG_ROOT_LEVEL)
logging.getLogger('boto').setLevel(logging.INFO)
| mit | Python |
3429293244359b5635b7d060caf527a36850f3a2 | Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library | totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator | orchestrator/__init__.py | orchestrator/__init__.py | from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
| from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
| mit | Python |
657ec7f2239cc0a48cb656cfd01ca03f854e0abc | bump version | vmalloc/pydeploy | pydeploy/__version__.py | pydeploy/__version__.py | __version__ = "0.2.6"
| __version__ = "0.2.5"
| bsd-3-clause | Python |
e8b404b4525983df8b790d18afcca4cc79430133 | Make python clean up after itself by removing its /tmp subdir. | google/swiftshader,google/swiftshader,bkaradzic/SwiftShader,bkaradzic/SwiftShader,bkaradzic/SwiftShader,google/swiftshader,bkaradzic/SwiftShader,bkaradzic/SwiftShader | pydir/build-pnacl-ir.py | pydir/build-pnacl-ir.py | #!/usr/bin/env python2
import argparse
import errno
import os
import shutil
import tempfile
from utils import shellcmd
from utils import FindBaseNaCl
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('cfile', nargs='+', type=str,
help='C file(s) to convert')
argparser.add_argument('--dir', nargs='?', type=str, default='.',
help='Output directory. Default "%(default)s".')
argparser.add_argument('--disable-verify', action='store_true')
args = argparser.parse_args()
nacl_root = FindBaseNaCl()
# Prepend bin to $PATH.
os.environ['PATH'] = (
nacl_root + '/toolchain/linux_x86/pnacl_newlib/bin' + os.pathsep +
os.pathsep + os.environ['PATH'])
try:
tempdir = tempfile.mkdtemp()
for cname in args.cfile:
basename = os.path.splitext(cname)[0]
llname = os.path.join(tempdir, basename + '.ll')
pnaclname = basename + '.pnacl.ll'
pnaclname = os.path.join(args.dir, pnaclname)
shellcmd('pnacl-clang -O2 -c {0} -o {1}'.format(cname, llname))
shellcmd('pnacl-opt ' +
'-pnacl-abi-simplify-preopt -pnacl-abi-simplify-postopt' +
('' if args.disable_verify else
' -verify-pnaclabi-module -verify-pnaclabi-functions') +
' -pnaclabi-allow-debug-metadata'
' {0} -S -o {1}'.format(llname, pnaclname))
finally:
try:
shutil.rmtree(tempdir)
except OSError as exc:
if exc.errno != errno.ENOENT: # ENOENT - no such file or directory
raise # re-raise exception
| #!/usr/bin/env python2
import argparse
import os
import tempfile
from utils import shellcmd
from utils import FindBaseNaCl
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument('cfile', nargs='+', type=str,
help='C file(s) to convert')
argparser.add_argument('--dir', nargs='?', type=str, default='.',
help='Output directory. Default "%(default)s".')
argparser.add_argument('--disable-verify', action='store_true')
args = argparser.parse_args()
nacl_root = FindBaseNaCl()
# Prepend bin to $PATH.
os.environ['PATH'] = (
nacl_root + '/toolchain/linux_x86/pnacl_newlib/bin' + os.pathsep +
os.pathsep + os.environ['PATH'])
tempdir = tempfile.mkdtemp()
for cname in args.cfile:
basename = os.path.splitext(cname)[0]
llname = os.path.join(tempdir, basename + '.ll')
pnaclname = basename + '.pnacl.ll'
pnaclname = os.path.join(args.dir, pnaclname)
shellcmd('pnacl-clang -O2 -c {0} -o {1}'.format(cname, llname))
shellcmd('pnacl-opt ' +
'-pnacl-abi-simplify-preopt -pnacl-abi-simplify-postopt' +
('' if args.disable_verify else
' -verify-pnaclabi-module -verify-pnaclabi-functions') +
' -pnaclabi-allow-debug-metadata'
' {0} -S -o {1}'.format(llname, pnaclname))
| apache-2.0 | Python |
41941ce6ff0e8b2ac917f14c7ffddfa325ebb008 | Bump version | kinverarity1/pyexperiment,kinverarity1/pyexperiment,kinverarity1/pyexperiment,duerrp/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,DeercoderResearch/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,shaunstanislaus/pyexperiment,kinverarity1/pyexperiment,DeercoderResearch/pyexperiment,DeercoderResearch/pyexperiment,DeercoderResearch/pyexperiment | pyexperiment/version.py | pyexperiment/version.py | """Defines the version of pyexperiment
"""
__version__ = '0.2.10'
| """Defines the version of pyexperiment
"""
__version__ = '0.2.9'
| mit | Python |
bcb887b24e19905655b3fe54e58c71319673ed46 | Update version | sendpulse/sendpulse-rest-api-python | pysendpulse/__init__.py | pysendpulse/__init__.py | __author__ = 'Maksym Ustymenko'
__author_email__ = 'tech@sendpulse.com'
__copyright__ = 'Copyright 2017, SendPulse'
__credits__ = ['Maksym Ustymenko', ]
__version__ = '0.0.9'
| __author__ = 'Maksym Ustymenko'
__author_email__ = 'tech@sendpulse.com'
__copyright__ = 'Copyright 2017, SendPulse'
__credits__ = ['Maksym Ustymenko', ]
__version__ = '0.0.8'
| apache-2.0 | Python |
f28459497dbfa0535323cce52a4ea4dc2daa2bf9 | fix pylint in setup | dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost | python-package/setup.py | python-package/setup.py | # pylint: disable=invalid-name, exec-used
"""Setup xgboost package."""
from __future__ import absolute_import
import sys
from setuptools import setup, find_packages
import subprocess
sys.path.insert(0, '.')
import os
#build on the fly if install in pip
#otherwise, use build.sh in the parent directory
if 'pip' in __file__:
if not os.name == 'nt': #if not windows
build_sh = subprocess.Popen(['sh', 'xgboost/build-python.sh'])
build_sh.wait()
output = build_sh.communicate()
print(output)
CURRENT_DIR = os.path.dirname(__file__)
# We can not import `xgboost.libpath` in setup.py directly since xgboost/__init__.py
# import `xgboost.core` and finally will import `numpy` and `scipy` which are setup
# `install_requires`. That's why we're using `exec` here.
libpath_py = os.path.join(CURRENT_DIR, 'xgboost/libpath.py')
libpath = {'__file__': libpath_py}
exec(compile(open(libpath_py, "rb").read(), libpath_py, 'exec'), libpath, libpath)
LIB_PATH = libpath['find_lib_path']()
#print LIB_PATH
#to deploy to pip, please use
#make pythonpack
#python setup.py register sdist upload
#and be sure to test it firstly using "python setup.py register sdist upload -r pypitest"
setup(name='xgboost',
version=open(os.path.join(CURRENT_DIR, 'xgboost/VERSION')).read().strip(),
#version='0.4a13',
description=open(os.path.join(CURRENT_DIR, 'README.md')).read(),
install_requires=[
'numpy',
'scipy',
],
maintainer='Hongliang Liu',
maintainer_email='phunter.lau@gmail.com',
zip_safe=False,
packages=find_packages(),
#don't need this and don't use this, give everything to MANIFEST.in
#package_dir = {'':'xgboost'},
#package_data = {'': ['*.txt','*.md','*.sh'],
# }
#this will use MANIFEST.in during install where we specify additional files,
#this is the golden line
include_package_data=True,
data_files=[('xgboost', LIB_PATH)],
url='https://github.com/dmlc/xgboost')
| # pylint: disable=invalid-name
"""Setup xgboost package."""
from __future__ import absolute_import
import sys
from setuptools import setup, find_packages
import subprocess
sys.path.insert(0, '.')
import os
#build on the fly if install in pip
#otherwise, use build.sh in the parent directory
if 'pip' in __file__:
if not os.name == 'nt': #if not windows
build_sh = subprocess.Popen(['sh', 'xgboost/build-python.sh'])
build_sh.wait()
output = build_sh.communicate()
print(output)
CURRENT_DIR = os.path.dirname(__file__)
# We can not import `xgboost.libpath` in setup.py directly since xgboost/__init__.py
# import `xgboost.core` and finally will import `numpy` and `scipy` which are setup
# `install_requires`. That's why we're using `exec` here.
libpath_py = os.path.join(CURRENT_DIR, 'xgboost/libpath.py')
libpath = {'__file__': libpath_py}
exec(compile(open(libpath_py, "rb").read(), libpath_py, 'exec'), libpath, libpath)
LIB_PATH = libpath['find_lib_path']()
#print LIB_PATH
#to deploy to pip, please use
#make pythonpack
#python setup.py register sdist upload
#and be sure to test it firstly using "python setup.py register sdist upload -r pypitest"
setup(name='xgboost',
version=open(os.path.join(CURRENT_DIR, 'xgboost/VERSION')).read().strip(),
#version='0.4a13',
description=open(os.path.join(CURRENT_DIR, 'README.md')).read(),
install_requires=[
'numpy',
'scipy',
],
maintainer='Hongliang Liu',
maintainer_email='phunter.lau@gmail.com',
zip_safe=False,
packages=find_packages(),
#don't need this and don't use this, give everything to MANIFEST.in
#package_dir = {'':'xgboost'},
#package_data = {'': ['*.txt','*.md','*.sh'],
# }
#this will use MANIFEST.in during install where we specify additional files,
#this is the golden line
include_package_data=True,
data_files=[('xgboost', LIB_PATH)],
url='https://github.com/dmlc/xgboost')
| apache-2.0 | Python |
6cb5c6f14c66b5f21214cfc4aa3e08e1f113c856 | add ds9 fits parser | SAOImageDS9/tkblt,SAOImageDS9/tkblt,SAOImageDS9/tkblt | ds9/parsers/fitsparser.tac | ds9/parsers/fitsparser.tac | %{
%}
#include string.tin
%start command
%token MASK_
%token NEW_
%token SLICE_
%%
# XPA/SAMP only
command : fits
;
fits: new filename {FitsCmdLoad $2 {} {}}
| new MASK_ filename {FitsCmdLoad $3 mask {}}
| new SLICE_ filename {FitsCmdLoad $3 {} slice}
| new MASK_ SLICE_ filename {FitsCmdLoad $4 mask slice}
;
filename : {set _ {}}
| STRING_ {set _ $1}
;
new :
| NEW_ {CreateFrame}
;
%%
proc fits::yyerror {msg} {
variable yycnt
variable yy_current_buffer
variable index_
ParserError $msg $yycnt $yy_current_buffer $index_
}
| %{
%}
#include string.tin
%start command
%token MASK_
%token NEW_
%token SLICE_
%%
# XPA/SAMP only
command : fits
;
fits: NEW_ {CreateFrame; FitsCmdLoad {} {} {}}
| new STRING_ {FitsCmdLoad $2 {} {}}
| new MASK_ STRING_ {FitsCmdLoad $3 mask {}}
| new SLICE_ STRING_ {FitsCmdLoad $3 {} slice}
| new MASK_ SLICE_ STRING_ {FitsCmdLoad $4 mask slice}
;
new :
| NEW_ {CreateFrame}
;
%%
proc fits::yyerror {msg} {
variable yycnt
variable yy_current_buffer
variable index_
ParserError $msg $yycnt $yy_current_buffer $index_
}
| mit | Python |
7d1527b4d6e874ce06ed2bc329c3c0f5555cd2a4 | revise parallel model file | google/jax,tensorflow/probability,google/jax,google/jax,tensorflow/probability,google/jax | pjit_model.py | pjit_model.py | from functools import partial
import numpy.random as npr
import jax.numpy as np
from jax import lax
from jax import grad, pjit, papply
### set up some synthetic data
rng = npr.RandomState(0)
R = lambda *shape: rng.randn(*shape).astype("float32")
layer_sizes = [3, 2]
params = [(R(m, n), R(n)) for m, n in zip(layer_sizes[:-1], layer_sizes[1:])]
input_batch = R(5, 3)
target_batch = R(5, 2)
batch = (input_batch, target_batch)
### standard definition
def predict(params, inputs):
for W, b in params:
outputs = np.dot(inputs, W) + b
inputs = np.tanh(outputs)
return outputs
def loss(params, batch):
inputs, targets = batch
preds = predict(params, inputs)
perex_loss = -np.mean(preds * targets, axis=1)
return np.sum(perex_loss)
print 'single-machine'
print loss(params, batch)
print grad(loss)(params, batch)
print
### writing an spmd program manually
def spmd_loss(params, batch):
inputs, targets = batch
preds = predict(params, inputs)
perex_loss = -np.mean(preds * targets)
return lax.psum(perex_loss, axis_name='i') # 'allreduce-sum' on hidden axis
# compiling the grad function for parallel execution
gradfun = pjit(grad(spmd_loss), axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, compile-of-grad version'
print gradfun(params, batch) # parallel execution, fwd and bwd fused together
print
# or, grad-of-compile version
spmd_loss = pjit(spmd_loss, axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, grad-of-compile version'
print spmd_loss(params, batch) # parallel execution
print grad(spmd_loss)(params, batch) # parallel execution, fwd and bwd separate
print
# or get both with compile-of-grad-of-compile
gradfun = pjit(grad(spmd_loss), axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, compile-of-grad-of-compile version'
print spmd_loss(params, batch) # parallel execution
print grad(spmd_loss)(params, batch) # parallel execution, fwd and bwd fused
print
### getting an spmd program from the standard definition with papply
# TODO papply!
# spmd_loss, axis_name = papply(loss, axis_size=5, in_axes=(None, 0))
# spmd_loss = pjit(spmd_loss, axis_name=axis_name, in_axes=(None, 0), out_axes=None)
# print spmd_loss(params, batch) # parallel execution
# print
| from functools import partial
import numpy.random as npr
import jax.numpy as np
from jax import lax
from jax import pjit, grad
### set up some synthetic data
rng = npr.RandomState(0)
R = lambda *shape: rng.randn(*shape).astype("float32")
layer_sizes = [3, 2]
params = [(R(m, n), R(n)) for m, n in zip(layer_sizes[:-1], layer_sizes[1:])]
input_batch = R(5, 3)
target_batch = R(5, 2)
batch = (input_batch, target_batch)
### standard definition
def predict(params, inputs):
for W, b in params:
outputs = np.dot(inputs, W) + b
inputs = np.tanh(outputs)
return outputs
def loss(params, batch):
inputs, targets = batch
preds = predict(params, inputs)
perex_loss = -np.mean(preds * targets, axis=1)
return np.sum(perex_loss)
print 'single-machine'
print loss(params, batch)
print grad(loss)(params, batch)
print
### writing an spmd program manually
def predict(params, inputs):
for W, b in params:
outputs = np.dot(inputs, W) + b
inputs = np.tanh(outputs)
return outputs
def spmd_loss(params, batch):
inputs, targets = batch
preds = predict(params, inputs)
perex_loss = -np.mean(preds * targets)
return lax.psum(perex_loss, axis_name='i') # 'allreduce-sum' on hidden axis
# compiling the grad function for parallel execution
gradfun = pjit(grad(spmd_loss), axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, compile-of-grad version'
print gradfun(params, batch) # parallel execution, fwd and bwd fused together
print
# or, grad-of-compile version
spmd_loss = pjit(spmd_loss, axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, grad-of-compile version'
print spmd_loss(params, batch) # parallel execution
print grad(spmd_loss)(params, batch) # parallel execution, fwd and bwd separate
print
# or get both with compile-of-grad-of-compile
gradfun = pjit(grad(spmd_loss), axis_name='i', in_axes=(None, 0), out_axes=None)
print 'manual spmd program, compile-of-grad-of-compile version'
print spmd_loss(params, batch) # parallel execution
print grad(spmd_loss)(params, batch) # parallel execution, fwd and bwd fused
print
### getting an spmd program automagically with papply
# TODO
| apache-2.0 | Python |
10b1e000240cd9670e502a2a66c5ab85734c2152 | Remove host to ip | liverliu/netmusichacker,liverliu/netmusichacker | python/hacker/config.py | python/hacker/config.py | __author__ = 'shijianliu'
host = 'http://223.252.199.7'
server_host='127.0.0.1'
server_port=80
| __author__ = 'shijianliu'
host = 'http://music.liverliu.com'
server_host='127.0.0.1'
server_port=80
| apache-2.0 | Python |
3b237d7ef2c418cfcf361f8d9ca32806a265f823 | Rename class | rdmorganiser/rdmo,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug | rdmo/core/swagger.py | rdmo/core/swagger.py | from django.conf.urls import include, url
from rdmo.accounts.urls import accounts_patterns_api
from rdmo.conditions.urls import conditions_patterns_api
from rdmo.domain.urls import domain_patterns_api
from rdmo.options.urls import options_patterns_api
from rdmo.projects.urls import projects_patterns_api
from rdmo.questions.urls import questions_patterns_api
from rdmo.tasks.urls import tasks_patterns_api
from rdmo.views.urls import views_patterns_api
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator
from rest_framework.views import APIView
from rest_framework_swagger import renderers
urlpatterns = [
url(r'^api/v1/accounts/', include(accounts_patterns_api, namespace='api-v1-accounts')),
url(r'^api/v1/conditions/', include(conditions_patterns_api, namespace='api-v1-conditions')),
url(r'^api/v1/domain/', include(domain_patterns_api, namespace='api-v1-domain')),
url(r'^api/v1/options/', include(options_patterns_api, namespace='api-v1-options')),
url(r'^api/v1/projects/', include(projects_patterns_api, namespace='api-v1-projects')),
url(r'^api/v1/questions/', include(questions_patterns_api, namespace='api-v1-questions')),
url(r'^api/v1/tasks/', include(tasks_patterns_api, namespace='api-v1-tasks')),
url(r'^api/v1/views/', include(views_patterns_api, namespace='api-v1-views')),
]
class swagger_schema_view(APIView):
permission_classes = [AllowAny]
renderer_classes = [
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
def get(self, request):
generator = SchemaGenerator(
title="RDMO API",
patterns=urlpatterns,
)
schema = generator.get_schema(request=request)
return Response(schema)
| from django.conf.urls import include, url
from rdmo.accounts.urls import accounts_patterns_api
from rdmo.conditions.urls import conditions_patterns_api
from rdmo.domain.urls import domain_patterns_api
from rdmo.options.urls import options_patterns_api
from rdmo.projects.urls import projects_patterns_api
from rdmo.questions.urls import questions_patterns_api
from rdmo.tasks.urls import tasks_patterns_api
from rdmo.views.urls import views_patterns_api
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator
from rest_framework.views import APIView
from rest_framework_swagger import renderers
urlpatterns = [
url(r'^api/v1/accounts/', include(accounts_patterns_api, namespace='api-v1-accounts')),
url(r'^api/v1/conditions/', include(conditions_patterns_api, namespace='api-v1-conditions')),
url(r'^api/v1/domain/', include(domain_patterns_api, namespace='api-v1-domain')),
url(r'^api/v1/options/', include(options_patterns_api, namespace='api-v1-options')),
url(r'^api/v1/projects/', include(projects_patterns_api, namespace='api-v1-projects')),
url(r'^api/v1/questions/', include(questions_patterns_api, namespace='api-v1-questions')),
url(r'^api/v1/tasks/', include(tasks_patterns_api, namespace='api-v1-tasks')),
url(r'^api/v1/views/', include(views_patterns_api, namespace='api-v1-views')),
]
class SwaggerSchemaView(APIView):
permission_classes = [AllowAny]
renderer_classes = [
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
def get(self, request):
generator = SchemaGenerator(
title="RDMO API",
patterns=urlpatterns,
)
schema = generator.get_schema(request=request)
return Response(schema)
| apache-2.0 | Python |
966fe99da944ee2864db8a35705c4e021bc29a98 | Add median pivot point for quick sort | akras14/cs-101,akras14/cs-101,akras14/cs-101,akras14/cs-101 | quick-sort/quicksort.py | quick-sort/quicksort.py | """ Quick Sort Implementation"""
def sort(arr, l=None, r=None):
"""Sort Array"""
# Init l and r, if not provided
if l is None:
l = 0
if r is None:
r = len(arr) - 1
# Check for Base case
if l >= r: # Length equal 1
return
p = getP(arr, l, r)
# Swap p with first element, so left to right partition would work for all pivot points
swap(arr, p, l)
p = partition(arr, l, r)
sort(arr, l, p - 1)
sort(arr, p + 1, r)
def partition(arr, l, r):
"""
Partition array around split point p
All values to the left of p are less than value at p
All values to the right of p are greater than value at p
arr - Array to partition
l - starting element
r - right most element
"""
p = arr[l]
i = l + 1
for j in range(l+1, r+1):
if arr[j] < p:
swap(arr, i, j)
i = i + 1
swap(arr, l, i-1)
return i - 1
def swap(arr, i, j):
"""Swap two values in an array"""
temp = arr[j]
arr[j] = arr[i]
arr[i] = temp
def getP(arr, l, r):
return l # First element
# return r # Last element
# return medianPivotPoint(arr, l, r)
def medianPivotPoint(arr, l, r):
arrLength = len(arr)
if arrLength < 3:
return l
left = arr[l]
right = arr[r]
if arrLength % 2 == 0: # Even
middleIndex = arrLength / 2 - 1
else: # Odd
middleIndex = arrLength / 2
middle = arr[middleIndex]
maxVal = max(left, middle, right)
minVal = min(left, middle, right)
if left != maxVal and left != minVal:
return l
elif right != maxVal and right != minVal:
return r
elif middle != maxVal and middle != maxVal:
return middleIndex
else:
raise ValueError
| """ Quick Sort Implementation"""
def sort(arr, l=None, r=None):
"""Sort Array"""
# Init l and r, if not provided
if l is None:
l = 0
if r is None:
r = len(arr) - 1
# Check for Base case
if l >= r: # Length equal 1
return
p = getP(arr, l, r)
# Swap p with first element, so left to right partition would work for all pivot points
swap(arr, p, l)
p = partition(arr, l, r)
sort(arr, l, p - 1)
sort(arr, p + 1, r)
def partition(arr, l, r):
"""
Partition array around split point p
All values to the left of p are less than value at p
All values to the right of p are greater than value at p
arr - Array to partition
l - starting element
r - right most element
"""
p = arr[l]
i = l + 1
for j in range(l+1, r+1):
if arr[j] < p:
swap(arr, i, j)
i = i + 1
swap(arr, l, i-1)
return i - 1
def swap(arr, i, j):
"""Swap two values in an array"""
temp = arr[j]
arr[j] = arr[i]
arr[i] = temp
def getP(arr, l, r):
return l # First element
# return r # Last element
| mit | Python |
9aabda0e489639a753d6589e9d0f7923505643cb | Update parseSecure.py | m-sinclair/PythonScript | parseSecure.py | parseSecure.py | #!/usr/bin/python
import time
import re
import subprocess
import os
import iptc
def follow(thefile):
thefile.seek(0,2)
while True:
line = thefile.readline()
if not line:
time.sleep(0.1)
continue
yield line
if __name__ == '__main__':
logfile = open("/var/log/secure","r")
loglines = follow(logfile)
ipNew = 999
ipOld = 999
count = 0
for line in loglines:
if re.search(r"Failed password", line):
ipNew = re.findall( r'[0-9]+(?:\.[0-9]+){3}', line)
if ipNew == ipOld:
count += 1
else:
count = 0
ipOld = ipNew
if count > 4:
count = 0
ipString = str(ipNew)
rule = iptc.Rule()
rule.protocol = "tcp"
match = iptc.Match(rule, "tcp")
match.dport = "22"
ule.add_match(match)
match = iptc.Match(rule, "ip")
match.src = ipString
rule.add_match(match)
rule.target = iptc.Target(rule, "DROP")
chain = iptc.Chain(iptc.Table.(iptc.Table.FILTER), "INPUT")
chain.insert_rule(rule)
| #!/usr/bin/python
import time
import re
import subprocess
import os
def follow(thefile):
thefile.seek(0,2)
while True:
line = thefile.readline()
if not line:
time.sleep(0.1)
continue
yield line
if __name__ == '__main__':
logfile = open("/var/log/secure","r")
loglines = follow(logfile)
ipNew = 999
ipOld = 999
count = 0
for line in loglines:
if re.search(r"Failed password", line):
ipNew = re.findall( r'[0-9]+(?:\.[0-9]+){3}', line)
if ipNew == ipOld:
count += 1
else:
count = 0
ipOld = ipNew
if count > 4:
count = 0
ipString = str(ipNew)
subprocess.call(["iptables -A INPUT -s ", ipString," -j DROP"])
#call(["iptables -A INPUT -s ", ipString," -j DROP"]),
| unlicense | Python |
5006ba3124cd80a4529b9ed645aa8981d06a9886 | Stop generate feeds when publishing | andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org | publishconf.py | publishconf.py | #!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| mit | Python |
7e9ff1866c6c640e8709a16f783805bc935e198f | Rename options to user_options. | fhirschmann/penchy,fhirschmann/penchy | penchy/jvms.py | penchy/jvms.py | """
This module provides JVMs to run programs.
"""
import os
import shlex
from penchy.maven import get_classpath
class JVM(object):
"""
This class represents a JVM.
"""
def __init__(self, path, options=""):
"""
:param path: path to jvm executable relative to basepath
:param options: string of options that will be passed to jvm
:type options: string
"""
self.basepath = '/'
self.path = path
# XXX: a passed classpath must be filtered and readded before run
self.user_options = options
def configure(self, *args):
"""
Configure jvm options that allows `args` to run
:param *args: :class:`Tool` or :class:`Program` instances that should be
run.
"""
#TODO
pass
def run(self):
"""
Run the jvm with the current configuration.
"""
#TODO
pass
@property
def cmdline(self):
"""
The command line suitable for `subprocess.Popen` based on the current
configuration.
"""
return [self.basepath + os.sep + self.path] + \
['-classpath', get_classpath()] + shlex.split(options)
class WrappedJVM(JVM):
"""
This class is an abstract base class for a JVM that is wrapped by another
Program.
Inheriting classes must expose this attributes:
- ``out``: dictionary that maps logical output names to paths of output
files
- ``exports``: set of logical outputs (valid keys for ``out``)
"""
def __init__(self):
"""
Inheriting classes must:
- have compatible arguments with JVM.__init__
- call JVM.__init__
"""
raise NotImplementedError("must be implemented")
def run(self):
"""
Run with wrapping.
"""
raise NotImplementedError("must be implemented")
class ValgrindJVM(WrappedJVM):
"""
This class represents a JVM which is called by valgrind.
"""
#TODO
pass
| """
This module provides JVMs to run programs.
"""
import os
import shlex
from penchy.maven import get_classpath
class JVM(object):
"""
This class represents a JVM.
"""
def __init__(self, path, options=""):
"""
:param path: path to jvm executable relative to basepath
:param options: string of options that will be passed to jvm
:type options: string
"""
self.basepath = '/'
self.path = path
self.options = options
def configure(self, *args):
"""
Configure jvm options that allows `args` to run
:param *args: :class:`Tool` or :class:`Program` instances that should be
run.
"""
#TODO
pass
def run(self):
"""
Run the jvm with the current configuration.
"""
#TODO
pass
@property
def cmdline(self):
"""
The command line suitable for `subprocess.Popen` based on the current
configuration.
"""
return [self.basepath + os.sep + self.path] + \
['-classpath', get_classpath()] + shlex.split(options)
class WrappedJVM(JVM):
"""
This class is an abstract base class for a JVM that is wrapped by another
Program.
Inheriting classes must expose this attributes:
- ``out``: dictionary that maps logical output names to paths of output
files
- ``exports``: set of logical outputs (valid keys for ``out``)
"""
def __init__(self):
"""
Inheriting classes must:
- have compatible arguments with JVM.__init__
- call JVM.__init__
"""
raise NotImplementedError("must be implemented")
def run(self):
"""
Run with wrapping.
"""
raise NotImplementedError("must be implemented")
class ValgrindJVM(WrappedJVM):
"""
This class represents a JVM which is called by valgrind.
"""
#TODO
pass
| mit | Python |
ddd3373ce078cf9bf40da7ebd8591995e819b750 | Add function to swap byte order | bjoernricks/phell | phell/utils.py | phell/utils.py | # -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
| # -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
| mit | Python |
63b5362e4ba531544ea732c1daaaca920e73e936 | Extend nice regex | Venefyxatu/phennyfyxata,Venefyxatu/phennyfyxata,Venefyxatu/phennyfyxata | phenny/suck.py | phenny/suck.py | #!/usr/bin/env python
# coding=utf-8
import time
import random
ACTION = chr(1) + "ACTION "
AFIN = chr(1)
NICE_CHOICES = ['Aww, ik vind jou lief, %s!',
ACTION + ' knuffelt %s' + AFIN,
'%s: jij bent officieel cool!',
ACTION + ' knuffelt %s plat' +AFIN,
'lief van je, %s!']
def suck(phenny, input):
"""
Phenny suckt soms.
"""
asker = input.nick
if ('stom' in input.group()
or 'trut' in input.group().lower()
or 'bitch' in input.group().lower()
or 'evil' in input.group().lower()):
phenny.say('Zusje? Kan jij %s even laten weten wat we daarvan vinden?' % asker)
return
if random.random() < 0.3:
action = chr(1) + "ACTION "
phenny.say('%s neemt een heel lang rietje' % action)
time.sleep(0.2)
phenny.say('%s stopt het in het drankje van %s' % (action, asker))
time.sleep(0.2)
phenny.say('%s drinkt het leeg' % action)
time.sleep(0.2)
phenny.say('soms wel ja')
time.sleep(0.2)
else:
phenny.say('Zusje? Kan jij %s even laten weten wat we daarvan vinden?' % asker)
suck.rule = (r'(?i)(evil)?phenny(fyxata)? (suck(t|s)|zuigt|is '
'(stom|een trut|een truttebol|een bitch))')
def lief(phenny, input):
"""
Maar ze is eigenlijk wel lief
"""
asker = input.nick
phenny.say(random.choice(NICE_CHOICES) % asker)
lief.rule = (r'(?i).*((ik vind phenny(fyxata)? |phenny(fyxata)? is )'
'(leuk|tof|lief|aardig|cool)|ik hou van phenny(fyxata)?).*')
if __name__ == '__main__':
print __doc__.strip()
| #!/usr/bin/env python
# coding=utf-8
import time
import random
ACTION = chr(1) + "ACTION "
AFIN = chr(1)
NICE_CHOICES = ['Aww, ik vind jou lief, %s!',
ACTION + ' knuffelt %s' + AFIN,
'%s: jij bent officieel cool!',
ACTION + ' knuffelt %s plat' +AFIN,
'lief van je, %s!']
def suck(phenny, input):
"""
Phenny suckt soms.
"""
asker = input.nick
if ('stom' in input.group()
or 'trut' in input.group().lower()
or 'bitch' in input.group().lower()
or 'evil' in input.group().lower()):
phenny.say('Zusje? Kan jij %s even laten weten wat we daarvan vinden?' % asker)
return
if random.random() < 0.3:
action = chr(1) + "ACTION "
phenny.say('%s neemt een heel lang rietje' % action)
time.sleep(0.2)
phenny.say('%s stopt het in het drankje van %s' % (action, asker))
time.sleep(0.2)
phenny.say('%s drinkt het leeg' % action)
time.sleep(0.2)
phenny.say('soms wel ja')
time.sleep(0.2)
else:
phenny.say('Zusje? Kan jij %s even laten weten wat we daarvan vinden?' % asker)
suck.rule = (r'(?i)(evil)?phenny(fyxata)? (suck(t|s)|zuigt|is '
'(stom|een trut|een truttebol|een bitch))')
def lief(phenny, input):
"""
Maar ze is eigenlijk wel lief
"""
asker = input.nick
phenny.say(random.choice(NICE_CHOICES) % asker)
lief.rule = (r'(?i).*((ik vind phenny(fyxata)? |phenny(fyxata)? is )'
'(lief|aardig|cool)|ik hou van phenny(fyxata)?).*')
if __name__ == '__main__':
print __doc__.strip()
| bsd-2-clause | Python |
4ac8d799aa6b272aefdfb6a10100475dad8f8f56 | change name of age standardized rate model to model.asr | ihmeuw/dismod_mr | dismod_mr/model/__init__.py | dismod_mr/model/__init__.py | import likelihood, spline, age_groups, priors, covariates, process
from process import age_specific_rate as asr, consistent
from covariates import predict_for
| import likelihood, spline, age_groups, priors, covariates, process
from process import age_specific_rate, consistent
from covariates import predict_for
| agpl-3.0 | Python |
886a55cde5b2463878ffc95a91b7f3c6b715fcf3 | 更新 modules main/signals.py, 修正 PEP8 警告 | yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo | commonrepo/main/signals.py | commonrepo/main/signals.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from actstream import action
from actstream import registry
from django_comments.signals import comment_was_posted
from threadedcomments.models import ThreadedComment
# Comment has been registeded with actstream.registry.register
def comment_was_posted_handler(sender, comment, request, **kwargs):
action.send(
request.user,
verb='posted comment',
action_object=comment,
target=comment.content_object)
registry.register(ThreadedComment)
comment_was_posted.connect(comment_was_posted_handler)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from actstream import action
from actstream import registry
from django_comments.signals import comment_was_posted
from threadedcomments.models import ThreadedComment
# Comment has been registeded with actstream.registry.register
def comment_was_posted_handler(sender, comment, request, **kwargs):
action.send(request.user, verb='posted comment', action_object=comment, target=comment.content_object)
registry.register(ThreadedComment)
comment_was_posted.connect(comment_was_posted_handler)
| apache-2.0 | Python |
42c78e5f48f9a3fb3b2b6dd06a2f199a93bc7c72 | Check the address | ArtOfCode-/SmokeDetector,Charcoal-SE/SmokeDetector,NickVolynkin/SmokeDetector,ArtOfCode-/SmokeDetector,Charcoal-SE/SmokeDetector,NickVolynkin/SmokeDetector | continuousintegration.py | continuousintegration.py | import socket
import requests
import sys
from globalvars import GlobalVars
def watchCi():
HOST = ''
PORT = 49494
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print "CI Socket Created"
try:
s.bind((HOST, PORT))
except socket.error as msg:
print 'Bind Failed. Error code: ' + str(msg[0])
return
s.listen(10)
print 'listening for ci changes'
while 1:
conn, addr = s.accept()
circleci_addr = socket.getaddrinfo("circleci.com", 80, 0, 0, socket.IPPROTO_TCP)[0][4][0]
is_circleci = addr == circleci_addr or addr == "circleci.com"
print 'Received request from ' + addr[0] + " ; " + "verified as CircleCI" if is_circleci else "NOT verified as CircleCI!"
if not is_circleci:
GlobalVars.charcoal_hq.send_message("WARNING: got socket that doesn't come from CircleCI from %s" % addr)
continue
r=requests.get('https://api.github.com/repos/Charcoal-SE/SmokeDetector/git/refs/heads/master')
latest_sha = r.json()["object"]["sha"]
r = requests.get('https://api.github.com/repos/Charcoal-SE/SmokeDetector/commits/' + latest_sha + '/statuses')
states = []
for status in r.json():
state = status["state"]
states.append(state)
if "success" in states:
GlobalVars.charcoal_hq.send_message("CI build passed. Ready to pull!")
elif "error" in states or "failure" in states:
GlobalVars.charcoal_hq.send_message("CI build failed, *someone* (prolly Undo) borked something!")
s.close()
| import socket
import requests
import sys
from globalvars import GlobalVars
def watchCi():
HOST = ''
PORT = 49494
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print "CI Socket Created"
try:
s.bind((HOST, PORT))
except socket.error as msg:
print 'Bind Failed. Error code: ' + str(msg[0])
return
s.listen(10)
print 'listening for ci changes'
while 1:
conn, addr = s.accept()
circleci_addr = socket.getaddrinfo("circleci.com", 80, 0, 0, socket.IPPROTO_TCP)[0][4][0]
is_circleci = addr == circleci_addr or addr == "circleci.com"
print 'Received request from ' + addr[0] + " ; " + "verified as CircleCI" if is_circleci else "NOT verified as CircleCI!"
if not is_circleci:
GlobalVars.charcoal_hq.send_message("WARNING: got socket that doesn't come from CircleCI")
continue
r=requests.get('https://api.github.com/repos/Charcoal-SE/SmokeDetector/git/refs/heads/master')
latest_sha = r.json()["object"]["sha"]
r = requests.get('https://api.github.com/repos/Charcoal-SE/SmokeDetector/commits/' + latest_sha + '/statuses')
states = []
for status in r.json():
state = status["state"]
states.append(state)
if "success" in states:
GlobalVars.charcoal_hq.send_message("CI build passed. Ready to pull!")
elif "error" in states or "failure" in states:
GlobalVars.charcoal_hq.send_message("CI build failed, *someone* (prolly Undo) borked something!")
s.close()
| apache-2.0 | Python |
5656eb24e50a753aa442944ee73dd944b01d43ed | Bump version number | jrsmith3/datac,jrsmith3/datac | datac/__init__.py | datac/__init__.py | # -*- coding: utf-8 -*-
"""
Base Library (:mod:`datac`)
===========================
.. currentmodule:: datac
"""
from datac import Datac
__version__ = "0.2.0"
| # -*- coding: utf-8 -*-
"""
Base Library (:mod:`datac`)
===========================
.. currentmodule:: datac
"""
from datac import Datac
__version__ = "0.1.0"
| mit | Python |
aebc2c88b1ef1092d1266d2eb0aeb8333a1396c2 | Add context manager functionality to event bus. | rave-engine/rave | rave/events.py | rave/events.py | """
rave event bus.
"""
import rave.log
_log = rave.log.get(__name__)
class StopProcessing(BaseException):
""" Exception raised to indicate this event should not be processed further. """
pass
class HookContext:
def __init__(self, bus, event, handler):
self.bus = bus
self.event = event
self.handler = handler
def __enter__(self):
self.bus.hook(self.event, self.handler)
return self
def __exit__(self, exctype, excval, exctb):
self.bus.unhook(self.event, self.handler)
class EventBus:
def __init__(self):
self.handlers = {}
def hook(self, event, handler=None):
if not handler:
def do_hook(f):
self.hook(event, f)
return f
return do_hook
self.handlers.setdefault(event, [])
self.handlers[event].append(handler)
def hook_first(self, event, handler=None):
if not handler:
def do_hook(f):
self.hook_first(event, f)
return f
return do_hook
self.handlers.setdefault(event, [])
self.handlers[event].insert(0, handler)
def unhook(self, event, handler):
self.handlers[event].remove(handler)
def hooked(self, event, handler):
return HookContext(self, event, handler)
def emit(self, event, *args, **kwargs):
handlers = self.handlers.get(event)
if handlers:
for handler in handlers:
try:
self._invoke_handler(handler, event, args, kwargs)
except StopProcessing:
break
except Exception as e:
_log.exception(e, 'Exception thrown while processing event {event}.', event=event)
def _invoke_handler(self, handler, event, args, kwargs):
handler(event, *args, **kwargs)
## Stateful API.
def current():
""" Get current event bus. """
import rave.game, rave.engine
game = rave.game.current()
if not game:
return rave.engine.engine.events
return game.events
def emit(event, *args, **kwargs):
return current().emit(event, *args, **kwargs)
def hook(event, handler=None):
return current().hook(event, handler)
def hook_first(event, handler=None):
return current().hook_first(event, handler)
def unhook(event, handler):
return current().unhook(event, handler)
| """
rave event bus.
"""
import rave.log
## API.
class StopProcessing(BaseException):
""" Exception raised to indicate this event should onot be processed further. """
pass
class EventBus:
def __init__(self):
self.handlers = {}
def hook(self, event, handler=None):
if not handler:
def do_hook(f):
self.hook(event, f)
return f
return do_hook
self.handlers.setdefault(event, [])
self.handlers[event].append(handler)
def hook_first(self, event, handler=None):
if not handler:
def do_hook(f):
self.hook_first(event, f)
return f
return do_hook
self.handlers.setdefault(event, [])
self.handlers[event].insert(0, handler)
def unhook(self, event, handler):
self.handlers[event].remove(handler)
def emit(self, event, *args, **kwargs):
handlers = self.handlers.get(event)
if handlers:
for handler in handlers:
try:
self._invoke_handler(handler, event, args, kwargs)
except StopProcessing:
break
except Exception as e:
_log.exception(e, 'Exception thrown while processing event {event}.', event=event)
def _invoke_handler(self, handler, event, args, kwargs):
handler(event, *args, **kwargs)
## Stateful API.
def current():
""" Get current event bus. """
import rave.game, rave.engine
game = rave.game.current()
if not game:
return rave.engine.engine.events
return game.events
def emit(event, *args, **kwargs):
return current().emit(event, *args, **kwargs)
def hook(event, handler=None):
return current().hook(event, handler)
def hook_first(event, handler=None):
return current().hook_first(event, handler)
def unhook(event, handler):
return current().unhook(event, handler)
## Internals.
_log = rave.log.get(__name__)
| bsd-2-clause | Python |
73c3c9deac8f298a118c19d81648d86820d69001 | save to config | drssoccer55/RLBot,drssoccer55/RLBot | runner_GUI.py | runner_GUI.py | import tkinter as tk
from tkinter import ttk
from configparser import RawConfigParser
from gui import match_settings_frame
from gui.team_frames.team_frame_notebook import NotebookTeamFrame
from gui.utils import get_file, IndexManager
from utils.rlbot_config_parser import create_bot_config_layout, get_num_players
from gui.agent_frames.agent_frame import AgentFrame
from gui.team_frames.base_team_frame import BaseTeamFrame
def load_cfg(team1=None, team2=None, match_settings=None, config_path=None):
if config_path is None:
config_path = get_file(
filetypes=[("Config File", "*.cfg")],
title="Choose a file")
raw_config = RawConfigParser()
raw_config.read(config_path)
team_size = get_num_players(raw_config)
overall_config = create_bot_config_layout()
overall_config.parse_file(raw_config, team_size)
if team1 is not None:
team1.load_agents(overall_config)
if team2 is not None:
team2.load_agents(overall_config)
if match_settings is not None:
match_settings.load_match_settings(overall_config)
return overall_config
def save_cfg(overall_config, team1, team2, match_settings):
with open("rlbot.cfg", "w") as f:
f.write(str(overall_config))
def start_running():
print("Need to start now")
def main():
root = tk.Tk()
match_settings = match_settings_frame.SettingsFrame(root)
overall_config = load_cfg(config_path="rlbot.cfg")
index_manager = IndexManager()
# Create team frames
team1 = NotebookTeamFrame(root, 0, index_manager, AgentFrame, overall_config)
team2 = NotebookTeamFrame(root, 1, index_manager, AgentFrame, overall_config)
team1.initialize_team_frame()
team2.initialize_team_frame()
# Setup grid
match_settings.grid(row=0, column=0, columnspan=2)
team1.grid(row=1, column=0, sticky="nsew")
team2.grid(row=1, column=1, sticky="nsew")
# Add buttons
buttons_frame = ttk.Frame(root)
ttk.Button(buttons_frame, text="Load", command=lambda: load_cfg(team1, team2, match_settings)).grid(row=0, column=0)
ttk.Button(buttons_frame, text="Save", command=lambda: save_cfg(overall_config, team1, team2, match_settings)).grid(row=0, column=1)
ttk.Button(buttons_frame, text="Start", command=start_running).grid(row=0, column=2)
for i in range(3):
buttons_frame.grid_columnconfigure(i, weight=1)
buttons_frame.grid(row=2, column=0, columnspan=2, sticky="ew")
root.mainloop()
if __name__ == '__main__':
main()
| import tkinter as tk
from tkinter import ttk
from configparser import RawConfigParser
from gui import match_settings_frame
from gui.team_frames.team_frame_notebook import NotebookTeamFrame
from gui.utils import get_file, IndexManager
from utils.rlbot_config_parser import create_bot_config_layout, get_num_players
from gui.agent_frames.agent_frame import AgentFrame
from gui.team_frames.base_team_frame import BaseTeamFrame
def load_cfg(team1=None, team2=None, match_settings=None, config_path=None):
if config_path is None:
config_path = get_file(
filetypes=[("Config File", "*.cfg")],
title="Choose a file")
raw_config = RawConfigParser()
raw_config.read(config_path)
team_size = get_num_players(raw_config)
overall_config = create_bot_config_layout()
overall_config.parse_file(raw_config, team_size)
if team1 is not None:
team1.load_agents(overall_config)
if team2 is not None:
team2.load_agents(overall_config)
if match_settings is not None:
match_settings.load_match_settings(overall_config)
return overall_config
def save_cfg(team1, team2, match_settings):
print("Need to save cfg")
def start_running():
print("Need to start now")
def main():
root = tk.Tk()
match_settings = match_settings_frame.SettingsFrame(root)
overall_config = load_cfg(config_path="rlbot.cfg")
index_manager = IndexManager()
# Create team frames
team1 = NotebookTeamFrame(root, 0, index_manager, AgentFrame, overall_config)
team2 = NotebookTeamFrame(root, 1, index_manager, AgentFrame, overall_config)
team1.initialize_team_frame()
team2.initialize_team_frame()
# Setup grid
match_settings.grid(row=0, column=0, columnspan=2)
team1.grid(row=1, column=0, sticky="nsew")
team2.grid(row=1, column=1, sticky="nsew")
# Add buttons
buttons_frame = ttk.Frame(root)
ttk.Button(buttons_frame, text="Load", command=lambda: load_cfg(team1, team2, match_settings)).grid(row=0, column=0)
ttk.Button(buttons_frame, text="Save", command=lambda: save_cfg(team1, team2, match_settings)).grid(row=0, column=1)
ttk.Button(buttons_frame, text="Start", command=start_running).grid(row=0, column=2)
for i in range(3):
buttons_frame.grid_columnconfigure(i, weight=1)
buttons_frame.grid(row=2, column=0, columnspan=2, sticky="ew")
root.mainloop()
if __name__ == '__main__':
main()
| mit | Python |
3c7afc4c157d75ebd3411303e285b42539ef6779 | Remove trailing white space. | opello/adventofcode | python/07-1.py | python/07-1.py | #!/usr/bin/env python
import re
instructions = []
def doOperation(operator, operands):
if operator == '':
return operands[0]
elif operator == 'NOT':
return ~operands[0]
elif operator == 'AND':
return operands[0] & operands[1]
elif operator == 'OR':
return operands[0] | operands[1]
elif operator == 'LSHIFT':
return operands[0] << operands[1]
elif operator == 'RSHIFT':
return operands[0] >> operands[1]
else:
raise ValueError
def isInt(value):
try:
int(value)
return True
except ValueError:
return False
with open('../inputs/07.txt') as f:
for line in f:
line = line.rstrip()
expression = re.split(r' -> ', line)
operands = re.split(r' ', expression[0])
output = expression[1]
# Pass through operation.
operator = ''
operandsCount = len(operands)
# Binary operation.
if operandsCount == 3:
operator = operands[1]
# Unary operation.
elif operandsCount == 2:
operator = operands[0]
if operator != '':
operands.remove(operator)
for i in range(len(operands)):
if isInt(operands[i]):
operands[i] = int(operands[i])
instructions += [(output, operator, operands)]
state = {}
while 'a' not in state.keys():
for i in instructions:
output, operator, operands = i
cont = False
for i in range(len(operands)):
if isInt(operands[i]):
continue
elif operands[i] in state.keys():
operands[i] = state[operands[i]]
continue
else:
cont = True
break
if cont:
continue
state[output] = doOperation(operator, operands)
print state['a']
| #!/usr/bin/env python
import re
instructions = []
def doOperation(operator, operands):
if operator == '':
return operands[0]
elif operator == 'NOT':
return ~operands[0]
elif operator == 'AND':
return operands[0] & operands[1]
elif operator == 'OR':
return operands[0] | operands[1]
elif operator == 'LSHIFT':
return operands[0] << operands[1]
elif operator == 'RSHIFT':
return operands[0] >> operands[1]
else:
raise ValueError
def isInt(value):
try:
int(value)
return True
except ValueError:
return False
with open('../inputs/07.txt') as f:
for line in f:
line = line.rstrip()
expression = re.split(r' -> ', line)
operands = re.split(r' ', expression[0])
output = expression[1]
# Pass through operation.
operator = ''
operandsCount = len(operands)
# Binary operation.
if operandsCount == 3:
operator = operands[1]
# Unary operation.
elif operandsCount == 2:
operator = operands[0]
if operator != '':
operands.remove(operator)
for i in range(len(operands)):
if isInt(operands[i]):
operands[i] = int(operands[i])
instructions += [(output, operator, operands)]
state = {}
while 'a' not in state.keys():
for i in instructions:
output, operator, operands = i
cont = False
for i in range(len(operands)):
if isInt(operands[i]):
continue
elif operands[i] in state.keys():
operands[i] = state[operands[i]]
continue
else:
cont = True
break
if cont:
continue
state[output] = doOperation(operator, operands)
print state['a']
| mit | Python |
282131179642e653ef292050c53f1620ebddb269 | Make program description more concise | mgarriott/PDFMerger | src/merge.py | src/merge.py | #!/usr/bin/env python
'''
Merge together a pdf document containing only front pages with a separate
document containing only back pages and save the result into a new document.
@author: Matt Garriott
'''
import argparse
import os
from pyPdf import PdfFileReader, PdfFileWriter
def merge(fppath, bppath, outputpath, no_delete, fed_backwards):
fpfile = PdfFileReader(open(fppath))
bpfile = PdfFileReader(open(bppath))
outputfile = PdfFileWriter()
outputpages = []
for i in range(fpfile.getNumPages()):
backpages = True
try:
outputpages.append(fpfile.getPage(i))
if backpages:
if fed_backwards:
print 'i = %d / backpage = %d' % (i, bpfile.getNumPages() - i - 1)
outputpages.append(bpfile.getPage(bpfile.getNumPages() - i - 1))
else:
outputpages.append(bpfile.getPage(i))
except IndexError:
backpages = False
if not no_delete:
outputpages = [page for page in outputpages if page.extractText() != '']
[outputfile.addPage(page) for page in outputpages]
outputfile.write(open(os.path.expanduser(outputpath), 'w'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Merge front and back pages located in separate ' +
'PDF documents into one PDF document.')
parser.add_argument('-f', '--front-pages', required=True,
help='The path to the PDF containing the front pages')
parser.add_argument('-b', '--back-pages', required=True,
help='The path to the PDF containing the back pages')
parser.add_argument('-o', '--output-file', default='~/Desktop/merged.pdf',
help='The path to save the completed pdf file, default is ~/Desktop/merged.pdf')
parser.add_argument('-nd', '--no-delete', default=False, action='store_true',
help='Prevent blank pages from being deleted from the finished document.')
parser.add_argument('--fed-backwards', default=False, action='store_true',
help='If you were lazy and fed the document in backwards on the seconds side, use this flag.')
args = parser.parse_args()
merge(args.front_pages, args.back_pages, args.output_file, args.no_delete, args.fed_backwards)
| #!/usr/bin/env python
'''
A simple program designed to allow a user to merge a pdf document
that contains only the front pages to a separate document that contains
only the back pages, and merge them in the right order into a new pdf
document.
@author: Matt Garriott
'''
import argparse
import os
from pyPdf import PdfFileReader, PdfFileWriter
def merge(fppath, bppath, outputpath, no_delete, fed_backwards):
fpfile = PdfFileReader(open(fppath))
bpfile = PdfFileReader(open(bppath))
outputfile = PdfFileWriter()
outputpages = []
for i in range(fpfile.getNumPages()):
backpages = True
try:
outputpages.append(fpfile.getPage(i))
if backpages:
if fed_backwards:
print 'i = %d / backpage = %d' % (i, bpfile.getNumPages() - i - 1)
outputpages.append(bpfile.getPage(bpfile.getNumPages() - i - 1))
else:
outputpages.append(bpfile.getPage(i))
except IndexError:
backpages = False
if not no_delete:
outputpages = [page for page in outputpages if page.extractText() != '']
[outputfile.addPage(page) for page in outputpages]
outputfile.write(open(os.path.expanduser(outputpath), 'w'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Merge front and back pages located in separate ' +
'PDF documents into one PDF document.')
parser.add_argument('-f', '--front-pages', required=True,
help='The path to the PDF containing the front pages')
parser.add_argument('-b', '--back-pages', required=True,
help='The path to the PDF containing the back pages')
parser.add_argument('-o', '--output-file', default='~/Desktop/merged.pdf',
help='The path to save the completed pdf file, default is ~/Desktop/merged.pdf')
parser.add_argument('-nd', '--no-delete', default=False, action='store_true',
help='Prevent blank pages from being deleted from the finished document.')
parser.add_argument('--fed-backwards', default=False, action='store_true',
help='If you were lazy and fed the document in backwards on the seconds side, use this flag.')
args = parser.parse_args()
merge(args.front_pages, args.back_pages, args.output_file, args.no_delete, args.fed_backwards)
| bsd-2-clause | Python |
c8ffd1fc4c4e06cd71e86d1d48749a3fe527a54e | Fix test to accommodate change of error message. | gaiaresources/biosys,parksandwildlife/biosys,gaiaresources/biosys,serge-gaia/biosys,ropable/biosys,parksandwildlife/biosys,serge-gaia/biosys,ropable/biosys,gaiaresources/biosys,ropable/biosys,serge-gaia/biosys,parksandwildlife/biosys | biosys/apps/main/tests/api/test_serializers.py | biosys/apps/main/tests/api/test_serializers.py | from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
| from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
| apache-2.0 | Python |
47c498a174c3a5f32db34b6cc1e646e016ee2187 | Implement make_temp() | saurabhiiit/coala,mr-karan/coala,CruiseDevice/coala,arush0311/coala,saurabhiiit/coala,swatilodha/coala,d6e/coala,tltuan/coala,MattAllmendinger/coala,tushar-rishav/coala,coala-analyzer/coala,Uran198/coala,sagark123/coala,AdeshAtole/coala,rimacone/testing2,Shade5/coala,Nosferatul/coala,yland/coala,CruiseDevice/coala,jayvdb/coala,jayvdb/coala,sudheesh001/coala,d6e/coala,Shade5/coala,nemaniarjun/coala,NalinG/coala,tushar-rishav/coala,impmihai/coala,djkonro/coala,CruiseDevice/coala,sophiavanvalkenburg/coala,incorrectusername/coala,Balaji2198/coala,AbdealiJK/coala,MariosPanag/coala,NalinG/coala,sudheesh001/coala,djkonro/coala,RJ722/coala,aptrishu/coala,lonewolf07/coala,meetmangukiya/coala,Uran198/coala,coala-analyzer/coala,aptrishu/coala,meetmangukiya/coala,abhiroyg/coala,coala/coala,lonewolf07/coala,JohnS-01/coala,arush0311/coala,ayushin78/coala,vinc456/coala,vinc456/coala,ManjiriBirajdar/coala,SanketDG/coala,swatilodha/coala,NiklasMM/coala,impmihai/coala,karansingh1559/coala,ManjiriBirajdar/coala,rresol/coala,mr-karan/coala,Shade5/coala,AbdealiJK/coala,coala/coala,netman92/coala,stevemontana1980/coala,shreyans800755/coala,netman92/coala,arafsheikh/coala,MariosPanag/coala,impmihai/coala,SambitAcharya/coala,Asnelchristian/coala,NalinG/coala,NalinG/coala,scottbelden/coala,svsn2117/coala,scriptnull/coala,MattAllmendinger/coala,tushar-rishav/coala,NiklasMM/coala,tltuan/coala,nemaniarjun/coala,SambitAcharya/coala,arjunsinghy96/coala,SambitAcharya/coala,swatilodha/coala,rimacone/testing2,arush0311/coala,damngamerz/coala,incorrectusername/coala,RJ722/coala,sils1297/coala,JohnS-01/coala,incorrectusername/coala,sagark123/coala,rimacone/testing2,lonewolf07/coala,damngamerz/coala,AdeshAtole/coala,NalinG/coala,sils1297/coala,sils1297/coala,Nosferatul/coala,yashtrivedi96/coala,RJ722/coala,Asalle/coala,coala-analyzer/coala,refeed/coala,sagark123/coala,rresol/coala,Asnelchristian/coala,arjunsinghy96/coala,NalinG/coala,SambitAcharya/coala,ayushin78/coala,NiklasMM/coala,meetmangukiya/coala,Uran198/coala,svsn2117/coala,arafsheikh/coala,shreyans800755/coala,yashLadha/coala,dagdaggo/coala,scriptnull/coala,djkonro/coala,arafsheikh/coala,karansingh1559/coala,SambitAcharya/coala,refeed/coala,kartikeys98/coala,rresol/coala,AdeshAtole/coala,Balaji2198/coala,shreyans800755/coala,scottbelden/coala,AbdealiJK/coala,scriptnull/coala,ayushin78/coala,yland/coala,yashLadha/coala,JohnS-01/coala,yashLadha/coala,svsn2117/coala,MariosPanag/coala,Balaji2198/coala,stevemontana1980/coala,sophiavanvalkenburg/coala,Asnelchristian/coala,SanketDG/coala,scriptnull/coala,saurabhiiit/coala,SambitAcharya/coala,dagdaggo/coala,jayvdb/coala,scriptnull/coala,damngamerz/coala,karansingh1559/coala,aptrishu/coala,mr-karan/coala,arjunsinghy96/coala,refeed/coala,yashtrivedi96/coala,SanketDG/coala,scottbelden/coala,yland/coala,netman92/coala,abhiroyg/coala,sophiavanvalkenburg/coala,dagdaggo/coala,coala/coala,ManjiriBirajdar/coala,NalinG/coala,scriptnull/coala,kartikeys98/coala,d6e/coala,stevemontana1980/coala,kartikeys98/coala,abhiroyg/coala,tltuan/coala,vinc456/coala,nemaniarjun/coala,Nosferatul/coala,sudheesh001/coala,SambitAcharya/coala,Asalle/coala,Asalle/coala,MattAllmendinger/coala,scriptnull/coala,yashtrivedi96/coala | coalib/tests/output/dbus/BuildDbusServiceTest.py | coalib/tests/output/dbus/BuildDbusServiceTest.py | import sys
import unittest
from setuptools.dist import Distribution
from distutils.errors import DistutilsOptionError
sys.path.insert(0, ".")
from coalib.output.dbus.BuildDbusService import BuildDbusService
from coalib.misc import Constants
from coalib.misc.ContextManagers import make_temp
class BuildDbusServiceTest(unittest.TestCase):
def test_build(self):
dist = Distribution()
uut = BuildDbusService(dist)
self.assertRaises(DistutilsOptionError, uut.finalize_options)
with make_temp() as uut.output:
uut.finalize_options()
uut.run()
with open(uut.output) as file:
result = file.read(1000)
self.assertEqual(
result,
"[D-BUS Service]\nNames=" + Constants.BUS_NAME +
"\nExec=coala-dbus")
if __name__ == "__main__":
unittest.main(verbosity=2)
| import sys
import unittest
import tempfile
import os
from setuptools.dist import Distribution
from distutils.errors import DistutilsOptionError
sys.path.insert(0, ".")
from coalib.output.dbus.BuildDbusService import BuildDbusService
from coalib.misc import Constants
class BuildDbusServiceTest(unittest.TestCase):
def test_build(self):
dist = Distribution()
uut = BuildDbusService(dist)
self.assertRaises(DistutilsOptionError, uut.finalize_options)
handle, uut.output = tempfile.mkstemp(text=True)
uut.finalize_options()
uut.run()
result = os.read(handle, 1000).decode()
self.assertEqual(
result,
"[D-BUS Service]\nNames=" + Constants.BUS_NAME +
"\nExec=coala-dbus")
if __name__ == "__main__":
unittest.main(verbosity=2)
| agpl-3.0 | Python |
e36e2d58526cf2ab8c4445ee28ab5e53440f4218 | Fix UID strings. | kumpelblase2/rn,kumpelblase2/rn,kumpelblase2/rn | Aufgabe2/server/mail.py | Aufgabe2/server/mail.py | import os
class Mail():
def __init__(self, filename):
self.filename = filename
self.content = []
self.uid = ''
self.deleted = False
def load(self):
file = open(self.filename, 'r')
self.content = file.read().split('\r\n')
self.uid = os.path.basename(file.name)
self.uid = self.uid[:self.uid.index('.')]
file.close()
def size(self):
return len(self.content)
def as_string(self):
return '\r\n'.join(self.content)
def remove(self):
os.remove(self.filename) | import os
class Mail():
def __init__(self, filename):
self.filename = filename
self.content = []
self.uid = ''
self.deleted = False
def load(self):
file = open(self.filename, 'r')
self.content = file.read().split('\r\n')
self.uid = file.name[:file.name.index('.')]
file.close()
def size(self):
return len(self.content)
def as_string(self):
return '\r\n'.join(self.content)
def remove(self):
os.remove(self.filename) | mit | Python |
23c964580f3fc58146865d9e4d1afbf588068de0 | Update pykubectl/kubectl.py | 4Catalyzer/pykubectl | pykubectl/kubectl.py | pykubectl/kubectl.py | import json
import logging
import tempfile
from subprocess import CalledProcessError, check_output
class KubeCtl:
def __init__(self, bin='kubectl', global_flags=''):
super().__init__()
self.kubectl = f'{bin} {global_flags}'
def execute(self, command, definition=None, safe=False):
cmd = f'{self.kubectl} {command}'
with tempfile.NamedTemporaryFile('w') as temp_file:
if definition:
temp_file.write(definition)
temp_file.flush()
cmd = f'{cmd} -f {temp_file.name}'
logging.debug(f'executing {cmd}')
try:
return check_output(cmd, shell=True)
except CalledProcessError as e:
if not safe:
raise e
logging.warn(f'Command {command} failed, swallowing')
def apply(self, *args, **kwargs):
return self.execute('apply', *args, **kwargs)
def delete(self, *args, **kwargs):
return self.execute('delete', *args, **kwargs)
def get(self, *args, **kwargs):
result = self.execute('get -o json', *args, **kwargs).decode()
return json.loads(result)
def describe(self, *args, **kwargs):
return self.execute('describe', *args, **kwargs)
| import json
import logging
import tempfile
from subprocess import CalledProcessError, check_output
class KubeCtl:
def __init__(self, bin='kubectl', global_flags=''):
super().__init__()
self.kubectl = f'{bin} {global_flags}'
def execute(self, command, definition=None, safe=False):
cmd = f'{self.kubectl} {command}'
with tempfile.NamedTemporaryFile('w') as temp_file:
if definition:
temp_file.write(definition)
temp_file.flush()
cmd = f'{cmd} -f {temp_file.name}' \
logging.debug(f'executing {cmd}')
try:
return check_output(cmd, shell=True)
except CalledProcessError as e:
if not safe:
raise e
logging.warn(f'Command {command} failed, swallowing')
def apply(self, *args, **kwargs):
return self.execute('apply', *args, **kwargs)
def delete(self, *args, **kwargs):
return self.execute('delete', *args, **kwargs)
def get(self, *args, **kwargs):
result = self.execute('get -o json', *args, **kwargs).decode()
return json.loads(result)
def describe(self, *args, **kwargs):
return self.execute('describe', *args, **kwargs)
| mit | Python |
0b1376caef3a32d260d36bff4522199b9bf484fe | Normalize version number. | tld/pyptouch | pyptouch/__init__.py | pyptouch/__init__.py | # -*- coding: utf-8 -*-
"""Python driver for P-Touch series of label-printers, with various utilities.
.. moduleauthor:: Terje Elde <terje@elde.net>
"""
__author__ = 'Terje Elde'
__email__ = 'terje@elde.net'
__version__ = '0.0.1.dev0'
| # -*- coding: utf-8 -*-
"""Python driver for P-Touch series of label-printers, with various utilities.
.. moduleauthor:: Terje Elde <terje@elde.net>
"""
__author__ = 'Terje Elde'
__email__ = 'terje@elde.net'
__version__ = '0.0.1-dev0'
| bsd-2-clause | Python |
4e209ce3b531edf41c643cdec94f9746ad032338 | fix rspy.repo.build to not include RelWithDebInfo (in LibCI) | IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense | unit-tests/py/rspy/repo.py | unit-tests/py/rspy/repo.py | # License: Apache 2.0. See LICENSE file in root directory.
# Copyright(c) 2021 Intel Corporation. All Rights Reserved.
import os
# this script is located in librealsense/unit-tests/py/rspy, so main repository is:
root = os.path.dirname( os.path.dirname( os.path.dirname( os.path.dirname( os.path.abspath( __file__ )))))
# Usually we expect the build directory to be directly under the root, named 'build'
build = os.path.join( root, 'win10', 'win64', 'static' )
if not os.path.isdir( build ):
build = os.path.join( root, 'build' )
if not os.path.isdir( build ):
build = None
def find_pyrs():
"""
:return: the location (absolute path) of the pyrealsense2 .so (linux) or .pyd (windows)
"""
global build
import platform
system = platform.system()
linux = ( system == 'Linux' and "microsoft" not in platform.uname()[3].lower() )
from rspy import file
if linux:
for so in file.find( build, '(^|/)pyrealsense2.*\.so$' ):
return os.path.join( build, so )
else:
for pyd in file.find( build, '(^|/)pyrealsense2.*\.pyd$' ):
return os.path.join( build, pyd )
def find_pyrs_dir():
"""
:return: the directory (absolute) in which pyrealsense2 lives, or None if unknown/not found
"""
pyrs = find_pyrs()
if pyrs:
pyrs_dir = os.path.dirname( pyrs )
return pyrs_dir
def pretty_fw_version( fw_version_as_string ):
"""
:return: a version with leading zeros removed, so as to be a little easier to read
"""
return '.'.join( [str(int(c)) for c in fw_version_as_string.split( '.' )] )
| # License: Apache 2.0. See LICENSE file in root directory.
# Copyright(c) 2021 Intel Corporation. All Rights Reserved.
import os
# this script is located in librealsense/unit-tests/py/rspy, so main repository is:
root = os.path.dirname( os.path.dirname( os.path.dirname( os.path.dirname( os.path.abspath( __file__ )))))
# Usually we expect the build directory to be directly under the root, named 'build'
build = os.path.join( root, 'win10', 'win64', 'static', 'RelWithDebInfo' )
if not os.path.isdir( build ):
build = os.path.join( root, 'build' )
if not os.path.isdir( build ):
build = None
def find_pyrs():
"""
:return: the location (absolute path) of the pyrealsense2 .so (linux) or .pyd (windows)
"""
global build
import platform
system = platform.system()
linux = ( system == 'Linux' and "microsoft" not in platform.uname()[3].lower() )
from rspy import file
if linux:
for so in file.find( build, '(^|/)pyrealsense2.*\.so$' ):
return os.path.join( build, so )
else:
for pyd in file.find( build, '(^|/)pyrealsense2.*\.pyd$' ):
return os.path.join( build, pyd )
def find_pyrs_dir():
"""
:return: the directory (absolute) in which pyrealsense2 lives, or None if unknown/not found
"""
pyrs = find_pyrs()
if pyrs:
pyrs_dir = os.path.dirname( pyrs )
return pyrs_dir
def pretty_fw_version( fw_version_as_string ):
"""
:return: a version with leading zeros removed, so as to be a little easier to read
"""
return '.'.join( [str(int(c)) for c in fw_version_as_string.split( '.' )] )
| apache-2.0 | Python |
78dd3f4c46939c619f4a78b854c07612d4b74573 | Update cam_timeLapse_Threaded_cam.py | philprobinson84/RPi,philprobinson84/RPi | camera/timelapse/cam_timeLapse_Threaded_cam.py | camera/timelapse/cam_timeLapse_Threaded_cam.py | #!/usr/bin/env python2.7
import time
import picamera
import os
import errno
import sys
class Logger(object):
def __init__(self):
self.terminal = sys.stdout
self.log = open("logfile.log", "a")
def write(self, message):
self.terminal.write(message)
self.log.write(message)
sys.stdout = Logger()
FRAME_INTERVAL = 30
DIRNAME = "/home/pi/timelapse"
frame = 1
def create_dir():
TIME = time.localtime()
CURRENT_YEAR = TIME[0]
CURRENT_MONTH = TIME[1]
CURRENT_DAY = TIME[2]
CURRENT_HOUR = TIME[3]
global DIRNAME
DIRNAME = "/home/pi/timelapse/%04d%02d%02d_%02d/" % (CURRENT_YEAR, CURRENT_MONTH, CURRENT_DAY, CURRENT_HOUR)
#print "DIRNAME = %s" % DIRNAME
if not os.path.isdir(DIRNAME):
os.makedirs(DIRNAME)
print "camThread:create_dir() created folder: %s" % DIRNAME
global frame
frame = 1
print "camThread:create_dir() frame # reset to: %d" % frame
def force_symlink(file1, file2):
try:
os.symlink(file1, file2)
except OSError, e:
if e.errno == errno.EEXIST:
os.remove(file2)
os.symlink(file1, file2)
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(1)
cam.resolution = (1280, 720)
create_dir()
global DIRNAME
filename = '%sframe%06d.jpg' % (DIRNAME, frame)
cam.capture(filename, format='jpeg',quality=75)
print "camThread:capture_frame() captured frame %06d: %s" % (frame, filename)
force_symlink(filename, "/home/pi/timelapse/latest/latest.jpg")
while True:
# record start_time
start_time = time.time()
# capture a frame
capture_frame(frame)
# increment frame#
frame += 1
# record end time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = FRAME_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "camThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "camThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| #!/usr/bin/env python2.7
import time
import picamera
import os
import errno
FRAME_INTERVAL = 30
DIRNAME = "/home/pi/timelapse"
frame = 1
def create_dir():
TIME = time.localtime()
CURRENT_YEAR = TIME[0]
CURRENT_MONTH = TIME[1]
CURRENT_DAY = TIME[2]
CURRENT_HOUR = TIME[3]
global DIRNAME
DIRNAME = "/home/pi/timelapse/%04d%02d%02d_%02d/" % (CURRENT_YEAR, CURRENT_MONTH, CURRENT_DAY, CURRENT_HOUR)
#print "DIRNAME = %s" % DIRNAME
if not os.path.isdir(DIRNAME):
os.makedirs(DIRNAME)
print "camThread:create_dir() created folder: %s" % DIRNAME
global frame
frame = 1
print "camThread:create_dir() frame # reset to: %d" % frame
def force_symlink(file1, file2):
try:
os.symlink(file1, file2)
except OSError, e:
if e.errno == errno.EEXIST:
os.remove(file2)
os.symlink(file1, file2)
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(1)
cam.resolution = (1280, 720)
create_dir()
global DIRNAME
filename = '%sframe%06d.jpg' % (DIRNAME, frame)
cam.capture(filename, format='jpeg',quality=75)
print "camThread:capture_frame() captured frame %06d: %s" % (frame, filename)
force_symlink(filename, "/home/pi/timelapse/latest/latest.jpg")
while True:
# record start_time
start_time = time.time()
# capture a frame
capture_frame(frame)
# increment frame#
frame += 1
# record end time
end_time = time.time()
# determine elapsed time
elapsed_time = end_time - start_time
# determine how long to sleep
sleep_time = FRAME_INTERVAL - elapsed_time
# check for negative sleep request!
if (sleep_time < 1):
print "camThread: sleep_time < 1!!! (%s)" % sleep_time
sleep_time = 1
# sleep
print "camThread: sleeping for %s seconds" % sleep_time
time.sleep(sleep_time)
| artistic-2.0 | Python |
0a5c9f8cdf55916ac9a914a0d2fe68893d2c26af | Fix upload listing test | OpenBazaar/openbazaar-go,OpenBazaar/openbazaar-go,OpenBazaar/openbazaar-go | qa/upload_listing.py | qa/upload_listing.py | import requests
import json
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
class UploadListingTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
def setup_network(self):
self.setup_nodes()
def run_test(self):
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
listing_json["item"]["priceCurrency"]["code"] = "t" + self.cointype
listing_json["item"]["priceCurrency"]["divisibility"] = 8
api_url = self.nodes[0]["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("UploadListingTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("UploadListingTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
api_url = self.nodes[0]["gateway_url"] + "ob/inventory"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
inv = resp["ron-swanson-tshirt"]
if inv == None:
raise TestFailure("UploadListingTest - FAIL: Did not return inventory for listing")
if inv["inventory"] != "213":
raise TestFailure("UploadListingTest - FAIL: Returned incorrect amount of inventory: %d", inv["inventory"])
elif r.status_code == 404:
raise TestFailure("UploadListingTest - FAIL: Listing post endpoint not found")
else:
resp = json.loads(r.text)
raise TestFailure("UploadListingTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
print("UploadListingTest - PASS")
if __name__ == '__main__':
print("Running UploadListingTest")
UploadListingTest().main()
| import requests
import json
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
class UploadListingTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
def setup_network(self):
self.setup_nodes()
def run_test(self):
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
listing_json["item"]["priceCurrency"]["code"] = "t" + self.cointype
listing_json["item"]["priceCurrency"]["divisibility"] = 8
api_url = self.nodes[0]["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("UploadListingTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("UploadListingTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
api_url = self.nodes[0]["gateway_url"] + "ob/inventory"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
inv = resp["ron-swanson-tshirt"]
if inv == None:
raise TestFailure("UploadListingTest - FAIL: Did not return inventory for listing")
if inv["inventory"] != 213:
raise TestFailure("UploadListingTest - FAIL: Returned incorrect amount of inventory: %d", inv["inventory"])
elif r.status_code == 404:
raise TestFailure("UploadListingTest - FAIL: Listing post endpoint not found")
else:
resp = json.loads(r.text)
raise TestFailure("UploadListingTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
print("UploadListingTest - PASS")
if __name__ == '__main__':
print("Running UploadListingTest")
UploadListingTest().main()
| mit | Python |
c3e0249602f2173f21b56af1b88864323baf4e39 | Remove warnings | panoptes/POCS,AstroHuntsman/POCS,Guokr1991/POCS,fmin2958/POCS,AstroHuntsman/POCS,panoptes/POCS,Guokr1991/POCS,joshwalawender/POCS,fmin2958/POCS,joshwalawender/POCS,AstroHuntsman/POCS,joshwalawender/POCS,panoptes/POCS,Guokr1991/POCS,AstroHuntsman/POCS,Guokr1991/POCS,panoptes/POCS,fmin2958/POCS | panoptes/utils/config.py | panoptes/utils/config.py | import yaml
import os
import panoptes.utils.error
panoptes_config = '{}/../../config.yaml'.format(os.path.dirname(__file__))
def has_config(Class):
""" Class Decorator: Adds a config singleton to class """
# If already read, simply return config
if not has_config._config:
load_config(config_file=panoptes_config)
# Add the config to the class
if has_config._config:
setattr(Class, 'config', has_config._config)
return Class
def load_config(refresh=False, config_file=panoptes_config):
""" Loads the config from a file """
if refresh or not has_config._config:
try:
with open(config_file, 'r') as f:
has_config._config.update(yaml.load(f.read()))
except FileNotFoundError as err:
raise InvalidConfig("Config file not found: {}".format(config_file))
# This is global
has_config._config = dict() | import yaml
import warnings
import os
import panoptes.utils.error
panoptes_config = '{}/../../config.yaml'.format(os.path.dirname(__file__))
def has_config(Class):
""" Class Decorator: Adds a config singleton to class """
# If already read, simply return config
if not has_config._config:
load_config(config_file=panoptes_config)
# Add the config to the class
if has_config._config:
setattr(Class, 'config', has_config._config)
return Class
def load_config(refresh=False, config_file=panoptes_config):
""" Loads the config from a file """
if refresh or not has_config._config:
try:
with open(config_file, 'r') as f:
has_config._config.update(yaml.load(f.read()))
except FileNotFoundError as err:
raise InvalidConfig("Config file not found: {}".format(config_file))
# This is global
has_config._config = dict() | mit | Python |
9a36b60fc5a5a3b103582ee438f06c81889ec1f4 | fix pid dir for daemon | sirech/deliver,sirech/deliver | deliverdaemon.py | deliverdaemon.py | import argparse
import os
from supay import Daemon
from updater import prepare, loop
def init_d():
return Daemon(name='deliver', pid_dir=os.path.abspath(os.path.curdir))
def run():
daemon = init_d()
daemon.start(check_pid=True, verbose=True)
prepare()
loop()
def stop():
daemon = init_d()
daemon.stop(verbose=True)
def status():
daemon = init_d()
daemon.status()
actions = {
'start' : run,
'stop' : stop,
'status' : status
}
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Arguments for the deliver daemon')
group = parser.add_mutually_exclusive_group()
group.add_argument('--start', action='store_const', dest='action', const='start', help='Start the daemon [Default]')
group.add_argument('--stop', action='store_const', dest='action', const='stop', help='Stop the daemon')
group.add_argument('--status', action='store_const', dest='action', const='status',
help='Consult the status of the daemon')
# Execute the chosen action
action = parser.parse_args().action or 'start'
actions[action]()
| import argparse
from supay import Daemon
from updater import prepare, loop
def init_d():
return Daemon(name='deliver', pid_dir='.')
def run():
daemon = init_d()
daemon.start(check_pid=True, verbose=True)
prepare()
loop()
def stop():
daemon = init_d()
daemon.stop(verbose=True)
def status():
daemon = init_d()
daemon.status()
actions = {
'start' : run,
'stop' : stop,
'status' : status
}
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Arguments for the deliver daemon')
group = parser.add_mutually_exclusive_group()
group.add_argument('--start', action='store_const', dest='action', const='start', help='Start the daemon [Default]')
group.add_argument('--stop', action='store_const', dest='action', const='stop', help='Stop the daemon')
group.add_argument('--status', action='store_const', dest='action', const='status',
help='Consult the status of the daemon')
# Execute the chosen action
action = parser.parse_args().action or 'start'
actions[action]()
| mit | Python |
108c76d85c268891cac2b166f94e437ad498b383 | fix average calculation to be blocks over time | svn2github/libtorrent-trunk,svn2github/libtorrent-trunk,svn2github/libtorrent-trunk,svn2github/libtorrent-trunk | parse_disk_buffer_log.py | parse_disk_buffer_log.py | #!/bin/python
import os, sys, time
lines = open(sys.argv[1], 'rb').readlines()
# logfile format:
# <time(ms)> <key>: <value>
# example:
# 16434 read cache: 17
key_order = ['receive buffer', 'send buffer', 'write cache', 'read cache', 'hash temp']
colors = ['30f030', 'f03030', '80f080', 'f08080', '4040ff']
keys = []
fields = {}
maximum = {}
out = open('disk_buffer_log.dat', 'w+')
field_sum = {}
field_num_samples = {}
field_timestamp = {}
for c in key_order:
keys.append(c)
fields[c] = 0
maximum[c] = 0
field_sum[c] = 0
field_num_samples[c] = 0
field_timestamp[c] = 0
last_t = 0
for l in lines:
try:
t = int(l[0:l.find(' ')])
c = l[l.find(' ')+1:l.find(':')]
n = int(l[l.find(':')+1:-1])
except:
print l
continue
if last_t != t:
print >>out, '%d\t' % last_t,
for i in keys:
print >>out, '%d\t' % maximum[i],
print >>out, '\n',
if not c in keys:
keys.append(c)
fields[c] = 0
maximum[c] = 0
field_sum[c] = 0
field_num_samples[c] = 0
field_timestamp[c] = 0
field_sum[c] += fields[c] * float(t - field_timestamp[c])
field_timestamp[c] = t
fields[c] = n
if n > maximum[c]: maximum[c] = n
if last_t != t:
last_t = t
maximum = fields
for i in keys:
print '%s: avg: %f' % (i, field_sum[i] / last_t)
print
out.close()
out = open('disk_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "disk_buffer.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "buffers"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
count = 1 + len(keys)
keys.reverse()
for k in keys:
expr = "$%d" % count
for i in xrange(2, count): expr += "+$%d" % i
count -= 1
print >>out, ' "disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s",' % (expr, k, colors[count-1]),
print >>out, 'x=0'
out.close()
os.system('gnuplot disk_buffer.gnuplot')
| #!/bin/python
import os, sys, time
lines = open(sys.argv[1], 'rb').readlines()
# logfile format:
# <time(ms)> <key>: <value>
# example:
# 16434 read cache: 17
key_order = ['receive buffer', 'send buffer', 'write cache', 'read cache', 'hash temp']
colors = ['30f030', 'f03030', '80f080', 'f08080', '4040ff']
keys = []
fields = {}
maximum = {}
out = open('disk_buffer_log.dat', 'w+')
field_sum = {}
field_num_samples = {}
for c in key_order:
keys.append(c)
fields[c] = 0
maximum[c] = 0
field_sum[c] = 0
field_num_samples[c] = 0
last_t = 0
for l in lines:
try:
t = int(l[0:l.find(' ')])
c = l[l.find(' ')+1:l.find(':')]
n = int(l[l.find(':')+1:-1])
except:
print l
continue
if last_t != t:
print >>out, '%d\t' % last_t,
for i in keys:
print >>out, '%d\t' % maximum[i],
print >>out, '\n',
if not c in keys:
keys.append(c)
fields[c] = 0
maximum[c] = 0
fields[c] = n
field_sum[c] += n
field_num_samples[c] += 1
if n > maximum[c]: maximum[c] = n
if last_t != t:
last_t = t
maximum = fields
for i in keys:
print '%s: avg: %f' % (i, float(field_sum[i]) / float(field_num_samples[i]))
print
out.close()
out = open('disk_buffer.gnuplot', 'wb')
print >>out, "set term png size 1200,700"
print >>out, 'set output "disk_buffer.png"'
print >>out, 'set xrange [0:*]'
print >>out, 'set xlabel "time (ms)"'
print >>out, 'set ylabel "buffers"'
print >>out, "set style data lines"
print >>out, "set key box"
print >>out, 'plot',
count = 1 + len(keys)
keys.reverse()
for k in keys:
expr = "$%d" % count
for i in xrange(2, count): expr += "+$%d" % i
count -= 1
print >>out, ' "disk_buffer_log.dat" using 1:(%s) title "%s" with filledcurves x1 lt rgb "#%s",' % (expr, k, colors[count-1]),
print >>out, 'x=0'
out.close()
os.system('gnuplot disk_buffer.gnuplot')
| bsd-3-clause | Python |
91073bd5a6733b14f5f684139477fad38494447d | remove debug print | mattoufoutu/EventViz,mattoufoutu/EventViz | eventviz/views/timeline.py | eventviz/views/timeline.py | # -*- coding: utf-8 -*-
from flask import Blueprint, render_template, request, url_for, redirect
import eventviz
from eventviz import settings
from eventviz.db import connection, get_fieldnames, get_event_types, get_item
timeline = Blueprint('timeline', __name__)
@timeline.route('/', methods=['GET', 'POST'])
def index():
project = eventviz.project
if project is None:
# TODO: send flash message
return redirect(url_for('main.index'))
db = connection[project]
available_fields = get_fieldnames(project)
displayed_fields = ['method', 'querystring']
group = None
if request.method == 'POST':
form_fields = request.form.getlist('fields')
if form_fields:
displayed_fields = form_fields
if 'group' in request.form:
group = request.form['group']
data = []
events = []
for event_type in get_event_types(project):
for db_item in db[event_type].find():
db_item_id = str(db_item['_id'])
item = {
'start': db_item['time'].strftime(settings.JS_DATE_FORMAT),
'group': db_item.get(group, 'N/A') if group is not None else event_type,
'content': ' - '.join(map(lambda f: str(db_item.get(f, 'N/A')), displayed_fields)),
'className': '%s eventtype-%s' % (db_item_id, event_type)
}
data.append(item)
events.append(db_item_id)
return render_template(
'timeline.html',
page='timeline',
project=project,
event_fields=available_fields,
data=data,
events=events
)
@timeline.route('/<string:event_type>/<string:event_id>')
def event_details(event_type, event_id):
if event_type not in get_event_types(eventviz.project):
return redirect(url_for('timeline.index'))
event = get_item(eventviz.project, event_type, event_id)
del event['_id']
return render_template('event_details.html', event=event)
| # -*- coding: utf-8 -*-
from flask import Blueprint, render_template, request, url_for, redirect
import eventviz
from eventviz import settings
from eventviz.db import connection, get_fieldnames, get_event_types, get_item
timeline = Blueprint('timeline', __name__)
@timeline.route('/', methods=['GET', 'POST'])
def index():
project = eventviz.project
if project is None:
# TODO: send flash message
return redirect(url_for('main.index'))
db = connection[project]
available_fields = get_fieldnames(project)
displayed_fields = ['method', 'querystring']
group = None
if request.method == 'POST':
form_fields = request.form.getlist('fields')
if form_fields:
displayed_fields = form_fields
if 'group' in request.form:
group = request.form['group']
data = []
events = []
for event_type in get_event_types(project):
for db_item in db[event_type].find():
print db_item
db_item_id = str(db_item['_id'])
item = {
'start': db_item['time'].strftime(settings.JS_DATE_FORMAT),
'group': db_item.get(group, 'N/A') if group is not None else event_type,
'content': ' - '.join(map(lambda f: str(db_item.get(f, 'N/A')), displayed_fields)),
'className': '%s eventtype-%s' % (db_item_id, event_type)
}
data.append(item)
events.append(db_item_id)
return render_template(
'timeline.html',
page='timeline',
project=project,
event_fields=available_fields,
data=data,
events=events
)
@timeline.route('/<string:event_type>/<string:event_id>')
def event_details(event_type, event_id):
if event_type not in get_event_types(eventviz.project):
return redirect(url_for('timeline.index'))
event = get_item(eventviz.project, event_type, event_id)
del event['_id']
return render_template('event_details.html', event=event)
| mit | Python |
ea3a60da2f68969a39e7c13d5dcf1e465bcc597d | add health check | fr0der1c/EveryClass-server,fr0der1c/EveryClass-server,fr0der1c/EveryClass-server,fr0der1c/EveryClass-server | everyclass/server/views.py | everyclass/server/views.py | from flask import Blueprint, flash, jsonify, redirect, render_template, request, url_for
from markupsafe import escape
from everyclass.server.exceptions import NoClassException, NoStudentException
main_blueprint = Blueprint('main', __name__)
@main_blueprint.route('/')
def main():
"""首页"""
return render_template('index.html')
@main_blueprint.route('/faq')
def faq():
"""帮助页面"""
return render_template('faq.html')
@main_blueprint.route('/about')
def about():
"""关于页面"""
return render_template('about.html')
@main_blueprint.route('/guide')
def guide():
"""帮助页面"""
return render_template('guide.html')
@main_blueprint.route('/testing')
def testing():
"""测试页面"""
return render_template('testing.html')
@main_blueprint.route('/donate')
def donate():
"""点击发送邮件后的捐助页面"""
return render_template('donate.html')
@main_blueprint.route('/_healthCheck')
def health_check():
"""健康检查"""
return jsonify({"status": "ok"})
@main_blueprint.app_errorhandler(404)
def page_not_found(error):
# 404跳转回首页
# 404 errors are never handled on the blueprint level
# unless raised from a view func so actual 404 errors,
# i.e. "no route for it" defined, need to be handled
# here on the application level
if request.path.startswith('/api/'):
response = jsonify({'error': 'not found'})
response.status_code = 404
return response
return redirect(url_for('main.main'))
# 405跳转回首页
@main_blueprint.app_errorhandler(405)
def method_not_allowed(error):
return redirect(url_for('main.main'))
@main_blueprint.app_errorhandler(NoStudentException)
def no_student_exception_handle(error):
flash('没有在数据库中找到你哦。是不是输错了?你刚刚输入的是%s' % escape(error))
return redirect(url_for('main.main'))
@main_blueprint.app_errorhandler(NoClassException)
def no_class_exception_handle(error):
flash('没有这门课程哦')
return redirect(url_for('main.main'))
| from flask import Blueprint, flash, jsonify, redirect, render_template, request, url_for
from markupsafe import escape
from everyclass.server.exceptions import NoClassException, NoStudentException
main_blueprint = Blueprint('main', __name__)
@main_blueprint.route('/')
def main():
"""首页"""
return render_template('index.html')
@main_blueprint.route('/faq')
def faq():
"""帮助页面"""
return render_template('faq.html')
@main_blueprint.route('/about')
def about():
"""关于页面"""
return render_template('about.html')
@main_blueprint.route('/guide')
def guide():
"""帮助页面"""
return render_template('guide.html')
@main_blueprint.route('/testing')
def testing():
"""测试页面"""
return render_template('testing.html')
@main_blueprint.route('/donate')
def donate():
"""点击发送邮件后的捐助页面"""
return render_template('donate.html')
@main_blueprint.app_errorhandler(404)
def page_not_found(error):
# 404跳转回首页
# 404 errors are never handled on the blueprint level
# unless raised from a view func so actual 404 errors,
# i.e. "no route for it" defined, need to be handled
# here on the application level
if request.path.startswith('/api/'):
response = jsonify({'error': 'not found'})
response.status_code = 404
return response
return redirect(url_for('main.main'))
# 405跳转回首页
@main_blueprint.app_errorhandler(405)
def method_not_allowed(error):
return redirect(url_for('main.main'))
@main_blueprint.app_errorhandler(NoStudentException)
def no_student_exception_handle(error):
flash('没有在数据库中找到你哦。是不是输错了?你刚刚输入的是%s' % escape(error))
return redirect(url_for('main.main'))
@main_blueprint.app_errorhandler(NoClassException)
def no_class_exception_handle(error):
flash('没有这门课程哦')
return redirect(url_for('main.main'))
| mpl-2.0 | Python |
5098e6e5f6156709b77037d3759dae1f43eec667 | Add solution for Lesson_5_Problem_Set/01-Most_Common_City_Name | krzyste/ud032,krzyste/ud032 | Lesson_5_Problem_Set/01-Most_Common_City_Name/city.py | Lesson_5_Problem_Set/01-Most_Common_City_Name/city.py | #!/usr/bin/env python
"""
Use an aggregation query to answer the following question.
What is the most common city name in our cities collection?
Your first attempt probably identified None as the most frequently occurring city name.
What that actually means is that there are a number of cities without a name field at all.
It's strange that such documents would exist in this collection and, depending on your situation,
might actually warrant further cleaning.
To solve this problem the right way, we should really ignore cities that don't have a name specified.
As a hint ask yourself what pipeline operator allows us to simply filter input?
How do we test for the existence of a field?
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation pipeline
that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided.
If you want to run this code locally on your machine, you have to install MongoDB,
download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used in
examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
from pymongo import MongoClient
client = MongoClient('localhost:27017')
db = client[db_name]
return db
def make_pipeline():
# complete the aggregation pipeline
pipeline = [{"$match": {"name": {"$ne": None}}},
{"$group": {"_id": "$name", "count": {"$sum": 1}}},
{"$sort": {"count": -1}},
{"$limit": 1}]
return pipeline
def aggregate(db, pipeline):
result = db.cities.aggregate(pipeline)
return result
if __name__ == '__main__':
db = get_db('examples')
pipeline = make_pipeline()
result = aggregate(db, pipeline)
import pprint
pprint.pprint(result["result"][0])
assert len(result["result"]) == 1
assert result["result"][0] == {'_id': 'Shahpur', 'count': 6}
| #!/usr/bin/env python
"""
Use an aggregation query to answer the following question.
What is the most common city name in our cities collection?
Your first attempt probably identified None as the most frequently occurring city name.
What that actually means is that there are a number of cities without a name field at all.
It's strange that such documents would exist in this collection and, depending on your situation,
might actually warrant further cleaning.
To solve this problem the right way, we should really ignore cities that don't have a name specified.
As a hint ask yourself what pipeline operator allows us to simply filter input?
How do we test for the existence of a field?
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation pipeline
that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided.
If you want to run this code locally on your machine, you have to install MongoDB,
download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used in
examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
from pymongo import MongoClient
client = MongoClient('localhost:27017')
db = client[db_name]
return db
def make_pipeline():
# complete the aggregation pipeline
pipeline = [ ]
return pipeline
def aggregate(db, pipeline):
result = db.cities.aggregate(pipeline)
return result
if __name__ == '__main__':
db = get_db('examples')
pipeline = make_pipeline()
result = aggregate(db, pipeline)
import pprint
pprint.pprint(result["result"][0])
assert len(result["result"]) == 1
assert result["result"][0] == {'_id': 'Shahpur', 'count': 6}
| agpl-3.0 | Python |
88b01dba22aa1915778f5a0227c6a3d9851add41 | make UnitsNotReducible import in physical_constants private | yt-project/unyt | unyt/physical_constants.py | unyt/physical_constants.py | """
Predefined useful physical constants
Note that all of these names can be imported from the top-level unyt namespace.
For example::
>>> from unyt.physical_constants import gravitational_constant, solar_mass
>>> from unyt import AU
>>> from math import pi
>>>
>>> period = 2 * pi * ((1 * AU)**3 / (gravitational_constant * solar_mass))**0.5
>>> period.in_units('day')
unyt_quantity(365.26893606, 'day')
.. show_all_constants::
"""
# -----------------------------------------------------------------------------
# Copyright (c) 2018, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the LICENSE file, distributed with this software.
# -----------------------------------------------------------------------------
from unyt.array import unyt_quantity as _unyt_quantity
from unyt.exceptions import UnitsNotReducible as _UnitsNotReducible
from unyt._unit_lookup_table import physical_constants as _physical_constants
from unyt.unit_object import Unit as _Unit
from unyt.unit_registry import default_unit_registry as _default_unit_registry
def _generate_constants(namespace, registry):
for constant_name in _physical_constants:
value, unit_name, alternate_names = _physical_constants[constant_name]
for name in alternate_names + [constant_name]:
dim = _Unit(unit_name, registry=registry).dimensions
quan = _unyt_quantity(value, registry.unit_system[dim], registry=registry)
namespace[name] = quan
namespace[name + "_mks"] = _unyt_quantity(
value, unit_name, registry=registry
)
try:
namespace[name + "_cgs"] = quan.in_cgs()
except _UnitsNotReducible:
pass
if name == "h":
# backward compatibility for unyt 1.0, which defined hmks
namespace["hmks"] = namespace["h_mks"].copy()
namespace["hcgs"] = namespace["h_cgs"].copy()
_generate_constants(globals(), registry=_default_unit_registry)
| """
Predefined useful physical constants
Note that all of these names can be imported from the top-level unyt namespace.
For example::
>>> from unyt.physical_constants import gravitational_constant, solar_mass
>>> from unyt import AU
>>> from math import pi
>>>
>>> period = 2 * pi * ((1 * AU)**3 / (gravitational_constant * solar_mass))**0.5
>>> period.in_units('day')
unyt_quantity(365.26893606, 'day')
.. show_all_constants::
"""
# -----------------------------------------------------------------------------
# Copyright (c) 2018, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the LICENSE file, distributed with this software.
# -----------------------------------------------------------------------------
from unyt.array import unyt_quantity as _unyt_quantity
from unyt.exceptions import UnitsNotReducible
from unyt._unit_lookup_table import physical_constants as _physical_constants
from unyt.unit_object import Unit as _Unit
from unyt.unit_registry import default_unit_registry as _default_unit_registry
def _generate_constants(namespace, registry):
for constant_name in _physical_constants:
value, unit_name, alternate_names = _physical_constants[constant_name]
for name in alternate_names + [constant_name]:
dim = _Unit(unit_name, registry=registry).dimensions
quan = _unyt_quantity(value, registry.unit_system[dim], registry=registry)
namespace[name] = quan
namespace[name + "_mks"] = _unyt_quantity(
value, unit_name, registry=registry
)
try:
namespace[name + "_cgs"] = quan.in_cgs()
except UnitsNotReducible:
pass
if name == "h":
# backward compatibility for unyt 1.0, which defined hmks
namespace["hmks"] = namespace["h_mks"].copy()
namespace["hcgs"] = namespace["h_cgs"].copy()
_generate_constants(globals(), registry=_default_unit_registry)
| bsd-3-clause | Python |
208fbd6ac390d050fb23f0ec5d6e620f6b4a3164 | update phoenix login description | bird-house/pyramid-phoenix,bird-house/pyramid-phoenix,bird-house/pyramid-phoenix,bird-house/pyramid-phoenix | phoenix/account/schema.py | phoenix/account/schema.py | import colander
import deform
class PhoenixSchema(colander.MappingSchema):
password = colander.SchemaNode(
colander.String(),
title='Password',
description='If you have not configured your password yet then it is likely to be "qwerty"',
validator=colander.Length(min=6),
widget=deform.widget.PasswordWidget())
class OAuthSchema(colander.MappingSchema):
choices = [('github', 'GitHub'), ('ceda', 'Ceda')]
provider = colander.SchemaNode(
colander.String(),
validator=colander.OneOf([x[0] for x in choices]),
widget=deform.widget.RadioChoiceWidget(values=choices, inline=True),
title='OAuth 2.0 Provider',
description='Select your OAuth Provider.')
class OpenIDSchema(colander.MappingSchema):
openid = colander.SchemaNode(
colander.String(),
validator=colander.url,
title="OpenID",
description="Example: https://esgf-data.dkrz.de/esgf-idp/openid/myname or https://openid.stackexchange.com/",
default='https://openid.stackexchange.com/')
class ESGFOpenIDSchema(colander.MappingSchema):
choices = [('badc', 'BADC'), ('dkrz', 'DKRZ'), ('ipsl', 'IPSL'), ('smhi', 'SMHI'), ('pcmdi', 'PCMDI')]
provider = colander.SchemaNode(
colander.String(),
validator=colander.OneOf([x[0] for x in choices]),
widget=deform.widget.RadioChoiceWidget(values=choices, inline=True),
title='ESGF Provider',
description='Select the Provider of your ESGF OpenID.')
username = colander.SchemaNode(
colander.String(),
validator=colander.Length(min=2),
title="Username",
description="Your ESGF OpenID Username."
)
class LdapSchema(colander.MappingSchema):
username = colander.SchemaNode(
colander.String(),
title="Username",
)
password = colander.SchemaNode(
colander.String(),
title='Password',
widget=deform.widget.PasswordWidget()) | import colander
import deform
class PhoenixSchema(colander.MappingSchema):
password = colander.SchemaNode(
colander.String(),
title='Password',
description='If this is a demo instance your password might be "qwerty"',
validator=colander.Length(min=4),
widget=deform.widget.PasswordWidget())
class OAuthSchema(colander.MappingSchema):
choices = [('github', 'GitHub'), ('ceda', 'Ceda')]
provider = colander.SchemaNode(
colander.String(),
validator=colander.OneOf([x[0] for x in choices]),
widget=deform.widget.RadioChoiceWidget(values=choices, inline=True),
title='OAuth 2.0 Provider',
description='Select your OAuth Provider.')
class OpenIDSchema(colander.MappingSchema):
openid = colander.SchemaNode(
colander.String(),
validator=colander.url,
title="OpenID",
description="Example: https://esgf-data.dkrz.de/esgf-idp/openid/myname or https://openid.stackexchange.com/",
default='https://openid.stackexchange.com/')
class ESGFOpenIDSchema(colander.MappingSchema):
choices = [('badc', 'BADC'), ('dkrz', 'DKRZ'), ('ipsl', 'IPSL'), ('smhi', 'SMHI'), ('pcmdi', 'PCMDI')]
provider = colander.SchemaNode(
colander.String(),
validator=colander.OneOf([x[0] for x in choices]),
widget=deform.widget.RadioChoiceWidget(values=choices, inline=True),
title='ESGF Provider',
description='Select the Provider of your ESGF OpenID.')
username = colander.SchemaNode(
colander.String(),
validator=colander.Length(min=2),
title="Username",
description="Your ESGF OpenID Username."
)
class LdapSchema(colander.MappingSchema):
username = colander.SchemaNode(
colander.String(),
title="Username",
)
password = colander.SchemaNode(
colander.String(),
title='Password',
widget=deform.widget.PasswordWidget()) | apache-2.0 | Python |
468ede353d0f69753212e7dcb1eb448667fd1dc9 | Add missing imports. | repocracy/repocracy,repocracy/repocracy,repocracy/repocracy,codysoyland/snowman,codysoyland/snowman,codysoyland/snowman | repocracy/repo/tasks.py | repocracy/repo/tasks.py | import os
import subprocess
from django.conf import settings
from celery.decorators import task
from repocracy.repo.models import Repository
@task
def translate_repository(repo_pk):
pass
@task
def clone_repository(repo_pk):
try:
repo = Repository.objects.get(pk=repo_pk)
except Repository.DoesNotExist:
pass
else:
destination = os.path.join(
settings.REPOCRACY_BASE_REPO_PATH,
repo.pk
)
destination_dirs = [os.path.join(destination, type) for type in ('hg', 'git')]
for i in destination_dirs:
os.makedirs(i)
result = subprocess.call(
args=['git', '--git-dir=.', 'clone', repo.origin, '.'],
cwd=destination_dirs[1]
)
if result != 0:
result = subprocess.call(
args=['hg', 'clone', repo.origin, '.'],
cwd=destination_dirs[0]
)
if result == 0:
repo.origin_type = 1
repo.status = 1
else:
repo.status = 255
else:
repo.origin_type = 0
repo.status = 1
repo.save()
translate_repository.delay(repo.pk)
| import os
import subprocess
from repo.models import Repository
@task
def translate_repository(repo_pk):
pass
@task
def clone_repository(repo_pk):
try:
repo = Repository.objects.get(pk=repo_pk)
destination = os.path.join(
settings.REPOCRACY_BASE_REPO_PATH,
repo.pk
)
destination_dirs = [os.path.join(destination, type) for type in ('hg', 'git')]
for i in destination_dirs:
os.makedirs(i)
result = subprocess.call(
args=['git', '--git-dir=.', 'clone', repo.origin, '.'],
cwd=destination_dirs[1]
)
if result != 0:
result = subprocess.call(
args=['hg', 'clone', repo.origin, '.'],
cwd=destination_dirs[0]
)
if result == 0:
repo.origin_type = 1
repo.status = 1
else:
repo.status = 255
else:
repo.origin_type = 0
repo.status = 1
repo.save()
translate_repository.delay(repo.pk)
except Repository.DoesNotExist:
pass
| bsd-3-clause | Python |
873aac264d5edbe7ff341a6270cdd4f687e56f0e | Make requirements compile disable pip's require-virtualenv flag always | adamchainz/mariadb-dyncol | requirements/compile.py | requirements/compile.py | #!/usr/bin/env python
from __future__ import annotations
import os
import subprocess
import sys
from pathlib import Path
if __name__ == "__main__":
os.chdir(Path(__file__).parent)
os.environ["CUSTOM_COMPILE_COMMAND"] = "requirements/compile.py"
os.environ["PIP_REQUIRE_VIRTUALENV"] = "0"
common_args = [
"-m",
"piptools",
"compile",
"--generate-hashes",
"--allow-unsafe",
] + sys.argv[1:]
subprocess.run(
["python3.7", *common_args, "-o", "py37.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.8", *common_args, "-o", "py38.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.9", *common_args, "-o", "py39.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.10", *common_args, "-o", "py310.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.11", *common_args, "-o", "py311.txt"],
check=True,
capture_output=True,
)
| #!/usr/bin/env python
from __future__ import annotations
import os
import subprocess
import sys
from pathlib import Path
if __name__ == "__main__":
os.chdir(Path(__file__).parent)
os.environ["CUSTOM_COMPILE_COMMAND"] = "requirements/compile.py"
os.environ.pop("PIP_REQUIRE_VIRTUALENV", None)
common_args = [
"-m",
"piptools",
"compile",
"--generate-hashes",
"--allow-unsafe",
] + sys.argv[1:]
subprocess.run(
["python3.7", *common_args, "-o", "py37.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.8", *common_args, "-o", "py38.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.9", *common_args, "-o", "py39.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.10", *common_args, "-o", "py310.txt"],
check=True,
capture_output=True,
)
subprocess.run(
["python3.11", *common_args, "-o", "py311.txt"],
check=True,
capture_output=True,
)
| mit | Python |
49181062e5f697775b8f3fe12050d350b1dd8b9d | Clean up and store http response code as well | icereval/scrapi,mehanig/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,fabianvf/scrapi,fabianvf/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,jeffreyliu3230/scrapi,alexgarciac/scrapi,erinspace/scrapi | scrapi/requests.py | scrapi/requests.py | from __future__ import absolute_import
import json
import logging
import functools
from datetime import datetime
import requests
import cqlengine
from cqlengine import columns
from scrapi import database # noqa
from scrapi import settings
logger = logging.getLogger(__name__)
class HarvesterResponse(cqlengine.Model):
__table_name__ = 'responses'
__keyspace__ = settings.CASSANDRA_KEYSPACE
method = columns.Text(primary_key=True)
url = columns.Text(primary_key=True, required=True, index=True)
# Raw request data
content = columns.Bytes()
headers_str = columns.Text()
status_code = columns.Integer()
time_made = columns.DateTime(default=datetime.now)
@property
def json(self):
return json.loads(self.content)
@property
def headers(self):
# TODO: make case insensitive multidict
return json.loads(self.headers_str)
def record_or_load_response(method, url, **kwargs):
try:
return HarvesterResponse.get(url=url, method=method)
except HarvesterResponse.DoesNotExist:
response = requests.request(method, url, **kwargs)
return HarvesterResponse(
url=url,
method=method,
content=response.content,
status_code=response.status_code,
headers_str=json.dumps(response.headers)
).save()
def request(method, url, **kwargs):
if settings.RECORD_HTTP_TRANSACTIONS:
return record_or_load_response(method, url, **kwargs)
return requests.request(method, url, **kwargs)
get = functools.partial(request, 'get')
put = functools.partial(request, 'put')
post = functools.partial(request, 'post')
delete = functools.partial(request, 'delete')
| from __future__ import absolute_import
import json
import logging
import functools
from datetime import datetime
import requests
import cqlengine
from cqlengine import columns
from cqlengine import management
from cassandra.cluster import NoHostAvailable
from scrapi import settings
logger = logging.getLogger(__name__)
try:
cqlengine.connection.setup(settings.CASSANDRA_URI, settings.CASSANDRA_KEYSPACE)
management.create_keyspace(settings.CASSANDRA_KEYSPACE, replication_factor=1, strategy_class='SimpleStrategy')
except NoHostAvailable:
logger.error('Could not connect to Cassandra, expect errors.')
if settings.RECORD_HTTP_TRANSACTIONS:
raise
class HarvesterResponse(cqlengine.Model):
__table_name__ = 'responses'
__keyspace__ = settings.CASSANDRA_KEYSPACE
method = columns.Text(primary_key=True)
url = columns.Text(primary_key=True, required=True, index=True)
# Raw request data
content = columns.Bytes()
headers_str = columns.Text()
time_made = columns.DateTime(default=datetime.now)
@property
def json(self):
return json.loads(self.content)
@property
def headers(self):
# TODO: make case insensitive multidict
return json.loads(self.headers_str)
def record_or_load_response(method, url, **kwargs):
try:
return HarvesterResponse.get(url=url, method=method)
except HarvesterResponse.DoesNotExist:
response = requests.request(method, url, **kwargs)
return HarvesterResponse(
url=url,
method=method,
content=response.content
).save()
def request(method, url, **kwargs):
if settings.RECORD_HTTP_TRANSACTIONS:
return record_or_load_response(method, url)
return requests.request(method, url, **kwargs)
# This has to be done after HarvesterResponse definition
management.sync_table(HarvesterResponse)
get = functools.partial(request, 'get')
put = functools.partial(request, 'put')
post = functools.partial(request, 'post')
delete = functools.partial(request, 'delete')
| apache-2.0 | Python |
d2c5c5867a8d8ccd3af23251170fdff405c4cea2 | comment out all ddb | mauriceyap/ccm-assistant | src/resources/playback_db.py | src/resources/playback_db.py | import boto3
import os
dynamodb = boto3.resource('dynamodb', region_name='eu-west-1',
endpoint_url=("https://dynamodb.eu-west-1."
"amazonaws.com"),
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_'
'SECRET_ACCESS_KEY']
)
table = dynamodb.Table('alexa-ChristChurchMayfairAssistant-playback')
def store_audio_url_for_user(user_id, audio_url):
# table.put_item(
# Item={
# 'user_id': user_id,
# 'audio_url': audio_url
# }
# )
pass
def store_offset_for_user(user_id, offset):
# table.update_item(
# Key={
# 'user_id': user_id
# },
# UpdateExpression="set offset=:o",
# ExpressionAttributeValues={
# ':o': offset
# }
# )
pass
def get_data_for_user(user_id):
# response = table.get_item(
# Key={
# 'user_id': user_id
# }
# )
# return response['Item']
pass
def reset_user(user_id):
# table.delete_item(
# Key={
# 'user_id': user_id
# }
# )
pass
| import boto3
import os
dynamodb = boto3.resource('dynamodb', region_name='eu-west-1',
endpoint_url=("https://dynamodb.eu-west-1."
"amazonaws.com"),
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_'
'SECRET_ACCESS_KEY']
)
table = dynamodb.Table('alexa-ChristChurchMayfairAssistant-playback')
def store_audio_url_for_user(user_id, audio_url):
table.put_item(
Item={
'user_id': user_id,
'audio_url': audio_url
}
)
def store_offset_for_user(user_id, offset):
table.update_item(
Key={
'user_id': user_id
},
UpdateExpression="set offset=:o",
ExpressionAttributeValues={
':o': offset
}
)
def get_data_for_user(user_id):
response = table.get_item(
Key={
'user_id': user_id
}
)
return response['Item']
def reset_user(user_id):
table.delete_item(
Key={
'user_id': user_id
}
)
| mit | Python |
ee2970064759eb1f3683410c1ab0d6d5a35b3470 | Fix warning Django 1.9 | lambdalisue/django-permission | src/permission/utils/autodiscover.py | src/permission/utils/autodiscover.py | # coding=utf-8
"""
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
import copy
def autodiscover(module_name=None):
"""
Autodiscover INSTALLED_APPS perms.py modules and fail silently when not
present. This forces an import on them to register any permissions bits
they may want.
"""
if django.VERSION < (1, 8):
from django.utils.importlib import import_module
else
from importlib import import_module
from django.utils.module_loading import module_has_submodule
from permission.conf import settings
module_name = module_name or settings.PERMISSION_AUTODISCOVER_MODULE_NAME
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's perms module
try:
# discover the permission module
discover(app, module_name=module_name)
except:
# Decide whether to bubble up this error. If the app just doesn't
# have an perms module, we can just ignore the error attempting
# to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, module_name):
raise
def discover(app, module_name=None):
"""
Automatically apply the permission logics written in the specified
module.
Examples
--------
Assume if you have a ``perms.py`` in ``your_app`` as::
from permission.logics import AuthorPermissionLogic
PERMISSION_LOGICS = (
('your_app.your_model', AuthorPermissionLogic),
)
Use this method to apply the permission logics enumerated in
``PERMISSION_LOGICS`` variable like:
>>> discover('your_app')
"""
from django.db.models.loading import get_model
if django.VERSION < (1, 8):
from django.utils.importlib import import_module
else
from importlib import import_module
from permission.conf import settings
from permission.utils.logics import add_permission_logic
variable_name = settings.PERMISSION_AUTODISCOVER_VARIABLE_NAME
module_name = module_name or settings.PERMISSION_AUTODISCOVER_MODULE_NAME
# import the module
m = import_module('%s.%s' % (app, module_name))
# check if the module have PERMISSION_LOGICS variable
if hasattr(m, variable_name):
# apply permission logics automatically
permission_logic_set = getattr(m, variable_name)
for model, permission_logic in permission_logic_set:
if isinstance(model, basestring):
# convert model string to model instance
model = get_model(*model.split('.', 1))
add_permission_logic(model, permission_logic)
| # coding=utf-8
"""
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
import copy
def autodiscover(module_name=None):
"""
Autodiscover INSTALLED_APPS perms.py modules and fail silently when not
present. This forces an import on them to register any permissions bits
they may want.
"""
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
from permission.conf import settings
module_name = module_name or settings.PERMISSION_AUTODISCOVER_MODULE_NAME
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's perms module
try:
# discover the permission module
discover(app, module_name=module_name)
except:
# Decide whether to bubble up this error. If the app just doesn't
# have an perms module, we can just ignore the error attempting
# to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, module_name):
raise
def discover(app, module_name=None):
"""
Automatically apply the permission logics written in the specified
module.
Examples
--------
Assume if you have a ``perms.py`` in ``your_app`` as::
from permission.logics import AuthorPermissionLogic
PERMISSION_LOGICS = (
('your_app.your_model', AuthorPermissionLogic),
)
Use this method to apply the permission logics enumerated in
``PERMISSION_LOGICS`` variable like:
>>> discover('your_app')
"""
from django.db.models.loading import get_model
from django.utils.importlib import import_module
from permission.conf import settings
from permission.utils.logics import add_permission_logic
variable_name = settings.PERMISSION_AUTODISCOVER_VARIABLE_NAME
module_name = module_name or settings.PERMISSION_AUTODISCOVER_MODULE_NAME
# import the module
m = import_module('%s.%s' % (app, module_name))
# check if the module have PERMISSION_LOGICS variable
if hasattr(m, variable_name):
# apply permission logics automatically
permission_logic_set = getattr(m, variable_name)
for model, permission_logic in permission_logic_set:
if isinstance(model, basestring):
# convert model string to model instance
model = get_model(*model.split('.', 1))
add_permission_logic(model, permission_logic)
| mit | Python |
b2f91bd5b0a9f06ddcdcff8f220756ad4a6286f7 | Fix stray webkitpy unit test after r157385. | smishenk/blink-crosswalk,jtg-gg/blink,nwjs/blink,crosswalk-project/blink-crosswalk-efl,jtg-gg/blink,Pluto-tv/blink-crosswalk,XiaosongWei/blink-crosswalk,kurli/blink-crosswalk,smishenk/blink-crosswalk,ondra-novak/blink,XiaosongWei/blink-crosswalk,kurli/blink-crosswalk,smishenk/blink-crosswalk,kurli/blink-crosswalk,Bysmyyr/blink-crosswalk,ondra-novak/blink,Pluto-tv/blink-crosswalk,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,jtg-gg/blink,nwjs/blink,ondra-novak/blink,PeterWangIntel/blink-crosswalk,XiaosongWei/blink-crosswalk,XiaosongWei/blink-crosswalk,hgl888/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,kurli/blink-crosswalk,Pluto-tv/blink-crosswalk,PeterWangIntel/blink-crosswalk,hgl888/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,jtg-gg/blink,PeterWangIntel/blink-crosswalk,hgl888/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,nwjs/blink,kurli/blink-crosswalk,smishenk/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,smishenk/blink-crosswalk,jtg-gg/blink,smishenk/blink-crosswalk,kurli/blink-crosswalk,jtg-gg/blink,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,modulexcite/blink,kurli/blink-crosswalk,smishenk/blink-crosswalk,nwjs/blink,nwjs/blink,kurli/blink-crosswalk,modulexcite/blink,jtg-gg/blink,Bysmyyr/blink-crosswalk,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,modulexcite/blink,jtg-gg/blink,crosswalk-project/blink-crosswalk-efl,ondra-novak/blink,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,nwjs/blink,modulexcite/blink,modulexcite/blink,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,ondra-novak/blink,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,modulexcite/blink,hgl888/blink-crosswalk-efl,nwjs/blink,modulexcite/blink,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,nwjs/blink,ondra-novak/blink,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,ondra-novak/blink,Pluto-tv/blink-crosswalk,Pluto-tv/blink-crosswalk,Bysmyyr/blink-crosswalk,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl,Pluto-tv/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,ondra-novak/blink,kurli/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,PeterWangIntel/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,nwjs/blink,Bysmyyr/blink-crosswalk,smishenk/blink-crosswalk,modulexcite/blink,Pluto-tv/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,modulexcite/blink,XiaosongWei/blink-crosswalk,jtg-gg/blink,nwjs/blink,ondra-novak/blink | Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot_unittest.py | Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot_unittest.py | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.net.buildbot.chromiumbuildbot import ChromiumBuildBot
class ChromiumBuilderTest(unittest.TestCase):
def test_results_url(self):
builder = ChromiumBuildBot().builder_with_name('WebKit Mac10.8 (dbg)')
self.assertEqual(builder.results_url(),
'https://storage.googleapis.com/chromium-layout-test-archives/WebKit_Mac10_8__dbg_')
def test_accumulated_results_url(self):
builder = ChromiumBuildBot().builder_with_name('WebKit Mac10.8 (dbg)')
self.assertEqual(builder.accumulated_results_url(),
'https://storage.googleapis.com/chromium-layout-test-archives/WebKit_Mac10_8__dbg_/results/layout-test-results')
| # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.net.buildbot.chromiumbuildbot import ChromiumBuildBot
class ChromiumBuilderTest(unittest.TestCase):
def test_results_url(self):
builder = ChromiumBuildBot().builder_with_name('WebKit Mac10.8 (dbg)')
self.assertEqual(builder.results_url(),
'https://storage.googleapis.com/chromium-layout-test-archives/WebKit_Mac10_8__dbg_')
def test_accumulated_results_url(self):
builder = ChromiumBuildBot().builder_with_name('WebKit Mac10.8 (dbg)')
self.assertEqual(builder.accumulated_results_url(),
'https://storage.googleapis.com/chromium-layout-test-archives/WebKit_Mac10_8__dbg_/results/layout-test-results/layout-test-results')
| bsd-3-clause | Python |
da8a8c9b777792d99d8413f966ba5b7cdf6cf938 | Fix relative path join | openkamer/openkamer,openkamer/openkamer,openkamer/openkamer,openkamer/openkamer | create_local_settings.py | create_local_settings.py | #!/usr/bin/env python3
import codecs
import os
import random
import shutil
import string
import tempfile
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
LOCAL_SETTINGS_PATH = os.path.join(BASE_DIR, 'website/local_settings.py')
LOCAL_SETTINGS_EXAMPLE_PATH = os.path.join(BASE_DIR, 'website/local_settings_example.py')
def main():
if os.path.exists(LOCAL_SETTINGS_PATH):
print('ERROR: ' + LOCAL_SETTINGS_PATH +
' already exists! Please remove this file manually if you intent to overwrite it.')
return
if not os.path.exists(LOCAL_SETTINGS_EXAMPLE_PATH):
print('ERROR: ' + LOCAL_SETTINGS_EXAMPLE_PATH +
' could not be found! Please make sure this example settings file is readable at the given location.')
return
shutil.copyfile(LOCAL_SETTINGS_EXAMPLE_PATH, LOCAL_SETTINGS_PATH)
secret_key_random = generate_random_secret_key()
replace(LOCAL_SETTINGS_PATH, "SECRET_KEY = ''", "SECRET_KEY = '" + secret_key_random + "'")
def replace(source_file_path, pattern, substring):
fh, target_file_path = tempfile.mkstemp()
with codecs.open(target_file_path, 'w', 'utf-8') as target_file:
with codecs.open(source_file_path, 'r', 'utf-8') as source_file:
for line in source_file:
target_file.write(line.replace(pattern, substring))
os.remove(source_file_path)
shutil.move(target_file_path, source_file_path)
def generate_random_secret_key():
# source: https://gist.github.com/mattseymour/9205591
# Get ascii Characters numbers and punctuation (minus quote characters as they could terminate string).
chars = ''.join([string.ascii_letters, string.digits, string.punctuation]).replace('\'', '').replace('"', '').replace('\\', '')
secret_key = ''.join([random.SystemRandom().choice(chars) for i in range(50)])
return secret_key
if __name__ == "__main__":
main() | #!/usr/bin/env python3
import codecs
import os
import random
import shutil
import string
import tempfile
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
LOCAL_SETTINGS_PATH = os.path.join(BASE_DIR, './website/local_settings.py')
LOCAL_SETTINGS_EXAMPLE_PATH = os.path.join(BASE_DIR, './website/local_settings_example.py')
def main():
if os.path.exists(LOCAL_SETTINGS_PATH):
print('ERROR: ' + LOCAL_SETTINGS_PATH +
' already exists! Please remove this file manually if you intent to overwrite it.')
return
if not os.path.exists(LOCAL_SETTINGS_EXAMPLE_PATH):
print('ERROR: ' + LOCAL_SETTINGS_EXAMPLE_PATH +
' could not be found! Please make sure this example settings file is readable at the given location.')
return
shutil.copyfile(LOCAL_SETTINGS_EXAMPLE_PATH, LOCAL_SETTINGS_PATH)
secret_key_random = generate_random_secret_key()
replace(LOCAL_SETTINGS_PATH, "SECRET_KEY = ''", "SECRET_KEY = '" + secret_key_random + "'")
def replace(source_file_path, pattern, substring):
fh, target_file_path = tempfile.mkstemp()
with codecs.open(target_file_path, 'w', 'utf-8') as target_file:
with codecs.open(source_file_path, 'r', 'utf-8') as source_file:
for line in source_file:
target_file.write(line.replace(pattern, substring))
os.remove(source_file_path)
shutil.move(target_file_path, source_file_path)
def generate_random_secret_key():
# source: https://gist.github.com/mattseymour/9205591
# Get ascii Characters numbers and punctuation (minus quote characters as they could terminate string).
chars = ''.join([string.ascii_letters, string.digits, string.punctuation]).replace('\'', '').replace('"', '').replace('\\', '')
secret_key = ''.join([random.SystemRandom().choice(chars) for i in range(50)])
return secret_key
if __name__ == "__main__":
main() | mit | Python |
abe311fb9de6a58c9b40b1079785473b5a72d12c | Update activate-devices.py | JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub | cron/activate-devices.py | cron/activate-devices.py | #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
import RPi.GPIO as GPIO
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorupdate = cnx.cursor()
query = ("UPDATE devices SET value = 0;")
cursorupdate.execute(query)
results_devices =cursorupdate.fetchall()
cursorupdate.close()
for result in results_devices:
print("* * * * * *")
DEVICE_ACTIVE = bool( result[0] )
DEVICE_ID = int( result[1] )
print( DEVICE_ID, DEVICE_ACTIVE )
if ( DEVICE_ACTIVE ):
#print( DEVICE_ID, DEVICE_ACTIVE )
cursorupdate = cnx.cursor()
query = ("UPDATE devices SET value = 1 WHERE d_id = "+DEVICE_ID+";")
cursorupdate.execute(query)
results_devices =cursorupdate.fetchall()
cursorupdate.close()
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
for result in results_devices:
print("- - - - - - - -")
DEVICE_PIN = int( result[2] )
DEVICE_VALUE = int( result[3] )
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
print( DEVICE_PIN, DEVICE_VALUE )
cnx.close()
| #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
import RPi.GPIO as GPIO
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorupdate = cnx.cursor()
query = ("UPDATE devices SET value = 0;")
cursorupdate.execute(query)
results_devices =cursorupdate.fetchall()
cursorupdate.close()
for result in results_devices:
print("* * * * * *")
DEVICE_ACTIVE = bool( result[0] )
DEVICE_ID = int( result[1] )
print( DEVICE_ID, DEVICE_ACTIVE )
if ( DEVICE_ACTIVE ):
#print( DEVICE_ID, DEVICE_ACTIVE )
cursorupdate = cnx.cursor()
query = ("UPDATE devices SET value = 1 WHERE d_id = "+DEVICE_ID+";")
cursorupdate.execute(query)
results_devices =cursorupdate.fetchall()
cursorupdate.close()
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
for result in results_devices:
#print("* * * * * *")
DEVICE_PIN = int( result[2] )
DEVICE_VALUE = int( result[3] )
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
#print( DEVICE_PIN, DEVICE_VALUE )
cnx.close()
| apache-2.0 | Python |
47e470f9c3cdf806fb7190b71447275a9f7a772e | test in base_test adjusted to tuples for parameters | aipescience/uws-client | uws/UWS/tests/test_base.py | uws/UWS/tests/test_base.py | # -*- coding: utf-8 -*-
import unittest
from uws import UWS
class BaseTest(unittest.TestCase):
def testValidateAndParseFilter(self):
filters = {
'phases': ['COMPLETED', 'PENDING']
}
params = UWS.base.BaseUWSClient(None)._validate_and_parse_filters(filters)
self.assertEqual(params, [('PHASE','COMPLETED'), ('PHASE','PENDING')])
def testValidateAndParseFilterInvalidPhase(self):
filters = {
'phases': ['FOO', 'PENDING']
}
self.assertRaises(
UWS.UWSError,
UWS.base.BaseUWSClient(None)._validate_and_parse_filters,
filters
)
| # -*- coding: utf-8 -*-
import unittest
from uws import UWS
class BaseTest(unittest.TestCase):
def testValidateAndParseFilter(self):
filters = {
'phases': ['COMPLETED', 'PENDING']
}
params = UWS.base.BaseUWSClient(None)._validate_and_parse_filters(filters)
self.assertDictEqual(params, {'PHASE[]': ['COMPLETED', 'PENDING']})
def testValidateAndParseFilterInvalidPhase(self):
filters = {
'phases': ['FOO', 'PENDING']
}
self.assertRaises(
UWS.UWSError,
UWS.base.BaseUWSClient(None)._validate_and_parse_filters,
filters
)
| apache-2.0 | Python |
85905353b23ba4d6bec8fbbd37546ae2849967d9 | Update puush.py | sgoo/puush-linux | src/puush.py | src/puush.py | import config
import time
import multipart
import StringIO
# from gi.repository import Gtk, Gdk
import gtk
import os
import pynotify
NO_INTERNET = False
SERVER = 'puush.me'
API_END_POINT = '/api/tb'
FORMAT = 'png'
NOTIFY_TIMEOUT = 10
def screenshot(x, y, w, h):
screenshot = gtk.gdk.Pixbuf.get_from_drawable(gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, w, h),
gtk.gdk.get_default_root_window(),
gtk.gdk.colormap_get_system(),
x, y, 0, 0, w, h)
_postSS(screenshot)
def _postSS(screenshot):
fileName = time.strftime('ss (%Y-%m-%d at %H.%M.%S).' + FORMAT)
# split the apiKey for the basicAuth
config.apiKey = config.apiKey.lower()
l = int(len(config.apiKey)/2)
basicAuth = (config.apiKey[:l], config.apiKey[l:])
# save file into buf
picBuf = StringIO.StringIO()
screenshot.save_to_callback(_saveToBuf, FORMAT, {}, {'buf' :picBuf})
# build file list
fileList = [('media', fileName, picBuf.getvalue())]
if NO_INTERNET:
link = "<mediaurl>http://puu.sh/2ES4oa.png</mediaurl>"
else:
link = multipart.post_multipart(SERVER, API_END_POINT, files=fileList, basicAuth=basicAuth)
print link
# link looks like "<mediaurl>http://puu.sh/2ES4o.png</mediaurl>"
# strip open and close tags
_notify(link[10:len(link) - 11])
def _notify(link):
clip = gtk.clipboard_get ('CLIPBOARD')
clip.set_text(link, -1)
clip.store()
if pynotify.init("puush"):
uri = "file://" + os.path.dirname(__file__) + '/icon.png'
n = pynotify.Notification("Puush completed", link, uri)
n.show()
time.sleep(NOTIFY_TIMEOUT)
n.close()
else:
print "Error starting pynotify"
def _saveToBuf(buf, d):
d['buf'].write(buf)
| import config
import time
import multipart
import StringIO
# from gi.repository import Gtk, Gdk
import gtk
import os
import pynotify
NO_INTERNET = False
SERVER = 'puush.me'
API_END_POINT = '/api/tb'
FORMAT = 'png'
NOTIFY_TIMEOUT = 10
def screenshot(x, y, w, h):
screenshot = gtk.gdk.Pixbuf.get_from_drawable(gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, w, h),
gtk.gdk.get_default_root_window(),
gtk.gdk.colormap_get_system(),
x, y, 0, 0, w, h)
_postSS(screenshot)
def _postSS(screenshot):
fileName = time.strftime('ss (%Y-%m-%d at %H.%M.%S).' + FORMAT)
# split the apiKey for the basicAuth
config.apiKey = config.apiKey.lower()
l = int(len(config.apiKey)/2)
basicAuth = (config.apiKey[:l], config.apiKey[l:])
# save file into buf
picBuf = StringIO.StringIO()
screenshot.save_to_callback(_saveToBuf, FORMAT, {}, {'buf' :picBuf})
# build file list
fileList = [('media', fileName, picBuf.getvalue())]
if NO_INTERNET:
link = "<mediaurl>http://puu.sh/2ES4oa.png</mediaurl>"
else:
link = multipart.post_multipart(SERVER, API_END_POINT, files=fileList, basicAuth=basicAuth)
print link
_notify(link[10:len(link) - 11])
def _notify(link):
# link looks like "<mediaurl>http://puu.sh/2ES4o.png</mediaurl>"
# strip open and close tags
clip = gtk.clipboard_get ('CLIPBOARD')
clip.set_text(link, -1)
clip.store()
if pynotify.init("puush"):
uri = "file://" + os.path.dirname(__file__) + '/icon.png'
n = pynotify.Notification("Puush completed", link, uri)
n.show()
time.sleep(NOTIFY_TIMEOUT)
n.close()
else:
print "Error starting pynotify"
def _saveToBuf(buf, d):
d['buf'].write(buf)
| mit | Python |
24217196f4516e198155b843d807d02b4911db6c | Include an external_version property in the coverity.hpi.VERSION file created by build.py | jenkinsci/coverity-plugin,jenkinsci/coverity-plugin,jenkinsci/coverity-plugin,jenkinsci/coverity-plugin | build.py | build.py | #/*******************************************************************************
# * Copyright (c) 2016 Synopsys, Inc
# * All rights reserved. This program and the accompanying materials
# * are made available under the terms of the Eclipse Public License v1.0
# * which accompanies this distribution, and is available at
# * http://www.eclipse.org/legal/epl-v10.html
# *
# * Contributors:
# * Synopsys, Inc - initial implementation and documentation
# *******************************************************************************/
import sys
import subprocess
import re
import json
import shutil
if __name__ == "__main__":
# Check to make sure we have the correct number of arguments
if len(sys.argv) != 4:
print "Incorrect number of arguments given. Build.py takes three arguments, first is the version number, second is the build number $BUILD_NUMBER and third is build id $BUILD_ID"
sys.exit(-1)
# Save version, build number and id that was passed in from jenkins
version = sys.argv[1]
build_number = sys.argv[2]
build_id = sys.argv[3]
# git log for the current commit id hash
output = subprocess.Popen("git log --pretty=format:'%H' -n 1", stdout=subprocess.PIPE, shell=True)
commit_id = output.stdout.read()
# Generate the json output text
json_output = json.dumps({ "commit_id" : commit_id.strip(), "build_number" : build_number, "build_id" : build_id, "external_version" : version }, indent=4)
# Run the typical build for jenkins
subprocess.check_call("mvn clean install", shell=True)
# write the version output file
version_file = open("./target/coverity.hpi.VERSION","w")
version_file.write(json_output)
# move the .hpi file to a versioned file
shutil.move("./target/coverity.hpi", "./target/coverity-{0}.hpi".format(version))
| #/*******************************************************************************
# * Copyright (c) 2016 Synopsys, Inc
# * All rights reserved. This program and the accompanying materials
# * are made available under the terms of the Eclipse Public License v1.0
# * which accompanies this distribution, and is available at
# * http://www.eclipse.org/legal/epl-v10.html
# *
# * Contributors:
# * Synopsys, Inc - initial implementation and documentation
# *******************************************************************************/
import sys
import subprocess
import re
import json
import shutil
if __name__ == "__main__":
# Check to make sure we have the correct number of arguments
if len(sys.argv) != 4:
print "Incorrect number of arguments given. Build.py takes three arguments, first is the version number, second is the build number $BUILD_NUMBER and third is build id $BUILD_ID"
sys.exit(-1)
# Save version, build number and id that was passed in from jenkins
version = sys.argv[1]
build_number = sys.argv[2]
build_id = sys.argv[3]
# git log for the current commit id hash
output = subprocess.Popen("git log --pretty=format:'%H' -n 1", stdout=subprocess.PIPE, shell=True)
commit_id = output.stdout.read()
# Generate the json output text
json_output = json.dumps({ "commit_id" : commit_id.strip(), "build_number" : build_number, "build_id" : build_id }, indent=4)
# Run the typical build for jenkins
subprocess.check_call("mvn clean install", shell=True)
# write the version output file
version_file = open("./target/coverity.hpi.VERSION","w")
version_file.write(json_output)
# move the .hpi file to a versioned file
shutil.move("./target/coverity.hpi", "./target/coverity-{0}.hpi".format(version))
| epl-1.0 | Python |
4c2c86f77c457611d40b39ba36637f70974977f0 | fix flake8 | Caleydo/pathfinder_graph,Caleydo/pathfinder_graph,Caleydo/pathfinder_graph | build.py | build.py | import shutil
from codecs import open
import json
__author__ = 'Samuel Gratzl'
def _git_head(cwd):
import subprocess
try:
output = subprocess.check_output(['git', 'rev-parse', '--verify', 'HEAD'], cwd=cwd)
return output.strip()
except subprocess.CalledProcessError:
return 'error'
def _resolve_plugin(repo, version):
import os.path
if os.path.isdir('.git') and repo:
if repo.endswith('.git'):
repo = repo[0:-4]
return repo + '/commit/' + _git_head('.')
# not a git repo
return version
def to_version(v):
import datetime
now = datetime.datetime.utcnow()
return v.replace('SNAPSHOT', now.strftime('%Y%m%d-%H%M%S'))
with open('package.json', 'r', encoding='utf-8') as f:
pkg = json.load(f)
name = pkg['name']
version = to_version(pkg['version'])
resolved = _resolve_plugin(pkg.get('repository', {}).get('url'), version)
# copy source code
shutil.rmtree('build/source/' + name, ignore_errors=True)
shutil.copytree(name, 'build/source/' + name, symlinks=False, ignore=shutil.ignore_patterns('*.pyc'))
# create buildInfo.json
build_info = dict(name=name, version=version, resolved=resolved, description=pkg['description'],
homepage=pkg.get('homepage'), repository=pkg.get('repository', {}).get('url'))
# TODO create build Info
with open('build/source/' + name + '/buildInfo.json', 'w', encoding='utf-8') as f:
json.dump(build_info, f, indent=2)
| import shutil
from codecs import open
import json
__author__ = 'Samuel Gratzl'
def _git_head(cwd):
import subprocess
try:
output = subprocess.check_output(['git', 'rev-parse', '--verify', 'HEAD'], cwd=cwd)
return output.strip()
except subprocess.CalledProcessError:
return 'error'
def _resolve_plugin(repo, version):
import os.path
if os.path.isdir('.git') and repo:
if repo.endswith('.git'):
repo = repo[0:-4]
return repo + '/commit/' + _git_head('.')
# not a git repo
return version
def to_version(v):
import datetime
now = datetime.datetime.utcnow()
return v.replace('SNAPSHOT', now.strftime('%Y%m%d-%H%M%S'))
with open('package.json', 'r', encoding='utf-8') as f:
pkg = json.load(f)
name = pkg['name']
version = to_version(pkg['version'])
resolved = _resolve_plugin(pkg.get('repository', {}).get('url'), version)
# copy source code
shutil.rmtree('build/source/' + name, ignore_errors=True)
shutil.copytree(name, 'build/source/' + name, symlinks=False, ignore=shutil.ignore_patterns('*.pyc'))
# create buildInfo.json
build_info = dict(name=name, version=version, resolved=resolved, description=pkg['description'],
homepage=pkg.get('homepage'), repository=pkg.get('repository', {}).get('url'))
# TODO create build Info
with open('build/source/'+ name +'/buildInfo.json', 'w', encoding='utf-8') as f:
json.dump(build_info, f, indent=2)
| bsd-3-clause | Python |
4a2144ce2c01c5675a7389d5600af4d96462e6d7 | edit ... | ryanrhymes/scandex | cache.py | cache.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Cache abstraction.
#
# Liang Wang @ Computer Lab, Cambridge University
# 2015.06.15
import time
from config import conf, logger
class Cache():
def __init__(self):
"""Init the cache."""
self._root = conf['image_dir']
self._quota = conf['cache_quota']
self._cache = {}
pass
def add(self, sid):
"""Add a new item to cache."""
if sid not in self._cache:
self._cache[sid] = {}
self._cache[sid]['size'] = 0 # fix
self._cache[sid]['path'] = '' # fix
# dumping the image ... not done yet ...
self.touch(sid)
pass
def remove(self, sid):
"""Remove an item from cache."""
if sid in self._cache:
del self._cache[sid]
# delete the image file, not done yet ...
pass
def touch(self, sid):
"""Update the timestamp of the content using the current time."""
self._cache[sid]['timestamp'] = time.time()
pass
def is_full(self):
"""Check if the cache is full."""
used = sum([ v['size'] for v in self._cache.values() ])
return used >= self._quota
pass
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Cache abstraction.
#
# Liang Wang @ Computer Lab, Cambridge University
# 2015.06.15
import time
from config import conf, logger
class Cache():
def __init__(self):
"""Init the cache."""
self._root = conf['image_dir']
self._quota = conf['cache_quota']
self._cache = {}
pass
def add(self, sid):
"""Add a new item to cache."""
if sid not in self._cache:
self._cache[sid] = {}
self._cache[sid]['size'] = 0 # fix
# dumping the image ... not done yet ...
self.touch(sid)
pass
def remove(self, sid):
"""Remove an item from cache."""
if sid in self._cache:
del self._cache[sid]
# delete the image file, not done yet ...
pass
def touch(self, sid):
"""Update the timestamp of the content using the current time."""
self._cache[sid]['timestamp'] = time.time()
pass
def is_full(self):
"""Check if the cache is full."""
used = sum([ v['size'] for v in self._cache.values() ])
return used >= self._quota
pass
| mit | Python |
97f53c98a2371d4d4d93b946868f3396a124a737 | bump 0.1.103 | theonion/djes | djes/__init__.py | djes/__init__.py | import djes.signals # noqa
__version__ = "0.1.103"
default_app_config = "djes.apps.DJESConfig"
| import djes.signals # noqa
__version__ = "0.1.102"
default_app_config = "djes.apps.DJESConfig"
| mit | Python |
f0e0ee6f4f8e34cf22213e827ba5993a1b3b91cb | Rename cli description | ekonstantinidis/pypiup | pypiup/cli.py | pypiup/cli.py | import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPI Up-to-date\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| bsd-2-clause | Python |
56c760c9c38c0acf40435d52396dcde9222ab4c7 | fix all sections are below the threshold case | F-Tag/python-vad | pyvad/trim.py | pyvad/trim.py |
import numpy as np
from .vad import vad
def trim(data, fs, fs_vad=16000, hoplength=30, vad_mode=0, thr = 0.015):
""" Voice activity detection.
Trim leading and trailing silence from an audio signal by using vad.
Parameters
----------
data : ndarray
numpy array of mono (1 ch) speech data.
1-d or 2-d, if 2-d, shape must be (1, time_length) or (time_length, 1).
if data type is int, -32768 < data < 32767.
if data type is float, -1 < data < 1.
fs : int
Sampling frequency of data.
fs_vad : int, optional
Sampling frequency for webrtcvad.
fs_vad must be 8000, 16000, 32000 or 48000.
Default is 16000.
hoplength : int, optional
Step size[milli second].
hoplength must be 10, 20, or 30.
Default is 0.1.
vad_mode : int, optional
set vad aggressiveness.
As vad_mode increases, it becomes more aggressive.
vad_mode must be 0, 1, 2 or 3.
Default is 0.
Returns
-------
trimed_data : ndarray
trimed_data. trimed input data.
If voice activity can't be detected, return None.
"""
vact = vad(data, fs, fs_vad, hoplength, vad_mode)
vact_diff = np.diff(vact).astype('int')
start_i = np.where(vact_diff == 1)[0]
end_i = np.where(vact_diff == -1)[0]
if len(start_i) == 0 and len(end_i) == 0:
return None
if len(start_i) < 1:
start_i = np.hstack((end_i, 0))
if len(end_i) < 1:
end_i = np.hstack((end_i, len(vact)-1))
if end_i[0] <= start_i[0]:
start_i = np.hstack((0,start_i))
if len(start_i) > len(end_i):
end_i = np.hstack((end_i, len(vact)-1))
thr_ind=[]
for i, (s, e) in enumerate(zip(start_i, end_i)):
power = np.mean(data[s:e]**2)**0.5
if power > thr:
thr_ind.append(i)
if len(thr_ind) == 0:
return None
sec = (start_i[thr_ind[0]], end_i[thr_ind[-1]])
return data[sec[0]:sec[1]] |
import numpy as np
from .vad import vad
def trim(data, fs, fs_vad=16000, hoplength=30, vad_mode=0, thr = 0.015):
""" Voice activity detection.
Trim leading and trailing silence from an audio signal by using vad.
Parameters
----------
data : ndarray
numpy array of mono (1 ch) speech data.
1-d or 2-d, if 2-d, shape must be (1, time_length) or (time_length, 1).
if data type is int, -32768 < data < 32767.
if data type is float, -1 < data < 1.
fs : int
Sampling frequency of data.
fs_vad : int, optional
Sampling frequency for webrtcvad.
fs_vad must be 8000, 16000, 32000 or 48000.
Default is 16000.
hoplength : int, optional
Step size[milli second].
hoplength must be 10, 20, or 30.
Default is 0.1.
vad_mode : int, optional
set vad aggressiveness.
As vad_mode increases, it becomes more aggressive.
vad_mode must be 0, 1, 2 or 3.
Default is 0.
Returns
-------
trimed_data : ndarray
trimed_data. trimed input data.
If voice activity can't be detected, return None.
"""
vact = vad(data, fs, fs_vad, hoplength, vad_mode)
vact_diff = np.diff(vact).astype('int')
start_i = np.where(vact_diff == 1)[0]
end_i = np.where(vact_diff == -1)[0]
if len(start_i) == 0 and len(end_i) == 0:
return None
if len(start_i) < 1:
start_i = np.hstack((end_i, 0))
if len(end_i) < 1:
end_i = np.hstack((end_i, len(vact)-1))
if end_i[0] <= start_i[0]:
start_i = np.hstack((0,start_i))
if len(start_i) > len(end_i):
end_i = np.hstack((end_i, len(vact)-1))
thr_ind=[]
for i, (s, e) in enumerate(zip(start_i, end_i)):
power = np.mean(data[s:e]**2)**0.5
if power > thr:
thr_ind.append(i)
sec = (start_i[thr_ind[0]], end_i[thr_ind[-1]])
return data[sec[0]:sec[1]] | mit | Python |
8747219974bce5efbe1976910ce4860ada919343 | set default lines to blank strings | pigletto/django-postal,mthornhill/django-postal,mthornhill/django-postal,pigletto/django-postal | src/postal/models.py | src/postal/models.py | """
Model of Postal Address, could possibly use some ideas from
http://www.djangosnippets.org/snippets/912/ in the future
"""
# django imports
from django.db import models
from django.utils.translation import ugettext_lazy as _
# other imports
from countries.models import Country
class PostalAddress(models.Model):
line1 = models.CharField(_("Line 1"), max_length=100, default=u'', blank=True, null=True)
line2 = models.CharField(_("Line 2"), max_length=100, default=u'', blank=True, null=True)
line3 = models.CharField(_("Line 3"), max_length=100, default=u'', blank=True, null=True)
line4 = models.CharField(_("Line 4"), max_length=100, default=u'', blank=True, null=True)
line5 = models.CharField(_("Line 5"), max_length=100, default=u'', blank=True, null=True)
country = models.ForeignKey(Country, verbose_name=_("Country"), blank=True, null=True)
def __unicode__(self):
return "%s, %s, %s, %s, %s, %s" % (self.line1, self.line2, self.line3, self.line4, self.line5, self.country)
class Meta:
verbose_name_plural = "Postal Addresses"
unique_together = ("line1", "line2", "line3", "line4", "line5", "country")
| """
Model of Postal Address, could possibly use some ideas from
http://www.djangosnippets.org/snippets/912/ in the future
"""
# django imports
from django.db import models
from django.utils.translation import ugettext_lazy as _
# other imports
from countries.models import Country
class PostalAddress(models.Model):
line1 = models.CharField(_("Line 1"), max_length=100, blank=True, null=True)
line2 = models.CharField(_("Line 2"), max_length=100, blank=True, null=True)
line3 = models.CharField(_("Line 3"), max_length=100, blank=True, null=True)
line4 = models.CharField(_("Line 4"), max_length=100, blank=True, null=True)
line5 = models.CharField(_("Line 5"), max_length=100, blank=True, null=True)
country = models.ForeignKey(Country, verbose_name=_("Country"), blank=True, null=True)
def __unicode__(self):
return "%s, %s, %s, %s, %s, %s" % (self.line1, self.line2, self.line3, self.line4, self.line5, self.country)
class Meta:
verbose_name_plural = "Postal Addresses"
unique_together = ("line1", "line2", "line3", "line4", "line5", "country")
| mit | Python |
dcd4919958840c2629625d4c36ef25f0c8657e07 | Fix tenant deletion | opennode/nodeconductor-saltstack | src/nodeconductor_saltstack/exchange/handlers.py | src/nodeconductor_saltstack/exchange/handlers.py | from nodeconductor.quotas.models import Quota
def increase_exchange_storage_usage_on_tenant_creation(sender, instance=None, created=False, **kwargs):
if created:
add_quota = instance.service_project_link.add_quota_usage
add_quota(instance.service_project_link.Quotas.exchange_storage, instance.mailbox_size * instance.max_users)
def decrease_exchange_storage_usage_on_tenant_deletion(sender, instance=None, **kwargs):
try:
add_quota = instance.service_project_link.add_quota_usage
add_quota(instance.service_project_link.Quotas.exchange_storage, -instance.mailbox_size * instance.max_users)
except Quota.DoesNotExist:
# in case of cascade deletion tenant will not have quotas
pass
def increase_global_mailbox_size_usage_on_user_creation_or_modification(sender, instance=None, created=False, **kwargs):
if created:
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size, instance.mailbox_size)
else:
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size,
instance.mailbox_size - instance.tracker.previous('mailbox_size'))
def decrease_global_mailbox_size_usage_on_user_deletion(sender, instance=None, **kwargs):
try:
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size, -instance.mailbox_size)
except Quota.DoesNotExist:
# in case of cascade deletion tenant will not have quotas
pass
| def increase_exchange_storage_usage_on_tenant_creation(sender, instance=None, created=False, **kwargs):
if created:
add_quota = instance.service_project_link.add_quota_usage
add_quota(instance.service_project_link.Quotas.exchange_storage, instance.mailbox_size * instance.max_users)
def decrease_exchange_storage_usage_on_tenant_deletion(sender, instance=None, **kwargs):
add_quota = instance.service_project_link.add_quota_usage
add_quota(instance.service_project_link.Quotas.exchange_storage, -instance.mailbox_size * instance.max_users)
def increase_global_mailbox_size_usage_on_user_creation_or_modification(sender, instance=None, created=False, **kwargs):
if created:
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size, instance.mailbox_size)
else:
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size,
instance.mailbox_size - instance.tracker.previous('mailbox_size'))
def decrease_global_mailbox_size_usage_on_user_deletion(sender, instance=None, **kwargs):
instance.tenant.add_quota_usage(instance.tenant.Quotas.global_mailbox_size, -instance.mailbox_size)
| mit | Python |
5c809c1b2607e00e49d472be54e14a672c62d022 | Fix python import | DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix | python/test.py | python/test.py | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
for i in range(100):
ledMatrix.setPixel(randint(0, width), randint(0, height), randint(0, 255), randint(0, 255), randint(0, 255))
time.sleep(0.05)
time.sleep(5)
ledMatrix.Clear() | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randInt
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
for i in range(100):
ledMatrix.setPixel(randInt(0, width), randInt(0, height), randInt(0, 255), randInt(0, 255), randInt(0, 255))
time.sleep(0.05)
time.sleep(5)
ledMatrix.Clear() | mit | Python |
b2c53260ae73b01b21452852b932e1febda66746 | Improve Bluetooth serial reliability. | pebble/libpebble2 | libpebble2/communication/transports/serial.py | libpebble2/communication/transports/serial.py | from __future__ import absolute_import
__author__ = 'katharine'
import errno
import serial
import struct
from . import BaseTransport, MessageTargetWatch
from libpebble2.exceptions import ConnectionError
class SerialTransport(BaseTransport):
"""
Represents a direct connection to a physical Pebble paired to the computer via Bluetooth serial.
This transport expects to be given a device file over which it can communicate with the watch via Bluetooth.
.. warning::
Using this transport may cause occasional kernel panics on some versions of OS X.
:param device: The path to the device file (on OS X, often of the form ``/dev/cu.PebbleTimeXXXX-SerialPo`` or
``/dev/cu.PebbleXXXX-SerialPortSe``).
:type device: str
"""
must_initialise = True
def __init__(self, device):
self.device = device
self.connection = None
def connect(self):
try:
self.connection = serial.Serial(self.device, 115200)
except OSError as e:
if e.errno == errno.EBUSY:
raise ConnectionError("Could not connect to Pebble.")
@property
def connected(self):
return self.connection is not None and self.connection.isOpen()
def read_packet(self):
try:
data = self.connection.read(2)
except serial.SerialException:
self.connection.close()
raise ConnectionError("Disconnected from watch.")
if len(data) < 2:
raise ConnectionError("Got malformed packet.")
length, = struct.unpack('!H', data)
data += self.connection.read(length + 2)
return MessageTargetWatch(), data
def send_packet(self, message, target=MessageTargetWatch()):
assert isinstance(target, MessageTargetWatch)
self.connection.write(message)
| from __future__ import absolute_import
__author__ = 'katharine'
import serial
import struct
from . import BaseTransport, MessageTargetWatch
from libpebble2.exceptions import ConnectionError
class SerialTransport(BaseTransport):
"""
Represents a direct connection to a physical Pebble paired to the computer via Bluetooth serial.
This transport expects to be given a device file over which it can communicate with the watch via Bluetooth.
.. warning::
Using this transport may cause occasional kernel panics on some versions of OS X.
:param device: The path to the device file (on OS X, often of the form ``/dev/cu.PebbleTimeXXXX-SerialPo`` or
``/dev/cu.PebbleXXXX-SerialPortSe``).
:type device: str
"""
must_initialise = True
def __init__(self, device):
self.device = device
self.connection = None
def connect(self):
self.connection = serial.Serial(self.device, 115200, timeout=5)
@property
def connected(self):
return self.connection is not None and self.connection.isOpen()
def read_packet(self):
data = self.connection.read(2)
if len(data) < 2:
raise ConnectionError("Got malformed packet.")
length, = struct.unpack('!H', data)
data += self.connection.read(length + 2)
return MessageTargetWatch(), data
def send_packet(self, message, target=MessageTargetWatch()):
assert isinstance(target, MessageTargetWatch)
self.connection.write(message)
| mit | Python |
254ea51140c1c7ede701f4844bc8c297edabab84 | Add linked list class | derekmpham/interview-prep,derekmpham/interview-prep | linked-list/reverse-linked-list.py | linked-list/reverse-linked-list.py | # reverse singly linked list
class Node(object): # define constructor
def __init__(self, data):
self.data = data
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
| # reverse singly linked list
class Node(object): # define constructor
def __init__(self, data):
self.data = data
self.next = None
| mit | Python |
b52ff68324bef3883d8c6e176ccd646d65c62b46 | Add repo score calculation | RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper | score_repo.py | score_repo.py | #!/usr/bin/env python3
import argparse
import importlib
import json
import sys
def loadAttributePlugins(attributes):
for attribute in attributes:
try:
attribute['implementation'] = importlib.import_module("attributes.{0}.main".format(attribute['name']))
except ImportError:
print("Failed to load the {0} attribute.".format(attribute['name']))
def processConfiguration(config_file):
try:
config = json.load(config_file)
return config
except:
print("Malformatted or missing configuration.")
sys.exit(2)
def processArguments():
parser = argparse.ArgumentParser(description='Calculate the score of a repository.')
parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='config.json', dest='config_file', help='Path to the configuration file.')
parser.add_argument('repository_id', type=int, nargs=1, help='Identifier for a repository as it appears in the GHTorrent database.')
if len(sys.argv) is 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def main():
args = processArguments()
config = processConfiguration(args.config_file)
attributes = config['attributes']
loadAttributePlugins(attributes)
score = 0
for attribute in attributes:
result = attribute['implementation'].run(metadata, repo_path, attribute['options'])
score += result * attribute['weight']
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
import argparse
import importlib
import json
import sys
def loadAttributePlugins(attributes):
for attribute in attributes:
try:
attribute['implementation'] = importlib.import_module("attributes.{0}.main".format(attribute['name']))
except ImportError:
print("Failed to load the {0} attribute.".format(attribute['name']))
def processConfiguration(config_file):
try:
config = json.load(config_file)
return config
except:
print("Malformatted or missing configuration.")
sys.exit(2)
def processArguments():
parser = argparse.ArgumentParser(description='Calculate the score of a repository.')
parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='config.json', dest='config_file', help='Path to the configuration file.')
parser.add_argument('repository_id', type=int, nargs=1, help='Identifier for a repository as it appears in the GHTorrent database.')
if len(sys.argv) is 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def main():
args = processArguments()
config = processConfiguration(args.config_file)
attributes = config['attributes']
loadAttributePlugins(attributes)
for attribute in attributes:
result = attribute['implementation'].run(metadata, repo_path, config[attribute]['options'])
if __name__ == '__main__':
main()
| apache-2.0 | Python |
4858af4e946919b80de6e67204f5a2925bc85800 | Fix backward compatible functions in scrapy.log | songfj/scrapy,agusc/scrapy,cleydson/scrapy,WilliamKinaan/scrapy,jdemaeyer/scrapy,zackslash/scrapy,GregoryVigoTorres/scrapy,Geeglee/scrapy,lacrazyboy/scrapy,foromer4/scrapy,olorz/scrapy,kimimj/scrapy,agreen/scrapy,codebhendi/scrapy,rootAvish/scrapy,umrashrf/scrapy,farhan0581/scrapy,dracony/scrapy,Bourneer/scrapy,nowopen/scrapy,dangra/scrapy,amboxer21/scrapy,umrashrf/scrapy,jeffreyjinfeng/scrapy,johnardavies/scrapy,1yvT0s/scrapy,pawelmhm/scrapy,cursesun/scrapy,JacobStevenR/scrapy,raphaelfruneaux/scrapy,wenyu1001/scrapy,cleydson/scrapy,yidongliu/scrapy,Adai0808/scrapy-1,Chenmxs/scrapy,godfreyy/scrapy,kashyap32/scrapy,zhangtao11/scrapy,darkrho/scrapy-scrapy,wujuguang/scrapy,wzyuliyang/scrapy,Zephor5/scrapy,bmess/scrapy,eLRuLL/scrapy,olorz/scrapy,redapple/scrapy,profjrr/scrapy,barraponto/scrapy,AaronTao1990/scrapy,nowopen/scrapy,starrify/scrapy,Geeglee/scrapy,Ryezhang/scrapy,URXtech/scrapy,KublaikhanGeek/scrapy,dracony/scrapy,elacuesta/scrapy,webmakin/scrapy,livepy/scrapy,pablohoffman/scrapy,cursesun/scrapy,redapple/scrapy,codebhendi/scrapy,zackslash/scrapy,amboxer21/scrapy,OpenWhere/scrapy,mlyundin/scrapy,johnardavies/scrapy,pombredanne/scrapy,irwinlove/scrapy,hyrole/scrapy,barraponto/scrapy,JacobStevenR/scrapy,huoxudong125/scrapy,Lucifer-Kim/scrapy,rootAvish/scrapy,wangjun/scrapy,pablohoffman/scrapy,ArturGaspar/scrapy,hectoruelo/scrapy,xiao26/scrapy,smaty1/scrapy,nfunato/scrapy,agreen/scrapy,fqul/scrapy,liyy7/scrapy,fqul/scrapy,z-fork/scrapy,Ryezhang/scrapy,csalazar/scrapy,rklabs/scrapy,fontenele/scrapy,wujuguang/scrapy,jc0n/scrapy,kazitanvirahsan/scrapy,pombredanne/scrapy,Allianzcortex/scrapy,tagatac/scrapy,agreen/scrapy,heamon7/scrapy,carlosp420/scrapy,mgedmin/scrapy,dgillis/scrapy,WilliamKinaan/scrapy,wangjun/scrapy,IvanGavran/scrapy,rklabs/scrapy,ssteo/scrapy,eliasdorneles/scrapy,olafdietsche/scrapy,Lucifer-Kim/scrapy,Digenis/scrapy,github-account-because-they-want-it/scrapy,ENjOyAbLE1991/scrapy,rolando-contrib/scrapy,pawelmhm/scrapy,hwsyy/scrapy,ylcolala/scrapy,github-account-because-they-want-it/scrapy,nikgr95/scrapy,curita/scrapy,z-fork/scrapy,stenskjaer/scrapy,TarasRudnyk/scrapy,Zephor5/scrapy,carlosp420/scrapy,wenyu1001/scrapy,Allianzcortex/scrapy,kalessin/scrapy,yarikoptic/scrapy,Parlin-Galanodel/scrapy,wangjun/scrapy,bmess/scrapy,shaform/scrapy,legendtkl/scrapy,darkrho/scrapy-scrapy,raphaelfruneaux/scrapy,smaty1/scrapy,famorted/scrapy,umrashrf/scrapy,rolando/scrapy,hyrole/scrapy,legendtkl/scrapy,profjrr/scrapy,famorted/scrapy,csalazar/scrapy,tagatac/scrapy,heamon7/scrapy,pombredanne/scrapy,mlyundin/scrapy,Chenmxs/scrapy,snowdream1314/scrapy,jdemaeyer/scrapy,Digenis/scrapy,rolando-contrib/scrapy,1yvT0s/scrapy,z-fork/scrapy,yusofm/scrapy,kmike/scrapy,hwsyy/scrapy,nfunato/scrapy,Slater-Victoroff/scrapy,AaronTao1990/scrapy,scorphus/scrapy,eLRuLL/scrapy,TarasRudnyk/scrapy,heamon7/scrapy,finfish/scrapy,rahulsharma1991/scrapy,jeffreyjinfeng/scrapy,YeelerG/scrapy,fafaman/scrapy,dacjames/scrapy,starrify/scrapy,Cnfc19932/scrapy,wzyuliyang/scrapy,jiezhu2007/scrapy,nowopen/scrapy,hectoruelo/scrapy,csalazar/scrapy,jiezhu2007/scrapy,ssteo/scrapy,agusc/scrapy,xiao26/scrapy,KublaikhanGeek/scrapy,Djlavoy/scrapy,AaronTao1990/scrapy,eLRuLL/scrapy,Allianzcortex/scrapy,Parlin-Galanodel/scrapy,hbwzhsh/scrapy,tagatac/scrapy,IvanGavran/scrapy,kmike/scrapy,elacuesta/scrapy,fafaman/scrapy,aivarsk/scrapy,xiao26/scrapy,aivarsk/scrapy,olorz/scrapy,Timeship/scrapy,hectoruelo/scrapy,ssh-odoo/scrapy,farhan0581/scrapy,yusofm/scrapy,finfish/scrapy,coderabhishek/scrapy,foromer4/scrapy,huoxudong125/scrapy,elacuesta/scrapy,nguyenhongson03/scrapy,foromer4/scrapy,CodeJuan/scrapy,haiiiiiyun/scrapy,fpy171/scrapy,dangra/scrapy,rahulsharma1991/scrapy,dacjames/scrapy,rolando/scrapy,livepy/scrapy,elijah513/scrapy,redapple/scrapy,Preetwinder/scrapy,aivarsk/scrapy,dracony/scrapy,OpenWhere/scrapy,zjuwangg/scrapy,elijah513/scrapy,YeelerG/scrapy,OpenWhere/scrapy,elijah513/scrapy,w495/scrapy,huoxudong125/scrapy,nikgr95/scrapy,stenskjaer/scrapy,irwinlove/scrapy,zhangtao11/scrapy,Slater-Victoroff/scrapy,cyrixhero/scrapy,snowdream1314/scrapy,arush0311/scrapy,moraesnicol/scrapy,tntC4stl3/scrapy,haiiiiiyun/scrapy,pranjalpatil/scrapy,mlyundin/scrapy,Digenis/scrapy,crasker/scrapy,shaform/scrapy,Djlavoy/scrapy,Chenmxs/scrapy,curita/scrapy,taito/scrapy,Bourneer/scrapy,yusofm/scrapy,dacjames/scrapy,carlosp420/scrapy,kashyap32/scrapy,darkrho/scrapy-scrapy,bmess/scrapy,pranjalpatil/scrapy,hansenDise/scrapy,codebhendi/scrapy,agusc/scrapy,pranjalpatil/scrapy,cyrixhero/scrapy,kimimj/scrapy,kazitanvirahsan/scrapy,songfj/scrapy,ylcolala/scrapy,rklabs/scrapy,yarikoptic/scrapy,scorphus/scrapy,hansenDise/scrapy,Lucifer-Kim/scrapy,jdemaeyer/scrapy,jc0n/scrapy,nguyenhongson03/scrapy,CodeJuan/scrapy,hbwzhsh/scrapy,moraesnicol/scrapy,godfreyy/scrapy,kazitanvirahsan/scrapy,fqul/scrapy,zjuwangg/scrapy,zorojean/scrapy,dgillis/scrapy,Timeship/scrapy,Preetwinder/scrapy,ArturGaspar/scrapy,fontenele/scrapy,pfctdayelise/scrapy,kalessin/scrapy,godfreyy/scrapy,jeffreyjinfeng/scrapy,yidongliu/scrapy,amboxer21/scrapy,Preetwinder/scrapy,songfj/scrapy,KublaikhanGeek/scrapy,profjrr/scrapy,webmakin/scrapy,raphaelfruneaux/scrapy,liyy7/scrapy,coderabhishek/scrapy,zorojean/scrapy,GregoryVigoTorres/scrapy,Cnfc19932/scrapy,pfctdayelise/scrapy,Cnfc19932/scrapy,rolando/scrapy,Adai0808/scrapy-1,scrapy/scrapy,stenskjaer/scrapy,wenyu1001/scrapy,tliber/scrapy,moraesnicol/scrapy,Zephor5/scrapy,arush0311/scrapy,nfunato/scrapy,crasker/scrapy,yarikoptic/scrapy,github-account-because-they-want-it/scrapy,eliasdorneles/scrapy,URXtech/scrapy,kashyap32/scrapy,TarasRudnyk/scrapy,famorted/scrapy,w495/scrapy,ArturGaspar/scrapy,jiezhu2007/scrapy,Djlavoy/scrapy,tliber/scrapy,avtoritet/scrapy,Adai0808/scrapy-1,rolando-contrib/scrapy,dangra/scrapy,johnardavies/scrapy,nguyenhongson03/scrapy,barraponto/scrapy,zjuwangg/scrapy,ylcolala/scrapy,crasker/scrapy,jc0n/scrapy,shaform/scrapy,URXtech/scrapy,Parlin-Galanodel/scrapy,zhangtao11/scrapy,cyberplant/scrapy,scrapy/scrapy,Bourneer/scrapy,snowdream1314/scrapy,avtoritet/scrapy,pablohoffman/scrapy,scorphus/scrapy,cyberplant/scrapy,GregoryVigoTorres/scrapy,nikgr95/scrapy,hyrole/scrapy,Ryezhang/scrapy,smaty1/scrapy,kmike/scrapy,coderabhishek/scrapy,livepy/scrapy,lacrazyboy/scrapy,eliasdorneles/scrapy,w495/scrapy,cyrixhero/scrapy,olafdietsche/scrapy,cursesun/scrapy,zorojean/scrapy,kalessin/scrapy,Timeship/scrapy,legendtkl/scrapy,tntC4stl3/scrapy,WilliamKinaan/scrapy,farhan0581/scrapy,zackslash/scrapy,pfctdayelise/scrapy,fpy171/scrapy,hbwzhsh/scrapy,starrify/scrapy,rootAvish/scrapy,ssh-odoo/scrapy,Slater-Victoroff/scrapy,mgedmin/scrapy,hwsyy/scrapy,Geeglee/scrapy,irwinlove/scrapy,taito/scrapy,curita/scrapy,webmakin/scrapy,yidongliu/scrapy,mgedmin/scrapy,cleydson/scrapy,fontenele/scrapy,YeelerG/scrapy,fpy171/scrapy,CodeJuan/scrapy,IvanGavran/scrapy,kimimj/scrapy,rahulsharma1991/scrapy,taito/scrapy,arush0311/scrapy,cyberplant/scrapy,ssh-odoo/scrapy,finfish/scrapy,ENjOyAbLE1991/scrapy,hansenDise/scrapy,olafdietsche/scrapy,scrapy/scrapy,haiiiiiyun/scrapy,ENjOyAbLE1991/scrapy,JacobStevenR/scrapy,liyy7/scrapy,lacrazyboy/scrapy,fafaman/scrapy,1yvT0s/scrapy,dgillis/scrapy,wzyuliyang/scrapy,avtoritet/scrapy,wujuguang/scrapy,pawelmhm/scrapy,tliber/scrapy,tntC4stl3/scrapy,ssteo/scrapy | scrapy/log.py | scrapy/log.py | """
This module is kept to provide a helpful warning about its removal.
"""
import logging
import warnings
from twisted.python.failure import Failure
from scrapy.exceptions import ScrapyDeprecationWarning
logger = logging.getLogger(__name__)
warnings.warn("Module `scrapy.log` has been deprecated, Scrapy now relies on "
"the builtin Python library for logging. Read the updated "
"logging entry in the documentation to learn more.",
ScrapyDeprecationWarning, stacklevel=2)
# Imports kept for backwards-compatibility
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
SILENT = CRITICAL + 1
def msg(message=None, _level=logging.INFO, **kw):
warnings.warn('log.msg has been deprecated, create a python logger and '
'log through it instead',
ScrapyDeprecationWarning, stacklevel=2)
level = kw.pop('level', _level)
message = kw.pop('format', message)
# NOTE: logger.log doesn't handle well passing empty dictionaries with format
# arguments because of some weird use-case:
# https://hg.python.org/cpython/file/648dcafa7e5f/Lib/logging/__init__.py#l269
logger.log(level, message, *[kw] if kw else [])
def err(_stuff=None, _why=None, **kw):
warnings.warn('log.err has been deprecated, create a python logger and '
'use its error method instead',
ScrapyDeprecationWarning, stacklevel=2)
level = kw.pop('level', logging.ERROR)
failure = kw.pop('failure', _stuff) or Failure()
message = kw.pop('why', _why) or failure.value
logger.log(level, message, *[kw] if kw else [], extra={'failure': failure})
| """
This module is kept to provide a helpful warning about its removal.
"""
import logging
import warnings
from twisted.python.failure import Failure
from scrapy.exceptions import ScrapyDeprecationWarning
logger = logging.getLogger(__name__)
warnings.warn("Module `scrapy.log` has been deprecated, Scrapy now relies on "
"the builtin Python library for logging. Read the updated "
"logging entry in the documentation to learn more.",
ScrapyDeprecationWarning, stacklevel=2)
# Imports kept for backwards-compatibility
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
SILENT = CRITICAL + 1
def msg(message, _level=logging.INFO, **kw):
warnings.warn('log.msg has been deprecated, create a python logger and '
'log through it instead',
ScrapyDeprecationWarning, stacklevel=2)
level = kw.pop('level', _level)
logger.log(level, message, kw)
def err(_stuff=None, _why=None, **kw):
warnings.warn('log.err has been deprecated, create a python logger and '
'use its error method instead',
ScrapyDeprecationWarning, stacklevel=2)
level = kw.pop('level', logging.ERROR)
failure = kw.pop('failure', _stuff) or Failure()
message = kw.pop('why', _why) or failure.value
logger.log(level, message, kw, extra={'failure': failure})
| bsd-3-clause | Python |
975b1f2cb1be68d542f1f03f126451d6f8d6929e | Optimise AbstractXmlWriter _escape performance .. seems to improve robot run time by 3-4% | fiuba08/robotframework,waldenner/robotframework,fiuba08/robotframework,fiuba08/robotframework,waldenner/robotframework,waldenner/robotframework,ldtri0209/robotframework,ldtri0209/robotframework,ldtri0209/robotframework,waldenner/robotframework,waldenner/robotframework,fiuba08/robotframework,ldtri0209/robotframework,fiuba08/robotframework,ldtri0209/robotframework | src/robot/utils/abstractxmlwriter.py | src/robot/utils/abstractxmlwriter.py | # Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from unic import unic
_ILLEGAL_CHARS_IN_XML = u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e' \
+ u'\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\ufffe'
_ILLEGAL_CHARS_IN_XML_PATTERN = re.compile('['+_ILLEGAL_CHARS_IN_XML+']')
class AbstractXmlWriter:
def start(self, name, attributes={}, newline=True):
self._start(name, self._escape_attrs(attributes))
if newline:
self.content('\n')
def _start(self, name, attrs):
raise NotImplementedError
def _escape_attrs(self, attrs):
return dict((n, self._escape(v)) for n, v in attrs.items())
def _escape(self, content):
content = unic(content)
# Avoid bug http://ironpython.codeplex.com/workitem/29402
return _ILLEGAL_CHARS_IN_XML_PATTERN.sub('', content)
def content(self, content):
if content is not None:
self._content(self._escape(content))
def _content(self, content):
raise NotImplementedError
def end(self, name, newline=True):
self._end(name)
if newline:
self.content('\n')
def _end(self, name):
raise NotImplementedError
def element(self, name, content=None, attributes={}, newline=True):
self.start(name, attributes, newline=False)
self.content(content)
self.end(name, newline)
def close(self):
self._close()
self.closed = True
def _close(self):
self._writer.endDocument()
self._output.close()
| # Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unic import unic
_ILLEGAL_CHARS_IN_XML = u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e' \
+ u'\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\ufffe'
class AbstractXmlWriter:
def start(self, name, attributes={}, newline=True):
self._start(name, self._escape_attrs(attributes))
if newline:
self.content('\n')
def _start(self, name, attrs):
raise NotImplementedError
def _escape_attrs(self, attrs):
return dict((n, self._escape(v)) for n, v in attrs.items())
def _escape(self, content):
content = unic(content)
for char in _ILLEGAL_CHARS_IN_XML:
# Avoid bug http://ironpython.codeplex.com/workitem/29402
if char in content:
content = content.replace(char, '')
return content
def content(self, content):
if content is not None:
self._content(self._escape(content))
def _content(self, content):
raise NotImplementedError
def end(self, name, newline=True):
self._end(name)
if newline:
self.content('\n')
def _end(self, name):
raise NotImplementedError
def element(self, name, content=None, attributes={}, newline=True):
self.start(name, attributes, newline=False)
self.content(content)
self.end(name, newline)
def close(self):
self._close()
self.closed = True
def _close(self):
self._writer.endDocument()
self._output.close()
| apache-2.0 | Python |
606f151e2909d16436d5f2480dccf7a61fd3428f | add first_name and last_name to profile | podhub-io/website | podhub/website/models.py | podhub/website/models.py | from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.func import now
import sqlalchemy
import uuid
from . import db
class Base(db.Model):
id = db.Column(UUID, default=lambda: str(uuid.uuid4()), primary_key=True)
created_at = db.Column(db.DateTime(), default=now())
updated_at = db.Column(
sqlalchemy.DateTime(), default=now(), onupdate=now())
__mapper_args__ = {'order_by': sqlalchemy.desc('updated_at')}
class User(Base):
profile = db.relationship('Profile', backref='user', lazy='dynamic',
uselist=False)
def __init__(self, username, email):
self.username = username
self.email = email
class Profile(Base):
username = db.Column(db.String(80), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
display_name = db.Column(db.String(80), index=True, unique=True)
first_name = db.Column(db.String(80), index=True)
last_name = db.Column(db.String(80), index=True)
user_id = db.Column(UUID, db.ForeignKey('user.id'))
def __init__(self, username, email, first_name=None, last_name=None,
display_name=None):
self.username = username
self.email = email
if not display_name:
self.display_name = username
else:
self.display_name = display_name
self.first_name = first_name
self.last_name = last_name
| from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.func import now
import sqlalchemy
import uuid
from . import db
class Base(db.Model):
id = db.Column(UUID, default=lambda: str(uuid.uuid4()), primary_key=True)
created_at = db.Column(db.DateTime(), default=now())
updated_at = db.Column(
sqlalchemy.DateTime(), default=now(), onupdate=now())
__mapper_args__ = {'order_by': sqlalchemy.desc('updated_at')}
class User(Base):
profile = db.relationship('Profile', backref='user', lazy='dynamic',
uselist=False)
def __init__(self, username, email):
self.username = username
self.email = email
class Profile(Base):
username = db.Column(db.String(80), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
display_name = db.Column(db.String(80), index=True, unique=True)
user_id = db.Column(UUID, db.ForeignKey('user.id'))
def __init__(self, username, email, display_name=None):
self.username = username
self.email = email
if not display_name:
self.display_name = username
else:
self.display_name = display_name
| apache-2.0 | Python |
e0f9c113956078577e3dca420358a203c7f3ed95 | Remove the __main__ function in the script since its in the cli now | schae234/PonyTools | ponytools/cli/sortVCF.py | ponytools/cli/sortVCF.py | #!/usr/bin/env python3
import sys
import tempfile
from optparse import OptionParser
def log(message,*formatting):
print(message.format(*formatting),file=sys.stderr)
def sortVCF(args):
import sys,os
vcf_file = args.vcf
fasta_file = args.fasta
temp_dir="/tmp"
out = args.out
headers = list()
variants = list()
cur_byte = 0
chroms = list()
temps = dict()
log("Sorting {}",vcf_file)
# Get the chromosome order
with open(fasta_file,'r') as FASTA:
for line in FASTA:
if line.startswith('>'):
chrom,*info = line.strip().lstrip('>').split()
log("Found chromosome {}",chrom)
chroms.append(chrom)
temps[chrom] = tempfile.NamedTemporaryFile('w')
# Get headers and extract positions with file byte offsets
log("Reading in VCF: {}",vcf_file)
with open(vcf_file,'r') as VCF:
for i,line in enumerate(VCF):
if line.startswith("#"):
headers.append(line.strip())
else:
chrom,pos,*junk = line.split()
temps[chrom].write(line)
# close all temp files
for key,val in temps.items():
log("flushing tmp file: {}",key)
val.flush()
log("soring chroms")
with open(out,'w') as OUT:
# print headers
print("\n".join(headers),file=OUT)
for chrom in chroms:
# read in that chroms bullshit
with open(temps[chrom].name,'r') as CHROM:
variants = CHROM.readlines()
# sort by position
variants.sort(key=lambda x: int(x.split()[1]))
log("printing chrom {}",chrom)
print("".join(variants),file=OUT,end="")
temps[chrom].close()
| #!/usr/bin/env python3
import sys
import tempfile
from optparse import OptionParser
def log(message,*formatting):
print(message.format(*formatting),file=sys.stderr)
def sortVCF(args):
import sys,os
vcf_file = args.vcf
fasta_file = args.fasta
temp_dir="/tmp"
out = args.out
headers = list()
variants = list()
cur_byte = 0
chroms = list()
temps = dict()
log("Sorting {}",vcf_file)
# Get the chromosome order
with open(fasta_file,'r') as FASTA:
for line in FASTA:
if line.startswith('>'):
chrom,*info = line.strip().lstrip('>').split()
log("Found chromosome {}",chrom)
chroms.append(chrom)
temps[chrom] = tempfile.NamedTemporaryFile('w')
# Get headers and extract positions with file byte offsets
log("Reading in VCF: {}",vcf_file)
with open(vcf_file,'r') as VCF:
for i,line in enumerate(VCF):
if line.startswith("#"):
headers.append(line.strip())
else:
chrom,pos,*junk = line.split()
temps[chrom].write(line)
# close all temp files
for key,val in temps.items():
log("flushing tmp file: {}",key)
val.flush()
log("soring chroms")
with open(out,'w') as OUT:
# print headers
print("\n".join(headers),file=OUT)
for chrom in chroms:
# read in that chroms bullshit
with open(temps[chrom].name,'r') as CHROM:
variants = CHROM.readlines()
# sort by position
variants.sort(key=lambda x: int(x.split()[1]))
log("printing chrom {}",chrom)
print("".join(variants),file=OUT,end="")
temps[chrom].close()
def main(args):
parser=OptionParser()
parser.add_option('--vcf',help='unsorted VCF file')
parser.add_option('--fasta',help='fasta file')
parser.add_option('--out',default='sorted.vcf',help='output name [default: sorted.vcf]')
options,args = parser.parse_args(args)
sortVCF(options.vcf,options.fasta,out=options.out)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit | Python |
b1b3504b561a9b4f45664b7c4a69e9604600817e | Fix migration | AltSchool/django-softdelete,AltSchool/django-softdelete | softdelete/migrations/0001_initial.py | softdelete/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ChangeSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('object_id', models.CharField(max_length=100)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='SoftDeleteRecord',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('object_id', models.CharField(max_length=100)),
('changeset', models.ForeignKey(related_name='soft_delete_records', to='softdelete.ChangeSet')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='softdeleterecord',
unique_together=set([('changeset', 'content_type', 'object_id')]),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='ChangeSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('object_id', models.PositiveIntegerField()),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='SoftDeleteRecord',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('object_id', models.PositiveIntegerField()),
('changeset', models.ForeignKey(related_name='soft_delete_records', to='softdelete.ChangeSet')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='softdeleterecord',
unique_together=set([('changeset', 'content_type', 'object_id')]),
),
]
| bsd-2-clause | Python |
27c9da3129c6fbdd8d54276cf054c1f46e665aaf | Remove trailing slashes, add origin url to responses | talavis/kimenu | flask_app.py | flask_app.py | import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
| from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
| bsd-3-clause | Python |
de1e7f254d76e3ac7b3d547b35541eea3d83b74a | define main() | yasokada/python-160423_footInput | footInput.py | footInput.py | #!/usr/bin/env python
'''
v0.2 2016 Apr 23
- define main()
- change interval to 10 msec base for UDP comm
v0.1 2016 Apr 23
- can check 5 GPIO input
'''
import RPi.GPIO as GPIO
import time
import os
def main():
GPIO.setmode(GPIO.BOARD)
ins = [40, 38, 36, 32, 26]
for idx in range(5):
GPIO.setup(ins[idx], GPIO.IN, pull_up_down=GPIO.PUD_UP)
vals = range(5)
cnt=0
while True:
cnt=cnt+1
time.sleep(0.01)
if cnt < 30: # 300msec
continue
cnt=0
for idx in range(5):
vals[idx]=GPIO.input(ins[idx])
print vals[idx],
print
if __name__ == '__main__':
main()
| #!/usr/bin/env python
'''
v0.2 2016 Apr 23
- change interval to 10 msec base for UDP comm
v0.1 2016 Apr 23
- can check 5 GPIO input
'''
import RPi.GPIO as GPIO
import time
import os
GPIO.setmode(GPIO.BOARD)
ins = [40, 38, 36, 32, 26]
for idx in range(5):
GPIO.setup(ins[idx], GPIO.IN, pull_up_down=GPIO.PUD_UP)
vals = range(5)
cnt=0
while True:
cnt=cnt+1
time.sleep(0.01)
if cnt < 30: # 300msec
continue
cnt=0
for idx in range(5):
vals[idx]=GPIO.input(ins[idx])
print vals[idx],
print
| mit | Python |
c04ef32ca687e8a941716c078788c45b55c42f7e | Add temporary functions for testing | gogetdata/ggd-cli,gogetdata/ggd-cli | ggd/utils.py | ggd/utils.py | from __future__ import print_function
import os
import sys
import glob
from git import Repo
LOCAL_REPO_DIR = os.getenv("GGD_LOCAL", os.path.expanduser("~/.config/"))
RECIPE_REPO_DIR = os.path.join(LOCAL_REPO_DIR, "ggd-recipes")
GITHUB_URL = "https://github.com/gogetdata/ggd-recipes.git"
def get_species():
update_local_repo()
genomes_dir = os.path.join(RECIPE_REPO_DIR, "genomes")
return os.listdir(genomes_dir)
'''**************
TODO:
Hard Coded
NEED TO FIX
**************'''
## Recipe dir is flattened. Subdirs = channels
def get_ggd_channels():
update_local_repo()
#recipe_dir = os.path.join(RECIPE_REPO_DIR, "recipes")
recipe_dir = os.path.join("/uufs/chpc.utah.edu/common/home/u1138933/QuinlanLab/ggd/post-link-recipes/ggd-recipes","recipes")
return os.listdir(recipe_dir)
'''**************
TODO:
Hard Coded
NEED TO FIX
**************'''
def get_channel_data(ggd_channel):
update_local_repo()
#channeldata_path = os.path.join(RECIPE_REPO_DIR, "channeldata", ggd_channel, "channeldata.json")
channeldata_path = os.path.join("/uufs/chpc.utah.edu/common/home/u1138933/QuinlanLab/ggd/ggd-recipes/", "channeldata", ggd_channel,"ggd-7recipes-channeldata.json")
return (channeldata_path)
def get_builds(species):
update_local_repo()
species_dir = os.path.join(RECIPE_REPO_DIR, "genomes", species)
if species == "*":
paths = glob.glob(species_dir)
builds = []
for path in paths:
builds.extend(os.listdir(path))
return builds
else:
if os.path.isdir(species_dir):
return os.listdir(species_dir)
def update_local_repo():
if not os.path.isdir(LOCAL_REPO_DIR):
os.makedirs(LOCAL_REPO_DIR)
if not os.path.isdir(RECIPE_REPO_DIR):
Repo.clone_from(GITHUB_URL, RECIPE_REPO_DIR)
Repo(RECIPE_REPO_DIR).remotes.origin.pull()
def validate_build(build, species):
if build != "*":
builds_list = get_builds(species)
if not builds_list or build not in builds_list:
if species != "*":
print("Unknown build '%s' for species '%s'" % (build, species), file=sys.stderr)
else:
print("Unknown build '%s'" % (build), file=sys.stderr)
if (builds_list):
print("Available builds: '%s'" % ("', '".join(builds_list)), file=sys.stderr)
return False
return True
if __name__ == "__main__":
import doctest
doctest.testmod()
| from __future__ import print_function
import os
import sys
import glob
from git import Repo
LOCAL_REPO_DIR = os.getenv("GGD_LOCAL", os.path.expanduser("~/.config/"))
RECIPE_REPO_DIR = os.path.join(LOCAL_REPO_DIR, "ggd-recipes")
GITHUB_URL = "https://github.com/gogetdata/ggd-recipes.git"
def get_species():
update_local_repo()
genomes_dir = os.path.join(RECIPE_REPO_DIR, "genomes")
return os.listdir(genomes_dir)
## Recipe dir is flattened. Subdirs = channels
def get_ggd_channels():
update_local_repo()
#recipe_dir = os.path.join(RECIPE_REPO_DIR, "recipes")
recipe_dir = os.path.join("/uufs/chpc.utah.edu/common/home/u1138933/QuinlanLab/ggd/post-link-recipes/ggd-recipes","recipes")
return os.listdir(recipe_dir)
def get_builds(species):
update_local_repo()
species_dir = os.path.join(RECIPE_REPO_DIR, "genomes", species)
if species == "*":
paths = glob.glob(species_dir)
builds = []
for path in paths:
builds.extend(os.listdir(path))
return builds
else:
if os.path.isdir(species_dir):
return os.listdir(species_dir)
def update_local_repo():
if not os.path.isdir(LOCAL_REPO_DIR):
os.makedirs(LOCAL_REPO_DIR)
if not os.path.isdir(RECIPE_REPO_DIR):
Repo.clone_from(GITHUB_URL, RECIPE_REPO_DIR)
Repo(RECIPE_REPO_DIR).remotes.origin.pull()
def validate_build(build, species):
if build != "*":
builds_list = get_builds(species)
if not builds_list or build not in builds_list:
if species != "*":
print("Unknown build '%s' for species '%s'" % (build, species), file=sys.stderr)
else:
print("Unknown build '%s'" % (build), file=sys.stderr)
if (builds_list):
print("Available builds: '%s'" % ("', '".join(builds_list)), file=sys.stderr)
return False
return True
if __name__ == "__main__":
import doctest
doctest.testmod()
| mit | Python |
46e8002729b758639a0b1c1e9da59f380ca567db | Fix typo 'localtion' -> 'location' | elastic/elasticsearch-dsl-py | test_elasticsearch_dsl/test_integration/test_examples/test_parent_child.py | test_elasticsearch_dsl/test_integration/test_examples/test_parent_child.py | from datetime import datetime
from pytest import fixture
from elasticsearch_dsl import Q
from .parent_child import User, Question, Answer, setup, Comment
honza = User(id=42, signed_up=datetime(2013, 4, 3), username='honzakral',
email='honza@elastic.co', location='Prague')
nick = User(id=47, signed_up=datetime(2017, 4, 3), username='fxdgear',
email='nick.lang@elastic.co', location='Colorado')
@fixture
def question(write_client):
setup()
assert write_client.indices.exists_template(name='base')
# create a question object
q = Question(
_id=1,
author=nick,
tags=['elasticsearch', 'python'],
title='How do I use elasticsearch from Python?',
body='''
I want to use elasticsearch, how do I do it from Python?
''',
)
q.save()
return q
def test_comment(write_client, question):
question.add_comment(nick, "Just use elasticsearch-py")
q = Question.get(1)
assert isinstance(q, Question)
assert 1 == len(q.comments)
c = q.comments[0]
assert isinstance(c, Comment)
assert c.author.username == 'fxdgear'
def test_question_answer(write_client, question):
a = question.add_answer(honza, "Just use `elasticsearch-py`!")
assert isinstance(a, Answer)
# refresh the index so we can search right away
Question._index.refresh()
# we can now fetch answers from elasticsearch
answers = question.get_answers()
assert 1 == len(answers)
assert isinstance(answers[0], Answer)
search = Question.search().query('has_child',
type='answer',
inner_hits={},
query=Q('term', author__username__keyword='honzakral'),
)
response = search.execute()
assert 1 == len(response.hits)
q = response.hits[0]
assert isinstance(q, Question)
assert 1 == len(q.meta.inner_hits.answer.hits)
assert q.meta.inner_hits.answer.hits is q.get_answers()
a = q.meta.inner_hits.answer.hits[0]
assert isinstance(a, Answer)
assert isinstance(a.question, Question)
assert a.question.meta.id == '1'
| from datetime import datetime
from pytest import fixture
from elasticsearch_dsl import Q
from .parent_child import User, Question, Answer, setup, Comment
honza = User(id=42, signed_up=datetime(2013, 4, 3), username='honzakral',
email='honza@elastic.co', localtion='Prague')
nick = User(id=47, signed_up=datetime(2017, 4, 3), username='fxdgear',
email='nick.lang@elastic.co', localtion='Colorado')
@fixture
def question(write_client):
setup()
assert write_client.indices.exists_template(name='base')
# create a question object
q = Question(
_id=1,
author=nick,
tags=['elasticsearch', 'python'],
title='How do I use elasticsearch from Python?',
body='''
I want to use elasticsearch, how do I do it from Python?
''',
)
q.save()
return q
def test_comment(write_client, question):
question.add_comment(nick, "Just use elasticsearch-py")
q = Question.get(1)
assert isinstance(q, Question)
assert 1 == len(q.comments)
c = q.comments[0]
assert isinstance(c, Comment)
assert c.author.username == 'fxdgear'
def test_question_answer(write_client, question):
a = question.add_answer(honza, "Just use `elasticsearch-py`!")
assert isinstance(a, Answer)
# refresh the index so we can search right away
Question._index.refresh()
# we can now fetch answers from elasticsearch
answers = question.get_answers()
assert 1 == len(answers)
assert isinstance(answers[0], Answer)
search = Question.search().query('has_child',
type='answer',
inner_hits={},
query=Q('term', author__username__keyword='honzakral'),
)
response = search.execute()
assert 1 == len(response.hits)
q = response.hits[0]
assert isinstance(q, Question)
assert 1 == len(q.meta.inner_hits.answer.hits)
assert q.meta.inner_hits.answer.hits is q.get_answers()
a = q.meta.inner_hits.answer.hits[0]
assert isinstance(a, Answer)
assert isinstance(a.question, Question)
assert a.question.meta.id == '1'
| apache-2.0 | Python |
20d8ecb26fc8c90ec6f55e7f074b5e474db283d9 | fix a missing fake logging method | alfredodeza/remoto,ceph/remoto | remoto/connection.py | remoto/connection.py | from .lib import execnet
#
# Connection Object
#
class Connection(object):
def __init__(self, hostname, logger=None, sudo=False):
self.hostname = hostname
self.gateway = execnet.makegateway('ssh=%s' % hostname)
self.logger = logger or FakeRemoteLogger()
self.sudo = sudo
def execute(self, function, **kw):
return self.gateway.remote_exec(function, **kw)
def exit(self):
self.gateway.exit()
class FakeRemoteLogger:
def error(self, *a, **kw):
pass
def debug(self, *a, **kw):
pass
def info(self, *a, **kw):
pass
| from .lib import execnet
#
# Connection Object
#
class Connection(object):
def __init__(self, hostname, logger=None, sudo=False):
self.hostname = hostname
self.gateway = execnet.makegateway('ssh=%s' % hostname)
self.logger = logger or FakeRemoteLogger()
self.sudo = sudo
def execute(self, function, **kw):
return self.gateway.remote_exec(function, **kw)
def exit(self):
self.gateway.exit()
class FakeRemoteLogger:
def error(self, *a, **kw):
pass
def debug(self, *a, **kw):
pass
| mit | Python |
09019094afcfc258194d70cbc115c35233a08d80 | Fix mailchimp feature flag | billyhunt/osf.io,hmoco/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,KAsante95/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,samchrisinger/osf.io,cwisecarver/osf.io,icereval/osf.io,baylee-d/osf.io,emetsger/osf.io,adlius/osf.io,GageGaskins/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,fabianvf/osf.io,samanehsan/osf.io,KAsante95/osf.io,caseyrygt/osf.io,samchrisinger/osf.io,rdhyee/osf.io,HarryRybacki/osf.io,acshi/osf.io,amyshi188/osf.io,adlius/osf.io,chennan47/osf.io,mfraezz/osf.io,mattclark/osf.io,samanehsan/osf.io,mfraezz/osf.io,revanthkolli/osf.io,TomBaxter/osf.io,KAsante95/osf.io,reinaH/osf.io,MerlinZhang/osf.io,lamdnhan/osf.io,jmcarp/osf.io,alexschiller/osf.io,caseyrollins/osf.io,acshi/osf.io,felliott/osf.io,jinluyuan/osf.io,kushG/osf.io,dplorimer/osf,RomanZWang/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,HarryRybacki/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,alexschiller/osf.io,barbour-em/osf.io,zamattiac/osf.io,dplorimer/osf,himanshuo/osf.io,jolene-esposito/osf.io,mattclark/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,hmoco/osf.io,cwisecarver/osf.io,danielneis/osf.io,caneruguz/osf.io,Ghalko/osf.io,cslzchen/osf.io,jeffreyliu3230/osf.io,kushG/osf.io,felliott/osf.io,binoculars/osf.io,revanthkolli/osf.io,himanshuo/osf.io,brianjgeiger/osf.io,acshi/osf.io,lyndsysimon/osf.io,laurenrevere/osf.io,rdhyee/osf.io,fabianvf/osf.io,zkraime/osf.io,chrisseto/osf.io,abought/osf.io,bdyetton/prettychart,brianjgeiger/osf.io,fabianvf/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,chennan47/osf.io,RomanZWang/osf.io,aaxelb/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,ckc6cz/osf.io,CenterForOpenScience/osf.io,Ghalko/osf.io,billyhunt/osf.io,mluke93/osf.io,SSJohns/osf.io,GageGaskins/osf.io,danielneis/osf.io,jmcarp/osf.io,amyshi188/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,amyshi188/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,mluo613/osf.io,aaxelb/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,cosenal/osf.io,emetsger/osf.io,arpitar/osf.io,pattisdr/osf.io,lyndsysimon/osf.io,doublebits/osf.io,cslzchen/osf.io,cosenal/osf.io,mluo613/osf.io,jmcarp/osf.io,arpitar/osf.io,zachjanicki/osf.io,cldershem/osf.io,samchrisinger/osf.io,jolene-esposito/osf.io,jnayak1/osf.io,reinaH/osf.io,ckc6cz/osf.io,saradbowman/osf.io,kwierman/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,caseyrygt/osf.io,cslzchen/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,jnayak1/osf.io,adlius/osf.io,doublebits/osf.io,Johnetordoff/osf.io,GaryKriebel/osf.io,mattclark/osf.io,jeffreyliu3230/osf.io,caneruguz/osf.io,amyshi188/osf.io,cslzchen/osf.io,GaryKriebel/osf.io,danielneis/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,pattisdr/osf.io,bdyetton/prettychart,binoculars/osf.io,acshi/osf.io,GageGaskins/osf.io,adlius/osf.io,felliott/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,dplorimer/osf,RomanZWang/osf.io,kwierman/osf.io,leb2dg/osf.io,HarryRybacki/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,lamdnhan/osf.io,Ghalko/osf.io,erinspace/osf.io,Ghalko/osf.io,samanehsan/osf.io,cldershem/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,njantrania/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,SSJohns/osf.io,baylee-d/osf.io,lamdnhan/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,samchrisinger/osf.io,kch8qx/osf.io,reinaH/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,petermalcolm/osf.io,TomHeatwole/osf.io,njantrania/osf.io,bdyetton/prettychart,ckc6cz/osf.io,mluo613/osf.io,petermalcolm/osf.io,zkraime/osf.io,zkraime/osf.io,asanfilippo7/osf.io,binoculars/osf.io,himanshuo/osf.io,kwierman/osf.io,revanthkolli/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,kushG/osf.io,jinluyuan/osf.io,barbour-em/osf.io,pattisdr/osf.io,lyndsysimon/osf.io,haoyuchen1992/osf.io,MerlinZhang/osf.io,billyhunt/osf.io,doublebits/osf.io,sloria/osf.io,sbt9uc/osf.io,sbt9uc/osf.io,kch8qx/osf.io,fabianvf/osf.io,kwierman/osf.io,cosenal/osf.io,billyhunt/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,KAsante95/osf.io,danielneis/osf.io,MerlinZhang/osf.io,MerlinZhang/osf.io,bdyetton/prettychart,jmcarp/osf.io,haoyuchen1992/osf.io,abought/osf.io,crcresearch/osf.io,felliott/osf.io,aaxelb/osf.io,kushG/osf.io,kch8qx/osf.io,emetsger/osf.io,jeffreyliu3230/osf.io,GaryKriebel/osf.io,wearpants/osf.io,brandonPurvis/osf.io,chrisseto/osf.io,billyhunt/osf.io,HarryRybacki/osf.io,jnayak1/osf.io,njantrania/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,mluke93/osf.io,doublebits/osf.io,jolene-esposito/osf.io,caseyrollins/osf.io,lamdnhan/osf.io,cwisecarver/osf.io,acshi/osf.io,cwisecarver/osf.io,SSJohns/osf.io,sloria/osf.io,sloria/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,GaryKriebel/osf.io,rdhyee/osf.io,leb2dg/osf.io,sbt9uc/osf.io,leb2dg/osf.io,samanehsan/osf.io,zachjanicki/osf.io,wearpants/osf.io,cosenal/osf.io,wearpants/osf.io,abought/osf.io,brandonPurvis/osf.io,reinaH/osf.io,ZobairAlijan/osf.io,caneruguz/osf.io,zachjanicki/osf.io,mluke93/osf.io,laurenrevere/osf.io,abought/osf.io,jnayak1/osf.io,njantrania/osf.io,petermalcolm/osf.io,hmoco/osf.io,ticklemepierce/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,caseyrygt/osf.io,doublebits/osf.io,chennan47/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,emetsger/osf.io,barbour-em/osf.io,zamattiac/osf.io,alexschiller/osf.io,arpitar/osf.io,caneruguz/osf.io,icereval/osf.io,laurenrevere/osf.io,arpitar/osf.io,jolene-esposito/osf.io,dplorimer/osf,zachjanicki/osf.io,caseyrollins/osf.io,erinspace/osf.io,himanshuo/osf.io,ticklemepierce/osf.io,ckc6cz/osf.io,zkraime/osf.io,kch8qx/osf.io,baylee-d/osf.io,Nesiehr/osf.io,mluke93/osf.io,revanthkolli/osf.io,hmoco/osf.io,barbour-em/osf.io,RomanZWang/osf.io,erinspace/osf.io | website/mailchimp_utils.py | website/mailchimp_utils.py | import mailchimp
from website import settings
from framework.tasks import app
from framework.auth.core import User
from framework.auth.signals import user_confirmed
def get_mailchimp_api():
if not settings.MAILCHIMP_API_KEY:
raise RuntimeError("An API key is required to connect to Mailchimp.")
return mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
def get_list_id_from_name(list_name):
m = get_mailchimp_api()
mailing_list = m.lists.list(filters={'list_name': list_name})
return mailing_list['data'][0]['id']
def get_list_name_from_id(list_id):
m = get_mailchimp_api()
mailing_list = m.lists.list(filters={'list_id': list_id})
return mailing_list['data'][0]['name']
@app.task
def subscribe(list_name, user_id):
user = User.load(user_id)
m = get_mailchimp_api()
list_id = get_list_id_from_name(list_name=list_name)
m.lists.subscribe(id=list_id, email={'email': user.username}, double_optin=False, update_existing=True)
# Update mailing_list user field
if user.mailing_lists is None:
user.mailing_lists = {}
user.save()
user.mailing_lists[list_name] = True
user.save()
@app.task
def unsubscribe(list_name, user_id):
""" Unsubscribe a user from a mailchimp mailing list given its name.
:param str list_name: mailchimp mailing list name
:param str username: current user's email
A ListNotSubscribed error will be raised if a user
not subscribed to the list tries to unsubscribe again.
"""
user = User.load(user_id)
m = get_mailchimp_api()
list_id = get_list_id_from_name(list_name=list_name)
m.lists.unsubscribe(id=list_id, email={'email': user.username})
# Update mailing_list user field
if user.mailing_lists is None:
user.mailing_lists = {}
user.save()
user.mailing_lists[list_name] = False
user.save()
@user_confirmed.connect
def subscribe_on_confirm(user):
# Subscribe user to general OSF mailing list upon account confirmation
if settings.ENABLE_EMAIL_SUBSCRIPTIONS:
subscribe_mailchimp(settings.MAILCHIMP_GENERAL_LIST, user._id)
subscribe_mailchimp = (
subscribe.delay
if settings.USE_CELERY
else subscribe)
unsubscribe_mailchimp = (
unsubscribe.delay
if settings.USE_CELERY
else unsubscribe
)
| import mailchimp
from website import settings
from framework.tasks import app
from framework.auth.core import User
from framework.auth.signals import user_confirmed
def get_mailchimp_api():
if not settings.MAILCHIMP_API_KEY:
raise RuntimeError("An API key is required to connect to Mailchimp.")
return mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
def get_list_id_from_name(list_name):
m = get_mailchimp_api()
mailing_list = m.lists.list(filters={'list_name': list_name})
return mailing_list['data'][0]['id']
def get_list_name_from_id(list_id):
m = get_mailchimp_api()
mailing_list = m.lists.list(filters={'list_id': list_id})
return mailing_list['data'][0]['name']
@app.task
def subscribe(list_name, user_id):
user = User.load(user_id)
m = get_mailchimp_api()
list_id = get_list_id_from_name(list_name=list_name)
m.lists.subscribe(id=list_id, email={'email': user.username}, double_optin=False, update_existing=True)
# Update mailing_list user field
if user.mailing_lists is None:
user.mailing_lists = {}
user.save()
user.mailing_lists[list_name] = True
user.save()
@app.task
def unsubscribe(list_name, user_id):
""" Unsubscribe a user from a mailchimp mailing list given its name.
:param str list_name: mailchimp mailing list name
:param str username: current user's email
A ListNotSubscribed error will be raised if a user
not subscribed to the list tries to unsubscribe again.
"""
user = User.load(user_id)
m = get_mailchimp_api()
list_id = get_list_id_from_name(list_name=list_name)
m.lists.unsubscribe(id=list_id, email={'email': user.username})
# Update mailing_list user field
if user.mailing_lists is None:
user.mailing_lists = {}
user.save()
user.mailing_lists[list_name] = False
user.save()
@user_confirmed.connect
def subscribe_on_confirm(user):
# Subscribe user to general OSF mailing list upon account confirmation
if not settings.ENABLE_EMAIL_SUBSCRIPTIONS:
subscribe_mailchimp(settings.MAILCHIMP_GENERAL_LIST, user._id)
subscribe_mailchimp = (
subscribe.delay
if settings.USE_CELERY
else subscribe)
unsubscribe_mailchimp = (
unsubscribe.delay
if settings.USE_CELERY
else unsubscribe
)
| apache-2.0 | Python |
b7c831756825cfeffcb97387294435511ed9c811 | bump version to 1.0.0rc1 | sat-utils/sat-search | satsearch/version.py | satsearch/version.py | __version__ = '1.0.0rc1'
| __version__ = '1.0.0b12'
| mit | Python |
af363ab33a56031e7e0bbbb295c0987b1f6b076a | add warning to log.py | ceph/remoto,alfredodeza/remoto | remoto/log.py | remoto/log.py |
def reporting(conn, result, timeout=None):
timeout = timeout or conn.global_timeout # -1 a.k.a. wait for ever
log_map = {
'debug': conn.logger.debug,
'error': conn.logger.error,
'warning': conn.logger.warning
}
while True:
try:
received = result.receive(timeout)
level_received, message = list(received.items())[0]
log_map[level_received](message.strip('\n'))
except EOFError:
break
except Exception as err:
# the things we need to do here :(
# because execnet magic, we cannot catch this as
# `except TimeoutError`
if err.__class__.__name__ == 'TimeoutError':
msg = 'No data was received after %s seconds, disconnecting...' % timeout
conn.logger.warning(msg)
break
raise
|
def reporting(conn, result, timeout=None):
timeout = timeout or conn.global_timeout # -1 a.k.a. wait for ever
log_map = {'debug': conn.logger.debug, 'error': conn.logger.error}
while True:
try:
received = result.receive(timeout)
level_received, message = list(received.items())[0]
log_map[level_received](message.strip('\n'))
except EOFError:
break
except Exception as err:
# the things we need to do here :(
# because execnet magic, we cannot catch this as
# `except TimeoutError`
if err.__class__.__name__ == 'TimeoutError':
msg = 'No data was received after %s seconds, disconnecting...' % timeout
conn.logger.warning(msg)
break
raise
| mit | Python |
52c6cabc06c835ef3741ba873a5ff58fbedcfbc7 | remove override dep for app-on-ws-init | xenomachina/i3ipc-python,nicoe/i3ipc-python,acrisci/i3ipc-python,chrsclmn/i3ipc-python | examples/app-on-ws-init.py | examples/app-on-ws-init.py | #!/usr/bin/env python3
# https://faq.i3wm.org/question/3699/how-can-i-open-an-application-when-i-open-a-certain-workspace-for-the-first-time/
from argparse import ArgumentParser
from gi.repository import i3ipc, GLib
i3 = i3ipc.Connection()
parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
GLib.MainLoop().run()
| #!/usr/bin/env python3
# https://faq.i3wm.org/question/3699/how-can-i-open-an-application-when-i-open-a-certain-workspace-for-the-first-time/
from argparse import ArgumentParser
from gi.repository import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
| bsd-3-clause | Python |
5a293d02b5d6e3905dcd6659b1e2c96fa89d8ba8 | Create succ.py | BlueIsTrue/BlueIsTrue-Cogs | succ/succ.py | succ/succ.py | import discord
from discord.ext import commands
from random import choice as rndchoice
from .utils import checks
import os
class Succ:
"""Succ command."""
def __init__(self, bot):
self.bot = bot
@commands.group(pass_context=True, invoke_without_command=True)
async def givemethesucc(self, ctx, *, user: discord.Member=None):
"""Gives you succ"""
botid = self.bot.user.id
user = ctx.message.author
await self.bot.say("**Slurp Slurp**")
def setup(bot):
n = Succ(bot)
bot.add_cog(n)
| import discord
from discord.ext import commands
from random import choice as rndchoice
from .utils.dataIO import fileIO
from .utils import checks
import os
class Succ:
"""Succ command."""
def __init__(self, bot):
self.bot = bot
@commands.group(pass_context=True, invoke_without_command=True)
async def givemethesucc(self, ctx, *, user: discord.Member=None):
"""Gives you succ"""
botid = self.bot.user.id
user = ctx.message.author
await self.bot.say("**Slurp Slurp**")
def setup(bot):
n = Succ(bot)
bot.add_cog(n)
| mit | Python |
772e094ba31d1d64f22408724f712fcdccce3444 | Add developer secret key | Brok-Bucholtz/CloneTube,Brok-Bucholtz/CloneTube,Brok-Bucholtz/CloneTube | app/app.py | app/app.py | from flask import Flask
app = Flask('CloneTube')
app.secret_key = 'DEV_SECRET_KEY'
| from flask import Flask
app = Flask('CloneTube')
| mit | Python |
2af5600296a09a7d9d72b9089404eaca73cd34a7 | Fix line length. | tsanders-kalloop/django-mailer-2,SmileyChris/django-mailer-2,Giftovus/django-mailer-2,mfwarren/django-mailer-2,APSL/django-mailer-2,GreenLightGo/django-mailer-2,danfairs/django-mailer-2,pegler/django-mailer-2,APSL/django-mailer-2,maykinmedia/django-mailer-2,maykinmedia/django-mailer-2,tclancy/django-mailer-2,maykinmedia/django-mailer-2,kvh/django-mailer-2,torchbox/django-mailer-2,mrbox/django-mailer-2,davidmarble/django-mailer-2,k1000/django-mailer-2,rofrankel/django-mailer-2,fenginx/django-mailer-2,morenopc/django-mailer-2,APSL/django-mailer-2,damkop/django-mailer-2,victorfontes/django-mailer-2,PSyton/django-mailer-2,tachang/django-mailer-2,colinhowe/django-mailer-2,shn/django-mailer-2 | django_mailer/management/commands/send_mail.py | django_mailer/management/commands/send_mail.py | from django.conf import settings
from django.core.management.base import NoArgsCommand
from django_mailer.engine import send_all
from optparse import make_option
import logging
# Provide a way of temporarily pausing the sending of mail.
PAUSE_SEND = getattr(settings, "MAILER_PAUSE_SEND", False)
LOGGING_LEVEL = {'0': logging.CRITICAL, '1': logging.INFO, '2': logging.DEBUG}
class Command(NoArgsCommand):
help = 'Iterate the mail queue, attempting to send all mail.'
option_list = NoArgsCommand.option_list + (
make_option('-b', '--block-size', default=500, type='int',
help='The number of messages to iterate before checking the queue '
'again (in case new messages have been added while the queue '
'is being cleared).'),
)
def handle_noargs(self, verbosity, block_size, **options):
logging.basicConfig(level=LOGGING_LEVEL[verbosity],
format="%(message)s")
# if PAUSE_SEND is turned on don't do anything.
if not PAUSE_SEND:
send_all(block_size)
else:
logging.warning("Sending is paused, exiting without sending "
"queued mail.")
| from django.conf import settings
from django.core.management.base import NoArgsCommand
from django_mailer.engine import send_all
from optparse import make_option
import logging
# Provide a way of temporarily pausing the sending of mail.
PAUSE_SEND = getattr(settings, "MAILER_PAUSE_SEND", False)
LOGGING_LEVEL = {'0': logging.CRITICAL, '1': logging.INFO, '2': logging.DEBUG}
class Command(NoArgsCommand):
help = 'Iterate the mail queue, attempting to send all mail.'
option_list = NoArgsCommand.option_list + (
make_option('-b', '--block-size', default=500, type='int',
help='The number of messages to iterate before checking the queue '
'again (in case new messages have been added while the queue '
'is being cleared).'),
)
def handle_noargs(self, verbosity, block_size, **options):
logging.basicConfig(level=LOGGING_LEVEL[verbosity], format="%(message)s")
# if PAUSE_SEND is turned on don't do anything.
if not PAUSE_SEND:
send_all(block_size)
else:
logging.warning("Sending is paused, exiting without sending "
"queued mail.")
| mit | Python |
3a2853007ad002687d4dda7066b2c50d82f3cc3c | Update 0002_auto_20180627_1121 migration | pydanny/dj-stripe,kavdev/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe,dj-stripe/dj-stripe,pydanny/dj-stripe | djstripe/migrations/0002_auto_20180627_1121.py | djstripe/migrations/0002_auto_20180627_1121.py | # Generated by Django 2.0.6 on 2018-06-27 08:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
("djstripe", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="account",
name="business_name",
field=djstripe.fields.StripeCharField(help_text="The publicly visible name of the business", max_length=255, null=True),
),
migrations.AlterField(
model_name="account",
name="support_url",
field=djstripe.fields.StripeCharField(help_text="A publicly shareable URL that provides support for this account", max_length=200, null=True),
),
migrations.AlterField(
model_name="charge",
name="receipt_number",
field=djstripe.fields.StripeCharField(help_text="The transaction number that appears on email receipts sent for this charge.", max_length=14, null=True),
),
migrations.AlterField(
model_name="product",
name="deactivate_on",
field=djstripe.fields.StripeJSONField(help_text="An array of connect application identifiers that cannot purchase this product. Only applicable to products of `type=good`.", null=True),
),
migrations.AlterField(
model_name="product",
name="images",
field=djstripe.fields.StripeJSONField(help_text="A list of up to 8 URLs of images for this product, meant to be displayable to the customer. Only applicable to products of `type=good`.", null=True),
),
migrations.RemoveField(
model_name="charge",
name="receipt_sent",
),
]
| # Generated by Django 2.0.6 on 2018-06-27 08:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
("djstripe", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="account",
name="business_name",
field=djstripe.fields.StripeCharField(help_text="The publicly visible name of the business", max_length=255, null=True),
),
migrations.AlterField(
model_name="account",
name="support_url",
field=djstripe.fields.StripeCharField(help_text="A publicly shareable URL that provides support for this account", max_length=200, null=True),
),
migrations.AlterField(
model_name="charge",
name="receipt_number",
field=djstripe.fields.StripeCharField(help_text="The transaction number that appears on email receipts sent for this charge.", max_length=14, null=True),
),
migrations.AlterField(
model_name="product",
name="deactivate_on",
field=djstripe.fields.StripeJSONField(help_text="An array of connect application identifiers that cannot purchase this product. Only applicable to products of `type=good`.", null=True),
),
migrations.AlterField(
model_name="product",
name="images",
field=djstripe.fields.StripeJSONField(help_text="A list of up to 8 URLs of images for this product, meant to be displayable to the customer. Only applicable to products of `type=good`.", null=True),
),
]
| mit | Python |
6e16eaec078f18f0a098861afa4bd0bd94cb205f | Fix ROOT_URLCONF and remove WSGI_APP from settings | siggame/discuss | discuss/discuss/settings.py | discuss/discuss/settings.py | """
Django settings for discuss project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
# Determine some important file locations
SETTINGS_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(SETTINGS_DIR)
BUILDOUT_DIR = os.path.dirname(BASE_DIR)
VAR_DIR = os.path.join(BUILDOUT_DIR, "var")
##########################################################################
#
# Secret settings
#
##########################################################################
# If a secret_settings file isn't defined, open a new one and save a
# SECRET_KEY in it. Then import it. All passwords and other secret
# settings should be stored in secret_settings.py. NOT in settings.py
try:
from secret_settings import *
except ImportError:
print "Couldn't find secret_settings file. Creating a new one."
secret_settings_loc = os.path.join(SETTINGS_DIR, "secret_settings.py")
with open(secret_settings_loc, 'w') as secret_settings:
secret_key = ''.join([chr(ord(x) % 90 + 33) for x in os.urandom(40)])
secret_settings.write("SECRET_KEY = '''%s'''\n" % secret_key)
from secret_settings import *
##########################################################################
#
# Application Definition
#
##########################################################################
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'discuss.discuss.urls'
##########################################################################
#
# Database settings
#
##########################################################################
# The database should *not* be set in this file. It should be set in
# development.py or production.py instead.
DATABASES = None
##########################################################################
#
# Internationalization settings
#
##########################################################################
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
##########################################################################
#
# Static files settings
#
##########################################################################
STATIC_URL = '/static/'
| """
Django settings for discuss project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
# Determine some important file locations
SETTINGS_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(SETTINGS_DIR)
BUILDOUT_DIR = os.path.dirname(BASE_DIR)
VAR_DIR = os.path.join(BUILDOUT_DIR, "var")
##########################################################################
#
# Secret settings
#
##########################################################################
# If a secret_settings file isn't defined, open a new one and save a
# SECRET_KEY in it. Then import it. All passwords and other secret
# settings should be stored in secret_settings.py. NOT in settings.py
try:
from secret_settings import *
except ImportError:
print "Couldn't find secret_settings file. Creating a new one."
secret_settings_loc = os.path.join(SETTINGS_DIR, "secret_settings.py")
with open(secret_settings_loc, 'w') as secret_settings:
secret_key = ''.join([chr(ord(x) % 90 + 33) for x in os.urandom(40)])
secret_settings.write("SECRET_KEY = '''%s'''\n" % secret_key)
from secret_settings import *
##########################################################################
#
# Application Definition
#
##########################################################################
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'discuss.urls'
WSGI_APPLICATION = 'discuss.wsgi.application'
##########################################################################
#
# Database settings
#
##########################################################################
# The database should *not* be set in this file. It should be set in
# development.py or production.py instead.
DATABASES = None
##########################################################################
#
# Internationalization settings
#
##########################################################################
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
##########################################################################
#
# Static files settings
#
##########################################################################
STATIC_URL = '/static/'
| bsd-3-clause | Python |
e2f83a6a5d43ebc52d03d4059a7526a579a425c1 | Set User Profile Unicode Function | s1na/darkoob,s1na/darkoob,s1na/darkoob | darkoob/social/models.py | darkoob/social/models.py | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User) | mit | Python |
584f4fb7487e05cfa3bc4dd0c52c57cd17f08ac1 | Add @anonymous_required decorator to django-common | Tivix/django-common,Tivix/django-common,WikiRealtyInc/django-common,WikiRealtyInc/django-common,WikiRealtyInc/django-common,Tivix/django-common | django_common/decorators.py | django_common/decorators.py | try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
import inspect
from django.conf import settings
from django.http import HttpResponseRedirect
def ssl_required(allow_non_ssl=False):
"""Views decorated with this will always get redirected to https except when allow_non_ssl is set to true."""
def wrapper(view_func):
def _checkssl(request, *args, **kwargs):
# allow_non_ssl=True lets non-https requests to come through to this view (and hence not redirect)
if hasattr(settings, 'SSL_ENABLED') and settings.SSL_ENABLED and not request.is_secure() and not allow_non_ssl:
return HttpResponseRedirect(request.build_absolute_uri().replace('http://', 'https://'))
return view_func(request, *args, **kwargs)
return _checkssl
return wrapper
def disable_for_loaddata(signal_handler):
"""
See: https://code.djangoproject.com/ticket/8399
Disables signal from firing if its caused because of loaddata
"""
@wraps(signal_handler)
def wrapper(*args, **kwargs):
for fr in inspect.stack():
if inspect.getmodulename(fr[1]) == 'loaddata':
return
signal_handler(*args, **kwargs)
return wrapper
def anonymous_required(view, redirect_to=None):
"""
Only allow if user is NOT authenticated.
"""
if redirect_to is None:
redirect_to = settings.LOGIN_REDIRECT_URL
@wraps(view)
def wrapper(request, *a, **k):
if request.user and request.user.is_authenticated():
return HttpResponseRedirect(redirect_to)
return view(request, *a, **k)
return wrapper
| try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
import inspect
from django.conf import settings
from django.http import HttpResponseRedirect
def ssl_required(allow_non_ssl=False):
"""Views decorated with this will always get redirected to https except when allow_non_ssl is set to true."""
def wrapper(view_func):
def _checkssl(request, *args, **kwargs):
# allow_non_ssl=True lets non-https requests to come through to this view (and hence not redirect)
if hasattr(settings, 'SSL_ENABLED') and settings.SSL_ENABLED and not request.is_secure() and not allow_non_ssl:
return HttpResponseRedirect(request.build_absolute_uri().replace('http://', 'https://'))
return view_func(request, *args, **kwargs)
return _checkssl
return wrapper
def disable_for_loaddata(signal_handler):
"""
See: https://code.djangoproject.com/ticket/8399
Disables signal from firing if its caused because of loaddata
"""
@wraps(signal_handler)
def wrapper(*args, **kwargs):
for fr in inspect.stack():
if inspect.getmodulename(fr[1]) == 'loaddata':
return
signal_handler(*args, **kwargs)
return wrapper
| mit | Python |
41a002c8f4854f3bcabf395b9e8747e7aa57d0a8 | fix allele delimiter splitting | ClinGen/ildb,ClinGen/ildb,ClinGen/ildb,ClinGen/ildb | beacon/src/api/import_controllers.py | beacon/src/api/import_controllers.py | """
@package api
Data import controllers
"""
from flask import Blueprint, jsonify, Flask, request, redirect, url_for
from werkzeug.utils import secure_filename
from api.database import DataAccess
from api import app
import vcf
import io
import re
import_controllers = Blueprint('import_controllers', __name__)
@import_controllers.route('/vcf', methods=['GET', 'POST'])
def import_vcf():
"""
VCF file upload operation
"""
if request.method == 'POST':
# check if the post request has the file part
if 'file' not in request.files:
return jsonify({'error':'no file in file part'})
print(request.files)
file = request.files['file']
# if user does not select file, browser also
# submit a empty part without filename
if file.filename == '':
flash('No selected file')
return jsonify({'error':'no file'})
# this is used to ensure we can safely use the filename sent to us
filename = secure_filename(file.filename)
# load data from the stream into memory for processing
data = file.read()
vcf_reader = vcf.Reader(io.StringIO(data.decode('utf-8')))
variants = list()
for record in vcf_reader:
#TODO accept multiple samples in a vcf file
sample = record.samples[0]
#TODO - there are better ways to handle this
# Do we need to store the reference for this query
allleles = []
if sample.gt_bases is not None:
alleles = re.split(r'[\\/|]', sample.gt_bases)
# remove duplicates
alleles = set(alleles)
for allele in alleles:
variants.append(record.CHROM + '_' + str(record.POS) + '_' + allele)
DataAccess().import_vcf({'variants': variants})
print (variants)
# TODO: change this to return stats
return jsonify({'result':'ok'})
| """
@package api
Data import controllers
"""
from flask import Blueprint, jsonify, Flask, request, redirect, url_for
from werkzeug.utils import secure_filename
from api.database import DataAccess
from api import app
import vcf
import io
import_controllers = Blueprint('import_controllers', __name__)
@import_controllers.route('/vcf', methods=['GET', 'POST'])
def import_vcf():
"""
VCF file upload operation
"""
if request.method == 'POST':
# check if the post request has the file part
if 'file' not in request.files:
return jsonify({'error':'no file in file part'})
print(request.files)
file = request.files['file']
# if user does not select file, browser also
# submit a empty part without filename
if file.filename == '':
flash('No selected file')
return jsonify({'error':'no file'})
# this is used to ensure we can safely use the filename sent to us
filename = secure_filename(file.filename)
# load data from the stream into memory for processing
data = file.read()
vcf_reader = vcf.Reader(io.StringIO(data.decode('utf-8')))
variants = list()
for record in vcf_reader:
#TODO accept multiple samples in a vcf file
sample = record.samples[0]
#TODO - there are better ways to handle this
# Do we need to store the reference for this query
allleles = []
if sample.gt_bases is not None:
alleles = sample.gt_bases.split('/')
# remove duplicates
alleles = set(alleles)
for allele in alleles:
variants.append(record.CHROM + '_' + str(record.POS) + '_' + allele)
DataAccess().import_vcf({'build':'GRCh38', 'variants': variants})
print (variants)
# TODO: change this to return stats
return jsonify({'result':'ok'})
| mit | Python |
d3e69e7512fbc24537fa37e4e3187ec042009128 | Add a GUID, description and published date to podcast RSS items | blancltd/blanc-basic-podcast | blanc_basic_podcast/podcast/feeds.py | blanc_basic_podcast/podcast/feeds.py | from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.contrib.sites.models import Site
from django.contrib.staticfiles.storage import staticfiles_storage
import mimetypes
from .itunesfeed import PodcastFeed
from .models import PodcastFile
class BasicPodcastFeed(PodcastFeed):
title = getattr(settings, 'PODCAST_TITLE', 'Podcast')
link = reverse_lazy('blanc_basic_podcast:feed')
author_name = settings.PODCAST_AUTHOR
author_email = settings.PODCAST_EMAIL
itunes_explicit = getattr(settings, 'PODCAST_EXPLICIT', 'no')
itunes_categories = settings.PODCAST_CATEGORIES
@property
def itunes_image(self):
file_url = staticfiles_storage.url(settings.PODCAST_IMAGE)
# Must be a full URL
if not (file_url.startswith('http://')
or file_url.startswith('https://')):
domain = Site.objects.get_current().domain
file_url = 'http://%s%s' % (domain, file_url)
return file_url
def items(self):
return PodcastFile.objects.all()
def item_description(self, obj):
return obj.description
def item_pubdate(self, obj):
return obj.date
def item_guid(self, obj):
return u'%s:podcast:%d' % (Site.objects.get_current().domain, obj.pk)
def item_enclosure_url(self, obj):
file_url = obj.file.url
# Must be a full URL
if not (file_url.startswith('http://')
or file_url.startswith('https://')):
domain = Site.objects.get_current().domain
file_url = 'http://%s%s' % (domain, file_url)
return file_url
def item_enclosure_mime_type(self, obj):
return mimetypes.guess_type(obj.file.path)[0]
def item_enclosure_length(self, obj):
return obj.file.size
def item_itunes_duration(self, obj):
return obj.time_duration
| from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.contrib.sites.models import Site
from django.contrib.staticfiles.storage import staticfiles_storage
import mimetypes
from .itunesfeed import PodcastFeed
from .models import PodcastFile
class BasicPodcastFeed(PodcastFeed):
title = getattr(settings, 'PODCAST_TITLE', 'Podcast')
link = reverse_lazy('blanc_basic_podcast:feed')
author_name = settings.PODCAST_AUTHOR
author_email = settings.PODCAST_EMAIL
itunes_explicit = getattr(settings, 'PODCAST_EXPLICIT', 'no')
itunes_categories = settings.PODCAST_CATEGORIES
@property
def itunes_image(self):
file_url = staticfiles_storage.url(settings.PODCAST_IMAGE)
# Must be a full URL
if not (file_url.startswith('http://')
or file_url.startswith('https://')):
domain = Site.objects.get_current().domain
file_url = 'http://%s%s' % (domain, file_url)
return file_url
def items(self):
return PodcastFile.objects.all()
def item_enclosure_url(self, obj):
file_url = obj.file.url
# Must be a full URL
if not (file_url.startswith('http://')
or file_url.startswith('https://')):
domain = Site.objects.get_current().domain
file_url = 'http://%s%s' % (domain, file_url)
return file_url
def item_enclosure_mime_type(self, obj):
return mimetypes.guess_type(obj.file.path)[0]
def item_enclosure_length(self, obj):
return obj.file.size
def item_itunes_duration(self, obj):
return obj.time_duration
| bsd-2-clause | Python |
8fd6127fe1597262039b483553b9c7d72fdad703 | Change name for content view test | kaka0525/Copy-n-Haste,kaka0525/Copy-n-Haste,tpeek/Copy-n-Haste,tpeek/Copy-n-Haste,tpeek/Copy-n-Haste,kaka0525/Copy-n-Haste | CopyHaste/typing_test/tests.py | CopyHaste/typing_test/tests.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, Client
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.contrib.auth.models import User
from django.core import mail
from django.test.utils import override_settings
import factory
from faker import Faker
from splinter import Browser
fake = Faker()
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
username = fake.username()
email = fake.email()
# # # # # # # # # # # # # #
# Client Tests for Views #
# # # # # # # # # # # # # #
class PlayClientTests(TestCase):
# Test 1
# Check that /play/ page loads the correct template
def test_game_template(self):
response = Client().get('/play/')
self.assertTemplateUsed(response, 'typingtest2.html')
# Test 2
# Check that /play/multi/ page loads the correct template
def test_multi_template(self):
response = Client().get('/play/multi/')
self.assertTemplateUsed(response, 'typingtest3.html')
# Test 3
# Check that /play/content/ page loads the correct content
def test_content_api(self):
response = Client().post(
'/play/content/',
{
'user': 'tpeek',
'repo': 'Copy-n-Haste',
'path': 'README.md'
}
)
self.assertEqual(response.content[:14], '# Copy-n-Haste')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, Client
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.contrib.auth.models import User
from django.core import mail
from django.test.utils import override_settings
import factory
from faker import Faker
from splinter import Browser
fake = Faker()
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
username = fake.username()
email = fake.email()
# # # # # # # # # # # # # # # #
# Client Tests for Templates #
# # # # # # # # # # # # # # # #
class PlayClientTests(TestCase):
# Test 1
# Check that /play/ page loads the correct template
def test_game_template(self):
response = Client().get('/play/')
self.assertTemplateUsed(response, 'typingtest2.html')
# Test 2
# Check that /play/multi/ page loads the correct template
def test_multi_template(self):
response = Client().get('/play/multi/')
self.assertTemplateUsed(response, 'typingtest3.html')
# Test 3
# Check that /play/content/ page loads the correct content
def test_home_template(self):
response = Client().post(
'/play/content/',
{
'user': 'tpeek',
'repo': 'Copy-n-Haste',
'path': 'README.md'
}
)
self.assertEqual(response.content[:14], '# Copy-n-Haste')
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.