commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
e480204e6a44de918740056aafd0a6a68f5efe39 | Add problem 4 description | edmondkotowski/project-euler | problems/problem_4.py | problems/problem_4.py | # Largest palindrome product of two 3-digit numbers
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def largest_palindrome_product():
left_value = 999
max_product = 0
while left_value > 0:
right_value = 999
while right_value > 0:
product = left_value * right_value
if is_palindrome(product):
if product > max_product:
max_product = product
right_value -= 1
left_value -= 1
return max_product
if __name__ == "__main__":
print "Largest palindrome product: {product}".format(
product=largest_palindrome_product()) | def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def largest_palindrome_product():
left_value = 999
max_product = 0
while left_value > 0:
right_value = 999
while right_value > 0:
product = left_value * right_value
if is_palindrome(product):
if product > max_product:
max_product = product
right_value -= 1
left_value -= 1
return max_product
if __name__ == "__main__":
print "Largest palindrome product: {product}".format(
product=largest_palindrome_product()) | mit | Python |
808d089b2b93671ef3d4331007fc1c3da2dea0b5 | Use django 1.10 patterns style | davidfischer/rpc4django,davidfischer/rpc4django,davidfischer/rpc4django | example/urls.py | example/urls.py | from django.conf.urls import patterns
from rpc4django.views import serve_rpc_request
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^example/', include('example.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/(.*)', admin.site.root),
('^$', 'rpc4django.views.serve_rpc_request'),
('^RPC2$', serve_rpc_request),
)
| from django.conf.urls import patterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^example/', include('example.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/(.*)', admin.site.root),
('^$', 'rpc4django.views.serve_rpc_request'),
('^RPC2$', 'rpc4django.views.serve_rpc_request'),
)
| bsd-3-clause | Python |
821d6c1dfe4569add7d5af74ae64ae04eb05db42 | Enable test.dart on Mac after fixing flakiness caused by signal handlers. | dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk | tools/test_wrapper.py | tools/test_wrapper.py | #!/usr/bin/env python
# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import platform
import string
import subprocess
import sys
from utils import GuessOS
def Main():
args = sys.argv[1:]
tools_dir = os.path.dirname(os.path.realpath(__file__))
dart_binary_prefix = os.path.join(tools_dir, 'testing', 'bin')
if GuessOS() == "win32":
dart_binary = os.path.join(dart_binary_prefix, 'windows', 'dart.exe')
else:
dart_binary = os.path.join(dart_binary_prefix, GuessOS(), 'dart')
current_directory = os.path.abspath('');
client = os.path.abspath(os.path.join(tools_dir, '..'));
if current_directory == os.path.join(client, 'runtime'):
dart_script_name = 'test-runtime.dart'
elif current_directory == os.path.join(client, 'compiler'):
dart_script_name = 'test-compiler.dart'
else:
dart_script_name = 'test.dart'
dart_test_script = string.join([tools_dir, dart_script_name], os.sep)
command = [dart_binary, dart_test_script] + args
return subprocess.call(command)
if __name__ == '__main__':
sys.exit(Main())
| #!/usr/bin/env python
# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import platform
import string
import subprocess
import sys
from utils import GuessOS
def Main():
args = sys.argv[1:]
tools_dir = os.path.dirname(os.path.realpath(__file__))
dart_binary_prefix = os.path.join(tools_dir, 'testing', 'bin')
if GuessOS() == "win32":
dart_binary = os.path.join(dart_binary_prefix, 'windows', 'dart.exe')
else:
dart_binary = os.path.join(dart_binary_prefix, GuessOS(), 'dart')
current_directory = os.path.abspath('');
client = os.path.abspath(os.path.join(tools_dir, '..'));
if current_directory == os.path.join(client, 'runtime'):
if GuessOS() == "macos":
dart_script_name = 'test.py'
dart_binary = 'python'
else:
dart_script_name = 'test-runtime.dart'
elif current_directory == os.path.join(client, 'compiler'):
dart_script_name = 'test-compiler.dart'
else:
dart_script_name = 'test.dart'
dart_test_script = string.join([tools_dir, dart_script_name], os.sep)
command = [dart_binary, dart_test_script] + args
return subprocess.call(command)
if __name__ == '__main__':
sys.exit(Main())
| bsd-3-clause | Python |
2d0285447c2474b7d3b1c60cfdcce51e42306315 | Update distancia3d.py | Alan-Jairo/topgeo | topgeo/distancia3d.py | topgeo/distancia3d.py | def caldist3d(X1, Y1, Z1, X2, Y2, Z2):
"""
Esta funcion sirve para realizar el calculo de distancias entre dos puntos arbitrarios.
"""
# Importamos el modulo numpy
import numpy as np
n = (((X1-X2)**2)+((Y1-Y2)**2)+(Z1-Z2))
Dist = np.sqrt(n)
return Dist
| def caldist3d(X1, Y1, Z1, X2, Y2, Z2):
"""
Esta funcion sirve para realizar el calculo de distancias entre dos puntos arbitrarios.
"""
# Importamos los modulos numpy y pandas
import numpy as np
n = (((X1-X2)**2)+((Y1-Y2)**2)+(Z1-Z2))
Dist = np.sqrt(n)
return Dist | mit | Python |
eb43d2ed017259b9aa993c3057db5e7cf9bc054f | change mensage except | GrupoAndradeMartins/totvserprm | totvserprm/baseapi.py | totvserprm/baseapi.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from auth import create_service
from dicttoxml import dicttoxml
from lxml import objectify
from totvserprm.utils import ClassFactory, normalize_xml
from totvserprm.exceptions import ApiError
class BaseApi(object):
dataservername = ''
def __init__(self, server, username, password):
self.service = create_service(server, username, password)
def create(self, dict, context):
xml = dicttoxml(dict, attr_type=False)
response = self.service.SaveRecord(
DataServerName=self.dataservername, XML=xml, Contexto=context)
if len(response.split(';')) == 2:
codcoligada = response.split(';')[0]
element_id = response.split(';')[1]
custom_class = ClassFactory(
self.__class__.__name__, ['codcoligada', 'id'])
return custom_class(codcoligada=codcoligada, id=element_id)
else:
raise ApiError('Error trying to create {}:\n{}'.format(
self.__class__.__name__, response.encode('ascii', 'ignore')))
def get(self, codcoligada, id):
primary_key = '{};{}'.format(codcoligada, id)
return_from_api = self.service.ReadRecord(
DataServerName=self.dataservername, PrimaryKey=primary_key, Contexto='CODCOLIGADA={}'.format(codcoligada))
return_from_api = normalize_xml(return_from_api)
return objectify.fromstring(return_from_api)
def all(self, codcoligada):
return_from_api = self.service.ReadView(
DataServerName=self.dataservername, Filtro='CODCOLIGADA={}'.format(
codcoligada), Contexto='CODCOLIGADA={}'.format(codcoligada))
return_from_api = normalize_xml(return_from_api)
return objectify.fromstring(return_from_api)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from auth import create_service
from dicttoxml import dicttoxml
from lxml import objectify
from totvserprm.utils import ClassFactory, normalize_xml
from totvserprm.exceptions import ApiError
class BaseApi(object):
dataservername = ''
def __init__(self, server, username, password):
self.service = create_service(server, username, password)
def create(self, dict, context):
xml = dicttoxml(dict, attr_type=False)
response = self.service.SaveRecord(DataServerName=self.dataservername, XML=xml, Contexto=context)
if len(response.split(';')) == 2:
codcoligada = response.split(';')[0]
element_id = response.split(';')[1]
custom_class = ClassFactory(self.__class__.__name__, ['codcoligada', 'id'])
return custom_class(codcoligada=codcoligada, id=element_id)
else:
raise ApiError('Error trying to create {}:\n{}'.format(self.__class__.__name__, response))
def get(self, codcoligada, id):
primary_key = '{};{}'.format(codcoligada, id)
return_from_api = self.service.ReadRecord(
DataServerName=self.dataservername, PrimaryKey=primary_key, Contexto='CODCOLIGADA={}'.format(codcoligada))
return_from_api = normalize_xml(return_from_api)
return objectify.fromstring(return_from_api)
def all(self, codcoligada):
return_from_api = self.service.ReadView(
DataServerName=self.dataservername, Filtro='CODCOLIGADA={}'.format(
codcoligada), Contexto='CODCOLIGADA={}'.format(codcoligada))
return_from_api = normalize_xml(return_from_api)
return objectify.fromstring(return_from_api)
| mit | Python |
884c5f17f1f739ffea1aa16205f147cb04d5dc99 | add cast to float | tourbillonpy/tourbillon-log | tourbillon/log/log.py | tourbillon/log/log.py | import logging
import re
import time
logger = logging.getLogger(__name__)
def get_logfile_metrics(agent):
def follow(thefile, run_event):
thefile.seek(0, 2)
while run_event.is_set():
line = thefile.readline()
if not line:
time.sleep(config['frequency'])
continue
yield line
agent.run_event.wait()
config = agent.pluginconfig['log']
db_config = config['database']
agent.create_database(**db_config)
with open(config['log_file'], 'r') as f:
for line in follow(f, agent.run_event):
point = {
'measurement': config['measurement'],
'tags': dict(),
'fields': dict()
}
logger.debug('-' * 90)
log_line = re.match(config['parser']['regex'], line).groups()
for elem in config['parser']['mapping']:
dict_to_fill = None
dict_to_fill = point['fields'] \
if elem['type'] == 'field' else point['tags']
value = log_line[elem['idx']]
if 'cast' in elem:
if elem['cast'] == 'int':
value = int(value)
elif elem['cast'] == 'float':
value = float(value)
dict_to_fill[elem['name']] = value
logger.debug(point)
logger.debug('-' * 90)
agent.push([point], db_config['name'])
logger.info('get_logfile_metrics terminated')
| import logging
import re
import time
logger = logging.getLogger(__name__)
def get_logfile_metrics(agent):
def follow(thefile, run_event):
thefile.seek(0, 2)
while run_event.is_set():
line = thefile.readline()
if not line:
time.sleep(config['frequency'])
continue
yield line
agent.run_event.wait()
config = agent.pluginconfig['log']
db_config = config['database']
agent.create_database(**db_config)
with open(config['log_file'], 'r') as f:
for line in follow(f, agent.run_event):
point = {
'measurement': config['measurement'],
'tags': dict(),
'fields': dict()
}
logger.debug('-' * 90)
log_line = re.match(config['parser']['regex'], line).groups()
for elem in config['parser']['mapping']:
dict_to_fill = None
dict_to_fill = point['fields'] \
if elem['type'] == 'field' else point['tags']
value = log_line[elem['idx']]
if 'cast' in elem:
if elem['cast'] == 'int':
value = int(value)
dict_to_fill[elem['name']] = value
logger.debug(point)
logger.debug('-' * 90)
agent.push([point], db_config['name'])
logger.info('get_logfile_metrics terminated')
| apache-2.0 | Python |
9aebe02e2342b628cacddb68d2f894fca0bf7463 | Fix socket overflow | HWDexperte/ts3observer | ts3observer/models.py | ts3observer/models.py | '''
Created on Dec 1, 2014
@author: fechnert
'''
class Client(object):
''' Represents the client '''
def __init__(self, clid, socket, **kwargs):
''' Fill the object dynamically with client attributes got from telnet '''
self.clid = clid
self.socket = socket
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return '<Client object ({}: {})>'.format(self.clid, self.client_nickname)
def kick(self, reasonid, reason='ByeBye'):
''' Kick a client '''
self.socket.write('clientkick reasonid={} reasonmsg={} clid={}\n'.format(reasonid, reason, self.clid))
self.socket.read_until('msg=ok', 2)
def move(self, to):
''' Move a client :to: a channel '''
self.socket.write('clientmove cid={} clid={}\n'.format(to, self.clid))
self.socket.read_until('msg=ok', 2)
def ban(self, time=0, reason='Kicked'):
''' Ban a client for :sfor: seconds '''
if time:
self.socket.write('banclient clid={} banreason={} time={}\n'.format(self.clid, reason, time))
else:
self.socket.write('banclient clid={} banreason={}\n'.format(self.clid, reason))
self.socket.read_until('msg=ok', 2)
class Channel(object):
''' Represents the Channel '''
def __init__(self, socket, **kwargs):
''' Fill the object dynamically with channel attributes got from telnet '''
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return '<Channel object ({})>'.format(self.channel_name)
def delete(self):
''' Delete a channel '''
raise NotImplementedError
| '''
Created on Dec 1, 2014
@author: fechnert
'''
class Client(object):
''' Represents the client '''
def __init__(self, clid, socket, **kwargs):
''' Fill the object dynamically with client attributes got from telnet '''
self.clid = clid
self.socket = socket
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return '<Client object ({}: {})>'.format(self.clid, self.client_nickname)
def kick(self, reasonid, reason='ByeBye'):
''' Kick a client '''
self.socket.write('clientkick reasonid={} reasonmsg={} clid={}\n'.format(reasonid, reason, self.clid))
self.socket.read_until('msg=ok', 2)
def move(self, to):
''' Move a client :to: a channel '''
self.socket.write('clientmove cid={} clid={}\n'.format(to, self.clid))
def ban(self, time=0, reason='Kicked'):
''' Ban a client for :sfor: seconds '''
if time:
self.socket.write('banclient clid={} banreason={} time={}\n'.format(self.clid, reason, time))
else:
self.socket.write('banclient clid={} banreason={}\n'.format(self.clid, reason))
self.socket.read_until('msg=ok', 2)
class Channel(object):
''' Represents the Channel '''
def __init__(self, socket, **kwargs):
''' Fill the object dynamically with channel attributes got from telnet '''
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return '<Channel object ({})>'.format(self.channel_name)
def delete(self):
''' Delete a channel '''
raise NotImplementedError
| mit | Python |
9a68ee8ee8d94b46dd8481e6d222c82338975602 | Update documentation of txt2for_prediction script | NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts | txt2for_prediction.py | txt2for_prediction.py | """Script to convert text file to input for embem classifier.
The script tokenizes the text and writes it to a new file containing:
<sentence id>\t<sentence (tokens separated by space)>\tNone\n
Usage: python txt2ml.py <dir in> <dir out>
"""
import argparse
import nltk.data
from nltk.tokenize import word_tokenize
import codecs
import os
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('dir_in', help='directory containing the input text '
'files')
parser.add_argument('dir_out', help='the name of the output directory')
args = parser.parse_args()
tokenizer = nltk.data.load('tokenizers/punkt/dutch.pickle')
text_files = [t for t in os.listdir(args.dir_in) if t.endswith('.txt')]
for text_file in text_files:
print text_file
text = ''
fname = os.path.join(args.dir_in, text_file)
try:
with codecs.open(fname, 'rb', 'utf8') as f:
text = f.read()
except:
with codecs.open(fname, 'rb', 'latin-1') as f:
text = f.read()
# clean up text (remove strange html entities)
text = text.replace('\n', ' ')
text = text.replace(u"\u0092", "'")
text = text.replace(u"\u0093", '"')
text = text.replace(u"\u0097", "'")
sentences = tokenizer.tokenize(text)
sents = {}
fname = os.path.join(args.dir_out, text_file)
with codecs.open(fname, 'wb', 'utf8') as f:
for i, s in enumerate(sentences):
# remove duplicate sentences
if s not in sents:
words = word_tokenize(s)
words_str = unicode(' '.join(words))
s_id = '{}_s_{}'.format(text_file.replace('.txt', ''), i)
f.write(u'{}\t{}\tNone\n'.format(s_id, words_str))
sents[s] = None
| """Script to convert text file to input for embem classifier.
The script tokenizes the text and writes it to a new file containing:
<sentence id>\t<sentence (tokens separated by space)>\tNone\n
Usage: python txt2ml.py <dir in> <dir out>
"""
import argparse
import nltk.data
from nltk.tokenize import word_tokenize
import codecs
import os
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('dir_in', help='the name of the list containing the '
'selected ceneton titles (xlsx)')
parser.add_argument('dir_out', help='the name of the output directory')
args = parser.parse_args()
tokenizer = nltk.data.load('tokenizers/punkt/dutch.pickle')
text_files = [t for t in os.listdir(args.dir_in) if t.endswith('.txt')]
for text_file in text_files:
print text_file
text = ''
fname = os.path.join(args.dir_in, text_file)
try:
with codecs.open(fname, 'rb', 'utf8') as f:
text = f.read()
except:
with codecs.open(fname, 'rb', 'latin-1') as f:
text = f.read()
# clean up text (remove strange html entities)
text = text.replace('\n', ' ')
text = text.replace(u"\u0092", "'")
text = text.replace(u"\u0093", '"')
text = text.replace(u"\u0097", "'")
sentences = tokenizer.tokenize(text)
sents = {}
fname = os.path.join(args.dir_out, text_file)
with codecs.open(fname, 'wb', 'utf8') as f:
for i, s in enumerate(sentences):
# remove duplicate sentences
if s not in sents:
words = word_tokenize(s)
words_str = unicode(' '.join(words))
s_id = '{}_s_{}'.format(text_file.replace('.txt', ''), i)
f.write(u'{}\t{}\tNone\n'.format(s_id, words_str))
sents[s] = None
| apache-2.0 | Python |
212261782cbee393c87e760688bd819de689cc51 | add post support for trace search | varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish | ui/app/index/views.py | ui/app/index/views.py | from flask import request, redirect, render_template
from . import index
from .. import db
@index.route('/', methods=['GET', 'POST'])
def index():
# get database engine connection
connection = db.engine.connect()
# populate spans
spans = []
result = connection.execute("SELECT DISTINCT span_name FROM zipkin_spans")
for row in result:
spans.append( row['span_name'] )
# populate services
services = []
result = connection.execute("SELECT DISTINCT service_name FROM zipkin_annotations")
for row in result:
services.append( row['service_name'] )
# close connection
connection.close()
return render_template('index.html', spans=spans, services=services)
| from flask import request, redirect, render_template
from . import index
from .. import db
@index.route('/', methods=['GET'])
def index():
# get database engine connection
connection = db.engine.connect()
# populate spans
spans = []
result = connection.execute("SELECT DISTINCT span_name FROM zipkin_spans")
for row in result:
spans.append( row['span_name'] )
# populate services
services = []
result = connection.execute("SELECT DISTINCT service_name FROM zipkin_annotations")
for row in result:
services.append( row['service_name'] )
# close connection
connection.close()
return render_template('index.html', spans=spans, services=services)
| bsd-2-clause | Python |
46ab31112853fd1819a392d2b58ba60754a14b38 | fix language reference in language command | Mstrodl/jose,lnmds/jose,Mstrodl/jose | ext/joselang.py | ext/joselang.py | #!/usr/bin/env python3
import discord
import asyncio
import sys
sys.path.append("..")
import jauxiliar as jaux
import joseerror as je
import josecommon as jcommon
class JoseLanguage(jaux.Auxiliar):
def __init__(self, cl):
jaux.Auxiliar.__init__(self, cl)
self.LANGLIST = [
'pt', 'en'
]
async def savedb(self):
await jcommon.save_langdb()
async def ext_load(self):
try:
await jcommon.load_langdb()
return True, ''
except Exception as e:
return False, str(e)
async def ext_unload(self):
try:
await self.savedb()
return True, ''
except Exception as e:
return False, str(e)
async def c_reloadlangdb(self, message, args, cxt):
await self.savedb()
await jcommon.load_langdb()
await cxt.say(":speech_left: langdb reloaded")
async def c_language(self, message, args, cxt):
'''`!language lang` - sets language for a server'''
if message.server is None:
await cxt.say("Language support is not available for DMs")
if len(args) < 2:
await cxt.say(self.c_language.__doc__)
return
language = args[1]
if language not in self.LANGLIST:
await cxt.say("%s: Language not found" % language)
return
await jcommon.langdb_set(message.server.id, language)
await cxt.say(":speech_left: Set language to %s" % language)
await self.savedb()
async def c_listlang(self, message, args, cxt):
'''`!listlang` - lists all available languages'''
await cxt.say(self.codeblock("", " ".join(self.LANGLIST)))
| #!/usr/bin/env python3
import discord
import asyncio
import sys
sys.path.append("..")
import jauxiliar as jaux
import joseerror as je
import josecommon as jcommon
class JoseLanguage(jaux.Auxiliar):
def __init__(self, cl):
jaux.Auxiliar.__init__(self, cl)
self.LANGLIST = [
'pt', 'en'
]
async def savedb(self):
await jcommon.save_langdb()
async def ext_load(self):
try:
await jcommon.load_langdb()
return True, ''
except Exception as e:
return False, str(e)
async def ext_unload(self):
try:
await self.savedb()
return True, ''
except Exception as e:
return False, str(e)
async def c_reloadlangdb(self, message, args, cxt):
await self.savedb()
await jcommon.load_langdb()
await cxt.say(":speech_left: langdb reloaded")
async def c_language(self, message, args, cxt):
'''`!language lang` - sets language for a server'''
if message.server is None:
await cxt.say("Language support is not available for DMs")
#await cxt.sayt("jlang_no_lang")
if len(args) < 2:
await cxt.say(self.c_language.__doc__)
return
language = args[1]
if language not in self.LANGLIST:
await cxt.say("%s: Language not found")
#await cxt.sayt("jlang_lang_404", language=language)
return
await jcommon.langdb_set(message.server.id, language)
await cxt.say(":speech_left: Set language to %s" % language)
#await cxt.sayt("jlang_set_lang", language=language)
await self.savedb()
async def c_listlang(self, message, args, cxt):
'''`!listlang` - lists all available languages'''
await cxt.say(self.codeblock("", " ".join(self.LANGLIST)))
| mit | Python |
85b7ce0fd6326cb2f4a965a854b94a9534b8cfd7 | Fix off-by-one errors in page handling | matthiask/survey | survey/views.py | survey/views.py | from django.shortcuts import get_object_or_404, redirect, render
from django.utils import simplejson
from django.utils.translation import ugettext as _
from survey.forms import QuestionForm
from survey.models import Survey, Question, SurveyAnswer
def home(request, code):
survey = get_object_or_404(Survey.objects.filter(is_active=True), code=code)
return redirect(survey.answers.create())
def survey(request, survey_code, code, page=1):
answer = get_object_or_404(SurveyAnswer.objects.select_related('survey'),
survey__is_active=True,
survey__code=survey_code,
code=code)
pages = answer.survey.pages()
# Only look at valid pages
try:
page = int(page)
groups = pages[page - 1]
except (IndexError, TypeError, ValueError):
return redirect(answer)
try:
data = simplejson.loads(answer.answers)
except ValueError:
data = {}
kwargs = {
'questions': Question.objects.filter(group__in=groups).order_by(
'group', 'ordering').select_related('group'),
'initial': data,
}
if request.method == 'POST':
form = QuestionForm(request.POST, **kwargs)
if form.is_valid():
data.update(form.cleaned_data)
answer.answers = simplejson.dumps(data)
answer.save()
if 'next' in request.POST:
offset = 1
elif 'prev' in request.POST:
offset = -1
if 0 < page + offset <= len(pages):
return redirect('survey.views.survey',
survey_code=survey_code,
code=code,
page=page + offset)
# TODO send to thank you page
else:
form = QuestionForm(**kwargs)
return render(request, 'survey/form.html', {
'survey': answer.survey,
'form': form,
'page': page,
'page_count': len(pages),
'is_first_page': page == 1,
'is_last_page': page == len(pages),
})
| from django.shortcuts import get_object_or_404, redirect, render
from django.utils import simplejson
from django.utils.translation import ugettext as _
from survey.forms import QuestionForm
from survey.models import Survey, Question, SurveyAnswer
def home(request, code):
survey = get_object_or_404(Survey.objects.filter(is_active=True), code=code)
return redirect(survey.answers.create())
def survey(request, survey_code, code, page=1):
answer = get_object_or_404(SurveyAnswer.objects.select_related('survey'),
survey__is_active=True,
survey__code=survey_code,
code=code)
pages = answer.survey.pages()
# Only look at valid pages
try:
page = int(page)
groups = pages[page - 1]
except (IndexError, TypeError, ValueError):
return redirect(answer)
try:
data = simplejson.loads(answer.answers)
except ValueError:
data = {}
kwargs = {
'questions': Question.objects.filter(group__in=groups).order_by(
'group', 'ordering').select_related('group'),
'initial': data,
}
if request.method == 'POST':
form = QuestionForm(request.POST, **kwargs)
if form.is_valid():
data.update(form.cleaned_data)
answer.answers = simplejson.dumps(data)
answer.save()
if 'next' in request.POST:
offset = 1
elif 'prev' in request.POST:
offset = -1
if 0 < page + offset < len(pages):
return redirect('survey.views.survey',
survey_code=survey_code,
code=code,
page=page + offset)
# TODO send to thank you page
else:
form = QuestionForm(**kwargs)
return render(request, 'survey/form.html', {
'survey': answer.survey,
'form': form,
'is_first_page': page == 1,
'is_last_page': page == len(pages) - 1,
})
| bsd-3-clause | Python |
adb0576ced11713fd75aa132107456f453bca669 | Revert accidental removal of __future__.unicode_literals from taggit/admin.py | izquierdo/django-taggit | taggit/admin.py | taggit/admin.py | from __future__ import unicode_literals
from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
class TagAdmin(admin.ModelAdmin):
inlines = [TaggedItemInline]
list_display = ["name", "slug"]
ordering = ["name", "slug"]
search_fields = ["name"]
prepopulated_fields = {"slug": ["name"]}
admin.site.register(Tag, TagAdmin)
| from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
class TagAdmin(admin.ModelAdmin):
inlines = [TaggedItemInline]
list_display = ["name", "slug"]
ordering = ["name", "slug"]
search_fields = ["name"]
prepopulated_fields = {"slug": ["name"]}
admin.site.register(Tag, TagAdmin)
| bsd-3-clause | Python |
446ff54b0818df5b770c1295e230fb0e4be2aeff | Add logging | allanlei/django-multitenant | tenant/utils.py | tenant/utils.py | from django.utils.functional import curry
from tenant.signals import tenant_provider
import threading
import os
import sys
import urlparse
import logging
logger = logging.getLogger(__name__)
def get_current_tenant(sender=None, **hints):
if sender is None:
sender = threading.current_thread()
tenant = None
responses = tenant_provider.send(sender=sender, **hints)
for resp in responses:
if resp[1]:
tenant = str(resp[1])
break
return tenant
def get_tenant(sender, tenant=None, **kwargs):
return tenant
def connect_tenant_provider(dispatch_uid, tenant, sender=None):
if sender is None:
sender = threading.current_thread()
signal_function = curry(get_tenant, tenant=tenant)
tenant_provider.connect(signal_function, weak=False, dispatch_uid=dispatch_uid, sender=sender)
def disconnect_tenant_provider(dispatch_uid, sender=None):
if sender is None:
sender = threading.current_thread()
tenant_provider.disconnect(weak=False, dispatch_uid=dispatch_uid, sender=sender)
def parse_connection_string(string):
urlparse.uses_netloc.append('postgres')
urlparse.uses_netloc.append('mysql')
urlparse.uses_netloc.append('postgresql_psycopg2')
url = urlparse.urlparse(string)
settings = {
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port,
}
if url.scheme == 'postgres' or url.scheme == 'postgresql_psycopg2':
settings['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
if url.scheme == 'mysql':
settings['ENGINE'] = 'django.db.backends.mysql'
if not getattr(settings, 'ENGINE', None):
raise Exception('DATABASE.ENGINE missing')
return settings
def get_public_models():
from django.db.models import get_models, get_app
from tenant import settings
models = []
for app in settings.MULTITENANT_PUBLIC_APPS:
app = app.split('.')[-1]
models.extend(get_models(get_app(app)))
return models
def get_private_models():
from django.db.models import get_models, get_app
from tenant import settings
models = []
for app in settings.MULTITENANT_PRIVATE_APPS:
app = app.split('.')[-1]
models.extend(get_models(get_app(app)))
return models
| from django.utils.functional import curry
from tenant.signals import tenant_provider
import threading
import os
import sys
import urlparse
def get_current_tenant(sender=None, **hints):
if sender is None:
sender = threading.current_thread()
tenant = None
responses = tenant_provider.send(sender=sender, **hints)
for resp in responses:
if resp[1]:
tenant = str(resp[1])
break
return tenant
def connect_tenant_provider(dispatch_uid, tenant):
signal_function = curry(lambda sender, tenant=None, **kwargs: tenant, tenant=tenant)
tenant_provider.connect(signal_function, weak=False, dispatch_uid=dispatch_uid, sender=threading.current_thread())
def disconnect_tenant_provider(dispatch_uid, sender=None):
if sender is None:
sender = threading.current_thread()
tenant_provider.disconnect(weak=False, dispatch_uid=dispatch_uid, sender=sender)
def parse_connection_string(string):
urlparse.uses_netloc.append('postgres')
urlparse.uses_netloc.append('mysql')
urlparse.uses_netloc.append('postgresql_psycopg2')
url = urlparse.urlparse(string)
settings = {
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port,
}
if url.scheme == 'postgres' or url.scheme == 'postgresql_psycopg2':
settings['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
if url.scheme == 'mysql':
settings['ENGINE'] = 'django.db.backends.mysql'
if not getattr(settings, 'ENGINE', None):
raise Exception('DATABASE.ENGINE missing')
return settings
def get_public_models():
from django.db.models import get_models, get_app
from tenant import settings
models = []
for app in settings.MULTITENANT_PUBLIC_APPS:
app = app.split('.')[-1]
models.extend(get_models(get_app(app)))
return models
def get_private_models():
from django.db.models import get_models, get_app
from tenant import settings
models = []
for app in settings.MULTITENANT_PRIVATE_APPS:
app = app.split('.')[-1]
models.extend(get_models(get_app(app)))
return models
| bsd-3-clause | Python |
1737fa10f60a736f9a3a5bc6189b9940a635924d | fix real_n | andrenarchy/pseudopy | pseudopy/visualize.py | pseudopy/visualize.py | from matplotlib import pyplot
import numpy
from matplotlib.tri import Triangulation
from . import compute
def visualize(A,
real_min=-1, real_max=1, real_n=50,
imag_min=-1, imag_max=1, imag_n=50,
levels=None
):
real = numpy.linspace(real_min, real_max, real_n)
imag = numpy.linspace(imag_min, imag_max, imag_n)
x, y = numpy.meshgrid(real, imag)
x = x.flatten()
y = y.flatten()
vals = compute.evaluate_points(A, x+1j*y)
triang = Triangulation(x, y)
pyplot.tricontour(triang, vals, levels=levels)
pyplot.colorbar()
pyplot.show()
| from matplotlib import pyplot
import numpy
from matplotlib.tri import Triangulation
from . import compute
def visualize(A,
real_min=-1, real_max=1, real_n=50,
imag_min=-1, imag_max=1, imag_n=50,
levels=None
):
real = numpy.linspace(real_min, real_max, real_n)
imag = numpy.linspace(imag_min, imag_max, real_n)
x, y = numpy.meshgrid(real, imag)
x = x.flatten()
y = y.flatten()
vals = compute.evaluate_points(A, x+1j*y)
triang = Triangulation(x, y)
pyplot.tricontour(triang, vals, levels=levels)
pyplot.show()
| mit | Python |
d50d5240821cc9dbdfbf29867444df99936559bb | fix base name | krono/pycket,pycket/pycket,samth/pycket,vishesh/pycket,pycket/pycket,cderici/pycket,samth/pycket,magnusmorton/pycket,pycket/pycket,vishesh/pycket,magnusmorton/pycket,cderici/pycket,magnusmorton/pycket,vishesh/pycket,krono/pycket,cderici/pycket,krono/pycket,samth/pycket | pycket/entry_point.py | pycket/entry_point.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
from pycket.expand import load_json_ast_rpython, expand_to_ast, PermException
from pycket.interpreter import interpret_one, ToplevelEnv, interpret_module, GlobalConfig
from pycket.error import SchemeException
from pycket.option_helper import parse_args, ensure_json_ast
from pycket.values import W_String
from rpython.rlib import jit
# _____ Define and setup target ___
def entry_point(argv):
try:
return actual_entry(argv)
except SchemeException, e:
print "ERROR:", e.msg
raise # to see interpreter-level traceback
def actual_entry(argv):
jit.set_param(None, "trace_limit", 20000)
config, names, args, retval = parse_args(argv)
if retval != 0 or config is None:
return retval
args_w = [W_String(arg) for arg in args]
module_name, json_ast = ensure_json_ast(config, names)
if json_ast is None:
ast = expand_to_ast(module_name)
else:
ast = load_json_ast_rpython(json_ast)
GlobalConfig.load(ast)
env = ToplevelEnv()
env.commandline_arguments = args_w
env.module_env.add_module(module_name, ast)
val = interpret_module(ast, env)
return 0
def target(driver, args):
if "--with-branch" in args:
import subprocess
base_name = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
else:
base_name = 'pycket'
if driver.config.translation.jit:
driver.exe_name = base_name + '-%(backend)s'
else:
driver.exe_name = base_name + '-%(backend)s-nojit'
return entry_point, None
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
from pycket.expand import load_json_ast_rpython, expand_to_ast, PermException
from pycket.interpreter import interpret_one, ToplevelEnv, interpret_module, GlobalConfig
from pycket.error import SchemeException
from pycket.option_helper import parse_args, ensure_json_ast
from pycket.values import W_String
from rpython.rlib import jit
# _____ Define and setup target ___
def entry_point(argv):
try:
return actual_entry(argv)
except SchemeException, e:
print "ERROR:", e.msg
raise # to see interpreter-level traceback
def actual_entry(argv):
jit.set_param(None, "trace_limit", 20000)
config, names, args, retval = parse_args(argv)
if retval != 0 or config is None:
return retval
args_w = [W_String(arg) for arg in args]
module_name, json_ast = ensure_json_ast(config, names)
if json_ast is None:
ast = expand_to_ast(module_name)
else:
ast = load_json_ast_rpython(json_ast)
GlobalConfig.load(ast)
env = ToplevelEnv()
env.commandline_arguments = args_w
env.module_env.add_module(module_name, ast)
val = interpret_module(ast, env)
return 0
def target(driver, args):
if "--with-branch" in args:
import subprocess
base_name = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
else:
base_name = 'pycket-'
if driver.config.translation.jit:
driver.exe_name = base_name + '-%(backend)s'
else:
driver.exe_name = base_name + '-%(backend)s-nojit'
return entry_point, None
| mit | Python |
ff62b9542704f2d30da0b536b392d570181bd599 | Add a missing import to the ZayPay script | diath/pyfsw,diath/pyfsw,diath/pyfsw | pyfsw/views/zaypay.py | pyfsw/views/zaypay.py | from flask import render_template, request
from pyfsw import app, db
from pyfsw import login_required, current_user
from pyfsw import Account, ZayPayHistory
from pyfsw import ZAYPAY_OPTIONS
import requests
from bs4 import BeautifulSoup
from time import time
def zaypay_show_payment(payment_id, option):
request = requests.get('https://secure.zaypay.com///pay/{}/payments/{}/?key={}'.format(
option.price_id, payment_id, option.price_key
))
if request.status_code != 200:
return None
data = BeautifulSoup(request.text)
return data.response
@app.route('/zaypay/pay')
@login_required
def route_zaypay():
return render_template(
'zaypay/pay.htm',
options = ZAYPAY_OPTIONS, account_id = current_user().id
)
@app.route('/zaypay/ipn')
def route_zaypay_ipn():
error = False
# Get the payment ID param
payment_id = request.args.get('payment_id', -1, type=int)
if payment_id == -1:
error = True
# Get the price setting ID param
price_setting_id = request.args.get('price_setting_id', -1, type=int)
if price_setting_id == -1:
error = True
# Fetch the ZayPay option
option = ZAYPAY_OPTIONS.get(price_setting_id, None)
if not option:
error = True
# Fetch the payment status
data = zaypay_show_payment(payment_id, option)
# Check the payment status
if data.payment.status != 'paid':
error = True
# Check if the code was already used
que = db.session().query(ZayPayHistory).filter(ZayPayHistory.payment_id == payment_id).first()
if que:
error = True
# Fetch the account
var = data.get('your-variables', '')
var = var.split('&')
var = var[0].split('=')[1]
account = db.session().query(Account).filter(Account.id == int(var)).first()
if not account:
error = True
# Add premium points and history entry
# Unlike with PayPal, we don't actually log failed transactions for ZayPay
if not error:
account.points += option.points
history = ZayPayHistory()
history.account_id = account.id
history.timestamp = int(time())
history.payment_id = payment_id
history.price_setting_id = price_setting_id
history.amount = data.get('total-amount', 0.0)
history.points = option.get('points', 0)
db.session().add(history)
db.session().commit()
return '*ok*', 200
| from flask import render_template, request
from pyfsw import app, db
from pyfsw import login_required
from pyfsw import Account, ZayPayHistory
from pyfsw import ZAYPAY_OPTIONS
import requests
from bs4 import BeautifulSoup
from time import time
def zaypay_show_payment(payment_id, option):
request = requests.get('https://secure.zaypay.com///pay/{}/payments/{}/?key={}'.format(
option.price_id, payment_id, option.price_key
))
if request.status_code != 200:
return None
data = BeautifulSoup(request.text)
return data.response
@app.route('/zaypay/pay')
@login_required
def route_zaypay():
return render_template(
'zaypay/pay.htm',
options = ZAYPAY_OPTIONS, account_id = current_user().id
)
@app.route('/zaypay/ipn')
def route_zaypay_ipn():
error = False
# Get the payment ID param
payment_id = request.args.get('payment_id', -1, type=int)
if payment_id == -1:
error = True
# Get the price setting ID param
price_setting_id = request.args.get('price_setting_id', -1, type=int)
if price_setting_id == -1:
error = True
# Fetch the ZayPay option
option = ZAYPAY_OPTIONS.get(price_setting_id, None)
if not option:
error = True
# Fetch the payment status
data = zaypay_show_payment(payment_id, option)
# Check the payment status
if data.payment.status != 'paid':
error = True
# Check if the code was already used
que = db.session().query(ZayPayHistory).filter(ZayPayHistory.payment_id == payment_id).first()
if que:
error = True
# Fetch the account
var = data.get('your-variables', '')
var = var.split('&')
var = var[0].split('=')[1]
account = db.session().query(Account).filter(Account.id == int(var)).first()
if not account:
error = True
# Add premium points and history entry
# Unlike with PayPal, we don't actually log failed transactions for ZayPay
if not error:
account.points += option.points
history = ZayPayHistory()
history.account_id = account.id
history.timestamp = int(time())
history.payment_id = payment_id
history.price_setting_id = price_setting_id
history.amount = data.get('total-amount', 0.0)
history.points = option.get('points', 0)
db.session().add(history)
db.session().commit()
return '*ok*', 200
| mit | Python |
1ec8c1e0505c2b550edec6631b03e02caf41f8cf | Format docstrings in utils/exc.py | cosmoharrigan/pylearn2,hyqneuron/pylearn2-maxsom,pombredanne/pylearn2,msingh172/pylearn2,msingh172/pylearn2,kastnerkyle/pylearn2,sandeepkbhat/pylearn2,se4u/pylearn2,aalmah/pylearn2,ddboline/pylearn2,mclaughlin6464/pylearn2,kose-y/pylearn2,KennethPierce/pylearnk,w1kke/pylearn2,daemonmaker/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,ddboline/pylearn2,caidongyun/pylearn2,sandeepkbhat/pylearn2,theoryno3/pylearn2,daemonmaker/pylearn2,lunyang/pylearn2,kastnerkyle/pylearn2,fishcorn/pylearn2,nouiz/pylearn2,caidongyun/pylearn2,se4u/pylearn2,Refefer/pylearn2,daemonmaker/pylearn2,mkraemer67/pylearn2,jeremyfix/pylearn2,kastnerkyle/pylearn2,se4u/pylearn2,w1kke/pylearn2,shiquanwang/pylearn2,shiquanwang/pylearn2,alexjc/pylearn2,JesseLivezey/pylearn2,msingh172/pylearn2,hantek/pylearn2,bartvm/pylearn2,mkraemer67/pylearn2,kose-y/pylearn2,JesseLivezey/plankton,lamblin/pylearn2,woozzu/pylearn2,shiquanwang/pylearn2,abergeron/pylearn2,mclaughlin6464/pylearn2,JesseLivezey/pylearn2,lancezlin/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,lunyang/pylearn2,fulmicoton/pylearn2,cosmoharrigan/pylearn2,junbochen/pylearn2,fyffyt/pylearn2,chrish42/pylearn,matrogers/pylearn2,jeremyfix/pylearn2,mclaughlin6464/pylearn2,bartvm/pylearn2,woozzu/pylearn2,hyqneuron/pylearn2-maxsom,TNick/pylearn2,KennethPierce/pylearnk,mkraemer67/pylearn2,theoryno3/pylearn2,ddboline/pylearn2,ashhher3/pylearn2,ashhher3/pylearn2,hantek/pylearn2,lamblin/pylearn2,abergeron/pylearn2,lunyang/pylearn2,theoryno3/pylearn2,lancezlin/pylearn2,caidongyun/pylearn2,aalmah/pylearn2,pkainz/pylearn2,se4u/pylearn2,fyffyt/pylearn2,lamblin/pylearn2,woozzu/pylearn2,JesseLivezey/pylearn2,Refefer/pylearn2,abergeron/pylearn2,msingh172/pylearn2,chrish42/pylearn,bartvm/pylearn2,jamessergeant/pylearn2,aalmah/pylearn2,nouiz/pylearn2,Refefer/pylearn2,lamblin/pylearn2,CIFASIS/pylearn2,CIFASIS/pylearn2,junbochen/pylearn2,cosmoharrigan/pylearn2,fishcorn/pylearn2,matrogers/pylearn2,woozzu/pylearn2,TNick/pylearn2,lisa-lab/pylearn2,hantek/pylearn2,hyqneuron/pylearn2-maxsom,sandeepkbhat/pylearn2,shiquanwang/pylearn2,lisa-lab/pylearn2,alexjc/pylearn2,pkainz/pylearn2,matrogers/pylearn2,chrish42/pylearn,CIFASIS/pylearn2,hyqneuron/pylearn2-maxsom,lunyang/pylearn2,skearnes/pylearn2,daemonmaker/pylearn2,jamessergeant/pylearn2,jeremyfix/pylearn2,jeremyfix/pylearn2,alexjc/pylearn2,junbochen/pylearn2,JesseLivezey/plankton,goodfeli/pylearn2,skearnes/pylearn2,pombredanne/pylearn2,skearnes/pylearn2,mkraemer67/pylearn2,w1kke/pylearn2,goodfeli/pylearn2,abergeron/pylearn2,w1kke/pylearn2,junbochen/pylearn2,jamessergeant/pylearn2,nouiz/pylearn2,kose-y/pylearn2,fulmicoton/pylearn2,ashhher3/pylearn2,fulmicoton/pylearn2,alexjc/pylearn2,ashhher3/pylearn2,pombredanne/pylearn2,fishcorn/pylearn2,kose-y/pylearn2,lisa-lab/pylearn2,fyffyt/pylearn2,fyffyt/pylearn2,kastnerkyle/pylearn2,TNick/pylearn2,ddboline/pylearn2,chrish42/pylearn,JesseLivezey/plankton,KennethPierce/pylearnk,pombredanne/pylearn2,fishcorn/pylearn2,goodfeli/pylearn2,lancezlin/pylearn2,fulmicoton/pylearn2,theoryno3/pylearn2,JesseLivezey/pylearn2,sandeepkbhat/pylearn2,hantek/pylearn2,caidongyun/pylearn2,jamessergeant/pylearn2,pkainz/pylearn2,lisa-lab/pylearn2,Refefer/pylearn2,lancezlin/pylearn2,nouiz/pylearn2,aalmah/pylearn2,mclaughlin6464/pylearn2,skearnes/pylearn2,TNick/pylearn2,bartvm/pylearn2,cosmoharrigan/pylearn2,CIFASIS/pylearn2,KennethPierce/pylearnk | pylearn2/utils/exc.py | pylearn2/utils/exc.py | __author__ = "Ian Goodfellow"
"""
Exceptions used by basic support utilities.
"""
class EnvironmentVariableError(Exception):
"""
An exception raised when a required environment variable is not defined
"""
def __init__(self, *args):
super(EnvironmentVariableError,self).__init__(*args)
| __author__ = "Ian Goodfellow"
"""
Exceptions used by basic support utilities.
"""
class EnvironmentVariableError(Exception):
""" An exception raised when a required environment variable is not defined """
def __init__(self, *args):
super(EnvironmentVariableError,self).__init__(*args)
| bsd-3-clause | Python |
f585746da7e4a01eed023b15a756df5422bf42b9 | Bump version | cool-RR/PySnooper,cool-RR/PySnooper | pysnooper/__init__.py | pysnooper/__init__.py | # Copyright 2019 Ram Rachum and collaborators.
# This program is distributed under the MIT license.
'''
PySnooper - Never use print for debugging again
Usage:
import pysnooper
@pysnooper.snoop()
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/cool-RR/PySnooper
'''
from .tracer import Tracer as snoop
from .variables import Attrs, Exploding, Indices, Keys
import collections
__VersionInfo = collections.namedtuple('VersionInfo',
('major', 'minor', 'micro'))
__version__ = '0.2.1'
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
del collections, __VersionInfo # Avoid polluting the namespace
| # Copyright 2019 Ram Rachum and collaborators.
# This program is distributed under the MIT license.
'''
PySnooper - Never use print for debugging again
Usage:
import pysnooper
@pysnooper.snoop()
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/cool-RR/PySnooper
'''
from .tracer import Tracer as snoop
from .variables import Attrs, Exploding, Indices, Keys
import collections
__VersionInfo = collections.namedtuple('VersionInfo',
('major', 'minor', 'micro'))
__version__ = '0.2.0'
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
del collections, __VersionInfo # Avoid polluting the namespace
| mit | Python |
3aa4f192f4ef4807586458234708a9424bb0b8e6 | make separate tests for raised 'Error' | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | tests/cupy_tests/binary_tests/test_packing.py | tests/cupy_tests/binary_tests/test_packing.py | import numpy
import unittest
import pytest
import cupy
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
@testing.for_int_dtypes()
@testing.numpy_cupy_array_equal()
def check_packbits(self, data, xp, dtype):
# Note numpy <= 1.9 raises an Exception when an input array is bool.
# See https://github.com/numpy/numpy/issues/5377
a = xp.array(data, dtype=dtype)
return xp.packbits(a)
@testing.numpy_cupy_array_equal()
def check_unpackbits(self, data, xp, bitorder='big'):
a = xp.array(data, dtype=xp.uint8)
return xp.unpackbits(a, bitorder=bitorder)
def test_packbits(self):
self.check_packbits([0])
self.check_packbits([1])
self.check_packbits([0, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1, 1])
self.check_packbits(numpy.arange(24).reshape((2, 3, 4)) % 2)
def test_packbits_empty(self):
# Note packbits of numpy <= 1.11 has a bug against empty arrays.
# See https://github.com/numpy/numpy/issues/8324
self.check_packbits([])
def test_unpackbits(self):
self.check_unpackbits([])
self.check_unpackbits([0])
self.check_unpackbits([1])
self.check_unpackbits([255])
self.check_unpackbits([100, 200, 123, 213])
def test_unpack_invalid_array(self):
a = cupy.array([10, 20, 30])
pytest.raises(TypeError, cupy.unpackbits, a)
pytest.raises(TypeError, cupy.unpackbits, a.astype(float))
def test_pack_unpack_order(self):
for bo in ['big', 'little']:
self.check_unpackbits([], bitorder=bo)
self.check_unpackbits([0], bitorder=bo)
self.check_unpackbits([1], bitorder=bo)
self.check_unpackbits([255], bitorder=bo)
self.check_unpackbits([100, 200, 123, 213], bitorder=bo)
def test_unpack_invalid_order(self):
a = cupy.array([10, 20, 30], dtype=cupy.uint8)
pytest.raises(ValueError, cupy.unpackbits, a, bitorder='r')
pytest.raises(ValueError, cupy.unpackbits, a, bitorder=10)
| import numpy
import unittest
import pytest
import cupy
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
@testing.for_int_dtypes()
@testing.numpy_cupy_array_equal()
def check_packbits(self, data, xp, dtype):
# Note numpy <= 1.9 raises an Exception when an input array is bool.
# See https://github.com/numpy/numpy/issues/5377
a = xp.array(data, dtype=dtype)
return xp.packbits(a)
@testing.numpy_cupy_array_equal()
def check_unpackbits(self, data, xp, bitorder='big'):
a = xp.array(data, dtype=xp.uint8)
return xp.unpackbits(a, bitorder=bitorder)
def test_packbits(self):
self.check_packbits([0])
self.check_packbits([1])
self.check_packbits([0, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1, 1])
self.check_packbits(numpy.arange(24).reshape((2, 3, 4)) % 2)
def test_packbits_empty(self):
# Note packbits of numpy <= 1.11 has a bug against empty arrays.
# See https://github.com/numpy/numpy/issues/8324
self.check_packbits([])
def test_unpackbits(self):
self.check_unpackbits([])
self.check_unpackbits([0])
self.check_unpackbits([1])
self.check_unpackbits([255])
self.check_unpackbits([100, 200, 123, 213])
a = cupy.array([10, 20, 30])
pytest.raises(TypeError, cupy.unpackbits, a)
pytest.raises(TypeError, cupy.unpackbits, a.astype(float))
def test_pack_unpack_order(self):
for bo in ['big', 'little']:
self.check_unpackbits([], bitorder=bo)
self.check_unpackbits([0], bitorder=bo)
self.check_unpackbits([1], bitorder=bo)
self.check_unpackbits([255], bitorder=bo)
self.check_unpackbits([100, 200, 123, 213], bitorder=bo)
a = cupy.array([10, 20, 30], dtype=cupy.uint8)
pytest.raises(ValueError, cupy.unpackbits, a, bitorder='r')
pytest.raises(ValueError, cupy.unpackbits, a, bitorder=10)
| mit | Python |
be39343f8db68b33a4fc246b5a7451344353aa94 | Allow arbitrary output types in job graph (#462) | google/turbinia,google/turbinia,google/turbinia,google/turbinia,google/turbinia | tools/turbinia_job_graph.py | tools/turbinia_job_graph.py | #!/usr/bin/env python -v
# -*- coding: utf-8 -*-
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Graph to visualise job/evidence relationships."""
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import graphviz
import sys
from turbinia.jobs import manager as jobs_manager
try:
unicode
except NameError:
unicode = str # pylint: disable=redefined-builtin
def create_graph():
"""Create graph of relationships between Turbinia jobs and evidence.
Returns:
Instance of graphviz.dot.Digraph
"""
dot = graphviz.Digraph(comment='Turbinia Evidence graph', format='png')
for _, job in jobs_manager.JobsManager.GetJobs():
dot.node(job.NAME)
for evidence in job.evidence_input:
dot.node(evidence.__name__, shape='box')
dot.edge(evidence.__name__, job.NAME)
for evidence in job.evidence_output:
dot.node(evidence.__name__, shape='box')
dot.edge(job.NAME, evidence.__name__)
return dot
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Create Turbinia evidence graph.')
parser.add_argument(
'-f', '--format', default='png',
help='The format of the output file you wish to generate. Supported '
'types are here: http://www.graphviz.org/doc/info/output.html')
parser.add_argument('filename', type=unicode, help='where to save the file')
args = parser.parse_args()
if args.format not in graphviz.FORMATS:
print('Format type {0:s} is not supported'.format(args.format))
sys.exit(1)
graph = create_graph()
output_file = args.filename.replace('.png', '')
try:
rendered_graph = graph.render(
filename=output_file, format=args.format, cleanup=True)
print('Graph generated and saved to: {0}'.format(rendered_graph))
except graphviz.ExecutableNotFound:
print('Graphviz is not installed - Run: apt-get install graphviz')
| # -*- coding: utf-8 -*-
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Graph to visualise job/evidence relationships."""
from __future__ import unicode_literals
import argparse
import graphviz
from turbinia.jobs import manager as jobs_manager
try:
unicode
except NameError:
unicode = str # pylint: disable=redefined-builtin
def create_graph():
"""Create graph of relationships between Turbinia jobs and evidence.
Returns:
Instance of graphviz.dot.Digraph
"""
dot = graphviz.Digraph(comment='Turbinia Evidence graph', format='png')
for _, job in jobs_manager.JobsManager.GetJobs():
dot.node(job.NAME)
for evidence in job.evidence_input:
dot.node(evidence.__name__, shape='box')
dot.edge(evidence.__name__, job.NAME)
for evidence in job.evidence_output:
dot.node(evidence.__name__, shape='box')
dot.edge(job.NAME, evidence.__name__)
return dot
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Create Turbinia evidence graph.')
parser.add_argument('filename', type=unicode, help='where to save the file')
args = parser.parse_args()
graph = create_graph()
output_file = args.filename.replace('.png', '')
try:
rendered_graph = graph.render(filename=output_file, cleanup=True)
print('Graph generated and saved to: {0}'.format(rendered_graph))
except graphviz.ExecutableNotFound:
print('Graphviz is not installed - Run: apt-get install graphviz')
| apache-2.0 | Python |
f5b5d3f65ab3377bab3c0273367d8ab2fa5dfdaa | Use str.split maxsplit in the time component | sdague/home-assistant,tmm1/home-assistant,mikaelboman/home-assistant,mikaelboman/home-assistant,florianholzapfel/home-assistant,postlund/home-assistant,partofthething/home-assistant,robbiet480/home-assistant,morphis/home-assistant,ct-23/home-assistant,betrisey/home-assistant,fbradyirl/home-assistant,shaftoe/home-assistant,PetePriority/home-assistant,hexxter/home-assistant,hexxter/home-assistant,g12mcgov/home-assistant,robjohnson189/home-assistant,LinuxChristian/home-assistant,sfam/home-assistant,sdague/home-assistant,postlund/home-assistant,xifle/home-assistant,mahendra-r/home-assistant,Zyell/home-assistant,miniconfig/home-assistant,dmeulen/home-assistant,Julian/home-assistant,leppa/home-assistant,ma314smith/home-assistant,Duoxilian/home-assistant,HydrelioxGitHub/home-assistant,betrisey/home-assistant,philipbl/home-assistant,tinloaf/home-assistant,adrienbrault/home-assistant,badele/home-assistant,sander76/home-assistant,tboyce1/home-assistant,happyleavesaoc/home-assistant,Teagan42/home-assistant,robbiet480/home-assistant,LinuxChristian/home-assistant,miniconfig/home-assistant,bdfoster/blumate,alexmogavero/home-assistant,leoc/home-assistant,mahendra-r/home-assistant,justyns/home-assistant,leoc/home-assistant,jamespcole/home-assistant,emilhetty/home-assistant,titilambert/home-assistant,mikaelboman/home-assistant,aequitas/home-assistant,GenericStudent/home-assistant,varunr047/homefile,tomduijf/home-assistant,mahendra-r/home-assistant,Cinntax/home-assistant,coteyr/home-assistant,ma314smith/home-assistant,nugget/home-assistant,GenericStudent/home-assistant,alanbowman/home-assistant,g12mcgov/home-assistant,Julian/home-assistant,jnewland/home-assistant,keerts/home-assistant,eagleamon/home-assistant,alexkolar/home-assistant,partofthething/home-assistant,tchellomello/home-assistant,Danielhiversen/home-assistant,emilhetty/home-assistant,luxus/home-assistant,MartinHjelmare/home-assistant,turbokongen/home-assistant,bdfoster/blumate,leoc/home-assistant,oandrew/home-assistant,JshWright/home-assistant,stefan-jonasson/home-assistant,sfam/home-assistant,alexkolar/home-assistant,Smart-Torvy/torvy-home-assistant,Zac-HD/home-assistant,happyleavesaoc/home-assistant,instantchow/home-assistant,Smart-Torvy/torvy-home-assistant,devdelay/home-assistant,Duoxilian/home-assistant,LinuxChristian/home-assistant,LinuxChristian/home-assistant,nnic/home-assistant,caiuspb/home-assistant,tboyce021/home-assistant,oandrew/home-assistant,miniconfig/home-assistant,tboyce1/home-assistant,MartinHjelmare/home-assistant,Cinntax/home-assistant,tchellomello/home-assistant,eagleamon/home-assistant,tomduijf/home-assistant,bdfoster/blumate,robjohnson189/home-assistant,jabesq/home-assistant,molobrakos/home-assistant,auduny/home-assistant,hexxter/home-assistant,ma314smith/home-assistant,tboyce1/home-assistant,rohitranjan1991/home-assistant,jnewland/home-assistant,srcLurker/home-assistant,badele/home-assistant,devdelay/home-assistant,toddeye/home-assistant,dmeulen/home-assistant,morphis/home-assistant,Julian/home-assistant,DavidLP/home-assistant,Duoxilian/home-assistant,JshWright/home-assistant,emilhetty/home-assistant,auduny/home-assistant,florianholzapfel/home-assistant,shaftoe/home-assistant,sffjunkie/home-assistant,alexmogavero/home-assistant,shaftoe/home-assistant,morphis/home-assistant,LinuxChristian/home-assistant,jaharkes/home-assistant,balloob/home-assistant,sfam/home-assistant,hexxter/home-assistant,luxus/home-assistant,sffjunkie/home-assistant,keerts/home-assistant,persandstrom/home-assistant,molobrakos/home-assistant,caiuspb/home-assistant,srcLurker/home-assistant,tboyce021/home-assistant,alexmogavero/home-assistant,shaftoe/home-assistant,nugget/home-assistant,turbokongen/home-assistant,DavidLP/home-assistant,oandrew/home-assistant,pottzer/home-assistant,Smart-Torvy/torvy-home-assistant,justyns/home-assistant,Theb-1/home-assistant,xifle/home-assistant,happyleavesaoc/home-assistant,jawilson/home-assistant,FreekingDean/home-assistant,sffjunkie/home-assistant,hmronline/home-assistant,auduny/home-assistant,g12mcgov/home-assistant,Duoxilian/home-assistant,nkgilley/home-assistant,HydrelioxGitHub/home-assistant,philipbl/home-assistant,persandstrom/home-assistant,justyns/home-assistant,rohitranjan1991/home-assistant,mezz64/home-assistant,open-homeautomation/home-assistant,JshWright/home-assistant,adrienbrault/home-assistant,miniconfig/home-assistant,aronsky/home-assistant,ewandor/home-assistant,nnic/home-assistant,kyvinh/home-assistant,badele/home-assistant,betrisey/home-assistant,robjohnson189/home-assistant,PetePriority/home-assistant,kennedyshead/home-assistant,MartinHjelmare/home-assistant,coteyr/home-assistant,mKeRix/home-assistant,molobrakos/home-assistant,leppa/home-assistant,ct-23/home-assistant,fbradyirl/home-assistant,soldag/home-assistant,open-homeautomation/home-assistant,jaharkes/home-assistant,hmronline/home-assistant,MungoRae/home-assistant,MungoRae/home-assistant,sffjunkie/home-assistant,qedi-r/home-assistant,kyvinh/home-assistant,Teagan42/home-assistant,joopert/home-assistant,dmeulen/home-assistant,tinloaf/home-assistant,jabesq/home-assistant,sffjunkie/home-assistant,emilhetty/home-assistant,open-homeautomation/home-assistant,tinloaf/home-assistant,philipbl/home-assistant,jamespcole/home-assistant,nkgilley/home-assistant,luxus/home-assistant,hmronline/home-assistant,MungoRae/home-assistant,Zyell/home-assistant,robjohnson189/home-assistant,deisi/home-assistant,Zyell/home-assistant,nnic/home-assistant,Zac-HD/home-assistant,leoc/home-assistant,mezz64/home-assistant,dorant/home-assistant,DavidLP/home-assistant,jamespcole/home-assistant,sander76/home-assistant,pottzer/home-assistant,balloob/home-assistant,devdelay/home-assistant,pottzer/home-assistant,MungoRae/home-assistant,happyleavesaoc/home-assistant,w1ll1am23/home-assistant,aoakeson/home-assistant,varunr047/homefile,ct-23/home-assistant,alanbowman/home-assistant,pschmitt/home-assistant,hmronline/home-assistant,pschmitt/home-assistant,Theb-1/home-assistant,FreekingDean/home-assistant,ct-23/home-assistant,instantchow/home-assistant,home-assistant/home-assistant,mikaelboman/home-assistant,fbradyirl/home-assistant,Danielhiversen/home-assistant,coteyr/home-assistant,morphis/home-assistant,florianholzapfel/home-assistant,ewandor/home-assistant,betrisey/home-assistant,tmm1/home-assistant,jawilson/home-assistant,jaharkes/home-assistant,varunr047/homefile,deisi/home-assistant,srcLurker/home-assistant,hmronline/home-assistant,rohitranjan1991/home-assistant,xifle/home-assistant,qedi-r/home-assistant,srcLurker/home-assistant,HydrelioxGitHub/home-assistant,Zac-HD/home-assistant,emilhetty/home-assistant,mKeRix/home-assistant,eagleamon/home-assistant,lukas-hetzenecker/home-assistant,instantchow/home-assistant,tomduijf/home-assistant,kyvinh/home-assistant,deisi/home-assistant,nugget/home-assistant,keerts/home-assistant,jnewland/home-assistant,jabesq/home-assistant,florianholzapfel/home-assistant,titilambert/home-assistant,home-assistant/home-assistant,JshWright/home-assistant,dorant/home-assistant,aoakeson/home-assistant,deisi/home-assistant,aronsky/home-assistant,Theb-1/home-assistant,ewandor/home-assistant,aequitas/home-assistant,nevercast/home-assistant,kennedyshead/home-assistant,ct-23/home-assistant,Smart-Torvy/torvy-home-assistant,oandrew/home-assistant,MungoRae/home-assistant,tboyce1/home-assistant,alanbowman/home-assistant,soldag/home-assistant,stefan-jonasson/home-assistant,devdelay/home-assistant,toddeye/home-assistant,nevercast/home-assistant,mKeRix/home-assistant,aequitas/home-assistant,mikaelboman/home-assistant,jaharkes/home-assistant,dorant/home-assistant,lukas-hetzenecker/home-assistant,w1ll1am23/home-assistant,persandstrom/home-assistant,kyvinh/home-assistant,varunr047/homefile,tmm1/home-assistant,dmeulen/home-assistant,stefan-jonasson/home-assistant,varunr047/homefile,open-homeautomation/home-assistant,mKeRix/home-assistant,caiuspb/home-assistant,alexkolar/home-assistant,bdfoster/blumate,ma314smith/home-assistant,PetePriority/home-assistant,stefan-jonasson/home-assistant,Julian/home-assistant,xifle/home-assistant,deisi/home-assistant,joopert/home-assistant,Zac-HD/home-assistant,keerts/home-assistant,nevercast/home-assistant,bdfoster/blumate,balloob/home-assistant,aoakeson/home-assistant,philipbl/home-assistant,eagleamon/home-assistant,alexmogavero/home-assistant | homeassistant/components/scheduler/time.py | homeassistant/components/scheduler/time.py | """
homeassistant.components.scheduler.time
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An event in the scheduler component that will call the service
every specified day at the time specified.
A time event need to have the type 'time', which service to call and at
which time.
{
"type": "time",
"service": "switch.turn_off",
"time": "22:00:00"
}
"""
from datetime import timedelta
import logging
import homeassistant.util.dt as dt_util
from homeassistant.helpers.event import track_point_in_time
from homeassistant.components.scheduler import ServiceEventListener
_LOGGER = logging.getLogger(__name__)
def create_event_listener(schedule, event_listener_data):
""" Create a TimeEvent based on the description. """
service = event_listener_data['service']
(hour, minute, second) = [int(x) for x in
event_listener_data['time'].split(':', 3)]
return TimeEventListener(schedule, service, hour, minute, second)
# pylint: disable=too-few-public-methods
class TimeEventListener(ServiceEventListener):
""" The time event that the scheduler uses. """
# pylint: disable=too-many-arguments
def __init__(self, schedule, service, hour, minute, second):
ServiceEventListener.__init__(self, schedule, service)
self.hour = hour
self.minute = minute
self.second = second
def schedule(self, hass):
""" Schedule this event so that it will be called. """
next_time = dt_util.now().replace(
hour=self.hour, minute=self.minute, second=self.second)
# Calculate the next time the event should be executed.
# That is the next day that the schedule is configured to run
while next_time < dt_util.now() or \
next_time.weekday() not in self.my_schedule.days:
next_time = next_time + timedelta(days=1)
# pylint: disable=unused-argument
def execute(now):
""" Call the execute method """
self.execute(hass)
track_point_in_time(hass, execute, next_time)
_LOGGER.info(
'TimeEventListener scheduled for %s, will call service %s.%s',
next_time, self.domain, self.service)
| """
homeassistant.components.scheduler.time
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An event in the scheduler component that will call the service
every specified day at the time specified.
A time event need to have the type 'time', which service to call and at
which time.
{
"type": "time",
"service": "switch.turn_off",
"time": "22:00:00"
}
"""
from datetime import timedelta
import logging
import homeassistant.util.dt as dt_util
from homeassistant.helpers.event import track_point_in_time
from homeassistant.components.scheduler import ServiceEventListener
_LOGGER = logging.getLogger(__name__)
def create_event_listener(schedule, event_listener_data):
""" Create a TimeEvent based on the description. """
service = event_listener_data['service']
(hour, minute, second) = [int(x) for x in
event_listener_data['time'].split(':')]
return TimeEventListener(schedule, service, hour, minute, second)
# pylint: disable=too-few-public-methods
class TimeEventListener(ServiceEventListener):
""" The time event that the scheduler uses. """
# pylint: disable=too-many-arguments
def __init__(self, schedule, service, hour, minute, second):
ServiceEventListener.__init__(self, schedule, service)
self.hour = hour
self.minute = minute
self.second = second
def schedule(self, hass):
""" Schedule this event so that it will be called. """
next_time = dt_util.now().replace(
hour=self.hour, minute=self.minute, second=self.second)
# Calculate the next time the event should be executed.
# That is the next day that the schedule is configured to run
while next_time < dt_util.now() or \
next_time.weekday() not in self.my_schedule.days:
next_time = next_time + timedelta(days=1)
# pylint: disable=unused-argument
def execute(now):
""" Call the execute method """
self.execute(hass)
track_point_in_time(hass, execute, next_time)
_LOGGER.info(
'TimeEventListener scheduled for %s, will call service %s.%s',
next_time, self.domain, self.service)
| apache-2.0 | Python |
8e225f890fd90112a125648cbd49507340cd3224 | Fix type of EventIndex fields | tuomas777/linkedevents,aapris/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents | events/search_indexes.py | events/search_indexes.py | from haystack import indexes
from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
start_time = indexes.DateTimeField(model_attr='start_time')
end_time = indexes.DateTimeField(model_attr='end_time')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Event
def prepare(self, obj):
#obj.lang_keywords = obj.keywords.filter(language=get_language())
if obj.description:
obj.description = strip_tags(obj.description)
return super(EventIndex, self).prepare(obj)
def index_queryset(self, using=None):
return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)
class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Place
| from haystack import indexes
from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
start_time = indexes.DateField(model_attr='start_time')
end_time = indexes.DateField(model_attr='end_time')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Event
def prepare(self, obj):
#obj.lang_keywords = obj.keywords.filter(language=get_language())
if obj.description:
obj.description = strip_tags(obj.description)
return super(EventIndex, self).prepare(obj)
def index_queryset(self, using=None):
return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)
class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Place
| mit | Python |
25ae3020afbb55efc90a2c7cb88434a7e1b45b5a | Fix an issue that may lead to an excessive memory usage in Tracer. (#1208) | iamahuman/angr,schieb/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr,angr/angr,schieb/angr,iamahuman/angr | angr/procedures/tracer/random.py | angr/procedures/tracer/random.py | import angr
import claripy
class random(angr.SimProcedure):
#pylint:disable=arguments-differ
IS_SYSCALL = True
def run(self, buf, count, rnd_bytes):
# return code
r = self.state.solver.ite_cases(((self.state.cgc.addr_invalid(buf), self.state.cgc.EFAULT),
(self.state.solver.And(rnd_bytes != 0,
self.state.cgc.addr_invalid(rnd_bytes)), self.state.cgc.EFAULT)),
claripy.BVV(0, self.state.arch.bits))
if self.state.satisfiable(extra_constraints=[count!=0]):
max_size = min(1024768 * 10, self.state.solver.max_int(count))
self.state.memory.store(buf,
claripy.BVV(b'A' * max_size),
size=count
)
self.state.memory.store(rnd_bytes,
count,
endness='Iend_LE',
condition=rnd_bytes != 0)
return r
| import angr
import claripy
class random(angr.SimProcedure):
#pylint:disable=arguments-differ
IS_SYSCALL = True
def run(self, buf, count, rnd_bytes):
# return code
r = self.state.solver.ite_cases(((self.state.cgc.addr_invalid(buf), self.state.cgc.EFAULT),
(self.state.solver.And(rnd_bytes != 0,
self.state.cgc.addr_invalid(rnd_bytes)), self.state.cgc.EFAULT)),
claripy.BVV(0, self.state.arch.bits))
if self.state.satisfiable(extra_constraints=[count!=0]):
self.state.memory.store(buf,
claripy.BVV(b'A' * self.state.solver.max_int(count)),
size=count)
self.state.memory.store(rnd_bytes,
count,
endness='Iend_LE',
condition=rnd_bytes != 0)
return r
| bsd-2-clause | Python |
40905893c296e2c812539079925adfd25e39d44f | Change location of default settings in WSGI | petervanderdoes/wger,rolandgeider/wger,DeveloperMal/wger,rolandgeider/wger,wger-project/wger,kjagoo/wger_stark,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,rolandgeider/wger,wger-project/wger,kjagoo/wger_stark,DeveloperMal/wger,DeveloperMal/wger,kjagoo/wger_stark,wger-project/wger,petervanderdoes/wger,rolandgeider/wger,kjagoo/wger_stark,petervanderdoes/wger | wger/wsgi.py | wger/wsgi.py | """
WSGI config for workout_manager project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| """
WSGI config for workout_manager project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "workout_manager.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| agpl-3.0 | Python |
977f114abae2779fa64b8086b083c6563dc20d83 | Add gsutil to PATH in android_compile.py | Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot | slave/skia_slave_scripts/android_compile.py | slave/skia_slave_scripts/android_compile.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step for Android """
from build_step import BuildStep
from utils import shell_utils
import os
import sys
ENV_VAR = 'ANDROID_SDK_ROOT'
class AndroidCompile(BuildStep):
def _Run(self):
os.environ['PATH'] += os.pathsep + os.path.abspath(os.path.join(
os.pardir, os.pardir, os.pardir, os.pardir, 'third_party', 'gsutil'))
os.environ[ENV_VAR] = self._args['android_sdk_root']
cmd = [os.path.join(os.pardir, 'android', 'bin', 'android_make'),
self._args['target'],
'-d', self._args['device'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
if os.name != 'nt':
try:
ccache = shell_utils.Bash(['which', 'ccache'], echo=False)
if ccache:
cmd.append('--use-ccache')
except Exception:
pass
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(AndroidCompile))
| #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step for Android """
from build_step import BuildStep
from utils import shell_utils
import os
import sys
ENV_VAR = 'ANDROID_SDK_ROOT'
class AndroidCompile(BuildStep):
def _Run(self):
os.environ[ENV_VAR] = self._args['android_sdk_root']
cmd = [os.path.join(os.pardir, 'android', 'bin', 'android_make'),
self._args['target'],
'-d', self._args['device'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
if os.name != 'nt':
try:
ccache = shell_utils.Bash(['which', 'ccache'], echo=False)
if ccache:
cmd.append('--use-ccache')
except Exception:
pass
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(AndroidCompile))
| bsd-3-clause | Python |
f6d2295980898b9d20dfdb7a2c539e311c75fa8e | Remove whitespace. | flask-admin/flask-admin,flask-admin/flask-admin,flask-admin/flask-admin,flask-admin/flask-admin | flask_admin/contrib/sqla/typefmt.py | flask_admin/contrib/sqla/typefmt.py | from sqlalchemy.ext.associationproxy import _AssociationList
from flask_admin.model.typefmt import BASE_FORMATTERS, EXPORT_FORMATTERS, \
list_formatter
from sqlalchemy.orm.collections import InstrumentedList
def choice_formatter(view, choice):
"""
Return label of selected choice
see https://sqlalchemy-utils.readthedocs.io/
:param choice:
sqlalchemy_utils Choice, which has a `code` and a `value`
"""
return choice.value
def arrow_formatter(view, arrow_time):
"""
Return human-friendly string of the time relative to now.
see https://arrow.readthedocs.io/
:param arrow_time:
Arrow object for handling datetimes
"""
return arrow_time.humanize()
def arrow_export_formatter(view, arrow_time):
"""
Return string representation of Arrow object
see https://arrow.readthedocs.io/
:param arrow_time:
Arrow object for handling datetimes
"""
return arrow_time.format()
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
EXPORT_FORMATTERS = EXPORT_FORMATTERS.copy()
DEFAULT_FORMATTERS.update({
InstrumentedList: list_formatter,
_AssociationList: list_formatter,
})
try:
from sqlalchemy_utils import Choice
DEFAULT_FORMATTERS[Choice] = choice_formatter
except ImportError:
pass
try:
from arrow import Arrow
DEFAULT_FORMATTERS[Arrow] = arrow_formatter
EXPORT_FORMATTERS[Arrow] = arrow_export_formatter
except ImportError:
pass
| from sqlalchemy.ext.associationproxy import _AssociationList
from flask_admin.model.typefmt import BASE_FORMATTERS, EXPORT_FORMATTERS, \
list_formatter
from sqlalchemy.orm.collections import InstrumentedList
def choice_formatter(view, choice):
"""
Return label of selected choice
see https://sqlalchemy-utils.readthedocs.io/
:param choice:
sqlalchemy_utils Choice, which has a `code` and a `value`
"""
return choice.value
def arrow_formatter(view, arrow_time):
"""
Return human-friendly string of the time relative to now.
see https://arrow.readthedocs.io/
:param arrow_time:
Arrow object for handling datetimes
"""
return arrow_time.humanize()
def arrow_export_formatter(view, arrow_time):
"""
Return string representation of Arrow object
see https://arrow.readthedocs.io/
:param arrow_time:
Arrow object for handling datetimes
"""
return arrow_time.format()
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
EXPORT_FORMATTERS = EXPORT_FORMATTERS.copy()
DEFAULT_FORMATTERS.update({
InstrumentedList: list_formatter,
_AssociationList: list_formatter,
})
try:
from sqlalchemy_utils import Choice
DEFAULT_FORMATTERS[Choice] = choice_formatter
except ImportError:
pass
try:
from arrow import Arrow
DEFAULT_FORMATTERS[Arrow] = arrow_formatter
EXPORT_FORMATTERS[Arrow] = arrow_export_formatter
except ImportError:
pass
| bsd-3-clause | Python |
d5c65218ea57944b202a5dcd7170eff445ffe743 | Fix median calculation. | miaecle/deepchem,miaecle/deepchem,ktaneishi/deepchem,lilleswing/deepchem,deepchem/deepchem,ktaneishi/deepchem,miaecle/deepchem,deepchem/deepchem,lilleswing/deepchem,peastman/deepchem,peastman/deepchem,ktaneishi/deepchem,lilleswing/deepchem | examples/low_data/sider_graph_conv_one_fold.py | examples/low_data/sider_graph_conv_one_fold.py | """
Train low-data Sider models with graph-convolution. Test last fold only.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import tempfile
import numpy as np
import tensorflow as tf
import deepchem as dc
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
# 4-fold splits
K = 4
# num positive/negative ligands
n_pos = 10
n_neg = 10
# 10 trials on test-set
n_trials = 20
sider_tasks, fold_datasets, transformers = dc.molnet.load_sider(
featurizer='GraphConv', split="task")
# Define metric
metric = dc.metrics.Metric(dc.metrics.roc_auc_score, mode="classification")
train_folds = fold_datasets[:-1]
train_dataset = dc.splits.merge_fold_datasets(train_folds)
test_dataset = fold_datasets[-1]
# Get supports on test-set
support_generator = dc.data.SupportGenerator(test_dataset, n_pos, n_neg,
n_trials)
# Compute accuracies
task_scores = {task: [] for task in range(len(test_dataset.get_task_names()))}
for trial_num, (task, support) in enumerate(support_generator):
print("Starting trial %d" % trial_num)
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
#graph_model = dc.nn.SequentialGraph(n_feat)
model = GraphConvModel(1, graph_conv_layers=[
64, 128, 64], batch_size=batch_size)
# Fit trained model
model.fit(support, nb_epoch=10)
# Test model
task_dataset = dc.data.get_task_dataset_minus_support(test_dataset, support,
task)
y_pred = model.predict(task_dataset)
score = metric.compute_metric(task_dataset.y, y_pred, task_dataset.w)
print("Score on task %s is %s" % (str(task), str(score)))
task_scores[task].append(score)
# Join information for all tasks.
mean_task_scores = {}
std_task_scores = {}
for task in range(len(test_dataset.get_task_names())):
mean_task_scores[task] = np.mean(np.array(task_scores[task]))
std_task_scores[task] = np.std(np.array(task_scores[task]))
print("Mean scores")
print(mean_task_scores)
print("Standard Deviations")
print(std_task_scores)
print("Median of Mean Scores")
"""
To support both python 3.x and 2.7
dict.values() returns an object of type dict_values
and np.median shouts loudly if this is the case so
converted it to list before passing it to np.array()
"""
try:
print(np.median(np.array(mean_task_scores.values())))
except TypeError as e:
print(np.median(np.array(list(mean_task_scores.values()))))
| """
Train low-data Sider models with graph-convolution. Test last fold only.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import tempfile
import numpy as np
import tensorflow as tf
import deepchem as dc
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
# 4-fold splits
K = 4
# num positive/negative ligands
n_pos = 10
n_neg = 10
# 10 trials on test-set
n_trials = 20
sider_tasks, fold_datasets, transformers = dc.molnet.load_sider(
featurizer='GraphConv', split="task")
# Define metric
metric = dc.metrics.Metric(dc.metrics.roc_auc_score, mode="classification")
train_folds = fold_datasets[:-1]
train_dataset = dc.splits.merge_fold_datasets(train_folds)
test_dataset = fold_datasets[-1]
# Get supports on test-set
support_generator = dc.data.SupportGenerator(test_dataset, n_pos, n_neg,
n_trials)
# Compute accuracies
task_scores = {task: [] for task in range(len(test_dataset.get_task_names()))}
for trial_num, (task, support) in enumerate(support_generator):
print("Starting trial %d" % trial_num)
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 50
#graph_model = dc.nn.SequentialGraph(n_feat)
model = GraphConvModel(len(sider_tasks), graph_conv_layers=[
64, 128, 64], batch_size=batch_size)
# Fit trained model
model.fit(support, nb_epoch=10)
# Test model
task_dataset = dc.data.get_task_dataset_minus_support(test_dataset, support,
task)
y_pred = model.predict(task_dataset)
score = metric.compute_metric(task_dataset.y, y_pred, task_dataset.w)
print("Score on task %s is %s" % (str(task), str(score)))
task_scores[task].append(score)
# Join information for all tasks.
mean_task_scores = {}
std_task_scores = {}
for task in range(len(test_dataset.get_task_names())):
mean_task_scores[task] = np.mean(np.array(task_scores[task]))
std_task_scores[task] = np.std(np.array(task_scores[task]))
print("Mean scores")
print(mean_task_scores)
print("Standard Deviations")
print(std_task_scores)
print("Median of Mean Scores")
print(np.median(np.array(mean_task_scores.values())))
| mit | Python |
d254e85bbca12ac03e17dd3654ce0f4107b765c8 | Update fx_xrates.py | iamrobinhood12345/fx_15,iamrobinhood12345/fx_15 | fx_15_min/fx_xrates.py | fx_15_min/fx_xrates.py | from requests import get
from bs4 import BeautifulSoup
from datetime import datetime
from pytz import timezone
from csv import writer
PAGES = [
]
rates_dict = {}
rates_tags = []
for each in PAGES:
page = get(each)
c = page.content
soup = BeautifulSoup(c, 'html.parser')
for each in soup.find_all('td', {'class': 'rtRates'}):
pair = str(each)[42:45] + '/' + str(each)[53:56]
rate = each.text
rates_dict[pair] = rate
utc_time = datetime.now(timezone('UTC')).strftime("%Y%m%d-%H%M%S")
filename = '/Users/ben/code/finance/fx_15_min/data/' + utc_time + '.csv'
with open(filename, 'w+') as csv_file:
csv_writer = writer(csv_file)
for key, value in rates_dict.items():
csv_writer.writerow([key, value])
print(utc_time)
| from requests import get
from bs4 import BeautifulSoup
from datetime import datetime
from pytz import timezone
from csv import writer
PAGES = [
'http://www.x-rates.com/table/?from=USD&amount=1',
'http://www.x-rates.com/table/?from=gbp&amount=1',
'http://www.x-rates.com/table/?from=eur&amount=1',
'http://www.x-rates.com/table/?from=cad&amount=1',
'http://www.x-rates.com/table/?from=aud&amount=1',
'http://www.x-rates.com/table/?from=jpy&amount=1',
'http://www.x-rates.com/table/?from=chf&amount=1',
'http://www.x-rates.com/table/?from=cny&amount=1',
]
rates_dict = {}
rates_tags = []
for each in PAGES:
page = get(each)
c = page.content
soup = BeautifulSoup(c, 'html.parser')
for each in soup.find_all('td', {'class': 'rtRates'}):
pair = str(each)[42:45] + '/' + str(each)[53:56]
rate = each.text
rates_dict[pair] = rate
utc_time = datetime.now(timezone('UTC')).strftime("%Y%m%d-%H%M%S")
filename = '/Users/ben/code/finance/fx_15_min/data/' + utc_time + '.csv'
with open(filename, 'w+') as csv_file:
csv_writer = writer(csv_file)
for key, value in rates_dict.items():
csv_writer.writerow([key, value])
print(utc_time)
| apache-2.0 | Python |
91b9a00c1f866704503fc2c46689df8125ec91fd | Update filter output | takkasila/TwitGeoSpa,takkasila/TwitGeoSpa | user_travel_filter.py | user_travel_filter.py | from user_tracker import *
sys.path.insert(0, './Province')
from province_point import *
from geopy.distance import vincenty
import operator
def filterUser(userList, pvcmDict, speedT=0, distT=0, timeT=0, isAbove = True):
'Threshold: Speed in km/hr, distance in km, time in hour'
opt = operator.ge if isAbove else operator.lt
filtUserTuple = {}
for user in userList:
if len(user.crossTravelData) == 0:
continue
for crossTravelData in user.crossTravelData.values():
if opt(crossTravelData.distance,distT) and opt(crossTravelData.time,timeT) and opt(crossTravelData.speed,speedT):
filtUserTuple[user.uid] = (user, crossTravelData.travelFrom.time)
break
return filtUserTuple
def writeUsertravelPoint(filtUsers):
for user, startPlaneTime in filtUsers.values():
print '############################'
for hist in user.history.values():
print hist
print '----------------------------'
for hist in user.history.values():
if hist.time < startPlaneTime:
continue
elif hist.time == startPlaneTime:
print user.crossTravelData[hist.time]
print hist
else:
print hist
if __name__ == '__main__':
if len(sys.argv) < 3:
print 'Please insert shapefile and twit data processed.csv'
exit()
pvPHolder = ProvinceCMPointHolder(shapefile.Reader(sys.argv[1]), abbrCsv = './Province/Province from Wiki Html table to CSV/ThailandProvinces_abbr.csv')
userTracker = UserTracker(sys.argv[2])
print 'Total users: {}'.format(len(userTracker.uidList))
userTracker.createUserCrossTravelData(pvPHolder.pvcmDict)
planeUserTuple = filterUser(userTracker.uidList.values(), pvPHolder.pvcmDict, speedT = 300, distT = 50, timeT = 0, isAbove = True)
writeUsertravelPoint(planeUserTuple)
print 'Plane users: {}'.format(len(planeUserTuple))
| from user_tracker import *
sys.path.insert(0, './Province')
from province_point import *
from geopy.distance import vincenty
import operator
def filterUser(userList, pvcmDict, speedT=0, distT=0, timeT=0, isAbove = True):
'Threshold: Speed in km/hr, distance in km, time in hour'
opt = operator.ge if isAbove else operator.lt
filtUsers = {}
for user in userList:
if len(user.crossTravelData) == 0:
continue
for crossTravelData in user.crossTravelData.values():
if opt(crossTravelData.distance,distT) and opt(crossTravelData.time,timeT) and opt(crossTravelData.speed,speedT):
filtUsers[user.uid] = user
break
return filtUsers
def writeUsertravelPoint(userList):
for user in userList.values():
print '------------'
print len(user.history)
print len(user.crossTravelData)
# for hist in user.history.values():
# print
if __name__ == '__main__':
if len(sys.argv) < 3:
print 'Please insert shapefile and twit data processed.csv'
exit()
pvPHolder = ProvinceCMPointHolder(shapefile.Reader(sys.argv[1]), abbrCsv = './Province/Province from Wiki Html table to CSV/ThailandProvinces_abbr.csv')
userTracker = UserTracker(sys.argv[2])
print 'Total users: {}'.format(len(userTracker.uidList))
userTracker.createUserCrossTravelData(pvPHolder.pvcmDict)
planeUsers = filterUser(userTracker.uidList.values(), pvPHolder.pvcmDict, speedT = 300, distT = 50, timeT = 0, isAbove = True)
writeUsertravelPoint(planeUsers)
| mit | Python |
b8f55abbbf8ec14bed4342ec8081527533fd30d9 | Add models based on existing libraries | Doveps/mono,Doveps/mono,Doveps/mono,Doveps/mono | api/savant/models.py | api/savant/models.py | from django.db import models
class Comparison(models.Model):
"""A Comparison is a collection of diffs, for example as generated and
exported by the bassist."""
diffs = models.ManyToManyField("Diff")
class Diff(models.Model):
# Hashed so it can't get added twice
content = models.CharField(max_length=250)
system = models.CharField(max_length=250)
action = models.CharField(max_length=250)
name = models.CharField(max_length=250)
def __str__(self):
return "%s|%s|%s" % (self.action, self.system, self.name)
class Set(models.Model):
"""A Set is one or more diffs from a Comparison object. Diffs result from a
change made to an OS. For example: installing a package."""
comparison = models.ForeignKey("Comparison")
diffs = models.ManyToManyField("Diff")
class Playbook(models.Model):
pass
| from django.db import models
class Comparison(models.Model):
pass
class Set(models.Model):
pass
class Playbook(models.Model):
pass
| mit | Python |
d372ee4d8318d5be039002743657465fdb3e8c26 | fix name of migration file. | alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin | migrations/versions/10_create_users.py | migrations/versions/10_create_users.py | """empty message
Revision ID: create_users
Revises: None
Create Date: 2015-11-24 10:39:19.827534
"""
# revision identifiers, used by Alembic.
revision = '10_create_users'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('roles',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('role', sa.String, nullable=False, unique=True)
)
op.create_table('users',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String, nullable=False, unique=True),
sa.Column('email_address', sa.String(length=255), nullable=False),
sa.Column('password', sa.String, nullable=False),
sa.Column('mobile_number', sa.String, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('updated_at', sa.DateTime),
sa.Column('password_changed_at', sa.DateTime),
sa.Column('role_id', sa.Integer, nullable=False),
sa.Column('logged_in_at', sa.DateTime),
sa.Column('failed_login_count', sa.Integer, nullable=False),
sa.Column('state', sa.String, default='pending'),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'])
)
def downgrade():
op.drop_table('users')
op.drop_table('roles')
| """empty message
Revision ID: create_users
Revises: None
Create Date: 2015-11-24 10:39:19.827534
"""
# revision identifiers, used by Alembic.
revision = 'create_users'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('roles',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('role', sa.String, nullable=False, unique=True)
)
op.create_table('users',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String, nullable=False, unique=True),
sa.Column('email_address', sa.String(length=255), nullable=False),
sa.Column('password', sa.String, nullable=False),
sa.Column('mobile_number', sa.String, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('updated_at', sa.DateTime),
sa.Column('password_changed_at', sa.DateTime),
sa.Column('role_id', sa.Integer, nullable=False),
sa.Column('logged_in_at', sa.DateTime),
sa.Column('failed_login_count', sa.Integer, nullable=False),
sa.Column('state', sa.String, default='pending'),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'])
)
def downgrade():
op.drop_table('users')
op.drop_table('roles')
| mit | Python |
4e314231b7d73d8bc07d6f1a1b9b1ffd7047246a | make sure shapes match in array equality | iskandr/dsltools | treelike/testing_helpers.py | treelike/testing_helpers.py | import sys
import time
import numpy as np
from nose.tools import nottest
def run_local_functions(prefix, locals_dict = None):
if locals_dict is None:
last_frame = sys._getframe()
locals_dict = last_frame.f_back.f_locals
good = set([])
# bad = set([])
for k, test in locals_dict.items():
if k.startswith(prefix):
print "Running %s..." % k
try:
test()
print "\n --- %s passed\n" % k
good.add(k)
except:
raise
print "\n%d tests passed: %s\n" % (len(good), ", ".join(good))
@nottest
def run_local_tests(locals_dict = None):
if locals_dict is None:
last_frame = sys._getframe()
locals_dict = last_frame.f_back.f_locals
return run_local_functions("test_", locals_dict)
def eq(x,y):
"""
print "x", x
print "y", y
print "x type", type(x)
print "y type", type(y)
if hasattr(x,'shape'): print "x shape", x.shape
if hasattr(y, "shape"): print "y shape", y.shape
if hasattr(x, 'strides'): print 'x strides', x.strides
if hasattr(y, 'strides'): print 'y strides', y.strides
print "x raveled", np.ravel(x)
print "y raveled", np.ravel(y)
if hasattr(x, 'flags'): print "x flags", x.flags
if hasattr(y, 'flags'): print "y flags", y.flags
"""
if x is None:
return y is None
elif np.isscalar(x) and np.isnan(x):
return np.isscalar(x) and np.isnan(y)
elif isinstance(x, tuple) or isinstance(y, tuple):
return type(x) == type(y) and len(x) == len(y) and all(eq(xi,yi) for xi, yi in zip(x,y))
elif isinstance(x, np.ndarray) or isinstance(y, np.ndarray):
try:
x = np.asarray(x)
except:
return False
try:
y = np.asarray(y)
except:
return False
return x.shape == y.shape and x.dtype == y.dtype and np.allclose(x,y)
else:
return x == y
def expect_eq(actual,expected, test_name = None):
if test_name is None:
test_name = ""
else:
test_name = "[" + test_name + "] "
assert eq(actual,expected), "%sExpected %s but got %s" % (test_name, expected,actual)
def copy(x):
if isinstance(x, np.ndarray):
return x.copy()
else:
return x
| import sys
import time
import numpy as np
from nose.tools import nottest
def run_local_functions(prefix, locals_dict = None):
if locals_dict is None:
last_frame = sys._getframe()
locals_dict = last_frame.f_back.f_locals
good = set([])
# bad = set([])
for k, test in locals_dict.items():
if k.startswith(prefix):
print "Running %s..." % k
try:
test()
print "\n --- %s passed\n" % k
good.add(k)
except:
raise
print "\n%d tests passed: %s\n" % (len(good), ", ".join(good))
@nottest
def run_local_tests(locals_dict = None):
if locals_dict is None:
last_frame = sys._getframe()
locals_dict = last_frame.f_back.f_locals
return run_local_functions("test_", locals_dict)
def eq(x,y):
"""
print "x", x
print "y", y
print "x type", type(x)
print "y type", type(y)
if hasattr(x,'shape'): print "x shape", x.shape
if hasattr(y, "shape"): print "y shape", y.shape
if hasattr(x, 'strides'): print 'x strides', x.strides
if hasattr(y, 'strides'): print 'y strides', y.strides
print "x raveled", np.ravel(x)
print "y raveled", np.ravel(y)
if hasattr(x, 'flags'): print "x flags", x.flags
if hasattr(y, 'flags'): print "y flags", y.flags
"""
if x is None:
return y is None
elif isinstance(x, np.ndarray) and not isinstance(y, np.ndarray):
return False
elif isinstance(x, tuple) or isinstance(y, tuple):
return type(x) == type(y) and len(x) == len(y) and all(xi == yi for xi, yi in zip(x,y))
elif isinstance(y, np.ndarray):
if isinstance(x, np.ndarray) and x.shape == y.shape:
err = abs(np.mean(np.ravel(x) - np.ravel(y)))
m = abs(np.mean(np.ravel(x)))
if np.abs(m) < 0.000001:
m = 0.000001
if not np.all(np.ravel(x) == np.ravel(y)) and err/m > 0.001:
print "err:", err
print "err/m:", err/m
return False
else:
return True
elif np.isscalar(x) and np.isnan(x):
return np.isscalar(x) and np.isnan(y)
else:
try:
x = np.asarray(x)
except:
return False
try:
y = np.asarray(y)
except:
return False
return np.allclose(x,y)
def expect_eq(actual,expected, test_name = None):
if test_name is None:
test_name = ""
else:
test_name = "[" + test_name + "] "
assert eq(actual,expected), "%sExpected %s but got %s" % (test_name, expected,actual)
def copy(x):
if isinstance(x, np.ndarray):
return x.copy()
else:
return x
| bsd-3-clause | Python |
98fad1af84abe13eb64baad58c8a2faf3cd6cccb | Fix every single async task was broken | texastribune/tt_dailyemailblast,texastribune/tt_dailyemailblast | tt_dailyemailblast/tasks.py | tt_dailyemailblast/tasks.py | from celery.task import task
from . import models
from .send_backends import sync
@task
def send_daily_email_blasts(blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
sync.sync_daily_email_blasts(blast)
@task
def send_recipients_list(recipients_list_pk, blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
recipients_list = models.RecipientList.objects.get(pk=recipients_list_pk)
sync.sync_recipients_list(recipients_list, blast)
@task
def send_recipients(recipient_pk, recipients_list_pk, blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
recipients_list = models.RecipientList.objects.get(pk=recipients_list_pk)
recipient = models.Recipient.objects.get(pk=recipient_pk)
sync.sync_recipient(recipient, recipients_list, blast)
| from celery.task import task
from . import models
from . import send_backends
@task
def send_daily_email_blasts(blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
send_backends.sync_daily_email_blasts(blast)
@task
def send_recipients_list(recipients_list_pk, blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
recipients_list = models.RecipientList.objects.get(pk=recipients_list_pk)
send_backends.sync_recipients_list(recipients_list, blast)
@task
def send_recipients(recipient_pk, recipients_list_pk, blast_pk):
blast = models.DailyEmailBlast.objects.get(pk=blast_pk)
recipients_list = models.RecipientList.objects.get(pk=recipients_list_pk)
recipient = models.Recipient.objects.get(pk=recipient_pk)
send_backends.sync_recipient(recipient, recipients_list, blast)
| apache-2.0 | Python |
e0716585b34bb22f70e16609a0071228f317cc3e | Use correct pen up position | mrzl/Composition37XY,fogleman/xy | xy/device.py | xy/device.py | import serial
import time
PORT = '/dev/tty.wchusbserial640'
BAUD = 115200
UP = 10
DOWN = 40
class Device(object):
def __init__(self, port=PORT, baud=BAUD, up=UP, down=DOWN, verbose=False):
self.serial = serial.Serial(port, baud) if port else None
self.up = up
self.down = down
self.verbose = verbose
def read(self):
data = []
while True:
c = self.serial.read(1) if self.serial else '\n'
if c == '\n':
return ''.join(data)
data.append(c)
def write(self, *args):
line = ' '.join(map(str, args))
if self.verbose:
print line
if self.serial:
self.serial.write('%s\n' % line)
response = self.read()
if self.verbose:
print response
def home(self):
self.write('G28')
def move(self, x, y):
x = 'X%s' % x
y = 'Y%s' % y
self.write('G1', x, y)
def pen(self, position):
self.write('M1', position)
def pen_up(self):
self.pen(self.up)
def pen_down(self):
self.pen(self.down)
def draw(self, points, up=None, down=None):
if not points:
return
self.pen(self.up if up is None else up)
self.move(*points[0])
self.pen(self.down if down is None else down)
time.sleep(0.15)
for point in points:
self.move(*point)
self.pen(self.up if up is None else up)
time.sleep(0.15)
def gcode(self, g):
for line in g.lines:
self.write(line)
| import serial
import time
PORT = '/dev/tty.wchusbserial640'
BAUD = 115200
UP = 10
DOWN = 40
class Device(object):
def __init__(self, port=PORT, baud=BAUD, up=UP, down=DOWN, verbose=False):
self.serial = serial.Serial(port, baud) if port else None
self.up = up
self.down = down
self.verbose = verbose
def read(self):
data = []
while True:
c = self.serial.read(1) if self.serial else '\n'
if c == '\n':
return ''.join(data)
data.append(c)
def write(self, *args):
line = ' '.join(map(str, args))
if self.verbose:
print line
if self.serial:
self.serial.write('%s\n' % line)
response = self.read()
if self.verbose:
print response
def home(self):
self.write('G28')
def move(self, x, y):
x = 'X%s' % x
y = 'Y%s' % y
self.write('G1', x, y)
def pen(self, position):
self.write('M1', position)
def pen_up(self):
self.pen(self.up)
def pen_down(self):
self.pen(self.down)
def draw(self, points, up=None, down=None):
if not points:
return
self.pen(self.up if up is None else up)
self.move(*points[0])
self.pen(self.down if down is None else down)
time.sleep(0.15)
for point in points:
self.move(*point)
self.pen(up)
time.sleep(0.15)
def gcode(self, g):
for line in g.lines:
self.write(line)
| mit | Python |
d33a624fa6aedb93ae43ba1d2c0f6a76d90ff4a6 | Allow directory of files to be indexed | zivy/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,zivy/SimpleITK-Notebooks,InsightSoftwareConsortium/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks,zivy/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks,thewtex/SimpleITK-Notebooks | foldermd5sums.py | foldermd5sums.py | #!/usr/bin/env python
"""Script to read data files in a directory, compute their md5sums, and output
them to a JSON file."""
import json
import os
import sys
import hashlib
def get_relative_filepaths(base_directory):
""" Return a list of file paths without the base_directory prefix"""
file_list = []
for root, subFolders, files in os.walk('Data'):
relative_path="/".join(root.split('/')[1:])
for file in files:
file_list.append(os.path.join(relative_path,file))
return file_list
def get_md5sums(base_directory):
md5sums = []
for filename in get_relative_filepaths(base_directory):
md5 = hashlib.md5()
full_filepath = os.path.join(base_directory, filename)
with open(full_filepath, 'rb') as fp:
for chunk in iter(lambda: fp.read(128 * md5.block_size), b''):
md5.update(chunk)
md5hash = md5.hexdigest()
md5sums.append((filename, md5hash))
return md5sums
if __name__ == '__main__':
if len(sys.argv) < 3:
print('Usage: ' + sys.argv[0] + ' input_directory output.json')
sys.exit(1)
directory = sys.argv[1]
if not os.path.exists(directory):
print('Directory does not exist!')
sys.exit(1)
output_json = sys.argv[2]
md5sums = get_md5sums(directory)
with open(output_json, 'w') as fp:
json.dump(md5sums, fp, indent=0)
| #!/usr/bin/env python
"""Script to read data files in a directory, compute their md5sums, and output
them to a JSON file."""
import json
import os
import sys
import hashlib
def get_md5sums(directory):
md5sums = []
for filename in os.listdir(directory):
md5 = hashlib.md5()
with open(os.path.join(directory, filename), 'rb') as fp:
for chunk in iter(lambda: fp.read(128 * md5.block_size), b''):
md5.update(chunk)
md5hash = md5.hexdigest()
md5sums.append((filename, md5hash))
return md5sums
if __name__ == '__main__':
if len(sys.argv) < 3:
print('Usage: ' + sys.argv[0] + ' input_directory output.json')
sys.exit(1)
directory = sys.argv[1]
if not os.path.exists(directory):
print('Directory does not exist!')
sys.exit(1)
output_json = sys.argv[2]
md5sums = get_md5sums(directory)
with open(output_json, 'w') as fp:
json.dump(md5sums, fp, indent=0)
| apache-2.0 | Python |
5b709764e4efd79165765bd4a26a1a49f40d2cb6 | rename function for better understanding | misisnik/testinsta,sudoguy/instabot,AlexBGoode/instabot,instagrambot/instabot,instagrambot/instapro,ohld/instabot,instagrambot/instabot,misisnik/testinsta,rasperepodvipodvert/instabot,vkgrd/instabot,Diapostrofo/instabot | examples/unsubscribe_not_mutually_followers.py | examples/unsubscribe_not_mutually_followers.py | #!/usr/bin/env python
import pandas as pd
import datetime, time
import random
import sys, os
sys.path.append(os.path.join(sys.path[0],'../'))
from instabot import API
def unsubscribe_from_not_mutually_followers(api):
""" Unsubscribes from people that don't follow you.
I know that the name of this example and function is bad.
Feel free to give me an advice."""
all_followers_data = api.get_followers(api.user_login)
followers = [item["username"] for item in all_followers_data][::-1]
print ("You follow %d people."%len(followers))
total_unsubscribed = 0
for follower in followers:
info = api.get_profile_info(follower)
time.sleep(5 * random.random())
if info:
if not info["follows_viewer"]:
print ("%s is not following you! Unsubscribe!"%follower)
if api.unfollow(api.get_user_id_by_username(follower)):
total_unsubscribed += 1
print (" Done. Total unsubscribed: %d"%total_unsubscribed)
else:
print (" Something broke up. I can't unsubscribe.")
time.sleep(30 + 30 * random.random())
else:
print (" Something broke up. I can't get profile info.")
if total_unsubscribed >= 200:
print ("You have unsubscribed from 200 people. That's enought. I'll be stopped.")
break
print ("Now you follow %d people."%(len(followers) - total_unsubscribed))
return True
if __name__ == "__main__":
api = API()
unsubscribe_from_not_mutually_followers(api)
api.logout()
| #!/usr/bin/env python
import pandas as pd
import datetime, time
import random
import sys, os
sys.path.append(os.path.join(sys.path[0],'../'))
from instabot import API
def unsubscribe_not_mutually_followers(api):
""" Unsubscribes from people that don't follow you.
I know that the name of this example and function is bad.
Feel free to give me an advice."""
all_followers_data = api.get_followers(api.user_login)
followers = [item["username"] for item in all_followers_data][::-1]
print ("You follow %d people."%len(followers))
total_unsubscribed = 0
for follower in followers:
info = api.get_profile_info(follower)
time.sleep(5 * random.random())
if info:
if not info["follows_viewer"]:
print ("%s is not following you! Unsubscribe!"%follower)
if api.unfollow(api.get_user_id_by_username(follower)):
total_unsubscribed += 1
print (" Done. Total unsubscribed: %d"%total_unsubscribed)
else:
print (" Something broke up. I can't unsubscribe.")
time.sleep(30 + 30 * random.random())
else:
print (" Something broke up. I can't get profile info.")
if total_unsubscribed >= 200:
print ("You have unsubscribed from 200 people. That's enought. I'll be stopped.")
break
print ("Now you follow %d people."%(len(followers) - total_unsubscribed))
return True
if __name__ == "__main__":
api = API()
unsubscribe_not_mutually_followers(api)
api.logout()
| apache-2.0 | Python |
e07c34603385563515e5c438bd575f91a8e9dd1b | Upgrade gitiles-servlet to 0.2-7 | GerritCodeReview/plugins_gitiles | external_plugin_deps.bzl | external_plugin_deps.bzl | load("//tools/bzl:maven_jar.bzl", "GERRIT", "MAVEN_CENTRAL", "MAVEN_LOCAL", "maven_jar")
COMMONMARK_VERSION = "0.10.0"
def external_plugin_deps():
maven_jar(
name = "gitiles-servlet",
artifact = "com.google.gitiles:gitiles-servlet:0.2-7",
sha1 = "f23b22cb27fe5c4a78f761492082159d17873f57",
repository = GERRIT,
)
# prettify must match the version used in Gitiles
maven_jar(
name = "prettify",
artifact = "com.github.twalcari:java-prettify:1.2.2",
sha1 = "b8ba1c1eb8b2e45cfd465d01218c6060e887572e",
)
maven_jar(
name = "commons-lang3",
artifact = "org.apache.commons:commons-lang3:3.7",
sha1 = "557edd918fd41f9260963583ebf5a61a43a6b423",
)
maven_jar(
name = "commons-text",
artifact = "org.apache.commons:commons-text:1.2",
sha1 = "74acdec7237f576c4803fff0c1008ab8a3808b2b",
)
# commonmark must match the version used in Gitiles
maven_jar(
name = "commonmark",
artifact = "com.atlassian.commonmark:commonmark:" + COMMONMARK_VERSION,
sha1 = "119cb7bedc3570d9ecb64ec69ab7686b5c20559b",
)
maven_jar(
name = "cm-autolink",
artifact = "com.atlassian.commonmark:commonmark-ext-autolink:" + COMMONMARK_VERSION,
sha1 = "a6056a5efbd68f57d420bc51bbc54b28a5d3c56b",
)
maven_jar(
name = "autolink",
artifact = "org.nibor.autolink:autolink:0.7.0",
sha1 = "649f9f13422cf50c926febe6035662ae25dc89b2",
)
maven_jar(
name = "gfm-strikethrough",
artifact = "com.atlassian.commonmark:commonmark-ext-gfm-strikethrough:" + COMMONMARK_VERSION,
sha1 = "40837da951b421b545edddac57012e15fcc9e63c",
)
maven_jar(
name = "gfm-tables",
artifact = "com.atlassian.commonmark:commonmark-ext-gfm-tables:" + COMMONMARK_VERSION,
sha1 = "c075db2a3301100cf70c7dced8ecf86b494458a2",
)
| load("//tools/bzl:maven_jar.bzl", "GERRIT", "MAVEN_CENTRAL", "MAVEN_LOCAL", "maven_jar")
COMMONMARK_VERSION = "0.10.0"
def external_plugin_deps():
maven_jar(
name = "gitiles-servlet",
artifact = "com.google.gitiles:gitiles-servlet:0.2-6",
sha1 = "74a3b22c9283adafafa1e388d62f693e5e2fab2b",
repository = GERRIT,
)
# prettify must match the version used in Gitiles
maven_jar(
name = "prettify",
artifact = "com.github.twalcari:java-prettify:1.2.2",
sha1 = "b8ba1c1eb8b2e45cfd465d01218c6060e887572e",
)
maven_jar(
name = "commons-lang3",
artifact = "org.apache.commons:commons-lang3:3.7",
sha1 = "557edd918fd41f9260963583ebf5a61a43a6b423",
)
maven_jar(
name = "commons-text",
artifact = "org.apache.commons:commons-text:1.2",
sha1 = "74acdec7237f576c4803fff0c1008ab8a3808b2b",
)
# commonmark must match the version used in Gitiles
maven_jar(
name = "commonmark",
artifact = "com.atlassian.commonmark:commonmark:" + COMMONMARK_VERSION,
sha1 = "119cb7bedc3570d9ecb64ec69ab7686b5c20559b",
)
maven_jar(
name = "cm-autolink",
artifact = "com.atlassian.commonmark:commonmark-ext-autolink:" + COMMONMARK_VERSION,
sha1 = "a6056a5efbd68f57d420bc51bbc54b28a5d3c56b",
)
maven_jar(
name = "autolink",
artifact = "org.nibor.autolink:autolink:0.7.0",
sha1 = "649f9f13422cf50c926febe6035662ae25dc89b2",
)
maven_jar(
name = "gfm-strikethrough",
artifact = "com.atlassian.commonmark:commonmark-ext-gfm-strikethrough:" + COMMONMARK_VERSION,
sha1 = "40837da951b421b545edddac57012e15fcc9e63c",
)
maven_jar(
name = "gfm-tables",
artifact = "com.atlassian.commonmark:commonmark-ext-gfm-tables:" + COMMONMARK_VERSION,
sha1 = "c075db2a3301100cf70c7dced8ecf86b494458a2",
)
| apache-2.0 | Python |
e850fe2ec9676ba1c3006a56af46d868ecbf2887 | validate email before blacklist create | StreetVoice/django-celery-ses | djcelery_ses/views.py | djcelery_ses/views.py | # coding: utf-8
import json
import re
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from django.core.mail import mail_admins
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from .models import Blacklist
@csrf_exempt
def sns_notification(request):
"""
Receive AWS SES bounce SNS notification
"""
# decode json
try:
data = json.loads(request.read())
except ValueError:
return HttpResponseBadRequest('Invalid JSON')
# handle SNS subscription
if data['Type'] == 'SubscriptionConfirmation':
subscribe_url = data['SubscribeURL']
subscribe_body = """
Please visit this URL below to confirm your subscription with SNS
%s """ % subscribe_url
mail_admins('Please confirm SNS subscription', subscribe_body)
return HttpResponse('OK')
#
try:
message = json.loads(data['Message'])
except ValueError:
assert False, data['Message']
#
type = 0 if message['notificationType'] == 'Bounce' else 1
email = message['mail']['destination'][0]
try:
validate_email(email)
except ValidationError:
try:
email = re.findall(r"<(.+?)>", email)[0]
except IndexError:
email = None
if not email:
return HttpResponse('Email Error')
# add email to blacklist
try:
Blacklist.objects.get(email=email)
except Blacklist.DoesNotExist:
Blacklist.objects.create(email=email, type=type)
return HttpResponse('Done')
| # coding: utf-8
import json
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from django.core.mail import mail_admins
from .models import Blacklist
@csrf_exempt
def sns_notification(request):
"""
Receive AWS SES bounce SNS notification
"""
# decode json
try:
data = json.loads(request.read())
except ValueError:
return HttpResponseBadRequest('Invalid JSON')
# handle SNS subscription
if data['Type'] == 'SubscriptionConfirmation':
subscribe_url = data['SubscribeURL']
subscribe_body = """
Please visit this URL below to confirm your subscription with SNS
%s """ % subscribe_url
mail_admins('Please confirm SNS subscription', subscribe_body)
return HttpResponse('OK')
#
try:
message = json.loads(data['Message'])
except ValueError:
assert False, data['Message']
#
type = 0 if message['notificationType'] == 'Bounce' else 1
email = message['mail']['destination'][0]
# add email to blacklist
try:
Blacklist.objects.get(email=email)
except Blacklist.DoesNotExist:
Blacklist.objects.create(email=email, type=type)
return HttpResponse('Done')
| mit | Python |
33bcf1c4953bf318d4581b9f7da2ec4b5770b7fe | bump to 0.1.14 | pmorissette/ffn | ffn/__init__.py | ffn/__init__.py | from . import core
from . import data
from .data import get
#from .core import year_frac, PerformanceStats, GroupStats, merge
from .core import *
core.extend_pandas()
__version__ = (0, 1, 14)
| from . import core
from . import data
from .data import get
#from .core import year_frac, PerformanceStats, GroupStats, merge
from .core import *
core.extend_pandas()
__version__ = (0, 1, 13)
| mit | Python |
d20969aec0c312a6d60d8263e970a01c9b3b0d01 | bump 0.2.1 | pmorissette/ffn | ffn/__init__.py | ffn/__init__.py | from . import core
from . import data
from .data import get
#from .core import year_frac, PerformanceStats, GroupStats, merge
from .core import *
core.extend_pandas()
__version__ = (0, 2, 1)
| from . import core
from . import data
from .data import get
#from .core import year_frac, PerformanceStats, GroupStats, merge
from .core import *
core.extend_pandas()
__version__ = (0, 2, 0)
| mit | Python |
d42e2460a411f5dbd57822df87a502866d0f9e1c | Fix _log to work on python 2 & 3 | bcb/jsonrpcserver | jsonrpcserver/log.py | jsonrpcserver/log.py | """Logging"""
import logging
def _configure_logger(logger, fmt):
"""Set up a logger, if no handler has been configured for it"""
if not logging.root.handlers and logger.level == logging.NOTSET:
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt=fmt))
logger.addHandler(handler)
def _log(logger, level, message, *args, **kwargs):
"""Configure before logging"""
fmt = kwargs.pop('fmt', '%(message)s')
_configure_logger(logger, fmt)
getattr(logger, level)(message, *args, **kwargs)
| """Logging"""
import logging
def _configure_logger(logger, fmt):
"""Set up a logger, if no handler has been configured for it"""
if not logging.root.handlers and logger.level == logging.NOTSET:
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt=fmt))
logger.addHandler(handler)
def _log(logger, level, message, *args, fmt='%(message)s', **kwargs):
"""Configure before logging"""
_configure_logger(logger, fmt)
getattr(logger, level)(message, *args, **kwargs)
| mit | Python |
493007fea95a8d3f54f5bd1e783d95a58ac02b8f | Fix typo | maruel/git-hooks-go,maruel/git-hooks-go | install.py | install.py | #!/usr/bin/env python
# Copyright 2014 Marc-Antoine Ruel. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Installs git pre-commit hook on the repository one directory above."""
import os
import shutil
import subprocess
import sys
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
def main():
try:
parent = os.path.dirname(THIS_DIR)
git_dir = subprocess.check_output(
['git', 'rev-parse', '--git-dir'],
cwd=parent).strip()
git_dir = os.path.normpath(os.path.join(parent, git_dir))
except subprocess.CalledProcessError:
print >> sys.stderr, 'Failed to find parent git repository root'
return 1
git_hook_dir = os.path.join(git_dir, 'hooks')
precommit_dest = os.path.join(git_hook_dir, 'pre-commit')
if os.path.isfile(precommit_dest):
# Better be safe than sorry.
print >> sys.stderr, '%s already exist, aborting' % precommit_dest
return 1
if sys.platform == 'win32':
# This means it'll get stale on Windows.
shutil.copyfile(os.path.join(THIS_DIR, 'pre-commit'), precommit_dest)
else:
relpath = os.path.relpath(THIS_DIR, git_hook_dir)
os.symlink(os.path.join(relpath, 'pre-commit'), precommit_dest)
print('Installed %s' % precommit_dest)
return 0
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright 2014 Marc-Antoine Ruel. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Installs git pre-commit hook on the repository one directory above."""
import os
import shutil
import subprocess
import sys
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
def main():
try:
parent = os.path.dirname(THIS_DIR)
git_dir = subprocess.check_output(
['git', 'rev-parse', '--git-dir'],
cwd=parent).strip()
git_dir = os.path.normpath(os.path.join(parent, git_dir))
except subprocess.CalledProcessError:
print >> sys.stderr, 'Failed to find parent git repository root'
return 1
git_hook_dir = os.path.join(git_dir, 'hooks')
precommit_dest = os.path.join(git_hook_dir, 'pre-commit')
if os.path.isfile(precommit_test):
# Better be safe than sorry.
print >> sys.stderr, '%s already exist, aborting' % precommit_dest
return 1
if sys.platform == 'win32':
# This means it'll get stale on Windows.
shutil.copyfile(os.path.join(THIS_DIR, 'pre-commit'), precommit_dest)
else:
relpath = os.path.relpath(THIS_DIR, git_hook_dir)
os.symlink(os.path.join(relpath, 'pre-commit'), precommit_dest)
print('Installed %s' % precommit_dest)
return 0
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 | Python |
cbefb84542d9dfddd0f2fdf8bd0cb2fc89d5b824 | Allow "jupyter nbextension install/enable --py jupytext" | mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext | jupytext/__init__.py | jupytext/__init__.py | """Read and write Jupyter notebooks as text files"""
from .jupytext import readf, writef, writes, reads
from .formats import NOTEBOOK_EXTENSIONS, guess_format, get_format_implementation
from .version import __version__
try:
from .contentsmanager import TextFileContentsManager
except ImportError as err:
class TextFileContentsManager:
"""A class that raises the previous ImportError"""
err = err
def __init__(self):
raise self.err
def _jupyter_nbextension_paths():
"""Allows commands like
jupyter nbextension install --py jupytext
jupyter nbextension enable --py jupytext
jupyter labextension install jupyterlab-jupytext"""
return [dict(
section="notebook",
# the path is relative to the `jupytext` directory
src="nbextension",
# directory in the `nbextension/` namespace
dest="jupytext",
# _also_ in the `nbextension/` namespace
require="jupytext/index")]
__all__ = ['readf', 'writef', 'writes', 'reads',
'NOTEBOOK_EXTENSIONS', 'guess_format', 'get_format_implementation',
'TextFileContentsManager', '__version__']
| """Read and write Jupyter notebooks as text files"""
from .jupytext import readf, writef, writes, reads
from .formats import NOTEBOOK_EXTENSIONS, guess_format, get_format_implementation
from .version import __version__
try:
from .contentsmanager import TextFileContentsManager
except ImportError as err:
class TextFileContentsManager:
"""A class that raises the previous ImportError"""
err = err
def __init__(self):
raise self.err
__all__ = ['readf', 'writef', 'writes', 'reads',
'NOTEBOOK_EXTENSIONS', 'guess_format', 'get_format_implementation',
'TextFileContentsManager', '__version__']
| mit | Python |
c6f8153f9163e421a0684b3667c6b5425c9c2168 | add form | girlsgoit/GirlsGoIT,girlsgoit/GirlsGoIT | ggit_platform/forms.py | ggit_platform/forms.py | from django import forms
from .models import Track
from .models import Region
from .models import Member
from .models import Event
from .models import Story
class TrackForm(forms.ModelForm):
class Meta:
model = Track
fields = '__all__'
class MemberForm(forms.ModelForm):
class Meta:
model = Member
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = '__all__'
class RegionForm(forms.ModelForm):
class Meta:
model = Region
fields = '__all__'
class StoryForm(forms.ModelForm):
class Meta:
model = Story
fields = '__all__'
| from django import forms
from .models import Track
from .models import Region
from .models import Member
from .models import Event
from .models import Story
class TrackForm(forms.ModelForm):
class Meta:
model = Track
fields = '__all__'
class MemberForm(forms.ModelForm):
class Meta:
model = Member
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = '__all__'
class StoryForm(forms.ModelForm):
class Meta:
model = Story
fields = '__all__'
| mit | Python |
de5899d60736f555f975713810d5e1936d823794 | Remove ternary expressions | ludios/Strfrag | strfrag.py | strfrag.py | __version__ = '11.5.9'
import sys
import warnings
class StringFragment(object):
"""
Represents a fragment of a string. Used to avoid copying, especially in
network protocols.
DO NOT adjust the attributes of the object after you instantiate it; this
is faux-immutable.
You can slice a L{StringFragment}, which will return a new
L{StringFragment}. You can index it, which will return a 1-byte C{str}.
Equal and hash-equivalent to other L{StringFragment}s that represent
the same string fragment.
"""
__slots__ = ('_string', '_pos', 'size')
def __init__(self, string, pos, size):
self._string = string
self._pos = pos
self.size = size
def __repr__(self):
return '<%s for 0x%x, pos=%r, size=%r, represents %r>' % (
self.__class__.__name__, id(self._string), self._pos, self.size, str(self))
def __len__(self):
# Note: __len__ needs to be implemented for another
# reason: so that __getslice__ works properly when sliced
# with negative numbers.
return self.size
def __getitem__(self, num):
# Unlike for __getslice__, Python passes through negative numbers
# to __getitem__.
pos = self._pos
size = self.size
rightLimit = pos + size - 1
if num < 0:
num = size + num
num = pos + num
if not pos <= num <= rightLimit:
raise IndexError("StringFragment index out of range")
return self._string[num]
def __getslice__(self, start, end):
maximumLength = min(self.size - start, end - start)
newStart = self._pos + start
return StringFragment(self._string, newStart, max(0, maximumLength))
# TODO: as_memoryview # Python does not provide a __memoryview__
def as_buffer(self): # Python does not provide a __buffer__
"""
Return a C{buffer} object for the fragment. Note that Python
will not collect the underlying string object if there is a buffer
of it.
"""
return buffer(self._string, self._pos, self.size)
def __str__(self):
pos = self._pos
return self._string[pos:pos + self.size]
def __hash__(self):
return hash(self.as_buffer())
def __eq__(self, other):
if type(self) != type(other):
return False
return self.as_buffer() == other.as_buffer()
def __ne__(self, other):
if type(self) != type(other):
return True
return self.as_buffer() != other.as_buffer()
| __version__ = '11.5.9'
import sys
import warnings
class StringFragment(object):
"""
Represents a fragment of a string. Used to avoid copying, especially in
network protocols.
DO NOT adjust the attributes of the object after you instantiate it; this
is faux-immutable.
You can slice a L{StringFragment}, which will return a new
L{StringFragment}. You can index it, which will return a 1-byte C{str}.
Equal and hash-equivalent to other L{StringFragment}s that represent
the same string fragment.
"""
__slots__ = ('_string', '_pos', 'size')
def __init__(self, string, pos, size):
self._string = string
self._pos = pos
self.size = size
def __repr__(self):
return '<%s for 0x%x, pos=%r, size=%r, represents %r>' % (
self.__class__.__name__, id(self._string), self._pos, self.size, str(self))
def __len__(self):
# Note: __len__ needs to be implemented for another
# reason: so that __getslice__ works properly when sliced
# with negative numbers.
return self.size
def __getitem__(self, num):
# Unlike for __getslice__, Python passes through negative numbers
# to __getitem__.
pos = self._pos
size = self.size
rightLimit = pos + size - 1
if num < 0:
num = size + num
num = pos + num
if not pos <= num <= rightLimit:
raise IndexError("StringFragment index out of range")
return self._string[num]
def __getslice__(self, start, end):
maximumLength = min(self.size - start, end - start)
newStart = self._pos + start
return StringFragment(self._string, newStart, max(0, maximumLength))
# TODO: as_memoryview # Python does not provide a __memoryview__
def as_buffer(self): # Python does not provide a __buffer__
"""
Return a C{buffer} object for the fragment. Note that Python
will not collect the underlying string object if there is a buffer
of it.
"""
return buffer(self._string, self._pos, self.size)
def __str__(self):
pos = self._pos
return self._string[pos:pos + self.size]
def __hash__(self):
return hash(self.as_buffer())
def __eq__(self, other):
return False if type(self) != type(other) else self.as_buffer() == other.as_buffer()
def __ne__(self, other):
return True if type(self) != type(other) else self.as_buffer() != other.as_buffer()
| mit | Python |
812d17c6fcf97cc1c493f2489c92af24006ea0a8 | Fix pep8 error. | armet/python-armet | src/armet/resources/resource/__init__.py | src/armet/resources/resource/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
import six
from .base import Resource as BaseResource
from .meta import ResourceBase
__all__ = [
'Resource'
]
class Resource(six.with_metaclass(ResourceBase, BaseResource)):
"""Implements the RESTful resource protocol for abstract resources.
"""
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
import six
from .base import Resource as BaseResource
from .meta import ResourceBase
__all__ = [
'Resource'
]
class Resource(six.with_metaclass(ResourceBase, BaseResource)):
"""Implements the RESTful resource protocol for abstract resources.
"""
| mit | Python |
8cbc9498b03b68e15b1698211997b773ec263a3f | Update __init__.py | keras-team/keras-cv,keras-team/keras-cv,keras-team/keras-cv | keras_cv/__init__.py | keras_cv/__init__.py | # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.4dev"
| # Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.4"
| apache-2.0 | Python |
e911bf6156e25f2f2da86a63fe54b9bac6ba0544 | fix #191 remove_limit.py | DocNow/twarc,hugovk/twarc | utils/remove_limit.py | utils/remove_limit.py | #!/usr/bin/env python
"""
Utility to remove limit warnings from Filter API output.
If --warnings was used, you will have the following in output:
{"limit": {"track": 2530, "timestamp_ms": "1482168932301"}}
This utility removes any limit warnings from output.
Usage:
remove_limit.py aleppo.jsonl > aleppo_no_warnings.jsonl
"""
from __future__ import print_function
import sys
import json
import fileinput
limitbreaker = '{"limit":{"track":'
limit_breaker = '{"limit": {"track":'
for line in fileinput.input():
if limitbreaker not in line and limit_breaker not in line:
print(json.dumps(line))
| #!/usr/bin/env python
"""
Utility to remove limit warnings from Filter API output.
If --warnings was used, you will have the following in output:
{"limit": {"track": 2530, "timestamp_ms": "1482168932301"}}
This utility removes any limit warnings from output.
Usage:
remove_limit.py aleppo.jsonl > aleppo_no_warnings.jsonl
"""
from __future__ import print_function
import sys
import json
import fileinput
limit_breaker = '{"limit": {"track":'
for line in fileinput.input():
if limit_breaker not in line:
print(json.dumps(line))
| mit | Python |
5aeab439a081d7bb06777072f912d9443181aac9 | Fix dhcp_build | murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,akeym/cyder,akeym/cyder,zeeman/cyder,OSU-Net/cyder,drkitty/cyder,drkitty/cyder,zeeman/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,murrown/cyder,OSU-Net/cyder,murrown/cyder,murrown/cyder | cyder/management/commands/dhcp_build.py | cyder/management/commands/dhcp_build.py | import syslog
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from cyder.cydhcp.build.builder import DHCPBuilder
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
### action options ###
make_option('-p', '--push',
dest='push',
action='store_true',
default=False,
help="Check files into vcs and push upstream."),
### logging/debug options ###
make_option('-l', '--syslog',
dest='to_syslog',
action='store_true',
help="Log to syslog."),
make_option('-L', '--no-syslog',
dest='to_syslog',
action='store_false',
help="Do not log to syslog."),
### miscellaneous ###
make_option('-C', '--no-sanity-check',
dest='sanity_check',
action='store_false',
default=True,
help="Don't run the diff sanity check."),
)
def handle(self, *args, **options):
builder_opts = {}
if options['to_syslog']:
syslog.openlog(b'dhcp_build', 0, syslog.LOG_LOCAL6)
builder_opts['to_syslog'] = True
verbosity = int(options['verbosity'])
builder_opts['quiet'] = verbosity == 0
builder_opts['verbose'] = verbosity >= 2
with DHCPBuilder(**builder_opts) as b:
b.build()
if options['push']:
b.push(sanity_check=options['sanity_check'])
| from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from cyder.cydhcp.build.builder import DHCPBuilder
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
### action options ###
make_option('-p', '--push',
dest='push',
action='store_true',
default=False,
help="Check files into vcs and push upstream."),
### logging/debug options ###
make_option('-l', '--syslog',
dest='to_syslog',
action='store_true',
help="Log to syslog."),
make_option('-L', '--no-syslog',
dest='to_syslog',
action='store_false',
help="Do not log to syslog."),
### miscellaneous ###
make_option('-C', '--no-sanity-check',
dest='sanity_check',
action='store_false',
default=True,
help="Don't run the diff sanity check."),
)
def handle(self, *args, **options):
builder_opts = {}
if options['to_syslog']:
syslog.openlog(b'dhcp_build', 0, syslog.LOG_LOCAL6)
builder_opts['to_syslog'] = True
verbosity = int(options['verbosity'])
builder_opts['quiet'] = verbosity == 0
builder_opts['verbose'] = verbosity >= 2
with DHCPBuilder(**builder_opts) as b:
b.build()
if options['push']:
b.push(sanity_check=options['sanity_check'])
| bsd-3-clause | Python |
d3b374a705a72ef3d0ad2da332f056f04063ae92 | Add -t alias to --tag flag for preprocess command. | grow/grow,grow/grow,grow/grow,grow/grow,grow/pygrow,grow/pygrow,grow/pygrow | grow/commands/preprocess.py | grow/commands/preprocess.py | from grow.common import utils
from grow.pods import pods
from grow.pods import storage
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--all', '-A', 'run_all', is_flag=True, default=False,
help='Whether to run all preprocessors, even if a preprocessor'
' has autorun disabled.')
@click.option('--preprocessor', '-p', type=str, multiple=True,
help='Name of preprocessor to run. Preprocessors controlled by'
' the preprocess command must have names or tags.')
@click.option('--tag', '-t', type=str, multiple=True,
help='Tags of preprocessors to run. Preprocessors controlled by'
' the preprocess command must have names or tags.')
@click.option('--ratelimit', type=int,
help='Limit the execution speed of preprocessors. Grow will '
'sleep for X seconds between running each preprocessor, '
'where X is the value of --ratelimit. This flag can be '
'useful when using mulitple Google APIs-based '
'preprocessors on the same resource to avoid rate limit '
'errors.')
def preprocess(pod_path, preprocessor, run_all, tag, ratelimit):
"""Runs preprocessors."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
pod.preprocess(preprocessor, run_all=run_all, tags=tag, ratelimit=ratelimit)
| from grow.common import utils
from grow.pods import pods
from grow.pods import storage
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--all', '-A', 'run_all', is_flag=True, default=False,
help='Whether to run all preprocessors, even if a preprocessor'
' has autorun disabled.')
@click.option('--preprocessor', '-p', type=str, multiple=True,
help='Name of preprocessor to run. Preprocessors controlled by'
' the preprocess command must have names or tags.')
@click.option('--tag', '-tag', type=str, multiple=True,
help='Tags of preprocessors to run. Preprocessors controlled by'
' the preprocess command must have names or tags.')
@click.option('--ratelimit', type=int,
help='Limit the execution speed of preprocessors. Grow will '
'sleep for X seconds between running each preprocessor, '
'where X is the value of --ratelimit. This flag can be '
'useful when using mulitple Google APIs-based '
'preprocessors on the same resource to avoid rate limit '
'errors.')
def preprocess(pod_path, preprocessor, run_all, tag, ratelimit):
"""Runs preprocessors."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
pod.preprocess(preprocessor, run_all=run_all, tags=tag, ratelimit=ratelimit)
| mit | Python |
bbc97d650d1731c1e89587183f58652e254bb7bb | Tag new release: 3.9.4 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '3.9.4'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '3.9.3'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
1be1200b94b852b84eb8e8a7e95cbc528f5ec114 | Tag new release: 3.5.1 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '3.5.1'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '3.5.0'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
754c00adda3e16b1a2bb61017d56f9d1d8959d59 | Refresh monitoring.nagios.plugin.exceptions and fix pylint+pep8. | bigbrozer/monitoring.nagios,bigbrozer/monitoring.nagios | monitoring/nagios/plugin/exceptions.py | monitoring/nagios/plugin/exceptions.py | # -*- coding: utf-8 -*-
# Copyright (C) Vincent BESANCON <besancon.vincent@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Exceptions modules for all plugins."""
class NagiosCritical(Exception):
"""
Raise to fire a CRITICAL event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "CRITICAL - %s" % msg
raise SystemExit(2)
class NagiosWarning(Exception):
"""
Raise to fire a WARNING event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "WARNING - %s" % msg
raise SystemExit(1)
class NagiosUnknown(Exception):
"""
Raise to fire a UNKNOWN event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "UNKNOWN - %s" % msg
raise SystemExit(3)
class NagiosOk(Exception):
"""
Raise to fire a OK event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "OK - %s" % msg
raise SystemExit(0)
class PluginError(StandardError):
"""
Exception when a plugin error occur.
:param output: Message to show in Nagios status information output.
:type output: str
:param longoutput: Message to show in long output (extra infos).
:type longoutput: str
"""
def __init__(self, output, longoutput, *args, **kwargs):
super(PluginError, self).__init__(*args, **kwargs)
self.message = '%s\n%s' % (output, longoutput)
def __str__(self):
return self.message
| # -*- coding: utf-8 -*-
#===============================================================================
# Filename : exceptions
# Author : Vincent BESANCON aka 'v!nZ' <besancon.vincent@gmail.com>
# Description : Define class for Nagios events and plugin exceptions.
#-------------------------------------------------------------------------------
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
class NagiosCritical(Exception):
"""Raise to fire a CRITICAL event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "CRITICAL - %s" % msg
raise SystemExit(2)
class NagiosWarning(Exception):
"""Raise to fire a WARNING event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "WARNING - %s" % msg
raise SystemExit(1)
class NagiosUnknown(Exception):
"""Raise to fire a UNKNOWN event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "UNKNOWN - %s" % msg
raise SystemExit(3)
class NagiosOk(Exception):
"""Raise to fire a OK event to Nagios and stop plugin execution.
:param msg: Output message in Nagios
:type msg: string
"""
def __init__(self, msg):
print "OK - %s" % msg
raise SystemExit(0)
class PluginError(StandardError):
"""
Exception when a plugin error occur.
:param output: Message to show in Nagios status information output.
:type output: str
:param longoutput: Message to show in long output (extra infos).
:type longoutput: str
"""
def __init__(self, output, longoutput, *args, **kwargs):
super(PluginError, self).__init__(*args, **kwargs)
self.message = '%s\n%s' % (output, longoutput)
def __str__(self):
return self.message | mit | Python |
91cb7b80f12ebb8d8f7b1b4110dea8ec6a0c0d71 | Bump version number to 0.0.4 | alphagov/gapy,alphagov/gapy | gapy/__init__.py | gapy/__init__.py | __title__ = "gapy"
__version__ = "0.0.4"
__author__ = "Rob Young"
from .error import GapyError
from .client import client_from_private_key, client_from_secrets_file
| __title__ = "gapy"
__version__ = "0.0.3"
__author__ = "Rob Young"
from .error import GapyError
from .client import client_from_private_key, client_from_secrets_file
| mit | Python |
fbf014285baa61b8294ef5dd27ea3e61c964e9b0 | add depend product_uom_qty in missing_quantity | Gebesa-Dev/Addons-gebesa | mrp_shipment/models/sale_order_line.py | mrp_shipment/models/sale_order_line.py | # -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import _, api, fields, models
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
quantity_shipped = fields.Float(
string=_(u'Quantity shipped'),
compute='_quantity_shipped',
store=True,
)
missing_quantity = fields.Float(
string=_(u'missing quantity'),
compute='_missing_quantity',
store=True,
)
shipment_line_ids = fields.One2many(
'mrp.shipment.line',
'order_line_id',
string=_('Shipment'),
)
@api.depends('shipment_line_ids', 'shipment_line_ids.quantity_shipped')
def _quantity_shipped(self):
for line in self:
# domain = [('order_line_id', '=', line.id)]
# shipment_line = self.env['mrp.shipment.line'].search(domain)
quantity_shipped = 0
for shipment in line.shipment_line_ids:
quantity_shipped += shipment.quantity_shipped
line.quantity_shipped = quantity_shipped
@api.depends('quantity_shipped', 'product_uom_qty')
def _missing_quantity(self):
for line in self:
line.missing_quantity = line.product_uom_qty - \
line.quantity_shipped
| # -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import _, api, fields, models
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
quantity_shipped = fields.Float(
string=_(u'Quantity shipped'),
compute='_quantity_shipped',
store=True,
)
missing_quantity = fields.Float(
string=_(u'missing quantity'),
compute='_missing_quantity',
store=True,
)
shipment_line_ids = fields.One2many(
'mrp.shipment.line',
'order_line_id',
string=_('Shipment'),
)
@api.depends('shipment_line_ids', 'shipment_line_ids.quantity_shipped')
def _quantity_shipped(self):
for line in self:
# domain = [('order_line_id', '=', line.id)]
# shipment_line = self.env['mrp.shipment.line'].search(domain)
quantity_shipped = 0
for shipment in line.shipment_line_ids:
quantity_shipped += shipment.quantity_shipped
line.quantity_shipped = quantity_shipped
@api.depends('quantity_shipped')
def _missing_quantity(self):
for line in self:
line.missing_quantity = line.product_uom_qty - \
line.quantity_shipped
| agpl-3.0 | Python |
2f9e49d486a307406c72ef5f3fedb8ab97081096 | Add files via upload | piyushravi/CS101-Project,piyushravi/CS101-Project | form_backend.py | form_backend.py | #!/usr/bin/python
import mysql.connector
from mysql.connector import errorcode
# Import modules for CGI handling
import cgi, cgitb
# Create instance of FieldStorage
form = cgi.FieldStorage()
cnn=mysql.connector.connect(user = "root", password = "", host = "localhost", database = "shouut")
cursor=cnn.cursor()
name = form.getvalue('name')
ashoka_id = form.getvalue('ashoka_id')
e_number = form.getvalue('e_number')
mobile = form.getvalue('mobile')
id=form.getvalue('id')
print "Content-type:text/html\r\n\r\n"
print "<html>"
print "<head>"
print "<title>Shuttle Bookings</title>"
print "</head>"
registers = 'SELECT e_number FROM seats'
cursor.execute(registers)
# Fetch all the rows in a list of lists.
result_register = cursor.fetchall()
print result_register
for i in result_register:
if int(i[0])==int(e_number):
print "User Exists"
print "\n"
print "<a href=/programs/SignUp.html>Re-Register</a>"
break
else:
print "Welcome"
addName="""INSERT INTO seats (name,ashoka_id,e_number,mobile) VALUES ('%s','%s','%s','%s')""" % (name, ashoka_id , e_number , mobile)
#addName="""INSERT INTO seats (name,ashoka_id,e_number,mobile) VALUES (abc,def,hgi,zxc)"""
cursor.execute(addName)
cnn.commit()
cnn.close()
#print "<a href='/cgi-bin/getdata.py'> Check Availability </a>"
print "<body>"
#print "<h2> Hello %s %s %s %s %s %s %s %s </h2>" % (first_name,last_name,middle_name,mobile,day,month,year,qualification)
print "</body>"
print "</html>"
| #!/usr/bin/python
import mysql.connector
from mysql.connector import errorcode
# Import modules for CGI handling
import cgi, cgitb
# Create instance of FieldStorage
form = cgi.FieldStorage()
cnn=mysql.connector.connect(user = "root", password = "", host = "localhost", database = "shouut")
cursor=cnn.cursor()
name = form.getvalue('name')
ashoka_id = form.getvalue('ashoka_id')
e_number = form.getvalue('e_number')
mobile = form.getvalue('mobile')
print "Content-type:text/html\r\n\r\n"
print "<html>"
print "<head>"
print "<title>Shuttle Bookings</title>"
print "</head>"
sql = 'SELECT e_number FROM seats'
cursor.execute(sql)
# Fetch all the rows in a list of lists.
results = cursor.fetchall()
for i in results:
if i[0]==int(e_number):
print "User Exists"
print "\n"
print "<a href=/programs/SignUp.html>Re-Register</a>"
break
else:
if int(id)==20:
print "Content-type:text/html\r\n\r\n"
print "Full"
break
else:
print "Content-type:text/html\r\n\r\n"
print "Welcome"
addName="INSERT INTO seats (name,ashoka_id,e_number,mobile) VALUES ('%s',%s',d,d)" % (name, ashoka_id , e_number , mobile)
cursor.execute(addName)
print "Content-type:text/html\r\n\r\n"
#print "<a href='/cgi-bin/getdata.py'> Check Availability </a>"
print "<body>"
#print "<h2> Hello %s %s %s %s %s %s %s %s </h2>" % (first_name,last_name,middle_name,mobile,day,month,year,qualification)
print "</body>"
print "</html>"
cnn.commit()
cnn.close() | mit | Python |
d4e7f49f25b7ab4bf83b273cc08e002e7e7f6c7f | Use the animator module in framebuilder | twoodford/audiovisualizer | framebuilder.py | framebuilder.py | import audiovisualizer
import PIL.Image
import PIL.ImageDraw
import pylab
import scipy
import scipy.io
import scipy.signal
# The sample rate of the input matrix (Hz)
SAMPLE_RATE=44100
# Frequency range to display (audible is 16-16384Hz)
DISPLAY_FREQ=(16, 1000)
# FPS of output (Hz)
OUT_FPS = 30
# Size of the moving average (s)
SAMPLE_TIME=0.5
# Output image size
OUT_SIZE = (800, 400)
audio = scipy.io.loadmat("song1.mat")['x']
mf = audiovisualizer.movingfft.moving_fft(audio, SAMPLE_TIME, OUT_FPS, SAMPLE_RATE)
mf = audiovisualizer.movingfft.isolate_freq_range(mf, DISPLAY_FREQ, SAMPLE_RATE, SAMPLE_TIME)
drum1 = audiovisualizer.movingfft.extract_freq(mf, DISPLAY_FREQ, (320,335), SAMPLE_RATE, SAMPLE_TIME)
FC=10/(0.5*SAMPLE_RATE)
N=1001
a=1
b=scipy.signal.firwin(N, cutoff=FC, window='hamming')
#lowpassed = scipy.signal.lfilter(b, a, audio)
mf2 = audiovisualizer.movingfft.moving_fft(audio, 1.5, OUT_FPS, SAMPLE_RATE)
#mf2 = audiovisualizer.movingfft.normalize_freq(mf2)
print(mf2)
fvis = audiovisualizer.movingfft.isolate_freq_range(mf2, (500, 1500), SAMPLE_RATE, SAMPLE_TIME)
visualizers = [
audiovisualizer.widgets.DrumCircle(drum1, (100, 100), 50, 100, ((0, (50, 230, 30)), (1455, (200, 0, 200)))),
audiovisualizer.widgets.FrequencyPoints(fvis, [(200, 0), (1600, 800)], ((0, (0, 0, 255)),), 60),
audiovisualizer.widgets.MeterDisplay(fvis, 17, (100, 350), 80, (0,240,255))
]
for frame, image in enumerate(audiovisualizer.animator.make_frames(visualizers, mf.shape[0], OUT_SIZE)):
image.save(open("frame_"+str(frame).zfill(4)+".png", "wb"), "PNG")
frame += 1
| import audiovisualizer
import PIL.Image
import PIL.ImageDraw
import pylab
import scipy
import scipy.io
import scipy.signal
# The sample rate of the input matrix (Hz)
SAMPLE_RATE=44100
# Frequency range to display (audible is 16-16384Hz)
DISPLAY_FREQ=(16, 1000)
# FPS of output (Hz)
OUT_FPS = 30
# Size of the moving average (s)
SAMPLE_TIME=0.5
# Output image size
OUT_SIZE = (400, 400)
audio = scipy.io.loadmat("song1.mat")['x']
mf = audiovisualizer.movingfft.moving_fft(audio, SAMPLE_TIME, OUT_FPS, SAMPLE_RATE)
print(mf.shape)
mf = audiovisualizer.movingfft.isolate_freq_range(mf, DISPLAY_FREQ, SAMPLE_RATE, SAMPLE_TIME)
print(mf.shape)
drum1 = audiovisualizer.movingfft.extract_freq(mf, DISPLAY_FREQ, (320,335), SAMPLE_RATE, SAMPLE_TIME)
visualizers = [
audiovisualizer.widgets.DrumCircle(drum1, (100, 100), 50, 100, (50, 230, 30))
]
background = PIL.Image.new("RGBA", OUT_SIZE)
PIL.ImageDraw.Draw(background).rectangle([(0,0), OUT_SIZE], fill=(0,0,0,255))
frame = 0
for sindex in range(0, mf.shape[0]):
image = PIL.Image.new("RGBA", OUT_SIZE)
imgdr = PIL.ImageDraw.Draw(image)
imgdr.rectangle([(0,0), (100,100)], fill=(100, 0, 0, 255))
for vis in visualizers:
vis.display(sindex, imgdr)
PIL.Image.alpha_composite(background, image).save(open("frame_"+str(frame).zfill(4)+".png", "wb"), "PNG")
frame += 1
| apache-2.0 | Python |
913985dec32feadc84428b48cf463d8589ed1396 | Test zero_momentum after attaching | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/md/pytest/test_zero_momentum.py | hoomd/md/pytest/test_zero_momentum.py | import hoomd
import numpy as np
import pytest
def test_before_attaching():
trigger = hoomd.trigger.Periodic(100)
zm = hoomd.md.update.ZeroMomentum(trigger)
assert zm.trigger is trigger
trigger = hoomd.trigger.Periodic(10, 30)
zm.trigger = trigger
assert zm.trigger is trigger
def test_after_attaching(simulation_factory,
two_particle_snapshot_factory):
brownian = hoomd.md.methods.Brownian(filter=hoomd.filter.All(),
kT=hoomd.variant.Constant(2.0),
seed=2)
sim = simulation_factory(two_particle_snapshot_factory())
sim.operations.integrator = hoomd.md.Integrator(0.005, methods=[brownian])
trigger = hoomd.trigger.Periodic(100)
zm = hoomd.md.update.ZeroMomentum(trigger)
sim.operations.add(zm)
sim.run(0)
assert zm.trigger is trigger
trigger = hoomd.trigger.Periodic(10, 30)
zm.trigger = trigger
assert zm.trigger is trigger
sim.run(100)
| import hoomd
import numpy as np
import pytest
def test_before_attaching():
trigger = hoomd.trigger.Periodic(100)
zm = hoomd.md.update.ZeroMomentum(trigger)
assert zm.trigger is trigger
trigger = hoomd.trigger.Periodic(10, 30)
zm.trigger = trigger
assert zm.trigger is trigger
| bsd-3-clause | Python |
7bfe699afb9e49bc090c531c8615402012aa5ac2 | Optimize gsxt_mobile code | 9468305/script | gsxt_mobile/GongShiQuery.py | gsxt_mobile/GongShiQuery.py | #!/usr/local/bin/python3
# -*- coding: utf-8 -*-
'''通过国家企业信用信息公示系统(www.gsxt.gov.cn) Mobile App HTTP API 查询统一社会信用代码'''
import json
import requests
URL = 'http://yd.gsxt.gov.cn/QuerySummary'
MOBILE_ACTION = 'entSearch'
TOPIC = 1
PAGE_NUM = 1
PAGE_SIZE = 10
USER_ID = 'id001'
USER_IP = '192.168.0.1'
USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4.2; vivo Y28L Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36 Html5Plus/1.0'
ACCEPT_LANGUAGE = 'zh-CN,en-US;q=0.8'
XRW = 'com.zongjucredit'
ORIGIN = 'file://'
CHARSET = 'application/x-www-form-urlencoded; charset=UTF-8'
def main(keyword):
'''main entry'''
_data = [('mobileAction', MOBILE_ACTION),
('keywords', keyword),
('topic', TOPIC),
('pageNum', PAGE_NUM),
('pageSize', PAGE_SIZE),
('userID', USER_ID),
('userIP', USER_IP)]
_headers = {'User-Agent': USER_AGENT,
'Accept-Language': ACCEPT_LANGUAGE,
'X-Requested-With': XRW,
'Origin': ORIGIN,
'Content-Type': CHARSET}
_response = requests.post(URL, data=_data, headers=_headers)
print(_response.status_code)
if _response.status_code == 200:
_content = _response.json()
print(json.dumps(_content, indent=2, sort_keys=True, ensure_ascii=False))
else:
print('request fail')
if __name__ == "__main__":
KEY_WORD = '腾讯科技'
main(KEY_WORD)
| #!/usr/local/bin/python3
# -*- coding: utf-8 -*-
'''Query Uniform-Social-Credit-Code from National-Enterprise-Credit-Information-Publicity-System.'''
import json
import requests
if __name__ == "__main__":
URL = 'http://yd.gsxt.gov.cn/QuerySummary'
MOBILE_ACTION = 'entSearch'
KEY_WORDS = '腾讯科技'
TOPIC = 1
PAGE_NUM = 1
PAGE_SIZE = 10
USER_ID = 'id001'
USER_IP = '192.168.0.1'
DATA = [('mobileAction', MOBILE_ACTION),
('keywords', KEY_WORDS),
('topic', TOPIC),
('pageNum', PAGE_NUM),
('pageSize', PAGE_SIZE),
('userID', USER_ID),
('userIP', USER_IP)]
USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4.2; vivo Y28L Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36 Html5Plus/1.0'
ACCEPT_LANGUAGE = 'zh-CN,en-US;q=0.8'
XRW = 'com.zongjucredit'
ORIGIN = 'file://'
CHARSET = 'application/x-www-form-urlencoded; charset=UTF-8'
HEADERS = {'User-Agent': USER_AGENT,
'Accept-Language': ACCEPT_LANGUAGE,
'X-Requested-With': XRW,
'Origin': ORIGIN,
'Content-Type': CHARSET}
RESPONSE = requests.post(URL, data=DATA, headers=HEADERS)
print(RESPONSE.status_code)
if RESPONSE.status_code == 200:
CONTENT = RESPONSE.json()
print(json.dumps(CONTENT, indent=2, sort_keys=True, ensure_ascii=False))
else:
print('request fail')
| mit | Python |
c98879268c01a67abe88c2f8e15bff9d93d587cc | Remove test skipping since #1396954 was fixed | tsufiev/horizon,NCI-Cloud/horizon,mdavid/horizon,orbitfp7/horizon,Dark-Hacker/horizon,Tesora/tesora-horizon,Solinea/horizon,sandvine/horizon,promptworks/horizon,Daniex/horizon,watonyweng/horizon,sandvine/horizon,tellesnobrega/horizon,ChameleonCloud/horizon,ChameleonCloud/horizon,CiscoSystems/horizon,RudoCris/horizon,anthonydillon/horizon,karthik-suresh/horizon,noironetworks/horizon,CiscoSystems/horizon,Mirantis/mos-horizon,FNST-OpenStack/horizon,karthik-suresh/horizon,django-leonardo/horizon,vladryk/horizon,mandeepdhami/horizon,wolverineav/horizon,django-leonardo/horizon,wangxiangyu/horizon,coreycb/horizon,django-leonardo/horizon,vladryk/horizon,pranavtendolkr/horizon,saydulk/horizon,redhat-openstack/horizon,coreycb/horizon,redhat-openstack/horizon,wangxiangyu/horizon,icloudrnd/automation_tools,gerrive/horizon,karthik-suresh/horizon,anthonydillon/horizon,django-leonardo/horizon,luhanhan/horizon,eayunstack/horizon,eayunstack/horizon,maestro-hybrid-cloud/horizon,xinwu/horizon,endorphinl/horizon,Dark-Hacker/horizon,maestro-hybrid-cloud/horizon,Mirantis/mos-horizon,icloudrnd/automation_tools,saydulk/horizon,dan1/horizon-x509,BiznetGIO/horizon,Dark-Hacker/horizon,doug-fish/horizon,redhat-openstack/horizon,dan1/horizon-proto,BiznetGIO/horizon,openstack/horizon,mdavid/horizon,newrocknj/horizon,liyitest/rr,yjxtogo/horizon,Tesora/tesora-horizon,pranavtendolkr/horizon,davidcusatis/horizon,watonyweng/horizon,mandeepdhami/horizon,dan1/horizon-proto,damien-dg/horizon,dan1/horizon-x509,kfox1111/horizon,icloudrnd/automation_tools,bigswitch/horizon,endorphinl/horizon-fork,Metaswitch/horizon,saydulk/horizon,Solinea/horizon,tqtran7/horizon,RudoCris/horizon,RudoCris/horizon,newrocknj/horizon,redhat-cip/horizon,openstack/horizon,BiznetGIO/horizon,blueboxgroup/horizon,endorphinl/horizon-fork,philoniare/horizon,wangxiangyu/horizon,idjaw/horizon,BiznetGIO/horizon,endorphinl/horizon-fork,bigswitch/horizon,idjaw/horizon,blueboxgroup/horizon,yeming233/horizon,promptworks/horizon,liyitest/rr,tqtran7/horizon,NCI-Cloud/horizon,coreycb/horizon,davidcusatis/horizon,yeming233/horizon,newrocknj/horizon,dan1/horizon-x509,sandvine/horizon,sandvine/horizon,VaneCloud/horizon,yeming233/horizon,luhanhan/horizon,kfox1111/horizon,Solinea/horizon,FNST-OpenStack/horizon,dan1/horizon-x509,pranavtendolkr/horizon,tsufiev/horizon,Daniex/horizon,Mirantis/mos-horizon,promptworks/horizon,openstack/horizon,Daniex/horizon,mdavid/horizon,Metaswitch/horizon,Metaswitch/horizon,gerrive/horizon,tellesnobrega/horizon,endorphinl/horizon-fork,redhat-openstack/horizon,Daniex/horizon,vladryk/horizon,liyitest/rr,Mirantis/mos-horizon,blueboxgroup/horizon,eayunstack/horizon,tsufiev/horizon,izadorozhna/dashboard_integration_tests,mandeepdhami/horizon,henaras/horizon,bigswitch/horizon,davidcusatis/horizon,Metaswitch/horizon,takeshineshiro/horizon,damien-dg/horizon,yjxtogo/horizon,Tesora/tesora-horizon,noironetworks/horizon,endorphinl/horizon,Hodorable/0602,doug-fish/horizon,pranavtendolkr/horizon,j4/horizon,takeshineshiro/horizon,wolverineav/horizon,izadorozhna/dashboard_integration_tests,yjxtogo/horizon,newrocknj/horizon,j4/horizon,tellesnobrega/horizon,promptworks/horizon,orbitfp7/horizon,orbitfp7/horizon,anthonydillon/horizon,NeCTAR-RC/horizon,tqtran7/horizon,redhat-cip/horizon,noironetworks/horizon,VaneCloud/horizon,j4/horizon,bac/horizon,gerrive/horizon,takeshineshiro/horizon,gerrive/horizon,watonyweng/horizon,redhat-cip/horizon,liyitest/rr,Hodorable/0602,henaras/horizon,philoniare/horizon,xinwu/horizon,luhanhan/horizon,philoniare/horizon,bigswitch/horizon,NeCTAR-RC/horizon,bac/horizon,davidcusatis/horizon,wangxiangyu/horizon,wolverineav/horizon,tsufiev/horizon,NCI-Cloud/horizon,agileblaze/OpenStackTwoFactorAuthentication,ChameleonCloud/horizon,tqtran7/horizon,CiscoSystems/horizon,NeCTAR-RC/horizon,CiscoSystems/horizon,kfox1111/horizon,saydulk/horizon,openstack/horizon,mandeepdhami/horizon,wolverineav/horizon,dan1/horizon-proto,ChameleonCloud/horizon,Tesora/tesora-horizon,agileblaze/OpenStackTwoFactorAuthentication,NCI-Cloud/horizon,redhat-cip/horizon,agileblaze/OpenStackTwoFactorAuthentication,agileblaze/OpenStackTwoFactorAuthentication,henaras/horizon,VaneCloud/horizon,RudoCris/horizon,damien-dg/horizon,vladryk/horizon,philoniare/horizon,xinwu/horizon,damien-dg/horizon,luhanhan/horizon,noironetworks/horizon,kfox1111/horizon,Hodorable/0602,j4/horizon,bac/horizon,anthonydillon/horizon,maestro-hybrid-cloud/horizon,watonyweng/horizon,orbitfp7/horizon,Hodorable/0602,tellesnobrega/horizon,Dark-Hacker/horizon,coreycb/horizon,doug-fish/horizon,VaneCloud/horizon,doug-fish/horizon,Solinea/horizon,mdavid/horizon,dan1/horizon-proto,endorphinl/horizon,henaras/horizon,blueboxgroup/horizon,takeshineshiro/horizon,NeCTAR-RC/horizon,icloudrnd/automation_tools,yeming233/horizon,idjaw/horizon,endorphinl/horizon,yjxtogo/horizon,idjaw/horizon,FNST-OpenStack/horizon,karthik-suresh/horizon,bac/horizon,xinwu/horizon,FNST-OpenStack/horizon,maestro-hybrid-cloud/horizon | horizon/test/jasmine/jasmine_tests.py | horizon/test/jasmine/jasmine_tests.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.test import helpers as test
class ServicesTests(test.JasmineTests):
sources = [
'horizon/js/horizon.js',
'horizon/js/angular/horizon.conf.js',
'horizon/js/angular/horizon.js',
'horizon/js/angular/services/horizon.utils.js',
'horizon/js/angular/controllers/metadata-widget-controller.js',
'horizon/js/angular/hz.api.module.js',
'horizon/js/angular/services/hz.api.service.js',
'angular/widget.module.js',
'angular/help-panel/help-panel.js',
'angular/wizard/wizard.js',
'angular/table/table.js',
'angular/modal/modal.js',
]
specs = [
'horizon/tests/jasmine/utilsSpec.js',
'horizon/tests/jasmine/metadataWidgetControllerSpec.js',
'horizon/js/angular/services/hz.api.service.spec.js',
'angular/help-panel/help-panel.spec.js',
'angular/wizard/wizard.spec.js',
'angular/table/table.spec.js',
'angular/modal/simple-modal.spec.js',
]
externalTemplates = [
'angular/help-panel/help-panel.html',
'angular/wizard/wizard.html'
]
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils import unittest
from horizon.test import helpers as test
@unittest.skip("Temporarily disabled, see bug #1396954")
class ServicesTests(test.JasmineTests):
sources = [
'horizon/js/horizon.js',
'horizon/js/angular/horizon.conf.js',
'horizon/js/angular/horizon.js',
'horizon/js/angular/services/horizon.utils.js',
'horizon/js/angular/controllers/metadata-widget-controller.js',
'horizon/js/angular/hz.api.module.js',
'horizon/js/angular/services/hz.api.service.js',
'angular/widget.module.js',
'angular/help-panel/help-panel.js',
'angular/wizard/wizard.js',
'angular/table/table.js',
'angular/modal/modal.js',
]
specs = [
'horizon/tests/jasmine/utilsSpec.js',
'horizon/tests/jasmine/metadataWidgetControllerSpec.js',
'horizon/js/angular/services/hz.api.service.spec.js',
'angular/help-panel/help-panel.spec.js',
'angular/wizard/wizard.spec.js',
'angular/table/table.spec.js',
'angular/modal/simple-modal.spec.js',
]
externalTemplates = [
'angular/help-panel/help-panel.html',
'angular/wizard/wizard.html'
]
| apache-2.0 | Python |
20d21b851d02bbcf8c6a0f065b9f05f5e0bfc662 | Use format=5 in YT search to prevent "embedding disabled" | 6/GeoDJ,6/GeoDJ | geodj/youtube.py | geodj/youtube.py | from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'relevance'
query.racy = 'exclude'
query.format = '5'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': artist, 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
title = smart_str(entry.media.title.text).lower()
if entry.rating is not None and float(entry.rating.average) < 3.5:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in title:
return False
if re.search("\b(concert|cover)\b", title):
return False
return True
| from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'relevance'
query.racy = 'exclude'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': artist, 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
title = smart_str(entry.media.title.text).lower()
if entry.rating is not None and float(entry.rating.average) < 3.5:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in title:
return False
if re.search("\b(concert|cover)\b", title):
return False
return True
| mit | Python |
29076f5d1f93601bbab4783e38d2df82badf4891 | Add sources to sub-objects. | rshorey/pupa,influence-usa/pupa,mileswwatkins/pupa,opencivicdata/pupa,mileswwatkins/pupa,datamade/pupa,opencivicdata/pupa,influence-usa/pupa,datamade/pupa,rshorey/pupa | pupa/scrape/helpers.py | pupa/scrape/helpers.py | """ these are helper classes for object creation during the scrape """
from larvae.person import Person
from larvae.organization import Organization
from larvae.membership import Membership
class Legislator(Person):
_is_legislator = True
__slots__ = ('district', 'party', 'chamber', '_contact_details')
def __init__(self, name, district, party=None, chamber=None, **kwargs):
super(Legislator, self).__init__(name, **kwargs)
self.district = district
self.party = party
self.chamber = chamber
self._contact_details = []
def add_contact(self, type, value, note):
self._contact_details.append({'type': type, 'value': value,
'note': note})
def add_committee_membership(self, com_name, role='member'):
org = Organization(com_name, classification='committee')
self.add_membership(org, role=role)
org.sources = self.sources
self._related.append(org)
class Committee(Organization):
def __init__(self, *args, **kwargs):
super(Committee, self).__init__(*args, **kwargs)
def add_member(self, name, role='member', **kwargs):
member = Person(name)
membership = Membership(member._id, self._id, role=role,
**kwargs)
member.sources = self.sources
self._related.append(member)
self._related.append(membership)
| """ these are helper classes for object creation during the scrape """
from larvae.person import Person
from larvae.organization import Organization
from larvae.membership import Membership
class Legislator(Person):
_is_legislator = True
__slots__ = ('district', 'party', 'chamber', '_contact_details')
def __init__(self, name, district, party=None, chamber=None, **kwargs):
super(Legislator, self).__init__(name, **kwargs)
self.district = district
self.party = party
self.chamber = chamber
self._contact_details = []
def add_contact(self, type, value, note):
self._contact_details.append({'type': type, 'value': value,
'note': note})
def add_committee_membership(self, com_name, role='member'):
org = Organization(com_name, classification='committee')
self.add_membership(org, role=role)
self._related.append(org)
class Committee(Organization):
def __init__(self, *args, **kwargs):
super(Committee, self).__init__(*args, **kwargs)
def add_member(self, name, role='member', **kwargs):
member = Person(name)
membership = Membership(member._id, self._id, role=role,
**kwargs)
self._related.append(member)
self._related.append(membership)
| bsd-3-clause | Python |
822b119e770f3494cd622f7723350faa518bf984 | Simplify redirect code and make it easier to read | Inboxen/website,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen | views/inbox/delete.py | views/inbox/delete.py | ##
# Copyright (C) 2013 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
from inboxen.models import Email
def delete(request, email_address, emailid):
emailid = int(emailid, 16)
alias, domain = email_address.split("@", 1)
daes = alias == "support" #daes = does alias equal support :P
try:
if request.user.is_staff and daes:
email = Email.objects.filter(id=emailid).only("id")
else:
email = Email.objects.filter(id=emailid, user=request.user).only("id")
email.delete()
except Email.DoesNotExist:
raise Http404
# check if they were on the admin support page, if so return them there
if daes and request.user.is_staff:
return HttpResponseRedirect("/admin/support")
else:
return HttpResponseRedirect("/inbox/%s/" % email_address)
| ##
# Copyright (C) 2013 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
from inboxen.models import Email
def delete(request, email_address, emailid):
emailid = int(emailid, 16)
alias, domain = email_address.split("@", 1)
try:
if request.user.is_staff and alias == "support":
email = Email.objects.filter(id=emailid).only("id")
else:
email = Email.objects.filter(id=emailid, user=request.user).only("id")
email.delete()
except Email.DoesNotExist:
raise Http404
# check if they were on the admin support page, if so return them there
# todo: could this be done better?
if request.META["HTTP_REFERER"].endswith("/admin/support/") and request.user.is_staff:
return HttpResponseRedirect("/admin/support")
return HttpResponseRedirect("/inbox/%s/" % email_address)
| agpl-3.0 | Python |
855ae722340cfaaef6a6e9cc1be91e43bc46f0c8 | Add inclusion of jasyhelper from appcache in unify | unify/unify,unify/unify,unify/unify,unify/unify,unify/unify,unify/unify | unify/support/jasy/unify.py | unify/support/jasy/unify.py | import webbrowser, http.server, os, multiprocessing
from jasy.core import Project
appcache_project = Project.getProjectByName("appcache")
exec(compile(open(os.path.realpath(os.path.abspath(appcache_project.getPath() + "/jasyhelper.py"))).read(), "jasyhelper.py", 'exec'))
def unify_source():
# Permutation independend config
jsFormatting.enable("comma")
jsFormatting.enable("semicolon")
jsOptimization.disable("privates")
jsOptimization.disable("variables")
jsOptimization.disable("declarations")
jsOptimization.disable("blocks")
# Assets
assetManager.addSourceProfile()
# Store loader script
includedByKernel = storeKernel("script/kernel.js")
# Process every possible permutation
for permutation in session.permutate():
# Resolving dependencies
resolver = Resolver().addClassName("%s.Application" % NAMESPACE).excludeClasses(includedByKernel)
# Building class loader
storeLoader(resolver.getSortedClasses(), "script/%s-%s.js" % (NAMESPACE, permutation.getChecksum()), "unify.core.Init.startUp();")
def unify_build(cdnPrefix="asset"):
# Assets
assetManager.addBuildProfile(cdnPrefix)
assetManager.deploy(Resolver().addClassName("%s.Application" % NAMESPACE).getIncludedClasses())
# Store loader script
includedByKernel = storeKernel("script/kernel.js")
# Copy files from source
updateFile("source/index.html", "index.html")
# Process every possible permutation
for permutation in session.getPermutations():
# Resolving dependencies
resolver = Resolver().addClassName("%s.Application" % NAMESPACE).excludeClasses(includedByKernel)
# Compressing classes
storeCompressed(resolver.getSortedClasses(), "script/%s-%s.js" % (NAMESPACE, permutation.getChecksum()), "unify.core.Init.startUp();")
def run_server(port):
Handler = http.server.SimpleHTTPRequestHandler
httpd = http.server.HTTPServer(("",port), Handler)
print("Serve on 8095")
httpd.serve_forever()
@task("Open help in browser")
def help():
# Clearing cache
webbrowser.open("http://unify-training.com/")
@task("Start webserver on localhost")
def server(port=8000):
p = multiprocessing.Process(target=run_server, args=(port,))
p.start()
webbrowser.open("http://localhost:%d/" % port)
| import webbrowser, http.server, os, multiprocessing
def unify_source():
# Permutation independend config
jsFormatting.enable("comma")
jsFormatting.enable("semicolon")
jsOptimization.disable("privates")
jsOptimization.disable("variables")
jsOptimization.disable("declarations")
jsOptimization.disable("blocks")
# Assets
assetManager.addSourceProfile()
# Store loader script
includedByKernel = storeKernel("script/kernel.js")
# Process every possible permutation
for permutation in session.permutate():
# Resolving dependencies
resolver = Resolver().addClassName("%s.Application" % NAMESPACE).excludeClasses(includedByKernel)
# Building class loader
storeLoader(resolver.getSortedClasses(), "script/%s-%s.js" % (NAMESPACE, permutation.getChecksum()), "unify.core.Init.startUp();")
def unify_build(cdnPrefix="asset"):
# Assets
assetManager.addBuildProfile(cdnPrefix)
assetManager.deploy(Resolver().addClassName("%s.Application" % NAMESPACE).getIncludedClasses())
# Store loader script
includedByKernel = storeKernel("script/kernel.js")
# Copy files from source
updateFile("source/index.html", "index.html")
# Process every possible permutation
for permutation in session.getPermutations():
# Resolving dependencies
resolver = Resolver().addClassName("%s.Application" % NAMESPACE).excludeClasses(includedByKernel)
# Compressing classes
storeCompressed(resolver.getSortedClasses(), "script/%s-%s.js" % (NAMESPACE, permutation.getChecksum()), "unify.core.Init.startUp();")
def run_server(port):
Handler = http.server.SimpleHTTPRequestHandler
httpd = http.server.HTTPServer(("",port), Handler)
print("Serve on 8095")
httpd.serve_forever()
@task("Open help in browser")
def help():
# Clearing cache
webbrowser.open("http://unify-training.com/")
@task("Start webserver on localhost")
def server(port=8000):
p = multiprocessing.Process(target=run_server, args=(port,))
p.start()
webbrowser.open("http://localhost:%d/" % port)
| mit | Python |
b7aef5fe6ddfced35cb7546e73b6a256f9de79d9 | use requests instead of urllib | janiheikkinen/irods,janiheikkinen/irods,janiheikkinen/irods,PaulVanSchayck/irods,PaulVanSchayck/irods,janiheikkinen/irods,PaulVanSchayck/irods,janiheikkinen/irods,PaulVanSchayck/irods,janiheikkinen/irods,janiheikkinen/irods,PaulVanSchayck/irods,PaulVanSchayck/irods,PaulVanSchayck/irods,janiheikkinen/irods,PaulVanSchayck/irods,janiheikkinen/irods,PaulVanSchayck/irods | iRODS/scripts/python/validate_json.py | iRODS/scripts/python/validate_json.py | from __future__ import print_function
import json
import sys
import requests
if len(sys.argv) != 3:
sys.exit('Usage: {0} <configuration_file> <schema_url>'.format(sys.argv[0]))
def print_error(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
try:
import jsonschema
except ImportError:
print_error('WARNING: Validation Failed for ['+sys.argv[1]+'] -- jsonschema not installed')
sys.exit(0)
try:
# load configuration file
with open(sys.argv[1], 'r+') as f:
config = json.load(f)
# load the schema url
response = requests.get(sys.argv[2])
schema = json.loads(response.text)
# validate
jsonschema.validate( config, schema )
except (jsonschema.exceptions.RefResolutionError) as e:
print_error('WARNING: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(0)
except (
ValueError,
jsonschema.exceptions.ValidationError,
jsonschema.exceptions.SchemaError
) as e:
print_error('ERROR: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(1)
except Exception as e:
print_error('ERROR: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(1)
except:
sys.exit(1)
else:
print("Validating ["+sys.argv[1]+"]... Success")
sys.exit(0)
| from __future__ import print_function
import json
import sys
try:
# python 3+
from urllib.request import urlopen
except ImportError:
# python 2
from urllib2 import urlopen
if len(sys.argv) != 3:
sys.exit('Usage: {0} configuration_file schema_url'.format(sys.argv[0]))
def print_error(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
try:
import jsonschema
except ImportError:
print_error('WARNING: Validation Failed for ['+sys.argv[1]+'] -- jsonschema not installed')
sys.exit(0)
try:
# load configuration file
with open(sys.argv[1], 'r+') as f:
config = json.load(f)
# load the schema url
response = urlopen(sys.argv[2])
schema = json.loads(response.read())
# validate
jsonschema.validate( config, schema )
except (jsonschema.exceptions.RefResolutionError) as e:
print_error('WARNING: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(0)
except (
ValueError,
jsonschema.exceptions.ValidationError,
jsonschema.exceptions.SchemaError
) as e:
print_error('ERROR: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(1)
except Exception as e:
print_error('ERROR: Validation Failed for ['+sys.argv[1]+']')
print_error(" {0}: {1}".format(e.__class__.__name__, e))
sys.exit(1)
except:
sys.exit(1)
else:
print("Validating ["+sys.argv[1]+"]... Success")
sys.exit(0)
| bsd-3-clause | Python |
cc1e2e85d86f728d2d90599c752a9d52ca3b756f | Add connection pytest fixture | jonathanstallings/learning-journal,jonathanstallings/learning-journal | test_journal.py | test_journal.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError
TEST_DATABASE_URL = os.environ.get(
'DATABASE_URL',
'postgresql://jonathan:@localhost:5432/test-learning-journal'
)
os.environ['DATABASE_URL'] = TEST_DATABASE_URL
os.environ['TESTING'] = "True"
import journal
@pytest.fixture(scope='session')
def connection(request):
engine = create_engine(TEST_DATABASE_URL)
journal.Base.metadata.create_all(engine)
connection = engine.connect()
journal.DBSession.registry.clear()
journal.DBSession.configure(bind=connection)
journal.Base.metadata.bind = engine
request.addfinalizer(journal.Base.metadata.drop_all)
return connection
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError
TEST_DATABASE_URL = os.environ.get(
'DATABASE_URL',
'postgresql://jonathan:@localhost:5432/test-learning-journal'
)
os.environ['DATABASE_URL'] = TEST_DATABASE_URL
os.environ['TESTING'] = "True"
import journal
| mit | Python |
175f32ea419e43766216b7692436d5614405a7b2 | remove print statements | TomAlanCarroll/synthia | synthia.py | synthia.py | """
Synthia
~~~~~~
The Synthetic Intelligent Assistant for your home
"""
from flask import Flask
import weather, requests, json, play_audio
app = Flask(__name__)
# Calls a function to get a customer morning message, then plays it
def play_morning_message():
message = get_morning_message()
play_audio.play_message(message)
# Gets a custom morning message with helpful tips to start your day
def get_morning_message():
message = 'Good morning! '
result = requests.get(weather.API_END_POINT);
content = result.content
data = json.loads(content)
weather_data = data.get('weather')
current_weather = weather_data[0].get('main')
current_weather_desc = weather_data[0].get('description')
if current_weather:
message += 'The current weather is ' + current_weather_desc + " in Berlin. "
current_weather_reminder_message = get_current_weather_reminder_message(current_weather)
if current_weather_reminder_message:
message += current_weather_reminder_message
message += 'Have a nice day and don\'t forget your keys!'
return message
# Returns a message based off the current weather
def get_current_weather_reminder_message(current_weather):
if current_weather == weather.DRIZZLE or current_weather == weather.RAIN:
return 'You should bring an umbrella. '
if current_weather == weather.SUNNY:
return 'You should wear sunglasses. '
return ''
# Play a welcome home message
def play_evening_message():
message = get_welcome_home_message()
play_audio.play_message(message)
play_song()
# Get a custom welcome home message
def get_welcome_home_message(name = ''):
message = 'Welcome home'
if name:
message += ', ' + name
return message
# Play a mp3 music file
def play_song():
audio_file = "songs/evening.mp3"
play_audio.play_audio_file(audio_file)
| """
Synthia
~~~~~~
The Synthetic Intelligent Assistant for your home
"""
from flask import Flask
import weather, requests, json, play_audio
app = Flask(__name__)
# Calls a function to get a customer morning message, then plays it
def play_morning_message():
message = get_morning_message()
play_audio.play_message(message)
# Gets a custom morning message with helpful tips to start your day
def get_morning_message():
message = 'Good morning! '
result = requests.get(weather.API_END_POINT);
content = result.content
data = json.loads(content)
weather_data = data.get('weather')
current_weather = weather_data[0].get('main')
current_weather_desc = weather_data[0].get('description')
if current_weather:
message += 'The current weather is ' + current_weather_desc + " in Berlin. "
current_weather_reminder_message = get_current_weather_reminder_message(current_weather)
print(current_weather)
print(weather.RAIN)
print(current_weather_reminder_message)
if current_weather_reminder_message:
message += current_weather_reminder_message
message += 'Have a nice day and don\'t forget your keys!'
return message
# Returns a message based off the current weather
def get_current_weather_reminder_message(current_weather):
if current_weather == weather.DRIZZLE or current_weather == weather.RAIN:
return 'You should bring an umbrella. '
if current_weather == weather.SUNNY:
return 'You should wear sunglasses. '
return ''
# Play a welcome home message
def play_evening_message():
message = get_welcome_home_message()
play_audio.play_message(message)
play_song()
# Get a custom welcome home message
def get_welcome_home_message(name = ''):
message = 'Welcome home'
if name:
message += ', ' + name
return message
# Play a mp3 music file
def play_song():
audio_file = "songs/evening.mp3"
play_audio.play_audio_file(audio_file)
| apache-2.0 | Python |
1d6bb5e7ce706c8f54599f98744f3a5d62ce104e | Replace get_base_dir and set_base_dir with more abstract methods get and set | mmetering/mmetering-cli | src/config.py | src/config.py | import os
import ConfigParser as configparser
class Config(object):
def __init__(self):
self.config = configparser.RawConfigParser()
self.configfile = os.path.expanduser('~/.mmetering-clirc')
if not os.path.isfile(self.configfile):
# setup a new config file
self.init_file()
def init_file(self):
self.config.add_section('mmetering')
with open(self.configfile, 'a+') as configfile:
self.config.write(configfile)
def read(self):
self.config.read(self.configfile)
def write(self):
with open(self.configfile, 'wb') as configfile:
self.config.write(configfile)
def set(self, section, name, value):
val = os.path.expanduser(value)
self.read()
self.config.set(section, name, val)
self.write()
def get(self, section, name):
self.read()
try:
return self.config.get(section, name)
except configparser.NoOptionError:
print 'No %s specified in %s' % (name, self.configfile)
print 'Try \'mmetering-cli setup\''
| import os
import ConfigParser as configparser
class Config(object):
def __init__(self):
self.config = configparser.RawConfigParser()
self.configfile = os.path.expanduser('~/.mmetering-clirc')
if not os.path.isfile(self.configfile):
# setup a new config file
self.init_file()
def init_file(self):
self.config.add_section('mmetering')
with open(self.configfile, 'a+') as configfile:
self.config.write(configfile)
def read(self):
self.config.read(self.configfile)
def write(self):
with open(self.configfile, 'wb') as configfile:
self.config.write(configfile)
def get_base_dir(self):
self.read()
try:
return self.config.get('mmetering', 'base_dir')
except configparser.NoOptionError:
print 'No base_dir specified in %s' % self.configfile
print 'Use mmetering-cli setup'
def set_base_dir(self, path):
base_dir = os.path.expanduser(path)
self.read()
self.config.set('mmetering', 'base_dir', base_dir)
self.write()
| mit | Python |
26dc2aaa97d043efe4c82516afe1e5b09d49bd8a | add remove_subtitle | thongdong7/subfind,thongdong7/subfind,thongdong7/subfind,thongdong7/subfind | subfind/utils/subtitle.py | subfind/utils/subtitle.py | # Credit to https://github.com/callmehiphop/subtitle-extensions/blob/master/subtitle-extensions.json
import os
from os.path import join, exists
subtitle_extensions = set([
"aqt",
"gsub",
"jss",
"sub",
"ttxt",
"pjs",
"psb",
"rt",
"smi",
"slt",
"ssf",
"srt",
"ssa",
"ass",
"usf",
"idx",
"vtt"
])
def get_subtitle_ext(path):
if not path:
return None
for ext in subtitle_extensions:
if path.endswith('.%s' % ext):
return ext
return None
def remove_subtitle(path, release_name, lang):
for ext in subtitle_extensions:
sub_file = join(path, '%s.%s.%s' % (release_name, lang, ext))
if exists(sub_file):
os.unlink(sub_file)
| # Credit to https://github.com/callmehiphop/subtitle-extensions/blob/master/subtitle-extensions.json
subtitle_extensions = [
"aqt",
"gsub",
"jss",
"sub",
"ttxt",
"pjs",
"psb",
"rt",
"smi",
"slt",
"ssf",
"srt",
"ssa",
"ass",
"usf",
"idx",
"vtt"
]
def get_subtitle_ext(path):
if not path:
return None
for ext in subtitle_extensions:
if path.endswith('.%s' % ext):
return ext
return None
| mit | Python |
1b5e2c9b312d215a9c506064f90a010af6510c86 | fix monitor_disk_usage error | samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur | utils/monitor_disk_usage.py | utils/monitor_disk_usage.py | #!/usr/local/bin/python3
import sys
sys.path.append('/srv/newsblur')
import subprocess
import requests
from newsblur_web import settings
import socket
def main():
df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
output = df.communicate()[0].decode('utf-8')
device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
percent = int(percent.strip('%'))
admin_email = settings.ADMINS[0][1]
if True:
#if percent > 95:
requests.post(
"https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
auth=("api", settings.MAILGUN_ACCESS_KEY),
data={"from": "NewsBlur Disk Monitor: %s <admin@%s.newsblur.com>" % (hostname, hostname),
"to": [admin_email],
"subject": "%s hit %s%% disk usage!" % (hostname, percent),
"text": "Usage on %s: %s" % (hostname, output)})
print(" ---> Disk usage is NOT fine: %s / %s%% used" % (hostname, percent))
else:
print(" ---> Disk usage is fine: %s / %s%% used" % (hostname, percent))
if __name__ == '__main__':
main()
| #!/usr/local/bin/python3
import sys
sys.path.append('/srv/newsblur')
import subprocess
import requests
from newsblur_web import settings
import socket
def main():
df = subprocess.Popen(["df", "/"], stdout=subprocess.PIPE)
output = df.communicate()[0]
device, size, used, available, percent, mountpoint = output.split("\n")[1].split()
hostname = socket.gethostname()
percent = int(percent.strip('%'))
admin_email = settings.ADMINS[0][1]
if True:
#if percent > 95:
requests.post(
"https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
auth=("api", settings.MAILGUN_ACCESS_KEY),
data={"from": "NewsBlur Disk Monitor: %s <admin@%s.newsblur.com>" % (hostname, hostname),
"to": [admin_email],
"subject": "%s hit %s%% disk usage!" % (hostname, percent),
"text": "Usage on %s: %s" % (hostname, output)})
print(" ---> Disk usage is NOT fine: %s / %s%% used" % (hostname, percent))
else:
print(" ---> Disk usage is fine: %s / %s%% used" % (hostname, percent))
if __name__ == '__main__':
main()
| mit | Python |
546322c384de78254b166c32f994bbb3de85e248 | Revert "Make sure Cassandra is started only once" | bsc-dd/hecuba,bsc-dd/hecuba,bsc-dd/hecuba,bsc-dd/hecuba | hecuba_py/tests/__init__.py | hecuba_py/tests/__init__.py | import atexit
import ccmlib.cluster
import os
import sys
import tempfile
import logging
from distutils.util import strtobool
class TestConfig:
pass
test_config = TestConfig()
test_config.n_nodes = int(os.environ.get('TEST_CASSANDRA_N_NODES', '2'))
TEST_DEBUG = strtobool(os.environ.get("TEST_DEBUG", "False").lower())
if TEST_DEBUG:
logging.warning(("You are using TEST_DEBUG=True, a Cassandra cluster must be already running. "
"Keep in mind that the results of the test might be altered by data already existing."))
def set_ccm_cluster():
global test_config
test_config.ccm_cluster = ccmlib.cluster.Cluster(
tempfile.mkdtemp("tmp_data"),
'hecuba_test',
cassandra_version=os.environ.get('TEST_CASSANDRA_VERSION', '3.11.4'))
def set_up_default_cassandra():
global test_config
if hasattr(test_config, "ccm_cluster") and any(
map(lambda node: node.is_live(), test_config.ccm_cluster.nodes.values())):
return
set_ccm_cluster()
try:
test_config.ccm_cluster.populate(test_config.n_nodes).start(allow_root=True)
except Exception as a:
if TEST_DEBUG:
logging.warning("TEST_DEBUG: ignoring exception")
else:
raise a
if 'hecuba' in sys.modules:
import importlib
import hecuba
importlib.reload(hecuba)
@atexit.register
def turning_down_cassandra():
global test_config
if test_config is None or not hasattr(test_config, "ccm_cluster"):
return
print("Turning down Cassandra")
from hfetch import disconnectCassandra
disconnectCassandra()
test_config.ccm_cluster.stop()
test_config.ccm_cluster.clear()
set_up_default_cassandra()
| import atexit
import ccmlib.cluster
import os
import sys
import tempfile
import logging
from distutils.util import strtobool
class TestConfig:
n_nodes = int(os.environ.get('TEST_CASSANDRA_N_NODES', '2'))
ccm_cluster = None
TEST_DEBUG = strtobool(os.environ.get("TEST_DEBUG", "False").lower())
if TEST_DEBUG:
logging.warning(("You are using TEST_DEBUG=True, a Cassandra cluster must be already running. "
"Keep in mind that the results of the test might be altered by data already existing."))
def set_ccm_cluster():
TestConfig.ccm_cluster = ccmlib.cluster.Cluster(
tempfile.mkdtemp("tmp_data"),
'hecuba_test',
cassandra_version=os.environ.get('TEST_CASSANDRA_VERSION', '3.11.4'))
def set_up_default_cassandra():
if TestConfig.ccm_cluster and any(map(lambda node: node.is_live(), TestConfig.ccm_cluster.nodes.values())):
return
set_ccm_cluster()
try:
TestConfig.ccm_cluster.populate(TestConfig.n_nodes).start(allow_root=True)
except Exception as ex:
if TEST_DEBUG:
logging.warning("TEST_DEBUG: ignoring exception")
else:
raise ex
if 'hecuba' in sys.modules:
import importlib
import hecuba
importlib.reload(hecuba)
@atexit.register
def turning_down_cassandra():
if TestConfig is None or TestConfig.ccm_cluster is None:
return
print("Shutting down Cassandra")
from hfetch import disconnectCassandra
disconnectCassandra()
TestConfig.ccm_cluster.stop()
TestConfig.ccm_cluster.clear()
TestConfig.ccm_cluster = None
set_up_default_cassandra()
| apache-2.0 | Python |
a7600025ec407cea7c5a80bf7d470173d01bbdf7 | refactor and fix count_diacritics.py | AliOsm/arabic-text-diacritization | helpers/count_diacritics.py | helpers/count_diacritics.py | # -*- coding: utf-8 -*-
import argparse
import sys
import pickle as pkl
from os import listdir
from os.path import isfile, join
CONSTANTS_PATH = 'constants'
def count_each_dic(FILE_PATH):
each = dict()
with open(CONSTANTS_PATH + '/CLASSES_LIST.pickle', 'rb') as file:
CLASSES_LIST = pkl.load(file)
with open(CONSTANTS_PATH + '/ARABIC_LETTERS_LIST.pickle', 'rb') as file:
ARABIC_LETTERS_LIST = pkl.load(file)
with open(CONSTANTS_PATH + '/DIACRITICS_LIST.pickle', 'rb') as file:
DIACRITICS_LIST = pkl.load(file)
with open(FILE_PATH, 'r') as file:
lines = file.readlines()
for line in lines:
for idx, char in enumerate(line):
if char in DIACRITICS_LIST:
continue
char_diac = ''
if idx + 1 < len(line) and line[idx + 1] in DIACRITICS_LIST:
char_diac = line[idx + 1]
if idx + 2 < len(line) and line[idx + 2] in DIACRITICS_LIST and char_diac + line[idx + 2] in CLASSES_LIST:
char_diac += line[idx + 2]
elif idx + 2 < len(line) and line[idx + 2] in DIACRITICS_LIST and line[idx + 2] + char_diac in CLASSES_LIST:
char_diac = line[idx + 2] + char_diac
try:
each[char_diac] += 1
except:
each[char_diac] = 1
return each
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Count each diacritic frequency')
parser.add_argument('-in', '--file-path', help='File path to count from', required=True)
args = parser.parse_args()
each = count_each_dic(args.file_path)
name = {'' : 'No Diacritic ',
'َ' : 'Fatha ',
'ً' : 'Fathatah ',
'ُ' : 'Damma ',
'ٌ' : 'Dammatan ',
'ِ' : 'Kasra ',
'ٍ' : 'Kasratan ',
'ْ' : 'Sukun ',
'ّ' : 'Shaddah ',
'َّ' : 'Shaddah + Fatha ',
'ًّ' : 'Shaddah + Fathatah ',
'ُّ' : 'Shaddah + Damma ',
'ٌّ' : 'Shaddah + Dammatan ',
'ِّ' : 'Shaddah + Kasra ',
'ٍّ' : 'Shaddah + Kasratan '}
print(each)
for key in each:
print(name[key] + ':', each[key]) | # -*- coding: utf-8 -*-
import sys
import pickle as pkl
from os import listdir
from os.path import isfile, join
CONSTANTS_PATH = '../Constants'
each = dict()
def count_each_dic(FILE_PATH):
with open(CONSTANTS_PATH + '/CLASSES_LIST.pickle', 'rb') as file:
CLASSES_LIST = pkl.load(file)
with open(CONSTANTS_PATH + '/ARABIC_LETTERS_LIST.pickle', 'rb') as file:
ARABIC_LETTERS_LIST = pkl.load(file)
with open(CONSTANTS_PATH + '/DIACRITICS_LIST.pickle', 'rb') as file:
DIACRITICS_LIST = pkl.load(file)
with open(FILE_PATH, 'r') as f:
lines = f.readlines()
previous_char = ''
for line in lines:
for index in range(len(line)):
if line[index] in ARABIC_LETTERS_LIST:
if index + 1 < len(line) and line[index + 1] in DIACRITICS_LIST:
if index + 2 < len(line) and line[index + 1: index + 3] in CLASSES_LIST :
try:
each[line[index + 1: index + 2]] += 1
except:
each[line[index + 1: index + 2]] = 1
elif (index + 2 < len(line) and line[index + 1: index + 3] not in CLASSES_LIST) or index + 2 >= len(line):
try:
each[line[index + 1]] += 1
except:
each[line[index + 1]] = 1
if (index + 1 < len(line) and line[index + 1] not in DIACRITICS_LIST) or index + 1 >= len(line) :
try:
each['no_diac'] += 1
except:
each['no_diac'] = 1
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit('usage: python %s [FILE_PATH]' % sys.argv[0])
FILE_PATH = sys.argv[1]
count_each_dic(FILE_PATH)
for key in each:
print(key + ':', each[key])
| mit | Python |
99df87e5ee17605e8d3d05243e7ec50bb0aa1bb9 | Update __init__.py | yablochkin/vkontakte-viomg | vkontakte_viomg/__init__.py | vkontakte_viomg/__init__.py | from vkontakte_viomg.api import API, VKError, signature
from vkontakte_viomg.lock import LockTimeout
| from vkontakte_viomg.api import API, VKError, signature
| mit | Python |
e966ddd804eee2f1b053de6f0bbf943d80dccc59 | Fix get value more safe | uncovertruth/django-elastipymemcache | django_elastipymemcache/client.py | django_elastipymemcache/client.py | from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end.get(key) for key in end if end.get(key)}
get_multi = get_many
| from pymemcache.client.hash import HashClient
class Client(HashClient):
def get_many(self, keys, gets=False, *args, **kwargs):
# pymemcache's HashClient may returns {'key': False}
end = super(Client, self).get_many(keys, gets, args, kwargs)
return {key: end[key] for key in end if end[key]}
get_multi = get_many
| mit | Python |
e382b1687e5528fcb3e19e3f0d8b589e10bcb6ad | Remove unintentional pytest dependency. Fix #6398 (#6399) | dmlc/tvm,dmlc/tvm,sxjscience/tvm,tqchen/tvm,dmlc/tvm,tqchen/tvm,tqchen/tvm,dmlc/tvm,tqchen/tvm,Laurawly/tvm-1,sxjscience/tvm,dmlc/tvm,sxjscience/tvm,Laurawly/tvm-1,Laurawly/tvm-1,Laurawly/tvm-1,dmlc/tvm,tqchen/tvm,sxjscience/tvm,tqchen/tvm,tqchen/tvm,sxjscience/tvm,tqchen/tvm,dmlc/tvm,Laurawly/tvm-1,sxjscience/tvm,sxjscience/tvm,Laurawly/tvm-1,tqchen/tvm,sxjscience/tvm,Laurawly/tvm-1,Laurawly/tvm-1,tqchen/tvm,dmlc/tvm,Laurawly/tvm-1,Laurawly/tvm-1,sxjscience/tvm,dmlc/tvm | python/tvm/__init__.py | python/tvm/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=redefined-builtin, wildcard-import
"""TVM: Open Deep Learning Compiler Stack."""
import multiprocessing
import sys
import traceback
# top-level alias
# tvm._ffi
from ._ffi.base import TVMError, __version__
from ._ffi.runtime_ctypes import DataTypeCode, DataType
from ._ffi import register_object, register_func, register_extension, get_global_func
# top-level alias
# tvm.runtime
from .runtime.object import Object
from .runtime.ndarray import context, cpu, gpu, opencl, cl, vulkan, metal, mtl
from .runtime.ndarray import vpi, rocm, ext_dev, micro_dev, hexagon
from .runtime import ndarray as nd
# tvm.error
from . import error
# tvm.ir
from .ir import IRModule
from .ir import transform
from .ir import container
from . import ir
# tvm.tir
from . import tir
# tvm.target
from . import target
# tvm.te
from . import te
# tvm.driver
from .driver import build, lower
# tvm.parser
from . import parser
# tvm tir hybrid script
from . import hybrid
# others
from . import arith
# support infra
from . import support
# Contrib initializers
from .contrib import rocm as _rocm, nvcc as _nvcc, sdaccel as _sdaccel
def tvm_wrap_excepthook(exception_hook):
"""Wrap given excepthook with TVM additional work."""
def wrapper(exctype, value, trbk):
"""Clean subprocesses when TVM is interrupted."""
exception_hook(exctype, value, trbk)
if hasattr(multiprocessing, 'active_children'):
# pylint: disable=not-callable
for p in multiprocessing.active_children():
p.terminate()
return wrapper
sys.excepthook = tvm_wrap_excepthook(sys.excepthook)
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=redefined-builtin, wildcard-import
"""TVM: Open Deep Learning Compiler Stack."""
import multiprocessing
import sys
import traceback
# top-level alias
# tvm._ffi
from ._ffi.base import TVMError, __version__
from ._ffi.runtime_ctypes import DataTypeCode, DataType
from ._ffi import register_object, register_func, register_extension, get_global_func
# top-level alias
# tvm.runtime
from .runtime.object import Object
from .runtime.ndarray import context, cpu, gpu, opencl, cl, vulkan, metal, mtl
from .runtime.ndarray import vpi, rocm, ext_dev, micro_dev, hexagon
from .runtime import ndarray as nd
# tvm.error
from . import error
# tvm.ir
from .ir import IRModule
from .ir import transform
from .ir import container
from . import ir
# tvm.tir
from . import tir
# tvm.target
from . import target
# tvm.te
from . import te
# tvm.testing
from . import testing
# tvm.driver
from .driver import build, lower
# tvm.parser
from . import parser
# tvm tir hybrid script
from . import hybrid
# others
from . import arith
# support infra
from . import support
# Contrib initializers
from .contrib import rocm as _rocm, nvcc as _nvcc, sdaccel as _sdaccel
def tvm_wrap_excepthook(exception_hook):
"""Wrap given excepthook with TVM additional work."""
def wrapper(exctype, value, trbk):
"""Clean subprocesses when TVM is interrupted."""
exception_hook(exctype, value, trbk)
if hasattr(multiprocessing, 'active_children'):
# pylint: disable=not-callable
for p in multiprocessing.active_children():
p.terminate()
return wrapper
sys.excepthook = tvm_wrap_excepthook(sys.excepthook)
| apache-2.0 | Python |
25b755c68b8f87b16adf55c7003f55aca947d39c | Update parser.py | robcza/intelmq,robcza/intelmq,certtools/intelmq,certtools/intelmq,sch3m4/intelmq,aaronkaplan/intelmq,pkug/intelmq,robcza/intelmq,pkug/intelmq,robcza/intelmq,sch3m4/intelmq,aaronkaplan/intelmq,pkug/intelmq,pkug/intelmq,sch3m4/intelmq,aaronkaplan/intelmq,certtools/intelmq,sch3m4/intelmq | intelmq/bots/parsers/openbl/parser.py | intelmq/bots/parsers/openbl/parser.py | from datetime import datetime
from intelmq.lib.bot import Bot, sys
from intelmq.lib.event import Event
from intelmq.bots import utils
class OpenBLParserBot(Bot):
def process(self):
report = self.receive_message()
if report:
for row in report.split('\n'):
row = row.strip()
if len(row) == 0 or row.startswith('#'):
continue
row = row.split()
event = Event()
columns = ["source_ip", "source_time"]
for key, value in zip(columns, row):
if key == "source_time":
value = datetime.fromtimestamp(int(value)).strftime('%Y-%m-%d %H:%M:%S') + " UTC"
event.add(key, value.strip())
event.add('feed', 'openbl')
event.add('feed_url', 'http://www.openbl.org/lists/date_all.txt')
event.add('type', 'blacklist')
event = utils.parse_source_time(event, "source_time")
event = utils.generate_observation_time(event, "observation_time")
event = utils.generate_reported_fields(event)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = OpenBLParserBot(sys.argv[1])
bot.start()
| from datetime import datetime
from intelmq.lib.bot import Bot, sys
from intelmq.lib.event import Event
from intelmq.bots import utils
class OpenBLParserBot(Bot):
def process(self):
report = self.receive_message()
if report:
for row in report.split('\n'):
row = row.strip()
if len(row) == 0 or row.startswith('#'):
continue
row = row.split()
event = Event()
columns = ["source_ip", "source_time"]
for key, value in zip(columns, row):
if key == "source_time":
value = datetime.fromtimestamp(int(value)).strftime('%Y-%m-%d %H:%M:%S')
value += " UTC"
event.add(key, value.strip())
event.add('feed', 'openbl')
event.add('feed_url', 'http://www.openbl.org/lists/date_all.txt')
event.add('type', 'blacklist')
event = utils.parse_source_time(event, "source_time")
event = utils.generate_observation_time(event, "observation_time")
event = utils.generate_reported_fields(event)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = OpenBLParserBot(sys.argv[1])
bot.start()
| agpl-3.0 | Python |
91e459a0baf240429d40e47e46dc4e1b11350f68 | Add command processing decorator | kalafut/taskbug | taskbug.py | taskbug.py | import os
import readline
history = []
commands = {}
class Track:
def __init__(self, task=None):
self.tasks = []
self.push(task)
def push(self, task):
if task:
self.tasks.append(task)
def pop(self):
return self.tasks.pop()
def top(self):
if self.count() > 0:
return self.tasks[-1]
else:
return None
def count(self):
return len(self.tasks)
class TrackCollection:
def __init__(self):
self.tracks = [Track()]
self.current_track = 0
self.last_track = 0
def current(self):
return self.tracks[self.current_track]
def add(self, task):
self.tracks.append(Track(task))
self.current_track = len(self.tracks) - 1
def delete(self):
pass
tc = TrackCollection()
def command(keyword, helptext=None):
def decorator(f):
assert keyword not in commands
commands[keyword] = (f, helptext)
def wrapper(*args, **kwargs):
print keyword
return f(*args, **kwargs)
return wrapper
return decorator
@command('clear', "Clear Screen")
def clear(line):
os.system("clear")
@command('q')
def quit(line):
exit()
@command('?')
def help(line):
for k,f in commands.iteritems():
print "{} - {}".format(k, f[1])
def parse(line):
for k,f in commands.iteritems():
if line.startswith(k):
f[0](line)
routes = {
'q': quit,
'c': clear
}
while True:
t = tc.current()
print t.top()
cmd = raw_input('{} >'.format(t.count()))
parse(cmd)
# if cmd == "done":
# history.append(t.pop())
# elif cmd == "hist":
# print history
# elif cmd.startswith("n "):
# tc.add(cmd)
# elif cmd == "clear":
# os.system("clear")
# elif cmd == "q":
# pass
# else:
# t.push(cmd)
print
| history = []
class Track:
def __init__(self, task=None):
self.tasks = []
self.push(task)
def push(self, task):
if task:
self.tasks.append(task)
def pop(self):
return self.tasks.pop()
def top(self):
if self.count() > 0:
return self.tasks[-1]
else:
return None
def count(self):
return len(self.tasks)
class TrackCollection:
def __init__(self):
self.tracks = [Track()]
self.current_track = 0
self.last_track = 0
def current(self):
return self.tracks[self.current_track]
def add(self, task):
self.tracks.append(Track(task))
self.current_track = len(self.tracks) - 1
def delete(self):
pass
tc = TrackCollection()
while True:
t = tc.current()
print t.top()
cmd = raw_input('>' * (t.count() + 1))
if cmd == "done":
history.append(t.pop())
elif cmd == "hist":
print history
elif cmd.startswith("n "):
tc.add(cmd)
elif cmd == "q":
pass
else:
t.push(cmd)
print
| mit | Python |
1978b7ba1568114a8fbabdb96b06ad712db9f54f | Add comments describing base command class. | manylabs/flow,manylabs/flow | flow/commands/command.py | flow/commands/command.py | import abc
import subprocess
import json
import logging
#
# This is a base command class.
# Subclasses can override the exec_impl method for handling messages.
# Messages responses are automatically sent and the base class can
# do some error handling.
#
class Command(object):
__metaclass__ = abc.ABCMeta
# logging.basicConfig(level=logging.DEBUG)
#
# Create a new Command base class.
#
def __init__(self, flow, cmd_name, params):
self.flow = flow
self.cmd_name = cmd_name
self.params = params
self.response = None
#
# Clients call this to perform some operation
#
def exec_cmd(self):
try:
self.exec_impl()
except subprocess.CalledProcessError as e:
#
# Failure path
#
logging.debug("Error executing: %s %s" % (e.cmd, e.output));
if self.flow is not None:
self.flow.send_message(self.cmd_name + "_response",
{ 'success': False,
'message': 'Error executing command: %s' % (e.output) })
return
#
# Success path
#
logging.debug("Sending response: %s" % (self.response))
if self.flow is not None:
self.flow.send_message(self.cmd_name + "_response", self.response)
self.post_exec()
#
# Helper to execute subprocess commands
#
def shell_helper(self, cmd_arr):
output = subprocess.check_output(cmd_arr, stderr=subprocess.STDOUT)
return output
#
# Get the response object
#
def get_response(self):
return self.response
#
# Override this to perform some subclass specific operation
#
@abc.abstractmethod
def exec_impl(self):
""" Subclasses implement this method to perform specific operations """
return
#
# Override this to perform some subclass specific operation after
# exec_impl is called and response is sent.
#
def post_exec(self):
return
| import abc
import subprocess
import json
import logging
class Command(object):
__metaclass__ = abc.ABCMeta
# logging.basicConfig(level=logging.DEBUG)
#
# Create a new Command base class.
#
def __init__(self, flow, cmd_name, params):
self.flow = flow
self.cmd_name = cmd_name
self.params = params
self.response = None
#
# Clients call this to perform some operation
#
def exec_cmd(self):
try:
self.exec_impl()
except subprocess.CalledProcessError as e:
#
# Failure path
#
logging.debug("Error executing: %s %s" % (e.cmd, e.output));
if self.flow is not None:
self.flow.send_message(self.cmd_name + "_response",
{ 'success': False,
'message': 'Error executing command: %s' % (e.output) })
return
#
# Success path
#
logging.debug("Sending response: %s" % (self.response))
if self.flow is not None:
self.flow.send_message(self.cmd_name + "_response", self.response)
self.post_exec()
#
# Helper to execute subprocess commands
#
def shell_helper(self, cmd_arr):
output = subprocess.check_output(cmd_arr, stderr=subprocess.STDOUT)
return output
#
# Get the response object
#
def get_response(self):
return self.response
#
# Override this to perform some subclass specific operation
#
@abc.abstractmethod
def exec_impl(self):
""" Subclasses implement this method to perform specific operations """
return
#
# Override this to perform some subclass specific operation after
# exec_impl is called and response is sent.
#
def post_exec(self):
return
| mit | Python |
abb8b574395f1f4ab8be08887b7bcf54a42bb254 | Revert HTTPS | fluidinfo/fluidinfo-explorer,fluidinfo/fluidinfo-explorer | fluiddbexplorer/utils.py | fluiddbexplorer/utils.py | # -*- coding: utf-8 -*-
"""
fluiddbexplorer.utils
~~~~~~~~~~~~~~~~~~~~~
Utility functions
:copyright: 2010 by Fluidinfo Explorer Authors
:license: MIT, see LICENSE for more information
"""
import os
from flask import current_app, url_for
INSTANCE_URLS = {
'main': 'http://fluiddb.fluidinfo.com',
'fluidinfo': 'http://fluiddb.fluidinfo.com',
'fluiddb': 'http://fluiddb.fluidinfo.com',
'sandbox': 'http://sandbox.fluidinfo.com',
}
def get_instance_url(instance):
try:
url = INSTANCE_URLS[instance]
except KeyError:
url = 'http://' + instance
return url
def dated_url_for(endpoint, **values):
if endpoint == 'static':
filename = values.get('filename', None)
if filename:
file_path = os.path.join(current_app.root_path,
endpoint, filename)
values['q'] = int(os.stat(file_path).st_mtime)
return url_for(endpoint, **values)
| # -*- coding: utf-8 -*-
"""
fluiddbexplorer.utils
~~~~~~~~~~~~~~~~~~~~~
Utility functions
:copyright: 2010 by Fluidinfo Explorer Authors
:license: MIT, see LICENSE for more information
"""
import os
from flask import current_app, url_for
INSTANCE_URLS = {
'main': 'https://fluiddb.fluidinfo.com',
'fluidinfo': 'https://fluiddb.fluidinfo.com',
'fluiddb': 'https://fluiddb.fluidinfo.com',
'sandbox': 'https://sandbox.fluidinfo.com',
}
def get_instance_url(instance):
try:
url = INSTANCE_URLS[instance]
except KeyError:
url = 'http://' + instance
return url
def dated_url_for(endpoint, **values):
if endpoint == 'static':
filename = values.get('filename', None)
if filename:
file_path = os.path.join(current_app.root_path,
endpoint, filename)
values['q'] = int(os.stat(file_path).st_mtime)
return url_for(endpoint, **values)
| mit | Python |
da961ec779cc05b3629eb740a800122a493bfcaf | remove print statement | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | features/gestalten/templatetags/dismissible.py | features/gestalten/templatetags/dismissible.py | import json
from django import template
from django.db import models
from django.template import Library, loader
from ..models import GestaltSetting
register = Library()
@register.simple_tag(name='dismiss', takes_context=True)
def do_dismiss(context, name, category='dismissible', type='button'):
template_name = 'dismissible/%s.html' % type
options = {'name': name, 'category': category, 'value': 'dismissed'}
try:
dismissible_template = loader.get_template(template_name)
except template.TemplateDoesNotExist as err:
message = 'unknown dismissible template "%s"' % template_name
raise template.TemplateSyntaxError(message) from err
try:
options['gestalt'] = context['user'].gestalt.id
except (AttributeError, KeyError):
# no user is present, settings cannot be saved
return ''
return dismissible_template.render({
'payload': json.dumps(options)
})
@register.tag(name='dismissible')
def do_dismissible(parser, token):
nodelist = parser.parse(('enddismissible',))
parser.delete_first_token()
try:
name = token.split_contents()[1]
except IndexError:
raise template.TemplateSyntaxError('please provide a name for the dismissible')
if name[0] in ['\'', '"']:
name = name[1:-1]
return DismissibleNode(name, nodelist)
class DismissibleNode(template.Node):
def __init__(self, name, nodelist):
self.name = name
self.nodelist = nodelist
def render(self, context):
try:
gestalt = context['user'].gestalt
setting = GestaltSetting.objects.get(gestalt=gestalt, name=self.name)
should_render = setting.value != 'dismissed'
except (AttributeError, KeyError, models.ObjectDoesNotExist):
should_render = True
if should_render:
return '<div class="dismissible">%s</div>' % self.nodelist.render(context)
else:
return ''
| import json
from django import template
from django.db import models
from django.template import Library, loader
from ..models import GestaltSetting
register = Library()
@register.simple_tag(name='dismiss', takes_context=True)
def do_dismiss(context, name, category='dismissible', type='button'):
template_name = 'dismissible/%s.html' % type
options = {'name': name, 'category': category, 'value': 'dismissed'}
try:
dismissible_template = loader.get_template(template_name)
except template.TemplateDoesNotExist as err:
message = 'unknown dismissible template "%s"' % template_name
raise template.TemplateSyntaxError(message) from err
try:
options['gestalt'] = context['user'].gestalt.id
except (AttributeError, KeyError):
# no user is present, settings cannot be saved
return ''
return dismissible_template.render({
'payload': json.dumps(options)
})
@register.tag(name='dismissible')
def do_dismissible(parser, token):
nodelist = parser.parse(('enddismissible',))
parser.delete_first_token()
try:
name = token.split_contents()[1]
except IndexError:
raise template.TemplateSyntaxError('please provide a name for the dismissible')
if name[0] in ['\'', '"']:
name = name[1:-1]
return DismissibleNode(name, nodelist)
class DismissibleNode(template.Node):
def __init__(self, name, nodelist):
self.name = name
self.nodelist = nodelist
def render(self, context):
try:
gestalt = context['user'].gestalt
setting = GestaltSetting.objects.get(gestalt=gestalt, name=self.name)
should_render = setting.value != 'dismissed'
except (AttributeError, KeyError, models.ObjectDoesNotExist) as e:
print(type(e), e)
should_render = True
if should_render:
return '<div class="dismissible">%s</div>' % self.nodelist.render(context)
else:
return ''
| agpl-3.0 | Python |
46e52318c07a2021dafe549a84492e6cc60147e4 | Add new exception types | jonahbull/rabbitpy,gmr/rabbitpy,gmr/rabbitpy | rabbitpy/exceptions.py | rabbitpy/exceptions.py | """
rabbitpy Specific Exceptions
"""
from pamqp import specification
class ActionException(Exception):
def __repr__(self):
return self.args[0]
class ChannelClosedException(Exception):
def __repr__(self):
return 'Can not perform RPC requests on a closed channel, you must ' \
'create a new channel'
class ConnectionBlockedWarning(Warning):
def __repr__(self):
return 'Will not write to a connection that RabbitMQ is throttling'
class ConnectionException(Exception):
def __repr__(self):
return 'Unable to connect to the remote server %r' % self.args
class ConnectionResetException(Exception):
def __repr__(self):
return 'Connection was reset at socket level'
class EmptyExchangeNameError(Exception):
def __repr__(self):
return 'You must specify an Exchange name'
class EmptyQueueNameError(Exception):
def __repr__(self):
return 'You must specify a Queue name'
class RemoteClosedChannelException(Exception):
def __repr__(self):
return 'Channel %i was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1], self.args[2])
class RemoteClosedException(Exception):
def __repr__(self):
return 'Connection was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1])
class MessageReturnedException(Exception):
def __repr__(self):
return 'Message %s was returned by RabbitMQ: (%s) %s' % \
(self.args[0], self.args[1], self.args[2])
class NoActiveTransactionError(Exception):
def __repr__(self):
return 'No active transaction for the request, channel closed'
class TooManyChannelsError(Exception):
def __repr__(self):
return 'The maximum amount of negotiated channels has been reached'
class UnexpectedResponseError(Exception):
def __repr__(self):
return 'Received an expected response, expected %s, received %s' % \
(self.args[0], self.args[1])
# AMQP Exceptions
AMQPContentTooLarge = specification.AMQPContentTooLarge
AMQPNoRoute = specification.AMQPNoRoute
AMQPNoConsumers = specification.AMQPNoConsumers
AMQPConnectionForced = specification.AMQPConnectionForced
AMQPInvalidPath = specification.AMQPInvalidPath
AMQPAccessRefused = specification.AMQPAccessRefused
AMQPNotFound = specification.AMQPNotFound
AMQPResourceLocked = specification.AMQPResourceLocked
AMQPPreconditionFailed = specification.AMQPPreconditionFailed
AMQPFrameError = specification.AMQPFrameError
AMQPSyntaxError = specification.AMQPSyntaxError
AMQPCommandInvalid = specification.AMQPCommandInvalid
AMQPChannelError = specification.AMQPChannelError
AMQPUnexpectedFrame = specification.AMQPUnexpectedFrame
AMQPResourceError = specification.AMQPResourceError
AMQPNotAllowed = specification.AMQPNotAllowed
AMQPNotImplemented = specification.AMQPNotImplemented
AMQPInternalError = specification.AMQPInternalError
AMQP = {311: AMQPContentTooLarge,
312: AMQPNoRoute,
313: AMQPNoConsumers,
320: AMQPConnectionForced,
402: AMQPInvalidPath,
403: AMQPAccessRefused,
404: AMQPNotFound,
405: AMQPResourceLocked,
406: AMQPPreconditionFailed,
501: AMQPFrameError,
502: AMQPSyntaxError,
503: AMQPCommandInvalid,
504: AMQPChannelError,
505: AMQPUnexpectedFrame,
506: AMQPResourceError,
530: AMQPNotAllowed,
540: AMQPNotImplemented,
541: AMQPInternalError}
| """
rabbitpy Specific Exceptions
"""
class ActionException(Exception):
def __repr__(self):
return self.args[0]
class ChannelClosedException(Exception):
def __repr__(self):
return 'Can not perform RPC requests on a closed channel, you must ' \
'create a new channel'
class ConnectionBlockedWarning(Warning):
def __repr__(self):
return 'Will not write to a connection that RabbitMQ is throttling'
class ConnectionException(Exception):
def __repr__(self):
return 'Unable to connect to the remote server %r' % self.args
class EmptyExchangeNameError(Exception):
def __repr__(self):
return 'You must specify an Exchange name'
class EmptyQueueNameError(Exception):
def __repr__(self):
return 'You must specify a Queue name'
class RemoteClosedChannelException(Exception):
def __repr__(self):
return 'Channel %i was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1], self.args[2])
class RemoteClosedException(Exception):
def __repr__(self):
return 'Connection was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1])
class MessageReturnedException(Exception):
def __repr__(self):
return 'Message %s was returned by RabbitMQ: (%s) %s' % \
(self.args[0], self.args[1], self.args[2])
class NoActiveTransactionError(Exception):
def __repr__(self):
return 'No active transaction for the request, channel closed'
class TooManyChannelsError(Exception):
def __repr__(self):
return 'The maximum amount of negotiated channels has been reached'
class UnexpectedResponseError(Exception):
def __repr__(self):
return 'Received an expected response, expected %s, received %s' % \
(self.args[0], self.args[1])
| bsd-3-clause | Python |
0faf594ee8b0bf7168e89f26bbed08c25defc568 | allow config-rewrite | ulno/ulnoiot,ulno/ulnoiot,ulno/ulnoiot,ulno/micropython-extra-ulno,ulno/ulnoiot,ulno/ulnoiot,ulno/micropython-extra-ulno | lib/node_types/esp8266/freeze/uiot/_cfg.py | lib/node_types/esp8266/freeze/uiot/_cfg.py | # Configuration file management
#
_file = "/config.py"
def write():
global config
f = open(_file, "w")
f.write(
"""wifi_name = {}
wifi_pw = "{}"
netrepl = "{}"
mqtt_host = "{}"
mqtt_topic = "{}"
mqtt_user = "{}"
mqtt_pw = "{}"
""".format(
config.wifi_name,
config.wifi_pw,
config.key,
config.mqtt_host,
config.mqtt_topic,
config.mqtt_user,
config.mqtt_pw
) )
f.close()
def wifi(name, password, save=True):
# write wifi values to config file
global config
config.wifi_name = name
config.wifi_pw = password
if save:
write()
def netrepl(key, save=True):
global config
config.key = key
if save:
write()
def mqtt(host, topic, user=None, pw=None, save=True):
if user==None:
user=""
if pw==None:
pw=""
config.mqtt_host = host
config.mqtt_topic = topic
config.mqtt_user = user
config.mqtt_pw = pw
if save:
write()
# Try to import config
try:
import config
except ImportError:
class config():
pass
config.wifi_name = ""
config.wifi_pw = ""
config.netrepl = ""
config.mqtt_host = ""
config.mqtt_topic = ""
config.mqtt_user = ""
config.mqtt_pw = ""
| # Configuration file management
#
_file = "/config.py"
def write():
global config
f = open(_file, "w")
f.write(
"""wifi_name = {}
wifi_pw = {}
netrepl = {}
mqtt_host = {}
mqtt_topic = {}
mqtt_user = {}
mqtt_pw = {}
""".format(
config.wifi_name,
config.wifi_pw,
) )
f.close()
def wifi(name, password, save=True):
# write wifi values to config file
global config
config.wifi_name = name
config.wifi_pw = password
if save:
write()
def netrepl(key, save=True):
global config
config.key = key
if save:
write()
def mqtt(host, topic, user=None, pw=None, save=True):
if user==None:
user=""
if pw==None:
pw=""
config.mqtt_host = host
config.mqtt_topic = topic
config.mqtt_user = user
config.mqtt_pw = pw
if save:
write()
# Try to import config
try:
import config
except ImportError:
class config():
pass
config.wifi_name = ""
config.wifi_pw = ""
config.netrepl = ""
config.mqtt_host = ""
config.mqtt_topic = ""
config.mqtt_user = ""
config.mqtt_pw = ""
| mit | Python |
6389bebf4cac642d055fe1df3fe1ef4750f5861a | read data_server from environment in testing.py | GuillaumeMorini/roomfinder,Guismo1/roomfinder,GuillaumeMorini/roomfinder,Guismo1/roomfinder,Guismo1/roomfinder,GuillaumeMorini/roomfinder | testing.py | testing.py | import unittest
import sys
sys.path.append('roomfinder_web/roomfinder_web')
import web_server
class FlaskTestCase(unittest.TestCase):
def setUp(self):
sys.stderr.write('Setup testing.')
web_server.data_server = os.getenv("roomfinder_data_server")
web_server.app.config['TESTING'] = True
self.app = web_server.app.test_client()
def test_correct_http_response(self):
sys.stderr.write('Test HTTP GET / == 200.')
resp = self.app.get('/')
self.assertEquals(resp.status_code, 200)
def test_about_correct_http_response(self):
sys.stderr.write('Test HTTP GET /about == 200.')
resp = self.app.get('/about')
self.assertEquals(resp.status_code, 200)
def test_form_correct_http_response(self):
sys.stderr.write('Test HTTP GET /form == 200.')
resp = self.app.get('/form')
self.assertEquals(resp.status_code, 200)
# def test_correct_content(self):
# resp = self.app.get('/hello/world')
# self.assertEquals(resp.data, '"Hello World!"\n')
# def test_universe_correct_content(self):
# resp = self.app.get('/hello/universe')
# self.assertEquals(resp.data, '"Hello Universe!"\n')
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| import unittest
import sys
sys.path.append('roomfinder_web/roomfinder_web')
import web_server
class FlaskTestCase(unittest.TestCase):
def setUp(self):
sys.stderr.write('Setup testing.')
web_server.app.config['TESTING'] = True
self.app = web_server.app.test_client()
def test_correct_http_response(self):
sys.stderr.write('Test HTTP GET / == 200.')
resp = self.app.get('/')
self.assertEquals(resp.status_code, 200)
def test_about_correct_http_response(self):
sys.stderr.write('Test HTTP GET /about == 200.')
resp = self.app.get('/about')
self.assertEquals(resp.status_code, 200)
def test_form_correct_http_response(self):
sys.stderr.write('Test HTTP GET /form == 200.')
resp = self.app.get('/form')
self.assertEquals(resp.status_code, 200)
# def test_correct_content(self):
# resp = self.app.get('/hello/world')
# self.assertEquals(resp.data, '"Hello World!"\n')
# def test_universe_correct_content(self):
# resp = self.app.get('/hello/universe')
# self.assertEquals(resp.data, '"Hello Universe!"\n')
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
60daa277d5c3f1d9ab07ff5beccdaa323996068b | Add assignment tag util for rendering chunks to tpl context | ixc/glamkit-feincmstools,ixc/glamkit-feincmstools | feincmstools/templatetags/feincmstools_tags.py | feincmstools/templatetags/feincmstools_tags.py | import os
from django import template
from feincms.templatetags.feincms_tags import feincms_render_content
register = template.Library()
@register.filter
def is_parent_of(page1, page2):
"""
Determines whether a given page is the parent of another page
Example:
{% if page|is_parent_of:feincms_page %} ... {% endif %}
"""
if page1 is None:
return False
return (page1.tree_id == page2.tree_id and
page1.lft < page2.lft and
page1.rght > page2.rght)
@register.filter
def is_equal_or_parent_of(page1, page2):
return (page1.tree_id == page2.tree_id and
page1.lft <= page2.lft and
page1.rght >= page2.rght)
@register.filter
def is_sibling_of(page1, page2):
"""
Determines whether a given page is a sibling of another page
{% if page|is_sibling_of:feincms_page %} ... {% endif %}
"""
if page1 is None or page2 is None:
return False
return (page1.parent_id == page2.parent_id)
@register.filter
def get_extension(filename):
""" Return the extension from a file name """
return os.path.splitext(filename)[1][1:]
@register.assignment_tag(takes_context=True)
def feincms_render_content_as(context, content, request=None):
return feincms_render_content(context, content, request)
| import os
from django import template
register = template.Library()
@register.filter
def is_parent_of(page1, page2):
"""
Determines whether a given page is the parent of another page
Example:
{% if page|is_parent_of:feincms_page %} ... {% endif %}
"""
if page1 is None:
return False
return (page1.tree_id == page2.tree_id and
page1.lft < page2.lft and
page1.rght > page2.rght)
@register.filter
def is_equal_or_parent_of(page1, page2):
return (page1.tree_id == page2.tree_id and
page1.lft <= page2.lft and
page1.rght >= page2.rght)
@register.filter
def is_sibling_of(page1, page2):
"""
Determines whether a given page is a sibling of another page
{% if page|is_sibling_of:feincms_page %} ... {% endif %}
"""
if page1 is None or page2 is None:
return False
return (page1.parent_id == page2.parent_id)
@register.filter
def get_extension(filename):
""" Return the extension from a file name """
return os.path.splitext(filename)[1][1:]
| bsd-3-clause | Python |
9bb0953b7aec9dddeaa8ef3c271bde1195a9cea5 | Update 3txt_tag_init.py | FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code,FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code,FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code | wangyi/wk5/3txt_tag_init.py | wangyi/wk5/3txt_tag_init.py |
# coding: utf-8
# In[ ]:
# In[1]:
import re
import os
if __name__ == '__main__':
# Edit Area
# ===================================================================
root_path= r'/usr/yyy/wk5/txt_tagged/'
result_path = r'/usr/yyy/wk5/txt_tagged_init/'
# ===================================================================
if not os.path.exists(result_path):
os.mkdir(result_path)
file_list=os.listdir(root_path)
for f in file_list:
reader=open(root_path+f, 'rU')
content=reader.read()
word_tag_pair=re.findall('([A-Za-z]+)_([BCDEFGHIJMNOPRSTUVWXYZ$]+)_([A-Za-z@]+)', content)
reader.close()
des = open(result_path + f, 'wb')
for w in word_tag_pair:
des.write(w[2] + ' ')
des.close()
print f +' successfully.'
print '-----------------Done-----------------'
# In[ ]:
|
# coding: utf-8
# In[ ]:
# In[1]:
import re
import os
if __name__ == '__main__':
root_path= r'/usr/yyy/wk5/txt_tagged/'
result_path = r'/usr/yyy/wk5/txt_tagged_init/'
if not os.path.exists(result_path):
os.mkdir(result_path)
file_list=os.listdir(root_path)
for f in file_list:
reader=open(root_path+f, 'rU')
content=reader.read()
word_tag_pair=re.findall('([A-Za-z]+)_([BCDEFGHIJMNOPRSTUVWXYZ$]+)_([A-Za-z@]+)', content)
reader.close()
des = open(result_path + f, 'wb')
for w in word_tag_pair:
des.write(w[2] + ' ')
des.close()
print f +' successfully.'
print '-----------------Done-----------------'
# In[ ]:
| apache-2.0 | Python |
a388e1fd8dab1c745f750d08b75ae6ff612d8330 | Add more field types | rdmorganiser/rdmo,rdmorganiser/rdmo,rdmorganiser/rdmo | rdmo/core/constants.py | rdmo/core/constants.py | from django.utils.translation import gettext_lazy as _
VALUE_TYPE_TEXT = 'text'
VALUE_TYPE_URL = 'url'
VALUE_TYPE_INTEGER = 'integer'
VALUE_TYPE_FLOAT = 'float'
VALUE_TYPE_BOOLEAN = 'boolean'
VALUE_TYPE_DATETIME = 'datetime'
VALUE_TYPE_OPTIONS = 'option'
VALUE_TYPE_OPTIONS = 'email'
VALUE_TYPE_OPTIONS = 'phone'
VALUE_TYPE_FILE = 'file'
VALUE_TYPE_CHOICES = (
(VALUE_TYPE_TEXT, _('Text')),
(VALUE_TYPE_URL, _('URL')),
(VALUE_TYPE_INTEGER, _('Integer')),
(VALUE_TYPE_FLOAT, _('Float')),
(VALUE_TYPE_BOOLEAN, _('Boolean')),
(VALUE_TYPE_DATETIME, _('Datetime')),
(VALUE_TYPE_OPTIONS, _('Option')),
(VALUE_TYPE_FILE, _('File'))
)
PERMISSIONS = {
'condition': (
'conditions.add_condition', 'conditions.change_condition', 'conditions.delete_condition'
),
'attribute': (
'domain.add_attribute', 'domain.change_attribute', 'domain.delete_attribute'
),
'optionset': (
'options.add_optionset', 'options.change_optionset', 'options.delete_optionset'
),
'option': (
'options.add_option', 'options.change_option', 'options.delete_option'
),
'catalog': (
'questions.add_catalog', 'questions.change_catalog', 'questions.delete_catalog'
),
'section': (
'questions.add_section', 'questions.change_section', 'questions.delete_section'
),
'questionset': (
'questions.add_questionset', 'questions.change_questionset', 'questions.delete_questionset'
),
'question': (
'questions.add_question', 'questions.change_question', 'questions.delete_question'
),
'task': (
'tasks.add_task', 'tasks.change_task', 'tasks.delete_task'
),
'view': (
'views.add_view', 'views.change_view', 'views.delete_view'
)
}
| from django.utils.translation import gettext_lazy as _
VALUE_TYPE_TEXT = 'text'
VALUE_TYPE_URL = 'url'
VALUE_TYPE_INTEGER = 'integer'
VALUE_TYPE_FLOAT = 'float'
VALUE_TYPE_BOOLEAN = 'boolean'
VALUE_TYPE_DATETIME = 'datetime'
VALUE_TYPE_OPTIONS = 'option'
VALUE_TYPE_FILE = 'file'
VALUE_TYPE_CHOICES = (
(VALUE_TYPE_TEXT, _('Text')),
(VALUE_TYPE_URL, _('URL')),
(VALUE_TYPE_INTEGER, _('Integer')),
(VALUE_TYPE_FLOAT, _('Float')),
(VALUE_TYPE_BOOLEAN, _('Boolean')),
(VALUE_TYPE_DATETIME, _('Datetime')),
(VALUE_TYPE_OPTIONS, _('Option')),
(VALUE_TYPE_FILE, _('File'))
)
PERMISSIONS = {
'condition': (
'conditions.add_condition', 'conditions.change_condition', 'conditions.delete_condition'
),
'attribute': (
'domain.add_attribute', 'domain.change_attribute', 'domain.delete_attribute'
),
'optionset': (
'options.add_optionset', 'options.change_optionset', 'options.delete_optionset'
),
'option': (
'options.add_option', 'options.change_option', 'options.delete_option'
),
'catalog': (
'questions.add_catalog', 'questions.change_catalog', 'questions.delete_catalog'
),
'section': (
'questions.add_section', 'questions.change_section', 'questions.delete_section'
),
'questionset': (
'questions.add_questionset', 'questions.change_questionset', 'questions.delete_questionset'
),
'question': (
'questions.add_question', 'questions.change_question', 'questions.delete_question'
),
'task': (
'tasks.add_task', 'tasks.change_task', 'tasks.delete_task'
),
'view': (
'views.add_view', 'views.change_view', 'views.delete_view'
)
}
| apache-2.0 | Python |
56a842fae1f88ee80d7ac88071819d82ee470e9f | Fix path for static_dir | lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper | keeper/dashboard/templateproviders.py | keeper/dashboard/templateproviders.py | """Providers load templates from specific sources and provider a
Jinja2 rendering environment.
"""
from __future__ import annotations
from pathlib import Path
import jinja2
from .context import BuildContextList, EditionContextList, ProjectContext
from .jinjafilters import filter_simple_date
class BuiltinTemplateProvider:
"""A template provider for Keeper's built in dashboard templates."""
def __init__(self) -> None:
self.template_dir = Path(__file__).parent.joinpath("template")
self.static_dir = Path(__file__).parent.joinpath("static")
self.jinja_env = self._create_environment()
def _create_environment(self) -> jinja2.Environment:
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(self.template_dir),
autoescape=jinja2.select_autoescape(["html"]),
)
env.filters["simple_date"] = filter_simple_date
return env
def render_edition_dashboard(
self,
*,
project_context: ProjectContext,
edition_contexts: EditionContextList,
) -> str:
template = self.jinja_env.get_template("edition_dashboard.jinja")
return template.render(
project=project_context,
editions=edition_contexts,
asset_dir="../_dashboard-assets",
)
def render_build_dashboard(
self,
*,
project_context: ProjectContext,
build_contexts: BuildContextList,
) -> str:
template = self.jinja_env.get_template("build_dashboard.jinja")
return template.render(
project=project_context,
builds=build_contexts,
asset_dir="../_dashboard-assets",
)
| """Providers load templates from specific sources and provider a
Jinja2 rendering environment.
"""
from __future__ import annotations
from pathlib import Path
import jinja2
from .context import BuildContextList, EditionContextList, ProjectContext
from .jinjafilters import filter_simple_date
class BuiltinTemplateProvider:
"""A template provider for Keeper's built in dashboard templates."""
def __init__(self) -> None:
self.template_dir = Path(__file__).parent.joinpath("template")
self.static_dir = self.template_dir.joinpath("static")
self.jinja_env = self._create_environment()
def _create_environment(self) -> jinja2.Environment:
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(self.template_dir),
autoescape=jinja2.select_autoescape(["html"]),
)
env.filters["simple_date"] = filter_simple_date
return env
def render_edition_dashboard(
self,
*,
project_context: ProjectContext,
edition_contexts: EditionContextList,
) -> str:
template = self.jinja_env.get_template("edition_dashboard.jinja")
return template.render(
project=project_context,
editions=edition_contexts,
asset_dir="../_dashboard-assets",
)
def render_build_dashboard(
self,
*,
project_context: ProjectContext,
build_contexts: BuildContextList,
) -> str:
template = self.jinja_env.get_template("build_dashboard.jinja")
return template.render(
project=project_context,
builds=build_contexts,
asset_dir="../_dashboard-assets",
)
| mit | Python |
1e5102d8bafb3b4d2cb07822129397aa56f30bbe | Handle using the input function in python 2 for getting username for examples | michaelcho/python-devicecloud,michaelcho/python-devicecloud,digidotcom/python-devicecloud,brucetsao/python-devicecloud,ctrlaltdel/python-devicecloud,digidotcom/python-devicecloud,brucetsao/python-devicecloud,ctrlaltdel/python-devicecloud | devicecloud/examples/example_helpers.py | devicecloud/examples/example_helpers.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015 Digi International, Inc.
from getpass import getpass
import os
from six.moves import input
from devicecloud import DeviceCloud
def get_authenticated_dc():
while True:
base_url = os.environ.get('DC_BASE_URL', 'https://login.etherios.com')
username = os.environ.get('DC_USERNAME', None)
if not username:
username = input("username: ")
password = os.environ.get('DC_PASSWORD', None)
if not password:
password = getpass("password: ")
dc = DeviceCloud(username, password, base_url=base_url)
if dc.has_valid_credentials():
print("Credentials accepted!")
return dc
else:
print("Invalid username or password provided, try again")
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015 Digi International, Inc.
from getpass import getpass
import os
from devicecloud import DeviceCloud
def get_authenticated_dc():
while True:
base_url = os.environ.get('DC_BASE_URL', 'https://login.etherios.com')
username = os.environ.get('DC_USERNAME', None)
if not username:
username = input("username: ")
password = os.environ.get('DC_PASSWORD', None)
if not password:
password = getpass("password: ")
dc = DeviceCloud(username, password, base_url=base_url)
if dc.has_valid_credentials():
print("Credentials accepted!")
return dc
else:
print("Invalid username or password provided, try again")
| mpl-2.0 | Python |
f6d396157ad0b469f302506a2a55d5959d375d84 | use new string formatting style | Proteogenomics/trackhub-creator,Proteogenomics/trackhub-creator | toolbox.py | toolbox.py | #
# Author : Manuel Bernal Llinares
# Project : trackhub-creator
# Timestamp : 28-06-2017 11:03
# ---
# © 2017 Manuel Bernal Llinares <mbdebian@gmail.com>
# All rights reserved.
#
"""
This module implements some useful functions for the pipeline runner
"""
import os
import json
from exceptions import ToolBoxException
def read_json(json_file="json_file_not_specified.json"):
"""
Reads a json file and it returns its object representation, no extra checks
are performed on the file so, in case anything happens, the exception will
reach the caller
:param json_file: path to the file in json format to read
:return: an object representation of the data in the json file
"""
with open(json_file) as jf:
return json.load(jf)
def check_create_folders(folders):
"""
Check if folders exist, create them otherwise
:param folders: list of folder paths to check
:return: no return value
"""
for folder in folders:
if not os.path.isdir(folder):
if os.path.exists(folder):
raise ToolBoxException("'{}' is not a folder".format(folder))
else:
try:
os.mkdir(folder)
except Exception as e:
raise ToolBoxException(str(e))
if __name__ == '__main__':
print("ERROR: This script is part of a pipeline collection and it is not met to be run in stand alone mode")
| #
# Author : Manuel Bernal Llinares
# Project : trackhub-creator
# Timestamp : 28-06-2017 11:03
# ---
# © 2017 Manuel Bernal Llinares <mbdebian@gmail.com>
# All rights reserved.
#
"""
This module implements some useful functions for the pipeline runner
"""
import os
import json
from exceptions import ToolBoxException
def read_json(json_file="json_file_not_specified.json"):
"""
Reads a json file and it returns its object representation, no extra checks
are performed on the file so, in case anything happens, the exception will
reach the caller
:param json_file: path to the file in json format to read
:return: an object representation of the data in the json file
"""
with open(json_file) as jf:
return json.load(jf)
def check_create_folders(folders):
"""
Check if folders exist, create them otherwise
:param folders: list of folder paths to check
:return: no return value
"""
for folder in folders:
if not os.path.isdir(folder):
if os.path.exists(folder):
raise ToolBoxException(folder + " is not a folder")
else:
try:
os.mkdir(folder)
except Exception as e:
raise ToolBoxException(str(e))
if __name__ == '__main__':
print("ERROR: This script is part of a pipeline collection and it is not met to be run in stand alone mode")
| apache-2.0 | Python |
2d5e7a3c0804cd30db9c099842f5dc76ec9fb670 | Fix tests | barseghyanartur/flower,getupcloud/flower,ChinaQuants/flower,alexmojaki/flower,alexmojaki/flower,pygeek/flower,ucb-bar/bar-crawl-web,allengaller/flower,raphaelmerx/flower,pj/flower,barseghyanartur/flower,lucius-feng/flower,getupcloud/flower,raphaelmerx/flower,marrybird/flower,lucius-feng/flower,raphaelmerx/flower,asmodehn/flower,pj/flower,pygeek/flower,Lingling7/flower,asmodehn/flower,ucb-bar/bar-crawl-web,allengaller/flower,alexmojaki/flower,ucb-bar/bar-crawl-web,allengaller/flower,marrybird/flower,jzhou77/flower,asmodehn/flower,tellapart/flower,ChinaQuants/flower,lucius-feng/flower,pj/flower,jzhou77/flower,pygeek/flower,jzhou77/flower,getupcloud/flower,marrybird/flower,tellapart/flower,ChinaQuants/flower,barseghyanartur/flower,Lingling7/flower,Lingling7/flower,tellapart/flower | tests/__init__.py | tests/__init__.py | try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import tornado.testing
import celery
from flower.app import Flower
from flower.urls import handlers
from flower.events import Events
from flower.state import State
from flower.settings import APP_SETTINGS
class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):
def get_app(self, celery_app=None, events=None, state=None):
celery_app = celery_app or celery.Celery()
events = events or Events(celery_app)
state = state or State(celery_app)
self.app = Flower(celery_app=celery_app, events=events,
state=state, handlers=handlers, **APP_SETTINGS)
self.app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)
return self.app
def get(self, url, **kwargs):
return self.fetch(url, **kwargs)
def post(self, url, **kwargs):
if 'body' in kwargs and isinstance(kwargs['body'], dict):
kwargs['body'] = urlencode(kwargs['body'])
return self.fetch(url, method='POST', **kwargs)
| try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import tornado.testing
import celery
from flower.app import Flower
from flower.urls import handlers
from flower.events import Events
from flower.state import State
from flower.settings import APP_SETTINGS
class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):
def get_app(self, celery_app=None, events=None, state=None):
celery_app = celery_app or celery.Celery()
events = events or Events(celery_app)
state = state or State(celery_app)
self.app = Flower(celery_app=celery_app, events=events,
state=state, handlers=handlers, **APP_SETTINGS)
return self.app
def get(self, url, **kwargs):
return self.fetch(url, **kwargs)
def post(self, url, **kwargs):
if 'body' in kwargs and isinstance(kwargs['body'], dict):
kwargs['body'] = urlencode(kwargs['body'])
return self.fetch(url, method='POST', **kwargs)
| bsd-3-clause | Python |
70acac5b1494301b933acd00e88dfefe46715bd1 | Fix webbrowser.open_url() (#319) | GoogleCloudPlatform/django-cloud-deploy,GoogleCloudPlatform/django-cloud-deploy | django_cloud_deploy/utils/webbrowser.py | django_cloud_deploy/utils/webbrowser.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Webbrowser utility functions."""
import os
import webbrowser
def open_url(url: str):
"""Filter ugly terminal output when using webbrowser.
When using webbrowser.open the follow error is shown:
[20451:20471:0313/132752.481635:ERROR:browser_process_sub_thread.cc(209)]
Waited 3 ms for network service.
In attempts to improve UX when using the CLI, we are surpressing that
error with the following utility. For more information refer to:
http://man7.org/linux/man-pages/man2/dup.2.html
"""
# Save previous standard file descriptors
prev_stderr_fd = os.dup(2)
prev_stdout_fd = os.dup(1)
with open(os.devnull, 'wb') as f:
# redirect stderr and stdout to os.devnull
os.dup2(f.fileno(), 2)
os.dup2(f.fileno(), 1)
try:
webbrowser.open(url)
except webbrowser.Error:
# We are not able to do anything if any internal errors happen
# with webbrowser.open()
pass
finally:
# restore stdout and stderr
os.dup2(prev_stdout_fd, 1)
os.dup2(prev_stderr_fd, 2)
| # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Webbrowser utility functions."""
import os
import webbrowser
def open_url(url: str):
"""Filter ugly terminal output when using webbrowser.
When using webbrowser.open the follow error is shown:
[20451:20471:0313/132752.481635:ERROR:browser_process_sub_thread.cc(209)]
Waited 3 ms for network service.
In attempts to improve UX when using the CLI, we are surpressing that
error with the following utility. For more information refer to:
http://man7.org/linux/man-pages/man2/dup.2.html
"""
with open(os.devnull, 'wb') as f:
os.dup2(f.fileno(), 2)
webbrowser.open(url)
| apache-2.0 | Python |
72c0d83390dbbd6053f0af3518266c3d6b517401 | remove backticks from field names in query | rutube/django-sphinx-db,anatoliy-larin/django-sphinx-db | django_sphinx_db/backend/sphinx/base.py | django_sphinx_db/backend/sphinx/base.py | from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
def quote_name(self, name):
""" Disable backtick field escaping, for support of sphinx fields
started with @-sign."""
return name
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
| from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
| bsd-3-clause | Python |
a89c173181283bba6cdbde26af8dbba0c6c3760c | fix test fixture | DiamondLightSource/ispyb-api,DiamondLightSource/ispyb-api | tests/conftest.py | tests/conftest.py | # pytest configuration file
from __future__ import absolute_import, division, print_function
import os
import ispyb
import pytest
@pytest.fixture
def testconfig():
'''Return the path to a configuration file pointing to a test database.'''
config_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', 'conf', 'config.cfg'))
if not os.path.exists(config_file):
pytest.skip("No configuration file for test database found. Skipping database tests")
return config_file
@pytest.fixture
def testdb():
'''Return an ISPyB connection object for the test database configuration.'''
with ispyb.open(testconfig()) as conn:
yield conn
@pytest.fixture
def testconfig_ws():
'''Return the path to a configuration file pointing to a websocket
test instance.'''
config_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', 'conf', 'ws_config.cfg'))
if not os.path.exists(config_file):
pytest.skip("No configuration file for websocket tests found. Skipping websocket tests")
return config_file
| # pytest configuration file
from __future__ import absolute_import, division, print_function
import os
import pytest
@pytest.fixture
def testconfig():
'''Return the path to a configuration file pointing to a test database.'''
config_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', 'conf', 'config.cfg'))
if not os.path.exists(config_file):
pytest.skip("No configuration file for test database found. Skipping database tests")
return config_file
@pytest.fixture
def testdb():
'''Return an ISPyB connection object for the test database configuration.'''
with ispyb.open(testconfig()) as conn:
yield conn
@pytest.fixture
def testconfig_ws():
'''Return the path to a configuration file pointing to a websocket
test instance.'''
config_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', 'conf', 'ws_config.cfg'))
if not os.path.exists(config_file):
pytest.skip("No configuration file for websocket tests found. Skipping websocket tests")
return config_file
| apache-2.0 | Python |
4fc0b0c3f2775ad04e8f148016b2590a9ffab1df | Update errors.py | brokensound77/AlertLogic-event-api | src/errors.py | src/errors.py | """Custom errors"""
class AlApiError(Exception):
"""Base class for exceptions in this module"""
class NotAuthenticatedError(AlApiError):
"""Raise when a non 200 is returned"""
class CredentialsNotSet(AlApiError):
"""Placeholder for missing credentials"""
class EventNotRetrievedError(AlApiError):
"""Failed to retrieve event; most often because of authentication"""
| """Custome errors"""
class AlApiError(Exception):
"""Base class for exceptions in this module"""
class NotAuthenticatedError(AlApiError):
"""Raise when a non 200 is returned"""
class CredentialsNotSet(AlApiError):
"""Placeholder for missing credentials"""
class EventNotRetrievedError(AlApiError):
"""Failed to retrieve event; most often because of authentication"""
| mit | Python |
3c572de428b5ec63afc38945a7c4953318fbd5df | Add EXIF filter (#46) | tfeldmann/organize | tests/conftest.py | tests/conftest.py | import os
from typing import Iterable, Tuple, Union
from unittest.mock import patch
import pytest
from organize.compat import Path
from organize.utils import DotDict
TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__))
TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__))
def create_filesystem(tmp_path, files, config):
# create files
for f in files:
try:
name, content = f
except Exception:
name = f
content = ""
p = tmp_path / "files" / Path(name)
p.parent.mkdir(parents=True, exist_ok=True)
with p.open("w") as ptr:
ptr.write(content)
# create config
with (tmp_path / "config.yaml").open("w") as f:
f.write(config)
# change working directory
os.chdir(str(tmp_path))
def assertdir(path, *files):
os.chdir(str(path / "files"))
assert set(files) == set(str(x) for x in Path(".").glob("**/*") if x.is_file())
@pytest.fixture
def mock_exists():
with patch.object(Path, "exists") as mck:
yield mck
@pytest.fixture
def mock_samefile():
with patch.object(Path, "samefile") as mck:
yield mck
@pytest.fixture
def mock_rename():
with patch.object(Path, "rename") as mck:
yield mck
@pytest.fixture
def mock_move():
with patch("shutil.move") as mck:
yield mck
@pytest.fixture
def mock_copy():
with patch("shutil.copy2") as mck:
yield mck
@pytest.fixture
def mock_remove():
with patch("os.remove") as mck:
yield mck
@pytest.fixture
def mock_trash():
with patch("send2trash.send2trash") as mck:
yield mck
@pytest.fixture
def mock_parent():
with patch.object(Path, "parent") as mck:
yield mck
@pytest.fixture
def mock_mkdir():
with patch.object(Path, "mkdir") as mck:
yield mck
@pytest.fixture
def mock_echo():
with patch("organize.actions.Echo.print") as mck:
yield mck
| import os
from typing import Iterable, Tuple, Union
from unittest.mock import patch
import pytest
from organize.compat import Path
from organize.utils import DotDict
TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__))
def create_filesystem(tmp_path, files, config):
# create files
for f in files:
try:
name, content = f
except Exception:
name = f
content = ""
p = tmp_path / "files" / Path(name)
p.parent.mkdir(parents=True, exist_ok=True)
with p.open("w") as ptr:
ptr.write(content)
# create config
with (tmp_path / "config.yaml").open("w") as f:
f.write(config)
# change working directory
os.chdir(str(tmp_path))
def assertdir(path, *files):
os.chdir(str(path / "files"))
assert set(files) == set(str(x) for x in Path(".").glob("**/*") if x.is_file())
@pytest.fixture
def mock_exists():
with patch.object(Path, "exists") as mck:
yield mck
@pytest.fixture
def mock_samefile():
with patch.object(Path, "samefile") as mck:
yield mck
@pytest.fixture
def mock_rename():
with patch.object(Path, "rename") as mck:
yield mck
@pytest.fixture
def mock_move():
with patch("shutil.move") as mck:
yield mck
@pytest.fixture
def mock_copy():
with patch("shutil.copy2") as mck:
yield mck
@pytest.fixture
def mock_remove():
with patch("os.remove") as mck:
yield mck
@pytest.fixture
def mock_trash():
with patch("send2trash.send2trash") as mck:
yield mck
@pytest.fixture
def mock_parent():
with patch.object(Path, "parent") as mck:
yield mck
@pytest.fixture
def mock_mkdir():
with patch.object(Path, "mkdir") as mck:
yield mck
@pytest.fixture
def mock_echo():
with patch("organize.actions.Echo.print") as mck:
yield mck
| mit | Python |
73c9bc5010dfa2821b54abb57a93d06087acf00f | Add new properties. | jr-garcia/Engendro3D | e3d/gui/LayerClass.py | e3d/gui/LayerClass.py | from ..Base3DObjectClass import Attachable
from cycgkit.cgtypes import *
class Layer(Attachable):
"""
Virtual top level container for gui objects.
Only objects attached to this will be drawn 'above' the scene, in 2D mode.
"""
def __init__(self, ID, guiMan, visible=True):
"""
@rtype : Layer
@type visible: bool
"""
super(Layer, self).__init__(None)
self.ID = ID
self.visible = visible
self._pixelSize = vec3(1)
self._realScale = vec3(1)
self._inverseScale = vec3(1)
self._guiMan = guiMan
self._previousSize = self.pixelSize
self._onInit = True
self._rotationMatrix = mat4(1)
self._position = vec3(0)
self._updatePixelSize()
def _getis2D(self):
return True
_is2D = property(_getis2D)
def __repr__(self):
return self.ID
def attachTo(self, parent):
raise ValueError('layers can only contain 2D controls. Can not be attached.')
def _update(self):
if self.visible:
for c in reversed(self._children):
c._update()
def _updatePixelSize(self):
self._previousSize = self._pixelSize
x, y = self._guiMan._window.size
baseSize = vec3(x, y, 1)
self._pixelSize = baseSize
self._scale = self.realScale
if self._onInit:
self._previousSize = self._pixelSize
self._onInit = False
def getPixelSize(self):
return self._pixelSize
pixelSize = property(getPixelSize)
@property
def size(self):
return self._pixelSize
def _getRealScale(self):
return self._realScale
realScale = property(_getRealScale)
def _getInverseScale(self):
return self._inverseScale
inverseScale = property(_getInverseScale, doc='Scale needed to pass from local to window scale.')
def _resizeCallback(self):
self._updatePixelSize()
for c in reversed(self._children):
c._resizeCallback()
@property
def _offset(self):
return vec3(0)
| from ..Base3DObjectClass import Attachable
from cycgkit.cgtypes import *
class Layer(Attachable):
"""
Virtual top level container for gui objects.
Only objects attached to this will be drawn 'above' the scene, in 2D mode.
"""
def __init__(self, ID, guiMan, visible=True):
"""
@rtype : Layer
@type visible: bool
"""
super(Layer, self).__init__(None)
self.ID = ID
self.visible = visible
self._realSize = vec3(1)
self._realScale = vec3(1)
self._inverseScale = vec3(1)
self._guiMan = guiMan
self._previousSize = self.realSize
self._onInit = True
self._updateRealSize()
def _getis2D(self):
return True
_is2D = property(_getis2D)
def __repr__(self):
return self.ID
def attachTo(self, parent):
raise ValueError('layers can only contain 2D controls. Can not be attached.')
def _update(self):
if self.visible:
for c in reversed(self._children):
c._update()
def _updateRealSize(self):
self._previousSize = self._realSize
x, y = self._guiMan._window.size
baseSize = vec3(x, y, 1)
self._realSize = baseSize
if self._onInit:
self._previousSize = self._realSize
self._onInit = False
def getRealSize(self):
return self._realSize
realSize = property(getRealSize)
@property
def size(self):
return self._realSize
def _getRealScale(self):
return self._realScale
realScale = property(_getRealScale)
def _getInverseScale(self):
return self._inverseScale
inverseScale = property(_getInverseScale, doc='Scale needed to pass from local to window scale.')
def _resizeCallback(self):
self._updateRealSize()
for c in reversed(self._children):
c._resizeCallback()
| mit | Python |
95f5baeaff85e2f1f5f36e8cb4ffa871ab0c0a30 | Fix broken tutorial step | gilessbrown/wextracto,eBay/wextracto,justinvanwinkle/wextracto,gilessbrown/wextracto,eBay/wextracto,justinvanwinkle/wextracto | docs/samples/tutorial/step8/tutorial.py | docs/samples/tutorial/step8/tutorial.py | from wex.extractor import label, Attributes
from wex.response import Response
from wex.etree import xpath, text
attrs = Attributes(
name = xpath('//h1') | text,
country = xpath('//dd[@id="country"]') | text,
region = xpath('//dd[@id="region"]') | text
)
extract = label(Response.geturl)(attrs)
| from wex.extractor import label, Attributes
from wex.url import get_url
from wex.etree import xpath, text
attrs = Attributes(
name = xpath('//h1') | text,
country = xpath('//dd[@id="country"]') | text,
region = xpath('//dd[@id="region"]') | text
)
extract = label(get_url)(attrs)
| bsd-3-clause | Python |
7d09dd673bd2baeb30ba529498e7e71d6278f373 | Stop setting quiz finished boolean twice | kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees | src/nyc_trees/apps/home/training/views.py | src/nyc_trees/apps/home/training/views.py | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.db import transaction
from apps.home.training.utils import get_quiz_or_404
from apps.users.models import TrainingResult
def training_list_page(request):
from apps.home.training import training_summary
return {'training_steps': training_summary.get_context(request.user)}
def intro_quiz(request):
quiz_slug = 'intro_quiz'
quiz = get_quiz_or_404(quiz_slug)
return {
'quiz': quiz,
'quiz_slug': quiz_slug
}
@transaction.atomic
def complete_quiz(request):
from apps.home.training.types import Quiz
quiz_slug = 'intro_quiz'
quiz = get_quiz_or_404(quiz_slug)
user = request.user
answers = Quiz.extract_answers(request.POST)
score = quiz.score(answers)
correct_answers = list(quiz.correct_answers(answers))
quiz_summary = list(_quiz_summary(quiz, answers))
result, created = TrainingResult.objects.get_or_create(
user_id=user.id,
module_name=quiz_slug)
best_score = max(result.score, score)
result.score = best_score
result.save()
passed_quiz = (score >= quiz.passing_score)
return {
'quiz': quiz,
'quiz_slug': quiz_slug,
'quiz_summary': quiz_summary,
'score': score,
'best_score': best_score,
'passed_quiz': passed_quiz,
'correct_answers': correct_answers
}
def _quiz_summary(quiz, submitted_answers):
for i, question in enumerate(quiz.questions):
candidate = submitted_answers[i]
yield {
'question': question,
'submitted_answers': [ans for i, ans in enumerate(question.choices)
if i in candidate],
'is_correct': question.is_correct(candidate)
}
def training_instructions(request):
user = request.user
step1_complete = user.online_training_complete
step2_complete = step1_complete and user.field_training_complete
return {
'step1_complete': step1_complete,
'step2_complete': step2_complete,
}
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.db import transaction
from apps.home.training.utils import get_quiz_or_404
from apps.users.models import TrainingResult
def training_list_page(request):
from apps.home.training import training_summary
return {'training_steps': training_summary.get_context(request.user)}
def intro_quiz(request):
quiz_slug = 'intro_quiz'
quiz = get_quiz_or_404(quiz_slug)
return {
'quiz': quiz,
'quiz_slug': quiz_slug
}
@transaction.atomic
def complete_quiz(request):
from apps.home.training.types import Quiz
quiz_slug = 'intro_quiz'
quiz = get_quiz_or_404(quiz_slug)
user = request.user
answers = Quiz.extract_answers(request.POST)
score = quiz.score(answers)
correct_answers = list(quiz.correct_answers(answers))
quiz_summary = list(_quiz_summary(quiz, answers))
result, created = TrainingResult.objects.get_or_create(
user_id=user.id,
module_name=quiz_slug)
best_score = max(result.score, score)
result.score = best_score
result.save()
passed_quiz = (score >= quiz.passing_score)
passed_quiz_bool = 'training_finished_%s' % quiz_slug
if passed_quiz and getattr(user, passed_quiz_bool) is False:
setattr(user, passed_quiz_bool, True)
user.save()
return {
'quiz': quiz,
'quiz_slug': quiz_slug,
'quiz_summary': quiz_summary,
'score': score,
'best_score': best_score,
'passed_quiz': passed_quiz,
'correct_answers': correct_answers
}
def _quiz_summary(quiz, submitted_answers):
for i, question in enumerate(quiz.questions):
candidate = submitted_answers[i]
yield {
'question': question,
'submitted_answers': [ans for i, ans in enumerate(question.choices)
if i in candidate],
'is_correct': question.is_correct(candidate)
}
def training_instructions(request):
user = request.user
step1_complete = user.online_training_complete
step2_complete = step1_complete and user.field_training_complete
return {
'step1_complete': step1_complete,
'step2_complete': step2_complete,
}
| agpl-3.0 | Python |
f5342ae1a1f8473626a59bb6987e9f072d393a0b | Fix tag url regex | devunt/hydrocarbon,devunt/hydrocarbon,devunt/hydrocarbon | board/urls/default.py | board/urls/default.py | from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.utils.functional import curry
from django.views.defaults import permission_denied
from redactor.forms import FileForm, ImageForm
from board.views import HCLoginView, HCSettingsView, HCSignupView, HCRedactorUploadView
from board.views import IndexView, JSConstantsView, PostListByTagView
urlpatterns = patterns('',
url(r'^account/login/$', HCLoginView.as_view(), name='account_login'),
url(r'^account/signup/$', HCSignupView.as_view(), name='account_signup'),
url(r'^account/settings/$', HCSettingsView.as_view(), name='account_settings'),
url(r'^redactor/upload/image/(?P<upload_to>.*)',
HCRedactorUploadView.as_view(form_class=ImageForm),
name='redactor_upload_image'),
url(r'^redactor/upload/file/(?P<upload_to>.*)',
HCRedactorUploadView.as_view(form_class=FileForm),
name='redactor_upload_file'),
url(r'^account/', include('account.urls')),
url(r'^search/', include('haystack.urls')),
url(r'^redactor/', include('redactor.urls')),
url(r'^constants.js', JSConstantsView.as_view(), name='constants.js'),
url(r'^t/(?P<tag>.+)/', PostListByTagView.as_view(), name='post_list_by_tag'),
url(r'^x/', include('board.urls.ajax')),
url(r'^u/(?P<user>\d+)/', include('board.urls.user')),
url(r'^b/(?P<board>\w+)/', include('board.urls.board')),
url(r'^(?P<pk>\d+)/', include('board.urls.post')),
url(r'^$', IndexView.as_view()),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
handler403 = curry(permission_denied, template_name='errors/403.html')
| from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.utils.functional import curry
from django.views.defaults import permission_denied
from redactor.forms import FileForm, ImageForm
from board.views import HCLoginView, HCSettingsView, HCSignupView, HCRedactorUploadView
from board.views import IndexView, JSConstantsView, PostListByTagView
urlpatterns = patterns('',
url(r'^account/login/$', HCLoginView.as_view(), name='account_login'),
url(r'^account/signup/$', HCSignupView.as_view(), name='account_signup'),
url(r'^account/settings/$', HCSettingsView.as_view(), name='account_settings'),
url(r'^redactor/upload/image/(?P<upload_to>.*)',
HCRedactorUploadView.as_view(form_class=ImageForm),
name='redactor_upload_image'),
url(r'^redactor/upload/file/(?P<upload_to>.*)',
HCRedactorUploadView.as_view(form_class=FileForm),
name='redactor_upload_file'),
url(r'^account/', include('account.urls')),
url(r'^search/', include('haystack.urls')),
url(r'^redactor/', include('redactor.urls')),
url(r'^constants.js', JSConstantsView.as_view(), name='constants.js'),
url(r'^t/(?P<tag>\w+)/', PostListByTagView.as_view(), name='post_list_by_tag'),
url(r'^x/', include('board.urls.ajax')),
url(r'^u/(?P<user>\d+)/', include('board.urls.user')),
url(r'^b/(?P<board>\w+)/', include('board.urls.board')),
url(r'^(?P<pk>\d+)/', include('board.urls.post')),
url(r'^$', IndexView.as_view()),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
handler403 = curry(permission_denied, template_name='errors/403.html')
| mit | Python |
0cfeeb68177969125adab4cad33d28137b7710ed | Clean up incorrect comment | denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase | apps/storybase_taxonomy/views.py | apps/storybase_taxonomy/views.py | from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from django.http import Http404
from storybase_story.views import ExplorerRedirectView, StoryListView, StoryListWidgetView
from storybase_taxonomy.models import Category, Tag
class CategoryExplorerRedirectView(ExplorerRedirectView):
def get_query_string(self, **kwargs):
slug = kwargs.get('slug', None)
if not slug:
return None
try:
category = Category.objects.get(categorytranslation__slug=slug)
return "topics=%d" % category.pk
except Category.DoesNotExist:
return None
class CategoryWidgetView(StoryListWidgetView):
queryset = Category.objects.all()
related_field_name = "topics"
def get_slug_field_name(self):
return 'categorytranslation__slug'
def get_object(self):
queryset = self.get_queryset()
slug = self.kwargs.get('slug', None)
if slug is not None:
queryset = queryset.filter(categorytranslation__slug=slug)
else:
raise AssertionError("%s must be called with a slug" %
(self.__class__.__name__))
try:
obj = queryset.get()
except ObjectDoesNotExist:
raise Http404(
_(u"No %(verbose_name)s found matching the query") %
{'verbose_name': queryset.model._meta.verbose_name})
return obj
class TagViewMixin(object):
queryset = Tag.objects.all()
related_field_name = "tags"
class TagStoryListView(TagViewMixin, StoryListView):
pass
class TagWidgetView(TagViewMixin, StoryListWidgetView):
pass
| from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from django.http import Http404
from storybase_story.views import ExplorerRedirectView, StoryListView, StoryListWidgetView
from storybase_taxonomy.models import Category, Tag
class CategoryExplorerRedirectView(ExplorerRedirectView):
def get_query_string(self, **kwargs):
slug = kwargs.get('slug', None)
if not slug:
return None
try:
category = Category.objects.get(categorytranslation__slug=slug)
return "topics=%d" % category.pk
except Category.DoesNotExist:
return None
class CategoryWidgetView(StoryListWidgetView):
queryset = Category.objects.all()
related_field_name = "topics"
def get_slug_field_name(self):
return 'categorytranslation__slug'
def get_object(self):
"""Retrieve the object by it's model specific id instead of pk"""
queryset = self.get_queryset()
slug = self.kwargs.get('slug', None)
if slug is not None:
queryset = queryset.filter(categorytranslation__slug=slug)
else:
raise AssertionError("%s must be called with a slug" %
(self.__class__.__name__))
try:
obj = queryset.get()
except ObjectDoesNotExist:
raise Http404(
_(u"No %(verbose_name)s found matching the query") %
{'verbose_name': queryset.model._meta.verbose_name})
return obj
class TagViewMixin(object):
queryset = Tag.objects.all()
related_field_name = "tags"
class TagStoryListView(TagViewMixin, StoryListView):
pass
class TagWidgetView(TagViewMixin, StoryListWidgetView):
pass
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.