commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
f48f010391f2f17acaa62470e8ae532cf832dd36 | make script as an importable module | hemartin/hn-responder | hn-responder.py | hn-responder.py | #!/usr/bin/python
# coding: utf-8
import json
import re
import requests
from Queue import Queue
from requests_oauthlib import OAuth1
from threading import Thread
from firebase import firebase
with open('twitter-secrets.json') as twitter_secrets:
s = json.load(twitter_secrets)
access_token = s['access_token']
access_token_secret = s['access_token_secret']
consumer_key = s['consumer_key']
consumer_secret = s['consumer_secret']
auth = OAuth1(consumer_key, consumer_secret, access_token, access_token_secret)
firebase = firebase.FirebaseApplication(
'https://hacker-news.firebaseio.com', None)
work_queue = Queue()
def read_tweets():
url = 'https://userstream.twitter.com/1.1/user.json'
r = requests.get(url, auth=auth, stream=True)
for line in r.iter_lines():
if line:
tweet = json.loads(line.decode('utf-8'))
work_queue.put(tweet)
def run():
while True:
tweet = work_queue.get()
if 'user' in tweet and tweet['user']['id'] == 14335498:
tweet_id = tweet['id']
title = re.sub('https://t.co.*$', '', tweet['text']).strip()
link = tweet['entities']['urls'][0]['expanded_url']
hackernews_id = find_hackernews_id(title)
post_tweet(tweet_id, title, link, hackernews_id)
work_queue.task_done()
def find_hackernews_id(title):
top_ids = firebase.get('/v0/topstories', None)
for item_id in top_ids:
item = firebase.get('/v0/item', item_id)
if 'title' in item and item['title'].strip() == title:
return item_id
def trim_tweet(title, max_len):
trimmed_title = title
if len(title) > max_len:
trimmed_title = title[:max_len - 3] + '...'
return trimmed_title
def post_tweet(tweet_id, title, link, hackernews_id):
url = 'https://api.twitter.com/1.1/statuses/update.json'
trimmed_tweet = trim_tweet(title, 92)
tweet_text = (
trimmed_tweet +
' ' + link +
' https://news.ycombinator.com/item?id=' + str(hackernews_id))
params = {'status': tweet_text}
requests.post(url, auth=auth, data=params)
def main():
t = Thread(target=run)
t.daemon = True
t.start()
read_tweets()
work_queue.join()
if __name__ == '__main__':
main()
| #!/usr/bin/python
import json
import re
import requests
from Queue import Queue
from requests_oauthlib import OAuth1
from threading import Thread
from firebase import firebase
with open('twitter-secrets.json') as twitter_secrets:
s = json.load(twitter_secrets)
access_token = s['access_token']
access_token_secret = s['access_token_secret']
consumer_key = s['consumer_key']
consumer_secret = s['consumer_secret']
auth = OAuth1(consumer_key, consumer_secret, access_token, access_token_secret)
firebase = firebase.FirebaseApplication(
'https://hacker-news.firebaseio.com', None)
work_queue = Queue()
def read_tweets():
url = 'https://userstream.twitter.com/1.1/user.json'
r = requests.get(url, auth=auth, stream=True)
for line in r.iter_lines():
if line:
tweet = json.loads(line.decode('utf-8'))
work_queue.put(tweet)
def run():
while True:
tweet = work_queue.get()
if 'user' in tweet and tweet['user']['id'] == 14335498:
tweet_id = tweet['id']
title = re.sub('https://t.co.*$', '', tweet['text']).strip()
link = tweet['entities']['urls'][0]['expanded_url']
hackernews_id = find_hackernews_id(title)
post_tweet(tweet_id, title, link, hackernews_id)
work_queue.task_done()
def find_hackernews_id(title):
top_ids = firebase.get('/v0/topstories', None)
for item_id in top_ids:
item = firebase.get('/v0/item', item_id)
if 'title' in item and item['title'].strip() == title:
return item_id
def trim_tweet(title, max_len):
trimmed_title = title
if len(title) > max_len:
trimmed_title = title[:max_len - 3] + '...'
return trimmed_title
def post_tweet(tweet_id, title, link, hackernews_id):
url = 'https://api.twitter.com/1.1/statuses/update.json'
trimmed_tweet = trim_tweet(title, 92)
tweet_text = (
trimmed_tweet +
' ' + link +
' https://news.ycombinator.com/item?id=' + str(hackernews_id))
params = {'status': tweet_text}
requests.post(url, auth=auth, data=params)
t = Thread(target=run)
t.daemon = True
t.start()
read_tweets()
work_queue.join()
| unlicense | Python |
e16f5d6b3837027e1b5178568701e1d4ff53287d | fix apikey | BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi,BlueHouseLab/sms-openapi | python/conf.py | python/conf.py | # -*- coding: utf-8 -*-
import base64
appid = 'example'
apikey = 'c5dd7e7dkjp27377l903c42c032b413b'
address = 'api.bluehouselab.com'
sender = '01000000000' # FIXME - MUST BE CHANGED AS REAL PHONE NUMBER
receivers = ['01000000000', ] # FIXME - MUST BE CHANGED AS REAL PHONE NUMBERS
content = u'λλ μ 리λ₯Ό λ¨Ήμ μ μμ΄μ. κ·Έλλ μνμ§ μμμ'
credential = "Basic "+base64.encodestring(appid+':'+apikey).strip()
headers = {
"Content-type": "application/json;charset=utf-8",
"Authorization": credential,
}
print "Warnings: python httplib doesn't verify SSL certificate."
print " python-requests module is recommended for secured API call"
| # -*- coding: utf-8 -*-
import base64
appid = 'example'
apikey = 'c5dd7e7dkjp27377l903c42c032b413b'
appid = 'test01'
apikey = '213d7e220f5c11e487a7040113e09101'
address = 'api.bluehouselab.com'
sender = '01000000000' # FIXME - MUST BE CHANGED AS REAL PHONE NUMBER
receivers = ['01000000000', ] # FIXME - MUST BE CHANGED AS REAL PHONE NUMBERS
content = u'λλ μ 리λ₯Ό λ¨Ήμ μ μμ΄μ. κ·Έλλ μνμ§ μμμ'
credential = "Basic "+base64.encodestring(appid+':'+apikey).strip()
headers = {
"Content-type": "application/json;charset=utf-8",
"Authorization": credential,
}
print "Warnings: python httplib doesn't verify SSL certificate."
print " python-requests module is recommended for secured API call"
| apache-2.0 | Python |
03a7fbe198ce8ed723d92af976fe03c7986098c8 | update imported path of utils module | faycheng/tpl,faycheng/tpl | tests/test_constructor.py | tests/test_constructor.py | import sys
import pytest
from tpl.path import TempFile
from tpl.constructor import construct_context_from_py
from candy.utils.faker import *
def test_construct_context_from_py():
with TempFile(random_lower_string(), 'py') as f:
f.fd.write('def construct():return {"key":"value"}')
f.fd.flush()
context = construct_context_from_py(f.path)
assert isinstance(context, dict)
assert context.get('key') == 'value'
sys.path_importer_cache.clear()
with TempFile(random_lower_string(), 'py') as f:
f.fd.write(random_lower_string())
f.fd.flush()
with pytest.raises(NameError):
construct_context_from_py(f.path)
sys.path_importer_cache.clear()
with TempFile(random_lower_string(), 'py') as f:
f.fd.write('def construct():return None')
f.fd.flush()
context = construct_context_from_py(f.path)
assert context is None
| import sys
import pytest
from .faker import *
from tpl.path import TempFile
from tpl.constructor import construct_context_from_py
def test_construct_context_from_py():
with TempFile(random_lower_string(), 'py') as f:
f.fd.write('def construct():return {"key":"value"}')
f.fd.flush()
context = construct_context_from_py(f.path)
assert isinstance(context, dict)
assert context.get('key') == 'value'
sys.path_importer_cache.clear()
with TempFile(random_lower_string(), 'py') as f:
f.fd.write(random_lower_string())
f.fd.flush()
with pytest.raises(NameError):
construct_context_from_py(f.path)
sys.path_importer_cache.clear()
with TempFile(random_lower_string(), 'py') as f:
f.fd.write('def construct():return None')
f.fd.flush()
context = construct_context_from_py(f.path)
assert context is None
| mit | Python |
9b6cd0cd7b86f63b365d2c1968fd19495b707758 | Exit with `1` if no fixed command selected | nvbn/thefuck,mlk/thefuck,scorphus/thefuck,mlk/thefuck,SimenB/thefuck,scorphus/thefuck,SimenB/thefuck,Clpsplug/thefuck,Clpsplug/thefuck,nvbn/thefuck | thefuck/main.py | thefuck/main.py | # Initialize output before importing any module, that can use colorama.
from .system import init_output
init_output()
from argparse import ArgumentParser
from warnings import warn
from pprint import pformat
import sys
from . import logs, types
from .shells import shell
from .conf import settings
from .corrector import get_corrected_commands
from .exceptions import EmptyCommand
from .utils import get_installation_info, get_alias
from .ui import select_command
def fix_command():
"""Fixes previous command. Used when `thefuck` called without arguments."""
settings.init()
with logs.debug_time('Total'):
logs.debug(u'Run with settings: {}'.format(pformat(settings)))
try:
command = types.Command.from_raw_script(sys.argv[1:])
except EmptyCommand:
logs.debug('Empty command, nothing to do')
return
corrected_commands = get_corrected_commands(command)
selected_command = select_command(corrected_commands)
if selected_command:
selected_command.run(command)
else:
sys.exit(1)
def print_alias(entry_point=True):
"""Prints alias for current shell."""
if entry_point:
warn('`thefuck-alias` is deprecated, use `thefuck --alias` instead.')
position = 1
else:
position = 2
alias = get_alias()
if len(sys.argv) > position:
alias = sys.argv[position]
print(shell.app_alias(alias))
def how_to_configure_alias():
"""Shows useful information about how-to configure alias.
It'll be only visible when user type fuck and when alias isn't configured.
"""
settings.init()
logs.how_to_configure_alias(shell.how_to_configure())
def main():
parser = ArgumentParser(prog='thefuck')
version = get_installation_info().version
parser.add_argument(
'-v', '--version',
action='version',
version='The Fuck {} using Python {}'.format(
version, sys.version.split()[0]))
parser.add_argument('-a', '--alias',
action='store_true',
help='[custom-alias-name] prints alias for current shell')
parser.add_argument('command',
nargs='*',
help='command that should be fixed')
known_args = parser.parse_args(sys.argv[1:2])
if known_args.alias:
print_alias(False)
elif known_args.command:
fix_command()
else:
parser.print_usage()
| # Initialize output before importing any module, that can use colorama.
from .system import init_output
init_output()
from argparse import ArgumentParser
from warnings import warn
from pprint import pformat
import sys
from . import logs, types
from .shells import shell
from .conf import settings
from .corrector import get_corrected_commands
from .exceptions import EmptyCommand
from .utils import get_installation_info, get_alias
from .ui import select_command
def fix_command():
"""Fixes previous command. Used when `thefuck` called without arguments."""
settings.init()
with logs.debug_time('Total'):
logs.debug(u'Run with settings: {}'.format(pformat(settings)))
try:
command = types.Command.from_raw_script(sys.argv[1:])
except EmptyCommand:
logs.debug('Empty command, nothing to do')
return
corrected_commands = get_corrected_commands(command)
selected_command = select_command(corrected_commands)
if selected_command:
selected_command.run(command)
def print_alias(entry_point=True):
"""Prints alias for current shell."""
if entry_point:
warn('`thefuck-alias` is deprecated, use `thefuck --alias` instead.')
position = 1
else:
position = 2
alias = get_alias()
if len(sys.argv) > position:
alias = sys.argv[position]
print(shell.app_alias(alias))
def how_to_configure_alias():
"""Shows useful information about how-to configure alias.
It'll be only visible when user type fuck and when alias isn't configured.
"""
settings.init()
logs.how_to_configure_alias(shell.how_to_configure())
def main():
parser = ArgumentParser(prog='thefuck')
version = get_installation_info().version
parser.add_argument(
'-v', '--version',
action='version',
version='The Fuck {} using Python {}'.format(
version, sys.version.split()[0]))
parser.add_argument('-a', '--alias',
action='store_true',
help='[custom-alias-name] prints alias for current shell')
parser.add_argument('command',
nargs='*',
help='command that should be fixed')
known_args = parser.parse_args(sys.argv[1:2])
if known_args.alias:
print_alias(False)
elif known_args.command:
fix_command()
else:
parser.print_usage()
| mit | Python |
9b439e486e4a4932f0e76bfb51a43db7e2de8ea8 | fix id resolver tests | biothings/biothings_explorer,biothings/biothings_explorer | tests/test_id_resolver.py | tests/test_id_resolver.py | import unittest
from biothings_explorer.id_resolver import IDResolver
class TestIDResolver(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.idr = IDResolver()
def test_genes_as_input(self):
inputs = [(['CXCR4', 'CXCR3', 'CXCR2'], 'SYMBOL', 'Gene')]
res = self.idr.resolve_ids(inputs)
self.assertIn('SYMBOL:CXCR4', res)
self.assertEqual(res['SYMBOL:CXCR4']['ENSEMBL'], ['ENSG00000121966'])
self.assertEqual(res['SYMBOL:CXCR4']['NCBIGene'], ['7852'])
self.assertEqual(res['SYMBOL:CXCR4']['SYMBOL'], ['CXCR4'])
def test_wrong_gene_SYMBOLss(self):
inputs = [(['CXCR4', 'CXCR3', 'CXCR2', '123'], 'SYMBOL', 'Gene')]
res = self.idr.resolve_ids(inputs)
self.assertIn('SYMBOL:123', res)
self.assertDictEqual(res['SYMBOL:123'], {'SYMBOL': ['123']})
if __name__ == '__main__':
unittest.main() | import unittest
from biothings_explorer.id_resolver import IDResolver
class TestIDResolver(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.idr = IDResolver()
def test_input_genes(self):
inputs = [(['CXCR4', 'CXCR3', 'CXCR2'], 'symbol', 'Gene')]
res = self.idr.resolve_ids(inputs)
self.assertIn('symbol:CXCR4', res)
self.assertEqual(res['symbol:CXCR4']['ensembl'], ['ENSG00000121966'])
self.assertEqual(res['symbol:CXCR4']['entrez'], ['7852'])
self.assertEqual(res['symbol:CXCR4']['symbol'], ['CXCR4'])
def test_wrong_gene_symbolss(self):
inputs = [(['CXCR4', 'CXCR3', 'CXCR2', '123'], 'symbol', 'Gene')]
res = self.idr.resolve_ids(inputs)
self.assertIn('symbol:123', res)
self.assertDictEqual(res['symbol:123'], {'symbol': ['123']})
if __name__ == '__main__':
unittest.main() | apache-2.0 | Python |
9a021c0cece225435ed650063210af4e815c9b2d | Make sure we shutdown the listener in the api service on SIGINT (CTRL+C). | dennybaa/st2,grengojbo/st2,Itxaka/st2,armab/st2,lakshmi-kannan/st2,Plexxi/st2,peak6/st2,lakshmi-kannan/st2,grengojbo/st2,nzlosh/st2,peak6/st2,nzlosh/st2,alfasin/st2,jtopjian/st2,punalpatel/st2,lakshmi-kannan/st2,Plexxi/st2,dennybaa/st2,peak6/st2,pixelrebel/st2,punalpatel/st2,jtopjian/st2,emedvedev/st2,StackStorm/st2,emedvedev/st2,pinterb/st2,Itxaka/st2,Itxaka/st2,armab/st2,pinterb/st2,emedvedev/st2,nzlosh/st2,jtopjian/st2,Plexxi/st2,tonybaloney/st2,grengojbo/st2,pixelrebel/st2,StackStorm/st2,alfasin/st2,StackStorm/st2,dennybaa/st2,pinterb/st2,pixelrebel/st2,Plexxi/st2,tonybaloney/st2,armab/st2,nzlosh/st2,punalpatel/st2,StackStorm/st2,alfasin/st2,tonybaloney/st2 | st2api/st2api/cmd/api.py | st2api/st2api/cmd/api.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import eventlet
from oslo.config import cfg
from eventlet import wsgi
from st2common import log as logging
from st2common.models.db import db_setup
from st2common.models.db import db_teardown
from st2common.constants.logging import DEFAULT_LOGGING_CONF_PATH
from st2api.listener import get_listener_if_set
from st2api import config
from st2api import app
eventlet.monkey_patch(
os=True,
select=True,
socket=True,
thread=False if '--use-debugger' in sys.argv else True,
time=True)
LOG = logging.getLogger(__name__)
def _setup():
# Set up logger which logs everything which happens during and before config
# parsing to sys.stdout
logging.setup(DEFAULT_LOGGING_CONF_PATH)
# 1. parse args to setup config.
config.parse_args()
# 2. setup logging.
logging.setup(cfg.CONF.api.logging)
# 3. all other setup which requires config to be parsed and logging to
# be correctly setup.
username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None
db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
username=username, password=password)
def _run_server():
host = cfg.CONF.api.host
port = cfg.CONF.api.port
LOG.info('(PID=%s) ST2 API is serving on http://%s:%s.', os.getpid(), host, port)
wsgi.server(eventlet.listen((host, port)), app.setup_app())
return 0
def _teardown():
db_teardown()
def main():
try:
_setup()
return _run_server()
except SystemExit as exit_code:
sys.exit(exit_code)
except KeyboardInterrupt:
listener = get_listener_if_set()
if listener:
listener.shutdown()
except:
LOG.exception('(PID=%s) ST2 API quit due to exception.', os.getpid())
return 1
finally:
_teardown()
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import eventlet
from oslo.config import cfg
from eventlet import wsgi
from st2common import log as logging
from st2common.models.db import db_setup
from st2common.models.db import db_teardown
from st2common.constants.logging import DEFAULT_LOGGING_CONF_PATH
from st2api import config
from st2api import app
eventlet.monkey_patch(
os=True,
select=True,
socket=True,
thread=False if '--use-debugger' in sys.argv else True,
time=True)
LOG = logging.getLogger(__name__)
def _setup():
# Set up logger which logs everything which happens during and before config
# parsing to sys.stdout
logging.setup(DEFAULT_LOGGING_CONF_PATH)
# 1. parse args to setup config.
config.parse_args()
# 2. setup logging.
logging.setup(cfg.CONF.api.logging)
# 3. all other setup which requires config to be parsed and logging to
# be correctly setup.
username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None
db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
username=username, password=password)
def _run_server():
host = cfg.CONF.api.host
port = cfg.CONF.api.port
LOG.info('(PID=%s) ST2 API is serving on http://%s:%s.', os.getpid(), host, port)
wsgi.server(eventlet.listen((host, port)), app.setup_app())
return 0
def _teardown():
db_teardown()
def main():
try:
_setup()
return _run_server()
except SystemExit as exit_code:
sys.exit(exit_code)
except:
LOG.exception('(PID=%s) ST2 API quit due to exception.', os.getpid())
return 1
finally:
_teardown()
| apache-2.0 | Python |
375d762dcb92aa80f6d1b8d51f919c22a70e943f | Update huffman_code.py | autumind/huffman_code | huffman_code.py | huffman_code.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Jul 22, 2015
@author: shenzb
'''
from Node import Node
import Utils
if __name__ == '__main__':
char_list = Utils.stringToPriorityQueue("hello, world!")
#print(char_list);
''' Initialize PriorityQueue '''
nodeList = []
for ch, cnt in char_list:
#print(ch, cnt)
node = Node(ch, cnt)
nodeList.append(node)
'''
Get a Huffman tree.
'''
huffmanTree = Utils.generateHuffmanTree(nodeList)
'''
Get Huffman codes.
'''
huffmanCode = dict()
Utils.generateHuffmanCodes(huffmanTree, "", huffmanCode)
'''
Print Huffman codes.
'''
for ch, cnt in char_list:
#pass
print(ch, huffmanCode[ch])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Jul 22, 2015
@author: shenzb
'''
from Node import Node
import Utils
if __name__ == '__main__':
char_list = Utils.stringToPriorityQueue("hello, world!")
#print(char_list);
''' Initialize PriorityQueue '''
nodeList = []
for ch, cnt in char_list:
#print(ch, cnt)
node = Node(ch, cnt)
nodeList.append(node)
'''
Get a Huffman tree.
'''
huffmanTree = Utils.generateHuffmanTree(nodeList)
'''
Get Huffman codes.
'''
huffmanCode = Utils.generateHuffmanCodes(huffmanTree)
'''
Print Huffman codes.
'''
for ch in char_list:
print(ch, huffmanCode[ch])
| apache-2.0 | Python |
3c287abe48588917b115df92927647b33670a431 | Use textarea widget when editing note text | 11craft/immercv,11craft/immercv,11craft/immercv,11craft/immercv | immercv/cvgraph/forms.py | immercv/cvgraph/forms.py | from django import forms
from neomodel import DateProperty, IntegerProperty, StringProperty
from immercv.cvgraph.models import Note
PROPERTY_TYPE_FIELDS = {
# property-type: form-field-type,
DateProperty: forms.DateField,
IntegerProperty: forms.IntegerField,
StringProperty: forms.CharField,
}
NODE_CLASS_FIELD_WIDGETS = {
# node-property: field-widget,
Note.text: forms.Textarea,
}
def field_for_node_property(node_or_class, property_name):
if isinstance(node_or_class, type):
node_class = node_or_class
else:
node_class = node_or_class.__class__
property = getattr(node_class, property_name)
property_type = type(property)
field_class = PROPERTY_TYPE_FIELDS[property_type]
return field_class(
required=property.required,
widget=NODE_CLASS_FIELD_WIDGETS.get(property),
)
def form_for_node_properties(node_or_class, property_names, data=None):
if isinstance(node_or_class, type):
current_values = {}
else:
current_values = {
property_name: getattr(node_or_class, property_name)
for property_name in property_names
}
class NodeForm(forms.Form):
def __init__(self, *args, **kwargs):
super(NodeForm, self).__init__(*args, **kwargs)
# Create fields in the order given in `property_names`.
for property_name in property_names:
field = field_for_node_property(node_or_class, property_name)
self.fields[property_name] = field
return NodeForm(initial=current_values, data=data)
| from django import forms
from neomodel import DateProperty, IntegerProperty, StringProperty
PROPERTY_TYPE_FIELDS = {
# property-type: form-field-type,
DateProperty: forms.DateField,
IntegerProperty: forms.IntegerField,
StringProperty: forms.CharField,
}
def field_for_node_property(node_or_class, property_name, **kwargs):
if isinstance(node_or_class, type):
node_class = node_or_class
else:
node_class = node_or_class.__class__
property = getattr(node_class, property_name)
property_type = type(property)
kwargs['required'] = kwargs.pop('required', property.required)
return PROPERTY_TYPE_FIELDS[property_type](**kwargs)
def form_for_node_properties(node_or_class, property_names, data=None):
if isinstance(node_or_class, type):
current_values = {}
else:
current_values = {
property_name: getattr(node_or_class, property_name)
for property_name in property_names
}
class NodeForm(forms.Form):
def __init__(self, *args, **kwargs):
super(NodeForm, self).__init__(*args, **kwargs)
# Create fields in the order given in `property_names`.
for property_name in property_names:
field = field_for_node_property(node_or_class, property_name)
self.fields[property_name] = field
return NodeForm(initial=current_values, data=data)
| bsd-3-clause | Python |
a9af20426d7f30008d7cda863ba855679734c8a8 | Add a temporary fix for Django ticket 29917 | matthiask/django-cabinet,matthiask/django-cabinet,matthiask/django-cabinet | tests/testapp/settings.py | tests/testapp/settings.py | from __future__ import absolute_import, unicode_literals
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.admin",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.staticfiles",
"django.contrib.messages",
"testapp",
"cabinet",
"imagefield",
]
MEDIA_ROOT = "/media/"
STATIC_URL = "/static/"
BASEDIR = os.path.dirname(__file__)
MEDIA_ROOT = os.path.join(BASEDIR, "media/")
STATIC_ROOT = os.path.join(BASEDIR, "static/")
SECRET_KEY = "supersikret"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
MIDDLEWARE = (
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.locale.LocaleMiddleware",
)
ROOT_URLCONF = "testapp.urls"
SILENCED_SYSTEM_CHECKS = ["admin.E130"] # https://code.djangoproject.com/ticket/29917
| from __future__ import absolute_import, unicode_literals
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.admin",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.staticfiles",
"django.contrib.messages",
"testapp",
"cabinet",
"imagefield",
]
MEDIA_ROOT = "/media/"
STATIC_URL = "/static/"
BASEDIR = os.path.dirname(__file__)
MEDIA_ROOT = os.path.join(BASEDIR, "media/")
STATIC_ROOT = os.path.join(BASEDIR, "static/")
SECRET_KEY = "supersikret"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
MIDDLEWARE = (
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.locale.LocaleMiddleware",
)
ROOT_URLCONF = "testapp.urls"
| bsd-3-clause | Python |
75e65f6144820ba216166eee4417912394f8cdca | Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything. | kans/birgo,kans/birgo,kans/birgo,kans/birgo,kans/birgo | tools/bundle.py | tools/bundle.py | #!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
| #!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
| apache-2.0 | Python |
d1dd5242d3139b964f7a5485afd5a8ed672c4600 | Add tests for paragraph transform | mwilliamson/python-mammoth | tests/transforms_tests.py | tests/transforms_tests.py | import cobble
from nose.tools import assert_equal, istest
from mammoth import documents, transforms
from mammoth.transforms import get_descendants, get_descendants_of_type, _each_element
@istest
class ParagraphTests(object):
@istest
def paragraph_is_transformed(self):
paragraph = documents.paragraph(children=[])
result = transforms.paragraph(lambda _: documents.tab())(paragraph)
assert_equal(documents.tab(), result)
@istest
def non_paragraph_elements_are_not_transformed(self):
run = documents.run(children=[])
result = transforms.paragraph(lambda _: documents.tab())(run)
assert_equal(documents.run(children=[]), result)
@istest
class EachElementTests(object):
@istest
def all_descendants_are_transformed(self):
@cobble.data
class Count(documents.HasChildren):
count = cobble.field()
root = Count(count=None, children=[
Count(count=None, children=[
Count(count=None, children=[]),
]),
])
current_count = [0]
def set_count(node):
current_count[0] += 1
return node.copy(count=current_count[0])
result = _each_element(set_count)(root)
assert_equal(Count(count=3, children=[
Count(count=2, children=[
Count(count=1, children=[]),
]),
]), result)
@istest
class GetDescendantsTests(object):
@istest
def returns_nothing_if_element_type_has_no_children(self):
assert_equal([], get_descendants(documents.tab()))
@istest
def returns_nothing_if_element_has_empty_children(self):
assert_equal([], get_descendants(documents.paragraph(children=[])))
@istest
def includes_children(self):
children = [documents.text("child 1"), documents.text("child 2")]
element = documents.paragraph(children=children)
assert_equal(children, get_descendants(element))
@istest
def includes_indirect_descendants(self):
grandchild = documents.text("grandchild")
child = documents.run(children=[grandchild])
element = documents.paragraph(children=[child])
assert_equal([grandchild, child], get_descendants(element))
@istest
class GetDescendantsOfTypeTests(object):
@istest
def filters_descendants_to_type(self):
tab = documents.tab()
run = documents.run(children=[])
element = documents.paragraph(children=[tab, run])
assert_equal([run], get_descendants_of_type(element, documents.Run))
| import cobble
from nose.tools import assert_equal, istest
from mammoth import documents
from mammoth.transforms import get_descendants, get_descendants_of_type, _each_element
@istest
class EachElementTests(object):
@istest
def all_descendants_are_transformed(self):
@cobble.data
class Count(documents.HasChildren):
count = cobble.field()
root = Count(count=None, children=[
Count(count=None, children=[
Count(count=None, children=[]),
]),
])
current_count = [0]
def set_count(node):
current_count[0] += 1
return node.copy(count=current_count[0])
result = _each_element(set_count)(root)
assert_equal(Count(count=3, children=[
Count(count=2, children=[
Count(count=1, children=[]),
]),
]), result)
@istest
class GetDescendantsTests(object):
@istest
def returns_nothing_if_element_type_has_no_children(self):
assert_equal([], get_descendants(documents.tab()))
@istest
def returns_nothing_if_element_has_empty_children(self):
assert_equal([], get_descendants(documents.paragraph(children=[])))
@istest
def includes_children(self):
children = [documents.text("child 1"), documents.text("child 2")]
element = documents.paragraph(children=children)
assert_equal(children, get_descendants(element))
@istest
def includes_indirect_descendants(self):
grandchild = documents.text("grandchild")
child = documents.run(children=[grandchild])
element = documents.paragraph(children=[child])
assert_equal([grandchild, child], get_descendants(element))
@istest
class GetDescendantsOfTypeTests(object):
@istest
def filters_descendants_to_type(self):
tab = documents.tab()
run = documents.run(children=[])
element = documents.paragraph(children=[tab, run])
assert_equal([run], get_descendants_of_type(element, documents.Run))
| bsd-2-clause | Python |
7aef001c8ecff2b750106346a7b387b17b17b2aa | Clean up docstrings in hxl.__init__ per #118 | HXLStandard/libhxl-python,HXLStandard/libhxl-python | hxl/__init__.py | hxl/__init__.py | """Support library for the Humanitarian Exchange Language (HXL), version 1.0.
This library provides support for parsing, validating, cleaning, and
transforming humanitarian datasets that follow the HXL standard. Its
use will be familiar to developers who have worked with libraries like
U{JQuery<https://jquery.com>}. Here's an example::
import hxl
data = hxl.data('data.xlsx', True).with_rows('org=UNICEF').without_columns('contact').count('country')
This two-line script performs the following actions:
1. Load and parse the spreadsheet C{data.xlsx} (the library can also
load from any URL, and understands how to read Google
spreadsheets or U{CKAN<http://ckan.org>} resources).
2. Filter out all rows where the value "UNICEF" doesn't appear under
the C{#org} (organisation) hashtag.
3. Strip out personally-identifiable information by removing all
columns with the C{#contact} hashtag (e.g. C{#contact+name},
C{#contact+phone}, C{#contact+email}).
4. Produce a report showing the number of times each unique
C{#country} appears in the resulting sheet (e.g. to count the number
of activities being conducted by UNICEF in each country).
To get started, read the documentation for the L{hxl.data} function
and the L{hxl.model.Dataset} class. The various filters are also
available as command-line scripts, so you could perform the same
actions as above in a shell script like this::
$ cat data.xlsx | hxlselect -q 'org=UNICEF' | hxlcut -x contact | hxlcount -t country
@author: David Megginson
@organization: UNOCHA
@license: Public Domain
@date: Started August 2014
@see: U{http://hxlstandard.org} for the HXL data standard
@see: U{https://proxy.hxlstandard.org} for web-based deployment of this library
"""
import sys
if sys.version_info < (3,):
raise RuntimeError("libhxl requires Python 3 or higher")
# Flatten out common items for easier access
from hxl.common import HXLException
from hxl.model import TagPattern, Dataset, Column, Row, RowQuery
from hxl.io import data, tagger, HXLParseException, write_hxl, make_input, from_spec
from hxl.validation import schema, HXLValidationException
# end
| """Support library for the Humanitarian Exchange Language (HXL), version 1.0.
This library provides support for parsing, validating, cleaning, and
transforming humanitarian datasets that follow the HXL standard. Its
use will be familiar to developers who have worked with libraries like
U{JQuery<https://jquery.com>}. Here's an example::
import hxl
data = hxl.data('data.xlsx', True).with_rows('org=UNICEF').without_columns('contact').count('country')
This two-line script performs the following actions:
1. Load and parse the spreadsheet C{data.xlsx} (the library can also
load from any URL, and understands how to read Google
spreadsheets or U{CKAN<http://ckan.org>} resources).
2. Filter out all rows where the value "UNICEF" doesn't appear under
the C{#org} (organisation) hashtag.
3. Strip out personally-identifiable information by removing all
columns with the C{#contact} hashtag (e.g. C{#contact+name},
C{#contact+phone}, C{#contact+email}).
4. Produce a report showing the number of times each unique
C{#country} appears in the resulting sheet (e.g. to count the number
of activities being conducted by UNICEF in each country).
To get started, read the documentation for the L{hxl.data} function
and the L{hxl.model.Dataset} class. The various filters are also
available as command-line scripts, so you could perform the same
actions as above in a shell script like this::
$ cat data.xlsx | hxlselect -q 'org=UNICEF' | hxlcut -x contact | hxlcount -t country
@author: David Megginson
@organization: UNOCHA
@license: Public Domain
@date: Started August 2014
@see: U{hxlstandard.org} for the HXL data standard
@see: U{proxy.hxlstandard.org} for web-based deployment of this library
"""
import sys
if sys.version_info < (3,):
raise RuntimeError("libhxl requires Python 3 or higher")
# Flatten out common items for easier access
from hxl.common import HXLException
from hxl.model import TagPattern, Dataset, Column, Row, RowQuery
from hxl.io import data, tagger, HXLParseException, write_hxl, make_input, from_spec
from hxl.validation import schema, HXLValidationException
# end
| unlicense | Python |
b35d72da72a76060b67592e50441d3c2fcd32ee3 | fix for groups reverse url and for queryset filter | hddn/studentsdb,hddn/studentsdb,hddn/studentsdb | students/views/groups.py | students/views/groups.py | from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.views.generic import ListView, UpdateView, DeleteView, CreateView
from ..models import Group
from ..forms import GroupEditForm
from ..util import get_current_group
GROUPS_NUM = 5 # number of groups for pagination
class GroupsListView(ListView):
template_name = 'students/groups.html'
model = Group
paginate_by = GROUPS_NUM
context_object_name = 'groups'
def get_queryset(self):
current_group = get_current_group(self.request)
if current_group:
queryset = Group.objects.filter(title=current_group.title)
else:
queryset = Group.objects.all()
order_by = self.request.GET.get('order_by', '')
if order_by in ('title', 'leader'):
queryset = queryset.order_by(order_by)
if self.request.GET.get('reverse', '') == '1':
queryset = queryset.reverse()
return queryset
class GroupUpdateView(UpdateView):
model = Group
template_name = 'students/groups_edit.html'
form_class = GroupEditForm
def get_success_url(self):
return '{}?status_message={}'.format(reverse('groups'), _('Group saved'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect('{}?status_message={}'.format(reverse('groups'), _('Canceled')))
else:
return super(GroupUpdateView, self).post(request, *args, **kwargs)
class GroupAddView(CreateView):
model = Group
template_name = 'students/groups_add.html'
form_class = GroupEditForm
def get_success_url(self):
return '{}?status_message={}'.format(reverse('groups'), _('Group added'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect('{}?status_message={}'.format(reverse('groups'), _('Canceled')))
else:
return super(GroupAddView, self).post(request, *args, **kwargs)
class GroupDeleteView(DeleteView):
model = Group
template_name = 'students/groups_confirm_delete.html'
def get_success_url(self):
return '{}?status_message={}'.format(reverse('groups'), _('Group deleted'))
| from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.views.generic import ListView, UpdateView, DeleteView, CreateView
from ..models import Group
from ..forms import GroupEditForm
from ..util import get_current_group
GROUPS_NUM = 5 # number of groups for pagination
class GroupsListView(ListView):
template_name = 'students/groups.html'
model = Group
paginate_by = GROUPS_NUM
context_object_name = 'groups'
def get_queryset(self):
current_group = get_current_group(self.request)
if current_group:
queryset = Group.objects.filter(student_group=current_group)
else:
queryset = Group.objects.all()
order_by = self.request.GET.get('order_by', '')
if order_by in ('title', 'leader'):
queryset = queryset.order_by(order_by)
if self.request.GET.get('reverse', '') == '1':
queryset = queryset.reverse()
return queryset
class GroupUpdateView(UpdateView):
model = Group
template_name = 'students/groups_edit.html'
form_class = GroupEditForm
def get_success_url(self):
return '{}?status_message={}'.format(reverse('home'), _('Group saved'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect('{}?status_message={}'.format(reverse('home'), _('Canceled')))
else:
return super(GroupUpdateView, self).post(request, *args, **kwargs)
class GroupAddView(CreateView):
model = Group
template_name = 'students/groups_add.html'
form_class = GroupEditForm
def get_success_url(self):
return '{}?status_message={}'.format(reverse('home'), _('Group added'))
def post(self, request, *args, **kwargs):
if request.POST.get('cancel_button'):
return HttpResponseRedirect('{}?status_message={}'.format(reverse('home'), _('Canceled')))
else:
return super(GroupAddView, self).post(request, *args, **kwargs)
class GroupDeleteView(DeleteView):
model = Group
template_name = 'students/groups_confirm_delete.html'
def get_success_url(self):
return '{}?status_message={}'.format(reverse('home'), _('Group deleted'))
| mit | Python |
71aa46a8dc0d82a12cd6041d090f3f9a9b1bbe75 | add arg for specifying webcam name | fchtngr/fotowebcam.eu-downloader | wallpaper_downloader.py | wallpaper_downloader.py | from lxml import html
from lxml import etree
import json
import requests
import urllib
import os
import argparse
import sys
###
### Downloads images from traunstein webcam. Skips download of existing files. Ignores images in blacklist file.
###
parser = argparse.ArgumentParser(description="Downloads bestof images from Traunstein webcam")
parser.add_argument('webcam', help='the fotowebcam name')
parser.add_argument('--path', help='where to save the images, default is cwdir')
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
resolution = "_hu"
extension = ".jpg"
url = "http://www.foto-webcam.eu/webcam/%s" % args.webcam
targetpath= os.dirname(args.path) if args.path else os.getcwd()
blacklistpath = os.path.join(targetpath, "blacklist")
fileslistpath = os.path.join(targetpath, "fileslist")
imagelist = requests.get("http://www.foto-webcam.eu/webcam/include/list.php?img=&wc=%s&bestof=1" % args.webcam)
images = json.loads(imagelist.text)
bestoflist = images['bestof']
with open(blacklistpath) as f:
blacklist = f.read().splitlines()
with open(fileslistpath) as f:
addToBlackList = f.read().splitlines()
for f in os.listdir(targetpath):
if f in addToBlackList:
addToBlackList.remove(f)
blacklist = blacklist + addToBlackList
downloaded = 0
blacklisted = 0
existing = 0
downloadlist = []
for i in bestoflist:
imagename = i + resolution + extension
imageurl = url + imagename
filename = imagename.replace("/", "-")
filepath = os.path.join(targetpath, filename)
print "checking %s:" % imagename
if os.path.isfile(filepath):
print "\talready exists!"
downloadlist.append(filename)
existing = existing + 1
elif filename in blacklist:
print "\tblacklisted!"
blacklisted = blacklisted + 1
else:
print "\tdownloading..."
urllib.urlretrieve(imageurl, filepath)
downloadlist.append(filename)
downloaded = downloaded + 1
print "\tdone"
with open(os.path.join(targetpath, 'fileslist'), 'w') as f:
f.write("\n".join(downloadlist))
with open(blacklistpath, 'w') as f:
f.write("\n".join(blacklist))
print ""
print "-" * 50
print "BestOf imgs available:\t%d" % len(bestoflist)
print "-" * 50
print "Downloaded: \t\t%d" % downloaded
print "Ignored: \t\t%d (blacklist: %d, existing: %d)" % (blacklisted + existing, blacklisted, existing)
if len(addToBlackList) > 0:
print "Added to blacklist: \t%d" % len(addToBlackList)
print "\t%s" % str(addToBlackList)
print "-" * 50
| from lxml import html
from lxml import etree
import json
import requests
import urllib
import os
import argparse
###
### Downloads images from traunstein webcam. Skips download of existing files. Ignores images in blacklist file.
###
parser = argparse.ArgumentParser(description="Downloads bestof images from Traunstein webcam")
parser.add_argument('--path', help='where to save the images')
args = parser.parse_args()
resolution = "_hu"
extension = ".jpg"
url = "http://www.foto-webcam.eu/webcam/traunstein/"
targetpath= os.dirname(args.path) if args.path else os.getcwd()
blacklistpath = os.path.join(targetpath, "blacklist")
fileslistpath = os.path.join(targetpath, "fileslist")
imagelist = requests.get("http://www.foto-webcam.eu/webcam/include/list.php?img=&wc=traunstein&bestof=1")
images = json.loads(imagelist.text)
bestoflist = images['bestof']
with open(blacklistpath) as f:
blacklist = f.read().splitlines()
with open(fileslistpath) as f:
addToBlackList = f.read().splitlines()
for f in os.listdir(targetpath):
if f in addToBlackList:
addToBlackList.remove(f)
blacklist = blacklist + addToBlackList
downloaded = 0
blacklisted = 0
existing = 0
downloadlist = []
for i in bestoflist:
imagename = i + resolution + extension
imageurl = url + imagename
filename = imagename.replace("/", "-")
filepath = os.path.join(targetpath, filename)
print "checking %s:" % imagename
if os.path.isfile(filepath):
print "\talready exists!"
downloadlist.append(filename)
existing = existing + 1
elif filename in blacklist:
print "\tblacklisted!"
blacklisted = blacklisted + 1
else:
print "\tdownloading..."
urllib.urlretrieve(imageurl, filepath)
downloadlist.append(filename)
downloaded = downloaded + 1
print "\tdone"
with open(os.path.join(targetpath, 'fileslist'), 'w') as f:
f.write("\n".join(downloadlist))
with open(blacklistpath, 'w') as f:
f.write("\n".join(blacklist))
print ""
print "-" * 50
print "BestOf imgs available:\t%d" % len(bestoflist)
print "-" * 50
print "Downloaded: \t\t%d" % downloaded
print "Ignored: \t\t%d (blacklist: %d, existing: %d)" % (blacklisted + existing, blacklisted, existing)
if len(addToBlackList) > 0:
print "Added to blacklist: \t%d" % len(addToBlackList)
print "\t%s" % str(addToBlackList)
print "-" * 50
| mit | Python |
ffc5d0abbc03d67ebd320cb2af7d855e5124a20b | Update mails.py | troeger/opensubmit,troeger/opensubmit,troeger/opensubmit,troeger/opensubmit,troeger/opensubmit | web/opensubmit/mails.py | web/opensubmit/mails.py | from django.core.mail import EmailMessage
from django.core.urlresolvers import reverse
from opensubmit import settings
STUDENT_FAILED_SUB = 'Warning - Validation failed'
STUDENT_FAILED_MSG = '''
Hi,
this is a short notice that your submission for "%s" in "%s"
failed in the automated validation test.
Further information can be found at %s.'''
STUDENT_PASSED_SUB = 'Validation successful'
STUDENT_PASSED_MSG = '''
Hi,
this is a short notice that your submission for "%s" in "%s"
passed in the automated validation test.
Further information can be found at %s.'''
STUDENT_GRADED_SUB = 'Grading finished'
STUDENT_GRADED_MSG = '''
Hi,
this is a short notice that the of grading your submission
for "%s" in "%s" was finalized.
Further information can be found at %s.'''
def inform_student(submission, state):
'''
Create an email message for the student,
based on the given submission state.
Sending eMails on validation completion does
not work, since this may have been triggered
by the admin.
'''
details_url = settings.HOST + reverse('details', args=(submission.pk,))
if state == submission.TEST_VALIDITY_FAILED:
subject = STUDENT_FAILED_SUB
message = STUDENT_FAILED_MSG
message = message % (submission.assignment,
submission.assignment.course,
details_url)
elif state == submission.CLOSED:
if submission.assignment.is_graded():
subject = STUDENT_GRADED_SUB
message = STUDENT_GRADED_MSG
else:
subject = STUDENT_PASSED_SUB
message = STUDENT_PASSED_MSG
message = message % (submission.assignment,
submission.assignment.course,
details_url)
else:
return
subject = "[%s] %s" % (submission.assignment.course, subject)
from_email = submission.assignment.course.owner.email
recipients = submission.authors.values_list(
'email', flat=True).distinct().order_by('email')
# send student email with BCC to course owner.
# TODO: This might be configurable later
# email = EmailMessage(subject, message, from_email, recipients,
# [self.assignment.course.owner.email])
email = EmailMessage(subject, message, from_email, recipients)
email.send(fail_silently=True)
| from django.core.mail import EmailMessage
from django.core.urlresolvers import reverse
from opensubmit import settings
STUDENT_FAILED_SUB = 'Warning - Validation failed'
STUDENT_FAILED_MSG = '''
Hi,
this is a short notice that your submission for "%s" in "%s"
failed in the automated validation test.
Further information can be found at %s.'''
STUDENT_PASSED_SUB = 'Validation successful'
STUDENT_PASSED_MSG = '''
Hi,
this is a short notice that your submission for "%s" in "%s"
passed in the automated validation test.
Further information can be found at %s.'''
STUDENT_GRADED_SUB = 'Grading finished'
STUDENT_GRADED_MSG = '''
Hi,
this is a short notice that the of grading your submission
for "%s" in "%s" was finalized.
Further information can be found at %s.'''
def inform_student(submission, state):
'''
Create an email message for the student,
based on the given submission state.
Sending eMails on validation completion does
not work, since this may have been triggered
by the admin.
'''
details_url = settings.MAIN_URL + reverse('details', args=(submission.pk,))
if state == submission.TEST_VALIDITY_FAILED:
subject = STUDENT_FAILED_SUB
message = STUDENT_FAILED_MSG
message = message % (submission.assignment,
submission.assignment.course,
details_url)
elif state == submission.CLOSED:
if submission.assignment.is_graded():
subject = STUDENT_GRADED_SUB
message = STUDENT_GRADED_MSG
else:
subject = STUDENT_PASSED_SUB
message = STUDENT_PASSED_MSG
message = message % (submission.assignment,
submission.assignment.course,
details_url)
else:
return
subject = "[%s] %s" % (submission.assignment.course, subject)
from_email = submission.assignment.course.owner.email
recipients = submission.authors.values_list(
'email', flat=True).distinct().order_by('email')
# send student email with BCC to course owner.
# TODO: This might be configurable later
# email = EmailMessage(subject, message, from_email, recipients,
# [self.assignment.course.owner.email])
email = EmailMessage(subject, message, from_email, recipients)
email.send(fail_silently=True)
| agpl-3.0 | Python |
a86789998e3267ac9c7fd9ee916eedd5670f65f7 | Add a way to pretty-print JSON | GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,GNOME/extensions-web,magcius/sweettooth | sweettooth/decorators.py | sweettooth/decorators.py |
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.utils.safestring import mark_safe
import functools
import json
def model_view(model):
def inner(view):
@functools.wraps(view)
def new_view(request, pk, **kw):
obj = get_object_or_404(model, pk=pk)
return view(request, obj, **kw)
return new_view
return inner
def dump_json(response, pretty=False):
if pretty:
return json.dumps(response, indent=2, sort_keys=True)
else:
return json.dumps(response)
def ajax_view(view):
@functools.wraps(view)
def new_view(request, **kw):
pretty = request.GET.get('pretty', False)
response = view(request, **kw)
if response is None:
return HttpResponse()
if not isinstance(response, HttpResponse):
response = HttpResponse(mark_safe(dump_json(response, pretty)),
content_type="application/json")
return response
return new_view
|
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.utils.safestring import mark_safe
import functools
import json
def model_view(model):
def inner(view):
@functools.wraps(view)
def new_view(request, pk, **kw):
obj = get_object_or_404(model, pk=pk)
return view(request, obj, **kw)
return new_view
return inner
def ajax_view(view):
@functools.wraps(view)
def new_view(request, **kw):
response = view(request, **kw)
if response is None:
return HttpResponse()
if not isinstance(response, HttpResponse):
response = HttpResponse(mark_safe(json.dumps(response)),
content_type="application/json")
return response
return new_view
| agpl-3.0 | Python |
bddc1d9fff2ab749b5946f44d52ed0670c1ce801 | use @wraps to set the __name__ __module__ and __doc__ correctly for logged functions | matrix-org/synapse,iot-factory/synapse,illicitonion/synapse,matrix-org/synapse,iot-factory/synapse,howethomas/synapse,illicitonion/synapse,rzr/synapse,illicitonion/synapse,TribeMedia/synapse,rzr/synapse,TribeMedia/synapse,iot-factory/synapse,matrix-org/synapse,TribeMedia/synapse,illicitonion/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,illicitonion/synapse,iot-factory/synapse,TribeMedia/synapse,howethomas/synapse,rzr/synapse,rzr/synapse,TribeMedia/synapse,iot-factory/synapse,howethomas/synapse,rzr/synapse,howethomas/synapse,howethomas/synapse | synapse/util/logutils.py | synapse/util/logutils.py | # -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from inspect import getcallargs
from functools import wraps
import logging
def log_function(f):
""" Function decorator that logs every call to that function.
"""
func_name = f.__name__
lineno = f.func_code.co_firstlineno
pathname = f.func_code.co_filename
@wraps(f)
def wrapped(*args, **kwargs):
name = f.__module__
logger = logging.getLogger(name)
level = logging.DEBUG
if logger.isEnabledFor(level):
bound_args = getcallargs(f, *args, **kwargs)
def format(value):
r = str(value)
if len(r) > 50:
r = r[:50] + "..."
return r
func_args = [
"%s=%s" % (k, format(v)) for k, v in bound_args.items()
]
msg_args = {
"func_name": func_name,
"args": ", ".join(func_args)
}
record = logging.LogRecord(
name=name,
level=level,
pathname=pathname,
lineno=lineno,
msg="Invoked '%(func_name)s' with args: %(args)s",
args=msg_args,
exc_info=None
)
logger.handle(record)
return f(*args, **kwargs)
return wrapped
| # -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from inspect import getcallargs
import logging
def log_function(f):
""" Function decorator that logs every call to that function.
"""
func_name = f.__name__
lineno = f.func_code.co_firstlineno
pathname = f.func_code.co_filename
def wrapped(*args, **kwargs):
name = f.__module__
logger = logging.getLogger(name)
level = logging.DEBUG
if logger.isEnabledFor(level):
bound_args = getcallargs(f, *args, **kwargs)
def format(value):
r = str(value)
if len(r) > 50:
r = r[:50] + "..."
return r
func_args = [
"%s=%s" % (k, format(v)) for k, v in bound_args.items()
]
msg_args = {
"func_name": func_name,
"args": ", ".join(func_args)
}
record = logging.LogRecord(
name=name,
level=level,
pathname=pathname,
lineno=lineno,
msg="Invoked '%(func_name)s' with args: %(args)s",
args=msg_args,
exc_info=None
)
logger.handle(record)
return f(*args, **kwargs)
return wrapped
| apache-2.0 | Python |
dafef02f46d9413b0ff125e1f8e701b009ed66c9 | Disable logging for irco-explorer. | GaretJax/irco,GaretJax/irco,GaretJax/irco,GaretJax/irco | irco/scripts/explorer.py | irco/scripts/explorer.py | import argparse
from irco.explorer import app, database, filters
#from irco.logging import sentry
def main():
argparser = argparse.ArgumentParser('irco-explorer')
argparser.add_argument('-p', '--port', default=8000, type=int)
argparser.add_argument('-i', '--interface', default='127.0.0.1')
argparser.add_argument('-d', '--debug', action='store_true')
argparser.add_argument('database')
args = argparser.parse_args()
#sentry.context.merge({
# 'tags': {'command': 'irco-explorer'},
# 'extra': {'parsed_arguments': args.__dict__}
#})
app.config['DATABASE'] = args.database
database.init_app(app)
filters.init_app(app)
app.run(
host=args.interface,
port=args.port,
debug=args.debug,
)
| import argparse
from irco.explorer import app, database, filters
from irco.logging import sentry
def main():
argparser = argparse.ArgumentParser('irco-explorer')
argparser.add_argument('-p', '--port', default=8000, type=int)
argparser.add_argument('-i', '--interface', default='127.0.0.1')
argparser.add_argument('-d', '--debug', action='store_true')
argparser.add_argument('database')
args = argparser.parse_args()
sentry.context.merge({
'tags': {'command': 'irco-explorer'},
'extra': {'parsed_arguments': args.__dict__}
})
app.config['DATABASE'] = args.database
database.init_app(app)
filters.init_app(app)
app.run(
host=args.interface,
port=args.port,
debug=args.debug,
)
| mit | Python |
89478bed86dfe4b522b2c38679410b4fb4b33979 | update to pythonwikibot changes | legoktm/legobot-old,legoktm/legobot-old | trunk/toolserver/fungi.py | trunk/toolserver/fungi.py | #!usr/bin/python
#-*- coding:utf-8 -*-
__version__ = '$Id$'
import re, time, sys, os
sys.path.append(os.environ['HOME'] + '/pywiki')
from pywikibot import wiki, pagegen, timedate
wiki.setUser('Legobot')
def do_project(page, putpage):
gen = pagegen.transclude(wiki.Page('Template' + page))
table = ''
for page in gen:
table += do_page(page)
putpage1 = wiki.Page(putpage)
newcontent = '{| class="wikitable sortable" style="text-align: left;"\n|-\n! Article name\n! Size (bytes)\n! Rating\n! Last modified\n! # Incoming links\n' + table + '\n|}'
putpage1.put(newcontent, 'Bot: Updating article list')
def do_page(page):
talk_page = page
main_page = page.toggletalk()
print 'Doing ' + main_page.title()
lastedit = timedate.convertts(main_page.lastedit()['timestamp'])
try:
clas = wiki.parseTemplate(talk_page.get())['class']
except KeyError:
clas = ''
if clas == 'GA':
clas = '{{GA-inline}}'
elif clas == 'FA':
clas = '{{FA-inline}}'
else:
clas = clas.title()
list = pagegen.whatlinkshere(main_page)
num=0
for i in list:
num+=1
try:
tablerow = '|-\n|| %s || %s || %s || %s || %s\n' %(main_page.aslink(), str(len(main_page.get())), clas, lastedit, str(num))
except wiki.IsRedirectPage:
return ''
except UnicodeEncodeError:
return ''
except UnicodeDecodeError:
return ''
print tablerow,
return tablerow
def main():
do_project('WikiProject Fungi','Wikipedia:WikiProject Fungi/fungus articles by size')
do_project('WikiProject Egypt','Wikipedia:WikiProject_Egypt/Articles_by_size')
if __name__ == '__main__':
main() | #!usr/bin/python
__version__ = '$Id$'
import re, time, sys, os
sys.path.append(os.environ['HOME'] + '/pywiki')
from pywikibot import wiki, pagegen, timedate
wiki.setUser('Legobot')
def main():
gen = pagegen.transclude(wiki.Page('Template:WikiProject Fungi'))
table = ''
for page in gen:
table += do_page(page)
putpage = wiki.Page('User:Legoktm/Bot')
newcontent = '{| class="wikitable sortable" style="text-align: left;"\n|-\n! Article name\n! Size (bytes)\n! Rating\n! Last modified\n! # Incoming links\n' + table + '\n|}'
putpage.put(newcontent, 'Testing bot')
def do_page(page):
talk_page = page
main_page = page.toggletalk()
print 'Doing ' + main_page.title()
lastedit = timedate.convertts(main_page.lastedit()['timestamp'])
try:
clas = wiki.parseTemplate(talk_page.get())['class']
except KeyError:
clas = ''
if clas == 'GA':
clas = '{{GA-inline}}'
elif clas == 'FA':
clas = '{{FA-inline}}'
else:
clas = clas.title()
try:
tablerow = '|-\n|| %s || %s || %s || %s || %s\n' %(main_page.aslink(), str(len(main_page.get())), clas, lastedit, str(len(pagegen.whatlinkshere(main_page))))
except wiki.IsRedirectPage:
return ''
except UnicodeEncodeError:
return ''
except UnicodeDecodeError:
return ''
print tablerow,
return tablerow
if __name__ == '__main__':
main() | mit | Python |
990c4dec9549908013616ed3daad2e8d9beaf33f | fix admin | bruecksen/isimip,bruecksen/isimip,bruecksen/isimip,bruecksen/isimip | isi_mip/contrib/admin.py | isi_mip/contrib/admin.py | from django.contrib import admin
from django.contrib.auth.models import User
from django.core import urlresolvers
from isi_mip.climatemodels.models import BaseImpactModel, Sector
class UserAdmin(admin.ModelAdmin):
list_display = ('username', 'email', 'first_name', 'last_name', 'get_model', 'get_sector')
list_filter = ('is_staff', 'is_superuser')
def get_model(self, obj):
try:
base_models = BaseImpactModel.objects.filter(owners=obj)
adminurl = "admin:%s_change" % BaseImpactModel._meta.db_table
except:
return '-'
results = []
for bm in base_models:
link = urlresolvers.reverse(adminurl, args=[bm.id])
results.append('<a href="%s">%s</a>' % (link, bm))
return ', '.join(results)
get_model.admin_order_field = 'get_model'
get_model.short_description = 'Impact Model'
get_model.allow_tags = True
def get_sector(self, obj):
try:
base_models = BaseImpactModel.objects.filter(owners=obj)
adminurl = "admin:%s_change" % Sector._meta.db_table
except:
return '-'
results = []
for bm in base_models:
link = urlresolvers.reverse(adminurl, args=[bm.id])
results.append('<a href="%s">%s</a>' % (link, bm))
return ', '.join(results)
get_sector.admin_order_field = 'get_sector'
get_sector.short_description = 'Sector'
get_sector.allow_tags = True
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| from django.contrib import admin
from django.contrib.auth.models import User
from django.core import urlresolvers
from isi_mip.climatemodels.models import BaseImpactModel
class UserAdmin(admin.ModelAdmin):
list_display = ('username', 'email', 'first_name', 'last_name', 'get_model')
list_filter = ('is_staff', 'is_superuser')
def get_model(self, obj):
try:
base_models = BaseImpactModel.objects.filter(owners=obj)
adminurl = "admin:%s_change" % BaseImpactModel._meta.db_table
except:
return '-'
results = []
for bm in base_models:
link = urlresolvers.reverse(adminurl, args=[bm.id])
results.append('<a href="%s">%s</a>' % (link, bm))
return ', '.join(results)
get_model.admin_order_field = 'get_model'
get_model.short_description = 'Impact Model'
get_model.allow_tags = True
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| mit | Python |
e7356e6ca1968d930f4fd348b48dcdd1cb9a02ab | Add ability to search tags by name | theatlantic/django-taggit2,decibyte/django-taggit,theatlantic/django-taggit,decibyte/django-taggit,theatlantic/django-taggit,theatlantic/django-taggit2 | taggit/admin.py | taggit/admin.py | from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
search_fields = ['name']
admin.site.register(Tag, TagAdmin) | from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
| bsd-3-clause | Python |
c7a26e74deec3534525a1752bab5849f6b05566c | Improve readability | CubicComet/exercism-python-solutions | meetup/meetup.py | meetup/meetup.py | from datetime import date
class MeetupDayException(Exception):
pass
WEEKDAYS = {"Monday": 0, "Tuesday": 1, "Wednesday": 2, "Thursday": 3,
"Friday": 4, "Saturday": 5, "Sunday": 6}
TEENTH = {1:15, 2:16, 3:17, 4:18, 5:19, 6:13, 7:14}
ORDINAL = {"1st": 0, "2nd": 7, "3rd": 14, "4th": 21, "5th": 28}
def meetup_day(y, m, dow, dom):
start_dow = date(y, m, 1).weekday()
first_dow = (WEEKDAYS[dow] - start_dow) % 7 + 1
if dom == "teenth":
d = date(y, m, TEENTH[first_dow])
elif dom == "last":
try:
d = date(y, m, first_dow + 28)
except ValueError:
d = date(y, m, first_dow + 21)
else:
try:
d = date(y, m, first_dow + ORDINAL[dom])
except ValueError:
raise MeetupDayException("Out of range")
except KeyError:
raise MeetupDayException("Invalid input")
return d
| from datetime import date
class MeetupDayException(Exception):
pass
WEEKDAYS = {"Monday": 0, "Tuesday": 1, "Wednesday": 2, "Thursday": 3,
"Friday": 4, "Saturday": 5, "Sunday": 6}
TEENTH = {1:15, 2:16, 3:17, 4:18, 5:19, 6:13, 7:14}
ORDINAL = {"1st": 0, "2nd": 7, "3rd": 14, "4th": 21, "5th": 28}
def meetup_day(y, m, dow, dom):
start = date(y, m, 1)
first_dow = (WEEKDAYS[dow] - start.weekday()) % 7 + 1
if dom == "teenth":
d = date(y, m, TEENTH[first_dow])
elif dom == "last":
try:
d = date(y, m, first_dow + 28)
except ValueError:
d = date(y, m, first_dow + 21)
else:
try:
d = date(y, m, first_dow + ORDINAL[dom])
except ValueError:
raise MeetupDayException("Out of range")
except KeyError:
raise MeetupDayException("Invalid input")
return d
| agpl-3.0 | Python |
51c708c8f7ff39c9f9cb197f48897a00fba2ab54 | Change WELT2000 import path to point to skylines' fork | Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,shadowoneau/skylines,RBE-Avionik/skylines,skylines-project/skylines,Turbo87/skylines,shadowoneau/skylines,Harry-R/skylines,skylines-project/skylines,RBE-Avionik/skylines,shadowoneau/skylines,Harry-R/skylines,skylines-project/skylines,RBE-Avionik/skylines,Turbo87/skylines,Harry-R/skylines,Turbo87/skylines,shadowoneau/skylines,RBE-Avionik/skylines | skylines/lib/waypoints/welt2000.py | skylines/lib/waypoints/welt2000.py | import os
import subprocess
from flask import current_app
from .welt2000_reader import parse_welt2000_waypoints
def __get_database_file(dir_data):
path = os.path.join(dir_data, 'WELT2000.TXT')
# Create Welt2000 data folder if necessary
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
# Download the current file
# (only if server file is newer than local file)
url = 'https://raw.githubusercontent.com/skylines-project/welt2000/master/WELT2000.TXT'
subprocess.check_call(['wget', '-N', '-P', os.path.dirname(path), url])
# Check if download succeeded
if not os.path.exists(path):
raise RuntimeError('Welt2000 database not found at {}'.format(path))
# Return path to the Welt2000 file
return path
def get_database(bounds=None, path=None):
delete_file = False
if not path:
# Get Welt2000 file
path = __get_database_file(current_app.config['SKYLINES_TEMPORARY_DIR'])
delete_file = True
# Parse Welt2000 file
with open(path, "r") as f:
parsed = parse_welt2000_waypoints(f, bounds)
if delete_file:
os.remove(path)
# Return parsed WaypointList
return parsed
| import os
import subprocess
from flask import current_app
from .welt2000_reader import parse_welt2000_waypoints
def __get_database_file(dir_data):
path = os.path.join(dir_data, 'WELT2000.TXT')
# Create Welt2000 data folder if necessary
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
# Download the current file
# (only if server file is newer than local file)
url = 'http://www.segelflug.de/vereine/welt2000/download/WELT2000.TXT'
subprocess.check_call(['wget', '-N', '-P', os.path.dirname(path), url])
# Check if download succeeded
if not os.path.exists(path):
raise RuntimeError('Welt2000 database not found at {}'.format(path))
# Return path to the Welt2000 file
return path
def get_database(bounds=None, path=None):
delete_file = False
if not path:
# Get Welt2000 file
path = __get_database_file(current_app.config['SKYLINES_TEMPORARY_DIR'])
delete_file = True
# Parse Welt2000 file
with open(path, "r") as f:
parsed = parse_welt2000_waypoints(f, bounds)
if delete_file:
os.remove(path)
# Return parsed WaypointList
return parsed
| agpl-3.0 | Python |
3aba7e7f654e492fb689b8030615658cae93c2d1 | Fix crashing when a user attempts to set usermode +o without /oper | Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd | txircd/modules/umode_o.py | txircd/modules/umode_o.py | from txircd.modbase import Mode
class OperMode(Mode):
def checkSet(self, user, target, param):
user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - User mode o may not be set")
return False # Should only be set by the OPER command; hence, reject any normal setting of the mode
def checkWhoFilter(self, user, targetUser, filters, fields, channel, udata):
if "o" in filters and not udata["oper"]:
return {}
return udata
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.oper_mode = None
def spawn(self):
self.oper_mode = OperMode()
return {
"modes": {
"uno": self.oper_mode
},
"actions": {
"wholinemodify": self.oper_mode.checkWhoFilter
},
"common": True
} | from txircd.modbase import Mode
class OperMode(Mode):
def checkSet(self, target, param):
return False # Should only be set by the OPER command; hence, reject any normal setting of the mode
def checkWhoFilter(self, user, targetUser, filters, fields, channel, udata):
if "o" in filters and not udata["oper"]:
return {}
return udata
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.oper_mode = None
def spawn(self):
self.oper_mode = OperMode()
return {
"modes": {
"uno": self.oper_mode
},
"actions": {
"wholinemodify": self.oper_mode.checkWhoFilter
},
"common": True
} | bsd-3-clause | Python |
552c4f70afe40af7aaf72a8c6061e09753397a2c | fix typo | SuperCowPowers/travis-wheels | wheel_upload.py | wheel_upload.py |
"""Push wheels up to S3 bucket"""
import boto
from boto.s3.key import Key
_bucket = 'workbench-wheels'
_key = 'py27/wheelhouse.tar.gz'
# Spin up the S3 connection
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(_bucket)
mykey = bucket.get_key(_key)
if not mykey:
print 'Could not find key %s, creating a new file...' % _key
mykey = Key(bucket)
mykey.key = _key
# Upload the file
mykey.set_contents_from_filename('wheelhouse.tar.gz')
except: # Failure
exit(1)
|
"""Push wheels up to S3 bucket"""
import boto
from boto.s3.key import Key
_bucket = 'workbench-wheels'
_key = 'py27/wheelhouse.tar.gz'
# Spin up the S3 connection
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(_bucket)
mykey = bucket.get_key(_key)
if not mykey:
print 'Could not find key %s, creating a new file...' % _key
mykey = Key(bucket)
mykey.key = _key
# Upload the file
mykey.set_contents_from_filename('wheelhouse.tar.gz')
except e: # Failure
exit(1)
| mit | Python |
427c654fb3afd0a9fffc8bfced577f2416edd082 | Change to new format to present stroke drawing. | xrloong/Xie | xie/graphics/utils.py | xie/graphics/utils.py | class TextCodec:
STROKE_SEPERATOR = "/"
POINT_SEPERATOR = ","
PARAMETER_SEPERATOR = "."
START_POINT_PATTERN="0" + PARAMETER_SEPERATOR + "{0[0]}" + PARAMETER_SEPERATOR + "{0[1]}"
END_POINT_PATTERN="1" + PARAMETER_SEPERATOR + "{0[0]}" + PARAMETER_SEPERATOR + "{0[1]}"
CONTROL_POINT_PATTERN="2" + PARAMETER_SEPERATOR + "{0[0]}" + PARAMETER_SEPERATOR + "{0[1]}"
def __init__(self):
pass
def encodeStartPoint(self, p):
return TextCodec.START_POINT_PATTERN.format(p)
def encodeEndPoint(self, p):
return TextCodec.END_POINT_PATTERN.format(p)
def encodeControlPoint(self, p):
return TextCodec.CONTROL_POINT_PATTERN.format(p)
def encodeStrokeExpression(self, pointExpressionList):
return TextCodec.POINT_SEPERATOR.join(pointExpressionList)
def encodeCharacterExpression(self, strokeExpressionList):
return TextCodec.STROKE_SEPERATOR.join(strokeExpressionList)
def isStartPoint(self, pointExpression):
return pointExpression[0]=='0'
def isEndPoint(self, pointExpression):
return pointExpression[0]=='1'
def isControlPoint(self, pointExpression):
return pointExpression[0]=='2'
def decodePointExpression(self, pointExpression):
param=pointExpression.split(TextCodec.PARAMETER_SEPERATOR)
return (int(param[1]), int(param[2]))
def decodeStrokeExpression(self, strokeExpression):
return strokeExpression.split(TextCodec.POINT_SEPERATOR)
def decodeCharacterExpression(self, characterExpression):
return characterExpression.split(TextCodec.STROKE_SEPERATOR)
| class TextCodec:
def __init__(self):
pass
def encodeStartPoint(self, p):
return "0{0[0]:02X}{0[1]:02X}".format(p)
def encodeEndPoint(self, p):
return "1{0[0]:02X}{0[1]:02X}".format(p)
def encodeControlPoint(self, p):
return "2{0[0]:02X}{0[1]:02X}".format(p)
def encodeStrokeExpression(self, pointExpressionList):
return ",".join(pointExpressionList)
def encodeCharacterExpression(self, strokeExpressionList):
return ";".join(strokeExpressionList)
def isStartPoint(self, pointExpression):
return pointExpression[0]=='0'
def isEndPoint(self, pointExpression):
return pointExpression[0]=='1'
def isControlPoint(self, pointExpression):
return pointExpression[0]=='2'
def decodePointExpression(self, pointExpression):
e=pointExpression
return (int(e[1:3], 16), int(e[3:5], 16))
def decodeStrokeExpression(self, strokeExpression):
return strokeExpression.split(",")
def decodeCharacterExpression(self, characterExpression):
return characterExpression.split(";")
| apache-2.0 | Python |
c3b0cc681b06ab5b8d64612d5c35fb27da56beeb | Fix port number detection in sabnzbd | Decipher/spksrc,astroganga/spksrc,mjoe/spksrc,markbastiaans/spksrc,Decipher/spksrc,mreppen/spksrc,saschpe/spksrc,schumi2004/spksrc,hadess/spksrc,mirweb/spksrc,mirweb/spksrc,jdierkse/spksrc,adrien-delhorme/spksrc,sea3pea0/spksrc,thunfischbrot/spksrc,mirweb/spksrc,Foncekar/spksrc,thunfischbrot/spksrc,lysin/spksrc,hmflash/spksrc,mirweb/spksrc,mreppen/spksrc,andyblac/spksrc,GaetanCambier/spksrc,Grimthorr/spksrc,mjoe/spksrc,mjoe/spksrc,momiji/spksrc,xtech9/spksrc,Decipher/spksrc,mirweb/spksrc,Foncekar/spksrc,andyblac/spksrc,bwynants/spksrc,Foncekar/spksrc,riverful/spksrc,saschpe/spksrc,lost-carrier/spksrc,Pyrex-FWI/spksrc,cdcabrera/spksrc,Pyrex-FWI/spksrc,saschpe/spksrc,lost-carrier/spksrc,Foncekar/spksrc,sea3pea0/spksrc,GoodOmens83/spksrc,nickbroon/spksrc,phoenix741/spksrc,nickbroon/spksrc,hmflash/spksrc,thunfischbrot/spksrc,Foncekar/spksrc,mjoe/spksrc,demorfi/spksrc,Grimthorr/spksrc,xtech9/spksrc,phoenix741/spksrc,momiji/spksrc,astroganga/spksrc,bwynants/spksrc,lysin/spksrc,Grimthorr/spksrc,jdel/syno-packages,schumi2004/spksrc,adrien-delhorme/spksrc,markbastiaans/spksrc,sea3pea0/spksrc,mreppen/spksrc,demorfi/spksrc,mreppen/spksrc,riverful/spksrc,jdel/syno-packages,thunfischbrot/spksrc,xtech9/spksrc,xtech9/spksrc,Pyrex-FWI/spksrc,markbastiaans/spksrc,hadess/spksrc,nickbroon/spksrc,jdierkse/spksrc,Decipher/spksrc,xtech9/spksrc,jdierkse/spksrc,Decipher/spksrc,cdcabrera/spksrc,jdel/syno-packages,lost-carrier/spksrc,bwynants/spksrc,Pyrex-FWI/spksrc,GaetanCambier/spksrc,jdel/syno-packages,sangood/spksrc,lost-carrier/spksrc,JasOXIII/spksrc,bwynants/spksrc,markbastiaans/spksrc,JasOXIII/spksrc,astroganga/spksrc,mirweb/spksrc,xtech9/spksrc,lysin/spksrc,Foncekar/spksrc,riverful/spksrc,hmflash/spksrc,nickbroon/spksrc,Decipher/spksrc,mreppen/spksrc,sea3pea0/spksrc,sea3pea0/spksrc,jdierkse/spksrc,adrien-delhorme/spksrc,riverful/spksrc,lost-carrier/spksrc,mirweb/spksrc,sea3pea0/spksrc,sangood/spksrc,los
t-carrier/spksrc,lost-carrier/spksrc,sangood/spksrc,Decipher/spksrc,demorfi/spksrc,sea3pea0/spksrc,mreppen/spksrc,momiji/spksrc,Dr-Bean/spksrc,mirweb/spksrc,bwynants/spksrc,lost-carrier/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,adrien-delhorme/spksrc,hadess/spksrc,demorfi/spksrc,jdierkse/spksrc,Dr-Bean/spksrc,riverful/spksrc,markbastiaans/spksrc,thunfischbrot/spksrc,astroganga/spksrc,jdierkse/spksrc,GaetanCambier/spksrc,Decipher/spksrc,phoenix741/spksrc,saschpe/spksrc,Dr-Bean/spksrc,nickbroon/spksrc,cdcabrera/spksrc,bwynants/spksrc,GoodOmens83/spksrc,demorfi/spksrc,mirweb/spksrc,Foncekar/spksrc,saschpe/spksrc,riverful/spksrc,hmflash/spksrc,Grimthorr/spksrc,hadess/spksrc,Foncekar/spksrc,schumi2004/spksrc,GoodOmens83/spksrc,bwynants/spksrc,bwynants/spksrc,GaetanCambier/spksrc,astroganga/spksrc,Pyrex-FWI/spksrc,JasOXIII/spksrc,andyblac/spksrc,phoenix741/spksrc,phoenix741/spksrc,mreppen/spksrc,andyblac/spksrc,Foncekar/spksrc,sea3pea0/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,Dr-Bean/spksrc,andyblac/spksrc,saschpe/spksrc,sangood/spksrc,saschpe/spksrc,lysin/spksrc,Dr-Bean/spksrc,sangood/spksrc,mjoe/spksrc,nickbroon/spksrc,astroganga/spksrc,mjoe/spksrc,bwynants/spksrc,GoodOmens83/spksrc,demorfi/spksrc,hadess/spksrc,demorfi/spksrc,astroganga/spksrc,sangood/spksrc,hmflash/spksrc,adrien-delhorme/spksrc,andyblac/spksrc,lysin/spksrc,phoenix741/spksrc,jdierkse/spksrc,hadess/spksrc,mjoe/spksrc,markbastiaans/spksrc,hadess/spksrc,thunfischbrot/spksrc,nickbroon/spksrc,riverful/spksrc,adrien-delhorme/spksrc,demorfi/spksrc,momiji/spksrc,mreppen/spksrc,Dr-Bean/spksrc,adrien-delhorme/spksrc,lysin/spksrc,phoenix741/spksrc,GoodOmens83/spksrc,Pyrex-FWI/spksrc,momiji/spksrc,GaetanCambier/spksrc,mirweb/spksrc,JasOXIII/spksrc,GaetanCambier/spksrc,nickbroon/spksrc,riverful/spksrc,sangood/spksrc,saschpe/spksrc,cdcabrera/spksrc,JasOXIII/spksrc,phoenix741/spksrc,astroganga/spksrc,andyblac/spksrc,riverful/spksrc,thunfischbrot/spksrc,GoodOmens83/spksrc,saschpe/spksrc,sea3pea0/spksrc,GaetanCambier/spk
src,schumi2004/spksrc,sangood/spksrc,Pyrex-FWI/spksrc,lysin/spksrc,Pyrex-FWI/spksrc,lost-carrier/spksrc,schumi2004/spksrc,JasOXIII/spksrc,nickbroon/spksrc,JasOXIII/spksrc,Dr-Bean/spksrc,momiji/spksrc,GaetanCambier/spksrc,Grimthorr/spksrc,mjoe/spksrc,Pyrex-FWI/spksrc,schumi2004/spksrc,adrien-delhorme/spksrc,cdcabrera/spksrc,jdierkse/spksrc,thunfischbrot/spksrc,xtech9/spksrc,GoodOmens83/spksrc,phoenix741/spksrc,lysin/spksrc,markbastiaans/spksrc,GoodOmens83/spksrc,GaetanCambier/spksrc,cdcabrera/spksrc,schumi2004/spksrc,momiji/spksrc,schumi2004/spksrc,schumi2004/spksrc,GoodOmens83/spksrc,lysin/spksrc,thunfischbrot/spksrc,andyblac/spksrc,xtech9/spksrc,mreppen/spksrc,hadess/spksrc,andyblac/spksrc,hadess/spksrc,adrien-delhorme/spksrc,Grimthorr/spksrc,JasOXIII/spksrc,mjoe/spksrc,hmflash/spksrc,hmflash/spksrc,markbastiaans/spksrc,demorfi/spksrc,sangood/spksrc,Decipher/spksrc,hmflash/spksrc,Dr-Bean/spksrc,astroganga/spksrc,JasOXIII/spksrc,jdierkse/spksrc,hmflash/spksrc,xtech9/spksrc,markbastiaans/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,momiji/spksrc | spk/sabnzbd/src/app/sabnzbd.cgi.py | spk/sabnzbd/src/app/sabnzbd.cgi.py | #!/usr/local/sabnzbd/env/bin/python
import os
import configobj
config = configobj.ConfigObj('/usr/local/sabnzbd/var/config.ini')
protocol = 'https' if int(config['misc']['enable_https']) else 'http'
https_port = int(config['misc']['port']) if len(config['misc']['https_port']) == 0 else int(config['misc']['https_port'])
port = https_port if protocol == 'https' else int(config['misc']['port'])
print 'Location: %s://%s:%d' % (protocol, os.environ['SERVER_NAME'], port)
print
| #!/usr/local/sabnzbd/env/bin/python
import os
import configobj
config = configobj.ConfigObj('/usr/local/sabnzbd/var/config.ini')
protocol = 'https' if int(config['misc']['enable_https']) else 'http'
port = int(config['misc']['https_port']) if int(config['misc']['enable_https']) else int(config['misc']['port'])
print 'Location: %s://%s:%d' % (protocol, os.environ['SERVER_NAME'], port)
print
| bsd-3-clause | Python |
41121f29cb868b9341ec505ba43eb4784104a2e5 | Add fixed find method for RIPEDatabase | fritz0705/lglass | lglass/database/whois.py | lglass/database/whois.py | # coding: utf-8
import socket
import lglass.rpsl
import lglass.database.base
@lglass.database.base.register
class WhoisClientDatabase(lglass.database.base.Database):
""" Simple blocking whois client database """
def __init__(self, hostspec):
self.hostspec = hostspec
def get(self, type, primary_key):
try:
return self.find(primary_key, types=[type], flags="-r")[-1]
except IndexError:
raise KeyError(type, primary_key)
def find(self, primary_key, types=None, flags=None):
send_buffer = b""
recv_buffer = b""
if types is not None:
send_buffer += "-T {types} ".format(types=",".join(types)).encode()
if flags is not None:
send_buffer += flags.encode()
send_buffer += b" "
send_buffer += "{key}".format(key=primary_key).encode()
send_buffer += b"\r\n"
with socket.create_connection(self.hostspec) as sock:
while len(send_buffer):
sent = sock.send(send_buffer)
send_buffer = send_buffer[sent:]
while True:
recvd = sock.recv(1024)
if not len(recvd):
break
recv_buffer += recvd
lines = recv_buffer.decode().splitlines()
lines_iter = iter(lines)
objs = []
while True:
obj = lglass.rpsl.Object.from_iterable(lines_iter, pragmas={
"stop-at-empty-line": True
})
if not obj:
break
objs.append(obj)
return objs
def list(self):
raise NotImplementedError("list() is not supported for WhoisClientDatabase")
def save(self):
raise NotImplementedError("save() is not supported for WhoisClientDatabase")
def delete(self):
raise NotImplementedError("delete() is not supported for WhoisClientDatabase")
@classmethod
def from_url(cls, url):
return cls((url.hostname, url.port if url.port else 43))
@lglass.database.base.register
class RIPEDatabase(WhoisClientDatabase):
def __init__(self, hostspec=None):
if hostspec is None:
hostspec = ("whois.ripe.net", 43)
WhoisClientDatabase.__init__(self, hostspec)
def find(self, primary_key, types=None, flags=None):
if flags is not None:
flags = "-B " + flags
else:
flags = "-B"
return WhoisClientDatabase.find(self, primary_key, types, flags)
def schema(self, type):
results = self.find(type, flags="-t")
if len(results) == 0:
raise KeyError("schema({})".format(type))
return lglass.rpsl.RIPESchemaObject(results[0])
@classmethod
def from_url(cls, url):
return cls()
| # coding: utf-8
import socket
import lglass.rpsl
import lglass.database.base
@lglass.database.base.register
class WhoisClientDatabase(lglass.database.base.Database):
""" Simple blocking whois client database """
def __init__(self, hostspec):
self.hostspec = hostspec
def get(self, type, primary_key):
try:
return self.find(primary_key, types=[type], flags="-r")[-1]
except IndexError:
raise KeyError(type, primary_key)
def find(self, primary_key, types=None, flags=None):
send_buffer = b""
recv_buffer = b""
if types is not None:
send_buffer += "-T {types} ".format(types=",".join(types)).encode()
if flags is not None:
send_buffer += flags.encode()
send_buffer += b" "
send_buffer += "{key}".format(key=primary_key).encode()
send_buffer += b"\r\n"
with socket.create_connection(self.hostspec) as sock:
while len(send_buffer):
sent = sock.send(send_buffer)
send_buffer = send_buffer[sent:]
while True:
recvd = sock.recv(1024)
if not len(recvd):
break
recv_buffer += recvd
lines = recv_buffer.decode().splitlines()
lines_iter = iter(lines)
objs = []
while True:
obj = lglass.rpsl.Object.from_iterable(lines_iter, pragmas={
"stop-at-empty-line": True
})
if not obj:
break
objs.append(obj)
return objs
def list(self):
raise NotImplementedError("list() is not supported for WhoisClientDatabase")
def save(self):
raise NotImplementedError("save() is not supported for WhoisClientDatabase")
def delete(self):
raise NotImplementedError("delete() is not supported for WhoisClientDatabase")
@classmethod
def from_url(cls, url):
return cls((url.hostname, url.port if url.port else 43))
@lglass.database.base.register
class RIPEDatabase(WhoisClientDatabase):
def __init__(self, hostspec=None):
if hostspec is None:
hostspec = ("whois.ripe.net", 43)
WhoisClientDatabase.__init__(self, hostspec)
def schema(self, type):
results = self.find(type, flags="-t")
if len(results) == 0:
raise KeyError("schema({})".format(type))
return lglass.rpsl.RIPESchemaObject(results[0])
| mit | Python |
1e4efa9e9f73cb332dc5bc624cc6d2b10ff87864 | Replace six.iteritems() with .items() | openstack/vitrage,openstack/vitrage,openstack/vitrage | vitrage/utils/__init__.py | vitrage/utils/__init__.py | # -*- encoding: utf-8 -*-
# Copyright 2015 - Alcatel-Lucent
# Copyright Β© 2014-2015 eNovance
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
def recursive_keypairs(d, separator='.'):
# taken from ceilometer and gnocchi
for name, value in sorted(d.items()):
if isinstance(value, dict):
for subname, subvalue in recursive_keypairs(value, separator):
yield ('%s%s%s' % (name, separator, subname), subvalue)
else:
yield name, value
def opt_exists(conf_parent, opt):
try:
return conf_parent[opt]
except cfg.NoSuchOptError:
return False
| # -*- encoding: utf-8 -*-
# Copyright 2015 - Alcatel-Lucent
# Copyright Β© 2014-2015 eNovance
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import six
def recursive_keypairs(d, separator='.'):
# taken from ceilometer and gnocchi
for name, value in sorted(six.iteritems(d)):
if isinstance(value, dict):
for subname, subvalue in recursive_keypairs(value, separator):
yield ('%s%s%s' % (name, separator, subname), subvalue)
else:
yield name, value
def opt_exists(conf_parent, opt):
try:
return conf_parent[opt]
except cfg.NoSuchOptError:
return False
| apache-2.0 | Python |
b11a7c8a4a8e80534edec320dac300066f59f08b | Remove needless line | uehara1414/kuso-wifi-server,uehara1414/kuso-wifi-server | web/docker_django/urls.py | web/docker_django/urls.py | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('kuso_wifi_server.urls', namespace='kuso_wifi_server'))
]
| from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('kuso_wifi_server.urls', namespace='kuso_wifi_server')),
# url(r'^', include('docker_django.apps.kuso_wifi_server')),
]
| mit | Python |
091478221106ea0260f90fd957a4753fc9a5a714 | bump 0.1.2 | VeryCB/jos | jos/__init__.py | jos/__init__.py | __version__ = '0.1.2'
__author__ = 'VeryCB <imcaibin@gmail.com>'
| __version__ = '0.1.1'
__author__ = 'VeryCB <imcaibin@gmail.com>'
| mit | Python |
7c502dc033d729e49f7878ccf1359a6b36eba4fc | remove unused import | couchbase/cbmonitor,ronniedada/litmus,couchbase/cbmonitor,couchbase/cbagent,mikewied/cbagent,pavel-paulau/cbagent,ronniedada/litmus,vmx/cbagent | webapp/cbmonitor/views.py | webapp/cbmonitor/views.py | from django.shortcuts import render_to_response
def tab(request, path=None):
tab_name = {
None: "inventory",
"charts": "charts",
"snapshots": "snapshots"
}.get(path)
template = "{0}/{0}".format(tab_name) + ".jade"
return render_to_response(template, {tab_name: True})
| import inspect
from django.shortcuts import render_to_response
def tab(request, path=None):
tab_name = {
None: "inventory",
"charts": "charts",
"snapshots": "snapshots"
}.get(path)
template = "{0}/{0}".format(tab_name) + ".jade"
return render_to_response(template, {tab_name: True})
| apache-2.0 | Python |
295931f9f499a7740e9856df186fe72940a8e0e5 | Prepare for next release | jfwood/keep,jfwood/keep | keep/version.py | keep/version.py | # Copyright 2010-2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cloud Keep version
"""
__version__ = '0.1.22dev'
__version_info__ = tuple(__version__.split('0.1.22dev'))
| # Copyright 2010-2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cloud Keep version
"""
__version__ = '0.1.21'
__version_info__ = tuple(__version__.split('0.1.21'))
| apache-2.0 | Python |
63f49bebadb0796ba3a4075bf430a65a16833733 | remove my test realm | mhorowitz/pykrb5 | krb5/network.py | krb5/network.py | import socket
import struct
from . import types
class KDCConnection(object):
def __init__(self, addr):
self.addr = addr
@staticmethod
def recv_all(socket, count):
data = ""
while count > 0:
buf = socket.recv(count)
if buf == "":
return data
data += buf
count -= len(buf)
return data
def send_kdc(self, message):
s = socket.socket(*self.addr[0:3])
if self.addr[2] == socket.IPPROTO_TCP:
s.connect(self.addr[4])
s.settimeout(3)
s.sendall(struct.pack('!i', len(message)))
s.sendall(message)
rep_len = struct.unpack('!i', self.recv_all(s, 4))
rep = self.recv_all(s, rep_len)
s.close()
return rep
elif self.addr[2] == socket.IPPROTO_UDP:
# s.connect(self.addr[4])
s.sendto(message, self.addr[4])
# this is a UDP socket, so just specify a buffer larger
# than a typical IP packet, and we'll get the next packet.
return s.recv(1500)
class KDCConnectionFactory(object):
def get_connections(self, realm):
# TODO marc: don't hardwire this.
realms = {"ATHENA.MIT.EDU" : (('kerberos.mit.edu', 88),),
"NERD-MILITIA.ORG" : (('kerberos.nerd-militia.org', 88),),
"1TS.ORG" : (('kerberos.1ts.org', 88),),
}
if realm not in realms:
raise types.KerberosException("No KDCs for realm {0}".format(realm))
for server in realms[realm]:
for addr in socket.getaddrinfo(
server[0], server[1], 0, 0, socket.IPPROTO_UDP):
yield KDCConnection(addr)
| import socket
import struct
from . import types
class KDCConnection(object):
def __init__(self, addr):
self.addr = addr
@staticmethod
def recv_all(socket, count):
data = ""
while count > 0:
buf = socket.recv(count)
if buf == "":
return data
data += buf
count -= len(buf)
return data
def send_kdc(self, message):
s = socket.socket(*self.addr[0:3])
if self.addr[2] == socket.IPPROTO_TCP:
s.connect(self.addr[4])
s.settimeout(3)
s.sendall(struct.pack('!i', len(message)))
s.sendall(message)
rep_len = struct.unpack('!i', self.recv_all(s, 4))
rep = self.recv_all(s, rep_len)
s.close()
return rep
elif self.addr[2] == socket.IPPROTO_UDP:
# s.connect(self.addr[4])
s.sendto(message, self.addr[4])
# this is a UDP socket, so just specify a buffer larger
# than a typical IP packet, and we'll get the next packet.
return s.recv(1500)
class KDCConnectionFactory(object):
def get_connections(self, realm):
# TODO marc: don't hardwire this.
realms = {"ATHENA.MIT.EDU" : (('kerberos.mit.edu', 88),),
"NERD-MILITIA.ORG" : (('kerberos.nerd-militia.org', 88),),
"1TS.ORG" : (('kerberos.1ts.org', 88),),
"TOYBOX.ORG" : (('69.25.196.68', 88),),
}
if realm not in realms:
raise types.KerberosException("No KDCs for realm {0}".format(realm))
for server in realms[realm]:
for addr in socket.getaddrinfo(
server[0], server[1], 0, 0, socket.IPPROTO_UDP):
yield KDCConnection(addr)
| bsd-2-clause | Python |
99357ceafe432e9f0bcc4278ec3a90d9f923e074 | disable 'children.csv' as well (500k records, no thank you) | hsr-ba-fs15-dat/opendatahub,hsr-ba-fs15-dat/opendatahub,hsr-ba-fs15-dat/opendatahub,hsr-ba-fs15-dat/opendatahub,hsr-ba-fs15-dat/opendatahub | src/main/python/hub/tests/tests_fixtures.py | src/main/python/hub/tests/tests_fixtures.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework.test import APITestCase
from hub.management.commands.loadfixtures import Command as LoadFixtures
class FixtureTest(APITestCase):
@classmethod
def setUpClass(cls):
cls.format_list = [fmt['name'] for fmt in client.get('/api/v1/format/').data]
LoadFixtures(parse=False).handle()
EXCLUDED_DOCUMENTS = [
'Dummy', # those are for paging tests and just repeat
'employee', # excessive amounts of data, actually segfaults for interlis1
'children' # same
]
def find_fixtures(client):
documents = client.get('/api/v1/document/?count=50&page=1')
transformations = client.get('/api/v1/transformation/?count=50&page=1')
fixtures = []
for doc in documents.data['results']:
if all(doc['name'].find(excluded) < 0 for excluded in EXCLUDED_DOCUMENTS):
file_groups = client.get(doc['file_groups'])
for fg in file_groups.data:
fixtures.append(('ODH{}'.format(fg['id']), fg['data']))
for trf in transformations.data['results']:
fixtures.append(('TRF{}'.format(trf['id']), '/api/v1/transformation/{}/data/'.format(trf['id'])))
return fixtures
def get_fixture_test(id, url):
def fixture_test(self):
for fmt in self.format_list:
data_url = '{}?fmt={}'.format(url, fmt)
try:
response = self.client.get(data_url)
print '{} -> {}'.format(data_url, response.status_code)
self.assertEqual(200, response.status_code)
except Exception as e:
self.fail('Format {} failed with error {}'.format(fmt, e.message))
return fixture_test
from rest_framework.test import APIClient
LoadFixtures(parse=False).handle()
client = APIClient()
fixtures = find_fixtures(client)
for (id, url) in fixtures:
test = get_fixture_test(id, url)
test_name = 'test_{}'.format(id.lower())
setattr(FixtureTest, test_name, test)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework.test import APITestCase
from hub.management.commands.loadfixtures import Command as LoadFixtures
class FixtureTest(APITestCase):
@classmethod
def setUpClass(cls):
cls.format_list = [fmt['name'] for fmt in client.get('/api/v1/format/').data]
LoadFixtures(parse=False).handle()
EXCLUDED_DOCUMENTS = [
'Dummy', # those are for paging tests and just repeat
'employee' # excessive amounts of data, actually segfaults for interlis1
]
def find_fixtures(client):
documents = client.get('/api/v1/document/?count=50&page=1')
transformations = client.get('/api/v1/transformation/?count=50&page=1')
fixtures = []
for doc in documents.data['results']:
if all(doc['name'].find(excluded) < 0 for excluded in EXCLUDED_DOCUMENTS):
file_groups = client.get(doc['file_groups'])
for fg in file_groups.data:
fixtures.append(('ODH{}'.format(fg['id']), fg['data']))
for trf in transformations.data['results']:
fixtures.append(('TRF{}'.format(trf['id']), '/api/v1/transformation/{}/data/'.format(trf['id'])))
return fixtures
def get_fixture_test(id, url):
def fixture_test(self):
for fmt in self.format_list:
data_url = '{}?fmt={}'.format(url, fmt)
try:
response = self.client.get(data_url)
print '{} -> {}'.format(data_url, response.status_code)
self.assertEqual(200, response.status_code)
except Exception as e:
self.fail('Format {} failed with error {}'.format(fmt, e.message))
return fixture_test
from rest_framework.test import APIClient
LoadFixtures(parse=False).handle()
client = APIClient()
fixtures = find_fixtures(client)
for (id, url) in fixtures:
test = get_fixture_test(id, url)
test_name = 'test_{}'.format(id.lower())
setattr(FixtureTest, test_name, test)
| mit | Python |
0421087e7feb3bd2c8386b41df6cdba58a3b35a1 | Update notice | plus1s/shadowsocks-py-mu,plus1s/shadowsocks-py-mu | shadowsocks/config_example.py | shadowsocks/config_example.py | # !!! Please rename this file as config.py BEFORE editing it !!!
import logging
# !!! Only edit this line when you update your configuration file !!!
# After you update, the value of CONFIG_VERSION in config.py and
# config_example.py should be the same in order to start the server
CONFIG_VERSION = '20160623-1'
# Database Config
MYSQL_HOST = 'mengsky.net'
MYSQL_PORT = 3306
MYSQL_USER = 'root'
MYSQL_PASS = 'root'
MYSQL_DB = 'shadowsocks'
MYSQL_USER_TABLE = 'user'
MYSQL_TIMEOUT = 30
# It is not necessary to change the password if you only listen on 127.0.0.1
MANAGE_PASS = 'passwd'
# if you want manage in other server you should set this value to global ip
MANAGE_BIND_IP = '127.0.0.1'
# make sure this port is idle
MANAGE_PORT = 65000
# SS Panel API Setting
# Version of Panel: V2 or V3. V2 not support API thus no need to change
# anything in the following settings
PANEL_VERSION = 'V2'
API_URL = 'http://domain/mu'
# API Key of SS-Panel V3 (you can find this in the .env file)
API_PASS = 'mupass'
NODE_ID = '1'
CHECKTIME = 15
SYNCTIME = 600
# Choose True if you want to use custom method
CUSTOM_METHOD = True
# BIND IP
# if you want to bind ipv4 and ipv6 please use '::'
# if you want to bind only all of ipv4 please use '0.0.0.0'
# if you want to bind a specific IP you may use something like '4.4.4.4'
SS_BIND_IP = '::'
# This default method will be replaced by database record if applicable
SS_METHOD = 'aes-256-cfb'
# Choose whether enforce Shadowsocks One Time Auth (OTA)
# OTA will still be enabled for the client if it sends an AUTH Address type(0x10)
SS_OTA = False
# Skip listening these ports
SS_SKIP_PORTS = [80]
# Firewall Settings
# These settings are to prevent user from abusing your service
SS_FIREWALL_ENABLED = True
# Mode = whitelist or blacklist
SS_FIREWALL_MODE = 'blacklist'
# Member ports should be INTEGERS
# Only Ban these target ports (for blacklist mode)
SS_BAN_PORTS = [22, 23, 25]
# Only Allow these target ports (for whitelist mode)
SS_ALLOW_PORTS = [53, 80, 443, 8080, 8081]
# Trusted users (all target ports will be not be blocked for these users)
SS_FIREWALL_TRUSTED = [443]
# Shadowsocks Time Out
# It should > 180s as some protocol has keep-alive packet of 3 min, Eg.: bt
SS_TIMEOUT = 185
# Shadowsocks TCP Fastopen (Some OS may not support this, Eg.: Windows)
SS_FASTOPEN = False
# Shadowsocks verbose
SS_VERBOSE = False
# Banned Target IP List
SS_FORBIDDEN_IP = []
# LOG CONFIG
LOG_ENABLE = True
# Available Log Level: logging.NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL
LOG_LEVEL = logging.INFO
LOG_FILE = 'shadowsocks.log'
# The following format is the one suggested for debugging
# LOG_FORMAT = '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
LOG_FORMAT = '%(asctime)s %(levelname)s %(message)s'
LOG_DATE_FORMAT = '%b %d %H:%M:%S'
| # !!! Please rename this file as config.py BEFORE editing it !!!
import logging
# !!! Do NOT touch this line !!!
CONFIG_VERSION = '20160623-1'
# Database Config
MYSQL_HOST = 'mengsky.net'
MYSQL_PORT = 3306
MYSQL_USER = 'root'
MYSQL_PASS = 'root'
MYSQL_DB = 'shadowsocks'
MYSQL_USER_TABLE = 'user'
MYSQL_TIMEOUT = 30
# It is not necessary to change the password if you only listen on 127.0.0.1
MANAGE_PASS = 'passwd'
# if you want manage in other server you should set this value to global ip
MANAGE_BIND_IP = '127.0.0.1'
# make sure this port is idle
MANAGE_PORT = 65000
# SS Panel API Setting
# Version of Panel: V2 or V3. V2 not support API thus no need to change
# anything in the following settings
PANEL_VERSION = 'V2'
API_URL = 'http://domain/mu'
# API Key of SS-Panel V3 (you can find this in the .env file)
API_PASS = 'mupass'
NODE_ID = '1'
CHECKTIME = 15
SYNCTIME = 600
# Choose True if you want to use custom method
CUSTOM_METHOD = True
# BIND IP
# if you want to bind ipv4 and ipv6 please use '::'
# if you want to bind only all of ipv4 please use '0.0.0.0'
# if you want to bind a specific IP you may use something like '4.4.4.4'
SS_BIND_IP = '::'
# This default method will be replaced by database record if applicable
SS_METHOD = 'aes-256-cfb'
# Choose whether enforce Shadowsocks One Time Auth (OTA)
# OTA will still be enabled for the client if it sends an AUTH Address type(0x10)
SS_OTA = False
# Skip listening these ports
SS_SKIP_PORTS = [80]
# Firewall Settings
# These settings are to prevent user from abusing your service
SS_FIREWALL_ENABLED = True
# Mode = whitelist or blacklist
SS_FIREWALL_MODE = 'blacklist'
# Member ports should be INTEGERS
# Only Ban these target ports (for blacklist mode)
SS_BAN_PORTS = [22, 23, 25]
# Only Allow these target ports (for whitelist mode)
SS_ALLOW_PORTS = [53, 80, 443, 8080, 8081]
# Trusted users (all target ports will be not be blocked for these users)
SS_FIREWALL_TRUSTED = [443]
# Shadowsocks Time Out
# It should > 180s as some protocol has keep-alive packet of 3 min, Eg.: bt
SS_TIMEOUT = 185
# Shadowsocks TCP Fastopen (Some OS may not support this, Eg.: Windows)
SS_FASTOPEN = False
# Shadowsocks verbose
SS_VERBOSE = False
# Banned Target IP List
SS_FORBIDDEN_IP = []
# LOG CONFIG
LOG_ENABLE = True
# Available Log Level: logging.NOTSET|DEBUG|INFO|WARNING|ERROR|CRITICAL
LOG_LEVEL = logging.INFO
LOG_FILE = 'shadowsocks.log'
# The following format is the one suggested for debugging
# LOG_FORMAT = '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
LOG_FORMAT = '%(asctime)s %(levelname)s %(message)s'
LOG_DATE_FORMAT = '%b %d %H:%M:%S'
| apache-2.0 | Python |
0b7730e97a64e87a1d0d5ba27290f56fd057ce26 | use a_0_0 in lode.py | tkf/railgun,tkf/railgun | samples/lode/lode.py | samples/lode/lode.py | from railgun import SimObject, relpath
class LinearODE(SimObject):
"""
Solve D-dimensional linear ordinary differential equations
Equation::
dX/dt(t) = A X(t)
X: D-dimensional vector
A: DxD matrix
"""
_clibname_ = 'liblode.so' # name of shared library
_clibdir_ = relpath('.', __file__) # library directory
_cmembers_ = [ # declaring members of struct
'num_d', # num_* as size of array (no need to write `int`)
'num_s = 10000', # setting default value
'double dt = 0.001',
'double a[d][d]', # num_d x num_d array
'double x[s][d]', # num_s x num_d array
]
_cfuncs_ = [ # declaring functions to load
"x run(s< s_end=num_s)"
# argument `s_end` has index `s` type and default is `num_s`
# '<' means it is upper bound of the index so the range is [1, num_s]
# this function returns member x
]
def main():
lode = LinearODE(num_d=2) # set num_d
lode.x[0] = [1, 0] # access c-member "VAR" via lode.VAR
lode.a = [[0, 1], [-1, 0]]
x1 = lode.run().copy()
lode.setv(a_0_0=-0.5) # set lode.a[i][j]=v via lode.set(a_'i'_'j'=v)
x2 = lode.run().copy()
import pylab
for (i, x) in enumerate([x1, x2]):
pylab.subplot(2, 2, 1 + i)
pylab.plot(x[:,0])
pylab.plot(x[:,1])
pylab.subplot(2, 2, 3 + i)
pylab.plot(x[:,0], x[:,1])
pylab.show()
if __name__ == '__main__':
main()
| from railgun import SimObject, relpath
class LinearODE(SimObject):
"""
Solve D-dimensional linear ordinary differential equations
Equation::
dX/dt(t) = A X(t)
X: D-dimensional vector
A: DxD matrix
"""
_clibname_ = 'liblode.so' # name of shared library
_clibdir_ = relpath('.', __file__) # library directory
_cmembers_ = [ # declaring members of struct
'num_d', # num_* as size of array (no need to write `int`)
'num_s = 10000', # setting default value
'double dt = 0.001',
'double a[d][d]', # num_d x num_d array
'double x[s][d]', # num_s x num_d array
]
_cfuncs_ = [ # declaring functions to load
"x run(s< s_end=num_s)"
# argument `s_end` has index `s` type and default is `num_s`
# '<' means it is upper bound of the index so the range is [1, num_s]
# this function returns member x
]
def main():
lode = LinearODE(num_d=2) # set num_d
lode.x[0] = [1, 0] # access c-member via lode.VAR
lode.a = [[0, 1], [-1, 0]]
x = lode.run()
import pylab
pylab.subplot(211)
pylab.plot(x[:,0])
pylab.plot(x[:,1])
pylab.subplot(212)
pylab.plot(x[:,0], x[:,1])
pylab.show()
if __name__ == '__main__':
main()
| mit | Python |
72c856a83d6dd538d6832027bf335f5ee3a70c30 | Print in a way that's valid constructor | PythonScanClient/PyScanClient,PythonScanClient/PyScanClient | scan/commands/Comment.py | scan/commands/Comment.py | '''
Created on Mar 8,2015
@author: qiuyx
'''
from scan.commands.Command import Command
import xml.etree.ElementTree as ET
class Comment(Command):
'''
Command to add comment.
SubClass of Command
'''
def __init__(self, text="This is an example comment."):
'''
@param text: Comment Text.
Usage::
>>>c=Comment("Scan Start.")
'''
self.__text=text
def genXML(self):
xml=ET.Element('comment');
ET.SubElement(xml, 'text').text = self.__text
return xml
def __repr__(self):
return self.toCmdString()
def toCmdString(self):
'''
Give a printing of this Command.
'''
return "Comment('%s')" % self.__text
| '''
Created on Mar 8,2015
@author: qiuyx
'''
from scan.commands.Command import Command
import xml.etree.ElementTree as ET
class Comment(Command):
'''
Command to add comment.
SubClass of Command
'''
def __init__(self, text="This is an example comment."):
'''
@param text: Comment Text.
Usage::
>>>c=Comment("Scan Start.")
'''
self.__text=text
def genXML(self):
xml=ET.Element('comment');
ET.SubElement(xml, 'text').text = self.__text
return xml
def __repr__(self):
return 'Comment(Comment='+self.__text+')'
def toCmdString(self):
'''
Give a printing of this Command.
'''
return 'Comment(Comment='+self.__text+')'
| epl-1.0 | Python |
e316576e519dbf1b0f96726cfa431c7e112d850a | Add docstirng for Plot | openworm/PyOpenWorm,gsarma/PyOpenWorm,gsarma/PyOpenWorm,openworm/PyOpenWorm | PyOpenWorm/plot.py | PyOpenWorm/plot.py | from PyOpenWorm import *
class Plot(DataObject):
"""
Object for storing plot data in PyOpenWorm.
Parameters
----------
data : 2D list (list of lists)
List of XY coordinates for this Plot.
Example usage ::
>>> pl = Plot([[1, 2], [3, 4]])
>>> pl.get_data()
# [[1, 2], [3, 4]]
"""
def __init__(self, data=False, *args, **kwargs):
DataObject.__init__(self, **kwargs)
Plot.DatatypeProperty('_data_string', self, multiple=False)
if data:
self.set_data(data)
def _to_string(self, input_list):
"""
Converts input_list to a string
for serialized storage in PyOpenWorm.
"""
return '|'.join([str(item) for item in input_list])
def _to_list(self, input_string):
"""
Converts from internal serlialized string
to a 2D list.
"""
out_list = []
for pair_string in input_string.split('|'):
pair_as_list = pair_string \
.replace('[', '') \
.replace(']', '') \
.split(',')
out_list.append(
map(float, pair_as_list)
)
return out_list
def set_data(self, data):
"""
Set the data attribute, which is user-facing,
as well as the serialized _data_string
attribute, which is used for db storage.
"""
try:
# make sure we're dealing with a 2D list
assert isinstance(data, list)
assert isinstance(data[0], list)
self._data_string(self._to_string(data))
self.data = data
except (AssertionError, IndexError):
raise ValueError('Attribute "data" must be a 2D list of numbers.')
def get_data(self):
"""
Get the data stored for this plot.
"""
if self._data_string():
return self._to_list(self._data_string())
else:
raise AttributeError('You must call "set_data" first.')
| from PyOpenWorm import *
class Plot(DataObject):
"""
Object for storing plot data in PyOpenWorm.
Must be instantiated with a 2D list of coordinates.
"""
def __init__(self, data=False, *args, **kwargs):
DataObject.__init__(self, **kwargs)
Plot.DatatypeProperty('_data_string', self, multiple=False)
if data:
self.set_data(data)
def _to_string(self, input_list):
"""
Converts input_list to a string
for serialized storage in PyOpenWorm.
"""
return '|'.join([str(item) for item in input_list])
def _to_list(self, input_string):
"""
Converts from internal serlialized string
to a 2D list.
"""
out_list = []
for pair_string in input_string.split('|'):
pair_as_list = pair_string \
.replace('[', '') \
.replace(']', '') \
.split(',')
out_list.append(
map(float, pair_as_list)
)
return out_list
def set_data(self, data):
"""
Set the data attribute, which is user-facing,
as well as the serialized _data_string
attribute, which is used for db storage.
"""
try:
# make sure we're dealing with a 2D list
assert isinstance(data, list)
assert isinstance(data[0], list)
self._data_string(self._to_string(data))
self.data = data
except (AssertionError, IndexError):
raise ValueError('Attribute "data" must be a 2D list of numbers.')
def get_data(self):
"""
Get the data stored for this plot.
"""
if self._data_string():
return self._to_list(self._data_string())
else:
raise AttributeError('You must call "set_data" first.')
| mit | Python |
10981892c236658dd3d4d9cd6caceedf300088bb | Remove unused import | erinspace/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,jeffreyliu3230/scrapi,icereval/scrapi,fabianvf/scrapi,mehanig/scrapi,ostwald/scrapi,mehanig/scrapi,felliott/scrapi,alexgarciac/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi | scripts/rename.py | scripts/rename.py | import logging
from cqlengine import Token
from scrapi import settings
from scrapi.database import _manager
from scrapi.linter import RawDocument
from scrapi.processing.elasticsearch import es
from scrapi.processing.cassandra import DocumentModel
from scrapi.tasks import normalize, process_normalized, process_raw
_manager.setup()
logger = logging.getLogger(__name__)
def document_generator(source, target):
query = DocumentModel.objects.all().filter(source=source).limit(1000)
page = list(query)
while len(page) > 0:
for doc in page:
try:
yield RawDocument({
'doc': doc.doc,
'docID': doc.docID,
'source': target,
'filetype': doc.filetype,
'timestamps': doc.timestamps
})
except Exception as e:
logger.exception(e)
page = list(query.filter(pk__token__gt=Token(page[-1].pk)))
def rename(source, target):
count = 0
exceptions = []
for raw in document_generator(source, target):
count += 1
try:
process_raw(raw)
process_normalized(normalize(raw, raw['source']), raw)
except Exception as e:
logger.exception(e)
exceptions.append(e)
for ex in exceptions:
logger.exception(e)
logger.error('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
assert es.count(settings.ELASTIC_INDEX, doc_type=source) == es.count(settings.ELASTIC_INDEX, doc_type=target)
| import logging
from cqlengine import Token, BatchQuery
from scrapi import settings
from scrapi.database import _manager
from scrapi.linter import RawDocument
from scrapi.processing.elasticsearch import es
from scrapi.processing.cassandra import DocumentModel
from scrapi.tasks import normalize, process_normalized, process_raw
_manager.setup()
logger = logging.getLogger(__name__)
def document_generator(source, target):
query = DocumentModel.objects.all().filter(source=source).limit(1000)
page = list(query)
while len(page) > 0:
for doc in page:
try:
yield RawDocument({
'doc': doc.doc,
'docID': doc.docID,
'source': target,
'filetype': doc.filetype,
'timestamps': doc.timestamps
})
except Exception as e:
logger.exception(e)
page = list(query.filter(pk__token__gt=Token(page[-1].pk)))
def rename(source, target):
count = 0
exceptions = []
for raw in document_generator(source, target):
count += 1
try:
process_raw(raw)
process_normalized(normalize(raw, raw['source']), raw)
except Exception as e:
logger.exception(e)
exceptions.append(e)
for ex in exceptions:
logger.exception(e)
logger.error('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
assert es.count(settings.ELASTIC_INDEX, doc_type=source) == es.count(settings.ELASTIC_INDEX, doc_type=target)
| apache-2.0 | Python |
16fa50ae128b19479484a2f3d9c2c8b77f4c27b4 | Remove the test for abspath with an empty path - too hard to do in a cross-platform manner. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_ntpath.py | Lib/test/test_ntpath.py | import ntpath
import string
import os
errors = 0
def tester(fn, wantResult):
fn = string.replace(fn, "\\", "\\\\")
gotResult = eval(fn)
if wantResult != gotResult:
print "error!"
print "evaluated: " + str(fn)
print "should be: " + str(wantResult)
print " returned: " + str(gotResult)
print ""
global errors
errors = errors + 1
tester('ntpath.splitdrive("c:\\foo\\bar")', ('c:', '\\foo\\bar'))
tester('ntpath.splitunc("\\\\conky\\mountpoint\\foo\\bar")', ('\\\\conky\\mountpoint', '\\foo\\bar'))
tester('ntpath.splitdrive("c:/foo/bar")', ('c:', '/foo/bar'))
tester('ntpath.splitunc("//conky/mountpoint/foo/bar")', ('//conky/mountpoint', '/foo/bar'))
tester('ntpath.split("c:\\foo\\bar")', ('c:\\foo', 'bar'))
tester('ntpath.split("\\\\conky\\mountpoint\\foo\\bar")', ('\\\\conky\\mountpoint\\foo', 'bar'))
tester('ntpath.split("c:\\")', ('c:\\', ''))
tester('ntpath.split("\\\\conky\\mountpoint\\")', ('\\\\conky\\mountpoint', ''))
tester('ntpath.split("c:/")', ('c:/', ''))
tester('ntpath.split("//conky/mountpoint/")', ('//conky/mountpoint', ''))
tester('ntpath.isabs("c:\\")', 1)
tester('ntpath.isabs("\\\\conky\\mountpoint\\")', 1)
tester('ntpath.isabs("\\foo")', 1)
tester('ntpath.isabs("\\foo\\bar")', 1)
tester('ntpath.abspath("C:\\")', "C:\\")
if errors:
print str(errors) + " errors."
else:
print "No errors. Thank your lucky stars."
| import ntpath
import string
import os
errors = 0
def tester(fn, wantResult):
fn = string.replace(fn, "\\", "\\\\")
gotResult = eval(fn)
if wantResult != gotResult:
print "error!"
print "evaluated: " + str(fn)
print "should be: " + str(wantResult)
print " returned: " + str(gotResult)
print ""
global errors
errors = errors + 1
tester('ntpath.splitdrive("c:\\foo\\bar")', ('c:', '\\foo\\bar'))
tester('ntpath.splitunc("\\\\conky\\mountpoint\\foo\\bar")', ('\\\\conky\\mountpoint', '\\foo\\bar'))
tester('ntpath.splitdrive("c:/foo/bar")', ('c:', '/foo/bar'))
tester('ntpath.splitunc("//conky/mountpoint/foo/bar")', ('//conky/mountpoint', '/foo/bar'))
tester('ntpath.split("c:\\foo\\bar")', ('c:\\foo', 'bar'))
tester('ntpath.split("\\\\conky\\mountpoint\\foo\\bar")', ('\\\\conky\\mountpoint\\foo', 'bar'))
tester('ntpath.split("c:\\")', ('c:\\', ''))
tester('ntpath.split("\\\\conky\\mountpoint\\")', ('\\\\conky\\mountpoint', ''))
tester('ntpath.split("c:/")', ('c:/', ''))
tester('ntpath.split("//conky/mountpoint/")', ('//conky/mountpoint', ''))
tester('ntpath.isabs("c:\\")', 1)
tester('ntpath.isabs("\\\\conky\\mountpoint\\")', 1)
tester('ntpath.isabs("\\foo")', 1)
tester('ntpath.isabs("\\foo\\bar")', 1)
tester('ntpath.abspath("C:\\")', "C:\\")
tester('ntpath.abspath("")', os.getcwd())
if errors:
print str(errors) + " errors."
else:
print "No errors. Thank your lucky stars."
| mit | Python |
d59f6412c6f3103e6bd36f23ad9b0eb86b9c6069 | fix what was likely a typo, and make a default return value | fabianvf/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,erinspace/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi | scrapi/harvesters/bhl.py | scrapi/harvesters/bhl.py | """Harvests Biodiversity Heritage Library OAI Repository (BHL) metadata for ingestion into the SHARE service.
Example API call: http://www.biodiversitylibrary.org/oai?verb=ListRecords&metadataPrefix=oai_dc&from=2015-02-01
"""
import re
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, default_name_parser
def institution_name_parser(names):
''' Parse institution names '''
return [{
'name': inst
} for inst in names]
def process_contributors(*args):
''' Parse people name for BHL'''
names = [name for sublist in args for name in sublist]
# Filter people names and clean dates and extra spaces.
people = [re.sub(r'\d+-(\d+)?', r'', n).strip() for n in filter(lambda x: ', ' in x, names)] or []
# Filter institution names and clean tabs.
inst = [re.sub(r'\\t', r'', n).strip() for n in filter(lambda x: ', ' not in x, names)] or []
# Parse names differently if they're people's or institutions' names.
return (default_name_parser(people) + institution_name_parser(inst)) or [{'name': ''}]
class BHLHarvester(OAIHarvester):
short_name = 'bhl'
long_name = 'Biodiversity Heritage Library OAI Repository'
url = 'http://www.biodiversitylibrary.org/'
base_url = 'http://www.biodiversitylibrary.org/oai'
@property
def schema(self):
return updated_schema(self._schema, {
'contributors': ('//dc:creator/node()', '//dc:contributor/node()', process_contributors)
})
property_list = [
'type', 'date', 'relation', 'setSpec', 'rights'
]
| """Harvests Biodiversity Heritage Library OAI Repository (BHL) metadata for ingestion into the SHARE service.
Example API call: http://www.biodiversitylibrary.org/oai?verb=ListRecords&metadataPrefix=oai_dc&from=2015-02-01
"""
import re
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, default_name_parser
def institution_name_parser(names):
''' Parse institution names '''
return [{
'name': inst
} for inst in names]
def process_contributors(*args):
''' Parse people name for BHL'''
names = [name for sublist in args for name in sublist]
# Filter people names and clean dates and extra spaces.
people = [re.sub(r'\d+-(\d+)?', r'', n).strip() for n in filter(lambda x: ', ' in x, names)] or []
# Filter institution names and clean tabs.
inst = [re.sub(r'\\t', r'', n).strip() for n in filter(lambda x: ', ' not in x, names) or []]
# Parse names differently if they're people's or institutions' names.
return default_name_parser(people) + institution_name_parser(inst)
class BHLHarvester(OAIHarvester):
short_name = 'bhl'
long_name = 'Biodiversity Heritage Library OAI Repository'
url = 'http://www.biodiversitylibrary.org/'
base_url = 'http://www.biodiversitylibrary.org/oai'
@property
def schema(self):
return updated_schema(self._schema, {
'contributors': ('//dc:creator/node()', '//dc:contributor/node()', process_contributors)
})
property_list = [
'type', 'date', 'relation', 'setSpec', 'rights'
]
| apache-2.0 | Python |
f39d97d20b176bd5549714288643f04ee5d441c3 | add __all__ for import * | amaxwell/datatank_py | datatank_py/__init__.py | datatank_py/__init__.py | #!/usr/bin/env python
# coding: utf-8
# from glob import glob
# [x.strip(".py") for x in glob("*.py")]
__all__ = ['DTBitmap2D', 'DTDataFile', 'DTError', 'DTMask', 'DTMesh2D', 'DTPath2D', 'DTPathValues2D', 'DTPlot1D', 'DTPoint2D', 'DTPointCollection2D', 'DTPointValue2D', 'DTPointValueCollection2D', 'DTProgress', 'DTPyCoreImage', 'DTPyWrite', 'DTRegion2D', 'DTRegion3D', 'DTSeries', 'DTStructuredGrid2D', 'DTStructuredGrid3D', 'DTStructuredMesh2D', 'DTStructuredMesh3D', 'DTStructuredVectorField2D', 'DTStructuredVectorField3D', 'DTTriangularGrid2D', 'DTTriangularMesh2D', 'DTTriangularVectorField2D', 'DTVector2D']
| bsd-3-clause | Python | |
550dee3e13a0ee80d0bd9338c281e51fefdcfdc8 | Add format with slack attachments. | mathiasose/slacker_log_handler | slack_log_handler/__init__.py | slack_log_handler/__init__.py | import json
import traceback
from logging import Handler
from slacker import Slacker
class SlackLogHandler(Handler):
def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None):
Handler.__init__(self)
self.slack_chat = Slacker(api_key)
self.channel = channel
self.stack_trace = stack_trace
self.username = username
self.icon_url = icon_url
self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:'
if not self.channel.startswith('#'):
self.channel = '#' + self.channel
def emit(self, record):
message = '{}'.format(record.getMessage())
if self.stack_trace and record.exc_info:
message += '\n'
message += '\n'.join(traceback.format_exception(*record.exc_info))
attachments = [{
'fallback': self.username,
'color': 'danger',
'author_name': self.username,
'title': self.username,
'text': message
}]
self.slack_chat.chat.post_message(
channel=self.channel,
username=self.username,
icon_url=self.icon_url,
icon_emoji=self.icon_emoji,
attachments=json.dumps(attachments)
)
| import traceback
from logging import Handler
from slacker import Chat
class SlackLogHandler(Handler):
def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None):
Handler.__init__(self)
self.slack_chat = Chat(api_key)
self.channel = channel
self.stack_trace = stack_trace
self.username = username
self.icon_url = icon_url
self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:'
if not self.channel.startswith('#'):
self.channel = '#' + self.channel
def emit(self, record):
message = '{}'.format(record.getMessage())
if self.stack_trace and record.exc_info:
message += '\n'
message += '\n'.join(traceback.format_exception(*record.exc_info))
self.slack_chat.post_message(
text=message,
channel=self.channel,
username=self.username,
icon_url=self.icon_url,
icon_emoji=self.icon_emoji
) | apache-2.0 | Python |
c88cdf8c10def4b6a2be5556a04e793fe571053c | fix transaction management for 1.6 | nyaruka/smartmin,nyaruka/smartmin,caktus/smartmin,caktus/smartmin,caktus/smartmin,nyaruka/smartmin,caktus/smartmin | smartmin/csv_imports/tasks.py | smartmin/csv_imports/tasks.py | import StringIO
from smartmin import class_from_string
from django.utils import timezone
from .models import ImportTask
from time import sleep
from distutils.version import StrictVersion
import django
from celery.task import task
@task(track_started=True)
def csv_import(task_id): #pragma: no cover
from django.db import transaction
# there is a possible race condition between this task starting
# so we have a bit of loop here to fetch the task
tries = 0
task = None
while tries < 5 and not task:
try:
task = ImportTask.objects.get(pk=task_id)
except Exception as e:
# this object just doesn't exist yet, sleep a bit then try again
tries+=1
if tries >= 5:
raise e
else:
sleep(1)
log = StringIO.StringIO()
if StrictVersion(django.get_version()) < StrictVersion('1.6'):
transaction.enter_transaction_management()
transaction.managed()
try:
task.task_id = csv_import.request.id
task.log("Started import at %s" % timezone.now())
task.log("--------------------------------")
task.save()
transaction.commit()
model = class_from_string(task.model_class)
records = model.import_csv(task, log)
task.log(log.getvalue())
task.log("Import finished at %s" % timezone.now())
task.log("%d record(s) added." % len(records))
transaction.commit()
except Exception as e:
transaction.rollback()
import traceback
traceback.print_exc(e)
task.log("\nError: %s\n" % e)
task.log(log.getvalue())
transaction.commit()
raise e
finally:
transaction.leave_transaction_management()
else:
task.task_id = csv_import.request.id
task.log("Started import at %s" % timezone.now())
task.log("--------------------------------")
task.save()
try:
with transaction.atomic():
model = class_from_string(task.model_class)
records = model.import_csv(task, log)
task.log(log.getvalue())
task.log("Import finished at %s" % timezone.now())
task.log("%d record(s) added." % len(records))
except Exception as e:
import traceback
traceback.print_exc(e)
task.log("\nError: %s\n" % e)
task.log(log.getvalue())
raise e
return task
| import StringIO
from smartmin import class_from_string
from django.utils import timezone
from .models import ImportTask
from time import sleep
from celery.task import task
@task(track_started=True)
def csv_import(task_id): #pragma: no cover
from django.db import transaction
# there is a possible race condition between this task starting
# so we have a bit of loop here to fetch the task
tries = 0
task = None
while tries < 5 and not task:
try:
task = ImportTask.objects.get(pk=task_id)
except Exception as e:
# this object just doesn't exist yet, sleep a bit then try again
tries+=1
if tries >= 5:
raise e
else:
sleep(1)
transaction.enter_transaction_management()
transaction.managed()
log = StringIO.StringIO()
try:
task.task_id = csv_import.request.id
task.log("Started import at %s" % timezone.now())
task.log("--------------------------------")
task.save()
transaction.commit()
model = class_from_string(task.model_class)
records = model.import_csv(task, log)
task.log(log.getvalue())
task.log("Import finished at %s" % timezone.now())
task.log("%d record(s) added." % len(records))
transaction.commit()
except Exception as e:
transaction.rollback()
import traceback
traceback.print_exc(e)
task.log("\nError: %s\n" % e)
task.log(log.getvalue())
transaction.commit()
raise e
finally:
transaction.leave_transaction_management()
return task
| bsd-3-clause | Python |
fa5a279a4585ce1c2ae04e15191020a057c49c2d | Replace `property.SingleResource` with (newer) `reference.SingleResource`. | ZeitOnline/zeit.content.portraitbox | src/zeit/content/portraitbox/portraitbox.py | src/zeit/content/portraitbox/portraitbox.py |
from zeit.cms.i18n import MessageFactory as _
import lxml.builder
import lxml.objectify
import zeit.cms.content.property
import zeit.cms.content.reference
import zeit.cms.content.xmlsupport
import zeit.cms.interfaces
import zeit.cms.type
import zeit.content.portraitbox.interfaces
import zeit.wysiwyg.html
import zope.interface
class Portraitbox(zeit.cms.content.xmlsupport.XMLContentBase):
zope.interface.implements(zeit.content.portraitbox.interfaces.IPortraitbox,
zeit.cms.interfaces.IAsset)
default_template = (
u'<container layout="artbox" label="portrait" '
u'xmlns:py="http://codespeak.net/lxml/objectify/pytype" />')
name = zeit.cms.content.property.ObjectPathProperty('.block.title')
text = zeit.cms.content.property.Structure('.block.text')
image = zeit.cms.content.reference.SingleResource(
'.block.image', xml_reference_name='image')
class PortraiboxType(zeit.cms.type.XMLContentTypeDeclaration):
interface = zeit.content.portraitbox.interfaces.IPortraitbox
type = 'portraitbox'
factory = Portraitbox
title = _('Portraitbox')
class PortraitboxHTMLContent(zeit.wysiwyg.html.HTMLContentBase):
"""HTML content of an article."""
zope.component.adapts(zeit.content.portraitbox.interfaces.IPortraitbox)
path = lxml.objectify.ObjectPath('.block.text')
def get_tree(self):
xml = zope.proxy.removeAllProxies(self.context.xml)
text = self.path(xml, None)
if text is None:
self.path.setattr(xml, '')
text = self.path(xml)
elif len(text):
for child in text.iterchildren():
if child.tag == 'p':
break
else:
# There was no <p> node. Wrap the entire contents of text into
# a new <p>
children = [text.text]
for child in text.getchildren():
children.append(child)
text.remove(child)
text.append(lxml.builder.E.p(*children))
return text
|
from zeit.cms.i18n import MessageFactory as _
import lxml.builder
import lxml.objectify
import zeit.cms.content.property
import zeit.cms.content.xmlsupport
import zeit.cms.interfaces
import zeit.cms.type
import zeit.content.portraitbox.interfaces
import zeit.wysiwyg.html
import zope.interface
class Portraitbox(zeit.cms.content.xmlsupport.XMLContentBase):
    """XML-backed "portraitbox" content object: a title, rich text and image."""
    # zope.interface class-advice style declaration (Python-2-era zope).
    zope.interface.implements(zeit.content.portraitbox.interfaces.IPortraitbox,
                              zeit.cms.interfaces.IAsset)
    # Skeleton XML document used when a new portraitbox is created.
    default_template = (
        u'<container layout="artbox" label="portrait" '
        u'xmlns:py="http://codespeak.net/lxml/objectify/pytype" />')
    # Title text, mapped to the <block><title> node of the content XML.
    name = zeit.cms.content.property.ObjectPathProperty('.block.title')
    # Rich-text body kept as raw XML structure under <block><text>.
    text = zeit.cms.content.property.Structure('.block.text')
    # Referenced image resource stored under <block><image>; 'attributes'
    # presumably selects which attributes are recorded on the reference
    # node -- confirm against SingleResource.
    image = zeit.cms.content.property.SingleResource(
        '.block.image', xml_reference_name='image',
        attributes=('base_id', 'src'))
class PortraiboxType(zeit.cms.type.XMLContentTypeDeclaration):
    """Content-type declaration registering Portraitbox with the CMS.

    NOTE(review): the class name is missing a 't' ("Portraibox"); kept
    as-is because it may be referenced elsewhere in the project.
    """
    interface = zeit.content.portraitbox.interfaces.IPortraitbox
    type = 'portraitbox'  # type identifier stored in content XML
    factory = Portraitbox
    title = _('Portraitbox')  # translatable display title
class PortraitboxHTMLContent(zeit.wysiwyg.html.HTMLContentBase):
    """HTML content of an article."""
    # NOTE(review): zope.component and zope.proxy are used below but are not
    # among this chunk's visible imports -- confirm they are made available
    # elsewhere (zope subpackages are often importable via the namespace).
    zope.component.adapts(zeit.content.portraitbox.interfaces.IPortraitbox)
    # Object path locating the editable rich-text node in the content XML.
    path = lxml.objectify.ObjectPath('.block.text')
    def get_tree(self):
        """Return the <block><text> node, creating or normalizing it first.

        Ensures that a non-empty text node has at least one <p> child so
        the WYSIWYG editor has a paragraph to work with.
        """
        xml = zope.proxy.removeAllProxies(self.context.xml)
        text = self.path(xml, None)
        if text is None:
            # No <block><text> yet: create an empty one.
            self.path.setattr(xml, '')
            text = self.path(xml)
        elif len(text):
            # for/else: the else branch runs only if no <p> child was found.
            for child in text.iterchildren():
                if child.tag == 'p':
                    break
            else:
                # There was no <p> node. Wrap the entire contents of text into
                # a new <p>
                children = [text.text]
                # getchildren() returns a snapshot list, so removing nodes
                # from the tree while iterating it is safe here.
                for child in text.getchildren():
                    children.append(child)
                    text.remove(child)
                text.append(lxml.builder.E.p(*children))
        return text
| bsd-3-clause | Python |
50f3804301549cbba1c1ca6d2bc5fb1d2e500d12 | make sure output dir exists | autopulated/expose-defs | scripts/processConfig.py | scripts/processConfig.py | #! /usr/bin/env python
# This is an example of using a pre-build script to process the merged config
# file, to generate a header (prebuild-demo/defs.h), which can be #included by
# other modules
import json
import os
def generateDefinitions(config):
    """Recursively build C '#define' lines from a merged yotta config dict.

    A (sub-)dict opts in to exposure by containing the marker key
    '$exposeDef'.  Within an opted-in dict, every key that does not start
    with '$' becomes a line of the form '\n#define KEY value' with the key
    upper-cased.  Nested dicts are always descended into and each decides
    independently whether it exposes its own keys.

    Returns the concatenated definition lines ('' if nothing is exposed).
    """
    parts = []
    # Membership tests work directly on the dict; no need for .keys().
    expose_definitions = '$exposeDef' in config
    for k, v in config.items():
        if isinstance(v, dict):
            # Recurse: nested sections carry their own '$exposeDef' flag.
            parts.append(generateDefinitions(v))
        elif expose_definitions and not k.startswith('$'):
            parts.append('\n#define %s %s' % (k.upper(), v))
    # Single join instead of repeated string concatenation.
    return ''.join(parts)
# Read the merged configuration JSON produced by yotta; the path is handed
# in through the environment.
with open(os.environ['YOTTA_MERGED_CONFIG_FILE'], 'r') as f:
    # NOTE(review): json.load's ``encoding`` argument is Python-2 only; it
    # is ignored on Python 3 and was removed in 3.9 -- confirm the target
    # interpreter for this script.
    merged_config = json.load(f, encoding='utf-8')
definitions = generateDefinitions(merged_config)
# Make sure the output directory exists before writing the header.
# (The exists/makedirs pair is slightly racy; os.makedirs(path,
# exist_ok=True) would be the atomic Python >= 3.2 form.)
if not os.path.exists('./expose-defs'):
    os.makedirs('./expose-defs')
# Emit the collected '#define' lines as a header other modules can #include.
with open('./expose-defs/defs.h', 'w') as outf:
    outf.write(definitions)
| #! /usr/bin/env python
# This is an example of using a pre-build script to process the merged config
# file, to generate a header (prebuild-demo/defs.h), which can be #included by
# other modules
import json
import os
def generateDefinitions(config):
    """Collect '#define' lines for every exposed key in *config*.

    A dict exposes its plain keys only when it contains the marker key
    '$exposeDef'; keys beginning with '$' are never emitted.  Nested
    dicts are walked recursively, each deciding exposure for itself.
    """
    emit = '$exposeDef' in config.keys()
    chunks = []
    for key, value in config.items():
        if isinstance(value, dict):
            chunks.append(generateDefinitions(value))
        elif emit and not key.startswith('$'):
            chunks.append('\n#define %s %s' % (key.upper(), value))
    return ''.join(chunks)
# Read the merged configuration JSON produced by yotta; the path is handed
# in through the environment.
with open(os.environ['YOTTA_MERGED_CONFIG_FILE'], 'r') as f:
    # NOTE(review): json.load's ``encoding`` argument is Python-2 only; it
    # is ignored on Python 3 and was removed in 3.9 -- confirm the target
    # interpreter for this script.
    merged_config = json.load(f, encoding='utf-8')
definitions = generateDefinitions(merged_config)
# NOTE(review): assumes ./expose-defs already exists; open() raises
# IOError/FileNotFoundError otherwise.
with open('./expose-defs/defs.h', 'w') as outf:
    outf.write(definitions)
| isc | Python |
ed68bd18b88f349a7348006a2e14cdddbc993da7 | Upgrade libchromiumcontent to Chrome 37. | beni55/electron,gbn972/electron,chrisswk/electron,mrwizard82d1/electron,Andrey-Pavlov/electron,yan-foto/electron,roadev/electron,abhishekgahlot/electron,MaxWhere/electron,nicholasess/electron,Jonekee/electron,synaptek/electron,edulan/electron,tincan24/electron,faizalpribadi/electron,davazp/electron,tylergibson/electron,gstack/infinium-shell,the-ress/electron,astoilkov/electron,yalexx/electron,gabrielPeart/electron,jjz/electron,gabriel/electron,dongjoon-hyun/electron,miniak/electron,jaanus/electron,ankitaggarwal011/electron,ianscrivener/electron,davazp/electron,gbn972/electron,chriskdon/electron,jannishuebl/electron,abhishekgahlot/electron,rhencke/electron,destan/electron,arusakov/electron,nagyistoce/electron-atom-shell,thingsinjars/electron,mirrh/electron,baiwyc119/electron,shaundunne/electron,arusakov/electron,pirafrank/electron,Neron-X5/electron,fritx/electron,etiktin/electron,Evercoder/electron,tonyganch/electron,Zagorakiss/electron,soulteary/electron,matiasinsaurralde/electron,chrisswk/electron,bobwol/electron,adamjgray/electron,tinydew4/electron,wolfflow/electron,icattlecoder/electron,howmuchcomputer/electron,DivyaKMenon/electron,simonfork/electron,vipulroxx/electron,mattotodd/electron,oiledCode/electron,systembugtj/electron,smczk/electron,maxogden/atom-shell,jlhbaseball15/electron,deepak1556/atom-shell,shaundunne/electron,tomashanacek/electron,jonatasfreitasv/electron,fireball-x/atom-shell,vipulroxx/electron,natgolov/electron,carsonmcdonald/electron,maxogden/atom-shell,ianscrivener/electron,bwiggs/electron,mirrh/electron,nicobot/electron,Ivshti/electron,kenmozi/electron,takashi/electron,anko/electron,bobwol/electron,edulan/electron,IonicaBizauKitchen/electron,jhen0409/electron,Zagorakiss/electron,renaesop/electron,leftstick/electron,tincan24/electron,astoilkov/electron,yalexx/electron,deed02392/electron,digideskio/electron,christian-bromann/electron,nicholasess/electron,gabri
el/electron,lrlna/electron,mhkeller/electron,wan-qy/electron,cos2004/electron,zhakui/electron,Faiz7412/electron,matiasinsaurralde/electron,John-Lin/electron,brenca/electron,electron/electron,evgenyzinoviev/electron,shennushi/electron,mubassirhayat/electron,preco21/electron,tylergibson/electron,bobwol/electron,DivyaKMenon/electron,hokein/atom-shell,mjaniszew/electron,BionicClick/electron,etiktin/electron,jsutcodes/electron,LadyNaggaga/electron,gerhardberger/electron,etiktin/electron,bwiggs/electron,bpasero/electron,kokdemo/electron,bbondy/electron,subblue/electron,webmechanicx/electron,IonicaBizauKitchen/electron,chriskdon/electron,dkfiresky/electron,synaptek/electron,jiaz/electron,posix4e/electron,digideskio/electron,stevemao/electron,matiasinsaurralde/electron,gamedevsam/electron,egoist/electron,anko/electron,adamjgray/electron,mrwizard82d1/electron,beni55/electron,JussMee15/electron,cos2004/electron,adcentury/electron,tomashanacek/electron,takashi/electron,Floato/electron,gbn972/electron,medixdev/electron,cqqccqc/electron,eriser/electron,vHanda/electron,abhishekgahlot/electron,mirrh/electron,astoilkov/electron,robinvandernoord/electron,MaxWhere/electron,soulteary/electron,natgolov/electron,mattotodd/electron,brave/electron,jlhbaseball15/electron,electron/electron,electron/electron,brave/electron,MaxGraey/electron,bright-sparks/electron,jannishuebl/electron,Evercoder/electron,Rokt33r/electron,roadev/electron,dahal/electron,GoooIce/electron,aliib/electron,yalexx/electron,tylergibson/electron,nekuz0r/electron,leolujuyi/electron,faizalpribadi/electron,Faiz7412/electron,neutrous/electron,yan-foto/electron,deed02392/electron,wolfflow/electron,shockone/electron,rsvip/electron,lzpfmh/electron,yan-foto/electron,adcentury/electron,greyhwndz/electron,jonatasfreitasv/electron,simonfork/electron,rajatsingla28/electron,roadev/electron,gstack/infinium-shell,dongjoon-hyun/electron,felixrieseberg/electron,fomojola/electron,miniak/electron,LadyNaggaga/electron,rhencke/electron,matt
desl/electron,chriskdon/electron,edulan/electron,kostia/electron,Rokt33r/electron,jaanus/electron,fritx/electron,sircharleswatson/electron,mhkeller/electron,noikiy/electron,farmisen/electron,vipulroxx/electron,iftekeriba/electron,soulteary/electron,destan/electron,shockone/electron,jaanus/electron,jcblw/electron,John-Lin/electron,twolfson/electron,RIAEvangelist/electron,soulteary/electron,Zagorakiss/electron,jtburke/electron,edulan/electron,saronwei/electron,pandoraui/electron,IonicaBizauKitchen/electron,brave/electron,seanchas116/electron,seanchas116/electron,deepak1556/atom-shell,astoilkov/electron,neutrous/electron,trankmichael/electron,jjz/electron,astoilkov/electron,mjaniszew/electron,dongjoon-hyun/electron,sshiting/electron,kikong/electron,bright-sparks/electron,coderhaoxin/electron,RIAEvangelist/electron,stevekinney/electron,LadyNaggaga/electron,michaelchiche/electron,mattdesl/electron,rreimann/electron,brenca/electron,saronwei/electron,farmisen/electron,pirafrank/electron,robinvandernoord/electron,bbondy/electron,voidbridge/electron,stevemao/electron,lrlna/electron,shiftkey/electron,bright-sparks/electron,natgolov/electron,bright-sparks/electron,gamedevsam/electron,jacksondc/electron,renaesop/electron,mattdesl/electron,leethomas/electron,bruce/electron,eric-seekas/electron,preco21/electron,simonfork/electron,Rokt33r/electron,beni55/electron,Zagorakiss/electron,leftstick/electron,bbondy/electron,shiftkey/electron,dongjoon-hyun/electron,arturts/electron,kokdemo/electron,neutrous/electron,posix4e/electron,greyhwndz/electron,JesselJohn/electron,fabien-d/electron,fireball-x/atom-shell,biblerule/UMCTelnetHub,jjz/electron,howmuchcomputer/electron,bpasero/electron,bpasero/electron,MaxWhere/electron,Gerhut/electron,lrlna/electron,joneit/electron,icattlecoder/electron,webmechanicx/electron,gbn972/electron,Floato/electron,wolfflow/electron,kazupon/electron,rajatsingla28/electron,Evercoder/electron,aaron-goshine/electron,pirafrank/electron,mjaniszew/electron,biblerule/U
MCTelnetHub,shaundunne/electron,arturts/electron,webmechanicx/electron,jiaz/electron,simonfork/electron,MaxGraey/electron,electron/electron,christian-bromann/electron,RIAEvangelist/electron,egoist/electron,fomojola/electron,benweissmann/electron,Faiz7412/electron,mirrh/electron,gabrielPeart/electron,chriskdon/electron,nicholasess/electron,robinvandernoord/electron,jiaz/electron,shennushi/electron,bruce/electron,cqqccqc/electron,bobwol/electron,minggo/electron,JesselJohn/electron,JussMee15/electron,greyhwndz/electron,nekuz0r/electron,Jacobichou/electron,eriser/electron,xiruibing/electron,benweissmann/electron,matiasinsaurralde/electron,systembugtj/electron,IonicaBizauKitchen/electron,gstack/infinium-shell,dkfiresky/electron,takashi/electron,gamedevsam/electron,carsonmcdonald/electron,lrlna/electron,shockone/electron,joaomoreno/atom-shell,benweissmann/electron,dkfiresky/electron,baiwyc119/electron,bruce/electron,yalexx/electron,thomsonreuters/electron,vipulroxx/electron,setzer777/electron,jhen0409/electron,sky7sea/electron,stevemao/electron,yan-foto/electron,jcblw/electron,voidbridge/electron,mirrh/electron,jcblw/electron,kcrt/electron,rsvip/electron,zhakui/electron,fomojola/electron,shockone/electron,rprichard/electron,evgenyzinoviev/electron,noikiy/electron,aliib/electron,yalexx/electron,Faiz7412/electron,icattlecoder/electron,thomsonreuters/electron,yan-foto/electron,zhakui/electron,pombredanne/electron,fireball-x/atom-shell,brave/muon,biblerule/UMCTelnetHub,greyhwndz/electron,gbn972/electron,cos2004/electron,adcentury/electron,noikiy/electron,destan/electron,farmisen/electron,MaxGraey/electron,tonyganch/electron,nicobot/electron,Evercoder/electron,coderhaoxin/electron,abhishekgahlot/electron,kenmozi/electron,christian-bromann/electron,fritx/electron,xfstudio/electron,rreimann/electron,howmuchcomputer/electron,natgolov/electron,farmisen/electron,fabien-d/electron,Ivshti/electron,adamjgray/electron,GoooIce/electron,SufianHassan/electron,fffej/electron,SufianHassan/e
lectron,shennushi/electron,voidbridge/electron,wan-qy/electron,rprichard/electron,pandoraui/electron,tylergibson/electron,the-ress/electron,pombredanne/electron,bitemyapp/electron,nicobot/electron,Jonekee/electron,renaesop/electron,systembugtj/electron,bitemyapp/electron,benweissmann/electron,MaxGraey/electron,mubassirhayat/electron,aecca/electron,fabien-d/electron,ianscrivener/electron,dkfiresky/electron,tylergibson/electron,sircharleswatson/electron,seanchas116/electron,thingsinjars/electron,shennushi/electron,evgenyzinoviev/electron,leethomas/electron,simongregory/electron,arusakov/electron,JussMee15/electron,davazp/electron,baiwyc119/electron,timruffles/electron,ankitaggarwal011/electron,beni55/electron,shennushi/electron,xfstudio/electron,jacksondc/electron,digideskio/electron,matiasinsaurralde/electron,mattotodd/electron,Jacobichou/electron,twolfson/electron,kenmozi/electron,biblerule/UMCTelnetHub,joneit/electron,aliib/electron,pombredanne/electron,the-ress/electron,preco21/electron,nagyistoce/electron-atom-shell,Neron-X5/electron,jtburke/electron,joaomoreno/atom-shell,electron/electron,takashi/electron,lzpfmh/electron,iftekeriba/electron,miniak/electron,oiledCode/electron,digideskio/electron,stevemao/electron,gerhardberger/electron,smczk/electron,etiktin/electron,nicholasess/electron,leethomas/electron,gerhardberger/electron,wan-qy/electron,icattlecoder/electron,fabien-d/electron,natgolov/electron,rajatsingla28/electron,trankmichael/electron,brenca/electron,dongjoon-hyun/electron,JussMee15/electron,vHanda/electron,michaelchiche/electron,tinydew4/electron,minggo/electron,xiruibing/electron,zhakui/electron,nekuz0r/electron,vaginessa/electron,RobertJGabriel/electron,nagyistoce/electron-atom-shell,faizalpribadi/electron,jcblw/electron,aliib/electron,fomojola/electron,christian-bromann/electron,d-salas/electron,leftstick/electron,jlord/electron,thingsinjars/electron,gabrielPeart/electron,thingsinjars/electron,kenmozi/electron,shiftkey/electron,mattdesl/electron,bi
temyapp/electron,fireball-x/atom-shell,tomashanacek/electron,Floato/electron,gstack/infinium-shell,aecca/electron,jacksondc/electron,jonatasfreitasv/electron,dahal/electron,robinvandernoord/electron,leftstick/electron,biblerule/UMCTelnetHub,RobertJGabriel/electron,leethomas/electron,neutrous/electron,micalan/electron,jjz/electron,vHanda/electron,trankmichael/electron,michaelchiche/electron,leethomas/electron,oiledCode/electron,wolfflow/electron,meowlab/electron,kikong/electron,leftstick/electron,jcblw/electron,jlord/electron,dkfiresky/electron,bbondy/electron,davazp/electron,SufianHassan/electron,saronwei/electron,posix4e/electron,trankmichael/electron,sky7sea/electron,jacksondc/electron,destan/electron,xfstudio/electron,Evercoder/electron,tinydew4/electron,noikiy/electron,jtburke/electron,abhishekgahlot/electron,jsutcodes/electron,Ivshti/electron,setzer777/electron,felixrieseberg/electron,seanchas116/electron,rreimann/electron,mattotodd/electron,noikiy/electron,felixrieseberg/electron,aichingm/electron,tinydew4/electron,xfstudio/electron,rhencke/electron,arturts/electron,BionicClick/electron,systembugtj/electron,kikong/electron,voidbridge/electron,John-Lin/electron,roadev/electron,kazupon/electron,John-Lin/electron,icattlecoder/electron,lzpfmh/electron,evgenyzinoviev/electron,GoooIce/electron,simongregory/electron,jhen0409/electron,rreimann/electron,thingsinjars/electron,BionicClick/electron,eric-seekas/electron,darwin/electron,d-salas/electron,tonyganch/electron,preco21/electron,beni55/electron,michaelchiche/electron,anko/electron,arusakov/electron,kcrt/electron,nekuz0r/electron,gamedevsam/electron,cqqccqc/electron,jhen0409/electron,ervinb/electron,faizalpribadi/electron,edulan/electron,shockone/electron,mhkeller/electron,trigrass2/electron,simonfork/electron,kcrt/electron,sky7sea/electron,bpasero/electron,destan/electron,Evercoder/electron,xiruibing/electron,the-ress/electron,kazupon/electron,deepak1556/atom-shell,MaxWhere/electron,ankitaggarwal011/electron,trigr
ass2/electron,JussMee15/electron,vaginessa/electron,takashi/electron,trigrass2/electron,Jonekee/electron,mubassirhayat/electron,webmechanicx/electron,tomashanacek/electron,bwiggs/electron,pandoraui/electron,jaanus/electron,nekuz0r/electron,xiruibing/electron,tonyganch/electron,gerhardberger/electron,rhencke/electron,gabriel/electron,renaesop/electron,smczk/electron,simongregory/electron,sshiting/electron,Rokt33r/electron,greyhwndz/electron,chrisswk/electron,shennushi/electron,SufianHassan/electron,michaelchiche/electron,shiftkey/electron,carsonmcdonald/electron,coderhaoxin/electron,pandoraui/electron,JesselJohn/electron,Neron-X5/electron,adamjgray/electron,takashi/electron,rprichard/electron,Andrey-Pavlov/electron,iftekeriba/electron,xiruibing/electron,iftekeriba/electron,smczk/electron,leolujuyi/electron,minggo/electron,soulteary/electron,bruce/electron,fomojola/electron,wan-qy/electron,stevekinney/electron,shiftkey/electron,setzer777/electron,jacksondc/electron,gerhardberger/electron,Floato/electron,timruffles/electron,trigrass2/electron,robinvandernoord/electron,seanchas116/electron,arturts/electron,soulteary/electron,jiaz/electron,DivyaKMenon/electron,jonatasfreitasv/electron,joaomoreno/atom-shell,Gerhut/electron,davazp/electron,jjz/electron,John-Lin/electron,miniak/electron,eriser/electron,mattotodd/electron,jlord/electron,setzer777/electron,deepak1556/atom-shell,sircharleswatson/electron,thompsonemerson/electron,chrisswk/electron,medixdev/electron,arturts/electron,timruffles/electron,felixrieseberg/electron,bitemyapp/electron,IonicaBizauKitchen/electron,pombredanne/electron,Jonekee/electron,simongregory/electron,Gerhut/electron,nicobot/electron,anko/electron,deed02392/electron,zhakui/electron,vaginessa/electron,xfstudio/electron,vaginessa/electron,micalan/electron,meowlab/electron,the-ress/electron,minggo/electron,aaron-goshine/electron,twolfson/electron,aichingm/electron,JesselJohn/electron,JesselJohn/electron,stevekinney/electron,meowlab/electron,jhen0409/el
ectron,nagyistoce/electron-atom-shell,Zagorakiss/electron,micalan/electron,leolujuyi/electron,shiftkey/electron,sky7sea/electron,neutrous/electron,thompsonemerson/electron,tonyganch/electron,jannishuebl/electron,pirafrank/electron,Jacobichou/electron,systembugtj/electron,christian-bromann/electron,fritx/electron,Zagorakiss/electron,sshiting/electron,vHanda/electron,felixrieseberg/electron,gstack/infinium-shell,xfstudio/electron,JussMee15/electron,Jonekee/electron,eriser/electron,Ivshti/electron,jlord/electron,ianscrivener/electron,gbn972/electron,kikong/electron,joaomoreno/atom-shell,oiledCode/electron,BionicClick/electron,webmechanicx/electron,nekuz0r/electron,Jonekee/electron,gabriel/electron,mjaniszew/electron,egoist/electron,bruce/electron,darwin/electron,destan/electron,DivyaKMenon/electron,nicholasess/electron,shockone/electron,dahal/electron,lzpfmh/electron,davazp/electron,simongregory/electron,zhakui/electron,kazupon/electron,sshiting/electron,roadev/electron,d-salas/electron,seanchas116/electron,jaanus/electron,neutrous/electron,Faiz7412/electron,vaginessa/electron,posix4e/electron,fffej/electron,micalan/electron,twolfson/electron,aliib/electron,aichingm/electron,thomsonreuters/electron,RobertJGabriel/electron,stevemao/electron,chriskdon/electron,gabrielPeart/electron,mhkeller/electron,kostia/electron,kcrt/electron,nicholasess/electron,aichingm/electron,etiktin/electron,leolujuyi/electron,voidbridge/electron,thompsonemerson/electron,saronwei/electron,beni55/electron,RIAEvangelist/electron,faizalpribadi/electron,rsvip/electron,sshiting/electron,Gerhut/electron,synaptek/electron,electron/electron,kazupon/electron,RobertJGabriel/electron,bobwol/electron,thomsonreuters/electron,Jacobichou/electron,mrwizard82d1/electron,Jacobichou/electron,gabrielPeart/electron,rreimann/electron,arturts/electron,brave/electron,fffej/electron,Andrey-Pavlov/electron,ankitaggarwal011/electron,mubassirhayat/electron,coderhaoxin/electron,robinvandernoord/electron,joneit/electron,void
bridge/electron,meowlab/electron,matiasinsaurralde/electron,brenca/electron,GoooIce/electron,cos2004/electron,bitemyapp/electron,digideskio/electron,lzpfmh/electron,michaelchiche/electron,Gerhut/electron,rreimann/electron,dahal/electron,thomsonreuters/electron,brave/muon,ianscrivener/electron,aaron-goshine/electron,darwin/electron,d-salas/electron,kikong/electron,trankmichael/electron,sky7sea/electron,LadyNaggaga/electron,jlhbaseball15/electron,timruffles/electron,brave/muon,ankitaggarwal011/electron,Andrey-Pavlov/electron,rsvip/electron,dongjoon-hyun/electron,mhkeller/electron,stevekinney/electron,vaginessa/electron,kostia/electron,jiaz/electron,Gerhut/electron,bwiggs/electron,nicobot/electron,wolfflow/electron,timruffles/electron,ankitaggarwal011/electron,yalexx/electron,carsonmcdonald/electron,jsutcodes/electron,hokein/atom-shell,kenmozi/electron,leftstick/electron,GoooIce/electron,stevekinney/electron,mjaniszew/electron,anko/electron,pandoraui/electron,aichingm/electron,Rokt33r/electron,kcrt/electron,eriser/electron,sky7sea/electron,LadyNaggaga/electron,sshiting/electron,joneit/electron,aecca/electron,sircharleswatson/electron,synaptek/electron,kcrt/electron,setzer777/electron,gamedevsam/electron,MaxWhere/electron,fffej/electron,preco21/electron,ianscrivener/electron,leethomas/electron,carsonmcdonald/electron,bright-sparks/electron,RobertJGabriel/electron,electron/electron,the-ress/electron,subblue/electron,shaundunne/electron,tincan24/electron,fffej/electron,DivyaKMenon/electron,synaptek/electron,kokdemo/electron,nagyistoce/electron-atom-shell,eric-seekas/electron,mrwizard82d1/electron,Andrey-Pavlov/electron,gabrielPeart/electron,etiktin/electron,rsvip/electron,fomojola/electron,thompsonemerson/electron,eric-seekas/electron,aecca/electron,jtburke/electron,howmuchcomputer/electron,howmuchcomputer/electron,faizalpribadi/electron,bwiggs/electron,ervinb/electron,brave/electron,adcentury/electron,jannishuebl/electron,mattotodd/electron,sircharleswatson/electron,the-
ress/electron,chriskdon/electron,renaesop/electron,lrlna/electron,Neron-X5/electron,wan-qy/electron,baiwyc119/electron,kokdemo/electron,setzer777/electron,bobwol/electron,joaomoreno/atom-shell,ervinb/electron,twolfson/electron,shaundunne/electron,hokein/atom-shell,Neron-X5/electron,kenmozi/electron,jlhbaseball15/electron,bright-sparks/electron,brave/muon,hokein/atom-shell,smczk/electron,jlord/electron,bpasero/electron,meowlab/electron,fireball-x/atom-shell,Jacobichou/electron,mattdesl/electron,edulan/electron,howmuchcomputer/electron,evgenyzinoviev/electron,sircharleswatson/electron,chrisswk/electron,SufianHassan/electron,bbondy/electron,lrlna/electron,medixdev/electron,brave/electron,hokein/atom-shell,ervinb/electron,aaron-goshine/electron,GoooIce/electron,aaron-goshine/electron,jiaz/electron,tincan24/electron,Neron-X5/electron,maxogden/atom-shell,gabriel/electron,adamjgray/electron,roadev/electron,kokdemo/electron,tomashanacek/electron,rajatsingla28/electron,mhkeller/electron,webmechanicx/electron,Floato/electron,rajatsingla28/electron,saronwei/electron,nicobot/electron,systembugtj/electron,jhen0409/electron,brave/muon,coderhaoxin/electron,egoist/electron,mjaniszew/electron,wolfflow/electron,minggo/electron,vipulroxx/electron,christian-bromann/electron,iftekeriba/electron,LadyNaggaga/electron,aecca/electron,icattlecoder/electron,subblue/electron,SufianHassan/electron,tinydew4/electron,meowlab/electron,subblue/electron,mubassirhayat/electron,kokdemo/electron,jannishuebl/electron,RIAEvangelist/electron,arusakov/electron,cqqccqc/electron,darwin/electron,tomashanacek/electron,gamedevsam/electron,deepak1556/atom-shell,bitemyapp/electron,natgolov/electron,ervinb/electron,John-Lin/electron,maxogden/atom-shell,farmisen/electron,IonicaBizauKitchen/electron,adcentury/electron,thompsonemerson/electron,yan-foto/electron,joneit/electron,medixdev/electron,astoilkov/electron,dahal/electron,BionicClick/electron,jsutcodes/electron,d-salas/electron,kostia/electron,mattdesl/electron
,oiledCode/electron,noikiy/electron,aaron-goshine/electron,RobertJGabriel/electron,bpasero/electron,subblue/electron,gabriel/electron,twolfson/electron,d-salas/electron,renaesop/electron,smczk/electron,stevemao/electron,eric-seekas/electron,kostia/electron,mirrh/electron,trigrass2/electron,jtburke/electron,aecca/electron,fritx/electron,kazupon/electron,deed02392/electron,jannishuebl/electron,pirafrank/electron,bruce/electron,deed02392/electron,biblerule/UMCTelnetHub,trigrass2/electron,abhishekgahlot/electron,brenca/electron,iftekeriba/electron,cos2004/electron,tinydew4/electron,felixrieseberg/electron,thomsonreuters/electron,trankmichael/electron,simonfork/electron,joneit/electron,RIAEvangelist/electron,greyhwndz/electron,baiwyc119/electron,Rokt33r/electron,medixdev/electron,aliib/electron,Ivshti/electron,cos2004/electron,rhencke/electron,dkfiresky/electron,micalan/electron,xiruibing/electron,shaundunne/electron,tincan24/electron,deed02392/electron,jlhbaseball15/electron,synaptek/electron,carsonmcdonald/electron,baiwyc119/electron,adamjgray/electron,thompsonemerson/electron,benweissmann/electron,jjz/electron,posix4e/electron,jonatasfreitasv/electron,jlhbaseball15/electron,brenca/electron,cqqccqc/electron,JesselJohn/electron,bpasero/electron,rhencke/electron,joaomoreno/atom-shell,Floato/electron,jsutcodes/electron,subblue/electron,Andrey-Pavlov/electron,gerhardberger/electron,aichingm/electron,pandoraui/electron,cqqccqc/electron,fabien-d/electron,jsutcodes/electron,preco21/electron,pombredanne/electron,medixdev/electron,maxogden/atom-shell,brave/muon,bbondy/electron,tincan24/electron,evgenyzinoviev/electron,arusakov/electron,jacksondc/electron,digideskio/electron,rajatsingla28/electron,simongregory/electron,jcblw/electron,benweissmann/electron,rprichard/electron,posix4e/electron,coderhaoxin/electron,miniak/electron,anko/electron,tonyganch/electron,mrwizard82d1/electron,dahal/electron,bwiggs/electron,ervinb/electron,pirafrank/electron,adcentury/electron,darwin/electro
n,MaxWhere/electron,miniak/electron,tylergibson/electron,egoist/electron,mrwizard82d1/electron,thingsinjars/electron,vHanda/electron,fffej/electron,fritx/electron,minggo/electron,eric-seekas/electron,kostia/electron,vipulroxx/electron,stevekinney/electron,BionicClick/electron,egoist/electron,jaanus/electron,lzpfmh/electron,jonatasfreitasv/electron,wan-qy/electron,pombredanne/electron,saronwei/electron,micalan/electron,leolujuyi/electron,DivyaKMenon/electron,jtburke/electron,oiledCode/electron,farmisen/electron,eriser/electron,leolujuyi/electron,gerhardberger/electron,MaxGraey/electron,vHanda/electron | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
# Node.js version the build targets.
NODE_VERSION = 'v0.11.13'
# Download location for prebuilt libchromiumcontent binaries.
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
# Pinned libchromiumcontent revision to fetch from BASE_URL.
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
# Host pointer width keyed by sys.platform.  NOTE(review): 'linux2' is the
# Python 2 value; Python 3.3+ reports 'linux', which would raise KeyError
# here.  Unsupported platforms also raise KeyError by design.
ARCH = {
    'cygwin': '32bit',
    'darwin': '64bit',
    'linux2': platform.architecture()[0],
    'win32': '32bit',
}[sys.platform]
# Architecture label used in distribution artifact names.
DIST_ARCH = {
    '32bit': 'ia32',
    '64bit': 'x64',
}[ARCH]
# Normalized platform name used by the build scripts.
TARGET_PLATFORM = {
    'cygwin': 'win32',
    'darwin': 'darwin',
    'linux2': 'linux',
    'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
# Node.js version the build targets.
NODE_VERSION = 'v0.11.13'
# Download location for prebuilt libchromiumcontent binaries.
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
# Pinned libchromiumcontent revision to fetch from BASE_URL.
LIBCHROMIUMCONTENT_COMMIT = 'afb4570ceee2ad10f3caf5a81335a2ee11ec68a5'
# Host pointer width keyed by sys.platform.  NOTE(review): 'linux2' is the
# Python 2 value; Python 3.3+ reports 'linux', which would raise KeyError
# here.  Unsupported platforms also raise KeyError by design.
ARCH = {
    'cygwin': '32bit',
    'darwin': '64bit',
    'linux2': platform.architecture()[0],
    'win32': '32bit',
}[sys.platform]
# Architecture label used in distribution artifact names.
DIST_ARCH = {
    '32bit': 'ia32',
    '64bit': 'x64',
}[ARCH]
# Normalized platform name used by the build scripts.
TARGET_PLATFORM = {
    'cygwin': 'win32',
    'darwin': 'darwin',
    'linux2': 'linux',
    'win32': 'win32',
}[sys.platform]
| mit | Python |
ca79a5a66638b08b551471eeca85d75ebae61218 | make it executable | geovedi/nmt-playground,geovedi/nmt-playground | scripts/bleu_sent.py | scripts/bleu_sent.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import io
import fire
from nltk.translate.bleu_score import SmoothingFunction, sentence_bleu
def main(ref, hyp, out):
    """Write one smoothed sentence-level BLEU score per line pair.

    ref -- path to the reference file (one tokenized sentence per line)
    hyp -- path to the hypothesis file, line-parallel with ``ref``
    out -- output path; receives one '0.NNNN' formatted score per pair
    """
    smoothing_func = SmoothingFunction()
    with io.open(out, 'w', encoding='utf-8') as out_f, \
         io.open(ref, 'r', encoding='utf-8') as ref_f, \
         io.open(hyp, 'r', encoding='utf-8') as hyp_f:
        for ref_s, hyp_s in zip(ref_f, hyp_f):
            # NOTE(review): lines from file iteration keep their trailing
            # newline, so this only fires on a truly empty string; zip
            # already stops at the shorter file.
            if not ref_s or not hyp_s:
                break
            try:
                score = sentence_bleu(
                    [ref_s.strip().split()],
                    hyp_s.split(),
                    smoothing_function=smoothing_func.method4)
            # Was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit; degenerate sentences score 0.
            except Exception:
                score = 0.0
            out_f.write('{0:0.4f}\n'.format(score))
if __name__ == '__main__':
    # Expose main() as a command-line interface via python-fire.
    fire.Fire(main)
| # -*- coding: utf-8 -*-
import io
import fire
from nltk.translate.bleu_score import SmoothingFunction, sentence_bleu
def main(ref, hyp, out):
    """Write one smoothed sentence-level BLEU score per line pair.

    ref -- path to the reference file (one tokenized sentence per line)
    hyp -- path to the hypothesis file, line-parallel with ``ref``
    out -- output path; receives one '0.NNNN' formatted score per pair
    """
    smoothing_func = SmoothingFunction()
    with io.open(out, 'w', encoding='utf-8') as out_f, \
         io.open(ref, 'r', encoding='utf-8') as ref_f, \
         io.open(hyp, 'r', encoding='utf-8') as hyp_f:
        for ref_s, hyp_s in zip(ref_f, hyp_f):
            # NOTE(review): lines from file iteration keep their trailing
            # newline, so this only fires on a truly empty string; zip
            # already stops at the shorter file.
            if not ref_s or not hyp_s:
                break
            try:
                score = sentence_bleu(
                    [ref_s.strip().split()],
                    hyp_s.split(),
                    smoothing_function=smoothing_func.method4)
            # NOTE(review): bare except also swallows KeyboardInterrupt and
            # SystemExit; consider ``except Exception:``.
            except:
                score = 0.0
            out_f.write('{0:0.4f}\n'.format(score))
if __name__ == '__main__':
    # Expose main() as a command-line interface via python-fire.
    fire.Fire(main)
| mit | Python |
f20156beb47f860646f31b46ff69879e190d220d | Add job console output to firebase | MDTsai/webcompat-system-addon-autotest | scripts/postbuild.py | scripts/postbuild.py | #!/usr/bin/python3
import sys
import jenkins
from firebase import firebase
# Deployment settings -- fill these in before running.
JENKINS_URL = '' # Enter Jenkins URL like http://localhost:8080
JENKINS_USERNAME = '' # Enter available Jenkins username
JENKINS_APITOKEN = '' # Enter Jenkins API token (or password if Jenkins < 1.5)
FIREBASE_DSN = '' # Enter your firebase domain
# Characters stripped from job names before use in Firebase paths.
FIREBASE_INVALID_CHARSET = '.$#[]/'
if __name__ == "__main__":
    # Usage: postbuild.py <build-number> <job-name>
    build_number = int(sys.argv[1])
    job_name = sys.argv[2]
    server = jenkins.Jenkins(JENKINS_URL, username=JENKINS_USERNAME, password=JENKINS_APITOKEN)
    build_info = server.get_build_info(job_name, build_number)
    console_output = server.get_build_console_output(job_name, build_number)
    # NOTE(review): rebinds the name ``firebase`` from the imported module to
    # the application object; works here because the module is not used again.
    firebase = firebase.FirebaseApplication(FIREBASE_DSN)
    # Remove invalid character for firebase
    firebase_job_name = job_name
    for ic in FIREBASE_INVALID_CHARSET:
        # The membership check is redundant (replace is a no-op otherwise)
        # but harmless.
        if ic in firebase_job_name:
            firebase_job_name = firebase_job_name.replace(ic, '')
    # Post new job result to firebase
    data = {'result': build_info['result'], 'timestamp': build_info['timestamp']}
    firebase.put('/job/' + firebase_job_name, build_number, data)
    # Post new job console output to firebase
    data = {'output': console_output}
    firebase.put('/job_console/' + firebase_job_name, build_number, data)
| #!/usr/bin/python3
import sys
import jenkins
from firebase import firebase
# Deployment settings -- fill these in before running.
JENKINS_URL = '' # Enter Jenkins URL like http://localhost:8080
JENKINS_USERNAME = '' # Enter available Jenkins username
JENKINS_APITOKEN = '' # Enter Jenkins API token (or password if Jenkins < 1.5)
FIREBASE_DSN = '' # Enter your firebase domain
# Characters stripped from job names before use in Firebase paths.
FIREBASE_INVALID_CHARSET = '.$#[]/'
if __name__ == "__main__":
    # Usage: postbuild.py <build-number> <job-name>
    build_number = int(sys.argv[1])
    job_name = sys.argv[2]
    server = jenkins.Jenkins(JENKINS_URL, username=JENKINS_USERNAME, password=JENKINS_APITOKEN)
    build_info = server.get_build_info(job_name, build_number)
    # NOTE(review): rebinds the name ``firebase`` from the imported module to
    # the application object; works here because the module is not used again.
    firebase = firebase.FirebaseApplication(FIREBASE_DSN)
    # Remove invalid character for firebase
    firebase_job_name = job_name
    for ic in FIREBASE_INVALID_CHARSET:
        # The membership check is redundant (replace is a no-op otherwise)
        # but harmless.
        if ic in firebase_job_name:
            firebase_job_name = firebase_job_name.replace(ic, '')
    # Post new job result to firebase
    data = {'result': build_info['result'], 'timestamp': build_info['timestamp']}
    firebase.put('/job/' + firebase_job_name, build_number, data)
| mpl-2.0 | Python |
5cdccbf7a6c3ff15ff66ae4634929546c3d52721 | Add Datasource parent class | lamby/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot | DebianChangesBot/__init__.py | DebianChangesBot/__init__.py | import urllib2
class Datasource(object):
    """Base class for remote data sources.

    Subclasses are expected to provide a ``URL`` attribute and a
    ``parse(fileobj)`` method; neither is defined here.
    """
    # Error type scoped to the data source; not raised in this base class.
    class DataError(Exception): pass
    def update(self):
        """Fetch ``self.URL`` and return whatever ``self.parse`` makes of it."""
        # urllib2 is Python-2 stdlib; this module predates Python 3.
        fileobj = urllib2.urlopen(self.URL)
        return self.parse(fileobj)
import datasources
| agpl-3.0 | Python | |
dedcb6bcabe3d8d6758dcee607e8c33b174d782b | Bump to 2.0.0. | kivy/kivy,matham/kivy,rnixx/kivy,matham/kivy,akshayaurora/kivy,matham/kivy,akshayaurora/kivy,rnixx/kivy,matham/kivy,kivy/kivy,rnixx/kivy,akshayaurora/kivy,kivy/kivy | kivy/_version.py | kivy/_version.py | # This file is imported from __init__.py and exec'd from setup.py
# Numeric version components; bump these for a new release.
MAJOR = 2
MINOR = 0
MICRO = 0
RELEASE = True
# Canonical dotted version string assembled from the parts above.
__version__ = '.'.join(str(part) for part in (MAJOR, MINOR, MICRO))
if not RELEASE:
    # An 'rcX' pre-release tag attaches without a separating period, whereas
    # a '.devX' tag would have to start with one.
    __version__ = __version__ + 'rc4'
# Placeholders; empty strings here.
_kivy_git_hash = ''
_kivy_build_date = ''
| # This file is imported from __init__.py and exec'd from setup.py
# Numeric version components; bump these for a new release.
MAJOR = 2
MINOR = 0
MICRO = 0
RELEASE = False
# Canonical dotted version string assembled from the parts above.
__version__ = '.'.join(str(part) for part in (MAJOR, MINOR, MICRO))
if not RELEASE:
    # An 'rcX' pre-release tag attaches without a separating period, whereas
    # a '.devX' tag would have to start with one.
    __version__ = __version__ + 'rc4'
# Placeholders; empty strings here.
_kivy_git_hash = ''
_kivy_build_date = ''
| mit | Python |
f8e4334514a622fa7541e0b82800fdbc717e8838 | Add notes to sketch for priorityq. refs #10 | jay-tyler/data-structures,jonathanstallings/data-structures | priorityq.py | priorityq.py | from __future__ import unicode_literals
from functools import total_ordering
from binary_heap import BinaryHeap
@total_ordering  # Derives <=, >, >= from __eq__ and __lt__ below
class QNode(object):
    """A priority-queue node holding a value and an optional priority.

    Nodes order by ``priority``; a node whose priority is None sorts
    after every node with a real priority.  This implements the
    comparison logic the original sketch left as ``pass`` stubs.
    """
    def __init__(self, val, priority=None):
        self.val = val
        self.priority = priority

    def __repr__(self):
        """Print representation of node."""
        return "{val}".format(val=self.val)

    def _sort_key(self):
        # (is-None flag, priority): the boolean pushes None priorities
        # behind any numbered priority, and tuple comparison never
        # evaluates None < None because equal elements short-circuit.
        return (self.priority is None, self.priority)

    def __eq__(self, other):
        """Nodes compare equal when their priorities are equal."""
        if not isinstance(other, QNode):
            return NotImplemented
        return self._sort_key() == other._sort_key()

    def __lt__(self, other):
        """Order by priority; a None priority sorts last."""
        if not isinstance(other, QNode):
            return NotImplemented
        return self._sort_key() < other._sort_key()
class PriorityQ(object):
    """A priority queue, to be composed from BinaryHeap (still a sketch).

    The method bodies remain unimplemented placeholders, but the
    signatures now include ``self`` -- the originals omitted it, which
    made every method uncallable on an instance -- and ``insert`` takes
    the optional priority argument the sketch notes asked for.
    """
    def __init__(self, iterable=()):
        """Build the queue by inserting each item of *iterable* in turn."""
        pass

    def insert(self, item, priority=None):
        """Insert an item into the queue.

        Planned behavior: if *item* is already a QNode, add it to the
        queue directly; otherwise wrap it in a QNode with *item* as val
        and the given *priority*.
        """
        pass

    def pop(self):
        """Remove and return the most important item from the queue."""
        pass

    def peek(self):
        """Return the most important item from the queue without removal."""
| from __future__ import unicode_literals
from functools import total_ordering
from binary_heap import BinaryHeap
@total_ordering # Will build out the remaining comparison methods
class QNode(object):
"""A class for a queue node."""
def __init__(self, val, priority=None):
super(QNode, self).__init__()
self.val = val
self.priority = priority
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
def __eq__(self, other):
"""Implement this and following two methods with logic to compare
priority and value appropiately.
"""
pass
def __lt__(self, other):
"""Implement in tandem with __eq__."""
pass
class PriorityQ(object):
"""A class for a priority queue. Compose this from BinaryHeap."""
def __init__(self, iterable=()):
pass
def insert(item):
"""Insert an item into the queue."""
pass
def pop():
"""Remove the most importan item from the queue."""
pass
def peek():
"""Returns the most important item from queue without removal."""
| mit | Python |
59dc6605af2aba9c94201b5b08e614015c8824dc | Use localtime function | Netuitive/netuitive-client-python | example/example.py | example/example.py | import netuitive
import time
import os
# Endpoint and credentials come from the environment so the example can
# run against any deployment without editing the source.
ApiClient = netuitive.Client(url=os.environ.get('API_URL'), api_key=os.environ.get('CUSTOM_API_KEY'))

MyElement = netuitive.Element()

# Describe the element with attributes, a relation and tags.
MyElement.add_attribute('Language', 'Python')
MyElement.add_attribute('app_version', '7.0')

MyElement.add_relation('my_child_element')

MyElement.add_tag('Production', 'True')
MyElement.add_tag('app_tier', 'True')

# mktime() interprets its struct_time argument as *local* time, so it is
# paired with localtime() (not gmtime()) to produce correct epoch seconds.
timestamp = int(time.mktime(time.localtime()))

MyElement.add_sample('app.error', timestamp, 1, host='appserver01')
MyElement.add_sample('app.request', timestamp, 10, host='appserver01')

# Ship the samples, then clear them so they are not re-posted later.
ApiClient.post(MyElement)
MyElement.clear_samples()

MyEvent = netuitive.Event('appserver01', 'INFO', 'test event','this is a test message', 'INFO')
ApiClient.post_event(MyEvent)

if ApiClient.time_insync():
    print('we have time sync with the server')
| import netuitive
import time
import os
ApiClient = netuitive.Client(url=os.environ.get('API_URL'), api_key=os.environ.get('CUSTOM_API_KEY'))
MyElement = netuitive.Element()
MyElement.add_attribute('Language', 'Python')
MyElement.add_attribute('app_version', '7.0')
MyElement.add_relation('my_child_element')
MyElement.add_tag('Production', 'True')
MyElement.add_tag('app_tier', 'True')
timestamp = int(time.mktime(time.gmtime()))
MyElement.add_sample('app.error', timestamp, 1, host='appserver01')
MyElement.add_sample('app.request', timestamp, 10, host='appserver01')
ApiClient.post(MyElement)
MyElement.clear_samples()
MyEvent = netuitive.Event('appserver01', 'INFO', 'test event','this is a test message', 'INFO')
ApiClient.post_event(MyEvent)
if ApiClient.time_insync():
print('we have time sync with the server')
| apache-2.0 | Python |
1a0e86f11ddac5ff4842e87cfc6796670866ca0f | Update item_attribute.py | gsnbng/erpnext,susuchina/ERPNEXT,SPKian/Testing2,saurabh6790/test-erp,aruizramon/alec_erpnext,hernad/erpnext,mbauskar/omnitech-erpnext,mbauskar/sapphire-erpnext,gangadharkadam/v6_erp,gangadharkadam/saloon_erp,anandpdoshi/erpnext,Tejal011089/trufil-erpnext,meisterkleister/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v6_erp,hatwar/Das_erpnext,indictranstech/reciphergroup-erpnext,dieface/erpnext,njmube/erpnext,gangadharkadam/contributionerp,mbauskar/Das_Erpnext,mbauskar/omnitech-demo-erpnext,geekroot/erpnext,mahabuber/erpnext,Drooids/erpnext,Tejal011089/paypal_erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/saloon_erp,tmimori/erpnext,saurabh6790/test-erp,hatwar/buyback-erpnext,hatwar/buyback-erpnext,SPKian/Testing2,tmimori/erpnext,indictranstech/erpnext,pombredanne/erpnext,gmarke/erpnext,sheafferusa/erpnext,sagar30051991/ozsmart-erp,mahabuber/erpnext,gangadharkadam/contributionerp,pombredanne/erpnext,hanselke/erpnext-1,indictranstech/erpnext,geekroot/erpnext,Tejal011089/fbd_erpnext,treejames/erpnext,njmube/erpnext,indictranstech/biggift-erpnext,rohitwaghchaure/erpnext-receipher,sheafferusa/erpnext,indictranstech/fbd_erpnext,hanselke/erpnext-1,gangadharkadam/saloon_erp_install,MartinEnder/erpnext-de,sagar30051991/ozsmart-erp,hanselke/erpnext-1,Aptitudetech/ERPNext,shitolepriya/test-erp,fuhongliang/erpnext,mbauskar/omnitech-erpnext,fuhongliang/erpnext,hanselke/erpnext-1,ThiagoGarciaAlves/erpnext,gangadharkadam/contributionerp,Tejal011089/trufil-erpnext,indictranstech/fbd_erpnext,indictranstech/osmosis-erpnext,saurabh6790/test-erp,SPKian/Testing2,gangadharkadam/saloon_erp,Suninus/erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,aruizramon/alec_erpnext,gmarke/erpnext,netfirms/erpnext,geekroot/erpnext,gangadhar-kadam/helpdesk-erpnext,SPKian/Testing,anandpdoshi/erpnext,geekroot/erpnext,susuchina/ERPNEXT,indictranstech/reciphergroup-erpnext,indictranstech/reciphergroup-erpnext,rohitwagh
chaure/erpnext-receipher,ThiagoGarciaAlves/erpnext,anandpdoshi/erpnext,sagar30051991/ozsmart-erp,Drooids/erpnext,ShashaQin/erpnext,mbauskar/omnitech-demo-erpnext,dieface/erpnext,shitolepriya/test-erp,njmube/erpnext,meisterkleister/erpnext,SPKian/Testing,mbauskar/Das_Erpnext,susuchina/ERPNEXT,shft117/SteckerApp,ThiagoGarciaAlves/erpnext,fuhongliang/erpnext,indictranstech/fbd_erpnext,indictranstech/trufil-erpnext,shitolepriya/test-erp,mbauskar/helpdesk-erpnext,Tejal011089/huntercamp_erpnext,hatwar/buyback-erpnext,indictranstech/biggift-erpnext,hatwar/Das_erpnext,gangadhar-kadam/helpdesk-erpnext,mbauskar/alec_frappe5_erpnext,indictranstech/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/saloon_erp_install,mahabuber/erpnext,shitolepriya/test-erp,mbauskar/alec_frappe5_erpnext,Drooids/erpnext,gsnbng/erpnext,hatwar/Das_erpnext,fuhongliang/erpnext,MartinEnder/erpnext-de,Tejal011089/trufil-erpnext,ShashaQin/erpnext,netfirms/erpnext,indictranstech/biggift-erpnext,indictranstech/osmosis-erpnext,dieface/erpnext,mbauskar/helpdesk-erpnext,indictranstech/trufil-erpnext,hatwar/buyback-erpnext,mbauskar/omnitech-erpnext,hatwar/Das_erpnext,sagar30051991/ozsmart-erp,Tejal011089/paypal_erpnext,meisterkleister/erpnext,dieface/erpnext,Suninus/erpnext,gangadharkadam/v6_erp,indictranstech/fbd_erpnext,mbauskar/helpdesk-erpnext,rohitwaghchaure/erpnext-receipher,gangadharkadam/v6_erp,shft117/SteckerApp,gsnbng/erpnext,SPKian/Testing,gangadhar-kadam/helpdesk-erpnext,aruizramon/alec_erpnext,aruizramon/alec_erpnext,SPKian/Testing2,tmimori/erpnext,gangadharkadam/saloon_erp_install,shft117/SteckerApp,Suninus/erpnext,mbauskar/Das_Erpnext,Tejal011089/paypal_erpnext,treejames/erpnext,mbauskar/omnitech-demo-erpnext,mbauskar/helpdesk-erpnext,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,Tejal011089/paypal_erpnext,indictranstech/Das_Erpnext,mbauskar/alec_frappe5_erpnext,netfirms/erpnext,ShashaQin/erpnext,shft117/SteckerApp,pombredanne/erpnext,sheafferusa/erpnext,treejames/erpnext,indictranstech
/Das_Erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp,indictranstech/Das_Erpnext,hernad/erpnext,indictranstech/biggift-erpnext,mbauskar/omnitech-demo-erpnext,indictranstech/trufil-erpnext,gangadharkadam/saloon_erp_install,meisterkleister/erpnext,gmarke/erpnext,indictranstech/osmosis-erpnext,gmarke/erpnext,njmube/erpnext,Tejal011089/trufil-erpnext,netfirms/erpnext,rohitwaghchaure/erpnext-receipher,susuchina/ERPNEXT,pombredanne/erpnext,MartinEnder/erpnext-de,Tejal011089/fbd_erpnext,Drooids/erpnext,SPKian/Testing,ThiagoGarciaAlves/erpnext,indictranstech/reciphergroup-erpnext,gsnbng/erpnext,gangadharkadam/contributionerp,hernad/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/fbd_erpnext,saurabh6790/test-erp,mbauskar/sapphire-erpnext,sheafferusa/erpnext,treejames/erpnext,Suninus/erpnext,mahabuber/erpnext,tmimori/erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/helpdesk-erpnext,indictranstech/erpnext,MartinEnder/erpnext-de,hernad/erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,mbauskar/sapphire-erpnext | erpnext/stock/doctype/item_attribute/item_attribute.py | erpnext/stock/doctype/item_attribute/item_attribute.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class ItemAttribute(Document):
	"""Controller for the Item Attribute doctype.

	Ensures attribute values/abbreviations are unique within the child
	table and that no value still referenced by an existing Item Variant
	is removed.
	"""
	def validate(self):
		"""Run all checks before the document is saved."""
		self.validate_duplication()
		self.validate_attribute_values()

	def validate_duplication(self):
		"""Reject duplicate attribute values or abbreviations.

		Abbreviations are upper-cased in place before comparison, so the
		abbr check is effectively case-insensitive.
		"""
		values, abbrs = [], []
		for d in self.item_attribute_values:
			d.abbr = d.abbr.upper()
			if d.attribute_value in values:
				frappe.throw(_("{0} must appear only once").format(d.attribute_value))
			values.append(d.attribute_value)

			if d.abbr in abbrs:
				frappe.throw(_("{0} must appear only once").format(d.abbr))
			abbrs.append(d.abbr)

	def validate_attribute_values(self):
		"""Block removal of any value that existing Item Variants still use."""
		attribute_values = []
		for d in self.item_attribute_values:
			attribute_values.append(d.attribute_value)

		# Every distinct value already used by a variant of this attribute
		# must still be present in the (possibly edited) child table.
		variant_attributes = frappe.db.sql("select DISTINCT attribute_value from `tabVariant Attribute` where attribute=%s", self.name)
		if variant_attributes:
			for d in variant_attributes:
				if d[0] not in attribute_values:
					frappe.throw(_("Attribute Value {0} cannot be removed from {1} as Item Variants exist with this Attribute.").format(d[0], self.name))
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class ItemAttribute(Document):
def validate(self):
self.validate_duplication()
self.validate_attribute_values()
def validate_duplication(self):
values, abbrs = [], []
for d in self.item_attribute_values:
d.abbr = d.abbr.upper()
if d.attribute_value in values:
frappe.throw(_("{0} must appear only once").format(d.attribute_value))
values.append(d.attribute_value)
if d.abbr in abbrs:
frappe.throw(_("{0} must appear only once").format(d.abbr))
abbrs.append(d.abbr)
def validate_attribute_values(self):
attribute_values = []
for d in self.item_attribute_values:
attribute_values.append(d.attribute_value)
variant_attributes = frappe.db.sql("select DISTINCT attribute_value from `tabVariant Attribute` where attribute=%s", self.name)
if variant_attributes:
for d in variant_attributes:
if d[0] not in attribute_values:
frappe.throw(_("Attribute Value {0} cannot be removed from {1} as it has Variants.").format(d[0], self.name)) | agpl-3.0 | Python |
db1e14ecabaaf39873c18ea7156eab085d89af08 | Add support for backend section in .system | olofk/fusesoc,lowRISC/fusesoc,lowRISC/fusesoc,olofk/fusesoc | orpsoc/system.py | orpsoc/system.py | import sys
if sys.version[0] == '2':
import ConfigParser as configparser
else:
import configparser
from orpsoc.core import Core
from orpsoc.config import Config
import os
DEFAULT_VALUES = {'name' : '',
'cores' : '',
'simulators' : '',
'backend' : '',
'include_files' : '',
'rtl_files' : '',
'tb_files' : ''}
class System:
    """In-memory model of a .system file describing an ORPSoC system.

    Parses the config file, builds the implicit 'orpsoc' top-level core
    plus every core listed under [main] cores, and records simulator and
    backend settings.
    """
    def __init__(self, system_file):
        system_root = os.path.dirname(system_file)

        system_config = configparser.SafeConfigParser(DEFAULT_VALUES)
        # NOTE(review): the file handle opened here is never closed.
        system_config.readfp(open(system_file))

        self.name = system_config.get('main', 'name')

        self.cores = {}
        # The system itself is modelled as a synthetic core named 'orpsoc'.
        self.cores['orpsoc'] = self._create_orpsoc_core(system_config, system_root)

        cores = system_config.get('main', 'cores').split()

        for core in cores:
            # Core descriptions live at <cores_root>/<core>/<core>.core
            core_file = os.path.join(Config().cores_root,core,core+'.core')
            self.cores[core] = Core(core_file)

        self.simulators = system_config.get('main','simulators').split()
        self.backend_name = system_config.get('main','backend')
        # self.backend is only assigned when the named section exists, so
        # callers must be prepared for the attribute to be absent.
        if self.backend_name and system_config.has_section(self.backend_name):
            self.backend = dict(system_config.items(self.backend_name))

    def setup_cores(self):
        """Run setup() on every core, including the synthetic 'orpsoc' one."""
        for core in self.cores:
            self.cores[core].setup()

    def _create_orpsoc_core(self, system_config, system_root):
        """Build the synthetic top-level core from the [main] file lists."""
        core = Core(name=self.name, core_root=system_root)
        core.rtl_files = system_config.get('main', 'rtl_files').split()
        core.include_files = system_config.get('main', 'include_files').split()
        core.include_dirs = list(set(map(os.path.dirname, core.include_files)))
        core.tb_files = system_config.get('main', 'tb_files').split()
        return core

    def get_cores(self):
        """Return the dict mapping core name -> Core object."""
        return self.cores

    def get_rtl_files(self):
        # NOTE(review): self.rtl_files is never assigned in __init__, so
        # this raises AttributeError as written -- see the FIXME below.
        return self.rtl_files
        #FIXME: Iterate through core RTL files and append to rtl_files

    def get_tb_files(self):
        # NOTE(review): self.tb_files is likewise never assigned here.
        return self.tb_files
| import sys
if sys.version[0] == '2':
import ConfigParser as configparser
else:
import configparser
from orpsoc.core import Core
from orpsoc.config import Config
import os
DEFAULT_VALUES = {'name' : '',
'cores' : '',
'simulators' : '',
'backend' : '',
'include_files' : '',
'rtl_files' : '',
'tb_files' : ''}
class System:
def __init__(self, system_file):
system_root = os.path.dirname(system_file)
system_config = configparser.SafeConfigParser(DEFAULT_VALUES)
system_config.readfp(open(system_file))
self.name = system_config.get('main', 'name')
self.cores = {}
self.cores['orpsoc'] = self._create_orpsoc_core(system_config, system_root)
cores = system_config.get('main', 'cores').split()
for core in cores:
core_file = os.path.join(Config().cores_root,core,core+'.core')
self.cores[core] = Core(core_file)
self.simulators = system_config.get('main','simulators').split()
def setup_cores(self):
for core in self.cores:
self.cores[core].setup()
def _create_orpsoc_core(self, system_config, system_root):
core = Core(name=self.name, core_root=system_root)
core.rtl_files = system_config.get('main', 'rtl_files').split()
core.include_files = system_config.get('main', 'include_files').split()
core.include_dirs = list(set(map(os.path.dirname, core.include_files)))
core.tb_files = system_config.get('main', 'tb_files').split()
return core
def get_cores(self):
return self.cores
def get_rtl_files(self):
return self.rtl_files
#FIXME: Iterate through core RTL files and append to rtl_files
def get_tb_files(self):
return self.tb_files
| bsd-2-clause | Python |
ba7c59bc0ad31658ae741ba7d0ddf0e0bc1d36d4 | Update production URL to the right one (#1903) | La0/mozilla-relengapi,mozilla-releng/services,mozilla-releng/services,La0/mozilla-relengapi,La0/mozilla-relengapi,mozilla-releng/services,La0/mozilla-relengapi,mozilla-releng/services | src/shipit/api/setup.py | src/shipit/api/setup.py | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import setuptools
def read_requirements(file_):
    """Parse a pip requirements file into a sorted, de-duplicated list.

    Editable/URL entries ('-e ', 'http(s)://') are reduced to the name
    given by their '#egg=' fragment (plus any '[extras]' found on the
    line); comments, blank lines and other option lines ('-...') are
    skipped; inline '#' comments are stripped.
    """
    collected = set()
    with open(file_) as handle:
        for raw in handle:
            entry = raw.strip()
            if entry.startswith(('-e ', 'http://', 'https://')):
                extras = ''
                if '[' in entry:
                    extras = '[' + entry.split('[')[1].split(']')[0] + ']'
                entry = entry.split('#')[1].split('egg=')[1] + extras
            elif not entry or entry.startswith('#') or entry.startswith('-'):
                continue
            collected.add(entry.split('#')[0].strip())
    return sorted(collected)
# The canonical version string lives in the VERSION file next to setup.py.
with open('VERSION') as f:
    VERSION = f.read().strip()


DESCRIPTION = '''
Release kick-off (ship-it) is a Mozilla "internal" tool used to start the
release of Firefox Desktop, Android and Thunderbird. This tool is specific to
Mozilla workflows and tools.
'''


setuptools.setup(
    name='mozilla-shipit-api',
    version=VERSION,
    description=DESCRIPTION,
    author='Mozilla Release Services Team',
    author_email='release-services@mozilla.com',
    url='https://shipit-api.mozilla-releng.net',
    # Dependencies are derived from the pip requirements files so the
    # two never drift apart.
    tests_require=read_requirements('requirements-dev.txt'),
    install_requires=read_requirements('requirements.txt'),
    packages=setuptools.find_packages(),
    entry_points=dict(
        console_scripts=[
            'shipit-v1-sync = shipit_api.cli:v1_sync',
            'shipit-download-product-details = shipit_api.cli:download_product_details',
            'shipit-rebuild-product-details = shipit_api.cli:rebuild_product_details',
            'shipit-trigger-product-details = shipit_api.cli:trigger_product_details',
        ],
    ),
    include_package_data=True,
    zip_safe=False,
    license='MPL2',
)
| # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import setuptools
def read_requirements(file_):
lines = []
with open(file_) as f:
for line in f.readlines():
line = line.strip()
if line.startswith('-e ') or line.startswith('http://') or line.startswith('https://'):
extras = ''
if '[' in line:
extras = '[' + line.split('[')[1].split(']')[0] + ']'
line = line.split('#')[1].split('egg=')[1] + extras
elif line == '' or line.startswith('#') or line.startswith('-'):
continue
line = line.split('#')[0].strip()
lines.append(line)
return sorted(list(set(lines)))
with open('VERSION') as f:
VERSION = f.read().strip()
DESCRIPTION = '''
Release kick-off (ship-it) is a Mozilla "internal" tool used to start the
release of Firefox Desktop, Android and Thunderbird. This tool is specific to
Mozilla workflows and tools.
'''
setuptools.setup(
name='mozilla-shipit-api',
version=VERSION,
description=DESCRIPTION,
author='Mozilla Release Services Team',
author_email='release-services@mozilla.com',
url='https://api.shipit.mozilla-releng.net',
tests_require=read_requirements('requirements-dev.txt'),
install_requires=read_requirements('requirements.txt'),
packages=setuptools.find_packages(),
entry_points=dict(
console_scripts=[
'shipit-v1-sync = shipit_api.cli:v1_sync',
'shipit-download-product-details = shipit_api.cli:download_product_details',
'shipit-rebuild-product-details = shipit_api.cli:rebuild_product_details',
'shipit-trigger-product-details = shipit_api.cli:trigger_product_details',
],
),
include_package_data=True,
zip_safe=False,
license='MPL2',
)
| mpl-2.0 | Python |
7760e9e811524e8fdc3599169d12d12f4ff89421 | delete data sql | loreguerra/bbt-chart | delete_entry.py | delete_entry.py | import psycopg2
import sys
from connect import connect_to_db
# edit data from arguments in command line
# filename counts as first arg
args = sys.argv
date_to_delete = args[1] # second arg
# adding items to data
data = date_to_delete
# connect to database
conn = connect_to_db()
cur = conn.cursor()
# SQL for inserting values into db
SQL = "DELETE FROM BBT_CHART WHERE DATE = (%s);"
# execute SQL command plus data
cur.execute(SQL, data)
print "Entry deleted"
conn.commit()
conn.close()
| import psycopg2
import sys
from connect import connect_to_db
# edit data from arguments in command line
# filename counts as first arg
args = sys.argv
date_to_delete = args[1] # second arg
# adding items to data
data = date_to_delete
# connect to database
conn = connect_to_db()
cur = conn.cursor()
# SQL for inserting values into db
SQL = "DELETE from BBT_CHART WHERE DATE = (%s);"
# execute SQL command plus data
cur.execute(SQL, data)
print "Entry deleted"
conn.commit()
conn.close()
| mit | Python |
9c6b5bc39f272926e92c1c4a5a7aeed2436c61b5 | Update config.py | fadhiilrachman/line-py | linepy/config.py | linepy/config.py | # -*- coding: utf-8 -*-
from akad.ttypes import ApplicationType
import re
class Config(object):
    """Static endpoints, service paths and client-identity constants.

    Everything is a class-level constant; __init__ only derives the
    composite USER_AGENT and APP_NAME strings from them.
    """
    # API gateway and object-storage (media) hosts.
    LINE_HOST_DOMAIN = 'https://gd2.line.naver.jp'
    LINE_OBS_DOMAIN = 'https://obs-sg.line-apps.com'
    # Timeline (home feed) endpoints.
    LINE_TIMELINE_API = 'https://gd2.line.naver.jp/mh/api'
    LINE_TIMELINE_MH = 'https://gd2.line.naver.jp/mh'
    # Service paths appended to LINE_HOST_DOMAIN.
    LINE_LOGIN_QUERY_PATH = '/api/v4p/rs'
    LINE_AUTH_QUERY_PATH = '/api/v4/TalkService.do'
    LINE_API_QUERY_PATH_FIR = '/S4'
    LINE_POLL_QUERY_PATH_FIR = '/P4'
    LINE_CALL_QUERY_PATH = '/V4'
    LINE_CERTIFICATE_PATH = '/Q'
    LINE_CHAN_QUERY_PATH = '/CH4'
    LINE_SQUARE_QUERY_PATH = '/SQS1'
    LINE_SHOP_QUERY_PATH = '/SHOP4'

    # Channel ids of first-party LINE services.
    CHANNEL_ID = {
        'LINE_TIMELINE': '1341209850',
        'LINE_WEBTOON': '1401600689',
        'LINE_TODAY': '1518712866',
        'LINE_STORE': '1376922440',
        'LINE_MUSIC': '1381425814',
        'LINE_SERVICES': '1459630796'
    }

    # Client identity reported to the server.
    APP_TYPE = ApplicationType._VALUES_TO_NAMES[96]
    APP_VER = '7.18.1'
    CARRIER = '51089, 1-0'
    SYSTEM_NAME = 'FDLRCN'
    SYSTEM_VER = '11.2.5'
    IP_ADDR = '8.8.8.8'
    # Simple e-mail shape check used during login/registration.
    EMAIL_REGEX = re.compile(r"[^@]+@[^@]+\.[^@]+")

    def __init__(self):
        """Build the derived identity strings from the constants above."""
        # Sniff Chrome headers and use those instead, because these will
        # get you message-banned.
        self.USER_AGENT = 'Line/%s' % self.APP_VER
        self.APP_NAME = '%s\t%s\t%s\t%s' % (self.APP_TYPE, self.APP_VER, self.SYSTEM_NAME, self.SYSTEM_VER)
| # -*- coding: utf-8 -*-
from akad.ttypes import ApplicationType
import re
class Config(object):
LINE_HOST_DOMAIN = 'https://gd2.line.naver.jp'
LINE_OBS_DOMAIN = 'https://obs-sg.line-apps.com'
LINE_TIMELINE_API = 'https://gd2.line.naver.jp/mh/api'
LINE_TIMELINE_MH = 'https://gd2.line.naver.jp/mh'
LINE_LOGIN_QUERY_PATH = '/api/v4p/rs'
LINE_AUTH_QUERY_PATH = '/api/v4/TalkService.do'
LINE_API_QUERY_PATH_FIR = '/S4'
LINE_POLL_QUERY_PATH_FIR = '/P4'
LINE_CALL_QUERY_PATH = '/V4'
LINE_CERTIFICATE_PATH = '/Q'
LINE_CHAN_QUERY_PATH = '/CH4'
LINE_SQUARE_QUERY_PATH = '/SQS1'
LINE_SHOP_QUERY_PATH = '/SHOP4'
CHANNEL_ID = {
'LINE_TIMELINE': '1341209850',
'LINE_WEBTOON': '1401600689',
'LINE_TODAY': '1518712866',
'LINE_STORE': '1376922440',
'LINE_MUSIC': '1381425814',
'LINE_SERVICES': '1459630796'
}
APP_TYPE = ApplicationType._VALUES_TO_NAMES[96]
APP_VER = '7.18.1'
CARRIER = '51089, 1-0'
SYSTEM_NAME = 'FDLRCN'
SYSTEM_VER = '11.2.5'
IP_ADDR = '8.8.8.8'
EMAIL_REGEX = re.compile(r"[^@]+@[^@]+\.[^@]+")
def __init__(self):
self.APP_NAME = '%s\t%s\t%s\t%s' % (self.APP_TYPE, self.APP_VER, self.SYSTEM_NAME, self.SYSTEM_VER)
self.USER_AGENT = 'Line/%s' % self.APP_VER
| bsd-3-clause | Python |
e6d5b3d09b284f0c12e54bd7dc51f9175d7e189c | implement Output edit menu | CaptainDesAstres/Simple-Blender-Render-Manager,CaptainDesAstres/Blender-Render-Manager | settingMod/Output.py | settingMod/Output.py | #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage rendering output path'''
import xml.etree.ElementTree as xmlMod
import os
class Output:
	'''class to manage rendering output path'''
	def __init__(self, xml= None):
		'''initialize output path with default value or values extracted from an xml object'''
		if xml is None:
			self.defaultInit()
		else:
			self.fromXml(xml)

	def defaultInit(self):
		'''initialize output path with default value (creating the directory if needed)'''
		if not os.path.exists('/home/'+os.getlogin()+'/.BlenderRenderManager/render'):
			os.mkdir('/home/'+os.getlogin()+'/.BlenderRenderManager/render')
		self.path = '/home/'+os.getlogin()+'/.BlenderRenderManager/render'

	def fromXml(self, xml):
		'''initialize output path with values extracted from an xml object'''
		self.path = xml.get('path')

	def toXml(self):
		'''export output path into xml syntaxed string'''
		return '<output path="'+self.path+'" />\n'

	def see(self, log):
		'''method to see output path and access edition menu

		Loops showing the settings and a menu until the user quits;
		returns True if a setting was changed (always False for now,
		since both edit actions are still unimplemented).
		'''
		change = False
		log.menuIn('Output Path')

		while True:
			# Redraw the screen: log header, then current settings, then menu.
			os.system('clear')
			log.print()

			print('\n')
			self.print()

			print('''\n\n        \033[4mMenu :\033[4m
        1- Edit path
        2- Edit patterns
        0- Quit
''')

			choice = input().strip().lower()

			if choice in ['0', 'q', 'quit', 'cancel']:
				log.menuOut()
				return change
			# Menu entries 1 and 2 are placeholders for future edit actions.
			log.write('\033[31mMenu choice not yet implemented…\033[0m\n')

	def print(self, index = False, std = True):
		'''a method to display the output path settings'''
| #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage rendering output path'''
import xml.etree.ElementTree as xmlMod
import os
class Output:
'''class to manage rendering output path'''
def __init__(self, xml= None):
'''initialize output path with default value or values extracted from an xml object'''
if xml is None:
self.defaultInit()
else:
self.fromXml(xml)
def defaultInit(self):
'''initialize output path with default value'''
if not os.path.exists('/home/'+os.getlogin()+'/.BlenderRenderManager/render'):
os.mkdir('/home/'+os.getlogin()+'/.BlenderRenderManager/render')
self.path = '/home/'+os.getlogin()+'/.BlenderRenderManager/render'
def fromXml(self, xml):
'''initialize output path with values extracted from an xml object'''
self.path = xml.get('path')
def toXml(self):
'''export output path into xml syntaxed string'''
return '<output path="'+self.path+'" />\n'
def see(self, log):
'''method to see output path and access edition menu'''
def print(self, index = False, std = True):
'''a method to display the output path settings'''
| mit | Python |
524292c821886f5cbe5fe3ea201c65c48119be41 | update extract_samples.py | lisongze/SForecast,lisongze/SForecast,lisongze/SForecast,lisongze/SForecast | extract_samples.py | extract_samples.py | import sys, os
import numpy as np
import pandas as pd
import datetime
if __name__ == '__main__':
infile = sys.argv[1]
csv_content = pd.read_csv(infile, [0])
date = [datetime.datetime.strptime(x, '%Y-%m-%d') for x in csv_content.index]
for x in date:
s = 0
| import sys, os
import numpy as np
import pandas as pd
import datetime
if __name__ == '__main__':
infile = sys.argv[1]
csv_content = pd.read_csv(infile, [0])
date = []
| mit | Python |
dbbdf00341f5cc8673c9cc0f3b4baf7487bf7a4b | Combine conditionals | markstory/lint-review,markstory/lint-review,markstory/lint-review | lintreview/tools/shellcheck.py | lintreview/tools/shellcheck.py | import logging
import os
import functools
from lintreview.tools import Tool, run_command, process_checkstyle
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Shellcheck(Tool):
    """Lint shell scripts with the external `shellcheck` binary."""

    name = 'shellcheck'

    def check_dependencies(self):
        """
        See if shellcheck is on the system path.
        """
        return in_path('shellcheck')

    def match_file(self, filename):
        """Return True when *filename* looks like a shell script.

        Matches on extension first; otherwise only existing, executable
        files whose first line is a bash/sh/zsh/ksh shebang qualify.
        """
        base = os.path.basename(filename)
        name, ext = os.path.splitext(base)
        if ext in ('.sh', '.bash', '.ksh', '.zsh'):
            return True

        if not os.path.exists(filename) or not os.access(filename, os.X_OK):
            return False

        # Check for a shebang in the first line.
        with open(filename, 'r') as f:
            line = f.readline()
            return line.startswith('#!') and (
                'bash' in line or
                'sh' in line or
                'zsh' in line or
                'ksh' in line
            )

    def process_files(self, files):
        """
        Run code checks with shellcheck.
        """
        log.debug('Processing %s files with %s', files, self.name)
        command = self.create_command(files)
        output = run_command(
            command,
            ignore_error=True,
            include_errors=False)
        filename_converter = functools.partial(
            self._relativize_filename,
            files)
        process_checkstyle(self.problems, output, filename_converter)
        # Bug fix: map() returns a lazy iterator on Python 3, so the
        # previous `map(self.escape_backtick, self.problems)` never ran
        # its side effects.  Use an explicit loop instead.
        for problem in self.problems:
            self.escape_backtick(problem)

    def escape_backtick(self, problem):
        """Escape backticks so problem text survives markdown rendering."""
        problem.body = problem.body.replace('`', '\\`')

    def create_command(self, files):
        """Assemble the shellcheck command line for *files*."""
        command = ['shellcheck']
        command += ['--format=checkstyle']
        shell = 'sh'
        if self.options.get('shell'):
            shell = self.apply_base(self.options['shell'])
        command += ['--shell=' + shell]
        if self.options.get('exclude'):
            command += ['--exclude=' + str(self.options['exclude'])]
        command += files
        return command
| import logging
import os
import functools
from lintreview.tools import Tool, run_command, process_checkstyle
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Shellcheck(Tool):
name = 'shellcheck'
def check_dependencies(self):
"""
See if shellcheck is on the system path.
"""
return in_path('shellcheck')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
if ext in ('.sh', '.bash', '.ksh', '.zsh'):
return True
if not os.path.exists(filename):
return False
if not os.access(filename, os.X_OK):
return False
# Check for a shebang in the first line.
with open(filename, 'r') as f:
line = f.readline()
return line.startswith('#!') and (
'bash' in line or
'sh' in line or
'zsh' in line or
'ksh' in line
)
def process_files(self, files):
"""
Run code checks with shellcheck.
"""
log.debug('Processing %s files with %s', files, self.name)
command = self.create_command(files)
output = run_command(
command,
ignore_error=True,
include_errors=False)
filename_converter = functools.partial(
self._relativize_filename,
files)
process_checkstyle(self.problems, output, filename_converter)
map(self.escape_backtick, self.problems)
def escape_backtick(self, problem):
problem.body = problem.body.replace('`', '\`')
def create_command(self, files):
command = ['shellcheck']
command += ['--format=checkstyle']
shell = 'sh'
if self.options.get('shell'):
shell = self.apply_base(self.options['shell'])
command += ['--shell=' + shell]
if self.options.get('exclude'):
command += ['--exclude=' + str(self.options['exclude'])]
command += files
return command
| mit | Python |
66a8811a5f489fc133b23996ebac145407dd512f | Update ipc_lista4.6.py | any1m1c/ipc20161 | lista4-oficial/ipc_lista4.6.py | lista4-oficial/ipc_lista4.6.py | #Bruno de Oliveira Freire - 1615310030
#questao 6 da lista de listas
media=[]
qtd_alunos=0
num_aluno=0
m=0
aluno=1
for qtd_alunos in range(10):
print("-------------nota do aluno %d-----------------"%aluno)
n1=float(input("insira o numero 1:"))
n2=float(input("insira o numero 2:"))
n3=float(input("insira o numero 3:"))
n4=float(input("insira o numero 4:"))
aluno+=1
m = (n1+n2+n3+n4)/4
media.append(m)
if m>=7:
num_aluno+=1
print("esse e a quantidade de alunos com nota na media:%d"%num_aluno)
| #Bruno de Oliveira Freire - 1615310030
media=[]
qtd_alunos=0
num_aluno=0
m=0
aluno=1
for qtd_alunos in range(10):
print("-------------nota do aluno %d-----------------"%aluno)
n1=float(input("insira o numero 1:"))
n2=float(input("insira o numero 2:"))
n3=float(input("insira o numero 3:"))
n4=float(input("insira o numero 4:"))
aluno+=1
m = (n1+n2+n3+n4)/4
media.append(m)
if m>=7:
num_aluno+=1
print("esse e a quantidade de alunos com nota na media:%d"%num_aluno)
| apache-2.0 | Python |
23b8416025e478a1740200a9a9a4302fd09d7937 | Set the version at 1.0. | uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server | spotseeker_server/__init__.py | spotseeker_server/__init__.py | __version__ = '1.0'
| apache-2.0 | Python | |
b23fd62c15c5f2e526e470daf6a6025d6231f966 | update local server to know about loading and saving gists | namin/io.livecode.ch,namin/io.livecode.ch,namin/io.livecode.ch | pub/local.py | pub/local.py | from flask import Flask
from flask import request
from flask import render_template
from flask import jsonify
import os
import json
import requests
app = Flask(__name__)
def proxy_github_post(action, user, repo):
data = {}
for k,v in request.form.iteritems():
data[k] = v
r = requests.post('http://%s/api/%s/%s/%s' % (os.environ.get('REMOTE_SERVER_NAME', 'io.livecode.ch'), action, user, repo), data)
return r.text, r.status_code
@app.route("/api/run/<user>/<repo>", methods=['POST'])
def proxy_github_run(user, repo):
    """Proxy a run request for user/repo to the remote io.livecode.ch."""
    # Bug fix: the helper defined above is proxy_github_post; the
    # previous call to the undefined name `proxy_github` raised
    # NameError on every request.
    return proxy_github_post('run', user, repo)
@app.route("/api/save/<user>/<repo>", methods=['POST'])
def proxy_github_save(user, repo):
    """Proxy a save request for user/repo to the remote io.livecode.ch."""
    # Bug fix: the helper defined above is proxy_github_post; the
    # previous call to the undefined name `proxy_github` raised
    # NameError on every request.
    return proxy_github_post('save', user, repo)
@app.route("/api/load/<user>/<repo>/<id>")
def proxy_gist_load(user, repo, id):
r = requests.get('http://%s/api/load/%s/%s/%s' % (os.environ.get('REMOTE_SERVER_NAME', 'io.livecode.ch'), user, repo, id))
result = r.json()
return jsonify(result)
@app.route('/')
def local_index():
return render_template("local/index.html")
def local_defaults(user, repo):
with open('templates/local/%s/.io.livecode.ch/defaults.json' % repo) as f_defaults:
return json.load(f_defaults)
@app.route('/learn/<user>/<repo>')
@app.route('/learn/<user>/<repo>/<subdir>')
def local_preview(user, repo, subdir=None):
j_defaults = local_defaults(user, repo)
if subdir:
subdir += '/'
else:
subdir = ""
return render_template('local/%s/.io.livecode.ch/_site/%sindex.html' % (repo, subdir), user=user, repo=repo, language=j_defaults.get('language'))
@app.route('/debug/<path:p>')
def debug_page(p):
return render_template(p+'.html')
if __name__ == "__main__":
app.run(debug=True)
| from flask import Flask
from flask import request
from flask import render_template
import os
import json
import requests
app = Flask(__name__)
@app.route("/api/run/<user>/<repo>", methods=['POST'])
def proxy_github_run(user, repo):
data = {}
for k,v in request.form.iteritems():
data[k] = v
r = requests.post('http://%s/api/run/%s/%s' % (os.environ.get('REMOTE_SERVER_NAME', 'io.livecode.ch'), user, repo), data)
return r.text, r.status_code
@app.route('/')
def local_index():
return render_template("local/index.html")
def local_defaults(user, repo):
with open('templates/local/%s/.io.livecode.ch/defaults.json' % repo) as f_defaults:
return json.load(f_defaults)
@app.route('/learn/<user>/<repo>')
@app.route('/learn/<user>/<repo>/<subdir>')
def local_preview(user, repo, subdir=None):
j_defaults = local_defaults(user, repo)
if subdir:
subdir += '/'
else:
subdir = ""
return render_template('local/%s/.io.livecode.ch/_site/%sindex.html' % (repo, subdir), user=user, repo=repo, language=j_defaults.get('language'))
@app.route('/debug/<path:p>')
def debug_page(p):
return render_template(p+'.html')
if __name__ == "__main__":
app.run(debug=True)
| apache-2.0 | Python |
82638521d51cea4286f4b3e8ecd3f2ccd0441e49 | Create core Class it's called pyMonitor with some stuffs | ahmedalkabir/pyMonitor | pyMonitor.py | pyMonitor.py | """
pyMonitor first Version
Written By :Ahmed Alkabir
"""
#!/usr/bin/python3
# Library
import serial
import sys
class pyMonitor():
#baud rate of Serial communication
baud_rate = [4800,9600,14400,19200,28800,38400,57600,115200]
# serial object
__main_conn = None
# Constructor
def __init__(self, port, baud_rate, byte_size=serial.EIGHTBITS, parity=serial.PARITY_NONE, stop_bit=serial.STOPBITS_ONE):
self.__main_conn = serial.Serial(port, baud_rate, bytesize=byte_size, parity=parity, stopbits=stop_bit)
pass
# Close Connection of Serial Communication
@classmethod
def close_connection(cls):
if cls.__main_conn is not None:
cls.__main_conn.close()
# Ports of Computers and it depends on os system
@staticmethod
def get_port():
if sys.platform.startswith('win'): # For Windows Platform
ports = ['COM%s' %(i + 1) for i in range(256)]
win_ports = []
for port in ports:
try:
# Check if port it works if work add to results list otherwise ignore it
temp_port = serial.Serial(port)
temp_port.close()
win_ports.append(port)
except (OSError, serial.SerialException):
pass
# return ports
return win_ports
elif sys.platform.startswith('linux'): # For Linux Platform
ports = ['/dev/ttyUSB%s'%(i) for i in range(256)]
linux_ports = []
for port in ports:
try:
# Check if port it works if work add to results list otherwise ignore it
temp_port = serial.Serial(port)
temp_port.close()
linux_ports.append(port)
except(OSError, serial.SerialException):
pass
# return ports
return linux_ports
def main():
a = pyMonitor(pyMonitor.get_port()[0],pyMonitor.baud_rate[0])
main() | mit | Python | |
343499674bb2dea15b3ecc8d3c2bb41ab273e758 | Improve exception printing | kyamagu/psd2svg | src/psd2svg/rasterizer/batik_rasterizer.py | src/psd2svg/rasterizer/batik_rasterizer.py | # -*- coding: utf-8 -*-
"""
Batik-based rasterizer module.
Download the latest batik rasterizer to use the module. Note Ubuntu 16.04LTS
package is broken and does not work.
Prerequisite:
wget http://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&\
filename=xmlgraphics/batik/binaries/batik-bin-1.9.tar.gz
export BATIK_PATH=./batik-bin-1.9.tar.gz
Deb package:
sudo apt-get install -y libbatik-java
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar"
)
class BatikRasterizer(BaseRasterizer):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
basename, ext = os.path.splitext(os.path.basename(url))
output_file = os.path.join(d, "{}.{}".format(basename, format))
cmd = [
"java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0.255.255.255",
"-m", "image/{}".format(format),
"-d", d,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
try:
assert os.path.exists(output_file)
rasterized = Image.open(output_file)
except:
logger.error("{}\n{}{}".format(" ".join(cmd), stdout, stderr))
raise
return self.composite_background(rasterized)
| # -*- coding: utf-8 -*-
"""
Batik-based rasterizer module.
Download the latest batik rasterizer to use the module. Note Ubuntu 16.04LTS
package is broken and does not work.
Prerequisite:
wget http://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&\
filename=xmlgraphics/batik/binaries/batik-bin-1.9.tar.gz
export BATIK_PATH=./batik-bin-1.9.tar.gz
Deb package:
sudo apt-get install -y libbatik-java
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(BaseRasterizer):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0.255.255.255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
rasterized = Image.open(output_file)
return self.composite_background(rasterized)
| mit | Python |
7ce5438e9f0b4b2ac87a708996ccde69d891b2dd | Make modules uninstallable | OCA/survey,OCA/survey,OCA/survey | survey_conditional_question/__openerp__.py | survey_conditional_question/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Survey Conditional Questions',
'version': '9.0.1.0.0',
'category': 'Warehouse Management',
'sequence': 14,
'summary': '',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'survey', 'website'
],
'data': [
'conditional_survey_view.xml',
'conditional_survey_qweb.xml',
],
'demo': [
],
'test': [
],
'installable': False,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Survey Conditional Questions',
'version': '9.0.1.0.0',
'category': 'Warehouse Management',
'sequence': 14,
'summary': '',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'survey', 'website'
],
'data': [
'conditional_survey_view.xml',
'conditional_survey_qweb.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
056e27acf790cf7134d99b60f9d924111b7003fa | Update to 22.04 | ktdreyer/teuthology,ceph/teuthology,ktdreyer/teuthology,ceph/teuthology | teuthology/test/test_get_distro_version.py | teuthology/test/test_get_distro_version.py | from teuthology.misc import get_distro_version
class Mock:
pass
class TestGetDistroVersion(object):
def setup(self):
self.fake_ctx = Mock()
self.fake_ctx.config = {}
self.fake_ctx_noarg = Mock()
self.fake_ctx_noarg.config = {}
self.fake_ctx_noarg.os_version = None
self.fake_ctx.os_type = None
self.fake_ctx_noarg.os_type = None
def test_default_distro_version(self):
# Default distro is ubuntu, default version of ubuntu is 20.04
self.fake_ctx.os_version = None
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '22.04'
def test_argument_version(self):
self.fake_ctx.os_version = '13.04'
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
def test_teuth_config_version(self):
#Argument takes precidence.
self.fake_ctx.os_version = '13.04'
self.fake_ctx.config = {'os_version': '13.10'}
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
def test_teuth_config_noarg_version(self):
self.fake_ctx_noarg.config = {'os_version': '13.04'}
distroversion = get_distro_version(self.fake_ctx_noarg)
assert distroversion == '13.04'
def test_no_teuth_config(self):
self.fake_ctx = Mock()
self.fake_ctx.os_type = None
self.fake_ctx.os_version = '13.04'
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
| from teuthology.misc import get_distro_version
class Mock:
pass
class TestGetDistroVersion(object):
def setup(self):
self.fake_ctx = Mock()
self.fake_ctx.config = {}
self.fake_ctx_noarg = Mock()
self.fake_ctx_noarg.config = {}
self.fake_ctx_noarg.os_version = None
self.fake_ctx.os_type = None
self.fake_ctx_noarg.os_type = None
def test_default_distro_version(self):
# Default distro is ubuntu, default version of ubuntu is 20.04
self.fake_ctx.os_version = None
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '20.04'
def test_argument_version(self):
self.fake_ctx.os_version = '13.04'
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
def test_teuth_config_version(self):
#Argument takes precidence.
self.fake_ctx.os_version = '13.04'
self.fake_ctx.config = {'os_version': '13.10'}
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
def test_teuth_config_noarg_version(self):
self.fake_ctx_noarg.config = {'os_version': '13.04'}
distroversion = get_distro_version(self.fake_ctx_noarg)
assert distroversion == '13.04'
def test_no_teuth_config(self):
self.fake_ctx = Mock()
self.fake_ctx.os_type = None
self.fake_ctx.os_version = '13.04'
distroversion = get_distro_version(self.fake_ctx)
assert distroversion == '13.04'
| mit | Python |
252f5be14a1c6c00871d73a5a4ed2f5125b44a8f | Fix APPEND_SLASH behavior | peterkuma/tjrapid,peterkuma/tjrapid,peterkuma/tjrapid | main/views.py | main/views.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2012 Peter Kuma
import os
from django.http import Http404
from django.shortcuts import render
from django.template import RequestContext
from django.utils import translation
from django.http import HttpResponseRedirect
from django.conf import settings
from django.shortcuts import get_object_or_404, redirect
from main.models import Category, Page
def page(request, category_name, name):
p = get_object_or_404(Page, category__name=category_name, name=name)
if p.redirect and ( \
p.redirect.startswith('http://') or \
p.redirect.startswith('https://') \
):
return redirect(p.redirect)
categories = Category.objects.all()
return render(request, 'main/' + p.category.template_name, {
'page': p,
'category': p.category,
'style': p.style,
'categories': categories,
}, RequestContext(request))
def attachment(request, category_name, page_name, name):
def redirect_or_404():
if not request.path.endswith('/'):
return HttpResponseRedirect(request.path + '/')
raise Http404
if page_name == '':
try:
c = Category.objects.get(name=category_name)
except Category.DoesNotExist:
return redirect_or_404()
for a in c.attachments.all():
if os.path.basename(a.file.name) == name:
return HttpResponseRedirect(a.file.url)
try:
p = Page.objects.get(
category__name=category_name,
name=page_name
)
except Page.DoesNotExist:
return redirect_or_404()
for a in p.attachments.all():
if os.path.basename(a.file.name) == name:
return HttpResponseRedirect(a.file.url)
return redirect_or_404()
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2012 Peter Kuma
import os
from django.http import Http404
from django.shortcuts import render
from django.template import RequestContext
from django.utils import translation
from django.http import HttpResponseRedirect
from django.conf import settings
from django.shortcuts import get_object_or_404, redirect
from main.models import Category, Page
def page(request, category_name, name):
p = get_object_or_404(Page, category__name=category_name, name=name)
if p.redirect and ( \
p.redirect.startswith('http://') or \
p.redirect.startswith('https://') \
):
return redirect(p.redirect)
categories = Category.objects.all()
return render(request, 'main/' + p.category.template_name, {
'page': p,
'category': p.category,
'style': p.style,
'categories': categories,
}, RequestContext(request))
def attachment(request, category_name, page_name, name):
if page_name == '':
c = get_object_or_404(Category, name=category_name)
for a in c.attachments.all():
if os.path.basename(a.file.name) == name:
return HttpResponseRedirect(a.file.url)
p = get_object_or_404(Page,
category__name=category_name,
name=page_name
)
for a in p.attachments.all():
if os.path.basename(a.file.name) == name:
return HttpResponseRedirect(a.file.url)
raise Http404
| mit | Python |
4d6b254cd6b5f586619e54cf40efeb71da35e3a7 | Update getchar simprocedure to use the new read calling convention | iamahuman/angr,axt/angr,chubbymaggie/simuvex,chubbymaggie/angr,chubbymaggie/simuvex,schieb/angr,angr/simuvex,chubbymaggie/angr,schieb/angr,iamahuman/angr,axt/angr,iamahuman/angr,schieb/angr,angr/angr,tyb0807/angr,tyb0807/angr,axt/angr,chubbymaggie/simuvex,f-prettyland/angr,f-prettyland/angr,tyb0807/angr,angr/angr,angr/angr,chubbymaggie/angr,f-prettyland/angr | simuvex/procedures/libc___so___6/getchar.py | simuvex/procedures/libc___so___6/getchar.py | import simuvex
from simuvex.s_type import SimTypeInt
######################################
# getchar
######################################
class getchar(simuvex.SimProcedure):
def run(self):
self.return_type = SimTypeInt(32, True)
data = self.state.posix.read_from(0,1)
data = data.zero_extend(self.state.arch.bits-data.size())
return data
| import simuvex
from simuvex.s_type import SimTypeInt
######################################
# getchar
######################################
class getchar(simuvex.SimProcedure):
def run(self):
self.return_type = SimTypeInt(32, True)
data = self.state.posix.read(0,1)
data = data.zero_extend(self.state.arch.bits-data.size())
return data
| bsd-2-clause | Python |
f35b4b4016c773a8b086d9170609fe6dea3a09cd | include also common_resource in distribution | frishberg/robotframework-selenium2library,vincentfretin/robotframework-selenium2library,peritus/robotframework-selenium2library,edmi-nz/robotframework-selenium2library,githubccruz/robotframework-selenium2library,elizaleong/robotframework-selenium2library,fingeronthebutton/Selenium2Library,valaxy/robotframework-selenium2library,anuyens/robotframework-selenium2library,gotcha/Selenium2Library,F1ashhimself/robotframework-selenium2library,jollychang/robotframework-selenium2library,frishberg/robotframework-selenium2library,jussimalinen/robotframework-selenium2library,gotcha/Selenium2Library,stevejefferies/robotframework-selenium2library,robotframework/SeleniumLibrary,jussimalinen/robotframework-selenium2library,fingeronthebutton/Selenium2Library,SergiuTudos/Selenium2Library,githubccruz/robotframework-selenium2library,bmannix/robotframework-selenium2library,ariff/robotframework-selenium2library,robotframework/SeleniumLibrary,HelioGuilherme66/robotframework-selenium2library,jouk0/robotframework-selenium2library,alistair-broomhead/robotframework-selenium2library,bazizi/robotframework-selenium2library,deiga/robotframework-selenium2library,frishberg/robotframework-selenium2library,Songjg/robotframework-selenium2library,binithb/robotframework-selenium2library,Hi-Fi/robotframework-selenium2library,binithb/robotframework-selenium2library,vincentfretin/robotframework-selenium2library,SergiuTudos/Selenium2Library,alistair-broomhead/robotframework-selenium2library,bazizi/robotframework-selenium2library,cheezy/robotframework-selenium2library,futurice/robotframework-selenium2library,emanlove/robotframework-selenium2library,elizaleong/robotframework-selenium2library,robotframework/SeleniumLibrary,cheezy/robotframework-selenium2library,jouk0/robotframework-selenium2library,HelioGuilherme66/robotframework-selenium2library,hali4ka/robotframework-selenium2library,gotcha/Selenium2Library,hbmartin/robot
framework-selenium2library,valaxy/robotframework-selenium2library,emanlove/robotframework-selenium2library,adwu73/robotframework-selenium2library,hbmartin/robotframework-selenium2library,peritus/robotframework-selenium2library,myaskevich/robotframework-selenium2library,bazizi/robotframework-selenium2library,anilreddy/robotframework-selenium2library,Gaurang033/Selenium2Library,kontulai/robotframework-selenium2library,Gaurang033/Selenium2Library,anuyens/robotframework-selenium2library,deiga/robotframework-selenium2library,rtomac/robotframework-selenium2library,soukingang/robotframework-selenium2library,jollychang/robotframework-selenium2library,valaxy/robotframework-selenium2library,bmannix/robotframework-selenium2library,Songjg/robotframework-selenium2library,rtomac/robotframework-selenium2library,Gaurang033/Selenium2Library,hali4ka/robotframework-selenium2library,Hi-Fi/robotframework-selenium2library,emanlove/robotframework-selenium2library,anilreddy/robotframework-selenium2library,futurice/robotframework-selenium2library,myaskevich/robotframework-selenium2library,fingeronthebutton/Selenium2Library,adwu73/robotframework-selenium2library,HelioGuilherme66/robotframework-selenium2library,kontulai/robotframework-selenium2library,hali4ka/robotframework-selenium2library,edmi-nz/robotframework-selenium2library,F1ashhimself/robotframework-selenium2library,ariff/robotframework-selenium2library,SergiuTudos/Selenium2Library,hali4ka/robotframework-selenium2library,stevejefferies/robotframework-selenium2library,binithb/robotframework-selenium2library,rtomac/robotframework-selenium2library,soukingang/robotframework-selenium2library | demo/package.py | demo/package.py | #!/usr/bin/env python
import os
from time import localtime
from zipfile import ZipFile, ZIP_DEFLATED
FILES = {'': ['rundemo.py', 'README.txt'],
'login_tests': ['valid_login.txt', 'invalid_login.txt',
'html_resource.txt', 'flex_resource.txt',
'common_resource.txt'],
'demoapp': ['server.py'],
'demoapp/html': ['index.html', 'welcome.html', 'error.html',
'demo.css'],
'demoapp/flex': ['index.html', 'LoginApp.mxml', 'LoginApp.swf',
'FlexPilot.swf']}
name = 'robotframework-seleniumlibrary-demo'
root = os.path.dirname(__file__)
timestamp = '%d%02d%02d' % localtime()[:3]
zippath = os.path.join(root, '%s-%s.zip' % (name, timestamp))
if os.path.exists(zippath):
os.remove(zippath)
zipfile = ZipFile(zippath, 'w', ZIP_DEFLATED)
for dirname in FILES:
for filename in FILES[dirname]:
path = os.path.join(root, dirname.replace('/', os.sep), filename)
print 'Adding: ', os.path.normpath(path)
zipfile.write(path, os.path.join(name, path))
zipfile.close()
print 'Created: ', os.path.normpath(zippath)
| #!/usr/bin/env python
import os
from time import localtime
from zipfile import ZipFile, ZIP_DEFLATED
FILES = {'': ['rundemo.py', 'README.txt'],
'login_tests': ['valid_login.txt', 'invalid_login.txt',
'html_resource.txt', 'flex_resource.txt'],
'demoapp': ['server.py'],
'demoapp/html': ['index.html', 'welcome.html', 'error.html',
'demo.css'],
'demoapp/flex': ['index.html', 'LoginApp.mxml', 'LoginApp.swf',
'FlexPilot.swf']}
name = 'robotframework-seleniumlibrary-demo'
root = os.path.dirname(__file__)
timestamp = '%d%02d%02d' % localtime()[:3]
zippath = os.path.join(root, '%s-%s.zip' % (name, timestamp))
if os.path.exists(zippath):
os.remove(zippath)
zipfile = ZipFile(zippath, 'w', ZIP_DEFLATED)
for dirname in FILES:
for filename in FILES[dirname]:
path = os.path.join(root, dirname.replace('/', os.sep), filename)
print 'Adding: ', os.path.normpath(path)
zipfile.write(path, os.path.join(name, path))
zipfile.close()
print 'Created: ', os.path.normpath(zippath)
| apache-2.0 | Python |
5e6536de38955faf71ab1bf768aa0546d6e7b998 | Rename update command to rebuild in help | dreyou/ebstarter,dreyou/ebstarter,dreyou/ebstarter | sender.py | sender.py | #!/usr/bin/env python
import pika
import json
from optparse import OptionParser
import logging
parser = OptionParser()
parser.add_option("-a", "--application", help="application name", type="string", dest="name", default="")
parser.add_option("-o", "--operation", help="operation create|rebuild|delete", type="string", dest="operation", default="")
parser.add_option("-s", "--source", help="s3 source url", type="string", dest="source", default="")
parser.add_option("-q", "--queue", help="queue name", type="string", dest="queue", default="test")
parser.add_option("-l", "--log-level", help="log level", dest="log_level", type="int", default=1)
(options, args) = parser.parse_args()
if options.log_level == 1:
log_level = logging.INFO
elif options.log_level >= 2:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.info("Send data to queue: "+options.queue)
connection = pika.BlockingConnection()
channel = connection.channel()
channel.exchange_declare(exchange='commands', type='fanout')
channel.queue_bind(exchange='commands', queue=options.queue)
message = dict()
message["name"] = options.name
message["operation"] = options.operation
message["source"] = options.source
message_json = json.dumps(message)
logging.debug("Sending json: "+str(message_json))
channel.basic_publish("commands", "", message_json)
connection.close() | #!/usr/bin/env python
import pika
import json
from optparse import OptionParser
import logging
parser = OptionParser()
parser.add_option("-a", "--application", help="application name", type="string", dest="name", default="")
parser.add_option("-o", "--operation", help="operation create|update|delete", type="string", dest="operation", default="")
parser.add_option("-s", "--source", help="s3 source url", type="string", dest="source", default="")
parser.add_option("-q", "--queue", help="queue name", type="string", dest="queue", default="test")
parser.add_option("-l", "--log-level", help="log level", dest="log_level", type="int", default=1)
(options, args) = parser.parse_args()
if options.log_level == 1:
log_level = logging.INFO
elif options.log_level >= 2:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.info("Send data to queue: "+options.queue)
connection = pika.BlockingConnection()
channel = connection.channel()
channel.exchange_declare(exchange='commands', type='fanout')
channel.queue_bind(exchange='commands', queue=options.queue)
message = dict()
message["name"] = options.name
message["operation"] = options.operation
message["source"] = options.source
message_json = json.dumps(message)
logging.debug("Sending json: "+str(message_json))
channel.basic_publish("commands", "", message_json)
connection.close() | mit | Python |
1f2625e3d31698faeaca1eb79343bd65979da4b2 | Add UnknownCodeError to spambl module | piotr-rusin/spam-lists | spambl.py | spambl.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
class SpamBLError(Exception):
''' Base exception class for spambl module '''
class UnknownCodeError(SpamBLError):
''' Raise when trying to use an unexpected value of dnsbl return code '''
class DNSBL(object):
''' Represents a DNSBL service provider '''
def __init__(self, identifier, query_suffix, code_item_class, lists_ips, lists_uris):
''' Create new DNSBL object
:param identifier: a value designating DNSBL service provider: its name or url address.
:param query_suffix: a suffix added to DNSBL query address
:param code_item_class: item classes associated with DNSBL query return codes
:param lists_ips: information if this object represents an ip blocklist
:param lists_uris: information if this object represents a domain name blocklist
'''
self.identifier = identifier
self._query_suffix = query_suffix
self._code_item_class = code_item_class
self.lists_ips = lists_ips
self.lists_uris = lists_uris
if __name__ == '__main__':
pass | #!/usr/bin/python
# -*- coding: utf-8 -*-
class SpamBLError(Exception):
''' Base exception class for spambl module '''
class DNSBL(object):
''' Represents a DNSBL service provider '''
def __init__(self, identifier, query_suffix, code_item_class, lists_ips, lists_uris):
''' Create new DNSBL object
:param identifier: a value designating DNSBL service provider: its name or url address.
:param query_suffix: a suffix added to DNSBL query address
:param code_item_class: item classes associated with DNSBL query return codes
:param lists_ips: information if this object represents an ip blocklist
:param lists_uris: information if this object represents a domain name blocklist
'''
self.identifier = identifier
self._query_suffix = query_suffix
self._code_item_class = code_item_class
self.lists_ips = lists_ips
self.lists_uris = lists_uris
if __name__ == '__main__':
pass | mit | Python |
0a170b84c956a958270131b46e2e1b7e8f6d6777 | Bump to version 0.29.0 | reubano/meza,reubano/tabutils,reubano/meza,reubano/tabutils,reubano/tabutils,reubano/meza | meza/__init__.py | meza/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza
~~~~
Provides methods for reading and processing data from tabular formatted files
Attributes:
CURRENCIES [tuple(unicode)]: Currency symbols to remove from decimal
strings.
ENCODING (str): Default file encoding.
DEFAULT_DATETIME (obj): Default datetime object
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import sys
import logging
from datetime import datetime as dt
from builtins import *
__version__ = '0.29.0'
__title__ = 'meza'
__package_name__ = 'meza'
__author__ = 'Reuben Cummings'
__description__ = 'A Python toolkit for processing tabular data'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CURRENCIES = ('$', 'Β£', 'β¬')
ENCODING = 'utf-8'
DEFAULT_DATETIME = dt(9999, 12, 31, 0, 0, 0)
if sys.version_info.major >= 3:
import csv
else:
from . import unicsv as csv
logging.basicConfig()
csv = csv
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza
~~~~
Provides methods for reading and processing data from tabular formatted files
Attributes:
CURRENCIES [tuple(unicode)]: Currency symbols to remove from decimal
strings.
ENCODING (str): Default file encoding.
DEFAULT_DATETIME (obj): Default datetime object
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import sys
import logging
from datetime import datetime as dt
from builtins import *
__version__ = '0.28.5'
__title__ = 'meza'
__package_name__ = 'meza'
__author__ = 'Reuben Cummings'
__description__ = 'A Python toolkit for processing tabular data'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CURRENCIES = ('$', 'Β£', 'β¬')
ENCODING = 'utf-8'
DEFAULT_DATETIME = dt(9999, 12, 31, 0, 0, 0)
if sys.version_info.major >= 3:
import csv
else:
from . import unicsv as csv
logging.basicConfig()
csv = csv
| mit | Python |
d48356215cf48f86926d2ce3044180881827db5e | Fix integration test directory | jacebrowning/gitman,jacebrowning/gdm | tests/test_all.py | tests/test_all.py | """Integration tests for the `gdm` package."""
import os
import shutil
import pytest
import gdm
from gdm.config import Config
from .conftest import FILES, ROOT
@pytest.mark.integration
def test_install():
"""Verify dependencies can be installed."""
os.chdir(ROOT)
config = Config(FILES)
shutil.rmtree(config.location, ignore_errors=True)
assert not os.path.exists(config.location)
# clean install
assert gdm.install(FILES)
assert os.path.isdir(config.location)
# second install
assert gdm.install(FILES)
assert 'gdm_1' in os.listdir(config.location)
assert 'gdm_2' in os.listdir(config.location)
@pytest.mark.integration
def test_uninstall():
"""Verify dependencies can be uninstalled."""
os.chdir(ROOT)
config = Config(FILES)
assert gdm.install(FILES)
assert os.path.isdir(config.location)
assert gdm.uninstall(FILES)
assert not os.path.isdir(config.location)
| """Integration tests for the `gdm` package."""
import os
import shutil
import pytest
import gdm
from gdm.config import Config
from .conftest import FILES
@pytest.mark.integration
def test_install():
"""Verify dependencies can be installed."""
config = Config(FILES)
shutil.rmtree(config.location, ignore_errors=True)
assert not os.path.exists(config.location)
# clean install
assert gdm.install(FILES)
assert os.path.isdir(config.location)
# second install
assert gdm.install(FILES)
assert 'gdm_1' in os.listdir(config.location)
assert 'gdm_2' in os.listdir(config.location)
@pytest.mark.integration
def test_uninstall():
"""Verify dependencies can be uninstalled."""
config = Config(FILES)
assert gdm.install(FILES)
assert os.path.isdir(config.location)
assert gdm.uninstall(FILES)
assert not os.path.isdir(config.location)
| mit | Python |
a85cb3467180898c721792973bd31b14e1b96baa | use POST only. | geekan/task-manager,geekan/task-manager,geekan/task-manager,geekan/task-manager | task_manager/task_processor/views.py | task_manager/task_processor/views.py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from models import ImageNeuralTask
from time import strftime, localtime
import logging
l = logging.getLogger(__name__)
def index(request):
#l.error(__name__)
tasks = list(ImageNeuralTask.objects.all().values())
#l.debug(tasks)
return HttpResponse('<br/>'.join([str(task) for task in tasks]))
@csrf_exempt
def neural_task(request, *args, **kwargs):
l.warn(args, kwargs, request.POST, request.GET)
good_paras = ['image_url', 'image_id', 'style_image_path', 'user_id']
para_dict = {k: request.POST.get(k, '') for k in good_paras}
para_dict['create_time'] = strftime("%Y-%m-%d %H:%M:%S", localtime())
para_dict['status'] = 'accepted' if all(para_dict.values()) else 'unaccepted'
task = ImageNeuralTask(**para_dict)
task.save()
return index(request)
@csrf_exempt
def neural_task_clean(request, *args, **kwargs):
l.warn(args, kwargs, request.POST, request.GET)
ImageNeuralTask.objects.filter(Q(image_id='') | Q(user_id='')).delete()
return index(request)
| from django.shortcuts import render
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from models import ImageNeuralTask
from time import strftime, localtime
import logging
l = logging.getLogger(__name__)
def index(request):
#l.error(__name__)
tasks = list(ImageNeuralTask.objects.all().values())
#l.debug(tasks)
return HttpResponse('<br/>'.join([str(task) for task in tasks]))
@csrf_exempt
def neural_task(request, *args, **kwargs):
l.warn(args, kwargs, request.POST, request.GET)
good_paras = ['image_url', 'image_id', 'style_image_path', 'user_id']
para_dict = {k: request.REQUEST.get(k, '') for k in good_paras}
para_dict['create_time'] = strftime("%Y-%m-%d %H:%M:%S", localtime())
para_dict['status'] = 'accepted' if all(para_dict.values()) else 'unaccepted'
task = ImageNeuralTask(**para_dict)
task.save()
return index(request)
@csrf_exempt
def neural_task_clean(request, *args, **kwargs):
l.warn(args, kwargs, request.POST, request.GET)
ImageNeuralTask.objects.filter(Q(image_id='') | Q(user_id='')).delete()
return index(request)
| mit | Python |
bff5e3725e81e9abd65280da51978a4a38ea5a3f | make bus and channel first-class citizens of the shellbot package | bernard357/shellbot | shellbot/__init__.py | shellbot/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .bot import ShellBot
from .bus import Bus, Subscriber, Publisher
from .channel import Channel
from .context import Context
from .engine import Engine
from .listener import Listener
from .machines import MachinesFactory
from .server import Server
from .shell import Shell
from .spaces import SpaceFactory
from .speaker import Speaker, Vibes
from .commands.base import Command
from .routes.base import Route
from .routes.notifier import Notifier
from .routes.wrapper import Wrapper
__version__ = '17.6.6'
__all__ = [
'__version__',
'Bus',
'Channel',
'Command',
'Context',
'Engine',
'Listener',
'MachinesFactory',
'Notifier',
'Publisher',
'Route',
'Server',
'Shell',
'ShellBot',
'SpaceFactory',
'Speaker',
'Subscriber',
'Wrapper',
]
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .bot import ShellBot
from .context import Context
from .engine import Engine
from .listener import Listener
from .machines import MachinesFactory
from .server import Server
from .shell import Shell
from .spaces import SpaceFactory
from .speaker import Speaker, Vibes
from .commands.base import Command
from .routes.base import Route
from .routes.notifier import Notifier
from .routes.wrapper import Wrapper
__version__ = '17.6.6'
__all__ = [
'__version__',
'Command',
'Context',
'Engine',
'Listener',
'MachinesFactory',
'Notifier',
'Route',
'Server',
'Shell',
'ShellBot',
'SpaceFactory',
'Speaker',
'Wrapper',
]
| apache-2.0 | Python |
374bd4881e00c2605f28ea816fa94468a76f2621 | Add MultiplePublisher to handle topic name suffix | OTL/jps | jps/utils.py | jps/utils.py | import json
from .publisher import Publisher
from .common import DEFAULT_PUB_PORT
from .common import DEFAULT_HOST
from .env import get_master_host
class JsonMultiplePublisher(object):
'''publish multiple topics by one json message
Example:
>>> p = JsonMultiplePublisher()
>>> p.publish('{"topic1": 1.0, "topic2": {"x": 0.1}}')
'''
def __init__(self, host=get_master_host(), pub_port=DEFAULT_PUB_PORT):
self._pub = Publisher('*', host=host, pub_port=pub_port)
def publish(self, json_msg):
'''
json_msg = '{"topic1": 1.0, "topic2": {"x": 0.1}}'
'''
pyobj = json.loads(json_msg)
for topic, value in pyobj.items():
msg = '{topic} {data}'.format(topic=topic, data=json.dumps(value))
self._pub.publish(msg)
class MultiplePublisher(object):
def __init__(self, base_topic_name):
self._publishers = {}
self._base_topic_name = base_topic_name
def publish(self, msg, topic_suffix=''):
if topic_suffix not in self._publishers:
self._publishers[topic_suffix] = Publisher(self._base_topic_name + topic_suffix)
self._publishers[topic_suffix].publish(msg)
| import json
from .publisher import Publisher
from .common import DEFAULT_PUB_PORT
from .common import DEFAULT_HOST
from .env import get_master_host
class JsonMultiplePublisher(object):
'''publish multiple topics by one json message
Example:
>>> p = JsonMultiplePublisher()
>>> p.publish('{"topic1": 1.0, "topic2": {"x": 0.1}}')
'''
def __init__(self, host=get_master_host(), pub_port=DEFAULT_PUB_PORT):
self._pub = Publisher('*', host=host, pub_port=pub_port)
def publish(self, json_msg):
'''
json_msg = '{"topic1": 1.0, "topic2": {"x": 0.1}}'
'''
pyobj = json.loads(json_msg)
for topic, value in pyobj.items():
msg = '{topic} {data}'.format(topic=topic, data=json.dumps(value))
self._pub.publish(msg)
| apache-2.0 | Python |
24f449ad9c5ed738e767f21d1f137d784e2b2d2c | add register url mapping | simpleoncall/simpleoncall,simpleoncall/simpleoncall | simpleoncall/urls.py | simpleoncall/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from simpleoncall import api
urlpatterns = patterns(
'',
url(r'^$', 'simpleoncall.views.dashboard', name='dashboard'),
url(r'^login', 'simpleoncall.views.login', name='login'),
url(r'^register', 'simpleoncall.views.register', name='register'),
url(r'^logout', 'simpleoncall.views.logout', name='logout'),
url(r'^settings', 'simpleoncall.views.settings', name='settings'),
url(r'^account', 'simpleoncall.views.account', name='account'),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(api.urlpatterns, namespace='api')),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from simpleoncall import api
urlpatterns = patterns(
'',
url(r'^$', 'simpleoncall.views.dashboard', name='dashboard'),
url(r'^login', 'simpleoncall.views.login', name='login'),
url(r'^logout', 'simpleoncall.views.logout', name='logout'),
url(r'^settings', 'simpleoncall.views.settings', name='settings'),
url(r'^account', 'simpleoncall.views.account', name='account'),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(api.urlpatterns, namespace='api')),
)
| mit | Python |
17cea2e1d22e29fa073133044afbcab571bc1c58 | Add version number to init | AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE | AdaptivePELE/__init__.py | AdaptivePELE/__init__.py | __version__ = "1.4.2"
| mit | Python | |
8e0ae6f377c66e1fcbc50b04f1d55b372b6dea40 | make it a class | timmydoza/RPi_7SegDisplay | RPi_7SegDisplay.py | RPi_7SegDisplay.py | import RPi.GPIO as GPIO
import multiprocessing
import signal
import sys
from time import sleep
class RPi_7SegDisplay():
_CHARACTERS = {
'0': 0b11000000,
'1': 0b11111001,
'2': 0b10100100,
'3': 0b10110000,
'4': 0b10011001,
'5': 0b10010010,
'6': 0b10000010,
'7': 0b11111000,
'8': 0b10000000,
'9': 0b10010000,
' ': 0b11111111,
'-': 0b10111111,
'.': 0b01111111
}
_BLANK = [_CHARACTERS[' ']] * 8
def __init__(self, data_pin, clock_pin, latch_pin):
self._data = data_pin
self._clock = clock_pin
self._latch = latch_pin
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup([data_pin, clock_pin, latch_pin], GPIO.OUT)
if (len(multiprocessing.active_children()) == 0):
self._proc = multiprocessing.Process(target=_display)
self._proc.start()
self._queue = multiprocessing.Queue(2)
self._queue.put(_BLANK)
signal.signal(signal.SIGINT, self._signal_handler)
def _shift(self, byte):
for i in range(7, -1, -1):
GPIO.output(self._data, byte & (1 << i))
GPIO.output(self._clock, 1)
GPIO.output(self._clock, 0)
def _write(self, message):
for i, char in enumerate(message):
self._shift(1 << i)
self._shift(char)
GPIO.output(self._latch, 1)
GPIO.output(self._latch, 0)
sleep(0.003)
def _display(self):
while True:
if not self._queue.empty():
text = self._queue.get()
self._write(text)
def show(self, message):
result = []
if not self._queue.full():
for i, char in enumerate(message):
if (char == '.' and i > 0):
result[-1] -= 128
elif (char != '.'):
result.append(self._CHARACTERS[char])
self._queue.put(result)
def stop(self):
self._proc.terminate()
self._write(_BLANK)
GPIO.cleanup()
def _signal_handler(self, signal, frame):
stop()
sys.exit(0)
| import RPi.GPIO as GPIO
import multiprocessing
import signal
import sys
from time import sleep
_CHARACTERS = {
'0': 0b11000000,
'1': 0b11111001,
'2': 0b10100100,
'3': 0b10110000,
'4': 0b10011001,
'5': 0b10010010,
'6': 0b10000010,
'7': 0b11111000,
'8': 0b10000000,
'9': 0b10010000,
' ': 0b11111111,
'-': 0b10111111,
'.': 0b01111111
}
_BLANK = [_CHARACTERS[' ']] * 8
def init(data_pin, clock_pin, latch_pin):
global _data, _clock, _latch, _proc
_data = data_pin
_clock = clock_pin
_latch = latch_pin
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup([data_pin, clock_pin, latch_pin], GPIO.OUT)
if (len(multiprocessing.active_children()) == 0):
_proc = multiprocessing.Process(target=_display)
_proc.start()
def _shift(byte):
for i in range(7, -1, -1):
GPIO.output(_data, byte & (1 << i))
GPIO.output(_clock, 1)
GPIO.output(_clock, 0)
def _write(message):
for i, char in enumerate(message):
_shift(1 << i)
_shift(char)
GPIO.output(_latch, 1)
GPIO.output(_latch, 0)
sleep(0.003)
def _display():
while True:
if not _queue.empty():
text = _queue.get()
_write(text)
def show(message):
result = []
if not _queue.full():
for i, char in enumerate(message):
if (char == '.' and i > 0):
result[-1] -= 128
elif (char != '.'):
result.append(_CHARACTERS[char])
_queue.put(result)
def stop():
_proc.terminate()
_write(_BLANK)
GPIO.cleanup()
def _signal_handler(signal, frame):
stop()
sys.exit(0)
_queue = multiprocessing.Queue(2)
_queue.put(_BLANK)
signal.signal(signal.SIGINT, _signal_handler)
| mit | Python |
cadae5454859af11ba0a2793750265a4a1f9e38a | change hakbeon to bunryu which can be more generally used | Perlmint/Yuzuki,TintypeMolly/Yuzuki,TintypeMolly/Yuzuki,PoolC/Yuzuki,TintypeMolly/Yuzuki,Perlmint/Yuzuki,Perlmint/Yuzuki,PoolC/Yuzuki,PoolC/Yuzuki | model/user.py | model/user.py | from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text
from sqlalchemy.orm import relationship
from model.base import Base
from helper.pbkdf2 import pbkdf2, pbkdf2_check
class User(Base):
__tablename__ = "user"
uid = Column(Integer(), primary_key=True)
username = Column(String(255), index=True, unique=True, nullable=False)
nickname = Column(String(255), unique=True, nullable=False)
password = Column(String(255), nullable=False)
groups = relationship("Group", secondary="assoc_user_group")
pd_realname = Column(String(255), nullable=False)
pd_email = Column(String(255))
pd_address = Column(String(255))
pd_phone = Column(String(255))
pd_bunryu = Column(Integer())
pd_bio = Column(Text())
created_at = Column(DateTime(), default=datetime.now)
is_admin = Column(Boolean, default=False)
def __init__(self, username, nickname, password, pd_realname, pd_email, pd_address, pd_phone, pd_bunryu, pd_bio):
self.username = username
self.nickname = nickname
self.password = pbkdf2(password)
self.pd_realname = pd_realname
self.pd_email = pd_email
self.pd_address = pd_address
self.pd_phone = pd_phone
self.pd_bunryu = pd_bunryu
self.pd_bio = pd_bio
def check_password(self, password):
return pbkdf2_check(password, self.password)
def __repr__(self):
return "<User name=%s>" % self.username | from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text
from sqlalchemy.orm import relationship
from model.base import Base
from helper.pbkdf2 import pbkdf2, pbkdf2_check
class User(Base):
__tablename__ = "user"
uid = Column(Integer(), primary_key=True)
username = Column(String(255), index=True, unique=True, nullable=False)
nickname = Column(String(255), unique=True, nullable=False)
password = Column(String(255), nullable=False)
groups = relationship("Group", secondary="assoc_user_group")
pd_realname = Column(String(255), nullable=False)
pd_email = Column(String(255))
pd_address = Column(String(255))
pd_phone = Column(String(255))
pd_hakbeon = Column(Integer())
pd_bio = Column(Text())
created_at = Column(DateTime(), default=datetime.now)
is_admin = Column(Boolean, default=False)
def __init__(self, username, nickname, password, pd_realname, pd_email, pd_address, pd_phone, pd_hakbeon, pd_bio):
self.username = username
self.nickname = nickname
self.password = pbkdf2(password)
self.pd_realname = pd_realname
self.pd_email = pd_email
self.pd_address = pd_address
self.pd_phone = pd_phone
self.pd_hakbeon = pd_hakbeon
self.pd_bio = pd_bio
def check_password(self, password):
return pbkdf2_check(password, self.password)
def __repr__(self):
return "<User name=%s>" % self.username | mit | Python |
38791bf66166b194c6f90c140589e11c9d614673 | test c-model | Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2 | transiNXOR_modeling/transixor_predictor.py | transiNXOR_modeling/transixor_predictor.py | import sys
sys.path.append('../')
import numpy as np
from itertools import product
from pinn_api import predict_ids_grads, predict_ids
import matplotlib.pyplot as plt
import glob
## ------------ True data ---------------
ids_file = glob.glob('./transiXOR_data/current_D9.npy')
# ids_file = glob.glob('./transiXOR_data/*_id_*.npy')
# vds, vbg, vtg, id
ids_data = np.load(ids_file[0])
print(ids_data.shape)
## ------------ Prediction ---------------
# vds = np.linspace(-0.1, 0.3, 101)
# vbg = np.linspace(0.1, 0.1, 1)
# vtg = np.linspace(0.2, 0.2, 1)
vds = np.linspace(0.2, 0.2, 1)
vbg = np.linspace(0.1, 0.1, 1)
vtg = np.linspace(-0.1, 0.3, 101)
iter_lst = list(product(vds, vbg, vtg))
vds_pred = np.expand_dims(np.array([e[0] for e in iter_lst], dtype=np.float32), axis=1)
vbg_pred = np.array([e[1] for e in iter_lst], dtype=np.float32)
vtg_pred = np.array([e[2] for e in iter_lst], dtype=np.float32)
vg_pred = np.column_stack((vtg_pred, vbg_pred))
vg_pred = np.sum(vg_pred, axis=1, keepdims=True)
# vg_pred = np.sum(vg_pred, axis=1, keepdims=True)
## If trained with adjoint builder
# ids_pred, _, _ = predict_ids_grads(
# './transiXOR_Models/bise_h16', vg_pred, vds_pred)
## If trained with origin builder
ids_pred = predict_ids(
'./transiXOR_Models/bise_ext_sym_h264_0', vg_pred, vds_pred)
# ids_true = ids_data[:, 30, 20]
# vds_true = np.linspace(-0.1, 0.3, 41)
# plt.plot(vds, ids_pred, 'r')
# plt.plot(vds_true, ids_true)
# plt.show()
# plt.semilogy(vds, np.abs(ids_pred), 'r')
# plt.semilogy(vds_true, np.abs(ids_true))
# plt.show()
ids_true = ids_data[30, 20, :]
vtg_true = np.linspace(-0.1, 0.3, 41)
plt.plot(vtg, ids_pred, 'r')
plt.plot(vtg_true, ids_true)
plt.show()
plt.semilogy(vtg, np.abs(ids_pred), 'r')
plt.semilogy(vtg_true, np.abs(ids_true))
plt.show()
| import sys
sys.path.append('../')
import numpy as np
from itertools import product
from pinn_api import predict_ids_grads, predict_ids
import matplotlib.pyplot as plt
import glob
## ------------ True data ---------------
ids_file = glob.glob('./transiXOR_data/current_D9.npy')
# ids_file = glob.glob('./transiXOR_data/*_id_*.npy')
# vds, vbg, vtg, id
ids_data = np.load(ids_file[0])
print(ids_data.shape)
## ------------ Prediction ---------------
vds = np.linspace(-0.1, 0.3, 101)
vbg = np.linspace(0.1, 0.1, 1)
vtg = np.linspace(0.2, 0.2, 1)
# vds = np.linspace(0.2, 0.2, 1)
# vbg = np.linspace(0.1, 0.1, 1)
# vtg = np.linspace(-0.1, 0.3, 41)
iter_lst = list(product(vds, vbg, vtg))
vds_pred = np.expand_dims(np.array([e[0] for e in iter_lst], dtype=np.float32), axis=1)
vbg_pred = np.array([e[1] for e in iter_lst], dtype=np.float32)
vtg_pred = np.array([e[2] for e in iter_lst], dtype=np.float32)
vg_pred = np.column_stack((vtg_pred, vbg_pred))
vg_pred = np.sum(vg_pred, axis=1, keepdims=True)
# vg_pred = np.sum(vg_pred, axis=1, keepdims=True)
## If trained with adjoint builder
# ids_pred, _, _ = predict_ids_grads(
# './transiXOR_Models/bise_h16', vg_pred, vds_pred)
## If trained with origin builder
ids_pred = predict_ids(
'./transiXOR_Models/bise_ext_sym_h264_0', vg_pred, vds_pred)
# ids_true = ids_data[30, 20, :]
ids_true = ids_data[:, 30, 20]
vds_true = np.linspace(-0.1, 0.3, 41)
plt.plot(vds, ids_pred, 'r')
plt.plot(vds_true, ids_true)
plt.show()
plt.semilogy(vds, np.abs(ids_pred), 'r')
plt.semilogy(vds_true, np.abs(ids_true))
plt.show()
| mit | Python |
48dc53c5c92a49fca959a962434bee6c2810a0c8 | Add missing license to pip package | PAIR-code/what-if-tool,PAIR-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool,pair-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool,pair-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool | tensorboard_plugin_wit/pip_package/setup.py | tensorboard_plugin_wit/pip_package/setup.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
setuptools.setup(
name="tensorboard_plugin_wit",
version="1.6.0",
description="What-If Tool TensorBoard plugin.",
packages=setuptools.find_packages(),
license='Apache 2.0',
package_data={
"tensorboard_plugin_wit": ["static/**"],
},
entry_points={
"tensorboard_plugins": [
"wit = tensorboard_plugin_wit.wit_plugin_loader:WhatIfToolPluginLoader",
],
},
)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
setuptools.setup(
name="tensorboard_plugin_wit",
version="1.6.0",
description="What-If Tool TensorBoard plugin.",
packages=setuptools.find_packages(),
package_data={
"tensorboard_plugin_wit": ["static/**"],
},
entry_points={
"tensorboard_plugins": [
"wit = tensorboard_plugin_wit.wit_plugin_loader:WhatIfToolPluginLoader",
],
},
)
| apache-2.0 | Python |
00092b1a6af866fb74c04923be627d8b0b44e630 | fix ver | shortdudey123/gbot | modules/admin.py | modules/admin.py | #!/usr/bin/env python
# =============================================================================
# file = opme.py
# description = gbot module
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-12
# mod_date = 2014-07-12
# version = 0.1
# usage = loaded by gbot
# notes =
# python_ver = 2.7.6
# =============================================================================
adminOnly = True
commandName = 'admin'
version = 0.1
help = """
View and modify the list of admins for this bot.
Usage: {0} <list | add nick | del nick>
Admin Only: {1}
Version: {2}
Note:
""".format(commandName, adminOnly, version)
def execModule(channel, message, nick, botSelf):
retCommands = []
if len(message.split()) == 2 and message.split()[1].lower() == 'list':
botSelf.bot.sendMessage(channel, 'Admins: {0}'.format(' '.join(botSelf.admins)))
botSelf.bot.sendMessage(channel, 'Owners: {0}'.format(' '.join(botSelf.owners)))
elif len(message.split()) == 3:
adminNick = message.split()[2]
if message.split()[1].lower() == 'add':
if botSelf.addAdmin(adminNick):
botSelf.bot.sendMessage(channel, 'Added an admin: {0}'.format(adminNick))
else:
botSelf.bot.sendMessage(channel, '{0} is already an admin!'.format(adminNick))
if message.split()[1].lower() == 'del':
try:
delMsg = botSelf.deleteAdmin(adminNick)
botSelf.bot.sendMessage(channel, '{0}'.format(delMsg))
except Exception, e:
botSelf.bot.sendMessage(channel, '{0}'.format(e))
return retCommands
if __name__ == "__main__":
filename = __file__.split('.')[0]
help(filename)
| #!/usr/bin/env python
# =============================================================================
# file = opme.py
# description = gbot module
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-12
# mod_date = 2014-07-12
# version = 0.1
# usage = loaded by gbot
# notes =
# python_ver = 2.7.6
# =============================================================================
adminOnly = True
commandName = 'admin'
version = 0.1.1
help = """
View and modify the list of admins for this bot.
Usage: {0} <list | add nick | del nick>
Admin Only: {1}
Version: {2}
Note:
""".format(commandName, adminOnly, version)
def execModule(channel, message, nick, botSelf):
retCommands = []
if len(message.split()) == 2 and message.split()[1].lower() == 'list':
botSelf.bot.sendMessage(channel, 'Admins: {0}'.format(' '.join(botSelf.admins)))
botSelf.bot.sendMessage(channel, 'Owners: {0}'.format(' '.join(botSelf.owners)))
elif len(message.split()) == 3:
adminNick = message.split()[2]
if message.split()[1].lower() == 'add':
if botSelf.addAdmin(adminNick):
botSelf.bot.sendMessage(channel, 'Added an admin: {0}'.format(adminNick))
else:
botSelf.bot.sendMessage(channel, '{0} is already an admin!'.format(adminNick))
if message.split()[1].lower() == 'del':
try:
delMsg = botSelf.deleteAdmin(adminNick)
botSelf.bot.sendMessage(channel, '{0}'.format(delMsg))
except Exception, e:
botSelf.bot.sendMessage(channel, '{0}'.format(e))
return retCommands
if __name__ == "__main__":
filename = __file__.split('.')[0]
help(filename)
| apache-2.0 | Python |
eecded05bdd04cafe93793efdc97742b58903e83 | Fix broken imports in ParticleTest | Wilee999/panda3d,tobspr/panda3d,brakhane/panda3d,cc272309126/panda3d,tobspr/panda3d,matthiascy/panda3d,Wilee999/panda3d,Wilee999/panda3d,Wilee999/panda3d,grimfang/panda3d,matthiascy/panda3d,mgracer48/panda3d,tobspr/panda3d,brakhane/panda3d,cc272309126/panda3d,Wilee999/panda3d,Wilee999/panda3d,grimfang/panda3d,grimfang/panda3d,grimfang/panda3d,chandler14362/panda3d,cc272309126/panda3d,chandler14362/panda3d,tobspr/panda3d,Wilee999/panda3d,matthiascy/panda3d,matthiascy/panda3d,brakhane/panda3d,chandler14362/panda3d,matthiascy/panda3d,matthiascy/panda3d,chandler14362/panda3d,tobspr/panda3d,tobspr/panda3d,chandler14362/panda3d,tobspr/panda3d,cc272309126/panda3d,mgracer48/panda3d,grimfang/panda3d,cc272309126/panda3d,brakhane/panda3d,brakhane/panda3d,tobspr/panda3d,matthiascy/panda3d,mgracer48/panda3d,chandler14362/panda3d,cc272309126/panda3d,brakhane/panda3d,grimfang/panda3d,brakhane/panda3d,grimfang/panda3d,chandler14362/panda3d,chandler14362/panda3d,Wilee999/panda3d,mgracer48/panda3d,cc272309126/panda3d,grimfang/panda3d,cc272309126/panda3d,chandler14362/panda3d,grimfang/panda3d,mgracer48/panda3d,mgracer48/panda3d,mgracer48/panda3d,matthiascy/panda3d,mgracer48/panda3d,brakhane/panda3d,grimfang/panda3d,chandler14362/panda3d,Wilee999/panda3d,brakhane/panda3d,tobspr/panda3d,matthiascy/panda3d,cc272309126/panda3d,tobspr/panda3d,mgracer48/panda3d | direct/src/particles/ParticleTest.py | direct/src/particles/ParticleTest.py |
if __name__ == "__main__":
from direct.directbase.TestStart import *
from panda3d.physics import LinearVectorForce
from panda3d.core import Vec3
import ParticleEffect
from direct.tkpanels import ParticlePanel
import Particles
import ForceGroup
# Showbase
base.enableParticles()
# ForceGroup
fg = ForceGroup.ForceGroup()
gravity = LinearVectorForce(Vec3(0.0, 0.0, -10.0))
fg.addForce(gravity)
# Particles
p = Particles.Particles()
# Particle effect
pe = ParticleEffect.ParticleEffect('particle-fx')
pe.reparentTo(render)
#pe.setPos(0.0, 5.0, 4.0)
pe.addForceGroup(fg)
pe.addParticles(p)
# Particle Panel
pp = ParticlePanel.ParticlePanel(pe)
run()
|
if __name__ == "__main__":
from direct.directbase.TestStart import *
from pandac.LinearVectorForce import LinearVectorForce
from pandac.Vec3 import Vec3
import ParticleEffect
from direct.tkpanels import ParticlePanel
import Particles
import ForceGroup
# Showbase
base.enableParticles()
# ForceGroup
fg = ForceGroup.ForceGroup()
gravity = LinearVectorForce(Vec3(0.0, 0.0, -10.0))
fg.addForce(gravity)
# Particles
p = Particles.Particles()
# Particle effect
pe = ParticleEffect.ParticleEffect('particle-fx')
pe.reparentTo(render)
#pe.setPos(0.0, 5.0, 4.0)
pe.addForceGroup(fg)
pe.addParticles(p)
# Particle Panel
pp = ParticlePanel.ParticlePanel(pe)
run()
| bsd-3-clause | Python |
198824caf096becbb14454d8969c4c8616c521ae | bump version | nyaruka/smartmin,nyaruka/smartmin,nyaruka/smartmin | smartmin/__init__.py | smartmin/__init__.py | from __future__ import unicode_literals
__version__ = '2.2.3'
| from __future__ import unicode_literals
__version__ = '2.2.2'
| bsd-3-clause | Python |
6a7e2db1d7e2220004d37f4f4531e75672f9a2b3 | Fix issue #14 dont parse path twice | Sixdsn/MOZ_OVERRIDE | modules/files.py | modules/files.py | #!/usr/bin/python
import os, sys, commands, string
from options import SIXMOZ_options
from logger import SIXMOZ_logger
from rules import SIXMOZ_rules
class SIXMOZ_files():
def __init__(self):
#besoin d'ajouter check si files est vide ou ['']
pass
@staticmethod
def get_files():
return (SIXMOZ_files.files)
@staticmethod
def get_idl_files():
return (SIXMOZ_files.idl_files)
def find_idl_files():
idl_files = []
if (SIXMOZ_options.idl_folder != ""):
if (not os.path.exists(SIXMOZ_options.idl_folder)):
print("Options -I %s doesn't not exist"% SIXMOZ_options.idl_folder)
sys.exit(1)
SIXMOZ_logger.print_info("Getting Files from idl_folder: " + SIXMOZ_options.idl_folder)
idl_files = string.split(commands.getstatusoutput("find " + SIXMOZ_options.idl_folder + " -type f -readable \( " + SIXMOZ_rules.get_conf('extensions') +
" \) -and -not -path \"" + SIXMOZ_options.path + "*\" | sort")[1], "\n")
return (idl_files)
def find_files():
SIXMOZ_logger.print_info("Stage 1/6: Getting files to parse: %s"% SIXMOZ_options.path)
files = string.split(commands.getstatusoutput("find " + SIXMOZ_options.path + " -type f -readable " + SIXMOZ_rules.get_conf('extensions') + " -or -name \"*.cpp\" | sort")[1], "\n")
return (files)
files = find_files()
idl_files = find_idl_files()
| #!/usr/bin/python
import os, sys, commands, string
from options import SIXMOZ_options
from logger import SIXMOZ_logger
from rules import SIXMOZ_rules
class SIXMOZ_files():
def __init__(self):
#besoin d'ajouter check si files est vide ou ['']
pass
@staticmethod
def get_files():
return (SIXMOZ_files.files)
@staticmethod
def get_idl_files():
return (SIXMOZ_files.idl_files)
def find_idl_files():
idl_files = []
if (SIXMOZ_options.idl_folder != ""):
if (not os.path.exists(SIXMOZ_options.idl_folder)):
print("Options -I %s doesn't not exist"% SIXMOZ_options.idl_folder)
sys.exit(1)
SIXMOZ_logger.print_info("Getting Files from idl_folder: " + SIXMOZ_options.idl_folder)
idl_files = string.split(commands.getstatusoutput("find " + SIXMOZ_options.idl_folder + SIXMOZ_rules.get_conf('extensions') + " | sort")[1], "\n")
return (idl_files)
def find_files():
SIXMOZ_logger.print_info("Stage 1/6: Getting files to parse: %s"% SIXMOZ_options.path)
files = string.split(commands.getstatusoutput("find " + SIXMOZ_options.path + SIXMOZ_rules.get_conf('extensions') + " -or -name \"*.cpp\" | sort")[1], "\n")
return (files)
files = find_files()
idl_files = find_idl_files()
| mpl-2.0 | Python |
4b081f7a0e3f4c2069e2da77b584c37fa6d0705f | add listener for searching playlists | hamidzr/kodi-extras | voiceControl.py | voiceControl.py | from utils.voiceParser import *
COMMANDS = [
{
'aliases': ['please set', 'pls set', 'please','pls', 'go','set'],
'script': 'python3.6 kodiCrud.py',
'hasArgs': True
},
{
'aliases': ['music','play music'],
'script': 'python3.6 playYtPlaylist.py'
},
{
'aliases': ['play playlist', 'search playlist'],
'script': 'python3.6 playYtSearchPlaylist.py',
'hasArgs': True
},
{
'aliases': ['play'],
'script': 'python3.6 playYtSearch.py 10',
'hasArgs': True
},
{
'aliases': ['next','skip'],
'script': 'python3.6 kodiCrud.py next'
},
{
'aliases': ['pause'],
'script': 'python3.6 kodiCrud.py pause'
}
]
listen(COMMANDS);
| from utils.voiceParser import *
COMMANDS = [
{
'aliases': ['please set', 'pls set', 'please','pls', 'go','set'],
'script': 'python3.6 kodiCrud.py',
'hasArgs': True
},
{
'aliases': ['music','play music'],
'script': 'python3.6 playYtPlaylist.py'
},
{
'aliases': ['next','skip'],
'script': 'python3.6 kodiCrud.py next'
},
{
'aliases': ['pause'],
'script': 'python3.6 kodiCrud.py pause'
},
{
'aliases': ['play'],
'script': 'python3.6 playYtSearch.py 10',
'hasArgs': True
}
]
listen(COMMANDS);
| mit | Python |
af822a557355434ea900eb0742d5429c0f2c9d30 | Add session to function | HERA-Team/hera_mc,HERA-Team/hera_mc,HERA-Team/Monitor_and_Control | scripts/write_antenna_location_file.py | scripts/write_antenna_location_file.py | #! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""
Script to write out antenna locations for use in cal files.

Writes every HH station that was ever fully connected, plus the center of
the array as a synthetic antenna (number -1), to a CSV file.
"""
import pandas as pd
from hera_mc import mc, geo_handling, cm_handling
import datetime

parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
                    default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
parser.add_argument('--cm_csv_path', help="path to cm csv files",
                    default=None)
args = parser.parse_args()
filename = args.file

db = mc.connect_to_mc_db(args)
session = db.sessionmaker()

# Record which configuration-management version produced this dump.
h = cm_handling.Handling(session)
cm_version = h.get_cm_version()

h = geo_handling.Handling(session)
locations = h.get_all_fully_connected_ever(station_types_to_check=['HH'])

# Center of the array: gets antenna number -1 and no correlator inputs.
cofa_loc = h.cofa()[0]
locations.append({'station_name': cofa_loc.station_name,
                  'station_type': cofa_loc.station_type_name,
                  'tile': cofa_loc.tile,
                  'datum': cofa_loc.datum,
                  'easting': cofa_loc.easting,
                  'northing': cofa_loc.northing,
                  'longitude': cofa_loc.lon,
                  'latitude': cofa_loc.lat,
                  'elevation': cofa_loc.elevation,
                  'antenna_number': -1,
                  'correlator_input_x': None,
                  'correlator_input_y': None,
                  'start_date': cofa_loc.created_date,
                  'stop_date': None})

df = pd.DataFrame(locations)
# Fix the column order, rename to the cal-file convention, and tag each
# row with the configuration-management version it came from.
df = df[['station_name', 'station_type', 'datum', 'tile', 'easting', 'northing',
         'longitude', 'latitude', 'elevation', 'antenna_number',
         'correlator_input_x', 'correlator_input_y', 'start_date', 'stop_date']]
df.rename(columns={'station_name': 'antenna_name'}, inplace=True)
df['cm_version'] = cm_version
df.to_csv(filename, index=False)
| #! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""
Script to write out antenna locations for use in cal files.

Writes every HH station that was ever fully connected, plus the center of
the array as a synthetic antenna (number -1), to a CSV file.
"""
import pandas as pd
from hera_mc import mc, geo_handling, cm_handling
import datetime

parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
                    default='hera_ant_locs_' + datetime.date.today().strftime("%m_%d_%Y") + '.csv')
parser.add_argument('--cm_csv_path', help="path to cm csv files",
                    default=None)
args = parser.parse_args()
filename = args.file

db = mc.connect_to_mc_db(args)
session = db.sessionmaker()

# Record which configuration-management version produced this dump.
# NOTE(review): unlike geo_handling.Handling below, this Handling is built
# without the db session -- confirm cm_handling.Handling() works sessionless.
h = cm_handling.Handling()
cm_version = h.get_cm_version()

h = geo_handling.Handling(session)
locations = h.get_all_fully_connected_ever(station_types_to_check=['HH'])

# Center of the array: gets antenna number -1 and no correlator inputs.
cofa_loc = h.cofa()[0]
locations.append({'station_name': cofa_loc.station_name,
                  'station_type': cofa_loc.station_type_name,
                  'tile': cofa_loc.tile,
                  'datum': cofa_loc.datum,
                  'easting': cofa_loc.easting,
                  'northing': cofa_loc.northing,
                  'longitude': cofa_loc.lon,
                  'latitude': cofa_loc.lat,
                  'elevation': cofa_loc.elevation,
                  'antenna_number': -1,
                  'correlator_input_x': None,
                  'correlator_input_y': None,
                  'start_date': cofa_loc.created_date,
                  'stop_date': None})

df = pd.DataFrame(locations)
# Fix the column order, rename to the cal-file convention, and tag each
# row with the configuration-management version it came from.
df = df[['station_name', 'station_type', 'datum', 'tile', 'easting', 'northing',
         'longitude', 'latitude', 'elevation', 'antenna_number',
         'correlator_input_x', 'correlator_input_y', 'start_date', 'stop_date']]
df.rename(columns={'station_name': 'antenna_name'}, inplace=True)
df['cm_version'] = cm_version
df.to_csv(filename, index=False)
| bsd-2-clause | Python |
e12580f1749493dbe05f474d2fecdec4eaba73c5 | bump version | google/TensorNetwork | tensornetwork/version.py | tensornetwork/version.py | # Copyright 2019 The TensorNetwork Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.4.6'  # package version string (PEP 440)
| # Copyright 2019 The TensorNetwork Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.4.5'  # package version string (PEP 440)
| apache-2.0 | Python |
130af66197095a188ff01906fd1323f107d72246 | Fix nomination form | SAlkhairy/trabd,SAlkhairy/trabd,SAlkhairy/trabd,SAlkhairy/trabd | voting/views.py | voting/views.py | from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from .models import Position, Nomination
from .forms import NominationForm
def show_index(request):
    """Render the site landing page."""
    template_name = 'accounts/home.html'
    return render(request, template_name)
@login_required
def list_positions(request, entity):
    """
    List voting positions for an entity ('club' or 'council').

    Non-superusers only see positions their college may nominate for;
    any other entity value raises 404.
    """
    if entity in ['club', 'council']:
        positions = Position.objects.filter(entity=entity)
        if not request.user.is_superuser:
            # NOTE(review): 'collge' looks like a typo for 'college' -- it
            # must match the actual Profile field name; confirm the model.
            positions = positions.filter(colleges_allowed_to_nominate=request.user.profile.collge)
    else:
        raise Http404
    context = {'positions': positions}
    return render(request, 'voting/list_' + entity + '_positions.html', context)
@login_required
def add_nominee(request, position_id):
    """
    Show (GET) and process (POST) the nomination form for a position.

    On a valid POST the nomination is saved and the user is redirected to
    the thank-you page; otherwise the (possibly bound, with errors) form
    is rendered.
    """
    position = get_object_or_404(Position, pk=position_id)
    if request.method == 'POST':
        instance = Nomination(user=request.user, position=position)
        form = NominationForm(request.POST, instance=instance)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse("voting:nomination_thanks"))
    else:
        # Bug fix: the original `elif request.method == 'GET'` left `form`
        # unbound for any other verb (e.g. HEAD), raising UnboundLocalError
        # at the render below.
        form = NominationForm()
    context = {'form': form,
               'position': position}
    return render(request, 'voting/add_nominee.html', context)
| from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from .models import Position, Nomination
from .forms import NominationForm
def show_index(request):
    """Render the site landing page."""
    template_name = 'accounts/home.html'
    return render(request, template_name)
@login_required
def list_positions(request, entity):
    """
    List voting positions for an entity ('club' or 'council').

    Non-superusers only see positions their college may nominate for;
    any other entity value raises 404.
    """
    if entity in ['club', 'council']:
        positions = Position.objects.filter(entity=entity)
        if not request.user.is_superuser:
            # NOTE(review): 'collge' looks like a typo for 'college' -- it
            # must match the actual Profile field name; confirm the model.
            positions = positions.filter(colleges_allowed_to_nominate=request.user.profile.collge)
    else:
        raise Http404
    context = {'positions': positions}
    return render(request, 'voting/list_' + entity + '_positions.html', context)
@login_required
def add_nominee(request, position_id):
    """
    Show (GET) and process (POST) the nomination form for a position.

    On a valid POST the nomination is saved and the user is redirected to
    the thank-you page; otherwise the (possibly bound, with errors) form
    is rendered.
    """
    position = get_object_or_404(Position, pk=position_id)
    if request.method == 'POST':
        instance = Nomination(user=request.user, position=position)
        form = NominationForm(request.POST, instance=instance)
        if form.is_valid():
            form.save()
            # Bug fix: the dead `context = {...}` assignment that sat here
            # before the redirect has been removed.
            return HttpResponseRedirect(reverse("voting:nomination_thanks"))
    else:
        # Bug fix: the original `elif request.method == 'GET'` left `form`
        # unbound for any other verb (e.g. HEAD), raising UnboundLocalError
        # at the render below.
        form = NominationForm()
    context = {'form': form,
               'position': position}
    return render(request, 'voting/add_nominee.html', context)
| agpl-3.0 | Python |
3229ae048b623833925f6c5db4fe6016940dcc06 | fix path to db | vjdorazio/TwoRavens,vjdorazio/TwoRavens,vjdorazio/TwoRavens,vjdorazio/TwoRavens,vjdorazio/TwoRavens | tworavensproject/settings/dev_container.py | tworavensproject/settings/dev_container.py | from __future__ import absolute_import
# Dev-container Django settings: DEBUG on, SQLite DB under LOCAL_SETUP_DIR,
# static/media served from the Apache web root.
import json
import sys
from os import makedirs
from os.path import join, normpath, isdir, isfile

from .base import *

DEBUG = True  # False - will be False
ROOT_URLCONF = 'tworavensproject.urls_prod'

# Apache document root inside the container; static and media live here.
APACHE_WEB_DIRECTORY = '/var/www/html'

# NOTE(review): hard-coded key -- acceptable only for the dev container.
SECRET_KEY = 'ye-dev-container-secret-key'

# Writable scratch area (created on import if missing); holds the SQLite db.
LOCAL_SETUP_DIR = join('/srv', 'webapps', 'tworavens_files')
if not isdir(LOCAL_SETUP_DIR):
    makedirs(LOCAL_SETUP_DIR)

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': join(LOCAL_SETUP_DIR, 'two_ravens.db3'),
    }
}

SESSION_COOKIE_NAME = 'two_ravens_local'

# where static files are collected
STATIC_ROOT = join(APACHE_WEB_DIRECTORY, '2ravens', 'static')

# http://django-debug-toolbar.readthedocs.org/en/latest/installation.html
INTERNAL_IPS = ('127.0.0.1', '0.0.0.0')
#INSTALLED_APPS += ['debug_toolbar']
########## END TOOLBAR CONFIGURATION
#MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware']

MEDIA_ROOT = join(APACHE_WEB_DIRECTORY, "media")
MEDIA_URL = '/media/'

TIME_ZONE = 'America/New_York'

# TwoRavens R service test
R_DEV_SERVER_BASE = 'http://0.0.0.0:8000/custom/'
RECORD_R_SERVICE_ROUTING = True  # log R service requests/response JSON to db
| from __future__ import absolute_import
# Dev-container Django settings: DEBUG on, SQLite DB under LOCAL_SETUP_DIR,
# static/media served from the Apache web root.
import json
import sys
from os import makedirs
from os.path import join, normpath, isdir, isfile

from .base import *

DEBUG = True  # False - will be False
ROOT_URLCONF = 'tworavensproject.urls_prod'

# Apache document root inside the container; static and media live here.
APACHE_WEB_DIRECTORY = '/var/www/html'

# NOTE(review): hard-coded key -- acceptable only for the dev container.
SECRET_KEY = 'ye-dev-container-secret-key'

# Writable scratch area (created on import if missing); holds the SQLite db.
# NOTE(review): path is relative to BASE_DIR (from .base) -- confirm the
# container actually expects it there rather than under /srv.
LOCAL_SETUP_DIR = join(BASE_DIR, 'srv', 'webapps', 'tworavens_files')
if not isdir(LOCAL_SETUP_DIR):
    makedirs(LOCAL_SETUP_DIR)

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': join(LOCAL_SETUP_DIR, 'two_ravens.db3'),
    }
}

SESSION_COOKIE_NAME = 'two_ravens_local'

# where static files are collected
STATIC_ROOT = join(APACHE_WEB_DIRECTORY, '2ravens', 'static')

# http://django-debug-toolbar.readthedocs.org/en/latest/installation.html
INTERNAL_IPS = ('127.0.0.1', '0.0.0.0')
#INSTALLED_APPS += ['debug_toolbar']
########## END TOOLBAR CONFIGURATION
#MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware']

MEDIA_ROOT = join(APACHE_WEB_DIRECTORY, "media")
MEDIA_URL = '/media/'

TIME_ZONE = 'America/New_York'

# TwoRavens R service test
R_DEV_SERVER_BASE = 'http://0.0.0.0:8000/custom/'
RECORD_R_SERVICE_ROUTING = True  # log R service requests/response JSON to db
| bsd-3-clause | Python |
5200e1d18efa2c71993f69a7d10e605b4af01430 | add file type | ehealthafrica-ci/formhub,eHealthAfrica/onadata,qlands/onadata,kobotoolbox/kobocat,piqoni/onadata,ehealthafrica-ci/formhub,spatialdev/onadata,makinacorpus/formhub,ehealthafrica-ci/onadata,qlands/onadata,eHealthAfrica/formhub,hnjamba/onaclone,smn/onadata,jomolinare/kobocat,makinacorpus/formhub,SEL-Columbia/formhub,eHealthAfrica/onadata,sounay/flaminggo-test,ehealthafrica-ci/onadata,qlands/onadata,SEL-Columbia/formhub,ehealthafrica-ci/onadata,eHealthAfrica/formhub,jomolinare/kobocat,awemulya/fieldsight-kobocat,ultimateprogramer/formhub,jomolinare/kobocat,spatialdev/onadata,smn/onadata,hnjamba/onaclone,mainakibui/kobocat,smn/onadata,hnjamba/onaclone,piqoni/onadata,SEL-Columbia/formhub,GeoODK/formhub,wesley1001/formhub,sounay/flaminggo-test,spatialdev/onadata,ehealthafrica-ci/onadata,kobotoolbox/kobocat,sounay/flaminggo-test,eHealthAfrica/onadata,eHealthAfrica/formhub,smn/onadata,jomolinare/kobocat,GeoODK/onadata,SEL-Columbia/formhub,mainakibui/kobocat,mainakibui/kobocat,mainakibui/kobocat,makinacorpus/formhub,awemulya/fieldsight-kobocat,GeoODK/formhub,GeoODK/onadata,wesley1001/formhub,eHealthAfrica/formhub,sounay/flaminggo-test,ultimateprogramer/formhub,kobotoolbox/kobocat,spatialdev/onadata,piqoni/onadata,GeoODK/formhub,ehealthafrica-ci/formhub,kobotoolbox/kobocat,awemulya/fieldsight-kobocat,GeoODK/onadata,awemulya/fieldsight-kobocat,piqoni/onadata,ultimateprogramer/formhub,hnjamba/onaclone,wesley1001/formhub,wesley1001/formhub,makinacorpus/formhub,ehealthafrica-ci/formhub,ultimateprogramer/formhub,GeoODK/onadata,qlands/onadata | main/models/meta_data.py | main/models/meta_data.py | from django.db import models
from odk_logger.models import XForm
import os
def upload_to(instance, filename):
    """Return the storage path <username>/docs/<filename> for a MetaData file."""
    owner = instance.xform.user.username
    return os.path.join(owner, 'docs', filename)
def unique_type_for_form(xform, data_type, data_value=None):
    """
    Get or create the single MetaData row of ``data_type`` for ``xform``.

    If ``data_value`` is truthy it is stored on the row.  Returns the
    MetaData instance.
    """
    result = type_for_form(xform, data_type)
    if not len(result):
        # No row of this type yet: create one (value filled in below).
        result = MetaData(data_type=data_type, xform=xform)
        result.save()
    else:
        result = result[0]
    if data_value:
        result.data_value = data_value
        result.save()
    return result
def type_for_form(xform, data_type):
    """Return the queryset of MetaData rows of ``data_type`` for ``xform``."""
    return MetaData.objects.filter(xform=xform, data_type=data_type)
class MetaData(models.Model):
    """Key/value (optionally file-backed) metadata attached to an XForm."""

    xform = models.ForeignKey(XForm)
    # e.g. 'form_license', 'data_license', 'source', 'supporting_doc'
    data_type = models.CharField(max_length=255)
    data_value = models.CharField(max_length=255)
    data_file = models.FileField(upload_to=upload_to, null=True)
    # MIME type of data_file, taken from the upload's content_type.
    data_file_type = models.CharField(max_length=255, null=True)

    @staticmethod
    def form_license(xform, data_value=None):
        """Get (and optionally set) the single form-license entry."""
        data_type = 'form_license'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def data_license(xform, data_value=None):
        """Get (and optionally set) the single data-license entry."""
        data_type = 'data_license'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def source(xform, data_value=None):
        """Get (and optionally set) the single source entry."""
        data_type = 'source'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def supporting_docs(xform, data_file=None):
        """
        Optionally attach ``data_file`` as a supporting document, then
        return the queryset of all supporting docs for ``xform``.
        """
        data_type = 'supporting_doc'
        if data_file:
            doc = MetaData(data_type=data_type, xform=xform,
                           data_value=data_file.name,
                           data_file=data_file,
                           data_file_type=data_file.content_type)
            doc.save()
        return type_for_form(xform, data_type)

    class Meta:
        app_label = 'main'
| from django.db import models
from odk_logger.models import XForm
import os
def upload_to(instance, filename):
    """Return the storage path <username>/docs/<filename> for a MetaData file."""
    owner = instance.xform.user.username
    return os.path.join(owner, 'docs', filename)
def unique_type_for_form(xform, data_type, data_value=None):
    """
    Get or create the single MetaData row of ``data_type`` for ``xform``.

    If ``data_value`` is truthy it is stored on the row.  Returns the
    MetaData instance.
    """
    result = type_for_form(xform, data_type)
    if not len(result):
        # No row of this type yet: create one (value filled in below).
        result = MetaData(data_type=data_type, xform=xform)
        result.save()
    else:
        result = result[0]
    if data_value:
        result.data_value = data_value
        result.save()
    return result
def type_for_form(xform, data_type):
    """Return the queryset of MetaData rows of ``data_type`` for ``xform``."""
    return MetaData.objects.filter(xform=xform, data_type=data_type)
class MetaData(models.Model):
    """Key/value (optionally file-backed) metadata attached to an XForm."""

    xform = models.ForeignKey(XForm)
    # e.g. 'form_license', 'data_license', 'source', 'supporting_doc'
    data_type = models.CharField(max_length=255)
    data_value = models.CharField(max_length=255)
    data_file = models.FileField(upload_to=upload_to, null=True)

    @staticmethod
    def form_license(xform, data_value=None):
        """Get (and optionally set) the single form-license entry."""
        data_type = 'form_license'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def data_license(xform, data_value=None):
        """Get (and optionally set) the single data-license entry."""
        data_type = 'data_license'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def source(xform, data_value=None):
        """Get (and optionally set) the single source entry."""
        data_type = 'source'
        return unique_type_for_form(xform, data_type, data_value)

    @staticmethod
    def supporting_docs(xform, data_file=None):
        """
        Optionally attach ``data_file`` as a supporting document, then
        return the queryset of all supporting docs for ``xform``.
        """
        data_type = 'supporting_doc'
        if data_file:
            doc = MetaData(data_type=data_type, xform=xform,
                           data_value=data_file.name,
                           data_file=data_file)
            doc.save()
        return type_for_form(xform, data_type)

    class Meta:
        app_label = 'main'
| bsd-2-clause | Python |
6b9b851d3c9b77e0a383a7c67a610337d58ec8ce | Add manage command | mkaplenko/flask-dynstatic | flask_dynstatic.py | flask_dynstatic.py | from flask import current_app
from functools import wraps
from flask.ext.script import Command
import os
class DynStatic(object):
    """Render registered Flask views to static HTML files on demand."""

    # (relative_path, view_callable) pairs registered by to_static_html().
    # Class-level, so the registry is shared by all DynStatic instances.
    views = []

    def __init__(self, app=None):
        self.app = app

    @staticmethod
    def construct_html():
        """Render every registered view into DYNSTATIC_ROOT/<path>."""
        for path, view_func in DynStatic.views:
            with open(os.path.join(current_app.config['DYNSTATIC_ROOT'], path), 'w') as html_file:
                html_file.write(view_func())
                print('{0} done\n'.format(html_file.name))

    @staticmethod
    def to_static_html(path):
        """Decorator: register the wrapped view to be dumped to ``path``."""
        def decorator(func):
            # Bug fix: views holds (path, func) tuples, so the original test
            # ``func not in DynStatic.views`` was always true and repeated
            # decoration registered the same view twice.
            if (path, func) not in DynStatic.views:
                DynStatic.views.append((path, func))

            @wraps(func)
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            return wrapper
        return decorator
class GetStatic(Command):
    """Flask-Script manage command: regenerate all static HTML pages."""
    def run(self):
        DynStatic.construct_html()
        print("Static html constructed\n")
| from flask import current_app
from functools import wraps
import os
class DynStatic(object):
    """Render registered Flask views to static HTML files on demand."""

    # (relative_path, view_callable) pairs registered by to_static_html().
    # Class-level, so the registry is shared by all DynStatic instances.
    views = []

    def __init__(self, app=None):
        self.app = app

    @staticmethod
    def construct_html():
        """Render every registered view into DYNSTATIC_ROOT/<path>."""
        for path, view_func in DynStatic.views:
            with open(os.path.join(current_app.config['DYNSTATIC_ROOT'], path), 'w') as html_file:
                html_file.write(view_func())

    @staticmethod
    def to_static_html(path):
        """Decorator: register the wrapped view to be dumped to ``path``."""
        def decorator(func):
            # Bug fix: views holds (path, func) tuples, so the original test
            # ``func not in DynStatic.views`` was always true and repeated
            # decoration registered the same view twice.
            if (path, func) not in DynStatic.views:
                DynStatic.views.append((path, func))

            @wraps(func)
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            return wrapper
        return decorator
| bsd-3-clause | Python |
937a5e32c77ca57917d60a891616fbcf19ab19f9 | Fix a bug that caused HTTP Accept headers with whitespace to be parsed incorrectly | jgorset/django-respite,jgorset/django-respite,jgorset/django-respite | respite/utils.py | respite/utils.py | from django import forms
def generate_form(model):
    """
    Generate a form from a model.

    Arguments:
    model -- A Django model.
    """
    bound_model = model

    # The class statement (rather than type()) is deliberate: the ModelForm
    # metaclass must run to build the form fields from Meta.model.
    class Form(forms.ModelForm):
        class Meta:
            model = bound_model

    return Form
def parse_http_accept_header(header):
    """
    Return a list of content types listed in the HTTP Accept header
    ordered by quality.

    Arguments:
    header -- A string describing the contents of the HTTP Accept header.
    """
    entries = []
    for component in header.split(','):
        component = component.strip()
        if not component:
            continue
        parameters = [item.strip() for item in component.split(';')]
        content_type = parameters[0]
        # RFC 7231: quality defaults to 1 and only the 'q' parameter affects
        # ordering.  Bug fix: the old code assumed the *first* parameter was
        # 'q' (breaking e.g. 'text/html;level=1;q=0.5') and compared the
        # qualities as strings rather than numbers.
        quality = 1.0
        for parameter in parameters[1:]:
            if parameter.startswith('q='):
                try:
                    quality = float(parameter[2:])
                except ValueError:
                    quality = 1.0
                break
        entries.append((content_type, quality))
    # sort() is stable: equal qualities keep their original header order.
    entries.sort(key=lambda entry: entry[1], reverse=True)
    return [content_type for content_type, quality in entries]
| from django import forms
def generate_form(model):
    """
    Generate a form from a model.

    Arguments:
    model -- A Django model.
    """
    bound_model = model

    # The class statement (rather than type()) is deliberate: the ModelForm
    # metaclass must run to build the form fields from Meta.model.
    class Form(forms.ModelForm):
        class Meta:
            model = bound_model

    return Form
def parse_http_accept_header(header):
    """
    Return a list of content types listed in the HTTP Accept header
    ordered by quality.

    Arguments:
    header -- A string describing the contents of the HTTP Accept header.
    """
    entries = []
    for component in header.split(','):
        # Bug fix: components were not stripped, so headers containing
        # whitespace (e.g. 'text/html, application/json') produced content
        # types with leading spaces.
        component = component.strip()
        if not component:
            continue
        parameters = [item.strip() for item in component.split(';')]
        content_type = parameters[0]
        # RFC 7231: quality defaults to 1 and only the 'q' parameter affects
        # ordering; qualities are compared numerically, not as strings.
        quality = 1.0
        for parameter in parameters[1:]:
            if parameter.startswith('q='):
                try:
                    quality = float(parameter[2:])
                except ValueError:
                    quality = 1.0
                break
        entries.append((content_type, quality))
    # sort() is stable: equal qualities keep their original header order.
    entries.sort(key=lambda entry: entry[1], reverse=True)
    return [content_type for content_type, quality in entries]
| mit | Python |
4c4c32e9a627fcbca8527aa38b5ea41457e96683 | Fix errant import | editorsnotes/editorsnotes,editorsnotes/editorsnotes | editorsnotes/api/tests/serializers.py | editorsnotes/api/tests/serializers.py | import json
from rest_framework import serializers
from rest_framework.renderers import JSONRenderer
from django.test.client import RequestFactory
from editorsnotes.auth.models import Project
from .views import ClearContentTypesTransactionTestCase
from ..serializers.mixins import EmbeddedItemsMixin
from ..serializers import ProjectSerializer
class EmbeddingSerializerTestCase(ClearContentTypesTransactionTestCase):
    """EmbeddedItemsMixin should inline serialized items under '_embedded'."""

    fixtures = ['projects.json']

    def setUp(self):
        # Requests are only needed to build absolute URIs for the items.
        factory = RequestFactory()
        self.dummy_request = factory.get('/')

    def test_has_embedded_project(self):
        """Fields named in Meta.embedded_fields are expanded, keyed by URL."""
        context = {'request': self.dummy_request}

        project = Project.objects.get(slug='emma')
        project_url = self.dummy_request.build_absolute_uri(
            project.get_absolute_url())
        project_serializer = ProjectSerializer(
            instance=project, context=context)

        # Minimal serializer whose single field is declared embeddable.
        class SimpleEmbeddingSerializer(EmbeddedItemsMixin,
                                        serializers.Serializer):
            project_url = serializers.SerializerMethodField()

            class Meta:
                fields = ('project_url',)
                embedded_fields = ('project_url',)

            def get_project_url(self, obj):
                return project_url

        test_serializer = SimpleEmbeddingSerializer({},
                                                    include_embeds=True,
                                                    context=context
                                                    )

        # Round-trip through the JSON renderer so plain data is compared.
        data = json.loads(JSONRenderer().render(test_serializer.data))
        project_data = json.loads(JSONRenderer().render(project_serializer.data))

        self.assertEqual(data, {
            'project_url': project_url,
            '_embedded': {project_url: project_data}
        })
| import json
from rest_framework import serializers
from rest_framework.renderers import JSONRenderer
from django.test.client import RequestFactory
from editorsnotes.auth.models import Project
from .views import ClearContentTypesTransactionTestCase
from ..serializers.base import EmbeddedItemsMixin
from ..serializers import ProjectSerializer
class EmbeddingSerializerTestCase(ClearContentTypesTransactionTestCase):
    """EmbeddedItemsMixin should inline serialized items under '_embedded'."""

    fixtures = ['projects.json']

    def setUp(self):
        # Requests are only needed to build absolute URIs for the items.
        factory = RequestFactory()
        self.dummy_request = factory.get('/')

    def test_has_embedded_project(self):
        """Fields named in Meta.embedded_fields are expanded, keyed by URL."""
        context = {'request': self.dummy_request}

        project = Project.objects.get(slug='emma')
        project_url = self.dummy_request.build_absolute_uri(
            project.get_absolute_url())
        project_serializer = ProjectSerializer(
            instance=project, context=context)

        # Minimal serializer whose single field is declared embeddable.
        class SimpleEmbeddingSerializer(EmbeddedItemsMixin,
                                        serializers.Serializer):
            project_url = serializers.SerializerMethodField()

            class Meta:
                fields = ('project_url',)
                embedded_fields = ('project_url',)

            def get_project_url(self, obj):
                return project_url

        test_serializer = SimpleEmbeddingSerializer({},
                                                    include_embeds=True,
                                                    context=context
                                                    )

        # Round-trip through the JSON renderer so plain data is compared.
        data = json.loads(JSONRenderer().render(test_serializer.data))
        project_data = json.loads(JSONRenderer().render(project_serializer.data))

        self.assertEqual(data, {
            'project_url': project_url,
            '_embedded': {project_url: project_data}
        })
| agpl-3.0 | Python |
3a479db126d081532e76a5a4018bcfb6ad11f5e9 | Fix hook param name in nonpython example (#9373) | RonnyPfannschmidt/pytest,pytest-dev/pytest,Akasurde/pytest,markshao/pytest | doc/en/example/nonpython/conftest.py | doc/en/example/nonpython/conftest.py | # content of conftest.py
import pytest
def pytest_collect_file(parent, file_path):
    """Collect test*.yaml files as YamlFile collector nodes."""
    is_yaml_test = file_path.suffix == ".yaml" and file_path.name.startswith("test")
    if is_yaml_test:
        return YamlFile.from_parent(parent, path=file_path)
    return None
class YamlFile(pytest.File):
    """A YAML file collected by pytest as a test file."""

    def collect(self):
        """Yield one YamlItem per top-level mapping entry, sorted by name."""
        # We need a yaml parser, e.g. PyYAML.
        import yaml

        raw = yaml.safe_load(self.path.open())
        for name, spec in sorted(raw.items()):
            yield YamlItem.from_parent(self, name=name, spec=spec)
class YamlItem(pytest.Item):
    """One named usecase from a YAML file; ``spec`` is its mapping of checks."""

    def __init__(self, name, parent, spec):
        super().__init__(name, parent)
        self.spec = spec

    def runtest(self):
        for name, value in sorted(self.spec.items()):
            # Some custom test execution (dumb example follows).
            if name != value:
                raise YamlException(self, name, value)

    def repr_failure(self, excinfo):
        """Called when self.runtest() raises an exception."""
        # Returning None for other exception types falls back to pytest's
        # default failure representation.
        if isinstance(excinfo.value, YamlException):
            return "\n".join(
                [
                    "usecase execution failed",
                    " spec failed: {1!r}: {2!r}".format(*excinfo.value.args),
                    " no further details known at this point.",
                ]
            )

    def reportinfo(self):
        return self.path, 0, f"usecase: {self.name}"
class YamlException(Exception):
    """Custom exception for error reporting; args are (item, name, value)."""
| # content of conftest.py
import pytest
def pytest_collect_file(parent, file_path):
    """Collect test*.yaml files as YamlFile collector nodes.

    Bug fix: pytest calls hooks by keyword; modern pytest supplies the
    pathlib-based ``file_path`` argument (with ``.suffix``/``.name``),
    whereas the legacy ``fspath`` argument is a py.path.local with
    different attributes.  Using ``file_path`` matches how this hook's
    body treats its argument.
    """
    if file_path.suffix == ".yaml" and file_path.name.startswith("test"):
        return YamlFile.from_parent(parent, path=file_path)
class YamlFile(pytest.File):
    """A YAML file collected by pytest as a test file."""

    def collect(self):
        """Yield one YamlItem per top-level mapping entry, sorted by name."""
        # We need a yaml parser, e.g. PyYAML.
        import yaml

        raw = yaml.safe_load(self.path.open())
        for name, spec in sorted(raw.items()):
            yield YamlItem.from_parent(self, name=name, spec=spec)
class YamlItem(pytest.Item):
    """One named usecase from a YAML file; ``spec`` is its mapping of checks."""

    def __init__(self, name, parent, spec):
        super().__init__(name, parent)
        self.spec = spec

    def runtest(self):
        for name, value in sorted(self.spec.items()):
            # Some custom test execution (dumb example follows).
            if name != value:
                raise YamlException(self, name, value)

    def repr_failure(self, excinfo):
        """Called when self.runtest() raises an exception."""
        # Returning None for other exception types falls back to pytest's
        # default failure representation.
        if isinstance(excinfo.value, YamlException):
            return "\n".join(
                [
                    "usecase execution failed",
                    " spec failed: {1!r}: {2!r}".format(*excinfo.value.args),
                    " no further details known at this point.",
                ]
            )

    def reportinfo(self):
        return self.path, 0, f"usecase: {self.name}"
class YamlException(Exception):
    """Custom exception for error reporting; args are (item, name, value)."""
| mit | Python |
c12186ce51edc184692dbe214d17c9993e6f627a | Update yadisk.py | haitaka/DroiTaka | cogs/utils/api/yadisk.py | cogs/utils/api/yadisk.py | import json
import requests
__version__ = '0.1.2-dev'
USER_AGENT = 'pycopy/{}'.format(__version__)

# NOTE(review): these endpoints are left over from the copy.com API even
# though this module now targets Yandex Disk -- only direct_link/get_file
# and the download helpers still reference them; confirm before removing.
BASE_URL = 'https://api.copy.com'
AUTH_URL = BASE_URL + '/auth_user' # TODO: should use /rest
OBJECTS_URL = BASE_URL + '/list_objects' # TODO: should use /rest
DOWNLOAD_URL = BASE_URL + '/download_object' # TODO: should use /rest
class Copy(object):
    """
    Minimal Yandex Disk client.

    NOTE(review): the class name and several module-level endpoint
    constants are left over from the copy.com client this was adapted from.
    """

    def __init__(self, token):
        self.session = requests.session()
        # NOTE(review): Yandex's documented OAuth header is 'Authorization';
        # 'Authentication' is kept byte-for-byte to preserve current
        # behaviour -- confirm against the API before changing.
        self.session.headers.update({'Authentication': 'OAuth ' + str(token),})

    def get_token(self, key):
        """
        Exchange an OAuth confirmation code for a token response.

        Bug fix: the original discarded the HTTP response entirely; the
        parsed JSON (contains 'access_token' on success) is now returned.
        """
        res = requests.post('http://oauth.yandex.ru/token', data = {
            'grant_type': 'authorization_code',
            'code': key,
            'client_id': 'b12710fc26ee46ba82e34b97f08f2305',
            'client_secret': '4ff2284115644e04acc77c54526364d2',
            'device_id': '141f72b7-fd02-11e5-981a-00155d860f42',
            'device_name': 'DroiTaka',
        })
        return res.json()

    def _get(self, url, *args, **kwargs):
        """GET through the authenticated session."""
        return self.session.get(url, *args, **kwargs)

    def _post(self, url, data, *args, **kwargs):
        """POST ``data`` JSON-encoded under the 'data' form field."""
        return self.session.post(url, {'data': json.dumps(data), }, *args, **kwargs)

    def list_files(self, dir_path):
        """
        Return the names of the files stored under ``dir_path``.

        Bug fix: the original statement was committed mid-edit and was a
        SyntaxError.  Reconstructed against the Yandex Disk REST API:
        GET /v1/disk/resources?path=... lists a folder, whose files are the
        '_embedded' -> 'items' entries with type 'file'.  Confirm the
        response schema against the API docs.
        """
        file_list = []
        response = self._get('https://cloud-api.yandex.net/v1/disk/resources',
                             params={'path': dir_path})
        payload = response.json()
        for item in payload.get('_embedded', {}).get('items', []):
            if item.get('type') == 'file':
                file_list.append(item['name'])
        return file_list

    def direct_link(self, file_path):
        """Return a direct download URL for ``file_path`` (copy.com API)."""
        object_url = BASE_URL + '/rest/meta/copy/' + file_path
        response = self.session.get(object_url)
        return response.json()['url']

    def get_file(self, file_path):
        """Return the raw, decoded response stream for ``file_path``."""
        # NOTE(review): 'url' is unused -- the download goes through
        # DOWNLOAD_URL; the call is kept in case its request matters.
        url = self.direct_link(file_path)
        r = self._post(DOWNLOAD_URL, {'path': file_path}, stream=True)
        r.raw.decode_content = True
        return r.raw

    def dwnload_file(self, file_path):
        """
        Download ``file_path`` into tmp_uploads/ and return the local path.

        (Misspelled method name kept: callers depend on it.)
        """
        url = self.direct_link(file_path)
        local_filename = "tmp_uploads/" + url.split('/')[-1]
        r = self._get(url, stream=True)
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
        return local_filename

    def get_headers_str(self):
        """Return all session headers as one CRLF-terminated string."""
        headers_str = ""
        for key, value in self.session.headers.items():
            headers_str += "{}: {}\r\n".format(key, value)
        return headers_str
| import json
import requests
__version__ = '0.1.2-dev'
USER_AGENT = 'pycopy/{}'.format(__version__)
BASE_URL = 'https://api.copy.com'
AUTH_URL = BASE_URL + '/auth_user' # TODO: should use /rest
OBJECTS_URL = BASE_URL + '/list_objects' # TODO: should use /rest
DOWNLOAD_URL = BASE_URL + '/download_object' # TODO: should use /rest
class Copy(object):
    """Minimal client for the (now defunct) copy.com cloud-storage API."""

    def __init__(self, username, password):
        self.session = requests.session()
        self.session.headers.update({'X-Client-Type': 'api',
                                     'X-Api-Version': '1',
                                     'User-Agent': USER_AGENT, })
        self.authenticate(username, password)

    def _get(self, url, *args, **kwargs):
        """GET through the authenticated session."""
        return self.session.get(url, *args, **kwargs)

    def _post(self, url, data, *args, **kwargs):
        """POST ``data`` JSON-encoded under the API's 'data' form field."""
        return self.session.post(url, {'data': json.dumps(data), }, *args,
                                 **kwargs)

    def authenticate(self, username, password):
        """Log in and install the returned auth token on the session."""
        response = self._post(AUTH_URL,
                              {'username': username, 'password': password, })
        json_response = response.json()
        if 'auth_token' not in json_response:
            raise ValueError("Error while authenticating")
        self.user_data = json_response
        self.auth_token = json_response['auth_token']
        self.session.headers.update({'X-Authorization': self.auth_token, })

    def list_files(self, dir_path):
        """
        Return the base names of all files under ``dir_path``.

        Pages through /list_objects using the server-provided
        'list_watermark' cursor until 'more_items' reports '0'.
        """
        file_list = []
        list_wtrmark = False
        while (True):
            response = self._post(OBJECTS_URL, {'path': dir_path, 'list_watermark': list_wtrmark, })
            # NOTE(review): 'file' shadows the Python 2 builtin; kept as-is.
            for file in response.json()['children']:
                if file['type'] == 'file':
                    file_list.append(file['path'].split("/")[-1])
            list_wtrmark = response.json()['list_watermark']
            if (response.json()['more_items'] == '0'):
                break
        return file_list

    def direct_link(self, file_path):
        """Return a direct download URL for ``file_path``."""
        object_url = BASE_URL + '/rest/meta/copy/' + file_path
        response = self.session.get(object_url)
        return response.json()['url']

    def get_file(self, file_path):
        """Return the raw, decoded response stream for ``file_path``."""
        # NOTE(review): 'url' is unused -- the download goes through
        # DOWNLOAD_URL below.
        url = self.direct_link(file_path)
        r = self._post(DOWNLOAD_URL, {'path': file_path}, stream=True)
        r.raw.decode_content = True
        return r.raw

    def dwnload_file(self, file_path):
        """
        Download ``file_path`` into tmp_uploads/ and return the local path.

        (Misspelled method name kept: callers depend on it.)
        """
        url = self.direct_link(file_path)
        local_filename = "tmp_uploads/" + url.split('/')[-1]
        r = self._get(url, stream=True)
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                #f.flush() #commented by recommendation from J.F.Sebastian
        return local_filename

    def get_headers_str(self):
        """Return all session headers as one CRLF-terminated string."""
        headers_str = ""
        for key, value in self.session.headers.items():
            headers_str += "{}: {}\r\n".format(key, value)
        return headers_str
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.