commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
18b769ed14028f771877439a6e9c73e333671986 | use LOG_LEVEL env variable | joliveros/bitmex-websocket,joliveros/bitmex-websocket | bitmex_websocket/_settings_base.py | bitmex_websocket/_settings_base.py | from os.path import join
import logging
import os
import alog
########################################################################################################################
# Connection/Auth
########################################################################################################################
# API URL.
BASE_URL = ''
if os.environ.get('RUN_ENV') == 'development':
BASE_URL = "https://testnet.bitmex.com/api/v1/"
else:
BASE_URL = "https://www.bitmex.com/api/v1/"
# The BitMEX API requires permanent API keys. Go to https://testnet.bitmex.com/api/apiKeys to fill these out.
BITMEX_API_KEY = os.environ.get('BITMEX_API_KEY')
BITMEX_API_SECRET = os.environ.get('BITMEX_API_SECRET')
# Available levels: logging.(DEBUG|INFO|WARN|ERROR)
LOG_LEVEL = os.environ.get('LOG_LEVEL')
if not LOG_LEVEL:
LOG_LEVEL = logging.INFO
alog.set_level(LOG_LEVEL)
| from os.path import join
import logging
import os
########################################################################################################################
# Connection/Auth
########################################################################################################################
# API URL.
BASE_URL = ''
if os.environ.get('RUN_ENV') == 'development':
BASE_URL = "https://testnet.bitmex.com/api/v1/"
else:
BASE_URL = "https://www.bitmex.com/api/v1/"
# The BitMEX API requires permanent API keys. Go to https://testnet.bitmex.com/api/apiKeys to fill these out.
BITMEX_API_KEY = os.environ.get('BITMEX_API_KEY')
BITMEX_API_SECRET = os.environ.get('BITMEX_API_SECRET')
# Available levels: logging.(DEBUG|INFO|WARN|ERROR)
LOG_LEVEL = os.environ.get('LOGGING')
if not LOG_LEVEL:
LOG_LEVEL = logging.INFO
| mit | Python |
6bb9a4ed50ad879c56cdeae0dedb49bba6780780 | Use IRC Nicks instead of real names. | honza/nigel | matchers/volunteer.py | matchers/volunteer.py | import random
from base import BaseMatcher
class VolunteerMatcher(BaseMatcher):
dev_text = "volunteer someone"
all_text = "volunteer a dev"
dev_candidates = ['sjl', 'arthurdebert', 'honza', 'fernandotakai', 'nicksergeant']
all_candidates = dev_candidates + ['cz', 'ehazlett']
def respond(self, message, user=None):
if self.dev_text in message.lower():
victim = random.choice(self.dev_candidates)
self.speak('%s is it' % victim)
elif self.all_text in message.lower():
victim = random.choice(self.all_candidates)
self.speak('%s is it' % victim)
| import random
from base import BaseMatcher
class VolunteerMatcher(BaseMatcher):
dev_text = "volunteer someone"
all_text = "volunteer a dev"
dev_candidates = ['Steve', 'Arthur', 'Honza', 'Fernando', 'Nick']
all_candidates = dev_candidates + ['Craig', 'Evan']
def respond(self, message, user=None):
if self.dev_text in message.lower():
victim = random.choice(self.dev_candidates)
self.speak('%s is it' % victim)
elif self.all_text in message.lower():
victim = random.choice(self.all_candidates)
self.speak('%s is it' % victim)
| bsd-2-clause | Python |
c475cf3e65100dd3cff3c992fc756fb2078b6195 | Update example project | gears/flask-gears | example/app.py | example/app.py | from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
app = Flask(__name__)
gears = Gears()
gears.init_app(app)
env = gears.get_environment(app)
env.compilers.register('.styl', StylusCompiler.as_handler())
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
| import os
from flask import Flask, render_template
from flask_gears import Gears
NODE_PATH = os.path.join(os.path.dirname(__file__), 'node_modules')
NODE_PATH = os.path.normpath(os.path.abspath(NODE_PATH))
os.environ['NODE_PATH'] = NODE_PATH
app = Flask(__name__)
gears = Gears()
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
| isc | Python |
fc319b99854145ac217f50f675cb7fffb1b52e9f | Add author to should-I-boot-this.py | kernelci/lava-ci-staging,kernelci/lava-ci-staging,kernelci/lava-ci-staging | should-I-boot-this.py | should-I-boot-this.py | #!/usr/bin/env python3
# -*- coding:utf-8 -*
#
# Copyright (C) 2017 Free Electrons SAS
# Author: Florent Jacquet <florent.jacquet@free-electrons.com>
#
# This module is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import configparser
"""
To test the script, just export those variables and play with their values
export LAB=lab-free-electrons
export TREE=mainline
"""
config = configparser.ConfigParser()
config.read('labs.ini')
# Is the lab existing?
if os.environ['LAB'] not in config.sections():
print("Unknown lab (%s). Allowing boot of %s." % (os.environ['LAB'], os.environ['TREE']))
sys.exit(0)
# Is the tree blacklisted for this lab?
if os.environ['TREE'] in config[os.environ['LAB']]['tree_blacklist'].split():
print("Tree '%s' is blacklisted for lab '%s'" % (os.environ['TREE'], os.environ['LAB']))
sys.exit(1)
print("Booting tree '%s' is allowed for lab '%s'" % (os.environ['TREE'], os.environ['LAB']))
sys.exit(0)
| #!/usr/bin/env python3
# -*- coding:utf-8 -*
#
# Copyright (C) 2017 Free Electrons SAS
# Author:
#
# This module is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import configparser
"""
To test the script, just export those variables and play with their values
export LAB=lab-free-electrons
export TREE=mainline
"""
config = configparser.ConfigParser()
config.read('labs.ini')
# Is the lab existing?
if os.environ['LAB'] not in config.sections():
print("Unknown lab (%s). Allowing boot of %s." % (os.environ['LAB'], os.environ['TREE']))
sys.exit(0)
# Is the tree blacklisted for this lab?
if os.environ['TREE'] in config[os.environ['LAB']]['tree_blacklist'].split():
print("Tree '%s' is blacklisted for lab '%s'" % (os.environ['TREE'], os.environ['LAB']))
sys.exit(1)
print("Booting tree '%s' is allowed for lab '%s'" % (os.environ['TREE'], os.environ['LAB']))
sys.exit(0)
| lgpl-2.1 | Python |
187d038bcd7cb8fd1d819f827bd1e081b762e9de | Prepare for next version | simplefin/siloscript,simplefin/siloscript,simplefin/siloscript | siloscript/version.py | siloscript/version.py | # Copyright (c) The SimpleFIN Team
# See LICENSE for details.
__version__ = "0.3.0-dev"
| # Copyright (c) The SimpleFIN Team
# See LICENSE for details.
__version__ = "0.2.1"
| apache-2.0 | Python |
b24083b0991157a1e0d8a533fc1cac3aa2e4523c | Order similar artist results properly | FreeMusicNinja/api.freemusic.ninja | similarities/utils.py | similarities/utils.py | from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
| import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
| bsd-3-clause | Python |
c9d1edf1148ed503c02510c998ee9da9394fa848 | Add a `;` at the end of each line and escape `"` character in content string (iOS only) | saminerve/localizable | lib/ios.py | lib/ios.py | import os
import subprocess
from files import findFiles
import codecs
def init(args):
path = args["root"] if "root" in args else "."
path = path + (("/"+args["path"]) if "path" in args else "")
global storyboardFiles, stringsFiles
storyboardFiles = findFiles(".storyboard", path+"/Base.lproj")
stringsFiles = findFiles(".strings", path+"/fr.lproj")
global resourceContent
resourceContent = {}
#Add standalone Localizable.strings file
global commonStoryboard
commonStoryboard = "Localizable"
def close():
for storyboard, (path, content) in resourceContent.iteritems():
with open(path, 'w') as file:
file.write(content)
file.close()
def writeText(row, storyboard):
if row.dynamic:
storyboard = commonStoryboard
if storyboard != None and storyboard.lower() in resourceContent:
(path, content) = resourceContent[storyboard.lower()]
text = row.text.replace('"', '\\"')
if storyboard.lower() == commonStoryboard.lower():
resourceContent[storyboard.lower()] = (path, content + "\""+row.key + "\" = \"" + text + "\";\n")
else:
resourceContent[storyboard.lower()] = (path, content.replace("#{"+row.key+"}", text))
def writeComment(row, storyboard):
if commonStoryboard != None and commonStoryboard.lower() in resourceContent:
(path, content) = resourceContent[commonStoryboard.lower()]
resourceContent[commonStoryboard.lower()] = (path, content + "\n// "+row.section + "\n")
def writeStoryboard(row, storyboard):
if storyboardFiles != None:
for (rootStory, fileStory) in storyboardFiles:
if fileStory.lower() == storyboard.lower()+".storyboard" or storyboard.lower() == commonStoryboard.lower():
storyPath = rootStory+"/"+fileStory
for (rootStrings, fileStrings) in stringsFiles:
if fileStrings.lower() == storyboard.lower()+".strings":
stringsPath = rootStrings+"/"+fileStrings
generateStringsFile(storyboard.lower(), storyPath, stringsPath)
file = codecs.open(stringsPath, encoding='utf-16')
resourceContent[storyboard.lower()] = (stringsPath, file.read())
file.close
break
break
def generateStringsFile(storyboard, storyPath, stringsPath):
if storyboard != None and storyboard.lower() == commonStoryboard.lower():
os.remove(stringsPath)
file = open(stringsPath, 'w')
file.close()
else:
print ">", stringsPath
subprocess.call(["ibtool", storyPath, "--generate-strings-file", stringsPath])
| import os
import subprocess
from files import findFiles
import codecs
def init(args):
path = args["root"] if "root" in args else "."
path = path + (("/"+args["path"]) if "path" in args else "")
global storyboardFiles, stringsFiles
storyboardFiles = findFiles(".storyboard", path+"/Base.lproj")
stringsFiles = findFiles(".strings", path+"/fr.lproj")
global resourceContent
resourceContent = {}
#Add standalone Localizable.strings file
global commonStoryboard
commonStoryboard = "Localizable"
def close():
for storyboard, (path, content) in resourceContent.iteritems():
with open(path, 'w') as file:
file.write(content)
file.close()
def writeText(row, storyboard):
if row.dynamic:
storyboard = commonStoryboard
if storyboard != None and storyboard.lower() in resourceContent:
(path, content) = resourceContent[storyboard.lower()]
if storyboard.lower() == commonStoryboard.lower():
resourceContent[storyboard.lower()] = (path, content + "\""+row.key + "\" = \"" + row.text + "\"\n")
else:
resourceContent[storyboard.lower()] = (path, content.replace("#{"+row.key+"}", row.text))
def writeComment(row, storyboard):
if commonStoryboard != None and commonStoryboard.lower() in resourceContent:
(path, content) = resourceContent[commonStoryboard.lower()]
resourceContent[commonStoryboard.lower()] = (path, content + "\n// "+row.section + "\n")
def writeStoryboard(row, storyboard):
if storyboardFiles != None:
for (rootStory, fileStory) in storyboardFiles:
if fileStory.lower() == storyboard.lower()+".storyboard" or storyboard.lower() == commonStoryboard.lower():
storyPath = rootStory+"/"+fileStory
for (rootStrings, fileStrings) in stringsFiles:
if fileStrings.lower() == storyboard.lower()+".strings":
stringsPath = rootStrings+"/"+fileStrings
generateStringsFile(storyboard.lower(), storyPath, stringsPath)
file = codecs.open(stringsPath, encoding='utf-16')
resourceContent[storyboard.lower()] = (stringsPath, file.read())
file.close
break
break
def generateStringsFile(storyboard, storyPath, stringsPath):
if storyboard != None and storyboard.lower() == commonStoryboard.lower():
os.remove(stringsPath)
file = open(stringsPath, 'w')
file.close()
else:
print ">", stringsPath
subprocess.call(["ibtool", storyPath, "--generate-strings-file", stringsPath])
| unlicense | Python |
5b101517628e1f87a956e78e99fde442d08fc5e6 | Make attributes executable and add in table to model map | rcbau/fuzzy-happiness | fuzzy_happiness/attributes.py | fuzzy_happiness/attributes.py | #!/usr/bin/python
#
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import sys
from nova.db.sqlalchemy import models
from oslo.config import cfg
CONF = cfg.CONF
def load_configuration():
configs = {}
for name, obj in inspect.getmembers(models):
if not inspect.isclass(obj):
continue
if not issubclass(obj, models.NovaBase):
continue
attrs_missing = []
for required_attr in ['__tablename__', '__confidential__']:
if not hasattr(obj, required_attr):
attrs_missing.append(required_attr)
if attrs_missing:
if CONF.debug:
print ('Required attributes %s missing from %s'
% (', '.join(attrs_missing), name))
continue
configs[obj.__tablename__] = obj.__confidential__
return configs
def map_tables_to_model_names(tables):
results = {}
for name, obj in inspect.getmembers(models):
if hasattr(obj, '__tablename__'):
if obj.__tablename__ in tables:
results[obj.__tablename__] = name
return results
def main():
CONF(sys.argv[1:], project='fuzzy-happiness')
print load_configuration()
if __name__ == '__main__':
main()
| #!/usr/bin/python
#
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import sys
from nova.db.sqlalchemy import models
from oslo.config import cfg
CONF = cfg.CONF
def load_configuration():
configs = {}
for name, obj in inspect.getmembers(models):
if not inspect.isclass(obj):
continue
if not issubclass(obj, models.NovaBase):
continue
attrs_missing = []
for required_attr in ['__tablename__', '__confidential__']:
if not hasattr(obj, required_attr):
attrs_missing.append(required_attr)
if attrs_missing:
if CONF.debug:
print ('Required attributes %s missing from %s'
% (', '.join(attrs_missing), name))
continue
configs[obj.__tablename__] = obj.__confidential__
return configs
def main():
CONF(sys.argv[1:], project='fuzzy-happiness')
print load_configuration()
| apache-2.0 | Python |
dfb3ef220b53b03b0f5007d8712ad3704fc860f6 | Use KafkaProducer | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/change_feed/producer.py | corehq/apps/change_feed/producer.py | from __future__ import unicode_literals
from __future__ import absolute_import
import json
import time
from django.conf import settings
from kafka import KafkaProducer
from kafka.common import LeaderNotAvailableError, FailedPayloadsError, KafkaUnavailableError
from six.moves import range
from corehq.util.soft_assert import soft_assert
def send_to_kafka(producer, topic, change_meta):
def _send_to_kafka():
producer.send_messages(
bytes(topic),
bytes(json.dumps(change_meta.to_json())),
)
try:
tries = 3
for i in range(tries):
# try a few times because the python kafka libraries can trigger timeouts
# if they are idle for a while.
try:
_send_to_kafka()
break
except (FailedPayloadsError, KafkaUnavailableError, LeaderNotAvailableError):
if i == (tries - 1):
# if it's the last try, fail hard
raise
except LeaderNotAvailableError:
# kafka seems to be down. sleep a bit to avoid crazy amounts of error spam
time.sleep(15)
raise
except Exception as e:
_assert = soft_assert(notify_admins=True)
_assert(False, 'Problem sending change to kafka {}: {} ({})'.format(
change_meta.to_json(), e, type(e)
))
raise
class ChangeProducer(object):
def __init__(self):
self._producer = None
@property
def producer(self):
if self._producer is not None:
return self._producer
self._producer = KafkaProducer(
bootstrap_servers=settings.KAFKA_BROKERS,
api_version=settings.KAFKA_API_VERSION,
client_id="cchq-producer",
retries=3,
acks=1
)
return self._producer
def send_change(self, topic, change_meta):
message = change_meta.to_json()
try:
self.producer.send(topic, bytes(json.dumps(message)))
except Exception as e:
_assert = soft_assert(notify_admins=True)
_assert(False, 'Problem sending change to kafka {}: {} ({})'.format(
message, e, type(e)
))
raise
producer = ChangeProducer()
| from __future__ import unicode_literals
from __future__ import absolute_import
import json
import time
from corehq.util.soft_assert import soft_assert
from kafka import SimpleProducer
from kafka.common import LeaderNotAvailableError, FailedPayloadsError, KafkaUnavailableError
from corehq.apps.change_feed.connection import get_simple_kafka_client
from six.moves import range
def send_to_kafka(producer, topic, change_meta):
def _send_to_kafka():
producer.send_messages(
bytes(topic),
bytes(json.dumps(change_meta.to_json())),
)
try:
tries = 3
for i in range(tries):
# try a few times because the python kafka libraries can trigger timeouts
# if they are idle for a while.
try:
_send_to_kafka()
break
except (FailedPayloadsError, KafkaUnavailableError, LeaderNotAvailableError):
if i == (tries - 1):
# if it's the last try, fail hard
raise
except LeaderNotAvailableError:
# kafka seems to be down. sleep a bit to avoid crazy amounts of error spam
time.sleep(15)
raise
except Exception as e:
_assert = soft_assert(notify_admins=True)
_assert(False, 'Problem sending change to kafka {}: {} ({})'.format(
change_meta.to_json(), e, type(e)
))
raise
class ChangeProducer(object):
def __init__(self):
self._kafka = None
self._producer = None
self._has_error = False
@property
def kafka(self):
if self._kafka is None:
self._kafka = get_simple_kafka_client(client_id='cchq-producer')
return self._kafka
@property
def producer(self):
if self._producer is not None:
return self._producer
self._producer = SimpleProducer(
self.kafka, async_send=False, req_acks=SimpleProducer.ACK_AFTER_LOCAL_WRITE,
sync_fail_on_error=True
)
return self._producer
def send_change(self, topic, change_meta):
if self.producer:
send_to_kafka(self.producer, topic, change_meta)
producer = ChangeProducer()
| bsd-3-clause | Python |
5cac0d8b336cb8efe7d819d47abf46ccadea7b29 | Fix typo/bug in validate_params function | kmike/django-generic-images,kmike/django-generic-images,kmike/django-generic-images | generic_utils/templatetags.py | generic_utils/templatetags.py | from django import template
class InvalidParamsError(template.TemplateSyntaxError):
''' Custom exception class to distinguish usual TemplateSyntaxErrors
and validation errors for templatetags introduced by ``validate_params``
function'''
pass
def validate_params(bits, arguments_count, keyword_positions):
'''
Raises exception if passed params (`bits`) do not match signature.
Signature is defined by `arguments_count` (acceptible number of params) and
keyword_positions (dictionary with positions in keys and keywords in values,
for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
'''
if len(bits) != arguments_count+1:
raise InvalidParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))
for pos in keyword_positions:
value = keyword_positions[pos]
if bits[pos] != value:
raise InvalidParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
| from django import template
class InvalidParamsError(template.TemplateSyntaxError):
''' Custom exception class to distinguish usual TemplateSyntaxErrors
and validation errors for templatetags introduced by ``validate_params``
function'''
pass
def validate_params(bits, arguments_count, keyword_positions):
'''
Raises exception if passed params (`bits`) do not match signature.
Signature is defined by `arguments_count` (acceptible number of params) and
keyword_positions (dictionary with positions in keys and keywords in values,
for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
'''
if len(bits) != arguments_count+1:
raise InvalidTagParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))
for pos in keyword_positions:
value = keyword_positions[pos]
if bits[pos] != value:
raise InvalidTagParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
| mit | Python |
0cdc4901f604b64ab26adf0cad867bf58e72f91a | update to debug | justinwp/croplands,justinwp/croplands | gfsad/views/api/processors.py | gfsad/views/api/processors.py | from flask import request
from gfsad.exceptions import Unauthorized
from gfsad.utils.s3 import upload_image
from gfsad.tasks.records import get_ndvi
from gfsad.auth import allowed_roles, verify_role, load_user
def api_roles(role):
def wrapper(*args, **kwargs):
if not allowed_roles(role):
raise Unauthorized()
return wrapper
def after_post_record():
get_ndvi.apply()
def add_user_to_posted_data(data=None, **kwargs):
"""
Appends user_id to data if user is not none.
:param data: data from api endpoint
:param kwargs:
:return: None
"""
user = load_user()
if user is 'anonymous':
print 'Anonymous User'
return
data['user_id'] = user.id
#TODO Improve method of applying user_id to sub models
# perhaps using get_related_model? looping through entities of array?
if 'records' in data:
for record in data['records']:
record['user_id'] = user.id
if 'images' in data:
for image in data['images']:
image['user_id'] = user.id
def remove_relations(data=None, **kwargs):
"""
Removes all relations from patched data.
:param data:
:param kwargs:
:return: None
"""
keys_to_delete = []
for key, val in data.iteritems():
if type(data[key]) is list:
keys_to_delete.append(key)
for key in keys_to_delete:
del data[key]
def debug_post(data=None, **kwargs):
print data
print "authorization header" + str(request.headers.get('Authorization', None)) | from flask import request
from gfsad.exceptions import Unauthorized
from gfsad.utils.s3 import upload_image
from gfsad.tasks.records import get_ndvi
from gfsad.auth import allowed_roles, verify_role, load_user
def api_roles(role):
def wrapper(*args, **kwargs):
if not allowed_roles(role):
raise Unauthorized()
return wrapper
def after_post_record():
get_ndvi.apply()
def add_user_to_posted_data(data=None, **kwargs):
"""
Appends user_id to data if user is not none.
:param data: data from api endpoint
:param kwargs:
:return: None
"""
user = load_user()
if user is 'anonymous':
print 'Anonymous User'
return
data['user_id'] = user.id
#TODO Improve method of applying user_id to sub models
# perhaps using get_related_model? looping through entities of array?
if 'records' in data:
for record in data['records']:
record['user_id'] = user.id
if 'images' in data:
for image in data['images']:
image['user_id'] = user.id
def remove_relations(data=None, **kwargs):
"""
Removes all relations from patched data.
:param data:
:param kwargs:
:return: None
"""
keys_to_delete = []
for key, val in data.iteritems():
if type(data[key]) is list:
keys_to_delete.append(key)
for key in keys_to_delete:
del data[key]
def debug_post(data=None, **kwargs):
print data
print request.headers | mit | Python |
d37faaa3950f9468a9276d8202177a3f4e48f632 | Clean up mnist example | explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc | examples/mnist_mlp.py | examples/mnist_mlp.py | from __future__ import print_function
import plac
import dill as pickle
from tqdm import tqdm
from thinc.neural.vec2vec import Model, ReLu, Softmax
from thinc.api import clone, chain
from thinc.extra import datasets
from thinc.neural.ops import CupyOps
def main(depth=2, width=512, nb_epoch=20):
Model.ops = CupyOps()
# Configuration here isn't especially good. But, for demo..
with Model.define_operators({'**': clone, '>>': chain}):
model = ReLu(width) >> ReLu(width) >> Softmax()
train_data, dev_data, _ = datasets.mnist()
train_X, train_y = model.ops.unzip(train_data)
dev_X, dev_y = model.ops.unzip(dev_data)
with model.begin_training(train_X, train_y) as (trainer, optimizer):
epoch_loss = [0.]
def report_progress():
with model.use_params(optimizer.averages):
print(epoch_loss[-1], model.evaluate(dev_X, dev_y), trainer.dropout)
epoch_loss.append(0.)
trainer.each_epoch.append(report_progress)
trainer.nb_epoch = nb_epoch
trainer.dropout = 0.75
trainer.batch_size = 128
trainer.dropout_decay = 1e-4
train_X = model.ops.asarray(train_X, dtype='float32')
y_onehot = model.ops.allocate((train_X.shape[0], 10), dtype='float32')
for i, label in enumerate(train_y):
y_onehot[i, int(label)] = 1.
for X, y in trainer.iterate(train_X, y_onehot):
yh, backprop = model.begin_update(X, drop=trainer.dropout)
loss = ((yh-y)**2.).sum() / y.shape[0]
backprop(yh-y, optimizer)
epoch_loss[-1] += loss
with model.use_params(optimizer.averages):
print('Avg dev.: %.3f' % model.evaluate(dev_X, dev_y))
with open('out.pickle', 'wb') as file_:
pickle.dump(model, file_, -1)
if __name__ == '__main__':
plac.call(main)
| from __future__ import print_function
import plac
import dill as pickle
from thinc.neural.vec2vec import Model, ReLu, Softmax
from thinc.neural._classes.batchnorm import BatchNorm as BN
from thinc.api import clone, chain
from thinc.loss import categorical_crossentropy
from thinc.extra import datasets
def main(depth=4, width=128, nb_epoch=5):
with Model.define_operators({'**': clone, '>>': chain}):
model = BN(ReLu(width)) ** depth >> Softmax()
train_data, dev_data, _ = datasets.mnist()
train_X, train_y = model.ops.unzip(train_data)
dev_X, dev_y = model.ops.unzip(dev_data)
with model.begin_training(train_X, train_y) as (trainer, optimizer):
trainer.each_epoch.append(
lambda: print(model.evaluate(dev_X, dev_y)))
trainer.nb_epoch = nb_epoch
trainer.dropout = 0.2
trainer.dropout_decay = 0.0
for X, y in trainer.iterate(train_X, train_y):
X = model.ops.asarray(X)
y = model.ops.asarray(y)
yh, backprop = model.begin_update(X, drop=trainer.dropout)
d_loss, loss = categorical_crossentropy(yh, y)
optimizer.set_loss(loss)
backprop(d_loss, optimizer)
with model.use_params(optimizer.averages):
print('Avg dev.: %.3f' % model.evaluate(dev_X, dev_y))
with open('out.pickle', 'wb') as file_:
pickle.dump(model, file_, -1)
if __name__ == '__main__':
plac.call(main)
| mit | Python |
330074331c13f9adfe1abe182b61c241324aa0e8 | Remove old permission system in files | hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website | files/views.py | files/views.py | from django.shortcuts import render
from .models import Image
from .forms import ImageForm
from .templatetags.render_single_image import render_image
from django.views.generic import CreateView, DeleteView, UpdateView, ListView
from django.shortcuts import redirect
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import PermissionRequiredMixin
class ImageDeleteView(PermissionRequiredMixin, DeleteView):
model = Image
success_url = '/files/images'
permission_required = "files.delete_image"
class ImageListView(PermissionRequiredMixin, ListView):
model = Image
template_name = 'files/images.html'
permission_required = "files.view_image"
@login_required()
def imageUpload(request):
if request.method == 'POST':
form = ImageForm(request.POST, request.FILES, prefix='img')
if form.is_valid():
image = form.save(commit = False)
image.save()
return ajax_return_last_image(request)
else:
return HttpResponse(form.errors)
else:
return HttpResponseRedirect('/')
@login_required()
def imageDelete(request, id):
if request.method == 'POST':
image = Image.objects.get(id=id)
image.delete()
else:
return HttpResponseRedirect('/')
def ajax_return_last_image(request):
last_image = Image.objects.order_by('-time')[0]
return render(request, 'files/single-image.html', {'image':last_image})
@login_required()
def imageView(request, image_id):
try:
image = Image.objects.get(pk=image_id)
return HttpResponseRedirect('/media/'+str(image.file))
except Image.DoesNotExist:
return HttpResponseRedirect('/')
@login_required()
def modalpicker(request):
images = Image.objects.order_by('-time')
return images
| from django.shortcuts import render
from .models import Image
from .forms import ImageForm
from .templatetags.render_single_image import render_image
from django.views.generic import CreateView, DeleteView, UpdateView, ListView
from django.shortcuts import redirect
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from authentication.templatetags import check_user_group as groups
class ImageDeleteView(DeleteView):
model = Image
success_url = '/files/images'
def dispatch(self, request, *args, **kwargs):
if not groups.has_group(self.request.user, 'member'):
return redirect("/")
return super(ImageDeleteView, self).dispatch(request, *args, **kwargs)
class ImageListView(ListView):
model = Image
template_name = 'files/images.html'
def dispatch(self, request, *args, **kwargs):
if not groups.has_group(self.request.user, 'member'):
return redirect("/")
return super(ImageListView, self).dispatch(request, *args, **kwargs)
@login_required()
def imageUpload(request):
if request.method == 'POST':
form = ImageForm(request.POST, request.FILES, prefix='img')
if form.is_valid():
image = form.save(commit = False)
image.save()
return ajax_return_last_image(request)
else:
return HttpResponse(form.errors)
else:
return HttpResponseRedirect('/')
@login_required()
def imageDelete(request, id):
if request.method == 'POST':
image = Image.objects.get(id=id)
image.delete()
else:
return HttpResponseRedirect('/')
def ajax_return_last_image(request):
last_image = Image.objects.order_by('-time')[0]
return render(request, 'files/single-image.html', {'image':last_image})
@login_required()
def imageView(request, image_id):
try:
image = Image.objects.get(pk=image_id)
return HttpResponseRedirect('/media/'+str(image.file))
except Image.DoesNotExist:
return HttpResponseRedirect('/')
@login_required()
def modalpicker(request):
images = Image.objects.order_by('-time')
return images
| mit | Python |
eb67b659d1419ecc39fead03c9a2bd85a5a2c5fb | add coerce_type setting | night-crawler/django-docker-helpers | django_docker_helpers/utils.py | django_docker_helpers/utils.py | import os
import sys
import typing as t
from yaml import load
def dotkey(obj, dot_path: str, default=None):
val = obj
sentinel = object()
if '.' not in dot_path:
return obj.get(dot_path, default)
for path_item in dot_path.split('.'):
if not hasattr(val, 'get'):
return default
val = val.get(path_item, sentinel)
if val is sentinel:
return default
return val
def get_env_var_name(project_name: str, dotpath: str) -> str:
return '__'.join(filter(None, [project_name] + dotpath.upper().split('.')))
def load_yaml_config(project_name: str, filename: str) -> t.Tuple[dict, t.Callable]:
config_dict = load(open(filename))
sentinel = object()
def configure(key_name: str, default=None, coerce_type: t.Type[t.Union[bool, str, list, dict, None]]=None):
val = os.environ.get(get_env_var_name(project_name, key_name), sentinel)
if val is sentinel:
val = dotkey(config_dict, key_name, sentinel)
if val is sentinel:
val = default
if coerce_type is not None:
if coerce_type == bool:
if val in ['0', '1', 0, 1]:
val = bool(int(val))
if val.lower() == 'true':
val = True
if val.lower() == 'false':
val = False
else:
val = coerce_type(val)
return val
return config_dict, configure
def wf(s, flush=True):
sys.stdout.write(s)
flush and sys.stdout.flush()
| import os
import sys
import typing as t
from yaml import load
def dotkey(obj, dot_path: str, default=None):
val = obj
sentinel = object()
if '.' not in dot_path:
return obj.get(dot_path, default)
for path_item in dot_path.split('.'):
if not hasattr(val, 'get'):
return default
val = val.get(path_item, sentinel)
if val is sentinel:
return default
return val
def get_env_var_name(project_name: str, dotpath: str) -> str:
return '__'.join(filter(None, [project_name] + dotpath.upper().split('.')))
def load_yaml_config(project_name: str, filename: str) -> t.Tuple[dict, t.Callable]:
config_dict = load(open(filename))
sentinel = object()
def configure(key_name: str, default=None):
val = os.environ.get(get_env_var_name(project_name, key_name), sentinel)
if val is sentinel:
val = dotkey(config_dict, key_name, sentinel)
if val is sentinel:
val = default
return val
return config_dict, configure
def wf(s, flush=True):
sys.stdout.write(s)
flush and sys.stdout.flush()
| mit | Python |
4b747baa8325196534bc2182e5af53bd20068589 | update taskstats, don't use raw data | roolebo/pyroute2,roolebo/pyroute2 | examples/taskstats.py | examples/taskstats.py | '''
Simple taskstats sample.
'''
import os
from pyroute2 import TaskStats
pid = os.getpid()
ts = TaskStats()
# bind is required in the case of generic netlink
ts.bind()
ret = ts.get_pid_stat(int(pid))[0]
# parsed structure
print(ret)
ts.close()
| '''
Simple taskstats sample.
'''
import os
from pyroute2 import TaskStats
from pyroute2.common import hexdump
pid = os.getpid()
ts = TaskStats()
# bind is required in the case of generic netlink
ts.bind()
ret = ts.get_pid_stat(int(pid))[0]
# raw hex structure to check alignment
print(hexdump(ret.raw))
# parsed structure
print(ret)
ts.close()
| apache-2.0 | Python |
ca98e4e30fc12195dbddd795a65c24e5880e6029 | Install python-six package in Ubuntu | Azure/azure-linux-extensions,vityagi/azure-linux-extensions,andyliuliming/azure-linux-extensions,andyliuliming/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,krkhan/azure-linux-extensions,Azure/azure-linux-extensions,jasonzio/azure-linux-extensions,jasonzio/azure-linux-extensions,Azure/azure-linux-extensions,soumyanishan/azure-linux-extensions,jasonzio/azure-linux-extensions,vityagi/azure-linux-extensions,andyliuliming/azure-linux-extensions,Azure/azure-linux-extensions,varunkumta/azure-linux-extensions,vityagi/azure-linux-extensions,soumyanishan/azure-linux-extensions,krkhan/azure-linux-extensions,bpramod/azure-linux-extensions,bpramod/azure-linux-extensions,bpramod/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,varunkumta/azure-linux-extensions,soumyanishan/azure-linux-extensions,soumyanishan/azure-linux-extensions,varunkumta/azure-linux-extensions,vityagi/azure-linux-extensions,andyliuliming/azure-linux-extensions,vityagi/azure-linux-extensions,soumyanishan/azure-linux-extensions,krkhan/azure-linux-extensions,vityagi/azure-linux-extensions,bpramod/azure-linux-extensions,krkhan/azure-linux-extensions,varunkumta/azure-linux-extensions,Azure/azure-linux-extensions,bpramod/azure-linux-extensions,bpramod/azure-linux-extensions | VMEncryption/main/patch/UbuntuPatching.py | VMEncryption/main/patch/UbuntuPatching.py | #!/usr/bin/python
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+
import os
import sys
import imp
import base64
import re
import json
import platform
import shutil
import time
import traceback
import datetime
import subprocess
from AbstractPatching import AbstractPatching
from Common import *
class UbuntuPatching(AbstractPatching):
def __init__(self,logger,distro_info):
super(UbuntuPatching,self).__init__(distro_info)
self.logger = logger
self.base64_path = '/usr/bin/base64'
self.bash_path = '/bin/bash'
self.blkid_path = '/sbin/blkid'
self.cat_path = '/bin/cat'
self.cryptsetup_path = '/sbin/cryptsetup'
self.dd_path = '/bin/dd'
self.e2fsck_path = '/sbin/e2fsck'
self.echo_path = '/bin/echo'
self.lsblk_path = '/bin/lsblk'
self.lsscsi_path = '/usr/bin/lsscsi'
self.mkdir_path = '/bin/mkdir'
self.mount_path = '/bin/mount'
self.openssl_path = '/usr/bin/openssl'
self.resize2fs_path = '/sbin/resize2fs'
self.umount_path = '/bin/umount'
def install_extras(self):
"""
install the sg_dd because the default dd do not support the sparse write
"""
packages = ['cryptsetup-bin', 'lsscsi', 'python-six']
return_code = subprocess.call(['apt-get', 'install', '-y'] + packages)
self.logger.log("Installing packages: " + " ".join(packages))
self.logger.log("Installation result: " + str(return_code))
| #!/usr/bin/python
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+
import os
import sys
import imp
import base64
import re
import json
import platform
import shutil
import time
import traceback
import datetime
import subprocess
from AbstractPatching import AbstractPatching
from Common import *
class UbuntuPatching(AbstractPatching):
def __init__(self,logger,distro_info):
super(UbuntuPatching,self).__init__(distro_info)
self.logger = logger
self.base64_path = '/usr/bin/base64'
self.bash_path = '/bin/bash'
self.blkid_path = '/sbin/blkid'
self.cat_path = '/bin/cat'
self.cryptsetup_path = '/sbin/cryptsetup'
self.dd_path = '/bin/dd'
self.e2fsck_path = '/sbin/e2fsck'
self.echo_path = '/bin/echo'
self.lsblk_path = '/bin/lsblk'
self.lsscsi_path = '/usr/bin/lsscsi'
self.mkdir_path = '/bin/mkdir'
self.mount_path = '/bin/mount'
self.openssl_path = '/usr/bin/openssl'
self.resize2fs_path = '/sbin/resize2fs'
self.umount_path = '/bin/umount'
def install_extras(self):
"""
install the sg_dd because the default dd do not support the sparse write
"""
if(self.distro_info[0].lower() == "ubuntu" and self.distro_info[1] == "12.04"):
common_extras = ['cryptsetup-bin','lsscsi']
else:
common_extras = ['cryptsetup-bin','lsscsi']
for extra in common_extras:
self.logger.log("installation for " + extra + 'result is ' + str(subprocess.call(['apt-get', 'install','-y', extra]))) | apache-2.0 | Python |
48ed2d3d5d06138b4116a29ef091ed8f21561476 | Bump version to 0.0.7 | portfoliome/pgawedge | pgawedge/_version.py | pgawedge/_version.py | version_info = (0, 0, 7)
__version__ = '.'.join(map(str, version_info))
| version_info = (0, 0, 6)
__version__ = '.'.join(map(str, version_info))
| mit | Python |
78fd850094c2517aeea76640a9821d62ba999579 | remove duplicate service_yaml parameter (#932) | googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript | rules_typescript_gapic/typescript_gapic.bzl | rules_typescript_gapic/typescript_gapic.bzl | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load("@rules_gapic//:gapic.bzl", "proto_custom_library")
def typescript_gapic_library(
name,
src,
deps,
grpc_service_config = None,
package_name = None,
main_service = None,
bundle_config = None,
service_yaml = None,
metadata = None,
legacy_proto_load = None,
extra_protoc_parameters = [],
extra_protoc_file_parameters = {},
**kwargs):
plugin_args_dict = {}
if package_name:
plugin_args_dict["package-name"] = package_name
if main_service:
plugin_args_dict["main-service"] = main_service
if metadata:
plugin_args_dict["metadata"] = "true"
if legacy_proto_load:
plugin_args_dict["legacy-proto-load"] = "true"
file_args = {} # note: keys are filenames, values are parameter name, aligned with the prior art
for key, value in extra_protoc_file_parameters:
file_args[key] = value
if grpc_service_config:
file_args[grpc_service_config] = "grpc-service-config"
if bundle_config:
file_args[bundle_config] = "bundle-config"
if service_yaml:
file_args[service_yaml] = "service-yaml"
plugin_args = []
for parameter in extra_protoc_parameters:
plugin_args.append(parameter)
for key, value in plugin_args_dict.items():
plugin_args.append("{}={}".format(key, value))
output_suffix = ".srcjar"
proto_custom_library(
name = name,
deps = [src],
plugin = Label("//:protoc_plugin"),
plugin_args = plugin_args,
plugin_file_args = file_args,
output_type = "typescript-gapic",
output_suffix = output_suffix,
)
| # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load("@rules_gapic//:gapic.bzl", "proto_custom_library")
def typescript_gapic_library(
name,
src,
deps,
grpc_service_config = None,
package_name = None,
main_service = None,
bundle_config = None,
service_yaml = None,
metadata = None,
legacy_proto_load = None,
service_yaml = None,
extra_protoc_parameters = [],
extra_protoc_file_parameters = {},
**kwargs):
plugin_args_dict = {}
if package_name:
plugin_args_dict["package-name"] = package_name
if main_service:
plugin_args_dict["main-service"] = main_service
if metadata:
plugin_args_dict["metadata"] = "true"
if legacy_proto_load:
plugin_args_dict["legacy-proto-load"] = "true"
file_args = {} # note: keys are filenames, values are parameter name, aligned with the prior art
for key, value in extra_protoc_file_parameters:
file_args[key] = value
if grpc_service_config:
file_args[grpc_service_config] = "grpc-service-config"
if bundle_config:
file_args[bundle_config] = "bundle-config"
if service_yaml:
file_args[service_yaml] = "service-yaml"
plugin_args = []
for parameter in extra_protoc_parameters:
plugin_args.append(parameter)
for key, value in plugin_args_dict.items():
plugin_args.append("{}={}".format(key, value))
output_suffix = ".srcjar"
proto_custom_library(
name = name,
deps = [src],
plugin = Label("//:protoc_plugin"),
plugin_args = plugin_args,
plugin_file_args = file_args,
output_type = "typescript-gapic",
output_suffix = output_suffix,
)
| apache-2.0 | Python |
5d2e48ae1b6feffa2e0f969b1f4545e46944b095 | tweak to note | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | molo/core/backends.py | molo/core/backends.py | from django.contrib.auth.models import Group
from django_cas_ng.backends import CASBackend
class MoloCASBackend(CASBackend):
def authenticate(self, ticket, service, request):
user = super(
MoloCASBackend, self).authenticate(ticket, service, request)
if user is None:
return None
if 'attributes' in request.session \
and 'has_perm' in request.session['attributes']\
and request.session['attributes']['has_perm'] == 'True':
if request.session['attributes']['is_admin'] == 'True':
user.is_staff = True
user.is_superuser = True
user.save()
else:
moderator_group = Group.objects.filter(
name='Moderators').first()
if moderator_group:
user.groups.add(moderator_group)
"""
TODO: Handle case where Moderator group does not exist.
We need to log this or find ways of notifying users that
the moderator group was removed or renamed.
There isn't much we can do about this case though.
"""
else:
user.is_staff = False
user.is_superuser = False
user.save()
return None
return user
| from django.contrib.auth.models import Group
from django_cas_ng.backends import CASBackend
class MoloCASBackend(CASBackend):
def authenticate(self, ticket, service, request):
user = super(
MoloCASBackend, self).authenticate(ticket, service, request)
if user is None:
return None
if 'attributes' in request.session \
and 'has_perm' in request.session['attributes']\
and request.session['attributes']['has_perm'] == 'True':
if request.session['attributes']['is_admin'] == 'True':
user.is_staff = True
user.is_superuser = True
user.save()
else:
moderator_group = Group.objects.filter(
name='Moderators').first()
if moderator_group:
user.groups.add(moderator_group)
else:
pass
"""
We need to log this or find ways of notifying users that
the moderator group was removed or renamed
"""
else:
user.is_staff = False
user.is_superuser = False
user.save()
return None
return user
| bsd-2-clause | Python |
af4a3ee0dc9afe88428b1c85c03f376c8652bffe | Update query-4.py | ibm-messaging/iot-device-samples,amprasanna/iot-device-samples,amprasanna/iot-device-samples,ibm-messaging/iot-device-samples,amprasanna/iot-device-samples,ibm-messaging/iot-device-samples | historian-cloudant/query-4.py | historian-cloudant/query-4.py | # *****************************************************************************
# Copyright (c) 2016 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Lokesh Haralakatta
# *****************************************************************************
'''
Example of retrieving device events for 2 devices - piCam-2 and piCam-4.
Counting device events for each of the device.
Print details abut the device events count.
'''
import requests
# Define cloudant username
user = "<cloudant-user>"
# Define cloudant password
passwd = "<cloudant-password>"
# Define cloudant host
host = user+".cloudant.com"
# Define cloudant db name
db = "<cloudant-dbname>"
# Define view variable to contain the Map View Name
view = "by-deviceId"
# Frame the URL using above defned variables values
url = 'https://'+host+'/'+db+'/_design/iotp/_view/'+view
# Define args variable to store required parameter values
args={'keys' : ' [ "piCam-2" , "piCam-4" ] ' }
# Invoke HTTP GET request with all required parameters
response = requests.get(url,params=args,auth=(user,passwd))
# Check the response status code, should be 200 to proceed further
if ( response.status_code == 200):
# Get the response data in JSON format
jsonData = response.json()
# Get the device data records which are JSON array of rows with in jsonData
records = jsonData['rows']
# Define variables to store different count values
piCam2Count = 0
piCam4Count = 0
otherCount = 0
totalCount = 0
# For each record, get deviceType, deviceID and devicedata from the records
for record in records:
device = record['value']['deviceId']
if (device == 'piCam-2'):
piCam2Count += 1
elif (device == 'piCam-4'):
piCam4Count += 1
else:
otherCount +=1
totalCount +=1
# Print records count for device piCam-2
print "Device Events for piCam-2: %s" %str(piCam2Count)
# Print records count for device piCam-4
print "Device Events for piCam-4: %s" %str(piCam4Count)
# Print otherCount, should be ZERO
print "Device Events for other devices: %s" %str(otherCount)
# Print totalCount, should be sum of piCam2Count and piCam4Count
print "Total Device Events: %s" %str(totalCount)
else:
print "HTTP GET Failed with Status Code - %s" %(response.status_code)
| # *****************************************************************************
# Copyright (c) 2014 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Lokesh Haralakatta
# *****************************************************************************
'''
Example of retrieving device events for 2 devices - piCam-2 and piCam-4.
Counting device events for each of the device.
Print details abut the device events count.
'''
import requests
# Define cloudant username
user = "<cloudant-user>"
# Define cloudant password
passwd = "<cloudant-password>"
# Define cloudant host
host = user+".cloudant.com"
# Define cloudant db name
db = "<cloudant-dbname>"
# Define view variable to contain the Map View Name
view = "by-deviceId"
# Frame the URL using above defned variables values
url = 'https://'+host+'/'+db+'/_design/iotp/_view/'+view
# Define args variable to store required parameter values
args={'keys' : ' [ "piCam-2" , "piCam-4" ] ' }
# Invoke HTTP GET request with all required parameters
response = requests.get(url,params=args,auth=(user,passwd))
# Check the response status code, should be 200 to proceed further
if ( response.status_code == 200):
# Get the response data in JSON format
jsonData = response.json()
# Get the device data records which are JSON array of rows with in jsonData
records = jsonData['rows']
# Define variables to store different count values
piCam2Count = 0
piCam4Count = 0
otherCount = 0
totalCount = 0
# For each record, get deviceType, deviceID and devicedata from the records
for record in records:
device = record['value']['deviceId']
if (device == 'piCam-2'):
piCam2Count += 1
elif (device == 'piCam-4'):
piCam4Count += 1
else:
otherCount +=1
totalCount +=1
# Print records count for device piCam-2
print "Device Events for piCam-2: %s" %str(piCam2Count)
# Print records count for device piCam-4
print "Device Events for piCam-4: %s" %str(piCam4Count)
# Print otherCount, should be ZERO
print "Device Events for other devices: %s" %str(otherCount)
# Print totalCount, should be sum of piCam2Count and piCam4Count
print "Total Device Events: %s" %str(totalCount)
else:
print "HTTP GET Failed with Status Code - %s" %(response.status_code)
| epl-1.0 | Python |
4ea234ff62edcc1f6191ac2648b01b3427fca7fc | REFACTOR : Removed unixpackage checks for compilation system packages (build-essential) | hitchtest/hitchrabbit | hitchrabbit/rabbit_package.py | hitchrabbit/rabbit_package.py | from hitchtest import HitchPackage, utils
from subprocess import check_output, call
from hitchtest.environment import checks
from os.path import join, exists
from os import makedirs, chdir, chmod
import shutil
import getpass
import stat
import os
ISSUES_URL = "http://github.com/hitchtest/hitchrabbit/issues"
class RabbitPackage(HitchPackage):
VERSIONS = [
"3.5.4", "3.5.3", "3.5.2", "3.5.1", "3.5.0",
"3.4.4", "3.4.3", "3.4.2", "3.4.1", "3.4.0",
"3.3.5", "3.3.4", "3.3.3", "3.3.2", "3.3.1", "3.3.0",
"3.2.4", "3.2.3", "3.2.2", "3.2.1", "3.2.0",
"3.1.5", "3.1.4", "3.1.3", "3.1.2", "3.1.1", "3.1.0",
]
name = "RabbitMQ"
def __init__(self, version, bin_directory=None):
super(RabbitPackage, self).__init__()
self.version = self.check_version(version, self.VERSIONS, ISSUES_URL)
self.directory = join(self.get_build_directory(), "rabbitmq-server-{}".format(self.version))
self.bin_directory = bin_directory
checks.packages(["xsltproc", "erlang-nox", "erlang-dev", "libxml2-dev", "libxslt1-dev", ])
def verify(self):
pass
def build(self):
download_to = join(self.get_downloads_directory(), "rabbitmq-server-{0}.tar.gz".format(self.version))
download_url = "https://www.rabbitmq.com/releases/rabbitmq-server/v{0}/rabbitmq-server-{0}.tar.gz".format(
self.version,
)
utils.download_file(download_to, download_url)
if not exists(self.directory):
utils.extract_archive(download_to, self.get_build_directory())
chdir(self.directory)
call(["make"])
call(["make", "install"])
self.bin_directory = join(self.directory, "scripts")
@property
def server(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "rabbitmq-server")
@property
def ctl(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "rabbitmqctl")
| from hitchtest import HitchPackage, utils
from subprocess import check_output, call
from hitchtest.environment import checks
from os.path import join, exists
from os import makedirs, chdir, chmod
import shutil
import getpass
import stat
import os
ISSUES_URL = "http://github.com/hitchtest/hitchrabbit/issues"
class RabbitPackage(HitchPackage):
VERSIONS = [
"3.5.4", "3.5.3", "3.5.2", "3.5.1", "3.5.0",
"3.4.4", "3.4.3", "3.4.2", "3.4.1", "3.4.0",
"3.3.5", "3.3.4", "3.3.3", "3.3.2", "3.3.1", "3.3.0",
"3.2.4", "3.2.3", "3.2.2", "3.2.1", "3.2.0",
"3.1.5", "3.1.4", "3.1.3", "3.1.2", "3.1.1", "3.1.0",
]
name = "RabbitMQ"
def __init__(self, version, bin_directory=None):
super(RabbitPackage, self).__init__()
self.version = self.check_version(version, self.VERSIONS, ISSUES_URL)
self.directory = join(self.get_build_directory(), "rabbitmq-server-{}".format(self.version))
self.bin_directory = bin_directory
checks.packages(["build-essential", "xsltproc", "erlang-nox", "erlang-dev", "libxml2-dev", "libxslt1-dev", ])
def verify(self):
pass
def build(self):
download_to = join(self.get_downloads_directory(), "rabbitmq-server-{0}.tar.gz".format(self.version))
download_url = "https://www.rabbitmq.com/releases/rabbitmq-server/v{0}/rabbitmq-server-{0}.tar.gz".format(
self.version,
)
utils.download_file(download_to, download_url)
if not exists(self.directory):
utils.extract_archive(download_to, self.get_build_directory())
chdir(self.directory)
call(["make"])
call(["make", "install"])
self.bin_directory = join(self.directory, "scripts")
@property
def server(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "rabbitmq-server")
@property
def ctl(self):
if self.bin_directory is None:
raise RuntimeError("bin_directory not set.")
return join(self.bin_directory, "rabbitmqctl")
| agpl-3.0 | Python |
7016b7bb026e0fe557ca06efa81dace9999e526d | Write a slightly less dumb protocol? | HubbeKing/Hubbot_Twisted | hubbot/Modules/Healthcheck.py | hubbot/Modules/Healthcheck.py | from twisted.protocols import basic
from twisted.internet import protocol, reactor
from hubbot.moduleinterface import ModuleInterface
class HealthcheckProtocol(basic.LineReceiver):
def lineReceived(self, line):
response_body = "All is well. Ish."
self.sendLine("HTTP/1.0 200 OK".encode("UTF-8"))
self.sendLine("Content-Type: text/plain".encode("UTF-8"))
self.sendLine(f"Content-Length: {len(response_body)}\n".encode("UTF-8"))
self.transport.write(response_body)
self.transport.loseConnection()
class Healthcheck(ModuleInterface):
port = 9999
def __init__(self, bot):
self.healthcheck_server = protocol.ServerFactory()
self.healthcheck_server.protocol = HealthcheckProtocol
super().__init__(bot)
def on_load(self):
reactor.listenTCP(self.port, self.healthcheck_server)
def on_unload(self):
reactor.stopListening(self.port)
def help(self, message):
return f"Hosts an HTTP healthcheck server on port {self.port}."
| from twisted.internet import reactor, protocol
from hubbot.moduleinterface import ModuleInterface
class Echo(protocol.Protocol):
"""This is just about the simplest possible protocol"""
def dataReceived(self, data):
"""As soon as any data is received, write it back."""
self.transport.write(data)
class Healthcheck(ModuleInterface):
port = 9999
def __init__(self, bot):
self.healthcheck_server = protocol.ServerFactory()
self.healthcheck_server.protocol = Echo
super().__init__(bot)
def on_load(self):
reactor.listenTCP(self.port, self.healthcheck_server)
def on_unload(self):
reactor.stopListening(self.port)
def help(self, message):
return f"Hosts an HTTP healthcheck server on port {self.port}."
| mit | Python |
6d487b873f7b5f4ac026e923603ef96707bcc0f2 | Add a docstring to the main __init__.py | frostidaho/dynmen | src/dynmen/__init__.py | src/dynmen/__init__.py | # -*- coding: utf-8 -*-
"""
dynmen - A simple python interface to dynamic menus like dmenu or rofi
import dynmen
menu = dynmen.Menu(['dmenu', '-fn', 'Sans-30'])
output = menu({'a': 1, 'b': 2, 'c': 3})
You can make the menu non-blocking by setting:
menu.process_mode = 'futures'
Please see the repository for more examples:
https://github.com/frostidaho/dynmen
"""
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
| # -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
| mit | Python |
86c37485ebfac86c249d1e1c19b58a19a3b9dc5c | Implement is_used for git | dealertrack/flake8-diff,miki725/flake8-diff | flake8diff/vcs/git.py | flake8diff/vcs/git.py | from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class GitVCS(VCSBase):
"""
Git support implementation
"""
name = 'git'
def get_vcs(self):
"""
Get git binary executable path
"""
return _execute('which git', strict=True).strip()
def is_used(self):
"""
Determines if this VCS should be used
"""
try:
self._is_git_repository()
except subprocess.CalledProcessError:
return False
return True
def changed_lines(self, filename):
"""
Get a list of all lines changed by this set of commits.
"""
diff_command = [
'diff',
'--new-line-format="%dn "',
'--unchanged-line-format=""',
'--changed-group-format="%>"'
]
difftool_command = [
self.vcs,
'difftool',
'-y',
'-x',
"'{0}'".format(' '.join(diff_command)),
]
cmd = filter(None, difftool_command + self.commits + [
"--",
filename
])
return _execute(' '.join(cmd)).split()
def changed_files(self):
"""
Return a list of all changed files.
"""
command = filter(None, [
self.vcs,
"diff",
"--name-only",
] + self.commits)
return filter(self.filter_file,
iter(_execute(' '.join(command))
.splitlines()))
def _is_git_repository(self):
return _execute(
'{vcs} status'.format(vcs=self.vcs), strict=True, log_errors=False)
| from __future__ import unicode_literals, print_function
import logging
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class GitVCS(VCSBase):
"""
Git support implementation
"""
name = 'git'
def get_vcs(self):
"""
Get git binary executable path
"""
return _execute('which git', strict=True).strip()
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_lines(self, filename):
"""
Get a list of all lines changed by this set of commits.
"""
diff_command = [
'diff',
'--new-line-format="%dn "',
'--unchanged-line-format=""',
'--changed-group-format="%>"'
]
difftool_command = [
self.vcs,
'difftool',
'-y',
'-x',
"'{0}'".format(' '.join(diff_command)),
]
cmd = filter(None, difftool_command + self.commits + [
"--",
filename
])
return _execute(' '.join(cmd)).split()
def changed_files(self):
"""
Return a list of all changed files.
"""
command = filter(None, [
self.vcs,
"diff",
"--name-only",
] + self.commits)
return filter(self.filter_file,
iter(_execute(' '.join(command))
.splitlines()))
| mit | Python |
715084463ea259897ca23adddc27cfe214605f04 | fix bug | Parkayun/initpy,janusnic/initpy,wzyuliyang/initpy | flask_init/creator.py | flask_init/creator.py | #!/usr/bin/python
# -*- coding:utf-8 -*-
import functools
import inspect
import os
from .exceptions import InvalidFileName, InvalidFolderName, RootPathDoesNotExists
from .templates import blank_template
def name_validator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
func_args = inspect.getcallargs(func, *args, **kwargs)
if func_args.get('validate'):
_filter = "!@#$%^&*()-+=[]{}|\"'."
name = func_args.get('name').replace('.py', '')
if len(list(set(list(name)).intersection(list(_filter)))) > 0 or name[0].isdigit():
exception = 'Invalid'+func.__name__.split('_')[1].title()+'Name'
raise globals()[exception]
return func(*args, **kwargs)
return wrapper
class Creator(object):
root_path = None
def __init__(self, root_path):
if not os.path.isdir(root_path):
raise RootPathDoesNotExists
self.root_path = root_path
@name_validator
def create_file(self, _path, name, template, validate=True):
file_path = os.path.join(_path, name)
with open(file_path, 'w') as _file:
_file.write(template)
@name_validator
def create_folder(self, _path, name, validate=True):
try:
folder_path = os.path.join(_path, name)
os.mkdir(folder_path)
except OSError:
pass
def create_module(self, name):
self.create_folder(self.root_path, name)
module_path = os.path.join(self.root_path, name)
self.create_file(module_path, '__init__.py', blank_template, False)
| #!/usr/bin/python
# -*- coding:utf-8 -*-
import functools
import inspect
import os
from .exceptions import InvalidFileName, InvalidFolderName, RootPathDoesNotExists
from .templates import blank_template
def name_validator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
func_args = inspect.getcallargs(func, *args, **kwargs)
if func_args.get('validate'):
_filter = "!@#$%^&*()-+=[]{}|\"'."
name = func_args.get('name')
if len(list(set(list(name)).intersection(list(_filter)))) > 0 or name[0].isdigit():
exception = 'Invalid'+func.__name__.split('_')[1].title()+'Name'
raise globals()[exception]
return func(*args, **kwargs)
return wrapper
class Creator(object):
root_path = None
def __init__(self, root_path):
if not os.path.isdir(root_path):
raise RootPathDoesNotExists
self.root_path = root_path
@name_validator
def create_file(self, _path, name, template, validate=True):
file_path = os.path.join(_path, name)
with open(file_path, 'w') as _file:
_file.write(template)
@name_validator
def create_folder(self, _path, name, validate=True):
try:
folder_path = os.path.join(_path, name)
os.mkdir(folder_path)
except OSError:
pass
def create_module(self, name):
self.create_folder(self.root_path, name)
module_path = os.path.join(self.root_path, name)
self.create_file(module_path, '__init__.py', blank_template, False)
| mit | Python |
9fdbef053acb3dea9d54ce9e8078ed2393d54560 | Allow "foo/bar" branches to be browsed with "/foo:bar/..." | CodethinkLabs/mustard,CodethinkLabs/mustard | mustard/repository.py | mustard/repository.py | # Copyright (C) 2012 Codethink Limited
import cliapp
import collections
import os
import pygit2
class Repository(object):
def __init__(self, app, dirname):
self.app = app
self.dirname = dirname
self.repo = pygit2.Repository(self.dirname)
self.checked_out = True if self.repo.workdir else False
def history(self, ref):
refs = []
if ref:
sha1 = self.resolve_ref(ref)
else:
sha1 = self.repo.head.hex
for commit in self.repo.walk(sha1, pygit2.GIT_SORT_TIME):
refs.append(commit.hex)
return refs
def diff(self, ref1=None, ref2=None):
if ref1 and ref2:
sha1 = self.resolve_ref(ref1)
sha2 = self.resolve_ref(ref2)
commit1 = self.repo[sha1]
commit2 = self.repo[sha2]
tree1 = commit1.tree
tree2 = commit2.tree
return tree1.diff(tree2).patch
else:
return self.repo.head.tree.diff().patch
def commit(self, ref):
sha1 = self.resolve_ref(ref)
return self.repo[sha1]
def list_tree(self, ref):
sha1 = self.resolve_ref(ref)
queue = collections.deque()
queue.append((self.repo[sha1].tree, ''))
while queue:
(tree, path) = queue.popleft()
for entry in tree:
if entry.filemode == 040000:
subtree = self.repo[entry.oid]
queue.append((subtree, os.path.join(path, entry.name)))
else:
yield os.path.join(path, entry.name)
def cat_file(self, ref, filename):
sha1 = self.resolve_ref(ref)
commit = self.repo[sha1]
entry = commit.tree[filename]
blob = self.repo[entry.oid]
return blob.data
def resolve_ref(self, ref):
ref = ref.replace(':', '/')
return self.repo.revparse_single(ref).hex
| # Copyright (C) 2012 Codethink Limited
import cliapp
import collections
import os
import pygit2
class Repository(object):
def __init__(self, app, dirname):
self.app = app
self.dirname = dirname
self.repo = pygit2.Repository(self.dirname)
self.checked_out = True if self.repo.workdir else False
def history(self, ref):
refs = []
if ref:
sha1 = self.resolve_ref(ref)
else:
sha1 = self.repo.head.hex
for commit in self.repo.walk(sha1, pygit2.GIT_SORT_TIME):
refs.append(commit.hex)
return refs
def diff(self, ref1=None, ref2=None):
if ref1 and ref2:
sha1 = self.resolve_ref(ref1)
sha2 = self.resolve_ref(ref2)
commit1 = self.repo[sha1]
commit2 = self.repo[sha2]
tree1 = commit1.tree
tree2 = commit2.tree
return tree1.diff(tree2).patch
else:
return self.repo.head.tree.diff().patch
def commit(self, ref):
sha1 = self.resolve_ref(ref)
return self.repo[sha1]
def list_tree(self, ref):
sha1 = self.resolve_ref(ref)
queue = collections.deque()
queue.append((self.repo[sha1].tree, ''))
while queue:
(tree, path) = queue.popleft()
for entry in tree:
if entry.filemode == 040000:
subtree = self.repo[entry.oid]
queue.append((subtree, os.path.join(path, entry.name)))
else:
yield os.path.join(path, entry.name)
def cat_file(self, ref, filename):
sha1 = self.resolve_ref(ref)
commit = self.repo[sha1]
entry = commit.tree[filename]
blob = self.repo[entry.oid]
return blob.data
def resolve_ref(self, ref):
return self.repo.revparse_single(ref).hex
| agpl-3.0 | Python |
b2905ee06ded6d5992b52f364370a5508c1c002a | use "simple_query_string" for raw user query | gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo | mygpo/search/index.py | mygpo/search/index.py |
""" Contains code for indexing other objects """
from pyes import ES, QueryStringQuery, FunctionScoreQuery
from pyes.exceptions import IndexAlreadyExistsException, NoServerAvailable
from django.conf import settings
from mygpo.search.json import podcast_to_json
from mygpo.search.models import PodcastResult
import logging
logger = logging.getLogger(__name__)
def get_connection():
""" Create a connection from Django settings """
conn = ES([settings.ELASTICSEARCH_SERVER],
timeout=settings.ELASTICSEARCH_TIMEOUT)
return conn
def index_podcast(sender, **kwargs):
""" Indexes a podcast """
conn = get_connection()
podcast = kwargs['instance']
logger.info('Indexing podcast %s', podcast)
document = podcast_to_json(podcast)
try:
conn.index(document, settings.ELASTICSEARCH_INDEX,
'podcast', podcast.id.hex)
except NoServerAvailable:
logger.exception('Indexing podcast failed')
def create_index():
""" Creates the Elasticsearch index """
conn = get_connection()
logger.info('Creating index %s' % settings.ELASTICSEARCH_INDEX)
try:
conn.indices.create_index(settings.ELASTICSEARCH_INDEX)
except IndexAlreadyExistsException as ex:
logger.info(str(ex))
def search_podcasts(query):
""" Search for podcasts according to 'query' """
conn = get_connection()
q = {
"function_score" : {
"boost_mode": 'replace',
"query" : {
'simple_query_string': {'query': query}
},
"script_score" : {
'script': "_score * (doc.subscribers.value / 4000)"
}
}
}
results = conn.search(query=q, indices=settings.ELASTICSEARCH_INDEX,
doc_types='podcast',
model=lambda conn, doc: PodcastResult.from_doc(doc))
return results
|
""" Contains code for indexing other objects """
from pyes import ES, QueryStringQuery, FunctionScoreQuery
from pyes.exceptions import IndexAlreadyExistsException, NoServerAvailable
from django.conf import settings
from mygpo.search.json import podcast_to_json
from mygpo.search.models import PodcastResult
import logging
logger = logging.getLogger(__name__)
def get_connection():
""" Create a connection from Django settings """
conn = ES([settings.ELASTICSEARCH_SERVER],
timeout=settings.ELASTICSEARCH_TIMEOUT)
return conn
def index_podcast(sender, **kwargs):
""" Indexes a podcast """
conn = get_connection()
podcast = kwargs['instance']
logger.info('Indexing podcast %s', podcast)
document = podcast_to_json(podcast)
try:
conn.index(document, settings.ELASTICSEARCH_INDEX,
'podcast', podcast.id.hex)
except NoServerAvailable:
logger.exception('Indexing podcast failed')
def create_index():
""" Creates the Elasticsearch index """
conn = get_connection()
logger.info('Creating index %s' % settings.ELASTICSEARCH_INDEX)
try:
conn.indices.create_index(settings.ELASTICSEARCH_INDEX)
except IndexAlreadyExistsException as ex:
logger.info(str(ex))
def search_podcasts(query):
""" Search for podcasts according to 'query' """
conn = get_connection()
q = {
"function_score" : {
"boost_mode": 'replace',
"query" : {
'query_string': {'query': query}
},
"script_score" : {
'script': "_score * (doc.subscribers.value / 4000)"
}
}
}
results = conn.search(query=q, indices=settings.ELASTICSEARCH_INDEX,
doc_types='podcast',
model=lambda conn, doc: PodcastResult.from_doc(doc))
return results
| agpl-3.0 | Python |
3fcc04ba2156820de488475ddfd08ef4519627d5 | Update views.py | 02agarwalt/FNGS_website,02agarwalt/FNGS_website,ebridge2/FNGS_website,ebridge2/FNGS_website,02agarwalt/FNGS_website,ebridge2/FNGS_website,ebridge2/FNGS_website | fngs/analyze/views.py | fngs/analyze/views.py | from django.http import HttpResponse, Http404
from django.shortcuts import render, get_object_or_404
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from .models import Submission
from .forms import SubmissionForm
from ndmg.scripts.ndmg_func_pipeline import ndmg_func_pipeline as fngs_pipeline
from ndmg.scripts.ndmg_dwi_pipeline import ndmg_dwi_pipeline as ndmg_pipeline
from django.conf import settings
import time
import importlib
import imp
from ndmg.utils import utils as mgu
from threading import Thread
from multiprocessing import Process
import os
import re
def index(request):
return render(request, 'analyze/index.html')
def submit_job(request):
form = SubmissionForm(request.POST or None, request.FILES or None)
if form.is_valid():
submission = form.save(commit=False)
submission.creds_file = request.FILES['creds_file']
submission.save()
logfile = submission.jobdir + "log.txt"
p = Process(target=submitstuff, args=(submission, logfile))
p.daemon=True
p.start()
p.join()
messages = open(logfile, 'r').readlines()
os.system("rm " + logfile)
context = {
"messages": messages,
"form": form,
}
return render(request, 'analyze/create_submission.html', context)
context = {
"form": form,
}
return render(request, 'analyze/create_submission.html', context)
def submitstuff(submission, logfile):
if submission.state == 'participant':
cmd = "ndmg_cloud participant --bucket " + submission.bucket + " --bidsdir " + submission.bidsdir + " --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url + " --modality " + submission.modality + " --stc " + submission.slice_timing
if submission.state == 'group':
cmd = "ndmg_cloud group --bucket " + submission.bucket + " --bidsdir " + submission.bidsdir + " --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url + " --modality " + submission.modality + " --dataset " + submission.datasetname
cmd = cmd + " > " + logfile
os.system(cmd)
| from django.http import HttpResponse, Http404
from django.shortcuts import render, get_object_or_404
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from .models import Submission
from .forms import SubmissionForm
from ndmg.scripts.ndmg_func_pipeline import ndmg_func_pipeline as fngs_pipeline
from ndmg.scripts.ndmg_dwi_pipeline import ndmg_dwi_pipeline as ndmg_pipeline
from django.conf import settings
import time
import importlib
import imp
from ndmg.utils import utils as mgu
from threading import Thread
from multiprocessing import Process
import os
import re
def index(request):
return render(request, 'analyze/index.html')
def submit_job(request):
form = SubmissionForm(request.POST or None, request.FILES or None)
if form.is_valid():
submission = form.save(commit=False)
submission.creds_file = request.FILES['creds_file']
submission.save()
logfile = submission.jobdir + "log.txt"
p = Process(target=submitstuff, args=(submission, logfile))
p.daemon=True
p.start()
p.join()
messages = open(logfile, 'r').readlines()
os.system("rm " + logfile)
context = {
"messages": messages,
"form": form,
}
return render(request, 'analyze/create_submission.html', context)
context = {
"form": form,
}
return render(request, 'analyze/create_submission.html', context)
def submitstuff(submission, logfile):
if submission.state == 'participant':
cmd = "ndmg_cloud participant --bucket " + submission.bucket + " --bidsdir " + submission.bidsdir + " --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url + " --modality " + submission.modality + " --stc " + submission.slice_timing
if submission.state == 'group':
cmd = "ndmg_cloud group --bucket " + submission.bucket + " --bidsdir " + submission.bidsdir + " --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url + " --modality " + submission.modality + " --dataset " + submission.datasetname
if submission.state == 'status':
cmd = "ndmg_cloud status --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url
if submission.state == 'kill':
cmd = "ndmg_cloud kill --jobdir " + submission.jobdir + " --credentials " + submission.creds_file.url
cmd = cmd + " > " + logfile
os.system(cmd)
| apache-2.0 | Python |
d3fbce0c8e73513cc29c5c15be6575ba708fec0f | Add copyright statement | CarnegieHall/linked-data | get_geoInfo.py | get_geoInfo.py | # !/usr/local/bin/python3.4.2
# ----Copyright (c) 2017 Carnegie Hall | The MIT License (MIT)----
# ----For the full license terms, please visit https://github.com/CarnegieHall/linked-data/blob/master/LICENSE----
## Argument[0] is script to run
## Argument[1] is path to entityDict
import httplib2
import json
import lxml
import os
import re
import sys
import time
from bs4 import BeautifulSoup
from rdflib import Graph, Literal, Namespace, URIRef
from rdflib.namespace import RDFS
from rdflib.plugins.serializers.nt import NTSerializer
gPlaces = Graph()
gn = Namespace('http://www.geonames.org/ontology#')
wgs84_pos = Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
filePath_1 = sys.argv[1]
with open(filePath_1, 'rU') as f1:
entities = json.load(f1)
for entity in entities:
geobirth = entities[entity]['geobirth']
if geobirth:
uri = ''.join([geobirth, 'about.rdf'])
h = httplib2.Http()
resp, rdf_doc = h.request(uri, "GET")
time.sleep(1)
soup = BeautifulSoup(rdf_doc, "xml")
for tag in soup.find_all("name"):
name = tag.text
gPlaces.add( (URIRef(geobirth), RDFS.label, Literal(name)) )
for tag in soup.find_all("parentCountry"):
country = tag.attrs['rdf:resource']
gPlaces.add( (URIRef(geobirth), gn.parentCountry, URIRef(country) ) )
for tag in soup.find_all("lat"):
lat = tag.text
gPlaces.add( (URIRef(geobirth), wgs84_pos.lat, Literal(lat)) )
for tag in soup.find_all("long"):
long = tag.text
gPlaces.add( (URIRef(geobirth), wgs84_pos.long, Literal(long)) )
places_graph_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'Graphs', 'placesGraph.nt')
gPlaces.bind("gn", gn)
gPlaces.bind("rdfs", RDFS)
gPlaces.bind("wgs84_pos", wgs84_pos)
gPlaces = gPlaces.serialize(destination=places_graph_path, format='nt')
print('Finished getting geo info')
| # !/usr/local/bin/python3.4
## Argument[0] is script to run
## Argument[1] is path to entityDict
import httplib2
import json
import lxml
import os
import re
import sys
import time
from bs4 import BeautifulSoup
from rdflib import Graph, Literal, Namespace, URIRef
from rdflib.namespace import RDFS
from rdflib.plugins.serializers.nt import NTSerializer
gPlaces = Graph()
gn = Namespace('http://www.geonames.org/ontology#')
wgs84_pos = Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
filePath_1 = sys.argv[1]
with open(filePath_1, 'rU') as f1:
entities = json.load(f1)
for entity in entities:
geobirth = entities[entity]['geobirth']
if geobirth:
uri = ''.join([geobirth, 'about.rdf'])
h = httplib2.Http()
resp, rdf_doc = h.request(uri, "GET")
time.sleep(1)
soup = BeautifulSoup(rdf_doc, "xml")
for tag in soup.find_all("name"):
name = tag.text
gPlaces.add( (URIRef(geobirth), RDFS.label, Literal(name)) )
for tag in soup.find_all("parentCountry"):
country = tag.attrs['rdf:resource']
gPlaces.add( (URIRef(geobirth), gn.parentCountry, URIRef(country) ) )
for tag in soup.find_all("lat"):
lat = tag.text
gPlaces.add( (URIRef(geobirth), wgs84_pos.lat, Literal(lat)) )
for tag in soup.find_all("long"):
long = tag.text
gPlaces.add( (URIRef(geobirth), wgs84_pos.long, Literal(long)) )
places_graph_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'Graphs', 'placesGraph.nt')
gPlaces.bind("gn", gn)
gPlaces.bind("rdfs", RDFS)
gPlaces.bind("wgs84_pos", wgs84_pos)
gPlaces = gPlaces.serialize(destination=places_graph_path, format='nt')
print('Finished getting geo info')
| mit | Python |
2262399bf8e8501547ff16dfa1ef95818437df35 | add rea v0.2 to table defs | openego/ego.io,openego/ego.io | egoio/db_tables/calc_ego_re.py | egoio/db_tables/calc_ego_re.py | # coding: utf-8
from sqlalchemy import BigInteger, Column, Float, Integer, SmallInteger, String, \
Table, Text, text, Numeric
from geoalchemy2.types import Geometry
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
class EgoDeuDea(Base):
__tablename__ = 'ego_deu_dea'
__table_args__ = {'schema': 'calc_ego_re'}
id = Column(Integer, primary_key=True)
sort = Column(Integer)
electrical_capacity = Column(Numeric)
generation_type = Column(Text)
generation_subtype = Column(String)
voltage_level = Column(String)
voltage_type = Column(String(2))
subst_id = Column(Integer)
la_id = Column(Integer)
geom_line = Column(Geometry('LINESTRING', 3035), index=True)
geom = Column(Geometry('POINT', 3035), index=True)
geom_new = Column(Geometry('POINT', 3035), index=True)
flag = Column(String)
class EgoDeaAllocation(Base):
__tablename__ = 'ego_dea_allocation'
__table_args__ = {'schema': 'model_draft'}
id = Column(Integer, primary_key=True)
sort = Column(Integer)
electrical_capacity = Column(Numeric)
generation_type = Column(Text)
generation_subtype = Column(String)
voltage_level = Column(String)
postcode = Column(String)
subst_id = Column(Integer)
la_id = Column(Integer)
geom_line = Column(Geometry('LINESTRING', 3035), index=True)
geom = Column(Geometry('POINT', 3035), index=True)
geom_new = Column(Geometry('POINT', 3035), index=True)
flag = Column(String)
class EgoSupplyRea(Base):
__tablename__ = 'ego_supply_rea'
__table_args__ = {'schema': 'model_draft'}
id = Column(Integer, primary_key=True)
sort = Column(Integer)
electrical_capacity = Column(Numeric)
generation_type = Column(Text)
generation_subtype = Column(String)
voltage_level = Column(SmallInteger)
postcode = Column(String)
subst_id = Column(Integer)
la_id = Column(Integer)
geom_line = Column(Geometry('LINESTRING', 3035), index=True)
geom = Column(Geometry('POINT', 3035), index=True)
geom_new = Column(Geometry('POINT', 3035), index=True)
flag = Column(String)
| # coding: utf-8
from sqlalchemy import BigInteger, Column, Float, Integer, SmallInteger, String, \
Table, Text, text, Numeric
from geoalchemy2.types import Geometry
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
class EgoDeuDea(Base):
__tablename__ = 'ego_deu_dea'
__table_args__ = {'schema': 'calc_ego_re'}
id = Column(Integer, primary_key=True)
sort = Column(Integer)
electrical_capacity = Column(Numeric)
generation_type = Column(Text)
generation_subtype = Column(String)
voltage_level = Column(String)
voltage_type = Column(String(2))
subst_id = Column(Integer)
la_id = Column(Integer)
geom_line = Column(Geometry('LINESTRING', 3035), index=True)
geom = Column(Geometry('POINT', 3035), index=True)
geom_new = Column(Geometry('POINT', 3035), index=True)
flag = Column(String)
class EgoDeaAllocation(Base):
__tablename__ = 'ego_dea_allocation'
__table_args__ = {'schema': 'model_draft'}
id = Column(Integer, primary_key=True)
sort = Column(Integer)
electrical_capacity = Column(Numeric)
generation_type = Column(Text)
generation_subtype = Column(String)
voltage_level = Column(String)
postcode = Column(String)
subst_id = Column(Integer)
la_id = Column(Integer)
geom_line = Column(Geometry('LINESTRING', 3035), index=True)
geom = Column(Geometry('POINT', 3035), index=True)
geom_new = Column(Geometry('POINT', 3035), index=True)
flag = Column(String)
| agpl-3.0 | Python |
2bf16e3512ffa20e9da583123b2face80fac3ab3 | remove queue urls from dev | bryanph/OIPA,VincentVW/OIPA,bryanph/OIPA,tokatikato/OIPA,catalpainternational/OIPA,catalpainternational/OIPA,openaid-IATI/OIPA,catalpainternational/OIPA,bryanph/OIPA,tokatikato/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,VincentVW/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,VincentVW/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,catalpainternational/OIPA,VincentVW/OIPA,bryanph/OIPA,tokatikato/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,tokatikato/OIPA | OIPA/OIPA/urls.py | OIPA/OIPA/urls.py | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from api.v3.urls import api_v3_docs
admin.autodiscover()
urlpatterns = patterns(
'',
# (r'^admin/queue/', include('django_rq.urls')),
url(r'^admin/task_queue/', include('task_queue.urls')),
# url(r'^admin/geoadmin/', 'geodata.views.geoadmin'),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('api.urls')),
url(r'^$', api_v3_docs),
url(r'', api_v3_docs),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}),
) | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from api.v3.urls import api_v3_docs
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^admin/queue/', include('django_rq.urls')),
url(r'^admin/task_queue/', include('task_queue.urls')),
# url(r'^admin/geoadmin/', 'geodata.views.geoadmin'),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('api.urls')),
url(r'^$', api_v3_docs),
url(r'', api_v3_docs),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}),
) | agpl-3.0 | Python |
8a5486efc58bc26096a254e8a4d0316e2054ff5e | Split long line | fusionbox/django-authtools | authtools/backends.py | authtools/backends.py | from django.contrib.auth.backends import ModelBackend
from django import VERSION as DJANGO_VERSION
class CaseInsensitiveUsernameFieldBackendMixin(object):
"""
This authentication backend assumes that usernames are email addresses and simply
lowercases a username before an attempt is made to authenticate said username using a
superclass's authenticate method. This superclass should be either a user-defined
authentication backend, or a Django-provided authentication backend (e.g., ModelBackend).
Example usage:
See CaseInsensitiveUsernameFieldBackend, below.
NOTE:
A word of caution. Use of this backend presupposes a way to ensure that users cannot
create usernames that differ only in case (e.g., joe@test.org and JOE@test.org). It is
advised that you use this backend in conjunction with the
CaseInsensitiveUsernameFieldCreationForm provided in the forms module.
"""
if DJANGO_VERSION[:2] >= (1, 11): # django 1.11 and above require a request positional arg
def authenticate(self, request, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveUsernameFieldBackendMixin, self).authenticate(
request,
username=username,
password=password,
**kwargs
)
else:
def authenticate(self, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveUsernameFieldBackendMixin, self).authenticate(
username=username,
password=password,
**kwargs
)
class CaseInsensitiveUsernameFieldModelBackend(
CaseInsensitiveUsernameFieldBackendMixin,
ModelBackend):
pass
# alias for the old name for backwards-compatability
CaseInsensitiveEmailBackendMixin = CaseInsensitiveUsernameFieldBackendMixin
CaseInsensitiveEmailModelBackend = CaseInsensitiveUsernameFieldModelBackend
| from django.contrib.auth.backends import ModelBackend
from django import VERSION as DJANGO_VERSION
class CaseInsensitiveUsernameFieldBackendMixin(object):
"""
This authentication backend assumes that usernames are email addresses and simply
lowercases a username before an attempt is made to authenticate said username using a
superclass's authenticate method. This superclass should be either a user-defined
authentication backend, or a Django-provided authentication backend (e.g., ModelBackend).
Example usage:
See CaseInsensitiveUsernameFieldBackend, below.
NOTE:
A word of caution. Use of this backend presupposes a way to ensure that users cannot
create usernames that differ only in case (e.g., joe@test.org and JOE@test.org). It is
advised that you use this backend in conjunction with the
CaseInsensitiveUsernameFieldCreationForm provided in the forms module.
"""
if DJANGO_VERSION[:2] >= (1, 11): # django 1.11 and above require a request positional arg
def authenticate(self, request, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveUsernameFieldBackendMixin, self).authenticate(
request,
username=username,
password=password,
**kwargs
)
else:
def authenticate(self, username=None, password=None, **kwargs):
if username is not None:
username = username.lower()
return super(CaseInsensitiveUsernameFieldBackendMixin, self).authenticate(
username=username,
password=password,
**kwargs
)
class CaseInsensitiveUsernameFieldModelBackend(CaseInsensitiveUsernameFieldBackendMixin, ModelBackend):
pass
# alias for the old name for backwards-compatability
CaseInsensitiveEmailBackendMixin = CaseInsensitiveUsernameFieldBackendMixin
CaseInsensitiveEmailModelBackend = CaseInsensitiveUsernameFieldModelBackend
| bsd-2-clause | Python |
7cc5874b77d848dca26ebf6f0df96698fb7cccac | bump version for branch update | gopythongo/gopythongo,gopythongo/gopythongo | src/py/gopythongo/__init__.py | src/py/gopythongo/__init__.py | # -* coding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
version = "0.7.5.dev0"
program_version = "GoPythonGo %s" % version
| # -* coding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
version = "0.7.4.dev0"
program_version = "GoPythonGo %s" % version
| mpl-2.0 | Python |
97c2056e4a0511b593d0646359e9a28d72d88bd3 | Change some code in sanic aiomysql code | lixxu/sanic,jrocketfingers/sanic,yunstanford/sanic,ai0/sanic,lixxu/sanic,ashleysommer/sanic,Tim-Erwin/sanic,ai0/sanic,channelcat/sanic,ashleysommer/sanic,channelcat/sanic,r0fls/sanic,lixxu/sanic,r0fls/sanic,yunstanford/sanic,jrocketfingers/sanic,yunstanford/sanic,lixxu/sanic,ashleysommer/sanic,yunstanford/sanic,channelcat/sanic,Tim-Erwin/sanic,channelcat/sanic | examples/sanic_aiomysql_with_global_pool.py | examples/sanic_aiomysql_with_global_pool.py | # encoding: utf-8
"""
You need the aiomysql
"""
import asyncio
import os
import aiomysql
from sanic import Sanic
from sanic.response import json
database_name = os.environ['DATABASE_NAME']
database_host = os.environ['DATABASE_HOST']
database_user = os.environ['DATABASE_USER']
database_password = os.environ['DATABASE_PASSWORD']
app = Sanic()
@app.listener("before_server_start")
async def get_pool(app, loop):
"""
the first param is the global instance ,
so we can store our connection pool in it .
and it can be used by different request
:param args:
:param kwargs:
:return:
"""
app.pool = {
"aiomysql": await aiomysql.create_pool(host=database_host, user=database_user, password=database_password,
db=database_name,
maxsize=5)}
async with app.pool['aiomysql'].acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('DROP TABLE IF EXISTS sanic_polls')
await cur.execute("""CREATE TABLE sanic_polls (
id serial primary key,
question varchar(50),
pub_date timestamp
);""")
for i in range(0, 100):
await cur.execute("""INSERT INTO sanic_polls
(id, question, pub_date) VALUES ({}, {}, now())
""".format(i, i))
@app.route("/")
async def test():
result = []
data = {}
async with app.pool['aiomysql'].acquire() as conn:
async with conn.cursor() as cur:
await cur.execute("SELECT question, pub_date FROM sanic_polls")
async for row in cur:
result.append({"question": row[0], "pub_date": row[1]})
if result or len(result) > 0:
data['data'] = res
return json(data)
if __name__ == '__main__':
app.run(host="127.0.0.1", workers=4, port=12000)
| # encoding: utf-8
"""
You need the aiomysql
"""
import asyncio
import os
import aiomysql
import uvloop
from sanic import Sanic
from sanic.response import json
database_name = os.environ['DATABASE_NAME']
database_host = os.environ['DATABASE_HOST']
database_user = os.environ['DATABASE_USER']
database_password = os.environ['DATABASE_PASSWORD']
app = Sanic()
@app.listener("before_server_start")
async def get_pool(app, loop):
"""
the first param is the global instance ,
so we can store our connection pool in it .
and it can be used by different request
:param args:
:param kwargs:
:return:
"""
app.pool = {
"aiomysql": await aiomysql.create_pool(host=database_host, user=database_user, password=database_password,
db=database_name,
maxsize=5)}
async with app.pool['aiomysql'].acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('DROP TABLE IF EXISTS sanic_polls')
await cur.execute("""CREATE TABLE sanic_polls (
id serial primary key,
question varchar(50),
pub_date timestamp
);""")
for i in range(0, 100):
await cur.execute("""INSERT INTO sanic_polls
(id, question, pub_date) VALUES ({}, {}, now())
""".format(i, i))
@app.route("/")
async def test():
result = []
data = {}
async with app.pool['aiomysql'].acquire() as conn:
async with conn.cursor() as cur:
await cur.execute("SELECT question, pub_date FROM sanic_polls")
async for row in cur:
result.append({"question": row[0], "pub_date": row[1]})
if result or len(result) > 0:
data['data'] = res
return json(data)
if __name__ == '__main__':
app.run(host="127.0.0.1", workers=4, port=12000)
| mit | Python |
99f1bd9b7a02415b2e0ccc2e36144bca79b96386 | Fix migrations dependency | caesar2164/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform | lms/djangoapps/bulk_email/migrations/0003_config_model_feature_flag.py | lms/djangoapps/bulk_email/migrations/0003_config_model_feature_flag.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('bulk_email', '0002_data__load_course_email_template'),
('bulk_email', '0003_alter_field_to_option'),
]
operations = [
migrations.CreateModel(
name='BulkEmailFlag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('require_course_email_auth', models.BooleanField(default=True)),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('bulk_email', '0002_data__load_course_email_template'),
]
operations = [
migrations.CreateModel(
name='BulkEmailFlag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('require_course_email_auth', models.BooleanField(default=True)),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
),
]
| agpl-3.0 | Python |
5560b0c055760cd1d06c533b7f83a563633cb6fc | Allow running individual tests from runtests.py | tsouvarev/django-money,recklessromeo/django-money,iXioN/django-money,recklessromeo/django-money,tsouvarev/django-money,iXioN/django-money,rescale/django-money,AlexRiina/django-money | runtests.py | runtests.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import sys
from django.conf import settings
settings.configure(
DEBUG=True,
# AUTH_USER_MODEL='testdata.CustomUser',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
SITE_ID=1,
ROOT_URLCONF=None,
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'djmoney',
'djmoney.tests.testapp',
'south',
'reversion',
),
USE_TZ=True,
USE_L10N=True,
SOUTH_TESTS_MIGRATE=True,
)
import moneyed
from moneyed.localization import _FORMATTER, DEFAULT
from decimal import ROUND_HALF_EVEN
_FORMATTER.add_sign_definition('pl_PL', moneyed.PLN, suffix=' zł')
_FORMATTER.add_sign_definition(DEFAULT, moneyed.PLN, suffix=' zł')
_FORMATTER.add_formatting_definition(
"pl_PL", group_size=3, group_separator=" ", decimal_point=",",
positive_sign="", trailing_positive_sign="",
negative_sign="-", trailing_negative_sign="",
rounding_method=ROUND_HALF_EVEN)
from django.test.simple import DjangoTestSuiteRunner
test_runner = DjangoTestSuiteRunner(verbosity=1, failfast=False)
# If you use South for migrations, uncomment this to monkeypatch
# syncdb to get migrations to run.
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
if len(sys.argv) > 1:
tests = sys.argv[1:]
else:
tests = ['djmoney']
failures = test_runner.run_tests(tests)
if failures:
sys.exit(failures)
## Run py.tests
# Compatibility testing patches on the py-moneyed
import pytest
pytest.main()
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import sys
from django.conf import settings
settings.configure(
DEBUG=True,
# AUTH_USER_MODEL='testdata.CustomUser',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
SITE_ID=1,
ROOT_URLCONF=None,
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'djmoney',
'djmoney.tests.testapp',
'south',
'reversion',
),
USE_TZ=True,
USE_L10N=True,
SOUTH_TESTS_MIGRATE=True,
)
import moneyed
from moneyed.localization import _FORMATTER, DEFAULT
from decimal import ROUND_HALF_EVEN
_FORMATTER.add_sign_definition('pl_PL', moneyed.PLN, suffix=' zł')
_FORMATTER.add_sign_definition(DEFAULT, moneyed.PLN, suffix=' zł')
_FORMATTER.add_formatting_definition(
"pl_PL", group_size=3, group_separator=" ", decimal_point=",",
positive_sign="", trailing_positive_sign="",
negative_sign="-", trailing_negative_sign="",
rounding_method=ROUND_HALF_EVEN)
from django.test.simple import DjangoTestSuiteRunner
test_runner = DjangoTestSuiteRunner(verbosity=1, failfast=False)
# If you use South for migrations, uncomment this to monkeypatch
# syncdb to get migrations to run.
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
failures = test_runner.run_tests(['djmoney', ])
if failures:
sys.exit(failures)
## Run py.tests
# Compatibility testing patches on the py-moneyed
import pytest
pytest.main()
| bsd-3-clause | Python |
cd4e3f3f42cf570bd2280bfa5067c64638dc6c96 | add directory iterator | aureooms/sak,aureooms/sak | sak/iter.py | sak/iter.py | import lib.args, lib.sys, fileinput, itertools, getpass, lib.file
# polyfill for generator zip function
if hasattr( itertools , "izip" ) :
_zip = itertools.izip
else :
_zip = zip
def directories ( callable = None , iterable = None ) :
iterable = lib.args.listify( iterable )
callable = lib.args.listify( callable )
callable = list( itertools.chain( *map( lib.args.split , callable ) ) )
if not iterable :
iterable = ( s[:-1] for s in fileinput.input( [] ) )
for item in iterable :
lib.sys.call( callable , stddefault = None , cwd = item )
def imap ( callable = None , iterable = None ) :
iterable = lib.args.listify( iterable )
callable = lib.args.listify( callable )
callable = list( itertools.chain( *map( lib.args.split , callable ) ) )
if not iterable :
iterable = ( s[:-1] for s in fileinput.input( [] ) )
for item in iterable :
lib.sys.call( [ arg.format( item ) for arg in callable ] , stddefault = None )
def starmap ( callable = None , iterable = None ) :
iterable = lib.args.listify( iterable )
callable = lib.args.listify( callable )
callable = list( itertools.chain( *map( lib.args.split , callable ) ) )
if not iterable :
iterable = ( s[:-1] for s in fileinput.input( [] ) )
for item in iterable :
argv = lib.args.split( item )
args , kwargs = lib.args.parse( argv , [] , {} )
lib.sys.call( [ arg.format( *args , **kwargs ) for arg in callable ] , stddefault = None )
@lib.args.convert( n = int )
def repeat ( item , n = -1 ) :
"""
Repeat given string n times. If n is negative then repeat given string an infinite number of times.
"""
if n < 0 :
args = [ None ]
else :
args = [ None , n ]
for _ in itertools.repeat( *args ) :
print( item )
@lib.args.convert( n = int )
def password ( n = -1 ) :
item = getpass.getpass('Password to repeat : ')
repeat( item , n )
def izip ( callables = None , sep = " " ) :
callables = lib.args.listify( callables )
callables = map( lib.args.split , callables )
iterables = [ lib.file.lineiterator( lib.sys.popen( callable ).stdout ) for callable in callables ]
for t in _zip( *iterables ) :
print ( *t , sep = sep )
| import lib.args, lib.sys, fileinput, itertools, getpass, lib.file
# polyfill for generator zip function
if hasattr( itertools , "izip" ) :
_zip = itertools.izip
else :
_zip = zip
def imap ( callable = None , iterable = None ) :
iterable = lib.args.listify( iterable )
callable = lib.args.listify( callable )
callable = list( itertools.chain( *map( lib.args.split , callable ) ) )
if not iterable :
iterable = ( s[:-1] for s in fileinput.input( [] ) )
for item in iterable :
lib.sys.call( [ arg.format( item ) for arg in callable ] , stddefault = None )
def starmap ( callable = None , iterable = None ) :
iterable = lib.args.listify( iterable )
callable = lib.args.listify( callable )
callable = list( itertools.chain( *map( lib.args.split , callable ) ) )
if not iterable :
iterable = ( s[:-1] for s in fileinput.input( [] ) )
for item in iterable :
argv = lib.args.split( item )
args , kwargs = lib.args.parse( argv , [] , {} )
lib.sys.call( [ arg.format( *args , **kwargs ) for arg in callable ] , stddefault = None )
@lib.args.convert( n = int )
def repeat ( item , n = -1 ) :
"""
Repeat given string n times. If n is negative then repeat given string an infinite number of times.
"""
if n < 0 :
args = [ None ]
else :
args = [ None , n ]
for _ in itertools.repeat( *args ) :
print( item )
@lib.args.convert( n = int )
def password ( n = -1 ) :
item = getpass.getpass('Password to repeat : ')
repeat( item , n )
def izip ( callables = None , sep = " " ) :
callables = lib.args.listify( callables )
callables = map( lib.args.split , callables )
iterables = [ lib.file.lineiterator( lib.sys.popen( callable ).stdout ) for callable in callables ]
for t in _zip( *iterables ) :
print ( *t , sep = sep )
| agpl-3.0 | Python |
63775ca941ad1925a5451c7c4b63cb7eae701fa8 | Remove qnet.cc shorthand module | mabuchilab/QNET | qnet/__init__.py | qnet/__init__.py | # This file is part of QNET.
#
# QNET is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# QNET is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with QNET. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2012-2017, QNET authors (see AUTHORS file)
#
###########################################################################
"""The :mod:`qnet` package exposes all of QNET's functionality for easy
interactive or programmative use.
Specifically, the subpackages for the following parts of QNET are directly
available:
* Symbolic quantum and circuit algebra as :mod:`qnet.algebra`
* Printers for symbolic symbolic expressions as :mod:`qnet.printing`
* Conversion utilities to Sympy and Numpy as :mod:`qnet.convert`
* Miscellaneous additional tools, as :mod:`qnet.misc`
For interactive usage, the package should be initialized as follows::
>>> import qnet
>>> qnet.init_printing()
Note that most subpackages in turn expose their functionality through a "flat"
API. That is, instead of
.. code-block:: python
from qnet.algebra.operator_algebra import LocalOperator
from qnet.circuit_components.displace_cc import Displace
the two objects may be more succintly imported from a higher level namespace as
.. code-block:: python
from qnet.algebra import LocalOperator, Displace
In an interactive context (and only there!), a star import such as
.. code-block:: python
from qnet.algebra import *
may be useful.
The flat API is defined via the `__all__ <https://docs.python.org/3.5/tutorial/modules.html#importing-from-a-package>`_
attribute of each subpackage (see each package's documentation).
Internally, the flat API (or star imports) must never be used.
"""
import qnet.algebra
import qnet.circuit_components
import qnet.convert
import qnet.misc
import qnet.printing
from .printing import init_printing
__all__ = ['init_printing']
__version__ = "2.0.0-dev"
| # This file is part of QNET.
#
# QNET is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# QNET is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with QNET. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2012-2017, QNET authors (see AUTHORS file)
#
###########################################################################
"""The :mod:`qnet` package exposes all of QNET's functionality for easy
interactive or programmative use.
Specifically, the subpackages for the following parts of QNET are directly
available:
* Symbolic quantum and circuit algebra as :mod:`qnet.algebra`
* Printers for symbolic symbolic expressions as :mod:`qnet.printing`
* A library of circuit components as :mod:`qnet.cc <qnet.circuit_components>`
(as a shorthand for the :mod:`~qnet.circuit_components` subpackage)
* Conversion utilities to Sympy and Numpy as :mod:`qnet.convert`
* Miscellaneous additional tools, as :mod:`qnet.misc`
For interactive usage, the package should be initialized as follows::
>>> import qnet
>>> qnet.init_printing()
Note that most subpackages in turn expose their functionality through a "flat"
API. That is, instead of
.. code-block:: python
from qnet.algebra.operator_algebra import LocalOperator
from qnet.circuit_components.displace_cc import Displace
the two objects may be more succintly imported from a higher level namespace as
.. code-block:: python
import qnet # required for qnet.cc to work
from qnet.algebra import LocalOperator
from qnet.cc import Displace
In an interactive context (and only there!), a star import such as
.. code-block:: python
from qnet.algebra import *
may be useful.
The flat API is defined via the `__all__ <https://docs.python.org/3.5/tutorial/modules.html#importing-from-a-package>`_
attribute of each subpackage (see each package's documentation).
Internally, the flat API (or star imports) must never be used.
"""
import qnet.algebra
import qnet.circuit_components
import qnet.circuit_components as cc
import qnet.convert
import qnet.misc
import qnet.printing
from .printing import init_printing
__all__ = ['init_printing']
__version__ = "2.0.0-dev"
| mit | Python |
71c3714fc46dfefa6f0875e1a0a8781b6aca5a8d | Clean up fourier samples. | mwhoffman/pygp | pygp/inference/_fourier.py | pygp/inference/_fourier.py | """
Approximations to the GP using random Fourier features.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
import scipy.linalg as sla
# local imports
from ..utils.random import rstate
from ..utils.exceptions import ModelError
from ..likelihoods import Gaussian
# exported symbols
__all__ = ['FourierSample']
class FourierSample(object):
def __init__(self, N, likelihood, kernel, mean, X, y, rng=None):
# if given a seed or an instantiated RandomState make sure that we use
# it here, but also within the sample_spectrum code.
rng = rstate(rng)
if not isinstance(likelihood, Gaussian):
raise ModelError('Fourier samples only defined for Gaussian'
'likelihoods')
# this randomizes the feature.
W, alpha = kernel.sample_spectrum(N, rng)
self._W = W
self._b = rng.rand(N) * 2 * np.pi
self._a = np.sqrt(2 * alpha / N)
self._mean = mean
self._theta = None
if X is not None:
# evaluate the features
Z = np.dot(X, self._W.T) + self._b
Phi = np.cos(Z) * self._a
# get the components for regression
A = np.dot(Phi.T, Phi) + likelihood.s2 * np.eye(Phi.shape[1])
R = sla.cholesky(A)
r = y - mean
p = np.sqrt(likelihood.s2) * rng.randn(N)
# FIXME: we can do a smarter update here when the number of points
# is less than the number of features.
self._theta = sla.cho_solve((R, False), np.dot(Phi.T, r))
self._theta += sla.solve_triangular(R, p)
else:
self._theta = rng.randn(N)
def get(self, X):
"""
Evaluate the function at a collection of points.
"""
X = np.array(X, ndmin=2, copy=False)
Z = np.dot(X, self._W.T) + self._b
Phi = np.cos(Z) * self._a
F = np.dot(Phi, self._theta) + self._mean
return F
def __call__(self, x):
return self.get(x)[0]
| """
Approximations to the GP using random Fourier features.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
import scipy.linalg as sla
# local imports
from ..utils.random import rstate
from ..utils.exceptions import ModelError
from ..likelihoods import Gaussian
# exported symbols
__all__ = ['FourierSample']
class FourierSample(object):
def __init__(self, N, likelihood, kernel, mean, X, y, rng=None):
# if given a seed or an instantiated RandomState make sure that we use
# it here, but also within the sample_spectrum code.
rng = rstate(rng)
if not isinstance(likelihood, Gaussian):
raise ModelError('Fourier samples only defined for Gaussian'
'likelihoods')
# this randomizes the feature.
W, alpha = kernel.sample_spectrum(N, rng)
self._W = W
self._b = rng.rand(N) * 2 * np.pi
self._a = np.sqrt(2 * alpha / N)
self._mean = mean
self._theta = None
if X is not None:
Phi = self.phi(X)
A = np.dot(Phi.T, Phi) + likelihood.s2 * np.eye(Phi.shape[1])
R = sla.cholesky(A)
r = y - mean
# FIXME: we can do a smarter update here when the number of points
# is less than the number of features.
rnd = np.sqrt(likelihood.s2) * rng.randn(N)
self._theta = sla.cho_solve((R, False), np.dot(Phi.T, r))
self._theta += sla.solve_triangular(R, rnd)
else:
self._theta = rng.randn(N)
def phi(self, X):
"""
Evaluate the random features.
"""
# x is n-by-D,
# W is N-by-D,
# Phi, the return value, should be n-by-N.
rnd = np.dot(X, self._W.T) + self._b
Phi = np.cos(rnd) * self._a
return Phi
def get(self, X):
"""
Evaluate the function at a collection of points.
"""
Phi = self.phi(np.array(X, ndmin=2, copy=False))
return self._mean + np.dot(Phi, self._theta)
def __call__(self, x):
return self.get(x)[0]
| bsd-2-clause | Python |
d86d9710c71d95311a8039ed6c194ca6c4962210 | remove the stuff that we may or may not want to implement | Fizzadar/pyinfra,Fizzadar/pyinfra | pyinfra/facts/win_files.py | pyinfra/facts/win_files.py | from __future__ import unicode_literals
import re
from pyinfra.api.facts import FactBase
from .util.win_files import parse_win_ls_output
class WinFile(FactBase):
# Types must match WIN_FLAG_TO_TYPE in .util.win_files.py
type = 'file'
shell = 'ps'
def command(self, name):
self.name = name
return 'ls {0}'.format(name)
def process(self, output):
# Note: The first 7 lines are header lines
return parse_win_ls_output(output[7], self.type)
| from __future__ import unicode_literals
import re
from pyinfra.api.facts import FactBase
from .util.win_files import parse_win_ls_output
class WinFile(FactBase):
# Types must match WIN_FLAG_TO_TYPE in .util.win_files.py
type = 'file'
shell = 'ps'
def command(self, name):
self.name = name
return 'ls {0}'.format(name)
def process(self, output):
# Note: The first 7 lines are header lines
return parse_win_ls_output(output[7], self.type)
# TODO: work thru the rest below
#class Link(File):
# type = 'link'
#
#
#class Directory(File):
# type = 'directory'
#
#
#class Socket(File):
# type = 'socket'
#
#
#class Sha1File(FactBase):
# '''
# Returns a SHA1 hash of a file. Works with both sha1sum and sha1.
# '''
#
# _regexes = [
# r'^([a-zA-Z0-9]{40})\s+%s$',
# r'^SHA1\s+\(%s\)\s+=\s+([a-zA-Z0-9]{40})$',
# ]
#
# def command(self, name):
# self.name = name
# return 'sha1sum {0} 2> /dev/null || sha1 {0}'.format(name)
#
# def process(self, output):
# for regex in self._regexes:
# regex = regex % self.name
# matches = re.match(regex, output[0])
#
# if matches:
# return matches.group(1)
#
#
#class FindInFile(FactBase):
# '''
# Checks for the existence of text in a file using grep. Returns a list of matching
# lines if the file exists, and ``None`` if the file does not.
# '''
#
# def command(self, name, pattern):
# self.name = name
#
# return (
# 'grep "{0}" {1} 2> /dev/null || '
# '(find {1} -type f > /dev/null && echo "__pyinfra_exists_{1}")'
# ).format(pattern, name).strip()
#
# def process(self, output):
# # If output is the special string: no matches, so return an empty list;
# # this allows us to differentiate between no matches in an existing file
# # or a file not existing.
# if output and output[0] == '__pyinfra_exists_{0}'.format(self.name):
# return []
#
# return output
#
#
#class FindFiles(FactBase):
# '''
# Returns a list of files from a start point, recursively using find.
# '''
#
# def command(self, name):
# return 'find {0} -type f'.format(name)
#
# def process(self, output):
# return output
#
#
#class FindLinks(FindFiles):
# '''
# Returns a list of links from a start point, recursively using find.
# '''
#
# def command(self, name):
# return 'find {0} -type l'.format(name)
#
#
#class FindDirectories(FindFiles):
# '''
# Returns a list of directories from a start point, recursively using find.
# '''
#
# def command(self, name):
# return 'find {0} -type d'.format(name)
| mit | Python |
884c08c601af31906379a877d0d8ce2b65dff988 | Complete sets | ahartz1/python_koans,ahartz1/python_koans | python3/koans/about_sets.py | python3/koans/about_sets.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutSets(Koan):
def test_sets_make_keep_lists_unique(self):
highlanders = ['MacLeod', 'Ramirez', 'MacLeod', 'Matunas', 'MacLeod', 'Malcolm', 'MacLeod']
there_can_only_be_only_one = set(highlanders)
self.assertEqual({'MacLeod', 'Ramirez', 'Matunas', 'Malcolm'}, there_can_only_be_only_one)
def test_empty_sets_have_different_syntax_to_populated_sets(self):
self.assertEqual(set([1, 2, 3]), {1, 2, 3})
self.assertEqual(set([]), set())
def test_dictionaries_and_sets_use_same_curly_braces(self):
# Note: Sets have only started using braces since Python 3
self.assertEqual(set, {1, 2, 3}.__class__)
self.assertEqual(dict, {'one': 1, 'two': 2}.__class__)
self.assertEqual(dict, {}.__class__)
def test_creating_sets_using_strings(self):
self.assertEqual(set(['12345']), {'12345'})
self.assertEqual({'1', '2', '3', '4', '5'}, set('12345'))
def test_convert_the_set_into_a_list_to_sort_it(self):
self.assertEqual(['1', '2', '3', '4', '5'], sorted(set('12345')))
# ------------------------------------------------------------------
def test_set_have_arithmetic_operators(self):
scotsmen = {'MacLeod', 'Wallace', 'Willie'}
warriors = {'MacLeod', 'Wallace', 'Leonidas'}
self.assertEqual({'Willie'}, scotsmen - warriors)
self.assertEqual({'MacLeod', 'Wallace', 'Willie', 'Leonidas'}, scotsmen | warriors)
self.assertEqual({'MacLeod', 'Wallace'}, scotsmen & warriors)
self.assertEqual({'Willie', 'Leonidas'}, scotsmen ^ warriors)
# ------------------------------------------------------------------
def test_we_can_query_set_membership(self):
self.assertEqual(True, 127 in {127, 0, 0, 1} )
self.assertEqual(True, 'cow' not in set('apocalypse now') )
def test_we_can_compare_subsets(self):
self.assertEqual(True, set('cake') <= set('cherry cake'))
self.assertEqual(True, set('cake').issubset(set('cherry cake')) )
self.assertEqual(False, set('cake') > set('pie'))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutSets(Koan):
def test_sets_make_keep_lists_unique(self):
highlanders = ['MacLeod', 'Ramirez', 'MacLeod', 'Matunas', 'MacLeod', 'Malcolm', 'MacLeod']
there_can_only_be_only_one = set(highlanders)
self.assertEqual(__, there_can_only_be_only_one)
def test_empty_sets_have_different_syntax_to_populated_sets(self):
self.assertEqual(__, {1, 2, 3})
self.assertEqual(__, set())
def test_dictionaries_and_sets_use_same_curly_braces(self):
# Note: Sets have only started using braces since Python 3
self.assertEqual(__, {1, 2, 3}.__class__)
self.assertEqual(__, {'one': 1, 'two': 2}.__class__)
self.assertEqual(__, {}.__class__)
def test_creating_sets_using_strings(self):
self.assertEqual(__, {'12345'})
self.assertEqual(__, set('12345'))
def test_convert_the_set_into_a_list_to_sort_it(self):
self.assertEqual(__, sorted(set('12345')))
# ------------------------------------------------------------------
def test_set_have_arithmetic_operators(self):
scotsmen = {'MacLeod', 'Wallace', 'Willie'}
warriors = {'MacLeod', 'Wallace', 'Leonidas'}
self.assertEqual(__, scotsmen - warriors)
self.assertEqual(__, scotsmen | warriors)
self.assertEqual(__, scotsmen & warriors)
self.assertEqual(__, scotsmen ^ warriors)
# ------------------------------------------------------------------
def test_we_can_query_set_membership(self):
self.assertEqual(__, 127 in {127, 0, 0, 1} )
self.assertEqual(__, 'cow' not in set('apocalypse now') )
def test_we_can_compare_subsets(self):
self.assertEqual(__, set('cake') <= set('cherry cake'))
self.assertEqual(__, set('cake').issubset(set('cherry cake')) )
self.assertEqual(__, set('cake') > set('pie'))
| mit | Python |
71cc46b1759e468f7faaef72f75df2798143455d | Update version.py | dpressel/baseline,dpressel/baseline,dpressel/baseline,dpressel/baseline | python/baseline/version.py | python/baseline/version.py | __version__ = "1.5.14"
| __version__ = "1.5.13"
| apache-2.0 | Python |
a1358ba2deec091ca077932693db1a68a9b53995 | Update __init__.py to properly expose KerasTransformer via 'from sparkdl import KerasTransformer' (#96) | databricks/spark-deep-learning | python/sparkdl/__init__.py | python/sparkdl/__init__.py | # Copyright 2017 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .graph.input import TFInputGraph
from .transformers.keras_image import KerasImageFileTransformer
from .transformers.named_image import DeepImagePredictor, DeepImageFeaturizer
from .transformers.keras_tensor import KerasTransformer
from .transformers.tf_image import TFImageTransformer
from .transformers.tf_tensor import TFTransformer
from .transformers.utils import imageInputPlaceholder
__all__ = [
'imageSchema', 'imageType', 'readImages',
'TFImageTransformer', 'TFInputGraph', 'TFTransformer',
'DeepImagePredictor', 'DeepImageFeaturizer', 'KerasImageFileTransformer', 'KerasTransformer',
'imageInputPlaceholder']
| # Copyright 2017 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .graph.input import TFInputGraph
from .transformers.keras_image import KerasImageFileTransformer
from .transformers.named_image import DeepImagePredictor, DeepImageFeaturizer
from .transformers.tf_image import TFImageTransformer
from .transformers.tf_tensor import TFTransformer
from .transformers.utils import imageInputPlaceholder
__all__ = [
'imageSchema', 'imageType', 'readImages',
'TFImageTransformer', 'TFInputGraph', 'TFTransformer',
'DeepImagePredictor', 'DeepImageFeaturizer', 'KerasImageFileTransformer', 'KerasTransformer',
'imageInputPlaceholder']
| apache-2.0 | Python |
e1fadf6570d683b6bfcf9c8bb8c8933aca1feaba | load caffe, torch, bigdl model | intel-analytics/BigDL,yangw1234/BigDL,yangw1234/BigDL,yangw1234/BigDL,yangw1234/BigDL,intel-analytics/BigDL,intel-analytics/BigDL,intel-analytics/BigDL | python/test/dev/modules.py | python/test/dev/modules.py | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Adopt from Spark and it might be refactored in the future
from functools import total_ordering
all_modules = []
@total_ordering
class Module(object):
def __init__(self, name, python_test_goals=()):
"""
Define a new module.
:param name: A short module name, for display in logging and error messagesl
:param python_test_goals: A set of Python test goals for testing this module.
"""
self.name = name
self.python_test_goals = python_test_goals
all_modules.append(self)
def __repr__(self):
return "Module<%s>" % self.name
def __lt__(self, other):
return self.name < other.name
def __eq__(self, other):
return self.name == other.name
def __ne__(self, other):
return not (self.name == other.name)
def __hash__(self):
return hash(self.name)
bigdl_layer = Module(
name="bigdl_layer",
python_test_goals=[
"nn.layer"
])
bigdl_layer = Module(
name="bigdl_criterion",
python_test_goals=[
"nn.criterion"
])
bigdl_layer = Module(
name="bigdl_common",
python_test_goals=[
"util.common"
])
bigdl_optimizer = Module(
name="bigdl_optimizer",
python_test_goals=[
"optim.optimizer",
]
)
test_simple_integration_test = Module(
name="simple_integration_test",
python_test_goals=[
"test.simple_integration_test"
]
)
test_load_caffe = Module(
name="load_caffe_test",
python_test_goals=[
"test.load_caffe_test"
]
)
| #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Adopt from Spark and it might be refactored in the future
from functools import total_ordering
all_modules = []
@total_ordering
class Module(object):
def __init__(self, name, python_test_goals=()):
"""
Define a new module.
:param name: A short module name, for display in logging and error messagesl
:param python_test_goals: A set of Python test goals for testing this module.
"""
self.name = name
self.python_test_goals = python_test_goals
all_modules.append(self)
def __repr__(self):
return "Module<%s>" % self.name
def __lt__(self, other):
return self.name < other.name
def __eq__(self, other):
return self.name == other.name
def __ne__(self, other):
return not (self.name == other.name)
def __hash__(self):
return hash(self.name)
bigdl_layer = Module(
name="bigdl_layer",
python_test_goals=[
"nn.layer"
])
bigdl_layer = Module(
name="bigdl_criterion",
python_test_goals=[
"nn.criterion"
])
bigdl_layer = Module(
name="bigdl_common",
python_test_goals=[
"util.common"
])
bigdl_optimizer = Module(
name="bigdl_optimizer",
python_test_goals=[
"optim.optimizer",
]
)
test_simple_integration_test = Module(
name="simple_integration_test",
python_test_goals=[
"test.simple_integration_test"
]
)
| apache-2.0 | Python |
7e4bc4249daeef8f9e8a6965c38ceab2ff998bf5 | handle events-based methods | woju/qubes-core-admin,woju/qubes-core-admin,QubesOS/qubes-core-admin,marmarek/qubes-core-admin,QubesOS/qubes-core-admin,marmarek/qubes-core-admin,woju/qubes-core-admin,marmarek/qubes-core-admin,QubesOS/qubes-core-admin,woju/qubes-core-admin | qubes/tools/qubesd_query.py | qubes/tools/qubesd_query.py | #!/usr/bin/env python3.6
import argparse
import asyncio
import signal
import sys
QUBESD_SOCK = '/var/run/qubesd.sock'
try:
asyncio.ensure_future
except AttributeError:
asyncio.ensure_future = asyncio.async
parser = argparse.ArgumentParser(
description='low-level qubesd interrogation tool')
parser.add_argument('--connect', '-c', metavar='PATH',
dest='socket',
default=QUBESD_SOCK,
help='path to qubesd UNIX socket (default: %(default)s)')
parser.add_argument('--empty', '-e',
dest='payload',
action='store_false', default=True,
help='do not read from stdin and send empty payload')
parser.add_argument('src', metavar='SRC',
help='source qube')
parser.add_argument('method', metavar='METHOD',
help='method name')
parser.add_argument('dest', metavar='DEST',
help='destination qube')
parser.add_argument('arg', metavar='ARGUMENT',
nargs='?', default='',
help='argument to method')
def sighandler(loop, signame, coro):
    """Handle a termination signal: announce it, cancel the client task,
    and stop the event loop."""
    message = 'caught {}, exiting'.format(signame)
    print(message)
    coro.cancel()
    loop.stop()
@asyncio.coroutine
def qubesd_client(socket, payload, *args):
    """Connect to the qubesd UNIX socket and stream the response to stdout.

    :param socket: path to the qubesd UNIX socket
    :param payload: bytes sent as the request body after the header
    :param args: header fields (here: src, method, dest, arg); each is
        ASCII-encoded and NUL-terminated on the wire
    """
    try:
        reader, writer = yield from asyncio.open_unix_connection(socket)
    except asyncio.CancelledError:
        return
    for arg in args:
        writer.write(arg.encode('ascii'))
        writer.write(b'\0')
    writer.write(payload)
    writer.write_eof()
    try:
        # Stream response chunks as they arrive rather than reading until
        # EOF in one call, so events-based methods that keep the
        # connection open produce output immediately.
        while not reader.at_eof():
            data = yield from reader.read(4096)
            sys.stdout.buffer.write(data) # pylint: disable=no-member
            sys.stdout.flush()
    except asyncio.CancelledError:
        return
    finally:
        writer.close()
def main(args=None):
    """Entry point: parse arguments and run the client until EOF or signal.

    :param args: optional argument list (defaults to ``sys.argv[1:]``)
    """
    args = parser.parse_args(args)
    loop = asyncio.get_event_loop()
    # Read the request body from stdin unless --empty was given.
    # pylint: disable=no-member
    payload = sys.stdin.buffer.read() if args.payload else b''
    # pylint: enable=no-member
    coro = asyncio.ensure_future(qubesd_client(args.socket, payload,
        args.src, args.method, args.dest, args.arg))
    # Cancel the client task and stop the loop on SIGINT/SIGTERM.
    for signame in ('SIGINT', 'SIGTERM'):
        loop.add_signal_handler(getattr(signal, signame),
            sighandler, loop, signame, coro)
    try:
        loop.run_until_complete(coro)
    finally:
        loop.close()
if __name__ == '__main__':
main()
| #!/usr/bin/env python3.6
import argparse
import asyncio
import signal
import sys
QUBESD_SOCK = '/var/run/qubesd.sock'
try:
asyncio.ensure_future
except AttributeError:
asyncio.ensure_future = asyncio.async
parser = argparse.ArgumentParser(
description='low-level qubesd interrogation tool')
parser.add_argument('--connect', '-c', metavar='PATH',
dest='socket',
default=QUBESD_SOCK,
help='path to qubesd UNIX socket (default: %(default)s)')
parser.add_argument('--empty', '-e',
dest='payload',
action='store_false', default=True,
help='do not read from stdin and send empty payload')
parser.add_argument('src', metavar='SRC',
help='source qube')
parser.add_argument('method', metavar='METHOD',
help='method name')
parser.add_argument('dest', metavar='DEST',
help='destination qube')
parser.add_argument('arg', metavar='ARGUMENT',
nargs='?', default='',
help='argument to method')
def sighandler(loop, signame, coro):
print('caught {}, exiting'.format(signame))
coro.cancel()
loop.stop()
@asyncio.coroutine
def qubesd_client(socket, payload, *args):
try:
reader, writer = yield from asyncio.open_unix_connection(socket)
except asyncio.CancelledError:
return
for arg in args:
writer.write(arg.encode('ascii'))
writer.write(b'\0')
writer.write(payload)
writer.write_eof()
try:
data = yield from reader.read()
sys.stdout.buffer.write(data) # pylint: disable=no-member
except asyncio.CancelledError:
return
finally:
writer.close()
def main(args=None):
args = parser.parse_args(args)
loop = asyncio.get_event_loop()
# pylint: disable=no-member
payload = sys.stdin.buffer.read() if args.payload else b''
# pylint: enable=no-member
coro = asyncio.ensure_future(qubesd_client(args.socket, payload,
args.src, args.method, args.dest, args.arg))
for signame in ('SIGINT', 'SIGTERM'):
loop.add_signal_handler(getattr(signal, signame),
sighandler, loop, signame, coro)
try:
loop.run_until_complete(coro)
finally:
loop.close()
if __name__ == '__main__':
main()
| lgpl-2.1 | Python |
493ce497e5d84d8db9c37816aefea9099df42e90 | Add Synonym and related classes | sherlocke/pywatson | pywatson/answer/synonym.py | pywatson/answer/synonym.py | from pywatson.util.map_initializable import MapInitializable
class SynSetSynonym(MapInitializable):
    """A single synonym entry inside a synset."""

    def __init__(self, is_chosen, value, weight):
        self.is_chosen = is_chosen
        self.value = value
        self.weight = weight

    @classmethod
    def from_mapping(cls, syn_mapping):
        """Build an instance from an API mapping with camelCase keys."""
        return cls(is_chosen=syn_mapping['isChosen'],
                   value=syn_mapping['value'],
                   weight=syn_mapping['weight'])
class SynSet(MapInitializable):
    """A named set of synonyms."""

    def __init__(self, name, synonyms=()):
        self.name = name
        self.synonyms = tuple(synonyms)

    @classmethod
    def from_mapping(cls, synset_mapping):
        """Build an instance from an API mapping.

        NOTE(review): both fields are read from ``synset_mapping[0]`` --
        the API apparently wraps the synset in a one-element list; confirm
        this holds for every response shape.
        """
        return cls(name=synset_mapping[0]['name'],
                   synonyms=(SynSetSynonym.from_mapping(s) for s in synset_mapping[0]['synonym']))
class Synonym(MapInitializable):
    """A synonym together with the synset it belongs to."""

    def __init__(self, part_of_speech, lemma, value, syn_set):
        self.part_of_speech = part_of_speech
        self.lemma = lemma
        self.value = value
        self.syn_set = syn_set

    @classmethod
    def from_mapping(cls, synonym_mapping):
        """Build an instance (and its nested ``SynSet``) from an API mapping."""
        return cls(part_of_speech=synonym_mapping['partOfSpeech'],
                   lemma=synonym_mapping['lemma'],
                   value=synonym_mapping['value'],
                   syn_set=SynSet.from_mapping(synonym_mapping['synSet']))
| class Synonym(object):
def __init__(self):
pass
| mit | Python |
b9487ee71ca2aac0d7ae3be955a9c7629ca35956 | Check the isolate version before running it. | eunchong/build,eunchong/build,eunchong/build,eunchong/build | scripts/slave/recipe_modules/isolate/resources/isolate.py | scripts/slave/recipe_modules/isolate/resources/isolate.py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Calls either isolate.py or isolate Go executable in the checkout.
"""
import os
import subprocess
import sys
def try_go(path, args):
    """Try to run the Go implementation of isolate.

    :param path: directory containing isolate.py; the luci-go binaries are
        expected in a sibling ``luci-go`` directory
    :param args: arguments forwarded to the isolate executable
    :returns: the exit code of the Go binary, or None if the caller should
        fall back to the python implementation.
    """
    luci_go = os.path.join(os.path.dirname(path), 'luci-go')
    if sys.platform == 'win32':
        exe = os.path.join(luci_go, 'win64', 'isolate.exe')
    elif sys.platform == 'darwin':
        exe = os.path.join(luci_go, 'mac64', 'isolate')
    else:
        exe = os.path.join(luci_go, 'linux64', 'isolate')
    if not os.access(exe, os.X_OK):
        return None
    # Try to use Go implementation.
    # BUG FIX: the original had ``try`` without a colon and an except
    # clause listing three exceptions without parentheses -- both are
    # syntax errors; multiple exception types must be a tuple.
    try:
        version = subprocess.check_output([exe, 'version']).strip()
        version = tuple(map(int, version.split('.')))
    except (OSError, subprocess.CalledProcessError, ValueError):
        return None
    # Key behavior based on version if necessary.
    if version < (0, 1):
        return None
    return subprocess.call([exe] + args)
def main():
    """Dispatch to the Go isolate binary, falling back to isolate.py."""
    path = sys.argv[1]
    args = sys.argv[2:]
    ret = try_go(path, args)
    if ret is None:
        # Go binary missing, broken or too old: run the Python implementation.
        return subprocess.call(
            [sys.executable, os.path.join(path, 'isolate.py')] + args)
    return ret
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Calls either isolate.py or isolate Go executable in the checkout.
"""
import os
import subprocess
import sys
def main():
path = sys.argv[1]
luci_go = os.path.join(os.path.dirname(path), 'luci-go')
if sys.platform == 'win32':
exe = os.path.join(luci_go, 'win64', 'isolate.exe')
elif sys.platform == 'darwin':
exe = os.path.join(luci_go, 'mac64', 'isolate')
else:
exe = os.path.join(luci_go, 'linux64', 'isolate')
if os.access(exe, os.X_OK):
# Use Go implementation. We'd prefer to build on-the-fly but the bots do not
# all have yet the Go toolset.
return subprocess.call([exe] + sys.argv[2:])
return subprocess.call(
[sys.executable, os.path.join(path, 'isolate.py')] + sys.argv[2:])
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
a950f73e043064cc9eac202c686397029f54c7ea | Disable multiple :/ | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/config/utils.py | readthedocs/config/utils.py | """Shared functions for the config module."""
import yaml
def to_dict(value):
    """Recursively convert ``config.models`` objects into plain values.

    Objects exposing an ``as_dict`` hook are converted through it; lists
    and dicts are walked recursively; anything else is returned unchanged.
    """
    if hasattr(value, 'as_dict'):
        return value.as_dict()
    if isinstance(value, list):
        return [to_dict(item) for item in value]
    if isinstance(value, dict):
        return {key: to_dict(item) for key, item in value.items()}
    return value
def list_to_dict(list_):
    """Transform a list into a dictionary keyed by stringified indices."""
    return {str(index): element for index, element in enumerate(list_)}
class SafeLoaderIgnoreUnknown(yaml.SafeLoader): # pylint: disable=too-many-ancestors

    """
    YAML loader that ignores unknown tags.
    Borrowed from https://stackoverflow.com/a/57121993
    """

    def ignore_unknown(self, node): # pylint: disable=no-self-use, unused-argument
        """Constructor used for any unknown tag: load the node as ``None``."""
        return None

# Registering under the ``None`` tag key makes ``ignore_unknown`` the
# fallback constructor for every tag without an explicit one.
SafeLoaderIgnoreUnknown.add_constructor(None, SafeLoaderIgnoreUnknown.ignore_unknown)
def yaml_load_safely(content):
    """
    Parse YAML with the ``SafeLoaderIgnoreUnknown`` loader to skip unknown tags.

    When a YAML document contains a tag such as ``!!python/name:int`` it is
    completely ignored and ``None`` is returned for those fields instead of
    failing. We need this to avoid executing random code, but still support
    these YAML files.
    """
    return yaml.load(content, Loader=SafeLoaderIgnoreUnknown)
| """Shared functions for the config module."""
import yaml
def to_dict(value):
"""Recursively transform a class from `config.models` to a dict."""
if hasattr(value, 'as_dict'):
return value.as_dict()
if isinstance(value, list):
return [
to_dict(v)
for v in value
]
if isinstance(value, dict):
return {
k: to_dict(v)
for k, v in value.items()
}
return value
def list_to_dict(list_):
"""Transform a list to a dictionary with its indices as keys."""
dict_ = {
str(i): element
for i, element in enumerate(list_)
}
return dict_
class SafeLoaderIgnoreUnknown(yaml.SafeLoader): # pylint: disable=too-many-ancestors
"""
YAML loader to ignore unknown tags.
Borrowed from https://stackoverflow.com/a/57121993
"""
def ignore_unknown(self, node): # pylint: disable=no-self-use disable=unused-argument
return None
SafeLoaderIgnoreUnknown.add_constructor(None, SafeLoaderIgnoreUnknown.ignore_unknown)
def yaml_load_safely(content):
"""
Uses ``SafeLoaderIgnoreUnknown`` loader to skip unknown tags.
When a YAML contains ``!!python/name:int`` it will complete ignore it an
return ``None`` for those fields instead of failing. We need this to avoid
executing random code, but still support these YAML files.
"""
return yaml.load(content, Loader=SafeLoaderIgnoreUnknown)
| mit | Python |
e7035094822777479be3f40c28dd1ecc52baab49 | Update user.py | samfcmc/fenixedu-python-sdk | fenixedu/user.py | fenixedu/user.py |
""" User: """
class User(object):
    """Holds the OAuth tokens of an authenticated user."""

    def __init__(self, access_token=None, refresh_token=None,
                 token_expires=None):
        self.access_token = access_token
        self.refresh_token = refresh_token
        self.token_expires = token_expires
""" User: """
class User(object):
def __init__(self, access_token = None, refresh_token = None,
token_expires = None, code = None):
self.access_token = access_token
self.refresh_token = refresh_token
self.token_expires = token_expires
self.code = code
| mit | Python |
b1cf7ca8fbe70d77787c7256e161d2baf220f39a | Update urls.py | gmkou/FikaNote,gmkou/FikaNote,gmkou/FikaNote | fikanote/urls.py | fikanote/urls.py | from django.conf.urls import include, url, handler404
import app.views
import app.feed
import app.shownote
import app.agenda
import app.agendajson
urlpatterns = [
url(r'^$', app.views.index, name='index'),
url(r'^(?P<number>\d+)/$', app.views.episode),
url(r'^agenda', app.agenda.agenda),
url(r'^agendajson', app.agendajson.agendajson),
url(r'^add', app.views.add, name='add'),
url(r'^shownote', app.shownote.shownote),
url(r'^feed/', app.feed.feed)
]
| from django.conf.urls import patterns, include, url, handler404
import app.views
import app.feed
import app.shownote
import app.agenda
import app.agendajson
urlpatterns = [
url(r'^$', app.views.index, name='index'),
url(r'^(?P<number>\d+)/$', app.views.episode),
url(r'^agenda', app.agenda.agenda),
url(r'^agendajson', app.agendajson.agendajson),
url(r'^add', app.views.add, name='add'),
url(r'^shownote', app.shownote.shownote),
url(r'^feed/', app.feed.feed)
]
| mit | Python |
9c8b3bed9f47fc1218590b971de9b9723741d7c2 | Disable the old admin panel | clubadm/clubadm,clubadm/clubadm,clubadm/clubadm | clubadm/urls.py | clubadm/urls.py | from django.conf import settings
from django.conf.urls import include, url
from django.contrib.auth.views import logout
from clubadm import views, admin
urlpatterns = [
url(r"^$", views.home, name="home"),
url(r"^login$", views.login, name="login"),
url(r"^callback$", views.callback, name="callback"),
url(r"^(?P<year>[0-9]{4})/$", views.welcome, name="welcome"),
url(r"^(?P<year>[0-9]{4})/signup/$", views.signup, name="signup"),
url(r"^(?P<year>[0-9]{4})/signout/$", views.signout, name="signout"),
url(r"^(?P<year>[0-9]{4})/profile/$", views.profile, name="profile"),
url(r"^(?P<year>[0-9]{4})/send_mail/$", views.send_mail, name="send_mail"),
url(r"^(?P<year>[0-9]{4})/send_gift/$", views.send_gift, name="send_gift"),
url(r"^(?P<year>[0-9]{4})/receive_gift/$", views.receive_gift, name="receive_gift"),
url(r"^(?P<year>[0-9]{4})/read_mails/$", views.read_mails, name="read_mails"),
url(r"^logout$", logout, {"next_page": "/"}),
url(r"^profile$", views.profile_legacy),
#url(r"^admin/", admin.site.urls),
url(r"^jserror$", views.jserror, name="jserror"),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)),
]
| from django.conf import settings
from django.conf.urls import include, url
from django.contrib.auth.views import logout
from clubadm import views, admin
urlpatterns = [
url(r"^$", views.home, name="home"),
url(r"^login$", views.login, name="login"),
url(r"^callback$", views.callback, name="callback"),
url(r"^(?P<year>[0-9]{4})/$", views.welcome, name="welcome"),
url(r"^(?P<year>[0-9]{4})/signup/$", views.signup, name="signup"),
url(r"^(?P<year>[0-9]{4})/signout/$", views.signout, name="signout"),
url(r"^(?P<year>[0-9]{4})/profile/$", views.profile, name="profile"),
url(r"^(?P<year>[0-9]{4})/send_mail/$", views.send_mail, name="send_mail"),
url(r"^(?P<year>[0-9]{4})/send_gift/$", views.send_gift, name="send_gift"),
url(r"^(?P<year>[0-9]{4})/receive_gift/$", views.receive_gift, name="receive_gift"),
url(r"^(?P<year>[0-9]{4})/read_mails/$", views.read_mails, name="read_mails"),
url(r"^logout$", logout, {"next_page": "/"}),
url(r"^profile$", views.profile_legacy),
url(r"^admin/", admin.site.urls),
url(r"^jserror$", views.jserror, name="jserror"),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)),
]
| mit | Python |
76d118577ec966ffe2c13c8009866299fcb3f962 | fix the bug that will generate !!python/unicode | espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf | tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py | tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py | # Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
class Job(dict):
    """A Gitlab CI job, behaving like the dict it was loaded from.

    :param job: job data loaded from .gitlab-ci.yml
    :param job_name: name of the job
    """

    def __init__(self, job, job_name):
        super(Job, self).__init__(job)
        self["name"] = job_name
        self.tags = set(self["tags"])

    def match_group(self, group):
        """Return True when ``group`` can be assigned to this job.

        A group matches only if no group has been assigned yet and the
        group's CI job match keys are exactly this job's tag set.
        """
        return "case group" not in self and group.ci_job_match_keys == self.tags

    def assign_group(self, group):
        """Attach ``group`` as this job's case group."""
        self["case group"] = group

    def output_config(self, file_path):
        """Dump the assigned case group to ``<file_path>/<job name>.yml``.

        Does nothing when no case group has been assigned.
        """
        target = os.path.join(file_path, self["name"] + ".yml")
        if "case group" in self:
            with open(target, "w") as config_file:
                yaml.safe_dump(self["case group"].output(), config_file,
                               encoding='utf-8', default_flow_style=False)
| # Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
class Job(dict):
"""
Gitlab CI job
:param job: job data loaded from .gitlab-ci.yml
:param job_name: job name
"""
def __init__(self, job, job_name):
super(Job, self).__init__(job)
self["name"] = job_name
self.tags = set(self["tags"])
def match_group(self, group):
"""
Match group by tags of job.
All filters values of group should be included in tags.
:param group: case group to match
:return: True or False
"""
match_result = False
if "case group" not in self and group.ci_job_match_keys == self.tags:
# group not assigned and all tags match
match_result = True
return match_result
def assign_group(self, group):
"""
assign a case group to a test job.
:param group: the case group to assign
"""
self["case group"] = group
def output_config(self, file_path):
"""
output test config to the given path.
file name will be job_name.yml
:param file_path: output file path
:return: None
"""
file_name = os.path.join(file_path, self["name"] + ".yml")
if "case group" in self:
with open(file_name, "w") as f:
yaml.dump(self["case group"].output(), f, default_flow_style=False)
| apache-2.0 | Python |
91cee59a1ab45be18d1cbb3de7545055de353d0e | add werkzeug routing: allows '/endpoint/<some_id>' style routing | philipn/flask-sockets,kennethreitz/flask-sockets | flask_sockets.py | flask_sockets.py | # -*- coding: utf-8 -*-
from werkzeug.routing import Map, Rule
from werkzeug.exceptions import NotFound
def log_request(self):
    """Write one formatted request line to the server's logger.

    Monkey-patched onto gevent's WSGIHandler below; supports both
    logging-style objects (``info``) and bare file-like ones (``write``).
    """
    log = self.server.log
    if not log:
        return
    line = self.format_request() + '\n'
    if hasattr(log, 'info'):
        log.info(line)
    else:
        log.write(line)
# Monkeys are made for freedom.
try:
import gevent
from geventwebsocket.gunicorn.workers import GeventWebSocketWorker as Worker
except ImportError:
pass
if 'gevent' in locals():
# Freedom-Patch logger for Gunicorn.
if hasattr(gevent, 'pywsgi'):
gevent.pywsgi.WSGIHandler.log_request = log_request
class SocketMiddleware(object):
    """WSGI middleware that intercepts websocket requests.

    Requests whose path matches a registered websocket rule are handed to
    the matching handler; everything else falls through to the wrapped
    application.
    """

    def __init__(self, wsgi_app, socket):
        self.ws = socket
        self.app = wsgi_app

    def __call__(self, environ, start_response):
        # Werkzeug routing enables '/endpoint/<some_id>'-style rules;
        # converter values are passed to the handler as keyword arguments.
        adapter = self.ws.url_map.bind_to_environ(environ)
        try:
            handler, values = adapter.match()
            # 'wsgi.websocket' is expected from a websocket-capable server
            # (presumably gevent-websocket -- see the imports above).
            environment = environ['wsgi.websocket']
            handler(environment, **values)
            return []
        except NotFound:
            return self.app(environ, start_response)
class Sockets(object):
    """Flask extension that registers websocket routes on a werkzeug Map."""

    def __init__(self, app=None):
        self.url_map = Map()
        if app:
            self.init_app(app)

    def init_app(self, app):
        # Wrap the WSGI app so websocket paths are dispatched before Flask.
        app.wsgi_app = SocketMiddleware(app.wsgi_app, self)

    def route(self, rule, **options):
        """Decorator mirroring ``Flask.route`` for websocket handlers."""
        def decorator(f):
            endpoint = options.pop('endpoint', None)
            self.add_url_rule(rule, endpoint, f, **options)
            return f
        return decorator

    def add_url_rule(self, rule, _, f, **options):
        # The endpoint argument is ignored; the handler itself is stored
        # as the rule's endpoint.
        self.url_map.add(Rule(rule, endpoint=f))
# CLI sugar.
if 'Worker' in locals():
worker = Worker
| # -*- coding: utf-8 -*-
def log_request(self):
log = self.server.log
if log:
if hasattr(log, 'info'):
log.info(self.format_request() + '\n')
else:
log.write(self.format_request() + '\n')
# Monkeys are made for freedom.
try:
import gevent
from geventwebsocket.gunicorn.workers import GeventWebSocketWorker as Worker
except ImportError:
pass
if 'gevent' in locals():
# Freedom-Patch logger for Gunicorn.
if hasattr(gevent, 'pywsgi'):
gevent.pywsgi.WSGIHandler.log_request = log_request
class SocketMiddleware(object):
def __init__(self, wsgi_app, socket):
self.ws = socket
self.app = wsgi_app
def __call__(self, environ, start_response):
path = environ['PATH_INFO']
if path in self.ws.url_map:
handler = self.ws.url_map[path]
environment = environ['wsgi.websocket']
handler(environment)
return []
else:
return self.app(environ, start_response)
class Sockets(object):
def __init__(self, app=None):
self.url_map = {}
if app:
self.init_app(app)
def init_app(self, app):
app.wsgi_app = SocketMiddleware(app.wsgi_app, self)
def route(self, rule, **options):
def decorator(f):
endpoint = options.pop('endpoint', None)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
def add_url_rule(self, rule, _, f, **options):
self.url_map[rule] = f
# CLI sugar.
if 'Worker' in locals():
worker = Worker
| mit | Python |
af199fbaa6637f308795fcbca8c284c8edbb234e | move add out of ez_norm layer | 255BITS/HyperGAN,255BITS/HyperGAN | hypergan/modules/ez_norm.py | hypergan/modules/ez_norm.py | import torch.nn as nn
from hypergan.modules.modulated_conv2d import EqualLinear
class EzNorm(nn.Module):
def __init__(self, style_size, channels, dims, equal_linear=False, use_conv=True, dim=1):
super(EzNorm, self).__init__()
if equal_linear:
self.beta = EqualLinear(style_size, channels, lr_mul=0.01)
else:
self.beta = nn.Linear(style_size, channels)
if dims == 2:
self.conv = nn.Conv1d(channels, 1, 1, 1, padding = 0)
else:
self.conv = nn.Conv2d(channels, 1, 1, 1, padding = 0)
self.dim = dim
def forward(self, content, style, epsilon=1e-5):
N = content.shape[0]
D = content.shape[self.dim]
view = [1 for x in content.shape]
view[0] = N
view[self.dim] = D
return self.beta(style).view(*view) * self.conv(content)
| import torch.nn as nn
from hypergan.modules.modulated_conv2d import EqualLinear
class EzNorm(nn.Module):
def __init__(self, style_size, channels, dims, equal_linear=False, use_conv=True, dim=1):
super(EzNorm, self).__init__()
if equal_linear:
self.beta = EqualLinear(style_size, channels, lr_mul=0.01)
else:
self.beta = nn.Linear(style_size, channels)
if dims == 2:
self.conv = nn.Conv1d(channels, 1, 1, 1, padding = 0)
else:
self.conv = nn.Conv2d(channels, 1, 1, 1, padding = 0)
self.dim = dim
def forward(self, content, style, epsilon=1e-5):
N = content.shape[0]
D = content.shape[self.dim]
view = [1 for x in content.shape]
view[0] = N
view[self.dim] = D
return content + self.beta(style).view(*view) * self.conv(content)
| mit | Python |
219b6e24e28c9a7e11c0119adebcf97f14fa8018 | Change port | disqus/codebox,disqus/codebox | codebox/conf.py | codebox/conf.py | """
codebox.conf
~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
import os, os.path
import urlparse
class Config(object):
    """Default (production) configuration for the codebox app.

    Values come from the environment where available, so the same code
    runs locally and on Heroku.
    """
    DEBUG = False
    TESTING = False
    LOG_LEVEL = os.environ.get('LOG_LEVEL', 'DEBUG')
    # The fallback SECRET_KEY is only suitable for development; production
    # deployments should always provide SECRET_KEY in the environment.
    SECRET_KEY = os.environ.get('SECRET_KEY', '\x89\x1d\xec\x8eJ\xda=C`\xf3<X\x81\xff\x1e\r{+\x1b\xe1\xd1@ku')
    REDIS_DB = 0
    JANRAIN_API_KEY = os.environ.get('JANRAIN_API_KEY')
    # Webmail domains -- presumably rejected for some sign-up or account
    # flow elsewhere in the app; confirm against the consumers.
    DOMAIN_BLACKLIST = ['gmail.com', 'hotmail.com', 'live.com', 'msn.com', 'yahoo.com', 'googlemail.com', 'facebookmail.com']
    # SendGrid SMTP relay, port 587 with STARTTLS.
    MAIL_SERVER = 'smtp.sendgrid.net'
    MAIL_PORT = 587
    MAIL_HOST_USER = os.environ.get('SENDGRID_USERNAME')
    MAIL_HOST_PASSWORD = os.environ.get('SENDGRID_PASSWORD')
    MAIL_USE_TLS = True
# Redis To Go provisioning (Heroku): parse the connection parameters out
# of REDISTOGO_URL and override the Config defaults.
# IDIOM FIX: ``dict.has_key`` is Python-2-only and long deprecated; the
# ``in`` operator is equivalent and works on both Python 2 and 3.
if 'REDISTOGO_URL' in os.environ:
    # 'redis://username:password@my.host:6789'
    urlparse.uses_netloc.append('redis')
    url = urlparse.urlparse(os.environ['REDISTOGO_URL'])
    Config.REDIS_PASSWORD = url.password
    Config.REDIS_HOST = url.hostname
    Config.REDIS_PORT = url.port
class TestingConfig(Config):
    """Overrides for the test suite: separate Redis DB, testing mode on."""
    REDIS_DB = 9
    TESTING = True
| """
codebox.conf
~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
import os, os.path
import urlparse
class Config(object):
DEBUG = False
TESTING = False
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'DEBUG')
SECRET_KEY = os.environ.get('SECRET_KEY', '\x89\x1d\xec\x8eJ\xda=C`\xf3<X\x81\xff\x1e\r{+\x1b\xe1\xd1@ku')
REDIS_DB = 0
JANRAIN_API_KEY = os.environ.get('JANRAIN_API_KEY')
DOMAIN_BLACKLIST = ['gmail.com', 'hotmail.com', 'live.com', 'msn.com', 'yahoo.com', 'googlemail.com', 'facebookmail.com']
MAIL_SERVER = 'smtp.sendgrid.net'
MAIL_PORT = 25
MAIL_HOST_USER = os.environ.get('SENDGRID_USERNAME')
MAIL_HOST_PASSWORD = os.environ.get('SENDGRID_PASSWORD')
MAIL_USE_TLS = True
if os.environ.has_key('REDISTOGO_URL'):
# 'redis://username:password@my.host:6789'
urlparse.uses_netloc.append('redis')
url = urlparse.urlparse(os.environ['REDISTOGO_URL'])
Config.REDIS_PASSWORD = url.password
Config.REDIS_HOST = url.hostname
Config.REDIS_PORT = url.port
class TestingConfig(Config):
REDIS_DB = 9
TESTING = True
| apache-2.0 | Python |
ef0b36954f0e0caa6c7f732dc024a444e8583c94 | Add pre-commit hook and reformat code thanks to black | OCA/server-tools,OCA/server-tools,YannickB/server-tools,YannickB/server-tools,YannickB/server-tools,OCA/server-tools | base_sparse_field_list_support/__manifest__.py | base_sparse_field_list_support/__manifest__.py | # -*- coding: utf-8 -*-
{
"name": "Base Sparse Field List Support",
"summary": "add list support to convert_to_cache()",
"version": "10.0.1.0.0",
"category": "Technical Settings",
"website": "www.akretion.com",
"author": "Akretion",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": ["base"],
}
| # -*- coding: utf-8 -*-
{
"name": "Base Sparse Field List Support",
"summary": "add list support to convert_to_cache()",
"version": "10.0.1.0.0",
'category': 'Technical Settings',
"website": "www.akretion.com",
"author": "Akretion",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"base",
],
}
| agpl-3.0 | Python |
f98acc3c0fc52ab5eea229976276ca7da3ee964c | Bump version to 0.0.5 | yasyf/bcferries | bcferries/__init__.py | bcferries/__init__.py | VERSION = '0.0.5'
from bcferries import BCFerries
| VERSION = '0.0.4'
from bcferries import BCFerries
| mit | Python |
d9333e03d10d9b4172d3fbf233c48a3117b4f82c | Add docstrings to Timer | ktbs/ktbs-bench,ktbs/ktbs-bench | ktbs_bench/utils/timer.py | ktbs_bench/utils/timer.py | import resource
import logging
from time import time
class Timer(object):
    """Measure the duration of a piece of work.

    Instantiate (which starts the clock by default), call :meth:`stop`,
    then read the elapsed user/system/wall deltas from :meth:`get_times`.

    Example::

        timer = Timer()       # starts immediately
        ...                   # timed work
        timer.stop()
        timer.get_times()     # {'usr': ..., 'sys': ..., 'real': ...}
    """

    def __init__(self, tick_now=True):
        self.start_time = []
        if tick_now:
            self.start_time = self.tick()
        self.stop_time = {}
        self.delta = {}

    @staticmethod
    def tick():
        """Snapshot the current usr/sys CPU times and the wall clock.

        Returns a dict with keys ``usr`` (user CPU time since process
        start), ``sys`` (system CPU time since process start) and
        ``real`` (seconds since the UNIX epoch).

        References
        ----------
        .. [1] Ipython %time magic command implementation http://git.io/GJpSNA
        """
        usr, sys_ = resource.getrusage(resource.RUSAGE_SELF)[:2]
        return {'usr': usr, 'sys': sys_, 'real': time()}

    def start(self):
        """Record the start snapshot (no-op if one is already set)."""
        if self.start_time:
            logging.warning('Start time has already been set. Continuing with previous value.')
        else:
            self.start_time = self.tick()

    def stop(self):
        """Record the stop snapshot and compute the delta times."""
        self.stop_time = self.tick()
        # Deliberately call .keys(): stopping an unstarted timer (list
        # start_time) raises AttributeError, as before.
        self.delta = {key: self.stop_time[key] - self.start_time[key]
                      for key in self.start_time.keys()}

    def get_times(self):
        """Return the dict of delta times, or log an error if not stopped."""
        if not self.delta:
            logging.error("The timer has not been stopped yet.")
            return None
        return self.delta

    def __repr__(self):
        """String representation of the measured delta times."""
        if not self.stop_time:
            return 'timer has not been stopped.'
        return 'usr: %s\tsys: %s\tusr+sys: %s\t real: %s' % \
            (self.delta['usr'], self.delta['sys'],
             self.delta['usr'] + self.delta['sys'], self.delta['real'])
import logging
from time import time
class Timer:
"""Measure process duration."""
def __init__(self, tick_now=True):
self.start_time = []
if tick_now:
self.start_time = self.tick()
self.stop_time = {}
self.delta = {}
@staticmethod
def tick():
"""Return usr, sys and real times."""
usage_times = resource.getrusage(resource.RUSAGE_SELF)[:2]
times = {'usr': usage_times[0], 'sys': usage_times[1], 'real': time()}
return times
def start(self):
"""Start the timer"""
if self.start_time:
logging.warning('Start time has already been set. Continuing with previous value.')
else:
self.start_time = self.tick()
def stop(self):
"""Stop the timer and compute delta time."""
self.stop_time = self.tick()
for type in self.start_time.keys():
self.delta[type] = self.stop_time[type] - self.start_time[type]
def get_times(self):
"""Return a dict of delta times."""
if self.delta:
return self.delta
else:
logging.error("The timer has not been stopped yet.")
def __repr__(self):
if self.stop_time:
res = 'usr: %s\tsys: %s\tusr+sys: %s\t real: %s' % \
(self.delta['usr'], self.delta['sys'], self.delta['usr'] + self.delta['sys'], self.delta['real'])
else:
res = 'timer has not been stopped.'
return res
| mit | Python |
5a23904ef475d1f2c298dfea33b95eb763302ee9 | Remove unused code | fastmonkeys/kuulemma,fastmonkeys/kuulemma,City-of-Helsinki/kuulemma,City-of-Helsinki/kuulemma,City-of-Helsinki/kuulemma,fastmonkeys/kuulemma | kuulemma/views/hearing.py | kuulemma/views/hearing.py | from flask import Blueprint, redirect, render_template, url_for
from ..models import Hearing
hearing = Blueprint(
name='hearing',
import_name=__name__,
url_prefix='/kuulemiset'
)
# Redirects to the first hearing before the real index page is implemented.
@hearing.route('')
def index():
    """Temporary index: redirect to the first hearing, or the frontpage
    when no hearing exists yet."""
    hearing = Hearing.query.first()
    if not hearing:
        return redirect(url_for('frontpage.index'))
    return redirect(url_for(
        'hearing.show',
        hearing_id=hearing.id,
        slug=hearing.slug
    ))
@hearing.route('/<int:hearing_id>-<slug>')
def show(hearing_id, slug):
    """Render a single hearing page.

    Redirects to the canonical URL when the slug in the request does not
    match the hearing's current slug; 404s when the id is unknown.
    """
    hearing = Hearing.query.get_or_404(hearing_id)
    if hearing.slug != slug:
        return redirect(
            url_for('hearing.show', hearing_id=hearing_id, slug=hearing.slug)
        )
    commentable_sections_string = hearing.get_commentable_sections_string()
    return render_template(
        'hearing/show.html',
        hearing=hearing,
        commentable_sections_string=commentable_sections_string,
        hearing_page_active=True
    )
| from flask import Blueprint, redirect, render_template, url_for
from sqlalchemy import desc
from ..models import Comment, Hearing
hearing = Blueprint(
name='hearing',
import_name=__name__,
url_prefix='/kuulemiset'
)
# Redirects to the first hearing before the real index page is implemented.
@hearing.route('')
def index():
hearing = Hearing.query.first()
if not hearing:
return redirect(url_for('frontpage.index'))
return redirect(url_for(
'hearing.show',
hearing_id=hearing.id,
slug=hearing.slug
))
@hearing.route('/<int:hearing_id>-<slug>')
def show(hearing_id, slug):
hearing = Hearing.query.get_or_404(hearing_id)
if hearing.slug != slug:
return redirect(
url_for('hearing.show', hearing_id=hearing_id, slug=hearing.slug)
)
latest_comments = (
Comment.query
.filter(Comment.hearing == hearing)
.order_by(desc(Comment.created_at))
)
# TODO: Change this into number of likes when likes are implemented.
popular_comments = (
Comment.query
.filter(Comment.hearing == hearing)
)
commentable_sections_string = hearing.get_commentable_sections_string()
return render_template(
'hearing/show.html',
hearing=hearing,
latest_comments=latest_comments,
popular_comments=popular_comments,
commentable_sections_string=commentable_sections_string,
hearing_page_active=True
)
| agpl-3.0 | Python |
214534a86fe8a7113cfc56255e201f5500744215 | Fix the Auth section not showing up in automatic admin. | ergodicbreak/evennia,TheTypoMaster/evennia,titeuf87/evennia,jamesbeebop/evennia,ypwalter/evennia,ypwalter/evennia,TheTypoMaster/evennia,ergodicbreak/evennia,feend78/evennia,mrkulk/text-world,TheTypoMaster/evennia,emergebtc/evennia,jamesbeebop/evennia,feend78/evennia,feend78/evennia,emergebtc/evennia,shollen/evennia,mrkulk/text-world,shollen/evennia,titeuf87/evennia,emergebtc/evennia,ypwalter/evennia,titeuf87/evennia,mrkulk/text-world,jamesbeebop/evennia,titeuf87/evennia,mrkulk/text-world,feend78/evennia,ergodicbreak/evennia | game/web/urls.py | game/web/urls.py | #
# File that determines what each URL points to. This uses _Python_ regular
# expressions, not Perl's.
#
# See:
# http://diveintopython.org/regular_expressions/street_addresses.html#re.matching.2.3
#
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# User Authentication
url(r'^accounts/login', 'django.contrib.auth.views.login'),
url(r'^accounts/logout', 'django.contrib.auth.views.logout'),
# Front page
url(r'^', include('game.web.apps.website.urls')),
# News stuff
url(r'^news/', include('game.web.apps.news.urls')),
# Page place-holder for things that aren't implemented yet.
url(r'^tbi/', 'game.web.apps.website.views.to_be_implemented'),
# Admin interface
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/(.*)', admin.site.root, name='admin'),
)
# If you'd like to serve media files via Django (strongly not recommended!),
# open up your settings.py file and set SERVE_MEDIA to True. This is
# appropriate on a developing site, or if you're running Django's built-in
# test server. Normally you want a webserver that is optimized for serving
# static content to handle media files (apache, lighttpd).
if settings.SERVE_MEDIA:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)
| #
# File that determines what each URL points to. This uses _Python_ regular
# expressions, not Perl's.
#
# See:
# http://diveintopython.org/regular_expressions/street_addresses.html#re.matching.2.3
#
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
#admin.autodiscover()
urlpatterns = patterns('',
# User Authentication
url(r'^accounts/login', 'django.contrib.auth.views.login'),
url(r'^accounts/logout', 'django.contrib.auth.views.logout'),
# Front page
url(r'^', include('game.web.apps.website.urls')),
# News stuff
url(r'^news/', include('game.web.apps.news.urls')),
# Page place-holder for things that aren't implemented yet.
url(r'^tbi/', 'game.web.apps.website.views.to_be_implemented'),
# Admin interface
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/(.*)', admin.site.root, name='admin'),
)
# If you'd like to serve media files via Django (strongly not recommended!),
# open up your settings.py file and set SERVE_MEDIA to True. This is
# appropriate on a developing site, or if you're running Django's built-in
# test server. Normally you want a webserver that is optimized for serving
# static content to handle media files (apache, lighttpd).
if settings.SERVE_MEDIA:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)
| bsd-3-clause | Python |
7a2cbbe715e3bbe5030b5f61c0ba6fb84725e17d | fix doctest | yannicklm/pycp | pycp/util.py | pycp/util.py | """Various useful functions"""
import os
def debug(message):
"""Print debug mesages when env. var PYCP_DEBUG is set."""
if os.environ.get("PYCP_DEBUG"):
print message
def human_readable(size):
"""Build a nice human readable string from a size given in
bytes
"""
if size < 1024**2:
hreadable = float(size)/1024.0
return "%.0fK" % hreadable
elif size < (1024**3):
hreadable = float(size)/(1024**2)
return "%.1fM" % round(hreadable, 1)
else:
hreadable = float(size)/(1024.0**3)
return "%.2fG" % round(hreadable, 2)
def pprint_transfer(src, dest):
"""
Directly borrowed from git's diff.c file.
>>> pprint_transfer("/path/to/foo", "/path/to/bar")
'/path/to/{foo => bar}'
"""
len_src = len(src)
len_dest = len(dest)
# Find common prefix
pfx_length = 0
i = 0
j = 0
while (i < len_src and j < len_dest and src[i] == dest[j]):
if src[i] == os.path.sep:
pfx_length = i + 1
i += 1
j += 1
# Find common suffix
sfx_length = 0
i = len_src - 1
j = len_dest - 1
while (i > 0 and j > 0 and src[i] == dest[j]):
if src[i] == os.path.sep:
sfx_length = len_src - i
i -= 1
j -= 1
src_midlen = len_src - pfx_length - sfx_length
dest_midlen = len_dest - pfx_length - sfx_length
pfx = src[:pfx_length]
sfx = dest[len_dest - sfx_length:]
src_mid = src [pfx_length:pfx_length + src_midlen ]
dest_mid = dest[pfx_length:pfx_length + dest_midlen]
if pfx == os.path.sep:
# The common prefix is / ,
# avoid print /{etc => tmp}/foo, and
# print {/etc => /tmp}/foo
pfx = ""
src_mid = os.path.sep + src_mid
dest_mid = os.path.sep + dest_mid
if not pfx and not sfx:
return "%s => %s" % (src, dest)
res = "%s{%s => %s}%s" % (pfx, src_mid, dest_mid, sfx)
return res
| """Various useful functions"""
import os
def debug(message):
"""Print debug mesages when env. var PYCP_DEBUG is set."""
if os.environ.get("PYCP_DEBUG"):
print message
def human_readable(size):
"""Build a nice human readable string from a size given in
bytes
"""
if size < 1024**2:
hreadable = float(size)/1024.0
return "%.0fK" % hreadable
elif size < (1024**3):
hreadable = float(size)/(1024**2)
return "%.1fM" % round(hreadable, 1)
else:
hreadable = float(size)/(1024.0**3)
return "%.2fG" % round(hreadable, 2)
def pprint_transfer(src, dest):
"""
Directly borrowed from git's diff.c file.
pprint_transfer("/path/to/foo", "/path/to/bar")
>>> /path/to/{foo => bar}
"""
len_src = len(src)
len_dest = len(dest)
# Find common prefix
pfx_length = 0
i = 0
j = 0
while (i < len_src and j < len_dest and src[i] == dest[j]):
if src[i] == os.path.sep:
pfx_length = i + 1
i += 1
j += 1
# Find common suffix
sfx_length = 0
i = len_src - 1
j = len_dest - 1
while (i > 0 and j > 0 and src[i] == dest[j]):
if src[i] == os.path.sep:
sfx_length = len_src - i
i -= 1
j -= 1
src_midlen = len_src - pfx_length - sfx_length
dest_midlen = len_dest - pfx_length - sfx_length
pfx = src[:pfx_length]
sfx = dest[len_dest - sfx_length:]
src_mid = src [pfx_length:pfx_length + src_midlen ]
dest_mid = dest[pfx_length:pfx_length + dest_midlen]
if pfx == os.path.sep:
# The common prefix is / ,
# avoid print /{etc => tmp}/foo, and
# print {/etc => /tmp}/foo
pfx = ""
src_mid = os.path.sep + src_mid
dest_mid = os.path.sep + dest_mid
if not pfx and not sfx:
return "%s => %s" % (src, dest)
res = "%s{%s => %s}%s" % (pfx, src_mid, dest_mid, sfx)
return res
| mit | Python |
ea91ba0aa69d8982a055b8d73a98e5161287a6b2 | Add a method for nan processing | gciteam6/xgboost,gciteam6/xgboost | src/features/time_series.py | src/features/time_series.py | # Built-in modules
import re
# Hand-made modules
from .base import DataFrameHandlerBase
REGEX_DROP_LABEL_NAME_PREFIXES = {
"max_ws_",
"ave_wv_",
"ave_ws_",
"max_tp_",
"min_tp_",
"sl_",
"sd_",
"vb_",
"weather_",
"dsr_",
"dsd_",
"dsr_"
}
DROP_LABEL_NAMES = [
"weather",
"weather_detail",
"wind",
"wave"
]
REGEX_SHIFT_COL_NAME_PREFIXES = [
"pr_*",
"max_iws_*",
"gsr_*",
"lap_*",
"sap_*",
"cap_*",
"3h_cap_*",
"rhm_*",
"min_rhm_*",
"vp_*",
"dtp_*",
]
class TimeSeriesReshaper(DataFrameHandlerBase):
def __init__(self):
super().__init__()
self.REGEX_DROP_LABEL_NAME_PREFIXES = REGEX_DROP_LABEL_NAME_PREFIXES
self.DROP_LABEL_NAMES = DROP_LABEL_NAMES
self.REGEX_SHIFT_COL_NAME_PREFIXES = REGEX_SHIFT_COL_NAME_PREFIXES
@staticmethod
def shift_indexes(df, freq, shift_col_name_list):
non_shift_col_name_list = [
col_name for col_name in df.columns \
if col_name not in shift_col_name_list
]
df_shifted = df[shift_col_name_list].shift(freq=freq, axis=0)
df_non_shifted = df[non_shift_col_name_list]
return df_non_shifted.merge(
df_shifted,
how="outer",
left_index=True,
right_index=True
)
@staticmethod
def get_regex_matched_col_name(col_name_list, regex_name_prefix_list):
return [
col_name \
for col_name in col_name_list \
for name_prefix in regex_name_prefix_list \
if re.compile("^" + name_prefix + ".*$").match(col_name)
]
@staticmethod
def drop_columns_of_many_nan(df, nan_number_threshold):
reserve_column_list = [
v < nan_number_threshold for v in df.isnull().sum(axis=0)
]
return df.loc[:, reserve_column_list]
if __name__ == '__main__':
print("time series !")
| # Built-in modules
import re
# Hand-made modules
from .base import DataFrameHandlerBase
REGEX_DROP_LABEL_NAME_PREFIXES = {
"max_ws_",
"ave_wv_",
"ave_ws_",
"max_tp_",
"min_tp_",
"sl_",
"sd_",
"vb_",
"weather_",
"dsr_",
"dsd_",
"dsr_"
}
DROP_LABEL_NAMES = [
"weather",
"weather_detail",
"wind",
"wave"
]
REGEX_SHIFT_COL_NAME_PREFIXES = [
"pr_*",
"max_iws_*",
"gsr_*",
"lap_*",
"sap_*",
"cap_*",
"3h_cap_*",
"rhm_*",
"min_rhm_*",
"vp_*",
"dtp_*",
]
class TimeSeriesReshaper(DataFrameHandlerBase):
def __init__(self):
super().__init__()
self.REGEX_DROP_LABEL_NAME_PREFIXES = REGEX_DROP_LABEL_NAME_PREFIXES
self.DROP_LABEL_NAMES = DROP_LABEL_NAMES
self.REGEX_SHIFT_COL_NAME_PREFIXES = REGEX_SHIFT_COL_NAME_PREFIXES
@staticmethod
def shift_indexes(df, freq, shift_col_name_list):
non_shift_col_name_list = [
col_name for col_name in df.columns \
if col_name not in shift_col_name_list
]
df_shifted = df[shift_col_name_list].shift(freq=freq, axis=0)
df_non_shifted = df[non_shift_col_name_list]
return df_non_shifted.merge(
df_shifted,
how="outer",
left_index=True,
right_index=True
)
@staticmethod
def get_regex_matched_col_name(col_name_list, regex_name_prefix_list):
return [
col_name \
for col_name in col_name_list \
for name_prefix in regex_name_prefix_list \
if re.compile("^" + name_prefix + ".*$").match(col_name)
]
if __name__ == '__main__':
print("time series !")
| mit | Python |
9439fff96fa82e6415f796f5872de727d417789c | create wp users | epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp | src/wordpress/configurator.py | src/wordpress/configurator.py | import os
import logging
import subprocess
from .models import WPException, WPUser
class WPRawConfig:
""" First object to implement some business logic
- is the site installed? properly configured ?
It provides also the methods to actually interact with WP-CLI
- generic run_wp_cli
- adding WP users, either from name+email or sciperID
"""
def __init__(self, wp_site):
self.wp_site = wp_site
def __repr__(self):
installed_string = '[ok]' if self.is_installed else '[ko]'
return "config {0} for {1}".format(installed_string, repr(self.wp_site))
def run_wp_cli(self, command):
try:
cmd = "wp --quiet {} --path='{}'".format(command, self.wp_site.path)
logging.debug("exec '%s'", cmd)
return subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError as err:
logging.error("%s - WP export - wp_cli failed : %s", repr(self.wp_site), err)
return None
@property
def is_installed(self):
return os.path.isdir(self.wp_site.path)
@property
def is_config_valid(self):
if not self.is_installed:
return False
# TODO: check that the config is working (DB and user ok)
# wp-cli command (status?)
@property
def is_install_valid(self):
if not self.is_config_valid():
return False
# TODO: check that the site is available, that user can login and upload media
# tests from test_wordpress
@property
def db_infos(self):
# TODO: read from wp_config.php {db_name, mysql_username, mysql_password}
pass
@property
def admin_infos(self):
# TODO: read from DB {admin_username, admin_email}
pass
def add_wp_user(self, username, email):
return self._add_user(WPUser(username, email))
def add_ldap_user(self, sciper_id):
try:
return self._add_user(WPUser.from_sciper(sciper_id))
except WPException as err:
logging.error("Generator - %s - 'add_webmasters' failed %s", repr(self), err)
return None
def _add_user(self, user):
if not user.password:
user.set_password()
cmd = "user create {0.username} {0.email} --user_pass=\"{0.password}\" --role=administrator".format(user)
self.run_wp_cli(cmd)
return user
| import os
import logging
import subprocess
from .models import WPException, WPUser
class WPRawConfig:
""" First object to implement some business logic
- is the site installed? properly configured ?
It provides also the methods to actually interact with WP-CLI
- generic run_wp_cli
- adding WP users, either from name+email or sciperID
"""
def __init__(self, wp_site):
self.wp_site = wp_site
def __repr__(self):
installed_string = '[ok]' if self.is_installed else '[ko]'
return "config {0} for {1}".format(installed_string, repr(self.wp_site))
def run_wp_cli(self, command):
try:
cmd = "wp --quiet {} --path='{}'".format(command, self.wp_site.path)
logging.debug("exec '%s'", cmd)
return subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError as err:
logging.error("%s - WP export - wp_cli failed : %s", repr(self.wp_site), err)
return None
@property
def is_installed(self):
return os.path.isdir(self.wp_site.path)
@property
def is_config_valid(self):
if not self.is_installed:
return False
# TODO: check that the config is working (DB and user ok)
# wp-cli command (status?)
@property
def is_install_valid(self):
if not self.is_config_valid():
return False
# TODO: check that the site is available, that user can login and upload media
# tests from test_wordpress
@property
def db_infos(self):
# TODO: read from wp_config.php {db_name, mysql_username, mysql_password}
pass
@property
def admin_infos(self):
# TODO: read from DB {admin_username, admin_email}
pass
def add_wp_user(self, username, email):
return self._add_user(WPUser(username, email))
def add_ldap_user(self, sciper_id):
try:
return self._add_user(WPUser.from_sciper(sciper_id))
except WPException as err:
logging.error("Generator - %s - 'add_webmasters' failed %s", repr(self), err)
return None
def _add_user(self, user):
if not user.password:
user.set_password()
# TODO: call wp-cli to add user in WP
return user
| mit | Python |
19bbe44705652292a2d2e7ff83b833fa61997c4d | Remove unused imports | makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin | geotrek/common/templatetags/geotrek_tags.py | geotrek/common/templatetags/geotrek_tags.py | from django import template
from django.conf import settings
from datetime import datetime, timedelta
from django.utils.translation import gettext_lazy as _
register = template.Library()
@register.simple_tag
def settings_value(name):
return getattr(settings, name, "")
@register.simple_tag
def is_topology_model(model):
return hasattr(model, 'kind') and hasattr(model, 'offset')
@register.simple_tag
def is_blade_model(model):
return model._meta.model_name == 'blade'
@register.simple_tag
def is_site_model(model):
return model._meta.model_name == 'site'
@register.simple_tag
def is_course_model(model):
return model._meta.model_name == 'course'
@register.filter
def duration(value):
"""
Returns a duration in hours to a human readable version (minutes, days, ...)
"""
if value is None:
return ""
seconds = timedelta(minutes=float(value) * 60)
duration = datetime(1, 1, 1) + seconds
if duration.day > 1:
if duration.hour > 0 or duration.minute > 0:
final_duration = _("%s days") % duration.day
else:
final_duration = _("%s days") % (duration.day - 1)
elif duration.hour > 0 and duration.minute > 0:
final_duration = _("%(hour)s h %(min)s") % {'hour': duration.hour,
'min': duration.minute, }
elif duration.hour > 0:
final_duration = _("%(hour)s h") % {'hour': duration.hour}
else:
final_duration = _("%s min") % duration.minute
return final_duration
| from geotrek.zoning.models import RestrictedAreaType, RestrictedArea
from django import template
from django.conf import settings
from datetime import datetime, timedelta
import json
from django.utils.translation import gettext_lazy as _
register = template.Library()
@register.simple_tag
def settings_value(name):
return getattr(settings, name, "")
@register.simple_tag
def is_topology_model(model):
return hasattr(model, 'kind') and hasattr(model, 'offset')
@register.simple_tag
def is_blade_model(model):
return model._meta.model_name == 'blade'
@register.simple_tag
def is_site_model(model):
return model._meta.model_name == 'site'
@register.simple_tag
def is_course_model(model):
return model._meta.model_name == 'course'
@register.filter
def duration(value):
"""
Returns a duration in hours to a human readable version (minutes, days, ...)
"""
if value is None:
return ""
seconds = timedelta(minutes=float(value) * 60)
duration = datetime(1, 1, 1) + seconds
if duration.day > 1:
if duration.hour > 0 or duration.minute > 0:
final_duration = _("%s days") % duration.day
else:
final_duration = _("%s days") % (duration.day - 1)
elif duration.hour > 0 and duration.minute > 0:
final_duration = _("%(hour)s h %(min)s") % {'hour': duration.hour,
'min': duration.minute, }
elif duration.hour > 0:
final_duration = _("%(hour)s h") % {'hour': duration.hour}
else:
final_duration = _("%s min") % duration.minute
return final_duration
| bsd-2-clause | Python |
d8f27ed360454e10ba0cd718430af28ffef2d445 | Test fixes | CodersOfTheNight/stubilous | stubilous/tests.py | stubilous/tests.py | from pytest import fixture
from stubilous.config import Config
@fixture
def config_file() -> str:
return """
---
server:
port: 80
host: localhost
"""
@fixture
def basic_config(config_file) -> Config:
from io import StringIO
import yaml
buff = StringIO()
buff.write(config_file)
buff.seek(0)
return Config.from_dict(yaml.load(buff))
def test_service_config(basic_config):
assert basic_config.port == 80
assert basic_config.host == "localhost"
| from pytest import fixture
from stubilous.config import Config
@fixture
def basic_config() -> Config:
from io import StringIO
import yaml
buff = StringIO()
buff.write("""
---
server:
port: 80
host: localhost
""")
return Config.from_dict(yaml.load(buff))
def test_service_config(basic_config):
server = basic_config()
assert server.port == 80
assert server.host == "localhost"
| mit | Python |
10426b049baeceb8dda1390650503e1d75ff8b64 | Add initial fixtures for the categories. | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | us_ignite/common/management/commands/common_load_fixtures.py | us_ignite/common/management/commands/common_load_fixtures.py | import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Category, Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
CATEGORY_LIST = [
'Developer',
'Community leader',
'Subject matter expert',
'Designer',
'Project manager',
'Network engineer',
'Funder',
'Press/media',
'Interested party',
]
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print u'Updated site URL.'
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s.' % interest
for name in CATEGORY_LIST:
category, is_new = Category.objects.get_or_create(name=name)
if is_new:
print u'Imported category: %s.' % category
| import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
| bsd-3-clause | Python |
ec750644703a8e1f2c4a0bb03993acd07154143c | Add update profile command | chickenzord/plurk-cli | plurk-cli.py | plurk-cli.py | #!/usr/bin/env python
import click
import json
import plurkenv
plurk = plurkenv.init()
@click.group()
def cli():
pass
@cli.command()
@click.option('--key', '-k', default = None)
@click.option('--list-keys', '-l', is_flag = True)
@click.pass_context
def whoami(ctx, key, list_keys):
ctx.forward(whois, user_id = None)
@cli.command()
@click.argument('user_id', default = None)
@click.option('--key', '-k', default = None)
@click.option('--list-keys', '-l', is_flag = True)
def whois(user_id, key, list_keys):
if user_id:
response = plurk.callAPI('/APP/Profile/getPublicProfile', { 'user_id': user_id})
else:
response = plurk.callAPI('/APP/Profile/getOwnProfile')
user_info = response['user_info']
if list_keys:
for key in user_info.keys():
click.echo(key)
else:
data = user_info[key] if key else json.dumps(user_info, indent = 2)
click.echo(data)
@cli.command()
@click.option('--display-name', '-d', default = None)
@click.option('--full-name', '-f', default = None)
@click.pass_context
def update(ctx, display_name = None, full_name = None):
data = dict()
if display_name:
data['display_name'] = display_name
if full_name:
data['full_name'] = full_name
try:
response = plurk.callAPI('/APP/Users/update', data)
result = {
'display_name': response['user']['display_name'],
'full_name': response['user']['full_name']
}
output = response['error_text'] if 'error_text' in response else json.dumps(result, indent = 2)
except Exception as e:
output = str(e)
click.echo(output)
if __name__ == '__main__':
cli()
| #!/usr/bin/env python
import click
import json
import plurkenv
plurk = plurkenv.init()
@click.group()
def cli():
pass
@cli.command()
@click.option('--key', '-k', default = None)
@click.option('--list-keys', '-l', is_flag = True)
@click.pass_context
def whoami(ctx, key, list_keys):
ctx.forward(whois, user_id = None)
@cli.command()
@click.argument('user_id', default = None)
@click.option('--key', '-k', default = None)
@click.option('--list-keys', '-l', is_flag = True)
def whois(user_id, key, list_keys):
if user_id:
response = plurk.callAPI('/APP/Profile/getPublicProfile', { 'user_id': user_id})
else:
response = plurk.callAPI('/APP/Profile/getOwnProfile')
user_info = response['user_info']
if list_keys:
for key in user_info.keys():
click.echo(key)
else:
data = user_info[key] if key else json.dumps(user_info, indent = 2)
click.echo(data)
if __name__ == '__main__':
cli()
| mit | Python |
ade661b74082197974d2e134253e494a92d26772 | use python version specified in /usr/bin/env, not /usr/local/bin/python. | ianupright/micropsi2,printedheart/micropsi2,ianupright/micropsi2,printedheart/micropsi2,printedheart/micropsi2,ianupright/micropsi2 | src/micropsi_core/runtime.py | src/micropsi_core/runtime.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
MicroPsi runtime component;
maintains a set of users, worlds (up to one per user), and agents, and provides an interface to external clients
"""
__author__ = 'joscha'
__date__ = '10.05.12'
import micropsi_core.nodenet
import micropsi_core.world
def main():
pass
if __name__ == '__main__':
main()
| #!/usr/local/bin/python
# -*- coding: utf-8 -*-
"""
MicroPsi runtime component;
maintains a set of users, worlds (up to one per user), and agents, and provides an interface to external clients
"""
__author__ = 'joscha'
__date__ = '10.05.12'
import micropsi_core.nodenet
import micropsi_core.world
def main():
pass
if __name__ == '__main__':
main()
| mit | Python |
6593dffe70fc4bfa5a3d2c9966fb4886d2063c01 | add socket.io | haifengat/hf_at_py,haifengat/hf_at_py | web_flask/run.py | web_flask/run.py | #!flask/bin/python
from flask import Flask, render_template
from flask_socketio import SocketIO #pip install flask-socketio
from app import app
app.config['SECRET_KEY'] = 'secret!' #app.secret_key = os.urandom(10)
socketio = SocketIO(app)
@socketio.on_error()
def error_handler(e):
print(e)
#this fires
@socketio.on("connect")
def connect():
print("connected")
#this does not
@socketio.on('test')
def test_handler(message):
print("TEST WORKS")
if __name__ == "__main__":
socketio.run(app, debug=True)
#app.run(debug=True, port=5000)
#socketio.run(app)
| #!flask/bin/python
from app import app
app.run(debug=True)
| apache-2.0 | Python |
3701f179cc058872e7637dbc810cf363481e28ba | Remove default version from bots | aaxelb/SHARE,aaxelb/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,laurenbarker/SHARE,aaxelb/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,zamattiac/SHARE | share/bot.py | share/bot.py | import abc
import logging
from celery.schedules import crontab
from share.robot import RobotAppConfig
logger = logging.getLogger(__name__)
class BotAppConfig(RobotAppConfig, metaclass=abc.ABCMeta):
schedule = crontab(minute=0, hour=0)
task = 'share.tasks.BotTask'
description = 'TODO' # TODO
@property
def task_name(self):
return '{} bot task'.format(self.label)
@property
def label(self):
return self.name.rpartition('bots.')[2]
@abc.abstractmethod
def get_bot(self):
raise NotImplementedError
class Bot(abc.ABC):
def __init__(self, config):
self.config = config
@abc.abstractmethod
def run(self):
raise NotImplementedError
| import abc
import logging
from celery.schedules import crontab
from share.robot import RobotAppConfig
logger = logging.getLogger(__name__)
class BotAppConfig(RobotAppConfig, metaclass=abc.ABCMeta):
version = '0.0.0'
schedule = crontab(minute=0, hour=0)
task = 'share.tasks.BotTask'
description = 'TODO' # TODO
@property
def task_name(self):
return '{} bot task'.format(self.label)
@property
def label(self):
return self.name.rpartition('bots.')[2]
@abc.abstractmethod
def get_bot(self):
raise NotImplementedError
class Bot(abc.ABC):
def __init__(self, config):
self.config = config
@abc.abstractmethod
def run(self):
raise NotImplementedError
| apache-2.0 | Python |
6888113c424f4ff9d1f5add24bb2f7c2a718e853 | Allow duplicate flags via lists of values | blitzrk/sublime_libsass,blitzrk/sublime_libsass | libsass/project.py | libsass/project.py | import json
from libsass.pathutils import subpaths, mkdir_p
import os
default_opts = {
"output_dir": "build/css",
"options": {
"line-comments": True,
"line-numbers": True,
"style": "nested"
}
}
def find_config(top):
'''Search up parent tree for libsass config file'''
top = os.path.dirname(os.path.realpath(top))
for path in subpaths(top):
file = os.path.join(path, '.libsass.json')
if os.path.isfile(file):
return file
def read_config(file):
'''
Read json-formatted config file into map and fill missing values
with defaults
'''
with open(file, 'r') as f:
user_opts = json.load(f)
opts = default_opts
opts.update(user_opts)
return opts
def splitpath(path):
opts_path = find_config(path)
root = os.path.dirname(opts_path or path)
rest = os.path.relpath(path, root)
return (rest, root)
def to_flags(options):
'''Convert map into list of standard flags'''
flags = []
for key, value in options.items():
if value is True:
flags.append('--{0}'.format(key))
elif type(value) is list:
for v in value:
flags.append('--{0} {1}'.format(key, v))
elif value is not False:
flags.append('--{0} {1}'.format(key, value))
return flags
def config_for(path):
'''Determine output path and flags for compiling file at `path`'''
opts_path = find_config(path)
root = os.path.dirname(opts_path or path)
opts = default_opts if opts_path is None else read_config(opts_path)
output_dir = os.path.normpath(opts['output_dir'])
if not os.path.isabs(output_dir):
output_dir = os.path.join(root, output_dir)
# Make sure output folder exists
mkdir_p(output_dir)
flags = to_flags(opts['options'])
return (output_dir, flags)
| import json
from libsass.pathutils import subpaths, mkdir_p
import os
default_opts = {
"output_dir": "build/css",
"options": {
"line-comments": True,
"line-numbers": True,
"style": "nested"
}
}
def find_config(top):
'''Search up parent tree for libsass config file'''
top = os.path.dirname(os.path.realpath(top))
for path in subpaths(top):
file = os.path.join(path, '.libsass.json')
if os.path.isfile(file):
return file
def read_config(file):
'''
Read json-formatted config file into map and fill missing values
with defaults
'''
with open(file, 'r') as f:
user_opts = json.load(f)
opts = default_opts
opts.update(user_opts)
return opts
def splitpath(path):
opts_path = find_config(path)
root = os.path.dirname(opts_path or path)
rest = os.path.relpath(path, root)
return (rest, root)
def to_flags(options):
'''Convert map into list of standard POSIX flags'''
flags = []
for key, value in options.items():
if value is True:
flags.append('--{0}'.format(key))
elif value is not False:
flags.append('--{0}={1}'.format(key, value))
return flags
def config_for(path):
'''Determine output path and flags for compiling file at `path`'''
opts_path = find_config(path)
root = os.path.dirname(opts_path or path)
opts = default_opts if opts_path is None else read_config(opts_path)
output_dir = os.path.normpath(opts['output_dir'])
if not os.path.isabs(output_dir):
output_dir = os.path.join(root, output_dir)
# Make sure output folder exists
mkdir_p(output_dir)
flags = to_flags(opts['options'])
return (output_dir, flags)
| mit | Python |
04eb784155c650b471295bac8f0a125b25d0c5b7 | Use of parse_tag function of bibformat_utils instead of bibformat_engine | inveniosoftware/invenio-formatter,inveniosoftware/invenio-formatter,tiborsimko/invenio-formatter,inveniosoftware/invenio-formatter,tiborsimko/invenio-formatter,tiborsimko/invenio-formatter | lib/elements/bfe_field.py | lib/elements/bfe_field.py | # -*- coding: utf-8 -*-
##
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
from invenio.bibformat_utils import parse_tag
def format(bfo, tag, limit, separator=" "):
"""
Prints the given field of a record.
If tag is in range [001, 010], this element assumes
that it accesses a control field. Else it considers it
accesses a data field.
@param tag the tag code of the field that is to be printed
@param separator a separator between values of the field.
@param limit the maximum number of values to display.
"""
# check if data or control field
p_tag = parse_tag(tag)
if p_tag[0].isdigit() and int(p_tag[0]) in range(0, 11):
return bfo.control_field(tag)
else:
values = bfo.fields(tag)
out = ""
if limit == "" or (not limit.isdigit()) or limit > len(values):
limit = len(values)
if len(values)>0 and isinstance(values[0], dict):
x = 0
for value in values:
x += 1
out += separator.join(value.values())
if x >= limit:
break
else:
out += separator.join(values[:int(limit)])
return out
| # -*- coding: utf-8 -*-
##
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
from invenio.bibformat_engine import parse_tag
def format(bfo, tag, limit, separator=" "):
"""
Prints the given field of a record.
If tag is in range [001, 010], this element assumes
that it accesses a control field. Else it considers it
accesses a data field.
@param tag the tag code of the field that is to be printed
@param separator a separator between values of the field.
@param limit the maximum number of values to display.
"""
# check if data or control field
p_tag = parse_tag(tag)
if p_tag[0].isdigit() and int(p_tag[0]) in range(0, 11):
return bfo.control_field(tag)
else:
values = bfo.fields(tag)
out = ""
if limit == "" or (not limit.isdigit()) or limit > len(values):
limit = len(values)
if len(values)>0 and isinstance(values[0], dict):
x = 0
for value in values:
x += 1
out += separator.join(value.values())
if x >= limit:
break
else:
out += separator.join(values[:int(limit)])
return out
| mit | Python |
fb53f2ed0e6337d6f5766f47cb67c204c89c0568 | Fix oauth2 revoke URI, new URL doesn't seem to work | GAM-team/GAM,GAM-team/GAM | src/oauth2client/__init__.py | src/oauth2client/__init__.py | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
| # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
| apache-2.0 | Python |
79a35406c1d0b01c16c1252d09e33107fa10a245 | Set fields correctly | DoriftoShoes/bawt | bawt/mock/RPi.py | bawt/mock/RPi.py |
class GPIO(object):
BCM = 1
OUT = 1
IN = 1
ANNOUNCEMENT = '''
############################################################
# #
# WARNING: RPi.GPIO unavailable. #
# bawt.mock.RPi.GPIO will be used instead #
# #
############################################################
'''
def __init__(self):
pass
@staticmethod
def setwarnings(warnings=False):
GPIO.warnings = warnings
@staticmethod
def setmode(mode):
GPIO.mode = mode
@staticmethod
def setup(pin, mode):
return True
@staticmethod
def output(pin, output=False):
return True
@staticmethod
def input(pin, input=False):
return True
|
class GPIO(object):
BCM = "BCM"
OUT = "OUT"
ANNOUNCEMENT = '''
############################################################
# #
# WARNING: RPi.GPIO unavailable. #
# bawt.mock.RPi.GPIO will be used instead #
# #
############################################################
'''
def __init__(self):
pass
@staticmethod
def setwarnings(warnings=False):
GPIO.warnings = warnings
@staticmethod
def setmode(mode):
GPIO.mode = mode
@staticmethod
def setup(pin, mode):
return True
@staticmethod
def output(pin, output=False):
return True
@staticmethod
def input(pin, input=False):
return True
| apache-2.0 | Python |
05bb358e2e9344cb6c99f8b5e0bf51e06a7632dd | change inspect_dir function to inspect directories of Scenes that already exist | ibamacsr/indicar-process,ibamacsr/indicar_process,ibamacsr/indicar_process,ibamacsr/indicar_process,ibamacsr/indicar-process | indicarprocess/imagery/tasks.py | indicarprocess/imagery/tasks.py | # -*- coding: utf-8 -*-
from os import listdir
from django.contrib.gis.geos import Polygon
from .models import Scene, Image, ScheduledDownload
from .utils import calendar_date, get_bounds, get_cloud_rate
def download_all():
"""Download all new Scenes of ScheduledDownloads."""
for sd in ScheduledDownload.objects.all():
sd.download_new_scene()
sd.check_last_scene()
def process_all():
"""Process all scenes that have status 'downloaded'."""
for scene in Scene.objects.filter(status='downloaded'):
scene.process()
def inspect_dir(dir, status='processed'):
"""Create a Scene using the name of the dir and list all TIF files present
in that dir to create the Image objects in the database. If the Scene already
exists, only create the missing Image objects.
"""
scene_name = dir.split('/')[-1]
#get cloud_rate of the scene
try:
cloud_rate = get_cloud_rate(scene_name)
except FileNotFoundError:
cloud_rate = None
try:
scene = Scene.objects.get(name=scene_name)
if scene.cloud_rate is None:
scene.cloud_rate = cloud_rate
scene.save()
print('%s already exists.' % scene_name)
except Scene.DoesNotExist:
#get geom of the Scene
try:
geom = Polygon(get_bounds(scene_name))
except IndexError:
geom = None
scene = Scene.objects.create(
sat='L' + scene_name[2],
path=scene_name[3:6],
row=scene_name[6:9],
date=calendar_date(scene_name[9:13], scene_name[13:16]),
geom=geom,
cloud_rate=cloud_rate,
name=scene_name,
status=status
)
for image in listdir(dir):
if image.endswith('.TIF') or image.endswith('.tif'):
Image.objects.get_or_create(
name=image,
type=image.split('_')[1].split('.')[0],
scene=scene
)
| # -*- coding: utf-8 -*-
from os import listdir
from django.contrib.gis.geos import Polygon
from .models import Scene, Image, ScheduledDownload
from .utils import calendar_date, get_bounds, get_cloud_rate
def download_all():
"""Download all new Scenes of ScheduledDownloads."""
for sd in ScheduledDownload.objects.all():
sd.download_new_scene()
sd.check_last_scene()
def process_all():
"""Process all scenes that have status 'downloaded'."""
for scene in Scene.objects.filter(status='downloaded'):
scene.process()
def inspect_dir(dir, status='processed'):
"""Create a Scene using the name of the dir and list all TIF files present
in that dir to create the Image objects in the database.
"""
scene_name = dir.split('/')[-1]
try:
geom = Polygon(get_bounds(scene_name))
except IndexError:
geom = None
try:
cloud_rate = get_cloud_rate(scene_name)
except FileNotFoundError:
cloud_rate = None
scene = Scene.objects.create(
sat='L' + scene_name[2],
path=scene_name[3:6],
row=scene_name[6:9],
date=calendar_date(scene_name[9:13], scene_name[13:16]),
geom=geom,
cloud_rate=cloud_rate,
name=scene_name,
status=status
)
for image in listdir(dir):
if image.endswith('.TIF') or image.endswith('.tif'):
Image.objects.create(
name=image,
type=image.split('_')[1].split('.')[0],
scene=scene
)
| agpl-3.0 | Python |
7283c27bb43c19ad7b59aa14b7407b59bd0ed6ef | Set release version | goanpeca/loghub,spyder-ide/loghub | loghub/__init__.py | loghub/__init__.py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (See LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Changelog generator based on github milestones or tags."""
VERSION_INFO = (0, 3, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
| # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (See LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Changelog generator based on github milestones or tags."""
VERSION_INFO = (0, 4, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
| mit | Python |
99ec449a7f00f28dc6fd474e37f01dfb68cd33c1 | Add query argument, docstrings, and logging | bgyori/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,sorgerlab/indra,sorgerlab/indra | indra/sources/virhostnet/api.py | indra/sources/virhostnet/api.py | import pandas
import logging
from .processor import VirhostnetProcessor
logger = logging.getLogger(__name__)
vhn_url = ('http://virhostnet.prabi.fr:9090/psicquic/webservices/current/'\
'search/query/')
data_columns = [
'host_grounding', 'vir_grounding', 'host_mnemonic', 'vir_mnemonic',
'host_mnemonic2', 'vir_mnemonic2', 'exp_method',
'dash', 'publication', 'host_tax', 'vir_tax',
'int_type', 'source', 'source_id', 'score'
]
def process_from_web(query=None):
"""Process host-virus interactions from the VirHostNet website.
Parameters
----------
query : Optional[str]
A query that constrains the results to a given subset of the VirHostNet
database. Example: "taxid:2697049" to search for interactions for
SARS-CoV-2. If not provided, By default, the "*" query is used which
returns the full database.
Returns
-------
VirhostnetProcessor
A VirhostnetProcessor object which contains a list of extracted
INDRA Statements in its statements attribute.
"""
# Search for everything to get the full download by default
url = vhn_url + ('*' if query is None else query)
logger.info('Processing VirHostNet data from %s' % url)
df = pandas.read_csv(url, delimiter='\t', names=data_columns,
header=None)
return process_df(df)
def process_tsv(fname):
"""Process a TSV data file obtained from VirHostNet.
Parameters
----------
fname : str
The path to the VirHostNet tabular data file (in the same format as
the web service).
Returns
-------
VirhostnetProcessor
A VirhostnetProcessor object which contains a list of extracted
INDRA Statements in its statements attribute.
"""
df = pandas.read_csv(fname, delimiter='\t', names=data_columns,
header=None)
return process_df(df)
def process_df(df):
"""Process a VirHostNet pandas DataFrame.
Parameters
----------
df : pandas.DataFrame
A DataFrame representing VirHostNet interactions (in the same format as
the web service).
Returns
-------
VirhostnetProcessor
A VirhostnetProcessor object which contains a list of extracted
INDRA Statements in its statements attribute.
"""
vp = VirhostnetProcessor(df)
vp.extract_statements()
return vp
| import pandas
from .processor import VirhostnetProcessor
vhn_url = ('http://virhostnet.prabi.fr:9090/psicquic/webservices/current/'\
'search/query/*')
data_columns = [
'host_grounding', 'vir_grounding', 'host_mnemonic', 'vir_mnemonic',
'host_mnemonic2', 'vir_mnemonic2', 'exp_method',
'dash', 'publication', 'host_tax', 'vir_tax',
'int_type', 'source', 'source_id', 'score'
]
def process_from_web():
df = pandas.read_csv(vhn_url, delimiter='\t', names=data_columns,
header=None)
return process_df(df)
def process_tsv(fname):
df = pandas.read_csv(fname, delimiter='\t', names=data_columns,
header=None)
return process_df(df)
def process_df(df):
vp = VirhostnetProcessor(df)
vp.extract_statements()
return vp
| bsd-2-clause | Python |
497bfcee0a639c69c796386b536077a6815b90c0 | update cluster cant have job name | fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools | databricks/cluster-config.py | databricks/cluster-config.py | import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
BRANCH = sys.argv[3]
JOB_PARAMETERS = sys.argv[4]
ENV = sys.argv[5]
FILE_LOCATION = sys.argv[6]
# Run Get request with api_command param
# /jobs/list/ with api 2.0 returns all jobs, 2.1 does not
def getRequest(api_command, params={}):
if api_command == "/jobs/list":
url = "https://{}{}{}".format(INSTANCE_ID, "/api/2.0", api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
)
return response
# Get all job names and jobID"s and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()["jobs"]:
tempDict[job["settings"]["name"]] = job["job_id"]
return tempDict
def updateJsonFile(fileName):
# Open the JSON file for reading
jsonFile = open(fileName, "r")
data = json.load(jsonFile)
jsonFile.close()
# Edit content
# Set notebook params for job
python_params = JOB_PARAMETERS.split("\n")
env_vars = {
"DATABASE_URL": "{{secrets/" + ENV + "/DATABASE_URL}}",
"BRANCH": BRANCH,
"ENV_CODE": ENV
}
data["tasks"][0]["spark_python_task"]["python_file"] = "dbfs:/FileStore/" + BRANCH + "/manage.py"
data["tasks"][0]["spark_python_task"]["parameters"] = python_params
data["tasks"][0]["new_cluster"]["spark_env_vars"] = env_vars
## Save our changes to JSON file
jsonFile = open(fileName, "w+")
jsonFile.write(json.dumps(data))
jsonFile.close()
# Start script
jobs = getJobIds(getRequest("/jobs/list"))
if( JOB_NAME in jobs ):
sys.stdout.write( (str(jobs[JOB_NAME])) )
updateJsonFile(FILE_LOCATION)
| import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
BRANCH = sys.argv[3]
JOB_PARAMETERS = sys.argv[4]
ENV = sys.argv[5]
FILE_LOCATION = sys.argv[6]
# Run Get request with api_command param
# /jobs/list/ with api 2.0 returns all jobs, 2.1 does not
def getRequest(api_command, params={}):
if api_command == "/jobs/list":
url = "https://{}{}{}".format(INSTANCE_ID, "/api/2.0", api_command)
else:
url = "https://{}{}{}".format(INSTANCE_ID, API_VERSION, api_command)
response = requests.get(
url = url,
json = params,
)
return response
# Get all job names and jobID"s and map to dict
def getJobIds(res):
tempDict = {}
for job in res.json()["jobs"]:
tempDict[job["settings"]["name"]] = job["job_id"]
return tempDict
def updateJsonFile(fileName):
# Open the JSON file for reading
jsonFile = open(fileName, "r")
data = json.load(jsonFile)
jsonFile.close()
# Edit content
# Set notebook params for job
python_params = JOB_PARAMETERS.split("\n")
env_vars = {
"DATABASE_URL": "{{secrets/" + ENV + "/DATABASE_URL}}",
"BRANCH": BRANCH,
"ENV_CODE": ENV
}
data["tasks"][0]["spark_python_task"]["python_file"] = "dbfs:/FileStore/" + BRANCH + "/manage.py"
data["tasks"][0]["spark_python_task"]["parameters"] = python_params
data["tasks"][0]["new_cluster"]["spark_env_vars"] = env_vars
data["tasks"][0]["new_cluster"]["cluster_name"] = JOB_NAME
## Save our changes to JSON file
jsonFile = open(fileName, "w+")
jsonFile.write(json.dumps(data))
jsonFile.close()
# Start script
jobs = getJobIds(getRequest("/jobs/list"))
if( JOB_NAME in jobs ):
sys.stdout.write( (str(jobs[JOB_NAME])) )
updateJsonFile(FILE_LOCATION)
| cc0-1.0 | Python |
d06cea7508f6403b522b7593de83b2a3fabaad2a | add logic for subnet param | fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools,fedspendingtransparency/data-act-build-tools | databricks/cluster_config.py | databricks/cluster_config.py | import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
from run_databricks_jobs import getJobIds, getRequest
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
BRANCH = sys.argv[3]
JOB_PARAMETERS = sys.argv[4]
ENV = sys.argv[5]
FILE_LOCATION = sys.argv[6]
def updateJsonFile(fileName):
# Open the JSON file for reading
jsonFile = open(fileName, "r")
data = json.load(jsonFile)
jsonFile.close()
if ENV == "staging":
envCode = "stg"
else:
envCode = ENV
# Edit content
# Set notebook params for job
python_params = JOB_PARAMETERS.split("\n")
env_vars = {
"DATABASE_URL": "{{secrets/" + ENV + "/DATABASE_URL}}",
"BRANCH": BRANCH,
"ENV_CODE": envCode
}
if JOB_NAME.contains("manage"):
subnet = JOB_NAME.split("-")
subnet_param = subnet[1]
else:
subnet_param = "us-gov-west-1a"
# If we wanted to add the ability to add more tasks, we would just require a
# loop right below here adding to data["tasks"][x]
data["tasks"][0]["spark_python_task"]["python_file"] = "dbfs:/FileStore/" + BRANCH + "/manage.py"
data["tasks"][0]["spark_python_task"]["parameters"] = python_params
data["tasks"][0]["new_cluster"]["spark_env_vars"] = env_vars
data["tasks"][0]["new_cluster"]["aws_attributes"]["zone_id"] = subnet_param
# data["tasks"][0]["new_cluster"]["node_type_id"] = "m5a.large" if data["tasks"][0]["new_cluster"]["node_type_id"] == "" else NODE_TYPE
# data["tasks"][0]["new_cluster"]["driver_node_type_id"] = "m5a.large" if data["tasks"][0]["new_cluster"]["driver_node_type_id"] == "" else NODE_TYPE
# data["tasks"][0]["new_cluster"]["num_workers"] = 0 if data["tasks"][0]["new_cluster"]["num_workers"] == "" else WORKERS
data["name"] = JOB_NAME
## Save our changes to JSON file
jsonFile = open(fileName, "w+")
jsonFile.write(json.dumps(data))
jsonFile.close()
if __name__ == '__main__':
# Start script
jobs = getJobIds(getRequest("/jobs/list"))
if( JOB_NAME in jobs ):
sys.stdout.write( (str(jobs[JOB_NAME])) )
updateJsonFile(FILE_LOCATION)
else:
updateJsonFile(FILE_LOCATION)
| import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import json
from run_databricks_jobs import getJobIds, getRequest
INSTANCE_ID = sys.argv[1]
JOB_NAME = sys.argv[2]
BRANCH = sys.argv[3]
JOB_PARAMETERS = sys.argv[4]
ENV = sys.argv[5]
FILE_LOCATION = sys.argv[6]
def updateJsonFile(fileName):
# Open the JSON file for reading
jsonFile = open(fileName, "r")
data = json.load(jsonFile)
jsonFile.close()
if ENV == "staging":
envCode = "stg"
else:
envCode = ENV
# Edit content
# Set notebook params for job
python_params = JOB_PARAMETERS.split("\n")
env_vars = {
"DATABASE_URL": "{{secrets/" + ENV + "/DATABASE_URL}}",
"BRANCH": BRANCH,
"ENV_CODE": envCode
}
# If we wanted to add the ability to add more tasks, we would just require a
# loop right below here adding to data["tasks"][x]
data["tasks"][0]["spark_python_task"]["python_file"] = "dbfs:/FileStore/" + BRANCH + "/manage.py"
data["tasks"][0]["spark_python_task"]["parameters"] = python_params
data["tasks"][0]["new_cluster"]["spark_env_vars"] = env_vars
# data["tasks"][0]["new_cluster"]["node_type_id"] = "m5a.large" if data["tasks"][0]["new_cluster"]["node_type_id"] == "" else NODE_TYPE
# data["tasks"][0]["new_cluster"]["driver_node_type_id"] = "m5a.large" if data["tasks"][0]["new_cluster"]["driver_node_type_id"] == "" else NODE_TYPE
# data["tasks"][0]["new_cluster"]["num_workers"] = 0 if data["tasks"][0]["new_cluster"]["num_workers"] == "" else WORKERS
data["name"] = JOB_NAME
## Save our changes to JSON file
jsonFile = open(fileName, "w+")
jsonFile.write(json.dumps(data))
jsonFile.close()
if __name__ == '__main__':
# Start script
jobs = getJobIds(getRequest("/jobs/list"))
if( JOB_NAME in jobs ):
sys.stdout.write( (str(jobs[JOB_NAME])) )
updateJsonFile(FILE_LOCATION)
else:
updateJsonFile(FILE_LOCATION)
| cc0-1.0 | Python |
4335d38594be32af379c646eb87620f9d0fdd206 | Update cybergis-script-geoserver-import-styles.py | state-hiu/cybergis-scripts,state-hiu/cybergis-scripts | bin/cybergis-script-geoserver-import-styles.py | bin/cybergis-script-geoserver-import-styles.py | from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import sys
import os
import subprocess
#==#
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'lib')))
import cybergis.gs
#==#
parser = argparse.ArgumentParser(description='')
parser.add_argument("--path", help="The location in the filesystem of the styles directory")
parser.add_argument("--prefix", help="The prefix to prepend to all the styles when loaded into GeoServer")
parser.add_argument('-gs', '--geoserver', help="The url of the target GeoServer.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
args = parser.parse_args()
#==#
cybergis.gs._geoserver_import_styles.run(args)
| from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import sys
import os
import subprocess
#==#
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'lib')))
import cybergis.gs._geoserver_import_styles
#==#
parser = argparse.ArgumentParser(description='')
parser.add_argument("--path", help="The location in the filesystem of the styles directory")
parser.add_argument("--prefix", help="The prefix to prepend to all the styles when loaded into GeoServer")
parser.add_argument('-gs', '--geoserver', help="The url of the target GeoServer.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
args = parser.parse_args()
#==#
cybergis.gs._geoserver_import_styles.run(args)
| mit | Python |
5df30d2c3d21b339173bcc830525811371380513 | Update version.py | thetestpeople/Geist,kebarr/Geist | geist/version.py | geist/version.py | __version__ = '1.0a8'
| __version__ = '1.0a7'
| mit | Python |
68795e05944b1cb9d15e733b04d961385713459c | Fix save_crops | NaturalHistoryMuseum/inselect,NaturalHistoryMuseum/inselect | inselect/workflow/save_crops.py | inselect/workflow/save_crops.py | #!/usr/bin/env python
"""Saves cropped object images
"""
import argparse
import traceback
from pathlib import Path
# Import numpy here to prevent PyInstaller build from breaking
# TODO LH find a better solution
import numpy
import inselect
import inselect.lib.utils
from inselect.lib.document import InselectDocument
from inselect.lib.document_export import DocumentExport
from inselect.lib.utils import debug_print
# TODO Recursive option
# TODO Ignore if existing crops dir; option to overwrite
def save_crops(dir, overwrite_existing):
dir = Path(dir)
export = DocumentExport()
for p in dir.glob('*' + InselectDocument.EXTENSION):
try:
debug_print('Loading [{0}]'.format(p))
doc = InselectDocument.load(p)
if not overwrite_existing and doc.crops_dir.is_dir():
print('Crops dir [{0}] exists - skipping'.format(doc.crops_dir))
else:
print('Will save crops for [{0}] to [{1}]'.format(p, doc.crops_dir))
debug_print('Loading full-resolution scanned image')
doc.scanned.array
debug_print('Saving crops')
export.save_crops(doc)
except Exception:
print('Error saving crops from [{0}]'.format(p))
traceback.print_exc()
def main():
parser = argparse.ArgumentParser(description='Writes cropped object images from Inselect documents')
parser.add_argument("dir", help='Directory containing Inselect documents')
parser.add_argument('-o', '--overwrite', action='store_true',
help='Overwrite existing crops directories')
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-v', '--version', action='version',
version='%(prog)s ' + inselect.__version__)
args = parser.parse_args()
inselect.lib.utils.DEBUG_PRINT = args.debug
save_crops(args.dir, args.overwrite)
if __name__=='__main__':
main()
| #!/usr/bin/env python
"""Saves cropped object images
"""
import argparse
import traceback
from pathlib import Path
# Import numpy here to prevent PyInstaller build from breaking
# TODO LH find a better solution
import numpy
import inselect
import inselect.lib.utils
from inselect.lib.document import InselectDocument
from inselect.lib.utils import debug_print
# TODO Recursive option
# TODO Ignore if existing crops dir; option to overwrite
def save_crops(dir, overwrite_existing):
dir = Path(dir)
for p in dir.glob('*' + InselectDocument.EXTENSION):
try:
debug_print('Loading [{0}]'.format(p))
doc = InselectDocument.load(p)
if not overwrite_existing and doc.crops_dir.is_dir():
print('Crops dir [{0}] exists - skipping'.format(doc.crops_dir))
else:
print('Will save crops for [{0}] to [{1}]'.format(p, doc.crops_dir))
debug_print('Loading full-resolution scanned image')
doc.scanned.array
debug_print('Saving crops')
doc.save_crops()
except Exception:
print('Error saving crops from [{0}]'.format(p))
traceback.print_exc()
def main():
parser = argparse.ArgumentParser(description='Writes cropped object images from Inselect documents')
parser.add_argument("dir", help='Directory containing Inselect documents')
parser.add_argument('-o', '--overwrite', action='store_true',
help='Overwrite existing crops directories')
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-v', '--version', action='version',
version='%(prog)s ' + inselect.__version__)
args = parser.parse_args()
inselect.lib.utils.DEBUG_PRINT = args.debug
save_crops(args.dir, args.overwrite)
if __name__=='__main__':
main()
| bsd-3-clause | Python |
05b9b87c8980c7d93d9e55769d9ea8da245aa75b | Remove a request | BakeCode/performance-testing,BakeCode/performance-testing | config.py | config.py | from performance_testing.config import Config, Request
CONFIG = Config()
CONFIG.host = 'http://www.example.com'
CONFIG.clients_count = 2
CONFIG.requests_count = 10
CONFIG.requests = [
Request(url='/', type='GET', data=''),
Request(url='/about', type='GET', data='')
]
| from performance_testing.config import Config, Request
CONFIG = Config()
CONFIG.host = 'http://www.example.com'
CONFIG.clients_count = 2
CONFIG.requests_count = 10
CONFIG.requests = [
Request(url='/', type='GET', data=''),
Request(url='/about', type='GET', data=''),
Request(url='/imprint', type='GET', data='')
]
| mit | Python |
e55cccb6f57f666b7608eb9f96ec023d28b1e737 | Add to notes | jonathanstallings/data-structures,jay-tyler/data-structures | priorityq.py | priorityq.py | from __future__ import unicode_literals
from functools import total_ordering
from binary_heap import BinaryHeap
@total_ordering # Will build out the remaining comparison methods
class QNode(object):
"""A class for a queue node."""
def __init__(self, val, priority=None):
self.val = val
self.priority = priority
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
def __eq__(self, other):
"""Implement this and following method with logic to compare
priority and value appropiately.
"""
pass
def __lt__(self, other):
"""Implement in tandem with __eq__."""
pass
class PriorityQ(object):
"""A class for a priority queue. Compose this from BinaryHeap."""
def __init__(self, iterable=()):
"""We can iteratively use insert here."""
pass
def insert(item): # Want to extend spec to add priority as 2nd optional arg
"""Insert an item into the queue. Would be nice to examine item as follows:
If item is node:
add to PriorityQ
else:
init QNode with item as val and priority as None
add to PriorityQ
"""
pass
def pop():
"""Remove the most important item from the queue."""
pass
def peek():
"""Returns the most important item from queue without removal."""
| from __future__ import unicode_literals
from functools import total_ordering
from binary_heap import BinaryHeap
@total_ordering # Will build out the remaining comparison methods
class QNode(object):
"""A class for a queue node."""
def __init__(self, val, priority=None):
self.val = val
self.priority = priority
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
def __eq__(self, other):
"""Implement this and following two methods with logic to compare
priority and value appropiately.
"""
pass
def __lt__(self, other):
"""Implement in tandem with __eq__."""
pass
class PriorityQ(object):
"""A class for a priority queue. Compose this from BinaryHeap."""
def __init__(self, iterable=()):
"""We can iteratively use insert here."""
pass
def insert(item): # Wamt to extend spec to include priority as 2nd arg
"""Insert an item into the queue. Would be nice to examine item as follows:
If item is node:
add to PriorityQ
else:
init QNode with item as val and priority as None
"""
pass
def pop():
"""Remove the most important item from the queue."""
pass
def peek():
"""Returns the most important item from queue without removal."""
| mit | Python |
ac00356aa0bca06750fdadbd5d5dcdbca138fa63 | Improve wording of a comment. | sliedes/clang-triage | config.py | config.py | TOP = '/home/sliedes/scratch/build/clang-triage'
# git repository directory
LLVM_SRC = TOP + '/llvm.src'
# build directory
BUILD = TOP + '/clang-triage.ninja'
# The directory to save the HTML report to
REPORT_DIR = '/home/sliedes/public_html/clang-triage'
# The filename of the actual XHTML report file under REPORT_DIR
REPORT_FILENAME = 'triage_report.xhtml'
# Change to bzip2 if you don't have the (parallel) pbzip2
BZIP2_COMMAND = 'pbzip2'
# Parameters to give to ninja to build LLVM. For example, -j8 to run
# on 8 cores (the default is derived from number of cores available).
NINJA_PARAMS = []
# seconds; will wait additional this many seconds for it to terminate
# after SIGTERM and then kill it
CLANG_TIMEOUT = 4
# common for both triage and reduction
CLANG_PARAMS = ['-Werror', '-ferror-limit=5', '-std=c++11',
'-fno-crash-diagnostics', '-xc++', '-c',
'-o' '/dev/null', '-']
REDUCTION_EXTRA_CLANG_PARAMS = []
TRIAGE_EXTRA_CLANG_PARAMS = []
# A map from human-readable names to directories where to run git pull
PROJECTS = {'llvm': LLVM_SRC, 'clang': LLVM_SRC + '/tools/clang'}
# Path to binary to test
CLANG_BINARY = BUILD + '/bin/clang'
# Do not do git pull more often than this (seconds)
MIN_GIT_CHECKOUT_INTERVAL = 10*60
# Give creduce this long to complete before killing it
CREDUCE_TIMEOUT = 2*60 + 30
# Name of postgresql database to connect to
DB_NAME = 'clang_triage'
# Save miscellaneous reports in this dir (for example, outputs from
# failed clang runs where we couldn't determine the precise reason of
# failure)
MISC_REPORT_SAVE_DIR = 'saved'
# --- Generally you should not need to change anything below this.
CREDUCE_PROPERTY_SCRIPT = 'check_creduce_property.py'
# This path is used to disable llvm-symbolizer. It should contain a
# symlink named llvm-symbolizer pointing to /bin/false.
DUMMY_LLVM_SYMBOLIZER_PATH = 'dummy-llvm-symbolizer'
# Postgresql command to create schema.
CREATE_SCHEMA_COMMAND = [
'psql', '-v', 'ON_ERROR_STOP=1', '--quiet', '-d', DB_NAME,
'-f', 'create_schema.sql']
# timeout from GNU coreutils
CLANG_TIMEOUT_CMD = ['timeout', '-k', str(CLANG_TIMEOUT), str(CLANG_TIMEOUT)]
| TOP = '/home/sliedes/scratch/build/clang-triage'
# git repository directory
LLVM_SRC = TOP + '/llvm.src'
# build directory
BUILD = TOP + '/clang-triage.ninja'
# The directory to save the HTML report to
REPORT_DIR = '/home/sliedes/public_html/clang-triage'
# The filename of the actual XHTML report file under REPORT_DIR
REPORT_FILENAME = 'triage_report.xhtml'
# Change to bzip2 if you don't have the (parallel) pbzip2
BZIP2_COMMAND = 'pbzip2'
# Parameters to give to ninja to build LLVM. For example, -j8 to run
# on 8 cores (the default is derived from number of cores available).
NINJA_PARAMS = []
# seconds; will wait additional this many seconds for it to terminate
# after SIGTERM and then kill it
CLANG_TIMEOUT = 4
# common for both triage and reduction
CLANG_PARAMS = ['-Werror', '-ferror-limit=5', '-std=c++11',
'-fno-crash-diagnostics', '-xc++', '-c',
'-o' '/dev/null', '-']
REDUCTION_EXTRA_CLANG_PARAMS = []
TRIAGE_EXTRA_CLANG_PARAMS = []
# A map from human-readable names to directories where to run git pull
PROJECTS = {'llvm': LLVM_SRC, 'clang': LLVM_SRC + '/tools/clang'}
# Path to tested binary
CLANG_BINARY = BUILD + '/bin/clang'
# Do not do git pull more often than this (seconds)
MIN_GIT_CHECKOUT_INTERVAL = 10*60
# Give creduce this long to complete before killing it
CREDUCE_TIMEOUT = 2*60 + 30
# Name of postgresql database to connect to
DB_NAME = 'clang_triage'
# Save miscellaneous reports in this dir (for example, outputs from
# failed clang runs where we couldn't determine the precise reason of
# failure)
MISC_REPORT_SAVE_DIR = 'saved'
# --- Generally you should not need to change anything below this.
CREDUCE_PROPERTY_SCRIPT = 'check_creduce_property.py'
# This path is used to disable llvm-symbolizer. It should contain a
# symlink named llvm-symbolizer pointing to /bin/false.
DUMMY_LLVM_SYMBOLIZER_PATH = 'dummy-llvm-symbolizer'
# Postgresql command to create schema.
CREATE_SCHEMA_COMMAND = [
'psql', '-v', 'ON_ERROR_STOP=1', '--quiet', '-d', DB_NAME,
'-f', 'create_schema.sql']
# timeout from GNU coreutils
CLANG_TIMEOUT_CMD = ['timeout', '-k', str(CLANG_TIMEOUT), str(CLANG_TIMEOUT)]
| mit | Python |
83e820209f9980e6c9103908b14ff07fee23dc41 | Change .env variable to KCLS_USER | mphuie/kcls-myaccount | getCheckedOut.py | getCheckedOut.py | import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os
load_dotenv(".env")
s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)
payload = {
"name": os.environ.get("KCLS_USER"),
"user_pin": os.environ.get("PIN")
}
p = s.post("https://kcls.bibliocommons.com/user/login", data=payload)
r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")
soup = BeautifulSoup(r.text, "html.parser")
checkedOutList = soup.find("div", { "id": "bibList" })
checkedOutItems = []
for title in checkedOutList.find_all("div", { "class": "listItem" }):
title_name = title.find("span", { "class": "title" })
due_date = title.find("span", { "class": "item_due_date" })
checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })
with open("checkedout.json", "w") as f:
print "%d title(s) checked out" % len(checkedOutItems)
f.write(json.dumps(checkedOutItems))
| import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os
load_dotenv(".env")
s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)
payload = {
"name": os.environ.get("USER"),
"user_pin": os.environ.get("PIN")
}
s.post("https://kcls.bibliocommons.com/user/login", data=payload)
r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")
soup = BeautifulSoup(r.text, "html.parser")
checkedOutList = soup.find("div", { "id": "bibList" })
checkedOutItems = []
for title in checkedOutList.find_all("div", { "class": "listItem" }):
title_name = title.find("span", { "class": "title" })
due_date = title.find("span", { "class": "item_due_date" })
checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })
with open("checkedout.json", "w") as f:
print "%d title(s) checked out" % len(checkedOutItems)
f.write(json.dumps(checkedOutItems))
| apache-2.0 | Python |
55dafd618d020d97a704ea1f6ec32551a5683513 | Add default SECRET_KEY to config | paulaylingdev/blogsite,paulaylingdev/blogsite | config.py | config.py | """Application configuration file."""
# Flask
DEBUG = False
SECRET_KEY = 'CHANGEME'
# Flask SQLAlchemy
SQLALCHEMY_DATABASE_URI = "sqlite:///foo.db"
SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask-HTMLmin
MINIFY_PAGE = True
# Flask Bcrypt
BCRYPT_LOG_ROUNDS = 14
# Flask-WTF
WTF_CSRF_SECRET_KEY = 'CHANGEME'
| """Application configuration file."""
# Flask
DEBUG = False
SECRET_KEY = ''
# Flask SQLAlchemy
SQLALCHEMY_DATABASE_URI = "sqlite:///foo.db"
SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask-HTMLmin
MINIFY_PAGE = True
# Flask Bcrypt
BCRYPT_LOG_ROUNDS = 14
# Flask-WTF
WTF_CSRF_SECRET_KEY = ''
| mit | Python |
12715c03a80d236630e1178baa5f5b4644d69d3a | increase schedule sleep time | paulgessinger/coalics,paulgessinger/coalics,paulgessinger/coalics | config.py | config.py | import os
PQ_PW=os.environ.get("POSTGRES_PASSWORD")
PQ_USER=os.environ.get("POSTGRES_USER")
PQ_DB=os.environ.get("POSTGRES_DB")
SQLALCHEMY_DATABASE_URI = 'postgresql+pygresql://'+PQ_USER+':'+PQ_PW+'@db/'+PQ_DB
CSRF_SECRET_KEY = os.environ.get("COALICS_CSRF_KEY").encode("utf-8")
SQLALCHEMY_TRACK_MODIFICATIONS = False
REGEX_TIMEOUT = 2
REDIS_HOST='redis'
REDIS_PORT=6379
SOURCE_UPDATE_FREQUENCY = 30*60
| import os
PQ_PW=os.environ.get("POSTGRES_PASSWORD")
PQ_USER=os.environ.get("POSTGRES_USER")
PQ_DB=os.environ.get("POSTGRES_DB")
SQLALCHEMY_DATABASE_URI = 'postgresql+pygresql://'+PQ_USER+':'+PQ_PW+'@db/'+PQ_DB
CSRF_SECRET_KEY = os.environ.get("COALICS_CSRF_KEY").encode("utf-8")
SQLALCHEMY_TRACK_MODIFICATIONS = False
REGEX_TIMEOUT = 2
REDIS_HOST='redis'
REDIS_PORT=6379
SOURCE_UPDATE_FREQUENCY = 10*60
| mit | Python |
ff105fc6d16f82b9acb7ca234154139ce8b39a8f | Refactor file deletion into own method | mgarbacz/bucketeer | bucketeer/uploader.py | bucketeer/uploader.py | import boto, os, hashlib, json
# Upload modified files with bucket and directory specified in config.json
def upload_from_config():
config = json.loads(open('config.json').read())
upload(config['bucket'], config['dir'])
# Upload modified files in src_folder to the s3 bucket named
def upload(bucket_name, src_folder):
success = False
try:
# Requires S3 creds, which are set as environment variables
connection = boto.connect_s3();
bucket = connection.lookup(bucket_name)
if bucket == None:
# Create the bucket if we don't have it
print 'Bucket ' + bucket_name + ' not found. Creating...'
bucket = connection.create_bucket(bucket_name)
print 'Bucket ' + bucket_name + ' created.'
delete_files(src_folder, bucket)
# Iterating over all files in the src folder
for directory, subdirectories, files in os.walk(src_folder):
# Upload each local file in files
for filename in files:
upload_file(filename, directory, src_folder, bucket)
# If we got here with no exceptions, changes have been committed
success = True
# Boto provides little in the way of exception handling, need a blanket
except Exception, e:
print e
if success:
print('Changes committed')
else:
print('Changes not committed')
return success
def upload_file(filename, directory, src_folder, bucket):
# Set up file paths and get s3 file
local_file_path = os.path.join(directory, filename)
# For s3, we don't want the 'public' part of file path
s3_file_path = local_file_path[len(src_folder)+1:]
s3_file = bucket.get_key(s3_file_path)
# If file exists: compare hashes, else: force unmatching hashes
if s3_file != None:
# s3 surround hash with quotes, so we need to include them
local_hash = '\"%s\"' % (
hashlib.md5(open(local_file_path, 'rb').read()).hexdigest() )
s3_hash = s3_file.etag
else:
local_hash = 0
s3_hash = 1
# If the hashes are different, we need to upload the file
if local_hash != s3_hash:
print filename + ' is uploading...'
key_file = boto.s3.key.Key(bucket)
key_file.key = s3_file_path
key_file.set_contents_from_filename(local_file_path)
key_file.make_public()
# Will print after update or if no update was required
print filename + ' is up to date.'
def delete_files(src_folder, bucket):
# Delete each s3 file not present locally
for s3_file in bucket.list():
try:
with open(os.path.join(src_folder, s3_file.key)):
print s3_file.key + ' exists locally.'
except IOError:
print s3_file.key + ' is being deleted from s3...'
bucket.delete_key(s3_file.key)
print s3_file.key + ' has been deleted from s3.'
| import boto, os, hashlib, json
# Upload modified files with bucket and directory specified in config.json
def upload_from_config():
config = json.loads(open('config.json').read())
upload(config['bucket'], config['dir'])
# Upload modified files in src_folder to the s3 bucket named
def upload(bucket_name, src_folder):
success = False
try:
# Requires S3 creds, which are set as environment variables
connection = boto.connect_s3();
bucket = connection.lookup(bucket_name)
if bucket == None:
# Create the bucket if we don't have it
print 'Bucket ' + bucket_name + ' not found. Creating...'
bucket = connection.create_bucket(bucket_name)
print 'Bucket ' + bucket_name + ' created.'
# Delete each s3 file not present locally
for s3_file in bucket.list():
try:
with open(os.path.join(src_folder, s3_file.key)):
print s3_file.key + ' exists locally.'
except IOError:
print s3_file.key + ' is being deleted from s3...'
bucket.delete_key(s3_file.key)
print s3_file.key + ' has been deleted from s3.'
# Iterating over all files in the src folder
for directory, subdirectories, files in os.walk(src_folder):
# Upload each local file in files
for filename in files:
upload_file(filename, directory, src_folder, bucket)
# If we got here with no exceptions, changes have been committed
success = True
# Boto provides little in the way of exception handling, need a blanket
except Exception, e:
print e
if success:
print('Changes committed')
else:
print('Changes not committed')
return success
def upload_file(filename, directory, src_folder, bucket):
# Set up file paths and get s3 file
local_file_path = os.path.join(directory, filename)
# For s3, we don't want the 'public' part of file path
s3_file_path = local_file_path[len(src_folder)+1:]
s3_file = bucket.get_key(s3_file_path)
# If file exists: compare hashes, else: force unmatching hashes
if s3_file != None:
# s3 surround hash with quotes, so we need to include them
local_hash = '\"%s\"' % (
hashlib.md5(open(local_file_path, 'rb').read()).hexdigest() )
s3_hash = s3_file.etag
else:
local_hash = 0
s3_hash = 1
# If the hashes are different, we need to upload the file
if local_hash != s3_hash:
print filename + ' is uploading...'
key_file = boto.s3.key.Key(bucket)
key_file.key = s3_file_path
key_file.set_contents_from_filename(local_file_path)
key_file.make_public()
# Will print after update or if no update was required
print filename + ' is up to date.'
| mit | Python |
f0246b9897d89c1ec6f2361bbb488c4e162e5c5e | Make timestamps more specific as temporal context fades. | madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate | reddit_liveupdate/utils.py | reddit_liveupdate/utils.py | import datetime
import itertools
import pytz
from babel.dates import format_time, format_datetime
from pylons import c
def pairwise(iterable):
a, b = itertools.tee(iterable)
next(b, None)
return itertools.izip(a, b)
def pretty_time(dt):
display_tz = pytz.timezone(c.liveupdate_event.timezone)
today = datetime.datetime.now(display_tz).date()
date = dt.astimezone(display_tz).date()
if date == today:
return format_time(
time=dt,
tzinfo=display_tz,
format="HH:mm z",
locale=c.locale,
)
elif today - date < datetime.timedelta(days=365):
return format_datetime(
datetime=dt,
tzinfo=display_tz,
format="dd MMM HH:mm z",
locale=c.locale,
)
else:
return format_datetime(
datetime=dt,
tzinfo=display_tz,
format="dd MMM YYYY HH:mm z",
locale=c.locale,
)
| import itertools
import pytz
from babel.dates import format_time
from pylons import c
def pairwise(iterable):
a, b = itertools.tee(iterable)
next(b, None)
return itertools.izip(a, b)
def pretty_time(dt):
display_tz = pytz.timezone(c.liveupdate_event.timezone)
return format_time(
time=dt,
tzinfo=display_tz,
format="HH:mm z",
locale=c.locale,
)
| bsd-3-clause | Python |
d2b4b9318df648e5d8808992883beae29c7d60f7 | Remove "with .. as" statement from .py | gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim | pylib/gfxprim/render_utils.py | pylib/gfxprim/render_utils.py | #
# gfxprim.render_utils
#
import jinja2
import logging as log
import os
import time
import re
def template_error(s, *args):
raise Exception(s, *args)
def create_environment(config, template_dir):
env = jinja2.Environment(
line_statement_prefix = "%%",
line_comment_prefix = "##",
undefined = jinja2.StrictUndefined,
loader = jinja2.FileSystemLoader(template_dir))
env.globals['undefined'] = jinja2.StrictUndefined()
env.globals['pixelsizes'] = config.pixelsizes
env.globals['pixeltypes'] = config.pixeltypes
env.globals['pixeltypes_dict'] = config.pixeltypes_dict
env.globals['config'] = config
from gfxprim.pixelsize import LE, BE
env.globals['LE'] = LE
env.globals['BE'] = BE
env.globals['len'] = len
env.globals['error'] = template_error
env.globals['hex'] = lambda(x): hex(x).rstrip('L')
return env
def render_file(env, source, result):
source_file = open(source)
try:
source_text = source_file.read()
finally:
source_text.close()
# Hack to preserve empty lines before %% line_statement
source_text = re.sub("\n\n[ \t]*%%", "\n{{''}}\n%%", source_text)
tmpl = env.from_string(source_text)
tmpl.filename = source
result_text = tmpl.render(
date = time.ctime(),
target = result,
template = source,
header_guard = \
os.path.split(result)[1].upper().replace('.', '_').replace('-', '_'),
)
result_file = open(result, "w")
try:
result_file.write(result_text)
finally:
resulf_file.close()
def load_gfxprimconfig(config_file = None):
"""Initialize GfxPrimConfig from a given or guessed config file.
Looks for the file by parameter, in env['PIXELTYPE_DEFS'] and
in dir(__file__)/../../gfxprim_config.py, in that order.
Returns GfxPrimConfig or None on error
"""
if not config_file:
config_file = os.environ.get("PIXELTYPE_DEFS", None)
if not config_file:
path = os.path.dirname(os.path.abspath(__file__))
config_file = os.path.abspath(
os.path.join(path, "..", "..", "gfxprim_config.py"))
if not os.path.exists(config_file):
log.error("WARNING: GfxPrimConfig file %s not found!\n",
config_file)
return None
from gfxprim.pixeltype import PixelType
from gfxprim.pixelsize import PixelSize, LE, BE
from gfxprim.gfxprimconfig import GfxPrimConfig
l = {"PixelType": PixelType,
"PixelSize": PixelSize,
"LE": LE,
"BE": BE,
"GfxPrimConfig": GfxPrimConfig
}
execfile(config_file, globals(), l)
config = l["config"]
return config
| #
# gfxprim.render_utils
#
import jinja2
import logging as log
import os
import time
import re
def template_error(s, *args):
raise Exception(s, *args)
def create_environment(config, template_dir):
env = jinja2.Environment(
line_statement_prefix = "%%",
line_comment_prefix = "##",
undefined = jinja2.StrictUndefined,
loader = jinja2.FileSystemLoader(template_dir))
env.globals['undefined'] = jinja2.StrictUndefined()
env.globals['pixelsizes'] = config.pixelsizes
env.globals['pixeltypes'] = config.pixeltypes
env.globals['pixeltypes_dict'] = config.pixeltypes_dict
env.globals['config'] = config
from gfxprim.pixelsize import LE, BE
env.globals['LE'] = LE
env.globals['BE'] = BE
env.globals['len'] = len
env.globals['error'] = template_error
env.globals['hex'] = lambda(x): hex(x).rstrip('L')
return env
def render_file(env, source, result):
with open(source) as source_file:
source_text = source_file.read()
# Hack to preserve empty lines before %% line_statement
source_text = re.sub("\n\n[ \t]*%%", "\n{{''}}\n%%", source_text)
tmpl = env.from_string(source_text)
tmpl.filename = source
result_text = tmpl.render(
date = time.ctime(),
target = result,
template = source,
header_guard = \
os.path.split(result)[1].upper().replace('.', '_').replace('-', '_'),
)
with open(result, "w") as result_file:
result_file.write(result_text)
def load_gfxprimconfig(config_file = None):
"""Initialize GfxPrimConfig from a given or guessed config file.
Looks for the file by parameter, in env['PIXELTYPE_DEFS'] and
in dir(__file__)/../../gfxprim_config.py, in that order.
Returns GfxPrimConfig or None on error
"""
if not config_file:
config_file = os.environ.get("PIXELTYPE_DEFS", None)
if not config_file:
path = os.path.dirname(os.path.abspath(__file__))
config_file = os.path.abspath(
os.path.join(path, "..", "..", "gfxprim_config.py"))
if not os.path.exists(config_file):
log.error("WARNING: GfxPrimConfig file %s not found!\n",
config_file)
return None
from gfxprim.pixeltype import PixelType
from gfxprim.pixelsize import PixelSize, LE, BE
from gfxprim.gfxprimconfig import GfxPrimConfig
l = {"PixelType": PixelType,
"PixelSize": PixelSize,
"LE": LE,
"BE": BE,
"GfxPrimConfig": GfxPrimConfig
}
execfile(config_file, globals(), l)
config = l["config"]
return config
| lgpl-2.1 | Python |
95998bd2472a79294fcb3cb10cce99198a38d7cf | Change Wiblog models to make dates less prone to change | lo-windigo/fragdev,lo-windigo/fragdev | wiblog/models.py | wiblog/models.py | from django.db import models
from django.core.urlresolvers import reverse
## Tag - A text tag, used to categorize posts
class Tag(models.Model):
desc = models.CharField('Tag', max_length=50, unique=True)
def __str__(self):
return self.desc
def get_absolute_url(self):
return reverse("wiblog:tags", args=[self.desc])
## Post - a blog post
class Post(models.Model):
DFT = 'DFT'
PUB = 'PUB'
PUBLISH_STATUS = (
(DFT, 'Draft'),
(PUB, 'Published'),
)
body = models.TextField()
date = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
slug = models.SlugField(max_length=150)
status = models.CharField(max_length=9, choices=PUBLISH_STATUS)
tags = models.ManyToManyField(Tag, blank=True)
title = models.CharField(max_length=150)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("wiblog:post", args=[self.slug])
## Comments - Other people's input on posts
class Comment(models.Model):
HAM = 'HAM'
SPM = 'SPM'
UNK = 'UNK'
MOD_STATUS = (
(HAM, 'Valid'),
(SPM, 'Invalid (Spam)'),
(UNK, 'Unmoderated'),
)
comment = models.TextField()
name = models.CharField(max_length=150)
date = models.DateTimeField(auto_now_add=True)
moderated = models.CharField(choices=MOD_STATUS, default=UNK, max_length=14)
post = models.ForeignKey(Post)
url = models.URLField()
# Provide a decent representation for the admin section
def __str__(self):
prev = self.comment
if len(prev) > 75:
prev = prev[0:75]+"..."
return "'"+self.name+"' Says: '"+prev+"'"
| from django.db import models
from django.core.urlresolvers import reverse
## Tag - A text tag, used to categorize posts
class Tag(models.Model):
desc = models.CharField('Tag', max_length=50, unique=True)
def __str__(self):
return self.desc
def get_absolute_url(self):
return reverse("wiblog:tags", args=[self.desc])
## Post - a blog post
class Post(models.Model):
DFT = 'DFT'
PUB = 'PUB'
PUBLISH_STATUS = (
(DFT, 'Draft'),
(PUB, 'Published'),
)
body = models.TextField()
date = models.DateTimeField(auto_now=True)
slug = models.SlugField(max_length=150)
status = models.CharField(max_length=9, choices=PUBLISH_STATUS)
tags = models.ManyToManyField(Tag, blank=True)
title = models.CharField(max_length=150)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("wiblog:post", args=[self.slug])
## Comments - Other people's input on posts
class Comment(models.Model):
HAM = 'HAM'
SPM = 'SPM'
UNK = 'UNK'
MOD_STATUS = (
(HAM, 'Valid'),
(SPM, 'Invalid (Spam)'),
(UNK, 'Unmoderated'),
)
comment = models.TextField()
name = models.CharField(max_length=150)
date = models.DateTimeField(auto_now=True)
moderated = models.CharField(choices=MOD_STATUS, default=UNK, max_length=14)
post = models.ForeignKey(Post)
url = models.URLField()
# Provide a decent representation for the admin section
def __str__(self):
prev = self.comment
if len(prev) > 75:
prev = prev[0:75]+"..."
return "'"+self.name+"' Says: '"+prev+"'"
| agpl-3.0 | Python |
e74bc9fd3908785c02941e400b97ce48ed45f099 | Remove obsolete import. | ohsu-qin/qipipe | qipipe/interfaces/__init__.py | qipipe/interfaces/__init__.py | from .compress import Compress
from .copy import Copy
from .fix_dicom import FixDicom
from .group_dicom import GroupDicom
from .map_ctp import MapCTP
from .move import Move
from .glue import Glue
from .uncompress import Uncompress
from .xnat_upload import XNATUpload | from .compress import Compress
from .copy import Copy
from .fix_dicom import FixDicom
from .group_dicom import GroupDicom
from .map_ctp import MapCTP
from .move import Move
from .glue import Glue
from .stage_ctp import StageCTP
from .uncompress import Uncompress
from .xnat_upload import XNATUpload | bsd-2-clause | Python |
0f7e9177a29859e208fcab1a51007beac2a733f6 | Remove system info from KNX diagnostic (#64721) | toddeye/home-assistant,mezz64/home-assistant,nkgilley/home-assistant,rohitranjan1991/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,rohitranjan1991/home-assistant,GenericStudent/home-assistant,mezz64/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,toddeye/home-assistant,GenericStudent/home-assistant | homeassistant/components/knx/diagnostics.py | homeassistant/components/knx/diagnostics.py | """Diagnostics support for KNX."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant import config as conf_util
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from . import CONFIG_SCHEMA
from .const import DOMAIN
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
"""Return diagnostics for a config entry."""
diag: dict[str, Any] = {}
knx_module = hass.data[DOMAIN]
diag["xknx"] = {
"version": knx_module.xknx.version,
"current_address": str(knx_module.xknx.current_address),
}
diag["config_entry_data"] = dict(config_entry.data)
raw_config = await conf_util.async_hass_config_yaml(hass)
diag["configuration_yaml"] = raw_config.get(DOMAIN)
try:
CONFIG_SCHEMA(raw_config)
except vol.Invalid as ex:
diag["configuration_error"] = str(ex)
else:
diag["configuration_error"] = None
return diag
| """Diagnostics support for KNX."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant import config as conf_util
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.system_info import async_get_system_info
from . import CONFIG_SCHEMA
from .const import DOMAIN
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
"""Return diagnostics for a config entry."""
diag: dict[str, Any] = {}
diag["home_assistant"] = await async_get_system_info(hass)
knx_module = hass.data[DOMAIN]
diag["xknx"] = {
"version": knx_module.xknx.version,
"current_address": str(knx_module.xknx.current_address),
}
diag["config_entry_data"] = dict(config_entry.data)
raw_config = await conf_util.async_hass_config_yaml(hass)
diag["configuration_yaml"] = raw_config.get(DOMAIN)
try:
CONFIG_SCHEMA(raw_config)
except vol.Invalid as ex:
diag["configuration_error"] = str(ex)
else:
diag["configuration_error"] = None
return diag
| apache-2.0 | Python |
603df7a0622ee32255aa04acb68cbcc3f9a4842f | rework pass_fail script as a module | OpenFAST/OpenFAST,OpenFAST/OpenFAST,OpenFAST/OpenFAST | reg_tests/lib/pass_fail.py | reg_tests/lib/pass_fail.py | #
# Copyright 2017 National Renewable Energy Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This program determines whether a new solution has regressed from the "gold standard"
solution. It reads two OpenFAST binary output files (.outb), and computes the variance
of the two solution files for each output channel. If the max variance is less than
the given tolerance, the test case passes.
Usage: python3 pass_fail.py solution1 solution2 tolerance
Example: python3 pass_fail.py output-local/Test01.outb gold-standard/Test01.outb 0.00000001
"""
import sys, os
import numpy as np
from numpy import linalg as LA
from fast_io import load_output
import rtestlib as rtl
def readFASTOut(fastoutput):
try:
data, info = load_output(fastoutput)
return (data, info)
except Exception as e:
rtl.exitWithError("Error: {}".format(e))
def passRegressionTest(norm, tolerance):
result = True if max(norm) < tolerance else False
return result
def calculateRelativeNorm(testData, baselineData):
## gold standard RMS, L2 norm
nColumns = np.size(testData,1)
diff = np.ones(nColumns)
rms_gold = np.ones(nColumns)
norm_diff = np.ones(nColumns)
for j in range(nColumns):
rms_gold[j] = LA.norm(baselineData[:,j], 2)
diff = testData[:,j]-baselineData[:,j]
norm_diff[j] = LA.norm(diff, 2)
# replace any 0s with small number before for division
rms_gold[rms_gold == 0] = 1e-16
norm = norm_diff / rms_gold
return norm
if __name__=="__main__":
rtl.validateInputOrExit(sys.argv, 4, "{} test_solution baseline_solution tolerance".format(sys.argv[0]))
testSolution = sys.argv[1]
baselineSolution = sys.argv[2]
tolerance = sys.argv[3]
try:
tolerance = float(tolerance)
except ValueError:
rtl.exitWithError("Error: invalid tolerance given, {}".format(tolerance))
rtl.validateFileOrExit(testSolution)
rtl.validateFileOrExit(baselineSolution)
testData, testInfo = readFASTOut(testSolution)
baselineData, baselineInfo = readFASTOut(baselineSolution)
norm = calculateRelativeNorm(testData, baselineData)
if passRegressionTest(norm, tolerance):
print('PASS')
sys.exit(0)
else:
dict1, info1 = readFASTOut(testSolution)
for i in range(len(info1['attribute_names'])):
print(info1['attribute_names'][i], norm[i])
sys.exit(1)
| #
# Copyright 2017 National Renewable Energy Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This program determines whether a new solution has regressed from the "gold standard"
solution. It reads two OpenFAST binary output files (.outb), and computes the variance
of the two solution files for each output channel. If the max variance is less than
the given tolerance, the test case passes.
Usage: python3 pass_fail.py solution1 solution2 tolerance
Example: python3 pass_fail.py output-local/Test01.outb gold-standard/Test01.outb 0.00000001
"""
import sys, os
import numpy as np
from numpy import linalg as LA
from fast_io import load_output
def exitWithError(error):
print(error)
sys.exit(1)
# validate input arguments
nArgsExpected = 4
if len(sys.argv) < nArgsExpected:
exitWithError("Error: {} arguments given, expected {}\n".format(len(sys.argv), nArgsExpected) +
"Usage: {} solution1 solution2 tolerance".format(sys.argv[0]))
solutionFile1 = sys.argv[1]
solutionFile2 = sys.argv[2]
solutionTolerance = sys.argv[3]
if not os.path.isfile(solutionFile1):
exitWithError("Error: solution file does not exist at {}".format(solutionFile1))
if not os.path.isfile(solutionFile2):
exitWithError("Error: solution file does not exist at {}".format(solutionFile2))
try:
solutionTolerance = float(solutionTolerance)
except ValueError:
exitWithError("Error: invalid tolerance given, {}".format(solutionTolerance))
# parse the FAST solution files
try:
dict1, info1 = load_output(solutionFile1)
dict2, info2 = load_output(solutionFile2)
except Exception as e:
exitWithError("Error: {}".format(e))
## gold standard RMS, L2 norm
nColumns = np.size(dict1,1)
diff = np.ones(nColumns)
rms_gold = np.ones(nColumns)
norm_diff = np.ones(nColumns)
for j in range(nColumns):
rms_gold[j] = LA.norm(dict2[:,j], 2)
diff = dict1[:,j]-dict2[:,j]
norm_diff[j] = LA.norm(diff, 2)
# replace any 0s with small number before for division
rms_gold[rms_gold == 0] = 1e-16
norm = norm_diff / rms_gold
####### need to reverse inequality to actually see output since test currently passes every time ######
if max(norm) < solutionTolerance:
print('PASS')
sys.exit(0)
else:
for i in range(len(info1['attribute_names'])):
print(info1['attribute_names'][i], norm[i])
sys.exit(1)
| apache-2.0 | Python |
331764246dced0d9fc31d86671d1b1ecbc1dc335 | update to zimport file adding try/except to ensure works for all files | lamastex/scalable-data-science,lamastex/scalable-data-science,lamastex/scalable-data-science,lamastex/scalable-data-science,lamastex/scalable-data-science,lamastex/scalable-data-science | _sds/basics/infrastructure/onpremise/dockerCompose/zimport/zimport.py | _sds/basics/infrastructure/onpremise/dockerCompose/zimport/zimport.py | #! /usr/bin/python3
import argparse
import json
import requests
import time
import os
from os.path import isfile, join
parser = argparse.ArgumentParser(description = "Import one or more Zeppelin \
notebooks into a running Zeppelin server. The imported files will be found \
in the folder set in ZEPPELIN_NOTEBOOK_DIR in the zeppelin-env.sh \
configuration file, defaulting to the notebook folder in the zeppelin \
root folder.")
parser.add_argument('-a', '--host', default = 'localhost', dest = 'host',
help = "Address of server running Zeppelin. Default: localhost")
parser.add_argument('-p', '--port', default = '8080', dest = 'port',
help = "Port used by Zeppelin. Default: 8080")
parser.add_argument('notebook_dir', metavar = "Input directory",
help = "Path to directory containing Zeppelin notebooks to be imported.")
args = parser.parse_args()
host = "http://{}:{}".format(args.host, args.port)
def import_notebook(note):
requestURL = "{}/api/notebook/import".format(host)
try:
r = requests.post(requestURL, data = note.encode('utf-8')).json()
except:
r = requests.post(requestURL, data = note.encode('utf-8'))
if r["status"] == "OK":
return r["body"]
else:
raise IOError(str(r))
if __name__ == "__main__":
files = [join(args.notebook_dir, f) for f in os.listdir(args.notebook_dir)
if isfile(join(args.notebook_dir, f))]
for f in files:
json = open(f, "r").read()
import_notebook(json)
| #! /usr/bin/python3
import argparse
import json
import requests
import time
import os
from os.path import isfile, join
parser = argparse.ArgumentParser(description = "Import one or more Zeppelin \
notebooks into a running Zeppelin server. The imported files will be found \
in the folder set in ZEPPELIN_NOTEBOOK_DIR in the zeppelin-env.sh \
configuration file, defaulting to the notebook folder in the zeppelin \
root folder.")
parser.add_argument('-a', '--host', default = 'localhost', dest = 'host',
help = "Address of server running Zeppelin. Default: localhost")
parser.add_argument('-p', '--port', default = '8080', dest = 'port',
help = "Port used by Zeppelin. Default: 8080")
parser.add_argument('notebook_dir', metavar = "Input directory",
help = "Path to directory containing Zeppelin notebooks to be imported.")
args = parser.parse_args()
host = "http://{}:{}".format(args.host, args.port)
def import_notebook(note):
requestURL = "{}/api/notebook/import".format(host)
r = requests.post(requestURL, data = note.encode('utf-8')).json()
if r["status"] == "OK":
return r["body"]
else:
raise IOError(str(r))
if __name__ == "__main__":
files = [join(args.notebook_dir, f) for f in os.listdir(args.notebook_dir)
if isfile(join(args.notebook_dir, f))]
for f in files:
json = open(f, "r").read()
import_notebook(json)
| unlicense | Python |
6d2027b25c98d26d4012a6c39ed421bb8a74d4d7 | Split pygstc ending calls | RidgeRun/gstd-1.x,RidgeRun/gstd-1.x,RidgeRun/gstd-1.x,RidgeRun/gstd-1.x | examples/pygstc/simple_pipeline.py | examples/pygstc/simple_pipeline.py | import time
import sys
from pygstc.gstc import *
from pygstc.logger import *
#Create a custom logger with loglevel=DEBUG
gstd_logger = CustomLogger('simple_pipeline', loglevel='DEBUG')
#Create the client with the logger
gstd_client = GstdClient(logger=gstd_logger)
def printError():
print("To play run: python3 simple_pipeline.py play VIDEO_PATH")
print("To stop run: python3 simple_pipeline.py stop")
print("To stop run: python3 simple_pipeline.py reverse")
print("To stop run: python3 simple_pipeline.py slow_motion")
# Dispatch on the first CLI argument; anything unrecognized (or no argument
# at all) prints the usage help.
if(len(sys.argv) > 1):
    if(sys.argv[1]=="create"):
        FILE_SOURCE = sys.argv[2]
        #pipeline is the string with the pipeline description
        pipeline = "playbin uri=file:"+FILE_SOURCE
        #Following instructions create and play the pipeline
        gstd_client.pipeline_create ("p0", pipeline)
    elif(sys.argv[1]== "play"):
        gstd_client.pipeline_play ("p0")
        print("Playing")
    # Check this
    # reverse and slow motion restart the pipeline
    elif(sys.argv[1]== "reverse"):
        # rate=-1.0 plays backwards; the numeric format/flags/type values
        # presumably map to GStreamer seek enums (TIME format, FLUSH flag,
        # SET type) -- confirm against the pygstc event_seek docs.
        gstd_client.event_seek("p0", rate=-1.0, format=3, flags=1, start_type=1, start=0, end_type=1, end=-1)
        print("Playing in reverse")
    elif(sys.argv[1]== "slow_motion"):
        # rate=0.5 plays forward at half speed.
        gstd_client.event_seek("p0", rate=0.5, format=3, flags=1, start_type=1, start=0, end_type=1, end=-1)
        print("Playing in slow motion")
    elif(sys.argv[1]== "pause"):
        gstd_client.pipeline_pause ("p0")
        print("Pipeline paused")
    elif(sys.argv[1]== "stop"):
        gstd_client.pipeline_stop ("p0")
        # Fixed user-facing typo: "stoped" -> "stopped".
        print("Pipeline stopped")
    elif(sys.argv[1]== "delete"):
        gstd_client.pipeline_delete ("p0")
        print("Pipeline deleted")
    else:
        printError()
else:
    printError()
| import time
import sys
from pygstc.gstc import *
from pygstc.logger import *
# Create a custom logger with loglevel=DEBUG; log lines are tagged with the
# script name 'simple_pipeline'.
gstd_logger = CustomLogger('simple_pipeline', loglevel='DEBUG')
# Create the client with the logger; connects to the local gstd daemon.
gstd_client = GstdClient(logger=gstd_logger)
def printError():
    """Print usage help listing every sub-command this script accepts.

    Fixes the original help text, which printed "To stop run" for the
    reverse and slow_motion commands and told users to pass VIDEO_PATH to
    `play` (it belongs to `create`).
    """
    print("To create run: python3 simple_pipeline.py create VIDEO_PATH")
    print("To play run: python3 simple_pipeline.py play")
    print("To play in reverse run: python3 simple_pipeline.py reverse")
    print("To play in slow motion run: python3 simple_pipeline.py slow_motion")
    print("To stop run: python3 simple_pipeline.py stop")
# Dispatch on the first CLI argument; anything unrecognized (or no argument
# at all) prints the usage help.
if(len(sys.argv) > 1):
    if(sys.argv[1]=="create"):
        FILE_SOURCE = sys.argv[2]
        #pipeline is the string with the pipeline description
        pipeline = "playbin uri=file:"+FILE_SOURCE
        #Following instructions create and play the pipeline
        gstd_client.pipeline_create ("p0", pipeline)
    elif(sys.argv[1]== "play"):
        gstd_client.pipeline_play ("p0")
        print("Playing")
    # Check this
    # reverse and slow motion restart the pipeline
    elif(sys.argv[1]== "reverse"):
        # rate=-1.0 plays backwards; the numeric format/flags/type values
        # presumably map to GStreamer seek enums -- confirm against pygstc docs.
        gstd_client.event_seek("p0", rate=-1.0, format=3, flags=1, start_type=1, start=0, end_type=1, end=-1)
        print("Playing in reverse")
    elif(sys.argv[1]== "slow_motion"):
        # rate=0.5 plays forward at half speed.
        gstd_client.event_seek("p0", rate=0.5, format=3, flags=1, start_type=1, start=0, end_type=1, end=-1)
        print("Playing in slow motion")
    elif(sys.argv[1]== "stop"):
        #Following instructions stop and delete the pipeline
        gstd_client.pipeline_stop ("p0")
        gstd_client.pipeline_delete ("p0")
        print("Pipeline deleted")
    else:
        printError()
else:
    printError()
| lgpl-2.1 | Python |
b02e1a4031d897b112647f5b84369d08d1ec967a | improve docstring for leapfrog integrator | adrn/streams,adrn/streams | streams/integrate/leapfrog.py | streams/integrate/leapfrog.py | # coding: utf-8
""" """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
import uuid
# Third-party
import numpy as np
from ..potential import Potential
from ..util import _validate_coord
__all__ = ["PotentialIntegrator", "leapfrog"]
def leapfrog(acceleration_function, initial_position, initial_velocity, t1, t2, dt=None):
    """ Given an acceleration function and initial conditions, integrate from t1 to t2
        with a timestep dt using Leapfrog integration.

        See: http://ursa.as.arizona.edu/~rad/phys305/ODE_III/node11.html

        Parameters
        ----------
        acceleration_function : function
            A function that accepts a position or an array of positions and computes
            the acceleration at that position.
        initial_position : array, list
            A list or array of initial positions, shape (ndim,) or (nparticles, ndim).
        initial_velocity : array, list
            A list or array of initial velocities, same shape as initial_position.
        t1, t2 : numeric
            Start and end times of the integration.
        dt : numeric
            The fixed timestep. Required despite the default of None.

        Returns
        -------
        times : ndarray, shape (ntimesteps,)
        xs, vs : ndarray, shape (ntimesteps, nparticles, ndim)
            Positions and velocities at each time; index 0 holds the initial state.
    """
    if dt is None:
        # np.arange(t1, t2+None, None) would fail with a cryptic TypeError.
        raise ValueError("dt (the timestep) must be provided")

    initial_position = np.array(initial_position)
    initial_velocity = np.array(initial_velocity)

    if initial_position.shape != initial_velocity.shape:
        raise ValueError("initial_position shape must match initial_velocity shape! Right now, they mismatch: {0} != {1}".format(initial_position.shape, initial_velocity.shape))

    # Normalize to shape (nparticles, ndim) so a single particle and many
    # particles take the same code path below.
    if initial_position.ndim == 1:
        x_i = initial_position.reshape(1, len(initial_position))
        v_i = initial_velocity.reshape(1, len(initial_velocity))
    else:
        x_i = initial_position
        v_i = initial_velocity

    times = np.arange(t1, t2+dt, dt)
    Ntimesteps = len(times)

    # Shape of final object should be (Ntimesteps, Nparticles, Ndim)
    xs = np.zeros((Ntimesteps,) + x_i.shape, dtype=np.float64)
    vs = np.zeros((Ntimesteps,) + v_i.shape, dtype=np.float64)

    # Evaluate the acceleration once up front and carry a_ip1 across loop
    # iterations; the original recomputed acceleration_function(x_i) at the
    # top of every step, doubling the number of force evaluations.
    a_i = acceleration_function(x_i)

    for ii in range(Ntimesteps):
        # Store the pre-step state so index ii matches times[ii].
        xs[ii] = x_i
        vs[ii] = v_i

        # One kick-drift-kick leapfrog step.
        x_ip1 = x_i + v_i*dt + 0.5*a_i*dt*dt
        a_ip1 = acceleration_function(x_ip1)
        v_ip1 = v_i + 0.5*(a_i + a_ip1)*dt

        x_i, v_i, a_i = x_ip1, v_ip1, a_ip1

    return times, xs, vs
class PotentialIntegrator(object):
def __init__(self, potential, integrator=None):
''' Convenience class for integrating particles in a potential.
Parameters
----------
potential : Potential
integrator : function (optional)
The integration scheme. Defaults to leapfrog.
'''
if not isinstance(potential, Potential):
raise TypeError("potential must be a Potential object or subclass.") | # coding: utf-8
""" """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
import uuid
# Third-party
import numpy as np
from ..potential import Potential
from ..util import _validate_coord
__all__ = ["PotentialIntegrator", "leapfrog"]
def leapfrog(acceleration_function, initial_position, initial_velocity, t1, t2, dt=None):
    ''' Integrate from t1 to t2 with fixed timestep dt using the leapfrog scheme.

        acceleration_function maps positions -> accelerations.  Positions and
        velocities may be 1D (one particle) or 2D (nparticles, ndim); they are
        assumed to have matching shapes (not validated here).  Returns
        (times, xs, vs) where xs and vs have shape (ntimesteps, nparticles, ndim)
        and index 0 holds the initial state.

        NOTE(review): dt defaults to None but np.arange below requires a real
        number -- callers must always pass dt.
    '''
    initial_position = np.array(initial_position)
    initial_velocity = np.array(initial_velocity)

    # Normalize to (nparticles, ndim) so one particle and many particles
    # share the same code path.
    if initial_position.ndim == 1:
        # x_i just stands for positions, it's actually a vector
        x_i = np.array(initial_position).reshape(1, len(initial_position))
        v_i = np.array(initial_velocity).reshape(1, len(initial_position))
    else:
        x_i = np.array(initial_position)
        v_i = np.array(initial_velocity)

    times = np.arange(t1, t2+dt, dt)
    Ntimesteps = len(times)

    # Shape of final object should be (Ntimesteps, Nparticles, Ndim)
    xs = np.zeros((Ntimesteps,) + x_i.shape, dtype=np.float64)
    vs = np.zeros((Ntimesteps,) + v_i.shape, dtype=np.float64)

    for ii in range(Ntimesteps):
        t = times[ii]  # current time; not otherwise used in the step

        # Acceleration at the current position, then one kick-drift-kick step.
        a_i = acceleration_function(x_i)

        x_ip1 = x_i + v_i*dt + 0.5*a_i*dt*dt
        a_ip1 = acceleration_function(x_ip1)
        v_ip1 = v_i + 0.5*(a_i + a_ip1)*dt

        # Store the *pre-step* state so index ii matches times[ii].
        xs[ii,:,:] = x_i
        vs[ii,:,:] = v_i

        a_i = a_ip1
        x_i = x_ip1
        v_i = v_ip1

    return times, xs, vs
class PotentialIntegrator(object):
def __init__(self, potential, integrator=None):
''' Convenience class for integrating particles in a potential.
Parameters
----------
potential : Potential
integrator : function (optional)
The integration scheme. Defaults to leapfrog.
'''
if not isinstance(potential, Potential):
raise TypeError("potential must be a Potential object or subclass.") | mit | Python |
540c5f2969e75a0f461e9d46090cfe8d92c53b00 | Remove history name error for absolute paths | aayushkapadia/chemical_reaction_simulator | Simulator/plot.py | Simulator/plot.py | from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
    """Derive the history-file path for an XML simulation file.

    Swaps the 3-character extension for 'txt' and prefixes the base name
    (not the directory part) with 'history_'.  Both '/' and '\\' are
    honored as path separators, so Windows-style paths work on any OS.

    e.g. 'runs/reaction.xml' -> 'runs/history_reaction.txt'
    """
    y = xmlFileName[:-3] + 'txt'
    # Index of the last path separator (-1 when there is no directory
    # component); str.rfind replaces the original manual reverse scan.
    sep = max(y.rfind('/'), y.rfind('\\'))
    return y[:sep + 1] + 'history_' + y[sep + 1:]
def plotFromXML(fileName, simulationTime, chemicalList):
    """Run the simulation described by an XML file and plot the given chemicals.

    The simulation history is written to the matching 'history_*.txt' file.
    """
    simulator = XMLParser.getSimulator(fileName)
    simulator.simulate(int(simulationTime), getHistoryFileName(fileName))
    simulator.plot(chemicalList)
def plotFromTxt(fileName, simulationTime, chemicalList):
    """Convert a text reaction description to XML, then simulate and plot it."""
    plotFromXML(textToXML.getXMLFromTxt(fileName), simulationTime, chemicalList)
| from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
    """Derive the history-file path for an XML simulation file.

    Swaps the 3-character extension for 'txt' and prefixes only the base
    name with 'history_'.  The original prepended 'history_' to the whole
    path, producing broken names for files inside a directory
    (e.g. 'history_runs/c.txt' instead of 'runs/history_c.txt').
    """
    y = xmlFileName[:-3] + 'txt'
    # Index of the last '/' or '\\' separator; -1 when there is no directory part.
    sep = max(y.rfind('/'), y.rfind('\\'))
    return y[:sep + 1] + 'history_' + y[sep + 1:]
def plotFromXML(fileName,simulationTime,chemicalList):
    """Run the simulation described by an XML file and plot the given chemicals.

    The simulation history is written to the matching 'history_*.txt' file.
    """
    historyFile = getHistoryFileName(fileName)
    sim = XMLParser.getSimulator(fileName)
    sim.simulate(int(simulationTime),historyFile)
    sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
    """Convert a text reaction description to XML, then simulate and plot it."""
    xmlFile = textToXML.getXMLFromTxt(fileName)
    plotFromXML(xmlFile,simulationTime,chemicalList)
| mit | Python |
992d23e83c33d7e99c380c72536cbe86e1c1c4b2 | add copyright header to net.py | google/flight-lab,google/flight-lab,google/flight-lab,google/flight-lab | controller/common/net.py | controller/common/net.py | # Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for network related helpers."""
import socket
def get_ip():
  """Get primary IP (the one with a default route) of local machine.

  This works on both Linux and Windows platforms, and doesn't require working
  internet connection: connecting a UDP socket never sends a packet, it only
  makes the OS choose the outgoing interface, whose address getsockname()
  then reports.

  Returns:
    The primary IPv4 address as a string, or '127.0.0.1' when no route
    could be determined.
  """
  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    # doesn't even have to be reachable
    s.connect(('10.255.255.255', 1))
    return s.getsockname()[0]
  except OSError:
    # Narrowed from a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit; socket failures raise OSError.
    return '127.0.0.1'
  finally:
    s.close()
| import socket
def get_ip():
  """Get primary IP (the one with a default route) of local machine.

  This works on both Linux and Windows platforms, and doesn't require working
  internet connection: connecting a UDP socket never sends a packet, it only
  makes the OS choose the outgoing interface, whose address getsockname()
  then reports.

  Returns:
    The primary IPv4 address as a string, or '127.0.0.1' when no route
    could be determined.
  """
  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    # doesn't even have to be reachable
    s.connect(('10.255.255.255', 1))
    return s.getsockname()[0]
  except OSError:
    # Narrowed from a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit; socket failures raise OSError.
    return '127.0.0.1'
  finally:
    s.close()
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.