commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
d46c0ea86c29a6c832f0bc92bc58b2b8f6ca0335 | Add some logging in notifier module | rab206/self-proj-pi,syardumi/jasper-client,sunu/jasper-client,Brandon32/jasper-client,DarrenRainey/jasper-client,brad999/nikita-client,clumsical/hackthehouse-marty,benhoff/jasper-client,densic/HomeAutomation,joekinley/jasper-client,jskye/voicehud-jasper,aish9r/jasper-client,rab206/self-proj-pi,ajay-gandhi/jasper-client,Siretu/jasper-client,assanee/jasper-client,sanyaade-iot/jasper-client,steppy345/jasper-client,densic/HomeAutomation,bdizen/jasper-client,sukhoi/jasper-client,joekinley/jasper-client,zanbel/david,rahul1193/jasper-client,DarrenRainey/jasper-client,sunu/jasper-client,djeraseit/jasper-client,Siretu/jasper-client,sanyaade-iot/jasper-client,syardumi/jasper-client,jskye/voicehud-jasper,MaakbareWereld/storyteller,MaakbareWereld/storyteller,skylarker/jasper-client,brad999/nikita,assanee/jasper-client,rowhit/jasper-client,fritz-fritz/jasper-client,zanbel/david,tsaitsai/jasper-client,Brandon32/jasper-client,ajay-gandhi/jasper-client,brad999/nikita-client,brad999/nikita,skylarker/jasper-client,steppy345/jasper-client,clumsical/hackthehouse-marty,benhoff/jasper-client,auhlig/jasper-client,jasperproject/jasper-client,markferry/jasper-client,markferry/jasper-client,djeraseit/jasper-client,fritz-fritz/jasper-client,rahul1193/jasper-client,rowhit/jasper-client,tsaitsai/jasper-client,sukhoi/jasper-client,bdizen/jasper-client,jasperproject/jasper-client,aish9r/jasper-client,auhlig/jasper-client | client/notifier.py | client/notifier.py | # -*- coding: utf-8-*-
import Queue
from modules import Gmail
from apscheduler.scheduler import Scheduler
import logging
logging.basicConfig()
class Notifier(object):
class NotificationClient(object):
def __init__(self, gather, timestamp):
self.gather = gather
self.timestamp = timestamp
def run(self):
self.timestamp = self.gather(self.timestamp)
def __init__(self, profile):
self._logger = logging.getLogger(__name__)
self.q = Queue.Queue()
self.profile = profile
self.notifiers = []
if 'gmail_address' in profile and 'gmail_password' in profile:
self.notifiers.append(self.NotificationClient(self.handleEmailNotifications, None))
else:
self._logger.warning('gmail_address or gmail_password not set in profile, Gmail notifier will not be used')
sched = Scheduler()
sched.start()
sched.add_interval_job(self.gather, seconds=30)
def gather(self):
[client.run() for client in self.notifiers]
def handleEmailNotifications(self, lastDate):
"""Places new Gmail notifications in the Notifier's queue."""
emails = Gmail.fetchUnreadEmails(self.profile, since=lastDate)
if emails:
lastDate = Gmail.getMostRecentDate(emails)
def styleEmail(e):
return "New email from %s." % Gmail.getSender(e)
for e in emails:
self.q.put(styleEmail(e))
return lastDate
def getNotification(self):
"""Returns a notification. Note that this function is consuming."""
try:
notif = self.q.get(block=False)
return notif
except Queue.Empty:
return None
def getAllNotifications(self):
"""
Return a list of notifications in chronological order.
Note that this function is consuming, so consecutive calls
will yield different results.
"""
notifs = []
notif = self.getNotification()
while notif:
notifs.append(notif)
notif = self.getNotification()
return notifs
| # -*- coding: utf-8-*-
import Queue
from modules import Gmail
from apscheduler.scheduler import Scheduler
import logging
logging.basicConfig()
class Notifier(object):
class NotificationClient(object):
def __init__(self, gather, timestamp):
self.gather = gather
self.timestamp = timestamp
def run(self):
self.timestamp = self.gather(self.timestamp)
def __init__(self, profile):
self.q = Queue.Queue()
self.profile = profile
self.notifiers = []
if 'gmail_address' in profile and 'gmail_password' in profile:
self.notifiers.append(self.NotificationClient(self.handleEmailNotifications, None))
sched = Scheduler()
sched.start()
sched.add_interval_job(self.gather, seconds=30)
def gather(self):
[client.run() for client in self.notifiers]
def handleEmailNotifications(self, lastDate):
"""Places new Gmail notifications in the Notifier's queue."""
emails = Gmail.fetchUnreadEmails(self.profile, since=lastDate)
if emails:
lastDate = Gmail.getMostRecentDate(emails)
def styleEmail(e):
return "New email from %s." % Gmail.getSender(e)
for e in emails:
self.q.put(styleEmail(e))
return lastDate
def getNotification(self):
"""Returns a notification. Note that this function is consuming."""
try:
notif = self.q.get(block=False)
return notif
except Queue.Empty:
return None
def getAllNotifications(self):
"""
Return a list of notifications in chronological order.
Note that this function is consuming, so consecutive calls
will yield different results.
"""
notifs = []
notif = self.getNotification()
while notif:
notifs.append(notif)
notif = self.getNotification()
return notifs
| mit | Python |
306d6808179bb3b8c7d1619291447502000f0947 | Standardize variable names on outermost app | flubstep/foxgami.com,flubstep/foxgami.com | py/app.py | py/app.py | import json
import functools
from flask import Flask, Response, request
from foxgami.red import Story
from foxgami.user import User, Session
app = Flask(__name__)
@app.after_request
def add_content_headers(response):
response.headers['Access-Control-Allow-Origin'] = '*'
return response
def return_as_json(inner_f):
@functools.wraps(inner_f)
def new_f(*args, **kwargs):
result = inner_f(*args, **kwargs)
return Response(json.dumps(
result,
indent=4,
separators=(', ', ': ')
), mimetype='application/json')
return new_f
@app.route('/api/stories')
@return_as_json
def hardcoded_aww():
return Story.find(25)
@app.route('/api/stories/<string:story_id>')
@return_as_json
def get_story(story_id):
return Story.get(story_id)
@app.route('/api/users')
@return_as_json
def get_user():
token = request.args.get('token')
if token:
session = Session.get(token)
if session:
return Users.get(session['user_id'])
return User.get_logged_out()
@app.route('/api/users', methods=['POST'])
@return_as_json
def create_user():
user_info = request.get_json()
user = User.create(
name=user_info['name'],
email=user_info['email'],
password=user_info['password']
)
return User.row_to_json(user, with_session=True)
@app.route('/api/login', methods=['POST'])
@return_as_json
def login_user():
login_info = request.get_json()
user = User.get_by_email_password(
email=login_info['email'],
password=login_info['password']
)
return User.row_to_json(user, with_session=True)
if __name__ == '__main__':
app.run(debug=True)
| import json
import functools
from flask import Flask, Response, request
from foxgami.red import Story
from foxgami.user import User, Session
app = Flask(__name__)
@app.after_request
def add_content_headers(response):
response.headers['Access-Control-Allow-Origin'] = '*'
return response
def return_as_json(inner_f):
@functools.wraps(inner_f)
def new_f(*args, **kwargs):
result = inner_f(*args, **kwargs)
return Response(json.dumps(
result,
indent=4,
separators=(', ', ': ')
), mimetype='application/json')
return new_f
@app.route('/api/stories')
@return_as_json
def hardcoded_aww():
return Story.find(25)
@app.route('/api/stories/<string:story_id>')
@return_as_json
def get_story(story_id):
return Story.get(story_id)
@app.route('/api/users')
@return_as_json
def get_user():
token = request.args.get('token')
if token:
session = Session.get(token)
if user_id:
return Users.get(session['user_id'])
return User.get_logged_out()
@app.route('/api/users', methods=['POST'])
@return_as_json
def create_user():
user_info = request.get_json()
user_id = User.create(
name=user_info['name'],
email=user_info['email'],
password=user_info['password']
)
return User.row_to_json(User.get(user_id), with_session=True)
@app.route('/api/login', methods=['POST'])
@return_as_json
def login_user():
login_info = request.get_json()
user_info = User.get_by_email_password(
email=login_info['email'],
password=login_info['password']
)
return User.row_to_json(user_info, with_session=True)
if __name__ == '__main__':
app.run(debug=True)
| mit | Python |
99bd55917e998b7f51c7fd41069cd62eb5e9749a | Fix typing for cred parameter. | showell/zulip,showell/zulip,punchagan/zulip,shubhamdhama/zulip,eeshangarg/zulip,zulip/zulip,eeshangarg/zulip,eeshangarg/zulip,hackerkid/zulip,shubhamdhama/zulip,brainwane/zulip,shubhamdhama/zulip,rht/zulip,rht/zulip,andersk/zulip,rht/zulip,eeshangarg/zulip,kou/zulip,timabbott/zulip,hackerkid/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,brainwane/zulip,shubhamdhama/zulip,synicalsyntax/zulip,showell/zulip,showell/zulip,kou/zulip,timabbott/zulip,brainwane/zulip,zulip/zulip,tommyip/zulip,brainwane/zulip,andersk/zulip,timabbott/zulip,rht/zulip,zulip/zulip,andersk/zulip,andersk/zulip,rht/zulip,kou/zulip,synicalsyntax/zulip,kou/zulip,synicalsyntax/zulip,timabbott/zulip,andersk/zulip,timabbott/zulip,showell/zulip,punchagan/zulip,kou/zulip,tommyip/zulip,shubhamdhama/zulip,shubhamdhama/zulip,punchagan/zulip,eeshangarg/zulip,brainwane/zulip,hackerkid/zulip,tommyip/zulip,zulip/zulip,andersk/zulip,rht/zulip,kou/zulip,punchagan/zulip,punchagan/zulip,synicalsyntax/zulip,punchagan/zulip,andersk/zulip,synicalsyntax/zulip,timabbott/zulip,punchagan/zulip,hackerkid/zulip,hackerkid/zulip,tommyip/zulip,zulip/zulip,synicalsyntax/zulip,hackerkid/zulip,brainwane/zulip,eeshangarg/zulip,tommyip/zulip,zulip/zulip,zulip/zulip,shubhamdhama/zulip,kou/zulip,rht/zulip,showell/zulip,synicalsyntax/zulip,hackerkid/zulip,timabbott/zulip,tommyip/zulip,brainwane/zulip | zerver/views/zephyr.py | zerver/views/zephyr.py | from django.conf import settings
from django.http import HttpResponse, HttpRequest
from django.utils.translation import ugettext as _
from zerver.decorator import authenticated_json_view
from zerver.lib.ccache import make_ccache
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.users import get_api_key
from zerver.models import UserProfile
import base64
import logging
import subprocess
import ujson
from typing import Optional
# Hack for mit.edu users whose Kerberos usernames don't match what they zephyr
# as. The key is for Kerberos and the value is for zephyr.
kerberos_alter_egos = {
'golem': 'ctl',
}
@authenticated_json_view
@has_request_variables
def webathena_kerberos_login(request: HttpRequest, user_profile: UserProfile,
cred: Optional[str]=REQ(default=None)) -> HttpResponse:
global kerberos_alter_egos
if cred is None:
return json_error(_("Could not find Kerberos credential"))
if not user_profile.realm.webathena_enabled:
return json_error(_("Webathena login not enabled"))
try:
parsed_cred = ujson.loads(cred)
user = parsed_cred["cname"]["nameString"][0]
if user in kerberos_alter_egos:
user = kerberos_alter_egos[user]
assert(user == user_profile.email.split("@")[0])
ccache = make_ccache(parsed_cred)
except Exception:
return json_error(_("Invalid Kerberos cache"))
# TODO: Send these data via (say) rabbitmq
try:
api_key = get_api_key(user_profile)
subprocess.check_call(["ssh", settings.PERSONAL_ZMIRROR_SERVER, "--",
"/home/zulip/python-zulip-api/zulip/integrations/zephyr/process_ccache",
user,
api_key,
base64.b64encode(ccache).decode("utf-8")])
except Exception:
logging.exception("Error updating the user's ccache")
return json_error(_("We were unable to setup mirroring for you"))
return json_success()
| from django.conf import settings
from django.http import HttpResponse, HttpRequest
from django.utils.translation import ugettext as _
from zerver.decorator import authenticated_json_view
from zerver.lib.ccache import make_ccache
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.users import get_api_key
from zerver.models import UserProfile
import base64
import logging
import subprocess
import ujson
# Hack for mit.edu users whose Kerberos usernames don't match what they zephyr
# as. The key is for Kerberos and the value is for zephyr.
kerberos_alter_egos = {
'golem': 'ctl',
}
@authenticated_json_view
@has_request_variables
def webathena_kerberos_login(request: HttpRequest, user_profile: UserProfile,
cred: str=REQ(default=None)) -> HttpResponse:
global kerberos_alter_egos
if cred is None:
return json_error(_("Could not find Kerberos credential"))
if not user_profile.realm.webathena_enabled:
return json_error(_("Webathena login not enabled"))
try:
parsed_cred = ujson.loads(cred)
user = parsed_cred["cname"]["nameString"][0]
if user in kerberos_alter_egos:
user = kerberos_alter_egos[user]
assert(user == user_profile.email.split("@")[0])
ccache = make_ccache(parsed_cred)
except Exception:
return json_error(_("Invalid Kerberos cache"))
# TODO: Send these data via (say) rabbitmq
try:
api_key = get_api_key(user_profile)
subprocess.check_call(["ssh", settings.PERSONAL_ZMIRROR_SERVER, "--",
"/home/zulip/python-zulip-api/zulip/integrations/zephyr/process_ccache",
user,
api_key,
base64.b64encode(ccache).decode("utf-8")])
except Exception:
logging.exception("Error updating the user's ccache")
return json_error(_("We were unable to setup mirroring for you"))
return json_success()
| apache-2.0 | Python |
b4721daecfeb4c415a9130e1330a66a08d496c1e | bump version | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | cupy/_version.py | cupy/_version.py | __version__ = '5.0.0rc1'
| __version__ = '5.0.0b4'
| mit | Python |
12c02b41324c7679efdbe80c0b69c5e3c18fcfed | fix notifier to work on Python 3.6 | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | .pfnci/flexci_notify.py | .pfnci/flexci_notify.py | #!/usr/bin/env python3
# Note: keep this script runnable Python 3.6 until FlexCI Python update
import sys
import os
def main(argv):
# Slack config: "HOOK_URL1,HOOK_URL2,HOOK_URL3,..."
slack_config = os.environ.get('CUPY_CI_SLACK_CONFIG', None)
# Gitter config: "TOKEN:ROOM1,ROOM2,ROOM3,..."
gitter_config = os.environ.get('CUPY_CI_GITTER_CONFIG', None)
desc = os.environ.get('FLEXCI_DESCRIPTION', '<no description>')
subdesc = os.environ.get('FLEXCI_SUB_DESCRIPTION', '')
url = os.environ.get('FLEXCI_JOB_URL', '')
msg = argv[1]
body = '{}\n{}\n{}\n{}'.format(desc, subdesc, msg, url)
if slack_config is not None:
from slack_sdk.webhook import WebhookClient
for hook_url in slack_config.split(','):
slack = WebhookClient(hook_url)
slack.send(text=body)
if gitter_config is not None:
from gitterpy.client import GitterClient
token, rooms = gitter_config.split(':')
gitter = GitterClient(token)
for room in rooms.split(','):
gitter.messages.send(room, body)
if __name__ == '__main__':
main(sys.argv)
| #!/usr/bin/env python
import sys
import os
def main(argv):
# Slack config: "HOOK_URL1,HOOK_URL2,HOOK_URL3,..."
slack_config = os.environ.get('CUPY_CI_SLACK_CONFIG', None)
# Gitter config: "TOKEN:ROOM1,ROOM2,ROOM3,..."
gitter_config = os.environ.get('CUPY_CI_GITTER_CONFIG', None)
desc = os.environ.get('FLEXCI_DESCRIPTION', '<no description>')
subdesc = os.environ.get('FLEXCI_SUB_DESCRIPTION', '')
url = os.environ.get('FLEXCI_JOB_URL', '')
msg = argv[1]
body = f'{desc}\n{subdesc}\n{msg}\n{url}'
if slack_config is not None:
from slack_sdk.webhook import WebhookClient
for hook_url in slack_config.split(','):
slack = WebhookClient(hook_url)
slack.send(text=body)
if gitter_config is not None:
from gitterpy.client import GitterClient
token, rooms = gitter_config.split(':')
gitter = GitterClient(token)
for room in rooms.split(','):
gitter.messages.send(room, body)
if __name__ == '__main__':
main(sys.argv)
| mit | Python |
4335c06fdd6fb8d60c7cee6215e7d1da9ab36d18 | remove debugging print | CenterForOpenScience/modular-file-renderer,rdhyee/modular-file-renderer,felliott/modular-file-renderer,mfraezz/modular-file-renderer,Johnetordoff/modular-file-renderer,haoyuchen1992/modular-file-renderer,TomBaxter/modular-file-renderer,erinspace/modular-file-renderer,erinspace/modular-file-renderer,mfraezz/modular-file-renderer,Johnetordoff/modular-file-renderer,chrisseto/modular-file-renderer,AddisonSchiller/modular-file-renderer,rdhyee/modular-file-renderer,felliott/modular-file-renderer,TomBaxter/modular-file-renderer,chrisseto/modular-file-renderer,icereval/modular-file-renderer,AddisonSchiller/modular-file-renderer,TomBaxter/modular-file-renderer,erinspace/modular-file-renderer,chrisseto/modular-file-renderer,haoyuchen1992/modular-file-renderer,felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,mfraezz/modular-file-renderer,AddisonSchiller/modular-file-renderer,Johnetordoff/modular-file-renderer,haoyuchen1992/modular-file-renderer,mfraezz/modular-file-renderer,CenterForOpenScience/modular-file-renderer,icereval/modular-file-renderer,rdhyee/modular-file-renderer,haoyuchen1992/modular-file-renderer,rdhyee/modular-file-renderer,TomBaxter/modular-file-renderer,felliott/modular-file-renderer,icereval/modular-file-renderer,CenterForOpenScience/modular-file-renderer,AddisonSchiller/modular-file-renderer,CenterForOpenScience/modular-file-renderer | mfr_pdb/render.py | mfr_pdb/render.py | from mfr.core import RenderResult
from mako.lookup import TemplateLookup
template = TemplateLookup(
directories=['mfr_pdb/templates']
).get_template('pdb.mako')
def render_html(fp, **kwargs):
content = template.render(pdb_file=fp.read())
# assets must be loaded in this order
assets = {
'js': [
"/static/mfr/mfr_pdb/js/jquery-1.7.min.js",
"/static/mfr/mfr_pdb/js/Three49custom.js",
"/static/mfr/mfr_pdb/js/GLmol.js",
]
}
return RenderResult(content, assets)
| from mfr.core import RenderResult
from mako.lookup import TemplateLookup
template = TemplateLookup(
directories=['mfr_pdb/templates']
).get_template('pdb.mako')
def render_html(fp, **kwargs):
print kwargs
content = template.render(pdb_file=fp.read())
# assets must be loaded in this order
assets = {
'js': [
"/static/mfr/mfr_pdb/js/jquery-1.7.min.js",
"/static/mfr/mfr_pdb/js/Three49custom.js",
"/static/mfr/mfr_pdb/js/GLmol.js",
]
}
return RenderResult(content, assets)
| apache-2.0 | Python |
7ef0db81ab71d79519ebd37aa6650d0544f015df | Add code documentation and author information. | geektoni/Influenza-Like-Illness-Predictor,geektoni/Influenza-Like-Illness-Predictor | data_analysis/generate_weekly_data.py | data_analysis/generate_weekly_data.py | #!/usr/bin/env python
# Given a complete year files with data in the form (page, week, visits)
# this script will generate a convenient csv file which will store for
# each page and for each years's week the total number of visits.
#
# Written by Giovanni De Toni (2017)
# Email: giovanni.det at gmail.com
import fileinput
import pandas as pd
# Set up an empty dictionary
all_data={}
# Read from standard input
for line in fileinput.input():
# Split the line given
# 0: page name
# 1: week number
# 2: visits on that week
result = str.split(line)
# Set up an empty list if the key
# is null
if all_data.get(result[0], []) == []:
all_data[result[0]] = [0 for x in range(53)]
# Sum the visits
all_data[result[0]][int(result[1])] += int(result[2]);
# Generate a pandas dataframe with all the data
df = pd.DataFrame(all_data);
# Print the dataframe to show the result
print(df)
# Save it to file
df.to_csv("result.csv", index_label="Week")
| import fileinput
import pandas as pd
# Set up a dictionary
all_data={}
number=0
# Read from standard input
for line in fileinput.input():
# Split the line given
result = str.split(line)
# Print the line number analyzed
print(number)
# Set up an empty list if the key
# is null
if all_data.get(result[0], []) == []:
all_data[result[0]] = [0 for x in range(53)]
# Sum the visit counter
all_data[result[0]][int(result[1])] += int(result[2]);
number += 1;
# Print all the data
#for key, value in all_data.iteritems():
# print(key)
# print(value)
# Generate a pandas dataframe with all the data
df = pd.DataFrame(all_data);
print(df)
# Save it to file
df.to_csv("result.csv", index_label="Week")
| mit | Python |
a3d7ca17e32457a8479a540b970baa101dcb1720 | Update version to next release | NitishT/minio-py,minio/minio-py,minio/minio-py,NitishT/minio-py | minio/__init__.py | minio/__init__.py | # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio - MinIO Python Library for Amazon S3 Compatible Cloud Storage
~~~~~~~~~~~~~~~~~~~~~
>>> import minio
>>> minio = Minio('https://s3.amazonaws.com')
>>> for bucket in minio.list_buckets():
... print(bucket.name)
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'minio-py'
__author__ = 'MinIO, Inc.'
__version__ = '5.0.6'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015, 2016, 2017, 2018, 2019 MinIO, Inc.'
from .api import Minio
from .error import ResponseError
from .post_policy import PostPolicy
from .copy_conditions import CopyConditions
from .definitions import Bucket, Object
| # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio - MinIO Python Library for Amazon S3 Compatible Cloud Storage
~~~~~~~~~~~~~~~~~~~~~
>>> import minio
>>> minio = Minio('https://s3.amazonaws.com')
>>> for bucket in minio.list_buckets():
... print(bucket.name)
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'minio-py'
__author__ = 'MinIO, Inc.'
__version__ = '5.0.5'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015, 2016, 2017, 2018, 2019 MinIO, Inc.'
from .api import Minio
from .error import ResponseError
from .post_policy import PostPolicy
from .copy_conditions import CopyConditions
from .definitions import Bucket, Object
| apache-2.0 | Python |
f72da8ec96dfc4d3d31e628a35896c9044f9b438 | Update version | mechaxl/mixer,suriya/mixer | mixer/__init__.py | mixer/__init__.py | """ Mixer is a module that adds object generation tool for your application.
It's supported Django_ ORM, SQLAlchemy_ ORM, Pony_ ORM, Peewee_ ORM,
Mongoengine_ ODM and etc.
Mixer is very useful for testing and fixtures replacement.
:copyright: 2013 by Kirill Klenov.
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
# Module information
# ==================
__version__ = "4.8.1"
__project__ = "mixer"
__author__ = "horneds <horneds@gmail.com>"
__license__ = "BSD"
| """ Mixer is a module that adds object generation tool for your application.
It's supported Django_ ORM, SQLAlchemy_ ORM, Pony_ ORM, Peewee_ ORM,
Mongoengine_ ODM and etc.
Mixer is very useful for testing and fixtures replacement.
:copyright: 2013 by Kirill Klenov.
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
# Module information
# ==================
__version__ = "4.8.0"
__project__ = "mixer"
__author__ = "horneds <horneds@gmail.com>"
__license__ = "BSD"
| bsd-3-clause | Python |
e9a0ec7398708a1ecea5c03405a143bf5101afe9 | make sure specs_path is protected by assertion | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | dusty/config.py | dusty/config.py | """Module for handling the daemon config file stored at CONFIG_PATH.
This file determines the bundles the user currently wants active, as well
as the location of the Dusty specifications on disk."""
import yaml
from . import constants
def _load(filepath):
with open(filepath, 'r') as f:
return yaml.load(f.read())
def _dump(doc):
return yaml.dump(doc, default_flow_style=False)
def write_default_config():
default_config = {'bundles': [], 'repo_overrides': {}}
save_config(default_config)
def get_config():
return _load(constants.CONFIG_PATH)
def save_config(config):
with open(constants.CONFIG_PATH, 'w') as f:
f.write(_dump(config))
def get_config_value(key):
return get_config().get(key)
def save_config_value(key, value):
current_config = get_config()
current_config[key] = value
save_config(current_config)
def assert_config_key(key):
"""Raises a KeyError if the given key is not currently present
in the app config. It also ensures that keys with string values
do not contain empty strings. Useful for enforcing that certain
keys are present before entering codepaths that depend on them."""
value = get_config_value(key)
if value is None:
raise KeyError('Configuration key {} is required'.format(key))
elif isinstance(value, basestring) and value == '':
raise KeyError('Configuration key {} cannot be an empty string'.format(key))
| """Module for handling the daemon config file stored at CONFIG_PATH.
This file determines the bundles the user currently wants active, as well
as the location of the Dusty specifications on disk."""
import yaml
from . import constants
def _load(filepath):
with open(filepath, 'r') as f:
return yaml.load(f.read())
def _dump(doc):
return yaml.dump(doc, default_flow_style=False)
def write_default_config():
default_config = {'bundles': [], 'specs_path': '~/dusty-specs', 'repo_overrides': {}}
save_config(default_config)
def get_config():
return _load(constants.CONFIG_PATH)
def save_config(config):
with open(constants.CONFIG_PATH, 'w') as f:
f.write(_dump(config))
def get_config_value(key):
return get_config().get(key)
def save_config_value(key, value):
current_config = get_config()
current_config[key] = value
save_config(current_config)
def assert_config_key(key):
"""Raises a KeyError if the given key is not currently present
in the app config. It also ensures that keys with string values
do not contain empty strings. Useful for enforcing that certain
keys are present before entering codepaths that depend on them."""
value = get_config_value(key)
if value is None:
raise KeyError('Configuration key {} is required'.format(key))
elif isinstance(value, basestring) and value == '':
raise KeyError('Configuration key {} cannot be an empty string'.format(key))
| mit | Python |
5c711b4e5e13036d137d02ec7beae6a4c3bbdef7 | comment back things that should be commented | manzaigit/ntulearndownloader,manzaigit/ntulearndownloader | ntudownloader.py | ntudownloader.py | import os, requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
from settings import NTULEARN_URL
# insert loginmethod here
def ntu_login(username, password):
values = {'username': username,
'password': password}
r = requests.post(NTULEARN_URL, data=values)
print(r.content)
# scraper and downloader :)
givenurl = input("Enter URL: ")
html_code = requests.get(givenurl)
clean_html = BeautifulSoup(html_code.content,"html.parser")
valid_filelinks = []
for link in clean_html.find_all("a", href=True):
parsed_link = urlparse(link.get('href'))
if parsed_link.path[-4:] == '.pdf':
valid_filelinks.append(link.get('href'))
for valid_links in valid_filelinks:
print(valid_links)
print("%d file(s) discovered." % len(valid_filelinks))
if(len(valid_filelinks)):
decide_to_save = input("Would you like to save them all? (Y/N): ")
if decide_to_save.upper() == 'Y':
download_path = input("Enter download path: ")
for file in valid_filelinks:
download_link = urljoin(givenurl, file)
f = open(download_path + "\\" + os.path.basename(file), mode = 'wb')
f.write(requests.get(download_link).content)
f.close()
else: print("byebye! :)")
else:
print("There aren't any files to download. Byebye!")
| import os, requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
from settings import NTULEARN_URL
# insert loginmethod here
def ntu_login(username, password):
values = {'username': username,
'password': password}
r = requests.post(NTULEARN_URL, data=values)
print(r.content)
scraper and downloader :)
givenurl = input("Enter URL: ")
html_code = requests.get(givenurl)
clean_html = BeautifulSoup(html_code.content,"html.parser")
valid_filelinks = []
for link in clean_html.find_all("a", href=True):
parsed_link = urlparse(link.get('href'))
if parsed_link.path[-4:] == '.pdf':
valid_filelinks.append(link.get('href'))
for valid_links in valid_filelinks:
print(valid_links)
print("%d file(s) discovered." % len(valid_filelinks))
if(len(valid_filelinks)):
decide_to_save = input("Would you like to save them all? (Y/N): ")
if decide_to_save.upper() == 'Y':
download_path = input("Enter download path: ")
for file in valid_filelinks:
download_link = urljoin(givenurl, file)
f = open(download_path + "\\" + os.path.basename(file), mode = 'wb')
f.write(requests.get(download_link).content)
f.close()
else: print("byebye! :)")
else:
print("There aren't any files to download. Byebye!")
| mit | Python |
b82c575d1d2f89d79ca39e4a3e6a82af1919f51f | update setup | zhenzhai/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform | common/lib/sandbox-packages/setup.py | common/lib/sandbox-packages/setup.py | from setuptools import setup
setup(
name="sandbox-packages",
version="0.2.69",
packages=[
"loncapa",
"verifiers",
"hint",
"hint.hint_class_helpers",
"hint.hint_class_helpers.expr_parser",
"hint.hint_class",
"hint.hint_class.first_Universal",
"hint.hint_class.last_Universal",
"hint.hint_class.Week2",
"hint.hint_class.Week3",
"hint.hint_class.Week4",
"hint.hint_class.Week5",
"hint.hint_class.Week6",
"hint.hint_class.Week7",
"hint.hint_class.Week8",
"hint.hint_class.Week9"
],
py_modules=[
"eia",
],
install_requires=[
],
)
| from setuptools import setup
setup(
name="sandbox-packages",
version="0.2.68",
packages=[
"loncapa",
"verifiers",
"hint",
"hint.hint_class_helpers",
"hint.hint_class_helpers.expr_parser",
"hint.hint_class",
"hint.hint_class.first_Universal",
"hint.hint_class.last_Universal",
"hint.hint_class.Week2",
"hint.hint_class.Week3",
"hint.hint_class.Week4",
"hint.hint_class.Week5",
"hint.hint_class.Week6",
"hint.hint_class.Week7",
"hint.hint_class.Week8",
"hint.hint_class.Week9"
],
py_modules=[
"eia",
],
install_requires=[
],
)
| agpl-3.0 | Python |
6da5a808ff3412b37445cd2aa26ff3b17d0fa05c | Bump version | getweber/weber-cli | cob/__version__.py | cob/__version__.py | __version__ = "0.0.6"
| __version__ = "0.0.5"
| bsd-3-clause | Python |
d5810d7595f8970aba1cb48ce705247188bb0f3a | Fix type_list | eagafonov/json_schema_helpers | json_schema_helpers/helpers.py | json_schema_helpers/helpers.py |
# Simple types
type_string = dict(type="string")
type_null = dict(type="null")
type_integer = dict(type="integer")
type_number = dict(type="number")
type_object = dict(type="object")
type_list = dict(type="array") # Python clashed with JavaScript :-)
type_boolean = dict(type="boolean")
# Simple type or null
type_string_or_null = dict(oneOf=[type_string, type_null])
type_integer_or_null = dict(oneOf=[type_integer, type_null])
type_number_or_null = dict(oneOf=[type_number, type_null])
type_object_or_null = dict(oneOf=[type_object, type_null])
type_list_or_null = dict(oneOf=[type_list, type_null])
type_boolean_or_null = dict(oneOf=[type_boolean, type_null])
list_of_strings = dict(type="array", items=[{"type": "string"}])
# Complex
def list_of(ref, minItems=None, maxItems=None, exactItems=None):
d = dict(type="array", items=[{"$ref": "#/definitions/%s" % ref}])
if exactItems is not None:
minItems = exactItems
maxItems = exactItems
if minItems is not None:
d['minItems'] = minItems
if maxItems is not None:
d['maxItems'] = maxItems
return d
def ref(ref_id):
'''
Reference to type
'''
return {"$ref": "#/definitions/%s" % ref_id}
def schema(schema_options, **kwargs):
s = {
"$schema": "http://json-schema.org/draft-04/schema#"
}
s.update(schema_options)
s.update(kwargs)
return s
|
# Simple types
type_string = dict(type="string")
type_null = dict(type="null")
type_integer = dict(type="integer")
type_number = dict(type="number")
type_object = dict(type="object")
type_list = dict(type="list")
type_boolean = dict(type="boolean")
# Simple type or null
type_string_or_null = dict(oneOf=[type_string, type_null])
type_integer_or_null = dict(oneOf=[type_integer, type_null])
type_number_or_null = dict(oneOf=[type_number, type_null])
type_object_or_null = dict(oneOf=[type_object, type_null])
type_list_or_null = dict(oneOf=[type_list, type_null])
type_boolean_or_null = dict(oneOf=[type_boolean, type_null])
list_of_strings = dict(type="array", items=[{"type": "string"}])
# Complex
def list_of(ref, minItems=None, maxItems=None, exactItems=None):
d = dict(type="array", items=[{"$ref": "#/definitions/%s" % ref}])
if exactItems is not None:
minItems = exactItems
maxItems = exactItems
if minItems is not None:
d['minItems'] = minItems
if maxItems is not None:
d['maxItems'] = maxItems
return d
def ref(ref_id):
'''
Reference to type
'''
return {"$ref": "#/definitions/%s" % ref_id}
def schema(schema_options, **kwargs):
s = {
"$schema": "http://json-schema.org/draft-04/schema#"
}
s.update(schema_options)
s.update(kwargs)
return s
| mit | Python |
a22e652210852b3234af056e7dbc688a5f263f88 | Fix bug with insertOrganism() function call | PDX-Flamingo/codonpdx-python,PDX-Flamingo/codonpdx-python | codonpdx/insert.py | codonpdx/insert.py | #!/usr/bin/env python
import json
from db import dbManager
# insert an organism into a database table
def insert(args):
if hasattr(args, 'json'):
data = json.loads(args.json)
else:
data = json.load(args.infile)
with dbManager('config/db.cfg') as db:
for org in data:
db.insertOrganism(org, args.dbname)
return data
# insert an organism into a database table
def insertinput(args):
if hasattr(args, 'json') and args.json:
data = json.loads(args.json)
else:
data = json.load(args.infile)
with dbManager('config/db.cfg') as db:
for org in data:
db.insertInputOrganism(org, args.job)
return data
| #!/usr/bin/env python
import json
from db import dbManager
# insert an organism into a database table
def insert(args):
if hasattr(args, 'json'):
data = json.loads(args.json)
else:
data = json.load(args.infile)
with dbManager('config/db.cfg') as db:
for org in data:
db.insertOrganism(org, args.dbname, args.job)
return data
# insert an organism into a database table
def insertinput(args):
if hasattr(args, 'json') and args.json:
data = json.loads(args.json)
else:
data = json.load(args.infile)
with dbManager('config/db.cfg') as db:
for org in data:
db.insertInputOrganism(org, args.job)
return data
| apache-2.0 | Python |
f3cc2de83c88f01f7ec554ae6223132c284b4ad4 | Fix import for Kotti > 0.8x. | Kotti/kotti_site_gallery,Kotti/kotti_site_gallery | kotti_site_gallery/__init__.py | kotti_site_gallery/__init__.py | from __future__ import absolute_import
from fanstatic import Library
from fanstatic import Resource
from kotti.resources import Image
from kotti.fanstatic import view_css
from kotti.fanstatic import view_needed
lib_kotti_site_gallery = Library('kotti_site_gallery', 'static')
ksg_view_css = Resource(lib_kotti_site_gallery,
"kotti_site_gallery.css",
minified="kotti_site_gallery.min.css",
depends=[view_css])
def kotti_configure(settings):
settings['kotti.available_types'] += ' kotti_site_gallery.resources.Site'
settings['kotti.available_types'] += ' kotti_site_gallery.resources.SiteGallery'
settings['pyramid.includes'] += ' kotti_site_gallery.includeme'
settings['pyramid.includes'] += ' kotti_site_gallery.views.includeme'
Image.type_info.addable_to.append(u'Site')
def includeme(config):
view_needed.add(ksg_view_css)
| from fanstatic import Library
from fanstatic import Resource
from kotti.resources import Image
import kotti.static as ks
lib_kotti_site_gallery = Library('kotti_site_gallery', 'static')
view_css = Resource(lib_kotti_site_gallery,
"kotti_site_gallery.css",
minified="kotti_site_gallery.min.css",
depends=[ks.view_css])
def kotti_configure(settings):
settings['kotti.available_types'] += ' kotti_site_gallery.resources.Site'
settings['kotti.available_types'] += ' kotti_site_gallery.resources.SiteGallery'
settings['pyramid.includes'] += ' kotti_site_gallery.includeme'
settings['pyramid.includes'] += ' kotti_site_gallery.views.includeme'
Image.type_info.addable_to.append(u'Site')
def includeme(config):
ks.view_needed.add(view_css)
| bsd-2-clause | Python |
421c1802dd74d8c731caedaaacce1865c4aa4254 | remove reference to old getBucketSizes function. | rfdickerson/CS241,rfdickerson/cs241-data-structures,rfdickerson/CS241,rfdickerson/cs241-data-structures,rfdickerson/CS241,rfdickerson/cs241-data-structures | A4/hashtabletemplate.py | A4/hashtabletemplate.py | from collections import Hashable
class Node (object):
def __init__(self, key, value):
self.key = key
self.value = value
self.nextNode = None
class LinkedList (object):
# insert your assignment 2 here
class Hashtable (object):
def __init__(self, hashFunction, size=500):
""" Initialize a blank hashtable
hashFunction - a function that contains 2 arguments, key and size of hash
and returns an index to a bucket
size - the number of buckets in your hash
"""
pass
def __setitem__(self, key, value):
""" Sets the value at the key to value
key - any immutable object
value - any object
if key is mutable, raise a TypeError
"""
pass
def __getitem__(self, key):
""" Returns the value at the key
key - immutable key value
if there is no value at key, raise AttributeError
"""
pass
def __len__(self):
""" Returns the total number of items in the hash"""
pass
def __contains__(self, key):
""" Returns True is the hash has a key """
pass
def hashFunction(key, numbuckets):
pass
if __name__ == "__main__":
h = Hashtable(hashFunction, 1000)
h["cat"] = "a feline"
h["memphis"] = "a city"
print h["cat"]
print h['memphis']
print 'Does h contain {}, {}'.format('cat', 'cat' in h)
print 'Does h contain {}, {}'.format('piano', 'piano' in h)
print 'h has a size {}'.format(len(h))
| from collections import Hashable
class Node (object):
def __init__(self, key, value):
self.key = key
self.value = value
self.nextNode = None
class LinkedList (object):
# insert your assignment 2 here
class Hashtable (object):
def __init__(self, hashFunction, size=500):
""" Initialize a blank hashtable
hashFunction - a function that contains 2 arguments, key and size of hash
and returns an index to a bucket
size - the number of buckets in your hash
"""
pass
def __setitem__(self, key, value):
""" Sets the value at the key to value
key - any immutable object
value - any object
if key is mutable, raise a TypeError
"""
pass
def __getitem__(self, key):
""" Returns the value at the key
key - immutable key value
if there is no value at key, raise AttributeError
"""
pass
def getBucketSizes(self):
""" yield the sizes of each bucket as an iterator"""
pass
def __len__(self):
""" Returns the total number of items in the hash"""
pass
def __contains__(self, key):
""" Returns True is the hash has a key """
pass
def hashFunction(key, numbuckets):
pass
if __name__ == "__main__":
h = Hashtable(hashFunction, 1000)
h["cat"] = "a feline"
h["memphis"] = "a city"
print h["cat"]
print h['memphis']
print 'Does h contain {}, {}'.format('cat', 'cat' in h)
print 'Does h contain {}, {}'.format('piano', 'piano' in h)
print 'h has a size {}'.format(len(h))
| mit | Python |
a4c5782158e7d3fa696fc4532836355457f48cc0 | Add note about BibleGateway layout | Matthew-Arnold/slack-versebot,Matthew-Arnold/slack-versebot | versebot/webparser.py | versebot/webparser.py | """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| """
VerseBot for reddit
By Matthieu Grieger
parser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
self.translations = self.find_supported_translations().sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| mit | Python |
37b12b0ebbd838f38128c3c627148d351219f465 | check right when accessing /kit and /stats | StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite | labonneboite/web/root/views.py | labonneboite/web/root/views.py | # coding: utf8
from flask import Blueprint, current_app
from flask import abort, send_from_directory, redirect, render_template, request
from labonneboite.common import util
from labonneboite.conf import settings
from labonneboite.web.search.forms import CompanySearchForm
rootBlueprint = Blueprint('root', __name__)
@rootBlueprint.route('/')
def home():
return render_template('home.html', form=CompanySearchForm())
@rootBlueprint.route('/robots.txt')
def static_from_root():
return send_from_directory(current_app.static_folder, request.path[1:])
@rootBlueprint.route('/kit.pdf')
def kit():
if util.user_is_pro() and util.pro_version_enabled():
return send_from_directory(current_app.static_folder, 'kit.pdf')
abort(404)
@rootBlueprint.route('/espace-presse')
def press():
context = {
'doorbell_tags': util.get_doorbell_tags('press'),
}
return render_template('root/press.html', **context)
@rootBlueprint.route('/comment-faire-une-candidature-spontanee')
def lbb_help():
context = {
'doorbell_tags': util.get_doorbell_tags('help'),
}
return render_template('root/help.html', **context)
@rootBlueprint.route('/faq')
def faq():
context = {
'doorbell_tags': util.get_doorbell_tags('faq'),
}
return render_template('root/faq.html', **context)
@rootBlueprint.route('/conditions-generales')
def cgu():
host = settings.HOST
return render_template('root/cgu.html', host=host)
@rootBlueprint.route('/cookbook')
def cookbook():
return render_template('root/cookbook.html')
@rootBlueprint.route('/stats')
def stats():
if util.user_is_pro() and util.pro_version_enabled():
return redirect('https://datastudio.google.com/open/0B0PPPCjOppNIdVNXVVM0QnJHNEE')
abort(404)
| # coding: utf8
from flask import Blueprint, current_app
from flask import send_from_directory, redirect, render_template, request
from labonneboite.common import util
from labonneboite.conf import settings
from labonneboite.web.search.forms import CompanySearchForm
rootBlueprint = Blueprint('root', __name__)
@rootBlueprint.route('/')
def home():
return render_template('home.html', form=CompanySearchForm())
@rootBlueprint.route('/robots.txt')
def static_from_root():
return send_from_directory(current_app.static_folder, request.path[1:])
@rootBlueprint.route('/kit.pdf')
def kit():
return send_from_directory(current_app.static_folder, 'kit.pdf')
@rootBlueprint.route('/espace-presse')
def press():
context = {
'doorbell_tags': util.get_doorbell_tags('press'),
}
return render_template('root/press.html', **context)
@rootBlueprint.route('/comment-faire-une-candidature-spontanee')
def lbb_help():
context = {
'doorbell_tags': util.get_doorbell_tags('help'),
}
return render_template('root/help.html', **context)
@rootBlueprint.route('/faq')
def faq():
context = {
'doorbell_tags': util.get_doorbell_tags('faq'),
}
return render_template('root/faq.html', **context)
@rootBlueprint.route('/conditions-generales')
def cgu():
host = settings.HOST
return render_template('root/cgu.html', host=host)
@rootBlueprint.route('/cookbook')
def cookbook():
return render_template('root/cookbook.html')
@rootBlueprint.route('/stats')
def stats():
return redirect('https://datastudio.google.com/open/0B0PPPCjOppNIdVNXVVM0QnJHNEE')
| agpl-3.0 | Python |
079f0a4500023ae9e2a760be8dc5291b8f8cc51d | Fix fahrenheit temperatures | kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2 | weather/bot_plugin.py | weather/bot_plugin.py | # test plugin
from bot.pluginDespatch import Plugin
import re
import datetime
import logging
import pyowm
from logos.roomlib import get_global_option
from django.conf import settings
logger = logging.getLogger(__name__)
logging.config.dictConfig(settings.LOGGING)
class WeatherPlugin(Plugin):
plugin = ("weather", "Weather Plugin")
def __init__(self, *args, **kwargs):
Plugin.__init__(self, *args, **kwargs)
self.commands = (\
(r'w (?P<arg>\S.*)$', self.weather, "Weather query"),
)
def privmsg(self, user, channel, message):
pass
def weather(self, regex, chan, nick, **kwargs):
api_key = get_global_option('weather_api_key')
owm = pyowm.OWM(api_key)
arg = regex.group('arg')
observation = owm.weather_at_place(arg)
w = observation.get_weather()
tm = w.get_reference_time()
tm_str = datetime.datetime.fromtimestamp(tm).strftime("%b %d %Y %H:%M")
str1 = "Reference time : " + tm_str
str1 += " Status : {}. ".format(w.get_status())
str1 += " Wind: speed {speed} ".format(**w.get_wind())
str1 += " Humidity: {}.".format(w.get_humidity())
celsius = w.get_temperature('celsius')['temp']
fahrenheit = w.get_temperature('fahrenheit')['temp']
str1 += " Temperature {} F {} C".format(celsius, fahrenheit)
# str1 += " Temperature (F): {temp_min}, {temp}, {temp_max}.".format(**w.get_temperature())
# str1 += " Temperature (C): {temp_min}, {temp}, {temp_max}.".format(**w.get_temperature('celsius'))
self.say(chan, str1)
| # test plugin
from bot.pluginDespatch import Plugin
import re
import datetime
import logging
import pyowm
from logos.roomlib import get_global_option
from django.conf import settings
logger = logging.getLogger(__name__)
logging.config.dictConfig(settings.LOGGING)
class WeatherPlugin(Plugin):
plugin = ("w", "Weather Plugin")
def __init__(self, *args, **kwargs):
Plugin.__init__(self, *args, **kwargs)
self.commands = (\
(r'w (?P<arg>\S+)$', self.weather, "Weather query"),
)
def privmsg(self, user, channel, message):
pass
def weather(self, regex, chan, nick, **kwargs):
api_key = get_global_option('weather_api_key')
owm = pyowm.OWM(api_key)
arg = regex.group('arg')
observation = owm.weather_at_place(arg)
w = observation.get_weather()
tm = w.get_reference_time()
tm_str = datetime.datetime.fromtimestamp(tm).strftime("%b %d %Y %H:%M")
str1 = "Reference time : " + tm_str
str1 += " Status : {}. ".format(w.get_status())
str1 += " Wind: speed {speed} ".format(**w.get_wind())
str1 += " Humidity: {}.".format(w.get_humidity())
str1 += " Temperature (F): {temp_min}, {temp}, {temp_max}.".format(**w.get_temperature())
str1 += " Temperature (C): {temp_min}, {temp}, {temp_max}.".format(**w.get_temperature('celsius'))
self.say(chan, str1)
| apache-2.0 | Python |
0c1517a0e9ada833011ad435a9799ada0bbb7c89 | Use the Github API to compute changed files for PRs. (#8476) | tswast/google-cloud-python,tswast/google-cloud-python,GoogleCloudPlatform/gcloud-python,googleapis/google-cloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,googleapis/google-cloud-python,GoogleCloudPlatform/gcloud-python,tseaver/google-cloud-python,tseaver/google-cloud-python | test_utils/scripts/get_target_packages_kokoro.py | test_utils/scripts/get_target_packages_kokoro.py | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Print a list of packages which require testing."""
import pathlib
import subprocess
import ci_diff_helper
import requests
def print_environment(environment):
print("-> CI environment:")
print('Branch', environment.branch)
print('PR', environment.pr)
print('In PR', environment.in_pr)
print('Repo URL', environment.repo_url)
if environment.in_pr:
print('PR Base', environment.base)
def get_base(environment):
if environment.in_pr:
return environment.base
else:
# If we're not in a PR, just calculate the changes between this commit
# and its parent.
return 'HEAD~1'
def get_changed_files_from_base(base):
return subprocess.check_output([
'git', 'diff', '--name-only', f'{base}..HEAD',
], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
_URL_TEMPLATE = (
'https://api.github.com/repos/googleapis/google-cloud-python/pulls/'
'{}/files'
)
def get_changed_files_from_pr(pr):
url = _URL_TEMPLATE.format(pr)
while url is not None:
response = requests.get(url)
for info in response.json():
yield info['filename']
url = response.links.get('next', {}).get('url')
def determine_changed_packages(changed_files):
packages = [
path.parent for path in pathlib.Path('.').glob('*/noxfile.py')
]
changed_packages = set()
for file in changed_files:
file = pathlib.Path(file)
for package in packages:
if package in file.parents:
changed_packages.add(package)
return changed_packages
def main():
environment = ci_diff_helper.get_config()
print_environment(environment)
base = get_base(environment)
if environment.in_pr:
changed_files = list(get_changed_files_from_pr(environment.pr))
else:
changed_files = get_changed_files_from_base(base)
packages = determine_changed_packages(changed_files)
print(f"Comparing against {base}.")
print("-> Changed packages:")
for package in packages:
print(package)
main()
| # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Print a list of packages which require testing."""
import pathlib
import subprocess
import ci_diff_helper
def print_environment(environment):
print("-> CI environment:")
print('Branch', environment.branch)
print('PR', environment.pr)
print('In PR', environment.in_pr)
print('Repo URL', environment.repo_url)
if environment.in_pr:
print('PR Base', environment.base)
def get_base(environment):
if environment.in_pr:
return environment.base
else:
# If we're not in a PR, just calculate the changes between this commit
# and its parent.
return 'HEAD~1'
def get_changed_files(base):
return subprocess.check_output([
'git', 'diff', '--name-only', f'{base}..HEAD',
], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
def determine_changed_packages(changed_files):
packages = [
path.parent for path in pathlib.Path('.').glob('*/noxfile.py')
]
changed_packages = set()
for file in changed_files:
file = pathlib.Path(file)
for package in packages:
if package in file.parents:
changed_packages.add(package)
return changed_packages
def main():
environment = ci_diff_helper.get_config()
print_environment(environment)
base = get_base(environment)
changed_files = get_changed_files(base)
packages = determine_changed_packages(changed_files)
print(f"Comparing against {base}.")
print("-> Changed packages:")
for package in packages:
print(package)
main()
| apache-2.0 | Python |
41b5a95a5c396c131d1426dd926e0a1a4beccc86 | Call method changed on v14 | OCA/manufacture,OCA/manufacture | mrp_workorder_sequence/models/mrp_production.py | mrp_workorder_sequence/models/mrp_production.py | # Copyright 2019-20 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _create_workorder(self):
res = super()._create_workorder()
self._reset_work_order_sequence()
return res
| # Copyright 2019-20 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _generate_workorders(self, exploded_boms):
res = super()._generate_workorders(exploded_boms)
self._reset_work_order_sequence()
return res
| agpl-3.0 | Python |
7b615eda1f37235fb7a727858a87cfec94563e6c | Migrate a test | jandecaluwe/myhdl,jck/myhdl,josyb/myhdl,myhdl/myhdl,jandecaluwe/myhdl,juhasch/myhdl,hgomersall/myhdl,cfelton/myhdl,jandecaluwe/myhdl,josyb/myhdl,jck/myhdl,juhasch/myhdl,myhdl/myhdl,cfelton/myhdl,jck/myhdl,hgomersall/myhdl,cfelton/myhdl,josyb/myhdl,myhdl/myhdl,juhasch/myhdl,hgomersall/myhdl | myhdl/test/conversion/general/test_constants.py | myhdl/test/conversion/general/test_constants.py | from __future__ import absolute_import
from myhdl import *
@module
def constants(v, u, x, y, z, a):
b = Signal(bool(0))
c = Signal(bool(1))
d = Signal(intbv(5)[8:])
e = Signal(intbv(4, min=-3, max=9))
@always_comb
def logic():
u.next = d
v.next = e
x.next = b
y.next = c
z.next = a
return logic
x, y, z, a = [Signal(bool(0)) for i in range(4)]
u = Signal(intbv(0)[8:])
v = Signal(intbv(0, min=-3, max=9))
def test_constants():
assert conversion.analyze(constants(v, u, x, y, z, a)) == 0
| from __future__ import absolute_import
from myhdl import *
def constants(v, u, x, y, z, a):
b = Signal(bool(0))
c = Signal(bool(1))
d = Signal(intbv(5)[8:])
e = Signal(intbv(4, min=-3, max=9))
@always_comb
def logic():
u.next = d
v.next = e
x.next = b
y.next = c
z.next = a
return logic
x, y, z, a = [Signal(bool(0)) for i in range(4)]
u = Signal(intbv(0)[8:])
v = Signal(intbv(0, min=-3, max=9))
def test_constants():
assert conversion.analyze(constants, v, u, x, y, z, a) == 0
| lgpl-2.1 | Python |
24301f1670e8879c2e72f78f9288a7f8b8f32179 | Add relation to mongodb, store stuff that is posted. | 0ortmann/wg-tools,0ortmann/wg-tools,0ortmann/wg-tools,0ortmann/wg-tools | py-backend/server.py | py-backend/server.py | #!/usr/bin/python
from flask import Flask, request, json, make_response
import deptCalculator
from mongoengine import *
import datetime
from bson import json_util
'''
Define some mongo stuff, very rudimentary storing of posted data.
'''
connect('localhost:27017')
class Post(Document):
date_modified = DateTimeField(default=datetime.datetime.now)
meta = {'allow_inheritance': True}
class DeptPost(Post):
name = StringField()
amount = IntField()
'''
Simple Flask-API for serving post requests. API offers stuff like calculating depts among people or storing data.
'''
app = Flask(__name__)
def getDeptPostsAsJson():
''' Returns all deptpost-objects in a json deptList '''
result, data = list(), {}
for post in DeptPost.objects:
data['name'] = post.name
data['amount'] = post.amount
data['date'] = post.date_modified
result.append(data)
return result
@app.route('/calcDepts', methods=['POST'])
def depts():
''' Calculates the "mean" of all depts '''
if request.method == 'POST' and request.headers['Content-Type'] == ('application/json; charset=UTF-8'):
postedJson = json.dumps(request.json)
jsonAsDict = json.loads(postedJson)
deptList = deptCalculator.calcDepts(jsonAsDict)
return json.dumps(deptList)
else:
return "Invalid request", 400
@app.route('/storeDept', methods=['POST'])
def store():
''' Stores the posted data to the mongo '''
if request.method == 'POST' and request.headers['Content-Type'] == ('application/json; charset=UTF-8'):
postedJson = json.dumps(request.json)
jsonAsDict = json.loads(postedJson)
deptPost = DeptPost(name=jsonAsDict.get('name'), amount=jsonAsDict.get('amount'))
deptPost.save()
return json.dumps(getDeptPostsAsJson())
@app.route('/deptList')
def getDeptList():
''' Returns a json list of depts '''
return json.dumps(getDeptPostsAsJson())
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
| #!/usr/bin/python
from flask import Flask, request, json, render_template
import deptCalculator
app = Flask(__name__)
@app.route('/calcDepts', methods=['POST'])
def depts():
if request.method == 'POST' and request.headers['Content-Type'] == ('application/json; charset=UTF-8'):
postedJson = json.dumps(request.json)
print(request.get_json())
jsonAsDict = json.loads(postedJson)
deptList = deptCalculator.calcDepts(jsonAsDict)
print(deptList)
return "FOOBAR: " + json.dumps(deptList)
else:
return render_template('index.html')
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
| mit | Python |
54b147e59d1dfd4b65643a3f8a56098eb5a99451 | Add more default args so tests pass in py3+ | Yelp/kafka-utils,Yelp/kafka-utils | tests/kafka_cluster_manager/decommission_test.py | tests/kafka_cluster_manager/decommission_test.py | from __future__ import unicode_literals
from argparse import Namespace
import mock
import pytest
from kafka_utils.kafka_cluster_manager.cluster_info \
.partition_count_balancer import PartitionCountBalancer
from kafka_utils.kafka_cluster_manager.cmds import decommission
from tests.kafka_cluster_manager.helper import broker_range
@pytest.fixture
def command_instance():
cmd = decommission.DecommissionCmd()
cmd.args = mock.Mock(spec=Namespace)
cmd.args.force_progress = False
cmd.args.broker_ids = []
cmd.args.auto_max_movement_size = True
cmd.args.max_partition_movements = 10
cmd.args.max_leader_changes = 10
return cmd
def test_decommission_no_partitions_to_move(command_instance, create_cluster_topology):
cluster_one_broker_empty = create_cluster_topology(
assignment={('topic', 0): [0, 1]},
brokers=broker_range(3),
)
command_instance.args.brokers_ids = [2]
balancer = PartitionCountBalancer(cluster_one_broker_empty, command_instance.args)
command_instance.run_command(cluster_one_broker_empty, balancer)
| from __future__ import unicode_literals
from argparse import Namespace
import mock
import pytest
from kafka_utils.kafka_cluster_manager.cluster_info \
.partition_count_balancer import PartitionCountBalancer
from kafka_utils.kafka_cluster_manager.cmds import decommission
from tests.kafka_cluster_manager.helper import broker_range
@pytest.fixture
def command_instance():
cmd = decommission.DecommissionCmd()
cmd.args = mock.Mock(spec=Namespace)
cmd.args.force_progress = False
cmd.args.broker_ids = []
cmd.args.auto_max_movement_size = True
return cmd
def test_decommission_no_partitions_to_move(command_instance, create_cluster_topology):
cluster_one_broker_empty = create_cluster_topology(
assignment={('topic', 0): [0, 1]},
brokers=broker_range(3),
)
command_instance.args.brokers_ids = [2]
balancer = PartitionCountBalancer(cluster_one_broker_empty, command_instance.args)
command_instance.run_command(cluster_one_broker_empty, balancer)
| apache-2.0 | Python |
401c4c414eaaa80e9ed22b210f0b98e71ccfc970 | Fix mixed indentation in commented python test | emilybache/DiamondKata,emilybache/DiamondKata | python/test_diamond_incremental.py | python/test_diamond_incremental.py | """
These test cases can be used to test-drive a solution to the diamond kata, in an incremental manner.
to run the tests, use 'py.test' - see http://pytest.org
Instructions:
1. Make the first test case for Diamond A pass
2. change the 'ignore_' to 'test_' for the next test case. Make it pass.
3. Uncomment the next line of the test case. Make it pass
4. When all the lines of code in the test case are passing, continue to the next test case.
5. When all the test cases in this file are uncommented and passing, you should have a full working solution.
"""
import diamond
def test_diamondA_has_one_line_containing_a():
assert diamond.Diamond('A').print_diamond() == "A"
def ignore_letter_sequence_is_list_of_letters_on_each_line_of_the_diamond():
assert diamond.Diamond('A').letter_sequence == ['A']
#assert diamond.Diamond('B').letter_sequence == ['A', 'B', 'A']
#assert diamond.Diamond('C').letter_sequence == ['A', 'B', 'C', 'B', 'A']
#assert diamond.Diamond('D').letter_sequence == ['A', 'B', 'C', 'D', 'C', 'B', 'A']
def ignore_indents_is_list_of_indentation_for_each_line_of_the_diamond():
assert diamond.Diamond('A').indents == [0]
#assert diamond.Diamond('B').indents == [1,0,1]
#assert diamond.Diamond('C').indents == [2,1,0,1,2]
#assert diamond.Diamond('D').indents == [3,2,1,0,1,2,3]
def ignore_between_is_list_of_how_many_middle_spaces_between_the_repeated_letter_for_each_line_of_the_diamond():
assert diamond.Diamond('A').between == [0]
#assert diamond.Diamond('B').between == [0,1,0]
#assert diamond.Diamond('C').between == [0,1,3,1,0]
#assert diamond.Diamond('D').between == [0,1,3,5,3,1,0]
def ignore_one_row_is_a_list_representing_one_diamond_row():
assert diamond.Diamond('A').one_row('A', indent=0, between=0) == "A"
#assert diamond.Diamond('B').one_row('A', indent=1, between=0) == " A"
#assert diamond.Diamond('B').one_row('B', indent=0, between=1) == "B B"
#assert diamond.Diamond('D').one_row('C', indent=1, between=3) == " C C"
def ignore_rows_is_a_list_of_all_diamond_rows():
assert diamond.Diamond('A').rows() == ["A"]
#assert diamond.Diamond('B').rows() == [" A", "B B", " A"]
def ignore_DiamondC_prints_correctly():
assert diamond.Diamond('C').print_diamond() == """\
A
B B
C C
B B
A"""
def ignore_DiamondD_is_correct():
assert diamond.Diamond('D').print_diamond() == """\
A
B B
C C
D D
C C
B B
A"""
| """
These test cases can be used to test-drive a solution to the diamond kata, in an incremental manner.
to run the tests, use 'py.test' - see http://pytest.org
Instructions:
1. Make the first test case for Diamond A pass
2. change the 'ignore_' to 'test_' for the next test case. Make it pass.
3. Uncomment the next line of the test case. Make it pass
4. When all the lines of code in the test case are passing, continue to the next test case.
5. When all the test cases in this file are uncommented and passing, you should have a full working solution.
"""
import diamond
def test_diamondA_has_one_line_containing_a():
assert diamond.Diamond('A').print_diamond() == "A"
def ignore_letter_sequence_is_list_of_letters_on_each_line_of_the_diamond():
assert diamond.Diamond('A').letter_sequence == ['A']
#assert diamond.Diamond('B').letter_sequence == ['A', 'B', 'A']
#assert diamond.Diamond('C').letter_sequence == ['A', 'B', 'C', 'B', 'A']
#assert diamond.Diamond('D').letter_sequence == ['A', 'B', 'C', 'D', 'C', 'B', 'A']
def ignore_indents_is_list_of_indentation_for_each_line_of_the_diamond():
assert diamond.Diamond('A').indents == [0]
#assert diamond.Diamond('B').indents == [1,0,1]
#assert diamond.Diamond('C').indents == [2,1,0,1,2]
#assert diamond.Diamond('D').indents == [3,2,1,0,1,2,3]
def ignore_between_is_list_of_how_many_middle_spaces_between_the_repeated_letter_for_each_line_of_the_diamond():
assert diamond.Diamond('A').between == [0]
#assert diamond.Diamond('B').between == [0,1,0]
#assert diamond.Diamond('C').between == [0,1,3,1,0]
#assert diamond.Diamond('D').between == [0,1,3,5,3,1,0]
def ignore_one_row_is_a_list_representing_one_diamond_row():
assert diamond.Diamond('A').one_row('A', indent=0, between=0) == "A"
#assert diamond.Diamond('B').one_row('A', indent=1, between=0) == " A"
#assert diamond.Diamond('B').one_row('B', indent=0, between=1) == "B B"
#assert diamond.Diamond('D').one_row('C', indent=1, between=3) == " C C"
def ignore_rows_is_a_list_of_all_diamond_rows():
assert diamond.Diamond('A').rows() == ["A"]
#assert diamond.Diamond('B').rows() == [" A", "B B", " A"]
def ignore_DiamondC_prints_correctly():
assert diamond.Diamond('C').print_diamond() == """\
A
B B
C C
B B
A"""
def ignore_DiamondD_is_correct():
assert diamond.Diamond('D').print_diamond() == """\
A
B B
C C
D D
C C
B B
A"""
| mit | Python |
f13d9ec4f953dc93a9add14cb36b4a2ae0891835 | remove unused action | dstufft/jutils | crate_project/apps/packages/admin.py | crate_project/apps/packages/admin.py | from django.contrib import admin
from packages.models import Package, Release, ReleaseFile, TroveClassifier, PackageURI
from packages.models import ReleaseRequire, ReleaseProvide, ReleaseObsolete, ReleaseURI, ChangeLog
class PackageURIAdmin(admin.TabularInline):
model = PackageURI
extra = 0
class PackageAdmin(admin.ModelAdmin):
inlines = [PackageURIAdmin]
list_display = ["name", "created", "modified", "downloads_synced_on"]
list_filter = ["created", "modified", "deleted", "downloads_synced_on"]
search_fields = ["name"]
class ReleaseRequireInline(admin.TabularInline):
model = ReleaseRequire
extra = 0
class ReleaseProvideInline(admin.TabularInline):
model = ReleaseProvide
extra = 0
class ReleaseObsoleteInline(admin.TabularInline):
model = ReleaseObsolete
extra = 0
class ReleaseFileInline(admin.TabularInline):
model = ReleaseFile
extra = 0
class ReleaseURIInline(admin.TabularInline):
model = ReleaseURI
extra = 0
class ReleaseAdmin(admin.ModelAdmin):
inlines = [ReleaseURIInline, ReleaseFileInline, ReleaseRequireInline, ReleaseProvideInline, ReleaseObsoleteInline]
list_display = ["__unicode__", "package", "version", "summary", "author", "author_email", "maintainer", "maintainer_email", "created", "modified"]
list_filter = ["created", "modified", "deleted", "hidden"]
search_fields = ["package__name", "version", "summary", "author", "author_email", "maintainer", "maintainer_email"]
raw_id_fields = ["package"]
class TroveClassifierAdmin(admin.ModelAdmin):
list_display = ["trove"]
search_fields = ["trove"]
class ReleaseFileAdmin(admin.ModelAdmin):
list_display = ["release", "type", "python_version", "downloads", "comment", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["release__package__name", "filename", "digest"]
raw_id_fields = ["release"]
class ChangeLogAdmin(admin.ModelAdmin):
list_display = ["package", "release", "type", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["package__name"]
raw_id_fields = ["package", "release"]
admin.site.register(Package, PackageAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(ReleaseFile, ReleaseFileAdmin)
admin.site.register(TroveClassifier, TroveClassifierAdmin)
admin.site.register(ChangeLog, ChangeLogAdmin)
| from django.contrib import admin
from packages.models import Package, Release, ReleaseFile, TroveClassifier, PackageURI
from packages.models import ReleaseRequire, ReleaseProvide, ReleaseObsolete, ReleaseURI, ChangeLog
from packages.tasks import save_releases
def releases_save(modeladmin, request, queryset):
save_releases.delay([x.pk for x in queryset])
releases_save.short_description = "Trigger a Save on the Selected Releases"
class PackageURIAdmin(admin.TabularInline):
model = PackageURI
extra = 0
class PackageAdmin(admin.ModelAdmin):
inlines = [PackageURIAdmin]
list_display = ["name", "created", "modified", "downloads_synced_on"]
list_filter = ["created", "modified", "deleted", "downloads_synced_on"]
search_fields = ["name"]
class ReleaseRequireInline(admin.TabularInline):
model = ReleaseRequire
extra = 0
class ReleaseProvideInline(admin.TabularInline):
model = ReleaseProvide
extra = 0
class ReleaseObsoleteInline(admin.TabularInline):
model = ReleaseObsolete
extra = 0
class ReleaseFileInline(admin.TabularInline):
model = ReleaseFile
extra = 0
class ReleaseURIInline(admin.TabularInline):
model = ReleaseURI
extra = 0
class ReleaseAdmin(admin.ModelAdmin):
inlines = [ReleaseURIInline, ReleaseFileInline, ReleaseRequireInline, ReleaseProvideInline, ReleaseObsoleteInline]
actions = [releases_save]
list_display = ["__unicode__", "package", "version", "summary", "author", "author_email", "maintainer", "maintainer_email", "created", "modified"]
list_filter = ["created", "modified", "deleted", "hidden"]
search_fields = ["package__name", "version", "summary", "author", "author_email", "maintainer", "maintainer_email"]
raw_id_fields = ["package"]
class TroveClassifierAdmin(admin.ModelAdmin):
list_display = ["trove"]
search_fields = ["trove"]
class ReleaseFileAdmin(admin.ModelAdmin):
list_display = ["release", "type", "python_version", "downloads", "comment", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["release__package__name", "filename", "digest"]
raw_id_fields = ["release"]
class ChangeLogAdmin(admin.ModelAdmin):
list_display = ["package", "release", "type", "created", "modified"]
list_filter = ["type", "created", "modified"]
search_fields = ["package__name"]
raw_id_fields = ["package", "release"]
admin.site.register(Package, PackageAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(ReleaseFile, ReleaseFileAdmin)
admin.site.register(TroveClassifier, TroveClassifierAdmin)
admin.site.register(ChangeLog, ChangeLogAdmin)
| bsd-2-clause | Python |
6a04607082a386b41e3fb0c53bb7be7a2ba5c4b6 | simplify hammer-stroud for cube a bit | nschloe/quadpy | quadpy/hexahedron/hammer_stroud.py | quadpy/hexahedron/hammer_stroud.py | # -*- coding: utf-8 -*-
#
import numpy
from ..helpers import untangle, fsd, pm, z
class HammerStroud(object):
'''
Preston C. Hammer and Arthur H. Stroud,
Numerical Evaluation of Multiple Integrals II,
Math. Comp. 12 (1958), 272-280,
<https://doi.org/10.1090/S0025-5718-1958-0102176-6>.
'''
def __init__(self, index):
if index == 1:
self.degree = 5
data = [
(40.0/361.0, fsd(3, numpy.sqrt(19.0/30.0), 1)),
(121.0/2888.0, pm(3, numpy.sqrt(19.0/33.0)))
]
else:
assert index in [2, 3]
self.degree = 7
i = 1.0 if index == 2 else -1.0
r2 = (33.0 - i * numpy.sqrt(165.0)) / 28.0
s2 = (30.0 + i * numpy.sqrt(165.0)) / 35.0
t2 = (195.0 - i * 4.0*numpy.sqrt(165.0)) / 337.0
r = numpy.sqrt(r2)
s = numpy.sqrt(s2)
t = numpy.sqrt(t2)
B1 = 22.0/945.0 / r2**3
B2 = 1.0/135.0 / s2**3
B3 = 1.0/216.0 / t2**3
B0 = 1.0 - 6.0*B1 - 12.0*B2 - 8.0*B3
data = [
(B0, z(3)),
(B1, fsd(3, r, 1)),
(B2, fsd(3, s, 2)),
(B3, pm(3, t)),
]
self.points, self.weights = untangle(data)
self.weights *= 8.0
return
| # -*- coding: utf-8 -*-
#
import numpy
from .helpers import fs_r00, pm_rrr, fs_rr0, z
from ..helpers import untangle
class HammerStroud(object):
'''
Preston C. Hammer and Arthur H. Stroud,
Numerical Evaluation of Multiple Integrals II,
Math. Comp. 12 (1958), 272-280,
<https://doi.org/10.1090/S0025-5718-1958-0102176-6>.
'''
def __init__(self, index):
if index == 1:
self.degree = 5
data = [
(40.0/361.0, fs_r00(numpy.sqrt(19.0/30.0))),
(121.0/2888.0, pm_rrr(numpy.sqrt(19.0/33.0)))
]
else:
assert index in [2, 3]
self.degree = 7
i = 1.0 if index == 2 else -1.0
r2 = (33.0 - i * numpy.sqrt(165.0)) / 28.0
s2 = (30.0 + i * numpy.sqrt(165.0)) / 35.0
t2 = (195.0 - i * 4.0*numpy.sqrt(165.0)) / 337.0
r = numpy.sqrt(r2)
s = numpy.sqrt(s2)
t = numpy.sqrt(t2)
B1 = 22.0/945.0 / r2**3
B2 = 1.0/135.0 / s2**3
B3 = 1.0/216.0 / t2**3
B0 = 1.0 - 6.0*B1 - 12.0*B2 - 8.0*B3
data = [
(B0, z()),
(B1, fs_r00(r)),
(B2, fs_rr0(s)),
(B3, pm_rrr(t))
]
self.points, self.weights = untangle(data)
self.weights *= 8.0
return
| mit | Python |
9cb8a22673d36ab8e5379065d2d71ea2805370c8 | Fix day calculation on history limit | ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player | radio/templatetags/radio_extras.py | radio/templatetags/radio_extras.py | import random
from django import template
from django.conf import settings
from django.contrib.auth.models import User
from radio.models import Profile
register = template.Library()
# anonymous time seting
@register.simple_tag()
def settings_anonymous_time():
return getattr(settings, 'ANONYMOUS_TIME', 0)
# Get user time setting
@register.simple_tag()
def get_user_time(user):
print("Template TAG USER {}".format(user))
history = {}
if user.is_authenticated():
print("I am logged in")
user_profile = Profile.objects.get(user=user)
else:
print("I am AnonymousUser")
try:
anon_user = User.objects.get(username='ANONYMOUS_USER')
except User.DoesNotExist:
raise ImproperlyConfigured('ANONYMOUS_USER is missing from User table, was "./manage.py migrations" not run?')
user_profile = Profile.objects.get(user=anon_user)
if user_profile:
history.update(minutes = user_profile.plan.history)
else:
history.update(minutes = settings.ANONYMOUS_TIME)
history.update(hours = history['minutes'] / 60)
if history['minutes'] % 60 == 0:
if history['minutes'] % 1440 == 0:
history.update(display = '{} days'.format(history['minutes'] // 1440))
else:
history.update(display = '{} hours'.format(history['minutes'] // 60))
else:
history.update(display = '{} minutes'.format(history['minutes']))
return history
# Amazon adds
@register.simple_tag()
def settings_amazon_adds():
return getattr(settings, 'AMAZON_ADDS', False)
# All Amazon Settings
@register.simple_tag()
def settings_amazon_ad(value):
if value.startswith("AMAZON_AD"): # Only expose amazon settings
if value == 'AMAZON_AD_FALL_BACK_SEARCH': # Pick from the list
return random.choice(getattr(settings, value, False))
return getattr(settings, value, False)
return None
# Allow settings in VISABLE_SETTINGS to be aviliable
@register.simple_tag()
def get_setting(value):
visable_settings = getattr(settings, 'VISABLE_SETTINGS', None)
if value in visable_settings:
return getattr(settings, value, False)
return None
| import random
from django import template
from django.conf import settings
from django.contrib.auth.models import User
from radio.models import Profile
register = template.Library()
# anonymous time seting
@register.simple_tag()
def settings_anonymous_time():
return getattr(settings, 'ANONYMOUS_TIME', 0)
# Get user time setting
@register.simple_tag()
def get_user_time(user):
print("Template TAG USER {}".format(user))
history = {}
if user.is_authenticated():
print("I am logged in")
user_profile = Profile.objects.get(user=user)
else:
print("I am AnonymousUser")
try:
anon_user = User.objects.get(username='ANONYMOUS_USER')
except User.DoesNotExist:
raise ImproperlyConfigured('ANONYMOUS_USER is missing from User table, was "./manage.py migrations" not run?')
user_profile = Profile.objects.get(user=anon_user)
if user_profile:
history.update(minutes = user_profile.plan.history)
else:
history.update(minutes = settings.ANONYMOUS_TIME)
history.update(hours = history['minutes'] / 60)
if history['minutes'] % 60 == 0:
if history['minutes'] % 1440 == 0:
history.update(display = '{} days'.format(history['minutes'] // 1140))
else:
history.update(display = '{} hours'.format(history['minutes'] // 60))
else:
history.update(display = '{} minutes'.format(history['minutes']))
return history
# Amazon adds
@register.simple_tag()
def settings_amazon_adds():
return getattr(settings, 'AMAZON_ADDS', False)
# All Amazon Settings
@register.simple_tag()
def settings_amazon_ad(value):
if value.startswith("AMAZON_AD"): # Only expose amazon settings
if value == 'AMAZON_AD_FALL_BACK_SEARCH': # Pick from the list
return random.choice(getattr(settings, value, False))
return getattr(settings, value, False)
return None
# Allow settings in VISABLE_SETTINGS to be aviliable
@register.simple_tag()
def get_setting(value):
visable_settings = getattr(settings, 'VISABLE_SETTINGS', None)
if value in visable_settings:
return getattr(settings, value, False)
return None
| mit | Python |
00454daf57f2d24ca6c8080b6c3bb57dfcf32be2 | change the new parser tests | tjwei/jedi,WoLpH/jedi,flurischt/jedi,flurischt/jedi,WoLpH/jedi,mfussenegger/jedi,jonashaag/jedi,tjwei/jedi,mfussenegger/jedi,dwillmer/jedi,jonashaag/jedi,dwillmer/jedi | test/test_new_parser.py | test/test_new_parser.py | from jedi.parser import Parser
def test_basic_parsing():
def compare(string):
"""Generates the AST object and then regenerates the code."""
assert Parser(string).module.get_code() == string
compare('\na #pass\n')
compare('wblabla* 1\t\n')
compare('def x(a, b:3): pass\n')
compare('assert foo\n')
| import logging
from jedi.parser import pytree
from jedi.parser.pgen2 import Driver
def test_basic_parsing():
def compare(string):
"""Generates the AST object and then regenerates the code."""
assert d.parse_string(string).get_code() == string
#if self.options["print_function"]:
# python_grammar = pygram.python_grammar_no_print_statement
#else:
# When this is True, the refactor*() methods will call write_file() for
# files processed even if they were not changed during refactoring. If
# and only if the refactor method's write parameter was True.
logger = logging.getLogger("RefactoringTool")
d = Driver(pytree.python_grammar, convert=pytree.convert, logger=logger)
compare('\na #pass\n')
compare('wblabla* 1\t\n')
compare('def x(a, b:3): pass\n')
compare('assert foo\n')
| mit | Python |
dcc3c857345ea1db3a264f7695591ef166c58719 | Update __init__.py | jameslyons/pycipher | pycipher/__init__.py | pycipher/__init__.py | from adfgx import ADFGX
from adfgvx import ADFGVX
from simplesubstitution import SimpleSubstitution
from caesar import Caesar
from affine import Affine
from enigma import Enigma
from autokey import Autokey
from beaufort import Beaufort
from bifid import Bifid as Bifid
from columnartransposition import ColTrans
from gronsfeld import Gronsfeld
from foursquare import Foursquare
from m209 import M209 as M209
from polybius import PolybiusSquare
from playfair import Playfair
from vigenere import Vigenere
from rot13 import Rot13
from atbash import Atbash
from railfence import Railfence
from porta import Porta
from fracmorse import FracMorse
import util
#from lorentz import Lorentz as Lorentz
__all__=["Atbash","ADFGX","ADFGVX","SimpleSubstitution","Caesar","Affine","Enigma","Autokey","Beaufort",
"Bifid","ColTrans","Gronsfeld","Foursquare","M209","PolybiusSquare","Playfair","Vigenere","Rot13","util",
"Railfence","Porta"]
__version__ = "0.5.1"
| from adfgx import ADFGX
from adfgvx import ADFGVX
from simplesubstitution import SimpleSubstitution
from caesar import Caesar
from affine import Affine
from enigma import Enigma
from autokey import Autokey
from beaufort import Beaufort
from bifid import Bifid as Bifid
from columnartransposition import ColTrans
from gronsfeld import Gronsfeld
from foursquare import Foursquare
from m209 import M209 as M209
from polybius import PolybiusSquare
from playfair import Playfair
from vigenere import Vigenere
from rot13 import Rot13
from atbash import Atbash
from railfence import Railfence
from porta import Porta
import util
#from lorentz import Lorentz as Lorentz
__all__=["Atbash","ADFGX","ADFGVX","SimpleSubstitution","Caesar","Affine","Enigma","Autokey","Beaufort",
"Bifid","ColTrans","Gronsfeld","Foursquare","M209","PolybiusSquare","Playfair","Vigenere","Rot13","util",
"Railfence","Porta"]
__version__ = "0.5.1"
| mit | Python |
7f0d09a5d8c391dc6b28e6461939796fffc20825 | Update to version v0.3.1 | remiomosowon/pyeasyga,remiomosowon/pyeasyga | pyeasyga/__init__.py | pyeasyga/__init__.py | # -*- coding: utf-8 -*-
"""pyeasyga
A simple and easy-to-use genetic algorithm implementation library in Python.
For a bit array solution representation, simply instantiate the
GeneticAlgorithm class with input data, define and supply a fitness function,
run the Genetic Algorithm, and retrieve the solution!
Other solution representations will require setting some more attributes.
"""
__author__ = 'Ayodeji Remi-Omosowon'
__email__ = 'remiomosowon@gmail.com'
__version__ = '0.3.1'
| # -*- coding: utf-8 -*-
"""pyeasyga
A simple and easy-to-use genetic algorithm implementation library in Python.
For a bit array solution representation, simply instantiate the
GeneticAlgorithm class with input data, define and supply a fitness function,
run the Genetic Algorithm, and retrieve the solution!
Other solution representations will require setting some more attributes.
"""
__author__ = 'Ayodeji Remi-Omosowon'
__email__ = 'remiomosowon@gmail.com'
__version__ = '0.3.0'
| bsd-3-clause | Python |
263230a4381ab2c13c40a083839aa4bc7a5e88c3 | Fix hard coded filename path | naiquevin/lookupy | pyharbor/pyharbor.py | pyharbor/pyharbor.py | import json
class Har(object):
def __init__(self, filename):
self.filaname = filename
self.har = read_har(filename)
def entries(self, include=None, exclude=None, **kwargs):
entries = filter_entries(self.har, **kwargs)
if include is not None:
return include_keys(entries, include)
elif exclude is not None:
return exclude_keys(entries, exclude)
else:
return entries
def read_har(filename):
with open(filename, 'r') as f:
return json.load(f)
def filter_entries(har, **kwargs):
pred = kwargs.pop('pred', None)
if pred is None:
pred = lambda e: all(get_key(e, k) == v for k, v in kwargs.items())
return (e for e in har['log']['entries'] if pred(e))
def include_keys(entries, fields):
return (dict((f, get_key(e, f)) for f in fields) for e in entries)
def exclude_keys(entries, fields):
raise NotImplementedError
def get_key(_dict, key):
parts = key.split('__', 1)
try:
result = _dict[parts[0]]
except KeyError:
return None
else:
return result if len(parts) == 1 else get_key(result, parts[1])
def test():
d = dict([('a', 'A'),
('p', {'q': 'Q'}),
('x', {'y': {'z': 'Z'}})])
assert get_key(d, 'a') == 'A'
assert get_key(d, 'p__q') == 'Q'
assert get_key(d, 'x__y__z') == 'Z'
print('ok. All tests pass.')
if __name__ == '__main__':
test()
# Usage:
#
# import sys
#
# script, filename = sys.argv
#
## high level object oriented abstraction
#
# har = Har(filename)
# entries = har.entries(response__status=404, include=['request__url', 'response__status'])
# print(list(entries))
#
## lower level functional abstraction
#
# har = read_har(filename)
# entries = filter_entries(har, response__status=404)
# print(list(include_keys(entries, ['request__url', 'response__status'])))
| import json
filename = '/home/vineet/Dropbox/kodecrm.com404.har'
class Har(object):
def __init__(self, filename):
self.filaname = filename
self.har = read_har(filename)
def entries(self, include=None, exclude=None, **kwargs):
entries = filter_entries(self.har, **kwargs)
if include is not None:
return include_keys(entries, include)
elif exclude is not None:
return exclude_keys(entries, exclude)
else:
return entries
def read_har(filename):
with open(filename, 'r') as f:
return json.load(f)
def filter_entries(har, **kwargs):
pred = kwargs.pop('pred', None)
if pred is None:
pred = lambda e: all(get_key(e, k) == v for k, v in kwargs.items())
return (e for e in har['log']['entries'] if pred(e))
def include_keys(entries, fields):
return (dict((f, get_key(e, f)) for f in fields) for e in entries)
def exclude_keys(entries, fields):
raise NotImplementedError
def get_key(_dict, key):
parts = key.split('__', 1)
try:
result = _dict[parts[0]]
except KeyError:
return None
else:
return result if len(parts) == 1 else get_key(result, parts[1])
def test():
d = dict([('a', 'A'),
('p', {'q': 'Q'}),
('x', {'y': {'z': 'Z'}})])
assert get_key(d, 'a') == 'A'
assert get_key(d, 'p__q') == 'Q'
assert get_key(d, 'x__y__z') == 'Z'
print('ok. All tests pass.')
if __name__ == '__main__':
test()
# Usage:
#
## high level object oriented abstraction
# har = Har(filename)
# entries = har.entries(response__status=404, include=['request__url', 'response__status'])
# print(list(entries))
## lower level functional abstraction
# har = read_har(filename)
# entries = filter_entries(har, response__status=404)
# print(list(include_keys(entries, ['request__url', 'response__status'])))
| mit | Python |
d1d9371a728a5d5f5617568b23b072b89295bb61 | Fix typo in Atlys HDMI2USB target. | mithro/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware | targets/atlys/hdmi2usb.py | targets/atlys/hdmi2usb.py | from litex.gen.fhdl.decorators import ClockDomainsRenamer
from litex.soc.integration.soc_core import mem_decoder
from litex.soc.interconnect import stream
from gateware.encoder import EncoderDMAReader, EncoderBuffer, Encoder
from gateware.streamer import USBStreamer
from targets.utils import csr_map_update
from targets.atlys.video import SoC as BaseSoC
class HDMI2USBSoC(BaseSoC):
csr_peripherals = (
"encoder_reader",
"encoder",
)
csr_map_update(BaseSoC.csr_map, csr_peripherals)
mem_map = {
"encoder": 0x50000000, # (shadow @0xd0000000)
}
mem_map.update(BaseSoC.mem_map)
def __init__(self, platform, *args, **kwargs):
BaseSoC.__init__(self, platform, *args, **kwargs)
encoder_port = self.sdram.crossbar.get_port()
self.submodules.encoder_reader = EncoderDMAReader(encoder_port)
encoder_cdc = stream.AsyncFIFO([("data", 128)], 4)
encoder_cdc = ClockDomainsRenamer({"write": "sys",
"read": "encoder"})(encoder_cdc)
encoder_buffer = ClockDomainsRenamer("encoder")(EncoderBuffer())
encoder = Encoder(platform)
encoder_streamer = USBStreamer(platform, platform.request("fx2"))
self.submodules += encoder_cdc, encoder_buffer, encoder, encoder_streamer
self.comb += [
self.encoder_reader.source.connect(encoder_cdc.sink),
encoder_cdc.source.connect(encoder_buffer.sink),
encoder_buffer.source.connect(encoder.sink),
encoder.source.connect(encoder_streamer.sink)
]
self.add_wb_slave(mem_decoder(self.mem_map["encoder"]), encoder.bus)
self.add_memory_region("encoder",
self.mem_map["encoder"] + self.shadow_base, 0x2000)
self.platform.add_period_constraint(encoder_streamer.cd_usb.clk, 10.0)
encoder_streamer.cd_usb.clk.attr.add("keep")
self.crg.cd_encoder.clk.attr.add("keep")
self.platform.add_false_path_constraints(
self.crg.cd_sys.clk,
self.crg.cd_encoder.clk,
encoder_streamer.cd_usb.clk)
SoC = HDMI2USBSoC
| from litex.gen.fhdl.decorators import ClockDomainsRenamer
from litex.soc.integration.soc_core import mem_decoder
from litex.soc.interconnect import stream
from gateware.encoder import EncoderDMAReader, EncoderBuffer, Encoder
from gateware.streamer import USBStreamer
from targets.utils import csr_map_update
from targets.atlys.video import SoC as BaseSoC
class HDMI2USBSoC(BaseSoC):
csr_peripherals = (
"encoder_reader",
"encoder",
)
csr_map_update(BaseSoC.csr_map, csr_peripherals)
mem_map = {
"encoder": 0x50000000, # (shadow @0xd0000000)
}
mem_map.update(BaseSoC.mem_map)
def __init__(self, platform, *args, **kwargs):
BaseSoC.__init__(self, platform, *args, **kwargs)
encoder_port = self.sdram.crossbar.get_port()
self.submodules.encoder_reader = EncoderDMAReader(encoder_port)
encoder_cdc = stream.AsyncFIFO([("data", 128)], 4)
encoder_cdc = ClockDomainsRenamer({"write": "sys",
"read": "encoder"})(encoder_cdc)
encoder_buffer = ClockDomainsRenamer("encoder")(EncoderBuffer())
encoder = Encoder(platform)
encoder_streamer = USBStreamer(platform, platform.request("fx2"))
self.submodules += encoder_cdc, encoder_buffer, encoder, encoder_streamer
self.comb += [
self.encoder_reader.source.connect(encoder_cdc.sink),
encoder_cdc.source.connect(encoder_buffer.sink),
encoder_buffer.source.connect(encoder.sink),
encoder.source.connect(encoder_streamer.sink)
]
self.add_wb_slave(mem_decoder(self.mem_map["encoder"]), encoder.bus)
self.add_memory_region("encoder",
self.mem_map["encoder"] + self.shadow_base, 0x2000)
self.platform.add_period_constraint(encoder_streamer.cd_usb.clk, 10.0)
encoder.streamer.cd_usb.clk.attr.add("keep")
self.crg.cd_encoder.clk.attr.add("keep")
self.platform.add_false_path_constraints(
self.crg.cd_sys.clk,
self.crg.cd_encoder.clk,
encoder_streamer.cd_usb.clk)
SoC = HDMI2USBSoC
| bsd-2-clause | Python |
6a9af602428542157e4a8b42bac8b04803ecd2eb | Create directory inside test | rlee287/pyautoupdate,rlee287/pyautoupdate | testing/test_rm_dirs.py | testing/test_rm_dirs.py | from __future__ import absolute_import, print_function
import pytest
import os
from ..launcher import Launcher
class TestRunProgram:
@pytest.fixture(scope='class')
def create_update_dir(self, request):
os.mkdir('downloads')
files=['tesfeo','fjfesf','fihghg']
filedir=[os.path.join('downloads',fi) for fi in files]
for each_file in filedir:
with open(each_file, mode='w') as file:
file.write('')
def teardown():
for files in filedir:
try:
if os.path.isfile(file_path):
os.unlink(file_path)
raise AssertionError#fail test if files exist
except Exception as e:
print(e, file=sys.stderr)
request.addfinalizer(teardown)
return self.create_update_dir
def test_run(self,create_update_dir):
l = Launcher('','')
l._reset_update_dir()
| from __future__ import absolute_import, print_function
import pytest
import os
from ..launcher import Launcher
class TestRunProgram:
@pytest.fixture(scope='class')
def create_update_dir(self, request):
files=['tesfeo','fjfesf','fihghg']
filedir=[os.path.join('downloads',fi) for fi in files]
for each_file in filedir:
with open(each_file, mode='w') as file:
file.write('')
def teardown():
for files in filedir:
try:
if os.path.isfile(file_path):
os.unlink(file_path)
raise AssertionError#fail test if files exist
except Exception as e:
print(e, file=sys.stderr)
request.addfinalizer(teardown)
return self.create_update_dir
def test_run(self,create_update_dir):
l = Launcher('','')
l._reset_update_dir()
| lgpl-2.1 | Python |
dcde1a0c2b676d0ac11856d2797f3402f5ba038e | Remove unused import | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | wafer/users/models.py | wafer/users/models.py | from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.utils.encoding import python_2_unicode_compatible
from libravatar import libravatar_url
try:
from urllib2 import urlparse
except ImportError:
from urllib import parse as urlparse
from wafer.kv.models import KeyValue
from wafer.talks.models import ACCEPTED, PENDING
@python_2_unicode_compatible
class UserProfile(models.Model):
user = models.OneToOneField(User)
kv = models.ManyToManyField(KeyValue)
contact_number = models.CharField(max_length=16, null=True, blank=True)
bio = models.TextField(null=True, blank=True)
homepage = models.CharField(max_length=256, null=True, blank=True)
# We should probably do social auth instead
# And care about other code hosting sites...
twitter_handle = models.CharField(max_length=15, null=True, blank=True)
github_username = models.CharField(max_length=32, null=True, blank=True)
def __str__(self):
return u'%s' % self.user
def accepted_talks(self):
return self.user.talks.filter(status=ACCEPTED)
def pending_talks(self):
return self.user.talks.filter(status=PENDING)
def avatar_url(self, size=96, https=True, default='mm'):
if not self.user.email:
return None
return libravatar_url(self.user.email, size=size, https=https,
default=default)
def homepage_url(self):
"""Try ensure we prepend http: to the url if there's nothing there
This is to ensure we're not generating relative links in the
user templates."""
if not self.homepage:
return self.homepage
parsed = urlparse.urlparse(self.homepage)
if parsed.scheme:
return self.homepage
# Vague sanity check
abs_url = ''.join(['http://', self.homepage])
if urlparse.urlparse(abs_url).scheme == 'http':
return abs_url
return self.homepage
def display_name(self):
return self.user.get_full_name() or self.user.username
def create_user_profile(sender, instance, created, raw=False, **kwargs):
if raw:
return
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
| from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.utils.encoding import python_2_unicode_compatible
from libravatar import libravatar_url
try:
from urllib2 import urlparse
except ImportError:
from urllib import parse as urlparse
from django.utils.http import urlquote
from wafer.kv.models import KeyValue
from wafer.talks.models import ACCEPTED, PENDING
@python_2_unicode_compatible
class UserProfile(models.Model):
user = models.OneToOneField(User)
kv = models.ManyToManyField(KeyValue)
contact_number = models.CharField(max_length=16, null=True, blank=True)
bio = models.TextField(null=True, blank=True)
homepage = models.CharField(max_length=256, null=True, blank=True)
# We should probably do social auth instead
# And care about other code hosting sites...
twitter_handle = models.CharField(max_length=15, null=True, blank=True)
github_username = models.CharField(max_length=32, null=True, blank=True)
def __str__(self):
return u'%s' % self.user
def accepted_talks(self):
return self.user.talks.filter(status=ACCEPTED)
def pending_talks(self):
return self.user.talks.filter(status=PENDING)
def avatar_url(self, size=96, https=True, default='mm'):
if not self.user.email:
return None
return libravatar_url(self.user.email, size=size, https=https,
default=default)
def homepage_url(self):
"""Try ensure we prepend http: to the url if there's nothing there
This is to ensure we're not generating relative links in the
user templates."""
if not self.homepage:
return self.homepage
parsed = urlparse.urlparse(self.homepage)
if parsed.scheme:
return self.homepage
# Vague sanity check
abs_url = ''.join(['http://', self.homepage])
if urlparse.urlparse(abs_url).scheme == 'http':
return abs_url
return self.homepage
def display_name(self):
return self.user.get_full_name() or self.user.username
def create_user_profile(sender, instance, created, raw=False, **kwargs):
if raw:
return
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
| isc | Python |
19278f5599b68d87976796c3cf4884493c6f3473 | Add module description. | andela-akiura/bucketlist | config/__init__.py | config/__init__.py | """This package stores the config variables of the app."""
| mit | Python | |
c97b760c9ca3213eded060e422ebe65b75f0dd84 | fix big with hide=null | navotsil/Open-Knesset,ofri/Open-Knesset,navotsil/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,DanaOshri/Open-Knesset,DanaOshri/Open-Knesset,otadmor/Open-Knesset,daonb/Open-Knesset,MeirKriheli/Open-Knesset,jspan/Open-Knesset,jspan/Open-Knesset,ofri/Open-Knesset,Shrulik/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,Shrulik/Open-Knesset,habeanf/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,navotsil/Open-Knesset,navotsil/Open-Knesset,OriHoch/Open-Knesset,MeirKriheli/Open-Knesset,daonb/Open-Knesset,OriHoch/Open-Knesset,habeanf/Open-Knesset,OriHoch/Open-Knesset,noamelf/Open-Knesset,OriHoch/Open-Knesset,daonb/Open-Knesset,jspan/Open-Knesset,ofri/Open-Knesset,DanaOshri/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,otadmor/Open-Knesset | src/knesset/mks/admin.py | src/knesset/mks/admin.py | from django import forms
from django.contrib import admin
from django.forms.models import modelformset_factory
from django.forms.models import inlineformset_factory
from django.contrib.contenttypes import generic
from django.db.models import Q
from knesset.mks.models import *
from knesset.links.models import Link
from knesset.video.models import Video
class MembershipInline(admin.TabularInline):
model = Membership
extra = 1
class MemberLinksInline(generic.GenericTabularInline):
model = Link
ct_fk_field = 'object_pk'
extra = 1
class MemberAltnameInline(admin.TabularInline):
model = MemberAltname
extra = 1
class MemberRelatedVideosInline(generic.GenericTabularInline):
model = Video
ct_fk_field = 'object_pk'
can_delete = False
fields = ['title','description','embed_link','group','sticky','hide']
ordering = ['group','-sticky','-published']
readonly_fields = ['title','description','embed_link','group']
extra = 0
def queryset(self, request):
qs = super(MemberRelatedVideosInline, self).queryset(request)
qs = qs.filter(Q(hide=False) | Q(hide=None))
return qs
class PartyAdmin(admin.ModelAdmin):
ordering = ('name',)
# fields = ('name','start_date','end_date', 'is_coalition','number_of_members')
list_display = ('name','start_date', 'end_date','is_coalition', 'number_of_members', 'number_of_seats')
inlines = (MembershipInline,)
admin.site.register(Party, PartyAdmin)
class MemberAdmin(admin.ModelAdmin):
ordering = ('name',)
# fields = ('name','start_date','end_date')
list_display = ('name','PartiesString')
inlines = (MembershipInline, MemberLinksInline, MemberAltnameInline, MemberRelatedVideosInline)
# A template for a very customized change view:
change_form_template = 'admin/simple/change_form_with_extra.html'
def change_view(self, request, object_id, extra_context=None):
m = Member.objects.get(id=object_id)
my_context = {
'extra': {'hi_corr':m.CorrelationListToString(m.HighestCorrelations()),
'low_corr':m.CorrelationListToString(m.LowestCorrelations()),
}
}
return super(MemberAdmin, self).change_view(request, object_id,
extra_context=my_context)
admin.site.register(Member, MemberAdmin)
class CorrelationAdmin(admin.ModelAdmin):
ordering = ('-normalized_score',)
admin.site.register(Correlation, CorrelationAdmin)
class MembershipAdmin(admin.ModelAdmin):
ordering = ('member__name',)
admin.site.register(Membership, MembershipAdmin)
| from django import forms
from django.contrib import admin
from django.forms.models import modelformset_factory
from django.forms.models import inlineformset_factory
from django.contrib.contenttypes import generic
from knesset.mks.models import *
from knesset.links.models import Link
from knesset.video.models import Video
class MembershipInline(admin.TabularInline):
model = Membership
extra = 1
class MemberLinksInline(generic.GenericTabularInline):
model = Link
ct_fk_field = 'object_pk'
extra = 1
class MemberAltnameInline(admin.TabularInline):
model = MemberAltname
extra = 1
class MemberRelatedVideosInline(generic.GenericTabularInline):
model = Video
ct_fk_field = 'object_pk'
can_delete = False
fields = ['title','description','embed_link','group','sticky','hide']
ordering = ['group','-sticky','-published']
readonly_fields = ['title','description','embed_link','group']
extra = 0
def queryset(self, request):
qs = super(MemberRelatedVideosInline, self).queryset(request)
qs = qs.filter(hide=False)
return qs
class PartyAdmin(admin.ModelAdmin):
ordering = ('name',)
# fields = ('name','start_date','end_date', 'is_coalition','number_of_members')
list_display = ('name','start_date', 'end_date','is_coalition', 'number_of_members', 'number_of_seats')
inlines = (MembershipInline,)
admin.site.register(Party, PartyAdmin)
class MemberAdmin(admin.ModelAdmin):
ordering = ('name',)
# fields = ('name','start_date','end_date')
list_display = ('name','PartiesString')
inlines = (MembershipInline, MemberLinksInline, MemberAltnameInline, MemberRelatedVideosInline)
# A template for a very customized change view:
change_form_template = 'admin/simple/change_form_with_extra.html'
def change_view(self, request, object_id, extra_context=None):
m = Member.objects.get(id=object_id)
my_context = {
'extra': {'hi_corr':m.CorrelationListToString(m.HighestCorrelations()),
'low_corr':m.CorrelationListToString(m.LowestCorrelations()),
}
}
return super(MemberAdmin, self).change_view(request, object_id,
extra_context=my_context)
admin.site.register(Member, MemberAdmin)
class CorrelationAdmin(admin.ModelAdmin):
ordering = ('-normalized_score',)
admin.site.register(Correlation, CorrelationAdmin)
class MembershipAdmin(admin.ModelAdmin):
ordering = ('member__name',)
admin.site.register(Membership, MembershipAdmin)
| bsd-3-clause | Python |
b6b15ce13e3633d24e00028d12081a58c6391d05 | Update muLAn version | muLAn-project/muLAn,muLAn-project/muLAn | muLAn/_version.py | muLAn/_version.py | version_info = (0, 8, 14)
__version__ = '.'.join(map(str, version_info))
| version_info = (0, 8, 6)
__version__ = '.'.join(map(str, version_info))
| mit | Python |
54cdc132bd4eae881b233ce8b94a0e532bf30883 | add comments. | PyLadiesSeoul/LampGenie | 1_get_Url.py | 1_get_Url.py | # -*- coding:utf-8 -*-
# import란?
import requests
import BeautifulSoup
# string을 저장.
mobile_site_url = "http://www.aladin.co.kr/m/off/gate.aspx?"
# requests 모듈 사용하기.
response = requests.get(mobile_site_url)
content = response.content
# BeautifulSoup 사용하기.
shop_list = BeautifulSoup.BeautifulSoup(content).findAll('td')
# for문과 print 하기.
for x in shop_list:
print "=" * 50
print x.text
| import requests
import BeautifulSoup
mobile_site_url = "http://www.aladin.co.kr/m/off/gate.aspx?"
response = requests.get(mobile_site_url)
content = response.content
shop_list = BeautifulSoup.BeautifulSoup(content).findAll('td')
for x in shop_list:
print "=" * 50
print x.text
| mit | Python |
8901aecc35f939a2b3a62665afff1d50bdab5867 | Bump version number | nabla-c0d3/nassl,nabla-c0d3/nassl,nabla-c0d3/nassl | nassl/__init__.py | nassl/__init__.py | __author__ = "Alban Diquet"
__version__ = "4.0.0"
| __author__ = "Alban Diquet"
__version__ = "3.1.0"
| agpl-3.0 | Python |
a805d414557a14a8588b6e34a9fbb93cc87651df | Change ttl_skew to 50%, to allow a task to run twice before riemann notices that it never checked in. | crashlytics/riemann-sumd | lib/scheduler.py | lib/scheduler.py | import time
import logging
log = logging.getLogger(__name__)
class TaskSchedule():
def __init__(self):
self.tasks = []
log.debug("TaskSchedule created")
def add(self, task, ttl_skew=0.5):
offset = ((ttl_skew * task.ttl) - task.skew())
log.info("Scheduling '%s' for %ss from now" % (task.name, offset))
if task.skew() > (task.ttl * ttl_skew):
log.warning("Task skew of %s is > %s%% of TTL(%s) for '%s'" % (task.skew(), (ttl_skew*100), task.ttl, task.name))
else:
log.debug("Task skew for '%s' is %s" % ( task.name, task.skew()))
deadline = time.time() + offset
self.tasks.append((task, deadline))
def update(self):
self.tasks.sort(key=lambda task: task[1], reverse=True)
def next(self):
task, deadline = self.tasks.pop()
log.info("Next task is '%s' scheduled to run in %ss" % (task.name, deadline-time.time()))
return (task, deadline)
def ready(self, deadline, grace=1.1):
now = time.time()
return (deadline - now) < grace
def waiting(self):
self.update()
return len([t for t in self.tasks if self.ready(t[1])]) | import time
import logging
log = logging.getLogger(__name__)
class TaskSchedule():
def __init__(self):
self.tasks = []
log.debug("TaskSchedule created")
def add(self, task, ttl_skew=0.8):
offset = ((ttl_skew * task.ttl) - task.skew())
log.info("Scheduling '%s' for %ss from now" % (task.name, offset))
if task.skew() > (task.ttl * 0.5):
log.warning("Task skew of %s is > 50%% of TTL(%s) for '%s'" % (task.skew(), task.ttl, task.name))
else:
log.debug("Task skew for '%s' is %s" % ( task.name, task.skew()))
deadline = time.time() + offset
self.tasks.append((task, deadline))
def update(self):
self.tasks.sort(key=lambda task: task[1], reverse=True)
def next(self):
task, deadline = self.tasks.pop()
log.info("Next task is '%s' scheduled to run in %ss" % (task.name, deadline-time.time()))
return (task, deadline)
def ready(self, deadline, grace=1.1):
now = time.time()
return (deadline - now) < grace
def waiting(self):
self.update()
return len([t for t in self.tasks if self.ready(t[1])]) | mit | Python |
c108deb403d49eb2b8fddcc49b2ba585c676e730 | Move spec matching logic to base layer | johnsca/layer-apache-hadoop-datanode,juju-solutions/layer-apache-hadoop-datanode | reactive/datanode.py | reactive/datanode.py | from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
from charmhelpers.core import hookenv
@when('hadoop.installed')
@when_not('namenode.related')
def blocked():
hookenv.status_set('blocked', 'Waiting for relation to NameNode')
@when('hadoop.installed', 'namenode.related')
@when_not('namenode.spec.mismatch', 'namenode.ready', 'datanode.started')
def waiting(namenode): # pylint: disable=unused-argument
hookenv.status_set('waiting', 'Waiting for NameNode')
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.host(), namenode.port())
utils.install_ssh_key('ubuntu', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
namenode.register()
hadoop.open_ports('datanode')
set_state('datanode.started')
hookenv.status_set('active', 'Ready')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
| from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
from charmhelpers.core import hookenv
@when('hadoop.installed')
@when_not('namenode.related')
def blocked():
hookenv.status_set('blocked', 'Waiting for relation to NameNode')
@when('hadoop.installed', 'namenode.related')
def set_spec(namenode):
hadoop = get_hadoop_base()
namenode.set_datanode_spec(hadoop.spec())
@when('namenode.spec.mismatch')
def spec_mismatch(namenode):
hookenv.status_set('blocked',
'Spec mismatch with NameNode: {} != {}'.format(
namenode.datanode_spec(), namenode.namenode_spec()))
@when('hadoop.installed', 'namenode.related')
@when_not('namenode.spec.mismatch', 'namenode.ready', 'datanode.started')
def waiting(namenode): # pylint: disable=unused-argument
hookenv.status_set('waiting', 'Waiting for NameNode')
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.host(), namenode.port())
utils.install_ssh_key('ubuntu', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
namenode.register()
hadoop.open_ports('datanode')
set_state('datanode.started')
hookenv.status_set('active', 'Ready')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
| apache-2.0 | Python |
01a2c81d1abe82b758631c141823536f9da67b01 | make bigger section for data models | mylokin/redisext,mylokin/redisext | redisext/__init__.py | redisext/__init__.py | '''
Introduction
------------
Redisext is a tool for data modeling. Its primary goal is to provide light
interface to well-known data models based on Redis such as queues, hashmaps,
counters, pools and stacks. Redisext could be treated as a ORM for Redis.
Tutorial
--------
Counter Model allows you to build counters in a minute. For example::
import redisext.backend.redis
import redisext.counter
import redisext.serializer
class Connection(redisext.backend.redis.Connection):
MASTER = {'host': 'localhost', 'port': 6379, 'db': 0}
class Visitors(Connection, redisext.counter.Counter):
SERIALIZER = redisext.serializer.Numeric
This is it! You can start using it. Example of mythical frontpage view::
def frontpage():
visitors_counter = Visitors('fronpage')
visitors_counter.increment()
context = {
'visitors': visitors_counter.get()
}
return context
.. note::
Details on :class:`redisext.counter.Counter`.
Data Models
===========
.. automodule:: redisext.counter
.. automodule:: redisext.hashmap
.. automodule:: redisext.pool
.. automodule:: redisext.queue
.. automodule:: redisext.stack
Abstract Model
--------------
.. automodule:: redisext.models
.. automodule:: redisext.serializer
.. automodule:: redisext.key
.. automodule:: redisext.backend
'''
| '''
Introduction
------------
Redisext is a tool for data modeling. Its primary goal is to provide light
interface to well-known data models based on Redis such as queues, hashmaps,
counters, pools and stacks. Redisext could be treated as a ORM for Redis.
Tutorial
--------
Counter Model allows you to build counters in a minute. For example::
import redisext.backend.redis
import redisext.counter
import redisext.serializer
class Connection(redisext.backend.redis.Connection):
MASTER = {'host': 'localhost', 'port': 6379, 'db': 0}
class Visitors(Connection, redisext.counter.Counter):
SERIALIZER = redisext.serializer.Numeric
This is it! You can start using it. Example of mythical frontpage view::
def frontpage():
visitors_counter = Visitors('fronpage')
visitors_counter.increment()
context = {
'visitors': visitors_counter.get()
}
return context
.. note::
Details on :class:`redisext.counter.Counter`.
Data Models
-----------
.. automodule:: redisext.counter
.. automodule:: redisext.hashmap
.. automodule:: redisext.pool
.. automodule:: redisext.queue
.. automodule:: redisext.stack
Abstract Model
--------------
.. automodule:: redisext.models
.. automodule:: redisext.serializer
.. automodule:: redisext.key
.. automodule:: redisext.backend
'''
| mit | Python |
73f8a1e2e0006c2a37ae6264afe70a8207ffbb54 | Bump version. | gzzhanghao/mitmproxy,fimad/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,cortesi/mitmproxy,xaxa89/mitmproxy,dufferzafar/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,laurmurclar/mitmproxy,ParthGanatra/mitmproxy,dufferzafar/mitmproxy,tdickers/mitmproxy,MatthewShao/mitmproxy,vhaupert/mitmproxy,Kriechi/mitmproxy,mosajjal/mitmproxy,zlorb/mitmproxy,ikoz/mitmproxy,jvillacorta/mitmproxy,zlorb/mitmproxy,xaxa89/mitmproxy,ddworken/mitmproxy,gzzhanghao/mitmproxy,xaxa89/mitmproxy,ParthGanatra/mitmproxy,ujjwal96/mitmproxy,dwfreed/mitmproxy,mosajjal/mitmproxy,jvillacorta/mitmproxy,gzzhanghao/mitmproxy,dwfreed/mitmproxy,fimad/mitmproxy,ujjwal96/mitmproxy,cortesi/mitmproxy,pombredanne/netlib,Kriechi/netlib,mitmproxy/mitmproxy,vhaupert/mitmproxy,ujjwal96/mitmproxy,ddworken/mitmproxy,laurmurclar/mitmproxy,mhils/mitmproxy,ujjwal96/mitmproxy,Kriechi/mitmproxy,fimad/mitmproxy,Kriechi/mitmproxy,mosajjal/mitmproxy,jvillacorta/mitmproxy,mosajjal/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,ParthGanatra/mitmproxy,tdickers/mitmproxy,mitmproxy/mitmproxy,mitmproxy/netlib,mhils/mitmproxy,xaxa89/mitmproxy,pombredanne/netlib,ParthGanatra/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,dwfreed/mitmproxy,dufferzafar/mitmproxy,ikoz/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,cortesi/mitmproxy,mhils/mitmproxy,dwfreed/mitmproxy,akihikodaki/netlib,MatthewShao/mitmproxy,dufferzafar/mitmproxy,MatthewShao/mitmproxy,Kriechi/netlib,cortesi/mitmproxy,zlorb/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,MatthewShao/mitmproxy,ddworken/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,tdickers/mitmproxy,fimad/mitmproxy,jvillacorta/mitmproxy,mhils/mitmproxy,tdickers/mitmproxy,ikoz/mitmproxy,mhils/mitmproxy,akihikodaki/netlib,StevenVanAcker/mitmproxy,ikoz/mitmproxy | netlib/version.py | netlib/version.py | IVERSION = (0, 9, 1)
VERSION = ".".join(str(i) for i in IVERSION)
NAME = "netlib"
NAMEVERSION = NAME + " " + VERSION
| IVERSION = (0, 9)
VERSION = ".".join(str(i) for i in IVERSION)
NAME = "netlib"
NAMEVERSION = NAME + " " + VERSION
| mit | Python |
150c79b70f8ec0992083ef8d2499623f94765097 | Add context to render_basic. | FlipperPA/wagtail-components | restructured_text/restructured_text.py | restructured_text/restructured_text.py | from django.utils.safestring import mark_safe
from docutils.core import publish_parts
from wagtail.wagtailcore import blocks
class RSTBlock(blocks.TextBlock):
"""
A ReSTructured text block for Wagtail streamfields.
"""
class Meta:
icon = 'code'
def render_basic(self, value, context=None):
return publish_parts(value, writer_name='html')['body']
| from django.utils.safestring import mark_safe
from docutils.core import publish_parts
from wagtail.wagtailcore import blocks
class RSTBlock(blocks.TextBlock):
"""
A ReSTructured text block for Wagtail streamfields.
"""
class Meta:
icon = 'code'
def render_basic(self, value):
return publish_parts(value, writer_name='html')['body']
| bsd-3-clause | Python |
d1b78294bde544d2db1c44a96e0986be4419e68d | Add docstring to utils.is_json_string_list | matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo | web/utils/__init__.py | web/utils/__init__.py | import json
from django.core.exceptions import ValidationError
def is_json_string_list(s):
'''
Checks if the string s represents a list of strings in JSON.
The function does nothing if s represents a valid list of strings,
or raises a suitable ValidationError if not.
'''
try:
val = json.loads(s)
except:
raise ValidationError('Not a JSON value.')
if type(val) is not list:
raise ValidationError('Not a JSON list.')
for x in val:
if type(x) is not unicode:
raise ValidationError('Not a JSON list of strings.')
def truncate(s, max_length=50, indicator="..."):
'''
Returns the string s truncated to at most max_length characters.
If s is shorter than max_length, the function returns it as it was,
otherwise, it truncates it to max_length characters (counting the string
indicating the truncation). If the indicator itself is longer than
max_length, we raise a ValueError.
'''
if len(s) <= max_length:
return s
elif max_length < len(indicator):
raise ValueError('Indicator longer than maximum length.')
else:
return u'{0}{1}'.format(s[:max_length - len(indicator)], indicator)
| import json
from django.core.exceptions import ValidationError
def is_json_string_list(s):
try:
val = json.loads(s)
except:
raise ValidationError('Not a JSON value.')
if type(val) is not list:
raise ValidationError('Not a JSON list.')
for x in val:
if type(x) is not unicode:
raise ValidationError('Not a JSON list of strings.')
def truncate(s, max_length=50, indicator="..."):
'''
Returns the string s truncated to at most max_length characters.
If s is shorter than max_length, the function returns it as it was,
otherwise, it truncates it to max_length characters (counting the string
indicating the truncation). If the indicator itself is longer than
max_length, we raise a ValueError.
'''
if len(s) <= max_length:
return s
elif max_length < len(indicator):
raise ValueError('Indicator longer than maximum length.')
else:
return u'{0}{1}'.format(s[:max_length - len(indicator)], indicator)
| agpl-3.0 | Python |
9ca1d1f1ddf2684a99d3730b59b2389e0e3f09cf | Update writeInputs.py | radical-cybertools/radical.repex | repex/writeInputs.py | repex/writeInputs.py | import os
import sys
import fileinput
import shutil
def writeInputs(max_temp, min_temp, replicas, timesteps, basename):
max_temp = max_temp
min_temp = min_temp
replicas = replicas
timesteps = timesteps
#for i in range(replicas):
#shutil.copy2('mdin', 'mdin_{0}'.format(i))
Temps_List = [
min_temp + x * (max_temp - min_temp) / replicas
for x in range(replicas)
]
#print len(Temps_List)
## for every entry in Temp_list
## create new copy of mdin_{n}
## Find and replace temperature in the file
## write new file
InputFile = os.getcwd() + "/" + basename + ".mdin"
for i in range(len(Temps_List)):
mdinFile = open(os.getcwd() + '/' + basename + '.mdin', 'r')
tbuffer = mdinFile.read()
tbuffer = tbuffer.replace("@temperature@", str(Temps_List[i]))
tbuffer = tbuffer.replace("@timesteps@", str(timesteps))
mdinFile.close()
w_file = open('mdin_{0}'.format(i), "w")
w_file.write(tbuffer)
w_file.close()
| import os
import sys
import fileinput
import shutil
def writeInputs(max_temp, min_temp, replicas, timesteps, basename):
max_temp = max_temp
min_temp = min_temp
replicas = replicas
timesteps = timesteps
#for i in range(replicas):
#shutil.copy2('mdin', 'mdin_{0}'.format(i))
Temps_List = [
min_temp + x * (max_temp - min_temp) / replicas
for x in range(replicas)
]
#print len(Temps_List)
## for every entry in Temp_list
## create new copy of mdin_{n}
## Find and replace temperature in the file
## write new file
InputFile = os.getcwd() + "/" + basename + ".mdin"
for i in range(len(Temps_List)):
mdinFile = open(os.getcwd() + '/' + basename + '.mdin', 'r')
placeholder1 = '@temperature@'
placeholder2 = '@timesteps@'
tbuffer = mdinFile.read()
tbuffer = tbuffer.replace("@temperature@", str(Temps_List[i]))
tbuffer = tbuffer.replace("@timesteps@", str(timesteps))
mdinFile.close()
w_file = open('mdin_{0}'.format(i), "w")
w_file.write(tbuffer)
w_file.close()
| mit | Python |
5950625aa536e6a0e4d2ca89393c323df29e0d3d | Fix build against django 4.0 Django 4.0 dropped `django.conf.urls.url()` for `django.urls.re_path()` | maciej-gol/tenant-schemas-celery,maciej-gol/tenant-schemas-celery | test_app/test_app/urls.py | test_app/test_app/urls.py | """test_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.urls import re_path
from django.contrib import admin
urlpatterns = [
re_path(r'^admin/', admin.site.urls),
]
| """test_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| mit | Python |
548e6736c41ca11a2c07f2ce6104ab3eff7f3517 | Update CLAW package with version 1.2.1 (#10528) | LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/claw/package.py | var/spack/repos/builtin/packages/claw/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Claw(CMakePackage):
"""CLAW Compiler targets performance portability problem in climate and
weather application written in Fortran. From a single source code, it
generates architecture specific code decorated with OpenMP or OpenACC"""
homepage = 'https://claw-project.github.io/'
git = 'https://github.com/claw-project/claw-compiler.git'
maintainers = ['clementval']
version('1.2.1', commit='939989ab52edb5c292476e729608725654d0a59a', submodules=True)
version('1.2.0', commit='fc9c50fe02be97b910ff9c7015064f89be88a3a2', submodules=True)
version('1.1.0', commit='16b165a443b11b025a77cad830b1280b8c9bcf01', submodules=True)
depends_on('cmake@3.0:', type='build')
depends_on('java@7:')
depends_on('ant@1.9:')
depends_on('libxml2')
depends_on('bison')
def cmake_args(self):
args = []
spec = self.spec
args.append('-DOMNI_CONF_OPTION=--with-libxml2={0}'.
format(spec['libxml2'].prefix))
args.append('-DCMAKE_Fortran_COMPILER={0}'.
format(self.compiler.fc))
return args
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Claw(CMakePackage):
"""CLAW Compiler targets performance portability problem in climate and
weather application written in Fortran. From a single source code, it
generates architecture specific code decorated with OpenMP or OpenACC"""
homepage = 'https://claw-project.github.io/'
git = 'https://github.com/claw-project/claw-compiler.git'
maintainers = ['clementval']
version('1.2.0', commit='fc9c50fe02be97b910ff9c7015064f89be88a3a2', submodules=True)
version('1.1.0', commit='16b165a443b11b025a77cad830b1280b8c9bcf01', submodules=True)
depends_on('cmake@3.0:', type='build')
depends_on('java@7:')
depends_on('ant@1.9:')
depends_on('libxml2')
depends_on('bison')
def cmake_args(self):
args = []
spec = self.spec
args.append('-DOMNI_CONF_OPTION=--with-libxml2={0}'.
format(spec['libxml2'].prefix))
args.append('-DCMAKE_Fortran_COMPILER={0}'.
format(self.compiler.fc))
return args
| lgpl-2.1 | Python |
22f0e0200e560dc3fbbc3c9911ba32bb52c298df | Add a warning when OCIO is set to invalid path, and fallback to the blender config path. | prman-pixar/RenderManForBlender,prman-pixar/RenderManForBlender | rfb_utils/color_manager_blender.py | rfb_utils/color_manager_blender.py | import os
import bpy
import sys
from ..rfb_utils.envconfig_utils import envconfig
try:
from rman_utils.color_manager import ColorManager
except:
ColorManager = None
__clrmgr__ = None
__has_warned__ = False
class ColorManagerBlender(ColorManager):
def __init__(self, config_path, **kwargs):
super(ColorManagerBlender, self).__init__(config_path, **kwargs)
def update(self):
ociopath = get_env_config_path()
super(ColorManagerBlender, self).update(ociopath)
def color_manager():
"""return the color manager singleton
"""
if __clrmgr__ is None:
init()
return __clrmgr__
def init():
"""initialize ColorManager
"""
global __clrmgr__
if __clrmgr__ is None:
ociopath = get_env_config_path()
if ColorManager:
__clrmgr__ = ColorManagerBlender(ociopath)
def get_env_config_path():
"""return ocio config path from the environment
"""
global __has_warned__
blender_config_path = envconfig().get_blender_ocio_config()
envconfig_path = envconfig().getenv('OCIO', None)
ociopath = blender_config_path
if envconfig_path:
if os.path.exists(envconfig_path):
ociopath = envconfig_path
elif not __has_warned__:
bpy.ops.renderman.printer('INVOKE_DEFAULT', level='WARNING', message='OCIO environment value (%s) is invalid.' % envconfig_path)
__has_warned__ = True
return ociopath
def get_config_path():
"""return ocio config path
"""
clrmgr = color_manager()
if clrmgr:
return clrmgr.config_file_path()
return get_env_config_path()
def get_colorspace_name():
"""return the scene colorspace name. updating with $OCIO
"""
clrmgr = color_manager()
if ColorManager:
clrmgr.update()
return clrmgr.scene_colorspace_name
return "" | import os
import bpy
import sys
from ..rfb_utils.envconfig_utils import envconfig
try:
from rman_utils.color_manager import ColorManager
except:
ColorManager = None
__clrmgr__ = None
class ColorManagerBlender(ColorManager):
def __init__(self, config_path, **kwargs):
super(ColorManagerBlender, self).__init__(config_path, **kwargs)
def update(self):
ociopath = get_env_config_path()
super(ColorManagerBlender, self).update(ociopath)
def color_manager():
"""return the color manager singleton
"""
if __clrmgr__ is None:
init()
return __clrmgr__
def init():
"""initialize ColorManager
"""
global __clrmgr__
if __clrmgr__ is None:
ociopath = get_env_config_path()
if ColorManager:
__clrmgr__ = ColorManagerBlender(ociopath)
def get_env_config_path():
"""return ocio config path from the environment
"""
blender_config_path = envconfig().get_blender_ocio_config()
ociopath = envconfig().getenv('OCIO', blender_config_path)
return ociopath
def get_config_path():
"""return ocio config path
"""
clrmgr = color_manager()
if clrmgr:
return clrmgr.config_file_path()
return get_env_config_path()
def get_colorspace_name():
"""return the scene colorspace name. updating with $OCIO
"""
clrmgr = color_manager()
if ColorManager:
clrmgr.update()
return clrmgr.scene_colorspace_name
return "" | mit | Python |
a06e23dcd2956fb5b9f6dff48714ff9c9b4bd5b7 | Update flows.py | illotum/sdn-fabric,illotum/sdn-fabric | fabric/flows.py | fabric/flows.py | """
This module containes pure functions to help with
creating flow table entries.
"""
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
def compose(actions=[], to_table=0):
"""
Compose instructions set from given entries
:param actions: actions to perform after match
:type actions: list of `parser.OFPAction`
:param to_table: table to switch to after applying all actions;
value 0 (default table) will be ignored
:type to_table: int
:returns: instructions for `parser.OFPFlowMod`
:rtype: list of `parser.OFPInstruction`
"""
inst = []
if actions:
inst.append(
parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
actions))
if to_table:
inst.append(parser.OFPInstructionGoto(to_table))
return inst
def dpid_to_mac(dpid):
pass
def flow_to_port(dl_dst,out_port,tableno=DEFAULT_TABLE_SOMETHING):
'''
=>parser.OFPFlowMod
'''
pass
def flow_to_remote(dl_dst,dpid):
pass
def to_local():
'''
returns to_port(,,LOCAL---can be anything)
'''
pass
def match_all():
'''
=>Match
3.
'''
pass
def flow_install_transit():
'''
=> FlowMod
2.
'''
pass
def match_inbound(dpid):
'''
=> FLOWMOD
1.
'''
pass
| """
This module containes pure functions to help with
creating flow table entries.
"""
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
def compose(actions=[], to_table=0):
"""
Compose instructions set from given entries
:param actions: actions to perform after match
:type actions: list of `parser.OFPAction`
:param to_table: table to switch to after applying all actions;
value 0 (default table) will be ignored
:type to_table: int
:returns: instructions for `parser.OFPFlowMod`
:rtype: list of `parser.OFPInstruction`
"""
inst = []
if actions:
inst.append(
parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
actions))
if to_table:
inst.append(parser.OFPInstructionGoto(to_table))
return inst
def action_to_local():
pass
def action_to_remote():
pass
def action_decapsulate():
pass
def match_all():
pass
def match_transit():
pass
def match_inbound(dpid):
pass
| apache-2.0 | Python |
6af1ff0a3dd89a06a1fd4b6ee4428405f365da17 | Bump the version string to v0.12.0 (#764) | tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io | tensorflow_io/core/python/ops/version_ops.py | tensorflow_io/core/python/ops/version_ops.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow>=2.1.0,<2.2.0'
version = '0.12.0'
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow==2.1.0'
version = '0.11.0'
| apache-2.0 | Python |
7ba52a548693603c73366375408413ead578e671 | add test for console output error | titusz/onixcheck | tests/test_onixcheck.py | tests/test_onixcheck.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from onixcheck.__main__ import main
from onixcheck import data
def test_main_o3_ref_valid(capsys):
argv = [data.VALID_ONIX2_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_o2_ref_valid(capsys):
argv = [data.VALID_ONIX2_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_has_ns_valid(capsys):
argv = [data.VALID_ONIX3_REF_NS]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_plain_invalid(capsys):
argv = [data.INVALID_ONIX3_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'INVALID' in err
assert exit_code == 1
def test_main_debug(capsys):
argv = [data.VALID_ONIX3_REF, '-d']
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'DEBUG' in out
assert exit_code == 0
def test_console_ouptup_encoding(capsys):
argv = [data.WIN_CONSOLE_ISSUE]
exit_code = main(argv)
assert exit_code == 1
out, err = capsys.readouterr()
assert 'UnicodeEncodeError' not in out
assert 'UnicodeEncodeError' not in err
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from onixcheck.__main__ import main
from onixcheck import data
def test_main_o3_ref_valid(capsys):
argv = [data.VALID_ONIX2_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_o2_ref_valid(capsys):
argv = [data.VALID_ONIX2_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_has_ns_valid(capsys):
argv = [data.VALID_ONIX3_REF_NS]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'VALID' in out
assert 'INVALID' not in err
assert exit_code == 0
def test_main_plain_invalid(capsys):
argv = [data.INVALID_ONIX3_REF]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'INVALID' in err
assert exit_code == 1
def test_main_debug(capsys):
argv = [data.VALID_ONIX3_REF, '-d']
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'DEBUG' in out
assert exit_code == 0
| bsd-2-clause | Python |
7fcd1690bd61ee9781ec1f2adeed19731afa883c | simplify example | doubleO8/versionone-sdk-spoon,aarcro/VersionOne.SDK.Python,coddingtonbear/VersionOne.SDK.Python,versionone/VersionOne.SDK.Python | examples/meta_example.py | examples/meta_example.py | from v1pysdk import V1Meta
v1 = V1Meta()
my_story = v1.Story.find_by_id('1005')
print s.Name
# 'New Story 2'
s.Owners
# [<v1pysdk.v1meta.Member object at 0x02AD9710>]
s.Scope
# <v1pysdk.v1meta.Scope object at 0x02AB2550>
for my_story in v1.Story,query("Name='New Story 2'"):
print my_story.Name
|
from v1pysdk.v1meta import *
meta = V1Meta()
Story = meta.asset_class('Story')
s = Story('1005')
print s.Name
# 'New Story 2'
s.Owners
# [<v1pysdk.v1meta.Member object at 0x02AD9710>]
s.Scope
# <v1pysdk.v1meta.Scope object at 0x02AB2550>
| bsd-3-clause | Python |
e25e26f2cb628b025330a70ef2e2d0b888259d08 | Stop app only if it started | pygeek/flower,Lingling7/flower,ucb-bar/bar-crawl-web,barseghyanartur/flower,allengaller/flower,lucius-feng/flower,pj/flower,lucius-feng/flower,ChinaQuants/flower,tellapart/flower,asmodehn/flower,tellapart/flower,Lingling7/flower,jzhou77/flower,barseghyanartur/flower,lucius-feng/flower,ChinaQuants/flower,marrybird/flower,pj/flower,pj/flower,jzhou77/flower,marrybird/flower,barseghyanartur/flower,pygeek/flower,allengaller/flower,asmodehn/flower,asmodehn/flower,raphaelmerx/flower,raphaelmerx/flower,alexmojaki/flower,raphaelmerx/flower,allengaller/flower,tellapart/flower,marrybird/flower,jzhou77/flower,alexmojaki/flower,Lingling7/flower,ucb-bar/bar-crawl-web,alexmojaki/flower,ChinaQuants/flower,pygeek/flower,ucb-bar/bar-crawl-web | flower/app.py | flower/app.py | from __future__ import absolute_import
import logging
from functools import partial
from concurrent.futures import ThreadPoolExecutor
import celery
import tornado.web
from tornado import ioloop
from .api import control
from .urls import handlers
from .events import Events
from .options import default_options
logger = logging.getLogger(__name__)
class Flower(tornado.web.Application):
pool_executor_cls = ThreadPoolExecutor
max_workers = 4
def __init__(self, options=None, capp=None, events=None,
io_loop=None, **kwargs):
kwargs.update(handlers=handlers)
super(Flower, self).__init__(**kwargs)
self.options = options or default_options
self.io_loop = io_loop or ioloop.IOLoop.instance()
self.ssl_options = kwargs.get('ssl_options', None)
self.capp = capp or celery.Celery()
self.events = events or Events(self.capp, db=self.options.db,
persistent=self.options.persistent,
enable_events=self.options.enable_events,
io_loop=self.io_loop,
max_tasks_in_memory=self.options.max_tasks)
self.started = False
def start(self):
self.pool = self.pool_executor_cls(max_workers=self.max_workers)
self.events.start()
self.listen(self.options.port, address=self.options.address,
ssl_options=self.ssl_options, xheaders=self.options.xheaders)
self.io_loop.add_future(
control.ControlHandler.update_workers(app=self),
callback=lambda x: logger.debug('Successfully updated worker cache'))
self.io_loop.start()
self.started = True
def stop(self):
if self.started:
self.events.stop()
self.pool.shutdown(wait=False)
self.started = False
def delay(self, method, *args, **kwargs):
return self.pool.submit(partial(method, *args, **kwargs))
@property
def transport(self):
return getattr(self.capp.connection().transport,
'driver_type', None)
| from __future__ import absolute_import
import logging
from functools import partial
from concurrent.futures import ThreadPoolExecutor
import celery
import tornado.web
from tornado import ioloop
from .api import control
from .urls import handlers
from .events import Events
from .options import default_options
logger = logging.getLogger(__name__)
class Flower(tornado.web.Application):
pool_executor_cls = ThreadPoolExecutor
max_workers = 4
def __init__(self, options=None, capp=None, events=None,
io_loop=None, **kwargs):
kwargs.update(handlers=handlers)
super(Flower, self).__init__(**kwargs)
self.options = options or default_options
self.io_loop = io_loop or ioloop.IOLoop.instance()
self.ssl_options = kwargs.get('ssl_options', None)
self.capp = capp or celery.Celery()
self.events = events or Events(self.capp, db=self.options.db,
persistent=self.options.persistent,
enable_events=self.options.enable_events,
io_loop=self.io_loop,
max_tasks_in_memory=self.options.max_tasks)
def start(self):
self.pool = self.pool_executor_cls(max_workers=self.max_workers)
self.events.start()
self.listen(self.options.port, address=self.options.address,
ssl_options=self.ssl_options, xheaders=self.options.xheaders)
self.io_loop.add_future(
control.ControlHandler.update_workers(app=self),
callback=lambda x: logger.debug('Successfully updated worker cache'))
self.io_loop.start()
def stop(self):
self.events.stop()
self.pool.shutdown(wait=False)
def delay(self, method, *args, **kwargs):
return self.pool.submit(partial(method, *args, **kwargs))
@property
def transport(self):
return getattr(self.capp.connection().transport,
'driver_type', None)
| bsd-3-clause | Python |
159aea1c97b8e8de45802cace031e7206c3c8fec | Add a __str__ for Tile. | chipx86/the-cure | thecure/sprites/tile.py | thecure/sprites/tile.py | from thecure.resources import load_spritesheet_frame
from thecure.sprites import Sprite
class Tile(Sprite):
NAME = 'tile'
WIDTH = 64
HEIGHT = 64
NEED_TICKS = False
def __init__(self, filename, tile_offset):
super(Tile, self).__init__()
self.filename = filename
self.tile_offset = tile_offset
self.rect.size = (self.WIDTH, self.HEIGHT)
def __str__(self):
return 'Tile %s:%s at %s' % (self.filename, self.tile_offset,
self.rect.topleft)
def update_image(self):
self.image = load_spritesheet_frame(self.filename, self.tile_offset,
frame_size=self.rect.size)
assert self.image
| from thecure.resources import load_spritesheet_frame
from thecure.sprites import Sprite
class Tile(Sprite):
NAME = 'tile'
WIDTH = 64
HEIGHT = 64
NEED_TICKS = False
def __init__(self, filename, tile_offset):
super(Tile, self).__init__()
self.filename = filename
self.tile_offset = tile_offset
self.rect.size = (self.WIDTH, self.HEIGHT)
def update_image(self):
self.image = load_spritesheet_frame(self.filename, self.tile_offset,
frame_size=(self.WIDTH,
self.HEIGHT))
assert self.image
| mit | Python |
aa20215e00aaa872d391fc774f4696c913274eca | add payment_term field in invoices from sale order | Gebesa-Dev/Addons-gebesa | sales_channel/models/sale_order.py | sales_channel/models/sale_order.py | # -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, fields, models
class SaleOrder(models.Model):
_inherit = "sale.order"
@api.multi
def action_invoice_create(self, grouped=False, final=False):
res = super(SaleOrder, self).action_invoice_create(
grouped, final)
invoice = self.env['account.invoice'].browse(res)
for inv in invoice:
if inv.partner_id.parent_id:
inv.sales_channel_id = \
inv.partner_id.parent_id.sales_channel_id
else:
inv.sales_channel_id = inv.partner_id.sales_channel_id
if inv.partner_id.parent_id:
inv.payment_term_id = \
inv.partner_id.parent_id.property_payment_term_id
else:
inv.payment_term_id = \
inv.partner_id.property_payment_term_id
return res
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
@api.multi
def _create_invoice(self, order, so_line, amount):
res = super(SaleAdvancePaymentInv, self)._create_invoice(
order, so_line, amount)
for inv in res:
if inv.partner_id.parent_id:
inv.sales_channel_id = \
inv.partner_id.parent_id.sales_channel_id
else:
inv.sales_channel_id = inv.partner_id.sales_channel_id
if inv.partner_id.parent_id:
inv.payment_term_id = \
inv.partner_id.parent_id.property_payment_term_id
else:
inv.payment_term_id = \
inv.partner_id.property_payment_term_id
return res
| # -*- coding: utf-8 -*-
# © <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, fields, models
class SaleOrder(models.Model):
_inherit = "sale.order"
@api.multi
def action_invoice_create(self, grouped=False, final=False):
res = super(SaleOrder, self).action_invoice_create(
grouped, final)
invoice = self.env['account.invoice'].browse(res)
for inv in invoice:
if inv.partner_id.parent_id:
inv.sales_channel_id = \
inv.partner_id.parent_id.sales_channel_id
else:
inv.sales_channel_id = inv.partner_id.sales_channel_id
if inv.partner_id.parent_id:
inv.payment_term_id = \
inv.partner_id.parent_id.property_payment_term_id
else:
inv.payment_term_id = \
inv.partner_id.property_payment_term_id
return res
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
@api.multi
def _create_invoice(self, order, so_line, amount):
res = super(SaleAdvancePaymentInv, self)._create_invoice(
order, so_line, amount)
for inv in res:
if inv.partner_id.parent_id:
inv.sales_channel_id = \
inv.partner_id.parent_id.sales_channel_id
else:
inv.sales_channel_id = inv.partner_id.sales_channel_id
if inv.partner_id.parent_id:
inv.payment_term_id = \
inv.partner_id.parent_id.property_payment_term_id
else:
inv.payment_term_id = \
inv.partner_id.property_payment_term_id
return res
| agpl-3.0 | Python |
def66bc381f03970640a61d64b49ad5de9ef3879 | Remove OCAMLLIB from build environment | 0install/0install,afb/0install,afb/0install,afb/0install,gasche/0install,bastianeicher/0install,bhilton/0install,fdopen/0install,gasche/0install,0install/0install,jaychoo/0install,dbenamy/0install,gfxmonk/0install,jaychoo/0install,dbenamy/0install,DarkGreising/0install,bastianeicher/0install,fdopen/0install,bhilton/0install,bhilton/0install,bartbes/0install,gasche/0install,bastianeicher/0install,dbenamy/0install,DarkGreising/0install,bartbes/0install,fdopen/0install,gasche/0install,HoMeCracKeR/0install,jaychoo/0install,pombreda/0install,gfxmonk/0install,bartbes/0install,afb/0install,HoMeCracKeR/0install,HoMeCracKeR/0install,gfxmonk/0install,DarkGreising/0install,pombreda/0install,pombreda/0install,0install/0install | ocaml/build-in.py | ocaml/build-in.py | # Needed because ocamlbuild 3.12.1 doesn't support absolute pathnames (4.00.1 does)
import sys
import os
from os.path import relpath
ocaml_build_dir = relpath(sys.argv[1], '.')
# Hack: when we can depend on a full OCaml feed with the build tools, we can remove this.
# Until then, we need to avoid trying to compile against the limited runtime environment.
if 'OCAMLLIB' in os.environ:
del os.environ['OCAMLLIB']
os.execvp("make", ["make", 'OCAML_BUILDDIR=' + ocaml_build_dir, "ocaml"])
| # Needed because ocamlbuild 3.12.1 doesn't support absolute pathnames (4.00.1 does)
import sys
import os
from os.path import relpath
ocaml_build_dir = relpath(sys.argv[1], '.')
os.execvp("make", ["make", 'OCAML_BUILDDIR=' + ocaml_build_dir, "ocaml"])
| lgpl-2.1 | Python |
2476fef95f006b5d85fd655c970ba35b04ed999a | add method imports to __ini__.py | cstrelioff/resumepy,cstrelioff/resumepy | resumepy/__init__.py | resumepy/__init__.py | from .process import create_parser
from .process import process_html
from .process import process_pdf
from .process import process_text
from .utils import check_dir
from .utils import copy_file
from .utils import mkdirs
from .utils import make_temp_directory
| mit | Python | |
d666b96ed7bc3339e162f53accee141fa2f42a16 | Update model.py | meng-sun/hil,kylehogan/hil,kylehogan/hil,henn/hil,henn/hil,CCI-MOC/haas,henn/hil_sahil,apoorvemohan/haas,apoorvemohan/haas,henn/hil_sahil,henn/haas,SahilTikale/haas,lokI8/haas,SahilTikale/switchHaaS,meng-sun/hil,kylehogan/haas | haas/model.py | haas/model.py | from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import *
from passlib.hash import sha512_crypt
from haas.config import cfg
Base = declarative_base()
Session = sessionmaker()
def init_db(create=False):
uri = cfg.get('database', 'uri')
engine = create_engine(uri)
if create:
Base.metadata.create_all(engine)
Session.configure(bind=engine)
class Model(Base):
"""All of our database models are descendants of this class.
It provides some base functionality that we want everywhere.
"""
__abstract__ = True
@declared_attr
def __tablename__(cls):
"""Automatically generate the table name."""
return cls.__name__.lower()
label = Column(String, primary_key=True)
# Various joining tables, for many-to-many relationships:
_m2m = {}
_m2m_pairs = [ ('group', 'user') ]
for left, right in _m2m_pairs:
_m2m[(left, right)] = Table(left + '_to_' + right, Base.metadata,
Column(left + '_label', String, ForeignKey(left + '.label')),
Column(right + '_label', String, ForeignKey(right + '.label')),
)
class Group(Model):
users = relationship('User', secondary=_m2m[('group', 'user')], backref='groups')
class Headnode(Model): pass
class Hnic(Model): pass
class Network(Model): pass
class Nic(Model): pass
class Node(Model):
"""A physical computer
The node id is the object's label
"""
available = Column(Boolean)
project = relationship('Project',backref=backref('nodes',order_by=label))
def __init__(self,node_id):
self.label = node_id
class Port(Model): pass
class Project(Model): pass
class Switch(Model): pass
class User(Model):
"""A HaaS user account.
The username is the object's label.
"""
hashed_password = Column(String) # hashed and salted with passlib (sha512)
def __init__(self, name, password):
self.label = name
self.set_password(password)
def verify_password(self, password):
"""verifies that `password` is the correct password for the user.
`password` should be the plaintext password. It will be checked
against the salted/hashed one in the database.
"""
return sha512_crypt.verify(password, self.hashed_password)
def set_password(self, password):
"""Sets the user's password to `password`.
`password` should be the plaintext of the password, not the hash.
"""
self.hashed_password = sha512_crypt.encrypt(password)
| from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import *
from passlib.hash import sha512_crypt
from haas.config import cfg
Base = declarative_base()
Session = sessionmaker()
def init_db(create=False):
uri = cfg.get('database', 'uri')
engine = create_engine(uri)
if create:
Base.metadata.create_all(engine)
Session.configure(bind=engine)
class Model(Base):
"""All of our database models are descendants of this class.
It provides some base functionality that we want everywhere.
"""
__abstract__ = True
@declared_attr
def __tablename__(cls):
"""Automatically generate the table name."""
return cls.__name__.lower()
label = Column(String, primary_key=True)
# Various joining tables, for many-to-many relationships:
_m2m = {}
_m2m_pairs = [ ('group', 'user') ]
for left, right in _m2m_pairs:
_m2m[(left, right)] = Table(left + '_to_' + right, Base.metadata,
Column(left + '_label', String, ForeignKey(left + '.label')),
Column(right + '_label', String, ForeignKey(right + '.label')),
)
class Group(Model):
users = relationship('User', secondary=_m2m[('group', 'user')], backref='groups')
class Headnode(Model): pass
class Hnic(Model): pass
class Network(Model): pass
class Nic(Model): pass
class Node(Model):
"""A physical computer
The node id is the object's label
"""
available = Column(Boolean)
project = relationship('Project',backref=backref('nodes',order_by=label))
def __init__(self,node_id):
self.label = node_id
class Port(Model): pass
class Project(Model): pass
class Switch(Model): pass
class User(Model):
"""A HaaS user account.
The username is the object's label.
"""
hashed_password = Column(String) # hashed and salted with passlib (sha512)
def __init__(self, name, password):
self.label = name
self.set_password(password)
def verify_password(self, password):
"""verifies that `password` is the correct password for the user.
`password` should be the plaintext password. It will be checked
against the salted/hashed one in the database.
"""
return sha512_crypt.verify(password, self.hashed_password)
def set_password(self, password):
"""Sets the user's password to `password`.
`password` should be the plaintext of the password, not the hash.
"""
self.hashed_password = sha512_crypt.encrypt(password)
| apache-2.0 | Python |
7ba828bd266dabf0a44a47c6428a4a4db003ea16 | disable csrf protection at testing env. | soasme/rio,soasme/rio,soasme/rio | rio/settings/test.py | rio/settings/test.py | # -*- coding: utf-8 -*-
DEBUG = True
TESTING = True
SECRET_KEY = 'no-secret'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/rio-test.db'
CELERY_BROKER_URL = 'sqla+' + SQLALCHEMY_DATABASE_URI
CELERY_RESULT_BACKEND = 'db+' + SQLALCHEMY_DATABASE_URI
REDIS_DEFAULT_CLUSTERS = {
0: {'host': '127.0.0.1', 'port': 6379},
}
CELERY_ALWAYS_EAGER = True
WTF_CSRF_ENABLED=False
| # -*- coding: utf-8 -*-
DEBUG = True
TESTING = True
SECRET_KEY = 'no-secret'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/rio-test.db'
CELERY_BROKER_URL = 'sqla+' + SQLALCHEMY_DATABASE_URI
CELERY_RESULT_BACKEND = 'db+' + SQLALCHEMY_DATABASE_URI
REDIS_DEFAULT_CLUSTERS = {
0: {'host': '127.0.0.1', 'port': 6379},
}
CELERY_ALWAYS_EAGER = True
| mit | Python |
ffbcadf66ba6379e4f98ec070c0e4203e12e1236 | add phenotype group | Clinical-Genomics/scout,Clinical-Genomics/scout,Clinical-Genomics/scout | scout/blueprints/core/constants.py | scout/blueprints/core/constants.py | # -*- coding: utf-8 -*-
"""Constants."""
PHENOTYPE_GROUPS = {
'HP:0001298': {
'name': 'Encephalopathy',
'abbr': 'ENC'
},
'HP:0012759': {
'name': 'Neurodevelopmental abnormality',
'abbr': 'NDEV'
},
'HP:0001250': {
'name': 'Seizures',
'abbr': 'EP'
},
'HP:0100022': {
'name': 'Abnormality of movement',
'abbr': 'MOVE'
},
'HP:0000707': {
'name': 'Neurology, other',
'abbr': 'NEUR'
},
'HP:0003011': {
'name': 'Abnormality of the musculature',
'abbr': 'MUSC'
},
'HP:0001638': {
'name': 'Cardiomyopathy',
'abbr': 'CARD'
},
'HP:0001507': {
'name': 'Growth abnormality',
'abbr': 'GROW'
},
'HP:0001392': {
'name': 'Abnormality of the liver',
'abbr': 'LIV'
},
'HP:0011458': {
'name': 'Abdominal symptom',
'abbr': 'GI'
},
'HP:0012373': {
'name': 'Abnormal eye physiology',
'abbr': 'EYE'
},
'HP:0000077': {
'name': 'Abnormality of the kidney',
'abbr': 'KIDN'
},
'HP:0000951': {
'name': 'Abnormality of the skin',
'abbr': 'SKIN'
},
'HP:0001939': {
'name': 'Abnormality of metabolism/homeostasis',
'abbr': 'METAB'
},
'HP:0000118': {
'name': 'As yet undefined/to be added',
'abbr': 'UND'
},
'HP:0002011': {
'name': 'Abnormal CNS morphology',
'abbr': 'CNS'
}
}
| # -*- coding: utf-8 -*-
"""Constants."""
PHENOTYPE_GROUPS = {
'HP:0001298':{
'name': 'Encephalopathy',
'abbr': 'ENC'
},
'HP:0012759':{
'name': 'Neurodevelopmental abnormality',
'abbr': 'NDEV'
},
'HP:0001250':{
'name': 'Seizures',
'abbr': 'EP'
},
'HP:0100022':{
'name': 'Abnormality of movement',
'abbr': 'MOVE'
},
'HP:0000707':{
'name': 'Neurology, other',
'abbr': 'NEUR'
},
'HP:0003011':{
'name': 'Abnormality of the musculature',
'abbr': 'MUSC'
},
'HP:0001638':{
'name': 'Cardiomyopathy',
'abbr': 'CARD'
},
'HP:0001507':{
'name': 'Growth abnormality',
'abbr': 'GROW'
},
'HP:0001392':{
'name': 'Abnormality of the liver',
'abbr': 'LIV'
},
'HP:0011458':{
'name': 'Abdominal symptom',
'abbr': 'GI'
},
'HP:0012373':{
'name': 'Abnormal eye physiology',
'abbr': 'EYE'
},
'HP:0000077':{
'name': 'Abnormality of the kidney',
'abbr': 'KIDN'
},
'HP:0000951':{
'name': 'Abnormality of the skin',
'abbr': 'SKIN'
},
'HP:0001939':{
'name': 'Abnormality of metabolism/homeostasis',
'abbr': 'METAB'
},
'HP:0000118':{
'name': 'As yet undefined/to be added',
'abbr': 'UND'
},
}
| bsd-3-clause | Python |
405bccd60660397da0be2c6607017bd8a287a077 | use locker | jpn--/pines,jpn--/pines | pines/tar.py | pines/tar.py | import os, io, tarfile, hashlib, json
from .busy_dir import locker
def _sha512_checksum(s):
sha512 = hashlib.sha512()
sha512.update(s)
return sha512.hexdigest()
def _save_hashes(hashes, path="."):
"""
Save configuration to a JSON file.
If filename is not an absolute path, it will be prefixed with ~/.pines/
"""
filename = os.path.join(path, 'hashes.json')
with open(filename, "w") as f:
json.dump(hashes, f, indent=2, sort_keys=True)
def _load_hashes(path="."):
filename = os.path.join(path, 'hashes.json')
with open(filename, "r") as f:
hashes = json.load(f)
return hashes
def directory_to_targz_string(directory):
"""
tar and gzip a directory into a bytes string
Parameters
----------
directory : str
Returns
-------
bytes
"""
with io.BytesIO() as bt:
with tarfile.open(fileobj=bt,mode='w:gz') as tf:
tf.add(directory,arcname=os.path.basename(directory))
bt.seek(0)
s=bt.read()
return s
def extract_targz_string(s, path=".", members=None, return_listdir=True, package_name=None):
"""
restore a tar-gzipped directory
Parameters
----------
s : bytes
Content to ungzip and untar
path : str
Where to extract, defaults to current working directory.
members : list
see tarfile.extractall
"""
skip_rewrite = False
hashes = {}
checksum = None
with locker(path):
if package_name is not None:
hashes = _load_hashes(path=path)
if package_name in hashes:
checksum = _sha512_checksum(s)
if checksum==hashes[package_name]:
skip_rewrite = True
if not skip_rewrite:
with io.BytesIO() as bt:
bt.write(s)
bt.seek(0)
with tarfile.open(fileobj=bt,mode='r:gz') as tf:
tf.extractall(path=path, members=members)
if package_name is not None:
hashes[package_name] = checksum
_save_hashes(hashes, path=path)
if return_listdir:
if path==".":
return os.getcwd(), os.listdir(path)
else:
return path, os.listdir(path)
| import os, io, tarfile, hashlib, json
from .busy_dir import locked_directory
def _sha512_checksum(s):
sha512 = hashlib.sha512()
sha512.update(s)
return sha512.hexdigest()
def _save_hashes(hashes, path="."):
"""
Save configuration to a JSON file.
If filename is not an absolute path, it will be prefixed with ~/.pines/
"""
filename = os.path.join(path, 'hashes.json')
with open(filename, "w") as f:
json.dump(hashes, f, indent=2, sort_keys=True)
def _load_hashes(path="."):
filename = os.path.join(path, 'hashes.json')
with open(filename, "r") as f:
hashes = json.load(f)
return hashes
def directory_to_targz_string(directory):
"""
tar and gzip a directory into a bytes string
Parameters
----------
directory : str
Returns
-------
bytes
"""
with io.BytesIO() as bt:
with tarfile.open(fileobj=bt,mode='w:gz') as tf:
tf.add(directory,arcname=os.path.basename(directory))
bt.seek(0)
s=bt.read()
return s
def extract_targz_string(s, path=".", members=None, return_listdir=True, package_name=None):
"""
restore a tar-gzipped directory
Parameters
----------
s : bytes
Content to ungzip and untar
path : str
Where to extract, defaults to current working directory.
members : list
see tarfile.extractall
"""
skip_rewrite = False
hashes = {}
checksum = None
with locked_directory(path):
if package_name is not None:
hashes = _load_hashes(path=path)
if package_name in hashes:
checksum = _sha512_checksum(s)
if checksum==hashes[package_name]:
skip_rewrite = True
if not skip_rewrite:
with io.BytesIO() as bt:
bt.write(s)
bt.seek(0)
with tarfile.open(fileobj=bt,mode='r:gz') as tf:
tf.extractall(path=path, members=members)
if package_name is not None:
hashes[package_name] = checksum
_save_hashes(hashes, path=path)
if return_listdir:
if path==".":
return os.getcwd(), os.listdir(path)
else:
return path, os.listdir(path)
| mit | Python |
0a8209725e1efbc819f21652cc0413625f998059 | Update KC CTS | KhronosGroup/Vulkan-CTS,googlestadia/VK-GL-CTS,KhronosGroup/Vulkan-CTS,googlestadia/VK-GL-CTS,KhronosGroup/VK-GL-CTS,KhronosGroup/Vulkan-CTS,googlestadia/VK-GL-CTS,KhronosGroup/VK-GL-CTS,KhronosGroup/Vulkan-CTS,KhronosGroup/VK-GL-CTS,KhronosGroup/VK-GL-CTS,googlestadia/VK-GL-CTS,KhronosGroup/VK-GL-CTS,KhronosGroup/Vulkan-CTS,KhronosGroup/Vulkan-CTS,googlestadia/VK-GL-CTS,googlestadia/VK-GL-CTS,KhronosGroup/VK-GL-CTS | external/fetch_kc_cts.py | external/fetch_kc_cts.py | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# Khronos OpenGL CTS
# ------------------
#
# Copyright (c) 2016 The Khronos Group Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import os
import sys
import shutil
import argparse
import subprocess
from fetch_sources import *
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts"))
from build.common import *
EXTERNAL_DIR = os.path.realpath(os.path.normpath(os.path.dirname(__file__)))
SHA1 = "fbac64a4b2acdd4c41e47efa0b7db9a023ee108e"
PACKAGES = [
GitRepo(
"https://gitlab.khronos.org/opengl/kc-cts.git",
"git@gitlab.khronos.org:opengl/kc-cts.git",
SHA1,
"kc-cts"),
]
if __name__ == "__main__":
args = parseArgs()
for pkg in PACKAGES:
if args.clean:
pkg.clean()
else:
pkg.update(args.protocol)
| # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# Khronos OpenGL CTS
# ------------------
#
# Copyright (c) 2016 The Khronos Group Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import os
import sys
import shutil
import argparse
import subprocess
from fetch_sources import *
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts"))
from build.common import *
EXTERNAL_DIR = os.path.realpath(os.path.normpath(os.path.dirname(__file__)))
SHA1 = "be43972ee52d445cb4baa102dc1508760d739e14"
PACKAGES = [
GitRepo(
"https://gitlab.khronos.org/opengl/kc-cts.git",
"git@gitlab.khronos.org:opengl/kc-cts.git",
SHA1,
"kc-cts"),
]
if __name__ == "__main__":
args = parseArgs()
for pkg in PACKAGES:
if args.clean:
pkg.clean()
else:
pkg.update(args.protocol)
| apache-2.0 | Python |
beabd44cc187cd986c05eb1ac5289866081ebd22 | Update mockito-core to 2.24.5 | GerritCodeReview/plugins_webhooks | external_plugin_deps.bzl | external_plugin_deps.bzl | load("//tools/bzl:maven_jar.bzl", "maven_jar")
def external_plugin_deps():
maven_jar(
name = "mockito",
artifact = "org.mockito:mockito-core:2.24.5",
sha1 = "599509fe319bd9e39559b8f987bee5d4b77167e4",
deps = [
"@byte-buddy//jar",
"@byte-buddy-agent//jar",
"@objenesis//jar",
],
)
BYTE_BUDDY_VERSION = "1.9.7"
maven_jar(
name = "byte-buddy",
artifact = "net.bytebuddy:byte-buddy:" + BYTE_BUDDY_VERSION,
sha1 = "8fea78fea6449e1738b675cb155ce8422661e237",
)
maven_jar(
name = "byte-buddy-agent",
artifact = "net.bytebuddy:byte-buddy-agent:" + BYTE_BUDDY_VERSION,
sha1 = "8e7d1b599f4943851ffea125fd9780e572727fc0",
)
maven_jar(
name = "objenesis",
artifact = "org.objenesis:objenesis:2.6",
sha1 = "639033469776fd37c08358c6b92a4761feb2af4b",
)
| load("//tools/bzl:maven_jar.bzl", "maven_jar")
def external_plugin_deps():
maven_jar(
name = "mockito",
artifact = "org.mockito:mockito-core:2.24.0",
sha1 = "969a7bcb6f16e076904336ebc7ca171d412cc1f9",
deps = [
"@byte-buddy//jar",
"@byte-buddy-agent//jar",
"@objenesis//jar",
],
)
BYTE_BUDDY_VERSION = "1.9.7"
maven_jar(
name = "byte-buddy",
artifact = "net.bytebuddy:byte-buddy:" + BYTE_BUDDY_VERSION,
sha1 = "8fea78fea6449e1738b675cb155ce8422661e237",
)
maven_jar(
name = "byte-buddy-agent",
artifact = "net.bytebuddy:byte-buddy-agent:" + BYTE_BUDDY_VERSION,
sha1 = "8e7d1b599f4943851ffea125fd9780e572727fc0",
)
maven_jar(
name = "objenesis",
artifact = "org.objenesis:objenesis:2.6",
sha1 = "639033469776fd37c08358c6b92a4761feb2af4b",
)
| apache-2.0 | Python |
6c8fc2db5943a5e6c6c8d56a5561003cb60d23e4 | fix so admin and debug toolbar isn't closed | Pr0jectX/challenge | core/middleware.py | core/middleware.py | from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from core.models import Config
from levels.models import Level
import re
class ClosedMiddleware(object):
def process_request(self, request):
if hasattr(request, 'user') and request.user.is_staff:
return None
if re.match('^/admin/', request.path_info):
return None
if re.match('^/__debug__/', request.path_info) and settings.DEBUG:
return None
if Config.objects.all().count() != 1:
return self.closed(request)
config = Config.objects.get(pk=1)
if not config.active:
return self.closed(request)
if Level.objects.all().count() == 0:
return self.closed(request)
return None
def closed(self, request):
try:
config = Config.objects.get(pk=1)
except:
config = {'welcometext': _('This site is not configured yet.')}
c = {}
c['config'] = config
return render(request, 'core/closed.html', c)
| from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from core.models import Config
from levels.models import Level
class ClosedMiddleware(object):
def process_request(self, request):
if hasattr(request, 'user') and request.user.is_staff:
return None
if Config.objects.all().count() != 1:
return self.closed(request)
config = Config.objects.get(pk=1)
if not config.active:
return self.closed(request)
if Level.objects.all().count() == 0:
return self.closed(request)
return None
def closed(self, request):
try:
config = Config.objects.get(pk=1)
except:
config = {'welcometext': _('This site is not configured yet.')}
c = {}
c['config'] = config
return render(request, 'core/closed.html', c)
| agpl-3.0 | Python |
676562966cc8d9f6731b1cbac3c8a3827c7d62c4 | Bump version to 0.1.53 | botify-labs/python-simple-workflow,botify-labs/python-simple-workflow | swf/__init__.py | swf/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
version = (0, 1, 53)
__title__ = "python-simple-workflow"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
version = (0, 1, 52)
__title__ = "python-simple-workflow"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
| mit | Python |
4da61308f727dc42f0b0030aa54eeff2205c1fee | Change case | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | core/web/webapp.py | core/web/webapp.py | from __future__ import unicode_literals
import os
from importlib import import_module
from bson.json_util import dumps
from flask import Flask, url_for, request
from flask_login import LoginManager, current_user
from core.user import User
from core.web.json import JSONDecoder
from core.web.api import api
from core.web.frontend import frontend
from mongoengine.errors import DoesNotExist
from core.scheduling import Scheduler
Scheduler() # load all schedule modules
webapp = Flask(__name__)
webapp.secret_key = os.urandom(24)
webapp.json_decoder = JSONDecoder
webapp.debug = True
login_manager = LoginManager()
login_manager.init_app(webapp)
login_manager.login_view = '/login'
auth_module = import_module('core.auth.local')
webapp.register_blueprint(auth_module.auth)
# Handle authentication
@login_manager.user_loader
def load_user(session_token):
try:
return User.objects.get(session_token=session_token)
except DoesNotExist:
return None
@login_manager.request_loader
def api_auth(request):
print request.headers
try:
return User.objects.get(api_key=request.headers.get('X-Api-Key'))
except DoesNotExist:
return None
login_manager.anonymous_user = auth_module.get_default_user
@frontend.before_request
def frontend_login_required():
if not current_user.is_active and (request.endpoint and request.endpoint != 'frontend.static'):
return login_manager.unauthorized()
@api.before_request
def api_login_required():
if not current_user.is_active:
return dumps({"error": "X-Api-Key header missing or invalid"}), 401
webapp.register_blueprint(frontend)
webapp.register_blueprint(api, url_prefix='/api')
@webapp.route('/list_routes')
def list_routes():
import urllib
output = []
for rule in webapp.url_map.iter_rules():
options = {}
for arg in rule.arguments:
options[arg] = "[{0}]".format(arg)
methods = ','.join(rule.methods)
url = url_for(rule.endpoint, **options)
line = urllib.unquote("{:50s} {:20s} {}".format(rule.endpoint, methods, url))
output.append(line)
for line in sorted(output):
print line
return "<br>".join(output)
@webapp.template_test()
def startswith(string, pattern):
return string.startswith(pattern)
| from __future__ import unicode_literals
import os
from importlib import import_module
from bson.json_util import dumps
from flask import Flask, url_for, request
from flask_login import LoginManager, current_user
from core.user import User
from core.web.json import JSONDecoder
from core.web.api import api
from core.web.frontend import frontend
from mongoengine.errors import DoesNotExist
from core.scheduling import Scheduler
Scheduler() # load all schedule modules
webapp = Flask(__name__)
webapp.secret_key = os.urandom(24)
webapp.json_decoder = JSONDecoder
webapp.debug = True
login_manager = LoginManager()
login_manager.init_app(webapp)
login_manager.login_view = '/login'
auth_module = import_module('core.auth.local')
webapp.register_blueprint(auth_module.auth)
# Handle authentication
@login_manager.user_loader
def load_user(session_token):
try:
return User.objects.get(session_token=session_token)
except DoesNotExist:
return None
@login_manager.request_loader
def api_auth(request):
print request.headers
try:
return User.objects.get(api_key=request.headers.get('X-Api-Key'))
except DoesNotExist:
return None
login_manager.anonymous_user = auth_module.get_default_user
@frontend.before_request
def frontend_login_required():
if not current_user.is_active and (request.endpoint and request.endpoint != 'frontend.static'):
return login_manager.unauthorized()
@api.before_request
def api_login_required():
if not current_user.is_active:
return dumps({"error": "X-API-KEY header missing or invalid"}), 401
webapp.register_blueprint(frontend)
webapp.register_blueprint(api, url_prefix='/api')
@webapp.route('/list_routes')
def list_routes():
import urllib
output = []
for rule in webapp.url_map.iter_rules():
options = {}
for arg in rule.arguments:
options[arg] = "[{0}]".format(arg)
methods = ','.join(rule.methods)
url = url_for(rule.endpoint, **options)
line = urllib.unquote("{:50s} {:20s} {}".format(rule.endpoint, methods, url))
output.append(line)
for line in sorted(output):
print line
return "<br>".join(output)
@webapp.template_test()
def startswith(string, pattern):
return string.startswith(pattern)
| apache-2.0 | Python |
6ab4e871c07d123a9b5ab2ee9ca1f01d41aeed94 | Fix bug in incar attribute gettingg. | PytLab/VASPy,PytLab/VASPy | scripts/change_incar_parameters.py | scripts/change_incar_parameters.py | '''
Modify recursively parameters in all INCAR file.
'''
import argparse
import commands
import logging
from vaspy import PY2
from vaspy.incar import InCar
SHELL_COMMAND = "find ./ -name 'INCAR'"
_logger = logging.getLogger("vaspy.script")
if "__main__" == __name__:
# Check command validity.
status, output = commands.getstatusoutput(SHELL_COMMAND)
if status:
raise SystemExit("Invalid shell commands - '{}'".format(SHELL_COMMAND))
# Get InCar objects.
incar_paths = (incar_path.strip() for incar_path in output.split('\n'))
incars = [InCar(incar_path) for incar_path in incar_paths]
# Get all possible arguments.
set_list = [set(incar.pnames) for incar in incars]
possible_args = set.intersection(*set_list)
# Set arguments for this script.
parser = argparse.ArgumentParser()
for arg in possible_args:
arg_str = "--{}".format(arg)
parser.add_argument(arg_str, help="set {} INCAR parameter".format(arg))
args_space = parser.parse_args()
# Change parameters for all incars.
if PY2:
pname_value_pairs = args_space.__dict__.iteritems()
else:
pname_value_pairs = args_space.__dict__.items()
for pname, value in pname_value_pairs :
if value is None:
continue
for incar in incars:
_logger.info("{} --> {} in {}.".format(pname, value, incar.filename))
incar.set(pname, value)
incar.tofile()
_logger.info("{} INCAR files ... ok.".format(len(incars)))
| #!/usr/bin/env python
'''
Modify recursively parameters in all INCAR file.
'''
import argparse
import commands
import logging
from vaspy.incar import InCar
SHELL_COMMAND = "find ./ -name 'INCAR'"
_logger = logging.getLogger("vaspy.script")
if "__main__" == __name__:
# Check command validity.
status, output = commands.getstatusoutput(SHELL_COMMAND)
if status:
raise SystemExit("Invalid shell commands - '{}'".format(SHELL_COMMAND))
# Get InCar objects.
incar_paths = (incar_path.strip() for incar_path in output.split('\n'))
incars = [InCar(incar_path) for incar_path in incar_paths]
# Get all possible arguments.
set_list = [set(incar.pnames) for incar in incars]
possible_args = set.intersection(*set_list)
# Set arguments for this script.
parser = argparse.ArgumentParser()
for arg in possible_args:
arg_str = "--{}".format(arg)
parser.add_argument(arg_str, help="set {} INCAR parameter".format(arg))
args_space = parser.parse_args()
# Change parameters for all incars.
if PY2:
pname_value_pairs = args.__dict__.iteritems()
else:
pname_value_pairs = args.__dict__.items()
for pname, value in pname_value_pairs :
if value is None:
continue
for incar in incars:
_logger.info("{} --> {} in {}.".format(pname, value, incar.filename()))
incar.set(pname, value)
incar.tofile()
_logger.info("{} INCAR files ... ok.".format(len(incars)))
| mit | Python |
469388b1e22496740c264ba4bb37305708ea543e | handle scores < -1 and > 1 | lrvick/synt | synt/guesser.py | synt/guesser.py | # -*- coding: utf-8 -*-
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
#if score doesn't fall within -1 and 1 return 0.0
#example: single words might return a heavily biased score like -9.8343
if not (-1 <= score <= 1):
return 0.0
return score
| from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
| agpl-3.0 | Python |
9b8bedc00be962284abb0420537d32e2ed7f4947 | Tag new release: 3.1.15 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '3.1.15'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '3.1.14'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
554942c6ae8dfb71d1a2af895b60cd6b4923cc28 | Tag new release: 3.1.9 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '3.1.9'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '3.1.8'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.11'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
cc0f463f49ba551a5381948d8ea89cd82ba9b52e | Fix #765. | HIPERFIT/futhark,HIPERFIT/futhark,diku-dk/futhark,diku-dk/futhark,HIPERFIT/futhark,diku-dk/futhark,diku-dk/futhark,diku-dk/futhark | rts/python/memory.py | rts/python/memory.py | # Start of memory.py.
import ctypes as ct
def addressOffset(x, offset, bt):
return ct.cast(ct.addressof(x.contents)+int(offset), ct.POINTER(bt))
def allocateMem(size):
return ct.cast((ct.c_byte * max(0,size))(), ct.POINTER(ct.c_byte))
# Copy an array if its is not-None. This is important for treating
# Numpy arrays as flat memory, but has some overhead.
def normaliseArray(x):
if (x.base is x) or (x.base is None):
return x
else:
return x.copy()
def unwrapArray(x):
return normaliseArray(x).ctypes.data_as(ct.POINTER(ct.c_byte))
def createArray(x, dim):
return np.ctypeslib.as_array(x, shape=dim)
def indexArray(x, offset, bt, nptype):
return nptype(addressOffset(x, offset*ct.sizeof(bt), bt)[0])
def writeScalarArray(x, offset, v):
ct.memmove(ct.addressof(x.contents)+int(offset)*ct.sizeof(v), ct.addressof(v), ct.sizeof(v))
# An opaque Futhark value.
class opaque(object):
def __init__(self, desc, *payload):
self.data = payload
self.desc = desc
def __repr__(self):
return "<opaque Futhark value of type {}>".format(self.desc)
# End of memory.py.
| # Start of memory.py.
import ctypes as ct
def addressOffset(x, offset, bt):
return ct.cast(ct.addressof(x.contents)+int(offset), ct.POINTER(bt))
def allocateMem(size):
return ct.cast((ct.c_byte * max(0,size))(), ct.POINTER(ct.c_byte))
# Copy an array if its is not-None. This is important for treating
# Numpy arrays as flat memory, but has some overhead.
def normaliseArray(x):
if (x.base is x) or (x.base is None):
return x
else:
return x.copy()
def unwrapArray(x):
return normaliseArray(x).ctypes.data_as(ct.POINTER(ct.c_byte))
def createArray(x, dim):
return np.ctypeslib.as_array(x, shape=dim)
def indexArray(x, offset, bt, nptype):
return nptype(addressOffset(x, offset, bt)[0])
def writeScalarArray(x, offset, v):
ct.memmove(ct.addressof(x.contents)+int(offset), ct.addressof(v), ct.sizeof(v))
# An opaque Futhark value.
class opaque(object):
def __init__(self, desc, *payload):
self.data = payload
self.desc = desc
def __repr__(self):
return "<opaque Futhark value of type {}>".format(self.desc)
# End of memory.py.
| isc | Python |
1c695763ce3edc982e939fee0e0c5ab2da10d575 | Sort those lists.. | CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend | scripts/import_staff_identities.py | scripts/import_staff_identities.py | #!/usr/bin/env python
import time
import requests
from threepio import logger
from service.accounts.eucalyptus import AccountDriver as EucaAccountDriver
from service.accounts.openstack import AccountDriver as OSAccountDriver
from core.models import AtmosphereUser as User
from core.models import Provider, Quota, Allocation
def main():
"""
TODO: Add argparse, --delete : Deletes existing users in openstack (Never use in PROD)
"""
openstack = Provider.objects.get(location='OpenStack-Tucson (BETA)')
os_driver = OSAccountDriver(openstack)
found = 0
create = 0
quota_dict = {
'cpu':10,
'memory': 20,
'storage': 10,
'storage_count': 10
}
higher_quota = Quota.objects.get_or_create(**quota_dict)[0]
usernames = os_driver.list_usergroup_names()
staff = members_query_groupy("staff")
staff_users = sorted(list(set(staff) & set(usernames)))
non_staff = sorted(list(set(usernames) - set(staff)))
for user in staff_users:
# Openstack account exists, but we need the identity.
ident = os_driver.create_account(user)
print 'Found staff user:%s -- Remove allocation and Update quota' % user
im = ident.identitymembership_set.all()[0]
#Disable time allocation
im.allocation = None
im.quota = higher_quota
im.save()
for user in non_staff:
#Raise everybody's quota
ident = os_driver.create_account(user)
im = ident.identitymembership_set.all()[0]
im.quota = higher_quota
im.allocation = Allocation.default_allocation()
im.save()
print 'Found non-staff user:%s -- Update quota' % user
print "Total users added to atmosphere:%s" % len(usernames)
def members_query_groupy(groupname):
r = requests.get(
'http://gables.iplantcollaborative.org/groups/%s/members'
% groupname)
json_obj = r.json()
usernames = []
for user in json_obj['data']:
usernames.append(user['name'].replace('esteve','sgregory'))
return usernames
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import time
import requests
from threepio import logger
from service.accounts.eucalyptus import AccountDriver as EucaAccountDriver
from service.accounts.openstack import AccountDriver as OSAccountDriver
from core.models import AtmosphereUser as User
from core.models import Provider, Quota, Allocation
def main():
"""
TODO: Add argparse, --delete : Deletes existing users in openstack (Never use in PROD)
"""
openstack = Provider.objects.get(location='OpenStack-Tucson (BETA)')
os_driver = OSAccountDriver(openstack)
found = 0
create = 0
quota_dict = {
'cpu':10,
'memory': 20,
'storage': 10,
'storage_count': 10
}
higher_quota = Quota.objects.get_or_create(**quota_dict)[0]
usernames = os_driver.list_usergroup_names()
staff = members_query_groupy("staff")
staff_users = list(set(staff) & set(usernames))
non_staff = list(set(usernames) - set(staff))
for user in staff_users:
# Openstack account exists, but we need the identity.
ident = os_driver.create_account(user)
print 'Found staff user:%s -- Remove allocation and Update quota' % user
im = ident.identitymembership_set.all()[0]
#Disable time allocation
im.allocation = None
im.quota = higher_quota
im.save()
for user in non_staff:
#Raise everybody's quota
ident = os_driver.create_account(user)
im = ident.identitymembership_set.all()[0]
im.quota = higher_quota
im.allocation = Allocation.default_allocation()
im.save()
print 'Found non-staff user:%s -- Update quota' % user
print "Total users added to atmosphere:%s" % len(usernames)
def members_query_groupy(groupname):
r = requests.get(
'http://gables.iplantcollaborative.org/groups/%s/members'
% groupname)
json_obj = r.json()
usernames = []
for user in json_obj['data']:
usernames.append(user['name'].replace('esteve','sgregory'))
return usernames
if __name__ == "__main__":
main()
| apache-2.0 | Python |
c40f7b216d1ebe14332acd2f4aa8df819053258a | Create serializers.py | divyamodi128/django_comments,divyamodi128/django_comments,divyamodi128/django_comments | src/posts/serializers.py | src/posts/serializers.py | from rest_framework import serializers
from .models import Post
class PostSerializers(serializers.HyperlinkedModelSerializer):
class Meta:
model = Post
fields = ('url', 'title', 'content', 'timestamp', 'updated', 'media_url')
| from rest_framework import serializers
from .models import Post
class PostSerializers(serializers.HyperlinkedModelSerializer):
class Meta:
model = Post
fields = ('url', 'title', 'content', 'timestamp', 'updated', 'media_url')
'''4703635512
ygnesha
ranga 510 771 9301 ''' | mit | Python |
17933131c3f78a41a4aa803959e31e200cecd97e | add missing label declarator to all fields in forms | byteweaver/django-forums,ckcnik/django-forums,byteweaver/django-forums,ckcnik/django-forums | forums/forms.py | forums/forms.py | from django import forms
class TopicCreateForm(forms.Form):
topic = forms.CharField(label="Topic", min_length=3)
message = forms.CharField(label="Message", min_length=3, widget=forms.Textarea())
class PostCreateForm(forms.Form):
message = forms.CharField(label="Message", min_length=3, widget=forms.Textarea())
| from django import forms
class TopicCreateForm(forms.Form):
topic = forms.CharField("Topic", min_length=3)
message = forms.CharField("Message", min_length=3, widget=forms.Textarea())
class PostCreateForm(forms.Form):
message = forms.CharField("Message", min_length=3, widget=forms.Textarea())
| bsd-3-clause | Python |
8fa358b0ab8b2892ca062ceec399d58e4b077e46 | update config path | voidabhi/cricinfo,voidabhi/cricinfo | cricinfo/my_bot.py | cricinfo/my_bot.py | #!/usr/bin/python
import requests
from bs4 import BeautifulSoup
import xmltodict
import click
from ConfigParser import SafeConfigParser
def get_config(key):
""" Fetch config from config file """
parser = SafeConfigParser()
parser.read('../.config')
return parser.get(key)
class Match(object):
""" Represents a cricinfo match """
def __init__(self, title, link, description, guid):
self.title = title
self.link = link
self.description = description
self.guid = guid
@classmethod
def from_xml(self, xml):
""" create object from serialized xml """
item = xmltodict.parse(xml)['item']
return Match(item['title'], item['link'], item['description'], item['guid'])
def __repr__(self):
return '<Match=%s>'%self.title
def get_matches():
"""Fetches matches from the cricinfo url"""
r = requests.get(parser.get('url'))
soup = BeautifulSoup(r.text)
for match in soup.find_all('item'):
yield Match.from_xml(str(match))
def print_matches(matches):
"""Prints all matches to the console."""
click.echo()
for match in matches:
click.secho('%s\t' % match.title, bold=True, fg="red", nl=False)
click.echo()
@click.command()
def main():
"""A cli to Cricinfo to see live scores"""
# fetch matches
matches = get_matches()
# print matches
print_matches(matches)
if __name__ == '__main__':
main()
| #!/usr/bin/python
import requests
from bs4 import BeautifulSoup
import xmltodict
import click
from ConfigParser import SafeConfigParser
def get_config(key):
""" Fetch config from config file """
parser = SafeConfigParser()
parser.read('.config')
return parser.get(key)
class Match(object):
""" Represents a cricinfo match """
def __init__(self, title, link, description, guid):
self.title = title
self.link = link
self.description = description
self.guid = guid
@classmethod
def from_xml(self, xml):
""" create object from serialized xml """
item = xmltodict.parse(xml)['item']
return Match(item['title'], item['link'], item['description'], item['guid'])
def __repr__(self):
return '<Match=%s>'%self.title
def get_matches():
"""Fetches matches from the cricinfo url"""
r = requests.get(parser.get('url'))
soup = BeautifulSoup(r.text)
for match in soup.find_all('item'):
yield Match.from_xml(str(match))
def print_matches(matches):
"""Prints all matches to the console."""
click.echo()
for match in matches:
click.secho('%s\t' % match.title, bold=True, fg="red", nl=False)
click.echo()
@click.command()
def main():
"""A cli to Cricinfo to see live scores"""
# fetch matches
matches = get_matches()
# print matches
print_matches(matches)
if __name__ == '__main__':
main()
| mit | Python |
c0815fb71a97a6dd0ddce99755012738a137e6cc | remove access token | stopthatcow/zazu,stopthatcow/zazu | zazu/github_helper.py | zazu/github_helper.py | # -*- coding: utf-8 -*-
"""github functions for zazu"""
import zazu.util
zazu.util.lazy_import(locals(), [
'click',
'getpass',
'github',
'keyring',
're',
'requests',
'socket'
])
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2016"
def get_gh_token():
"""Make new GitHub token"""
api_url = 'https://api.github.com'
add_auth = {
"scopes": [
"repo"
],
"note": "zazu for {}@{}".format(getpass.getuser(), socket.gethostname())
}
token = None
while token is None:
user = zazu.util.prompt("GitHub username", expected_type=str)
password = click.prompt("GitHub password", type=str, hide_input=True)
r = requests.post('{}/authorizations'.format(api_url), json=add_auth, auth=(user, password))
if r.status_code == 401:
if 'Must specify two-factor authentication OTP code.' in r.json()['message']:
headers = {'X-GitHub-OTP': click.prompt('GitHub two-factor code (6 digits)', type=str)}
r = requests.post('{}/authorizations'.format(api_url), headers=headers, json=add_auth, auth=(user, password))
else:
click.echo("Invalid username or password!")
continue
if r.status_code == 201:
token = r.json()['token']
elif r.status_code == 422:
click.echo('You already have a GitHub token for zazu in GitHub but it is not saved in the keychain! '
'Go to https://github.com/settings/tokens to generate a new one with "repo" scope')
token = zazu.util.prompt('Enter new token manually')
else:
raise Exception("Error authenticating with GitHub, status:{} content:{}".format(r.status_code, r.json()))
return token
def make_gh():
token = keyring.get_password('https://api.github.com', 'token')
if token is None:
click.echo("No saved GitHub token found in keychain, lets add one...")
token = get_gh_token()
keyring.set_password('https://api.github.com', 'token', token)
gh = github.Github(token)
return gh
def parse_github_url(url):
"""Parses github url into organization and repo name"""
tokens = re.split('/|:', url.replace('.git', ''))
repo = tokens.pop()
organization = tokens.pop()
return organization, repo
| # -*- coding: utf-8 -*-
"""github functions for zazu"""
import zazu.util
zazu.util.lazy_import(locals(), [
'click',
'getpass',
'github',
'keyring',
're',
'requests',
'socket'
])
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2016"
def get_gh_token():
"""Make new GitHub token"""
api_url = 'https://api.github.com'
add_auth = {
"scopes": [
"repo"
],
"note": "zazu for {}@{}".format(getpass.getuser(), socket.gethostname())
}
token = None
while token is None:
user = zazu.util.prompt("GitHub username", expected_type=str)
password = click.prompt("GitHub password", type=str, hide_input=True)
r = requests.post('{}/authorizations'.format(api_url), json=add_auth, auth=(user, password))
if r.status_code == 401:
if 'Must specify two-factor authentication OTP code.' in r.json()['message']:
headers = {'X-GitHub-OTP': click.prompt('GitHub two-factor code (6 digits)', type=str)}
r = requests.post('{}/authorizations'.format(api_url), headers=headers, json=add_auth, auth=(user, password))
else:
click.echo("Invalid username or password!")
continue
if r.status_code == 201:
token = r.json()['token']
elif r.status_code == 422:
click.echo('You already have a GitHub token for zazu in GitHub but it is not saved in the keychain! '
'Go to https://github.com/settings/tokens to generate a new one with "repo" scope')
token = zazu.util.prompt('Enter new token manually')
else:
raise Exception("Error authenticating with GitHub, status:{} content:{}".format(r.status_code, r.json()))
return token
def make_gh():
token = '80fa911ab6337aaa2e166939951786a008d45b4d' # keyring.get_password('https://api.github.com', 'token')
if token is None:
click.echo("No saved GitHub token found in keychain, lets add one...")
token = get_gh_token()
keyring.set_password('https://api.github.com', 'token', token)
gh = github.Github(token)
return gh
def parse_github_url(url):
"""Parses github url into organization and repo name"""
tokens = re.split('/|:', url.replace('.git', ''))
repo = tokens.pop()
organization = tokens.pop()
return organization, repo
| mit | Python |
1a357cdcb6b204bb7df147461b16d1cc15b05143 | add filter capability | clofresh/proboscis | proboscis.py | proboscis.py | import sys
import time
from datetime import datetime
import pymongo
db = pymongo.Connection().mongolog
time_key = 'created'
message_key = 'msg'
if len(sys.argv) > 1:
filter_query = eval(sys.argv[1])
else:
filter_query = {}
last_time = list(db.log.find(filter_query, [time_key]).sort(time_key, pymongo.DESCENDING).limit(1))[0][time_key]
while True:
query = {time_key: {'$gt': last_time}}
query.update(filter_query)
for row in db.log.find(query).sort(time_key, pymongo.ASCENDING):
message = row.get(message_key, None)
if message:
print datetime.fromtimestamp(float(row[time_key])).strftime('%Y-%m-%d %H:%M:%S.%f:\t'),
print message
last_time = max(last_time, row[time_key])
time.sleep(1)
| import time
from datetime import datetime
import pymongo
db = pymongo.Connection().mongolog
time_key = 'created'
message_key = 'msg'
last_time = list(db.log.find({}, [time_key]).sort(time_key, pymongo.DESCENDING).limit(1))[0][time_key]
while True:
for row in db.log.find({time_key: {'$gt': last_time}}).sort(time_key, pymongo.ASCENDING):
message = row.get(message_key, None)
if message:
print datetime.fromtimestamp(float(row[time_key])).strftime('%Y-%m-%d %H:%M:%S.%f:\t'),
print message
last_time = max(last_time, row[time_key])
time.sleep(1)
| bsd-3-clause | Python |
d5dc16ce9f7f6373ebef6563ba25e14133d51cf7 | Fix test/helpers.py linter warnings. | duckinator/boreutils,duckinator/boreutils | test/helpers.py | test/helpers.py | """
Helper functions for the Boreutils test suite.
"""
from contextlib import contextmanager
from pathlib import Path
import os
import subprocess
@contextmanager
def _modified_path():
current_dir = Path(__file__).resolve().parent
bin_dir = (current_dir / '..' / 'bin').resolve()
original_path = os.environ['PATH']
try:
os.environ['PATH'] = f'{bin_dir}:{original_path}'
yield
finally:
os.environ['PATH'] = original_path
def check(cmd):
"""Run a command, capture the output as text, check it had a 0 returncode,
and return the +CompletedProcess+ object."""
with _modified_path():
return subprocess.run(cmd, capture_output=True, text=True, check=True)
def check_version(tool):
"""Check if running `{tool} --version` has '(Boreutils)' as the second word."""
return check([tool, "--version"]).stdout.split(' ')[1] == '(Boreutils)'
def run(cmd):
"""Run a command, capture the output as text, _don't_ check the return code,
and return the +CompletedProcess+ object."""
with _modified_path():
return subprocess.run(cmd, capture_output=True, text=True, check=False)
| from contextlib import contextmanager
from pathlib import Path
import os
import subprocess
@contextmanager
def _modified_path():
current_dir = Path(__file__).resolve().parent
bin_dir = (current_dir / '..' / 'bin').resolve()
original_path = os.environ['PATH']
try:
os.environ['PATH'] = f'{bin_dir}:{original_path}'
yield
finally:
os.environ['PATH'] = original_path
def check(cmd):
with _modified_path():
return subprocess.run(cmd, capture_output=True, text=True, check=True)
def check_version(tool):
return check([tool, "--version"]).stdout.split(' ')[1] == '(Boreutils)'
def run(cmd):
with _modified_path():
return subprocess.run(cmd, capture_output=True, text=True)
| isc | Python |
5c17662e3acac6f4daab116cde23a4cca71c3372 | Initialize threadpool as late as possible to let the process fork before spawning threads. | martijnvermaat/rpclib,martijnvermaat/rpclib,arskom/spyne,martijnvermaat/rpclib,arskom/spyne,arskom/spyne | src/rpclib/aux/thread.py | src/rpclib/aux/thread.py |
#
# rpclib - Copyright (C) Rpclib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logger = logging.getLogger(__name__)
from multiprocessing.pool import ThreadPool
from rpclib.aux import AuxProcBase
class ThreadAuxProc(AuxProcBase):
def __init__(self, pool_size=1):
AuxProcBase.__init__(self)
self.pool = None
self.__pool_size = pool_size
@property
def pool_size(self):
return self.__pool_size
def process_context(self, server, ctx, *args, **kwargs):
a = [server, ctx]
a.extend(args)
self.pool.apply_async(self.process, a, kwargs)
def initialize(self, server):
self.pool = ThreadPool(self.__pool_size)
|
#
# rpclib - Copyright (C) Rpclib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logger = logging.getLogger(__name__)
from multiprocessing.pool import ThreadPool
from rpclib.aux import AuxProcBase
class ThreadAuxProc(AuxProcBase):
def __init__(self, pool_size=1):
AuxProcBase.__init__(self)
self.__pool_size = pool_size
self.pool = ThreadPool(pool_size)
@property
def pool_size(self):
return self.__pool_size
def process_context(self, server, ctx, *args, **kwargs):
a = [server, ctx]
a.extend(args)
self.pool.apply_async(self.process, a, kwargs)
| lgpl-2.1 | Python |
b297ad6b4d52b688a1c50ffc2a5574d8061c5ce0 | Check for errors parsing the CSV as we go. | larsyencken/csvdiff | csvdiff/records.py | csvdiff/records.py | # -*- coding: utf-8 -*-
#
# records.py
# csvdiff
#
import csv
from . import error
class InvalidKeyError(Exception):
pass
def load(file_or_stream):
istream = (open(file_or_stream)
if not hasattr(file_or_stream, 'read')
else file_or_stream)
return _safe_iterator(csv.DictReader(istream))
def _safe_iterator(reader):
for lineno, r in enumerate(reader, 2):
if any(k is None for k in r):
error.abort('CSV parse error on line {}'.format(lineno))
yield r
def index(record_seq, index_columns):
try:
return {
tuple(r[i] for i in index_columns): r
for r in record_seq
}
except KeyError as k:
raise InvalidKeyError('invalid column name {k} as key'.format(k=k))
def save(record_seq, fieldnames, ostream):
writer = csv.DictWriter(ostream, fieldnames)
writer.writeheader()
for r in record_seq:
writer.writerow(r)
def sort(recs):
return sorted(recs, key=_record_key)
def _record_key(r):
return sorted(r.items())
| # -*- coding: utf-8 -*-
#
# records.py
# csvdiff
#
import csv
class InvalidKeyError(Exception):
pass
def load(file_or_stream):
istream = (open(file_or_stream)
if not hasattr(file_or_stream, 'read')
else file_or_stream)
return csv.DictReader(istream)
def index(record_seq, index_columns):
try:
return {
tuple(r[i] for i in index_columns): r
for r in record_seq
}
except KeyError as k:
raise InvalidKeyError('invalid column name {k} as key'.format(k=k))
def save(record_seq, fieldnames, ostream):
writer = csv.DictWriter(ostream, fieldnames)
writer.writeheader()
for r in record_seq:
writer.writerow(r)
def sort(recs):
return sorted(recs, key=_record_key)
def _record_key(r):
return sorted(r.items())
| bsd-3-clause | Python |
408f470085e481a687ddea012920feca26f4d2fe | set fixedWidth to func_list_widget | IfengAutomation/uitester,IfengAutomation/uitester | uitester/ui/case_manager/completer_widget.py | uitester/ui/case_manager/completer_widget.py | # -*- encoding: UTF-8 -*-
import os
from PyQt5 import uic
from PyQt5.QtCore import Qt, pyqtSignal
from PyQt5.QtWidgets import QWidget, QListWidget, QTextBrowser
class CompleterWidget(QWidget):
select_signal = pyqtSignal(str, name="select_signal")
selected_func_name_signal = pyqtSignal(str, str, name="selected_func_name_signal")
def __init__(self, parent=None):
super(CompleterWidget, self).__init__(parent)
ui_dir_path = os.path.dirname(__file__)
ui_file_path = os.path.join(ui_dir_path, 'completer_widget.ui')
uic.loadUi(ui_file_path, self)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.Tool)
self.func_list_widget = QListWidget()
self.func_list_widget.setFixedWidth(200)
self.func_list_widget.setSizeAdjustPolicy(QListWidget.AdjustToContents)
self.func_list_layout.insertWidget(0, self.func_list_widget)
self.desc_text_browser.setSizeAdjustPolicy(QTextBrowser.AdjustToContents)
self.func_list_widget.setFocusPolicy(Qt.NoFocus)
def update_desc(self, text, func_doc):
"""
update the description according to the selected function name
:param func_doc:
:param text:
:return:
"""
if not text:
return
if not func_doc: # desc is None
self.desc_text_browser.setText("<pre> <font color='red'>\"" + text +
"\" has no description." + "</font></pre>")
return
func_doc = func_doc.split("\n")
func_desc = ''
for line in func_doc:
func_desc = func_desc + line.lstrip() + "\n"
self.desc_text_browser.setText("<pre> <font color='green'>" + func_desc + "</font></pre>")
| # -*- encoding: UTF-8 -*-
import os
from PyQt5 import uic
from PyQt5.QtCore import Qt, pyqtSignal
from PyQt5.QtWidgets import QWidget, QListWidget, QTextBrowser
class CompleterWidget(QWidget):
select_signal = pyqtSignal(str, name="select_signal")
selected_func_name_signal = pyqtSignal(str, str, name="selected_func_name_signal")
def __init__(self, parent=None):
super(CompleterWidget, self).__init__(parent)
ui_dir_path = os.path.dirname(__file__)
ui_file_path = os.path.join(ui_dir_path, 'completer_widget.ui')
uic.loadUi(ui_file_path, self)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.Tool)
self.func_list_widget = QListWidget()
self.func_list_widget.setSizeAdjustPolicy(QListWidget.AdjustToContents)
self.func_list_layout.insertWidget(0, self.func_list_widget)
self.desc_text_browser.setSizeAdjustPolicy(QTextBrowser.AdjustToContents)
self.func_list_widget.setFocusPolicy(Qt.NoFocus)
def update_desc(self, text, func_doc):
"""
update the description according to the selected function name
:param func_doc:
:param text:
:return:
"""
if not text:
return
if not func_doc: # desc is None
self.desc_text_browser.setText("<pre> <font color='red'>\"" + text +
"\" has no description." + "</font></pre>")
return
func_doc = func_doc.split("\n")
func_desc = ''
for line in func_doc:
func_desc = func_desc + line.lstrip() + "\n"
self.desc_text_browser.setText("<pre> <font color='green'>" + func_desc + "</font></pre>")
| apache-2.0 | Python |
204f7e2b544eb1767e50bbee0171d631914298a8 | Make test runnable. | markvdw/opt_tools | testing/test_stopwatch.py | testing/test_stopwatch.py | import sys
import time
import unittest
sys.path.append('..')
from opt_tools.helpers import Stopwatch
class TestStopwatch(unittest.TestCase):
def test_basic(self):
s = Stopwatch()
s2 = Stopwatch()
s3 = Stopwatch(elapsed_time=13.0)
s2.start()
s3.start()
time.sleep(0.1)
self.assertTrue(s.elapsed_time == 0.0) # Does not start by itself
s.start()
time.sleep(0.1)
s.stop()
et = s.elapsed_time
self.assertTrue(et > 0.1) # Counts the right amount of time
time.sleep(0.1)
self.assertTrue(et == s.elapsed_time) # Does not continue after stopped
s.start()
time.sleep(0.1)
self.assertTrue(s.elapsed_time > 0.2)
self.assertTrue(s2.elapsed_time > 0.4) # Other one counts total elapsed time
self.assertTrue(s3.elapsed_time > 13.0) # Initialised with elapsed_time works
def test_pause(self):
s = Stopwatch()
s2 = Stopwatch()
s2.start()
s.start()
time.sleep(0.1)
with s.pause():
time.sleep(0.1)
self.assertTrue(0.12 > s.elapsed_time > 0.1) # Check that it was paused
time.sleep(0.1)
self.assertTrue(0.22 > s.elapsed_time > 0.2) # Check that it's still running
self.assertTrue(0.32 > s2.elapsed_time > 0.3)
def test_errors(self):
s = Stopwatch()
s.start()
with self.assertRaises(RuntimeError):
s.start()
s.stop()
with self.assertRaises(RuntimeError):
s.stop()
| import time
import unittest
from helpers import Stopwatch
class TestStopwatch(unittest.TestCase):
def test_basic(self):
s = Stopwatch()
s2 = Stopwatch()
s3 = Stopwatch(elapsed_time=13.0)
s2.start()
s3.start()
time.sleep(0.1)
self.assertTrue(s.elapsed_time == 0.0) # Does not start by itself
s.start()
time.sleep(0.1)
s.stop()
et = s.elapsed_time
self.assertTrue(et > 0.1) # Counts the right amount of time
time.sleep(0.1)
self.assertTrue(et == s.elapsed_time) # Does not continue after stopped
s.start()
time.sleep(0.1)
self.assertTrue(s.elapsed_time > 0.2)
self.assertTrue(s2.elapsed_time > 0.4) # Other one counts total elapsed time
self.assertTrue(s3.elapsed_time > 13.0) # Initialised with elapsed_time works
def test_pause(self):
s = Stopwatch()
s2 = Stopwatch()
s2.start()
s.start()
time.sleep(0.1)
with s.pause():
time.sleep(0.1)
self.assertTrue(0.12 > s.elapsed_time > 0.1) # Check that it was paused
time.sleep(0.1)
self.assertTrue(0.22 > s.elapsed_time > 0.2) # Check that it's still running
self.assertTrue(0.32 > s2.elapsed_time > 0.3)
def test_errors(self):
s = Stopwatch()
s.start()
with self.assertRaises(RuntimeError):
s.start()
s.stop()
with self.assertRaises(RuntimeError):
s.stop()
| apache-2.0 | Python |
e1eb3c608f975c6672a47d3e4bcb7c29e8a7b965 | update version | gbrammer/grizli | grizli/version.py | grizli/version.py | # git describe --tags
__version__ = "0.6.0-79-g3c65f39"
| # git describe --tags
__version__ = "0.6.0-67-g098087f"
| mit | Python |
fab0b57e0c28336ed1ae771e5b3c94a95dd8b093 | Use only deterministic names in da.wrap | jakirkham/dask,ssanderson/dask,PhE/dask,mrocklin/dask,mraspaud/dask,mraspaud/dask,pombredanne/dask,vikhyat/dask,vikhyat/dask,blaze/dask,wiso/dask,gameduell/dask,jcrist/dask,dask/dask,ssanderson/dask,mrocklin/dask,dask/dask,cowlicks/dask,mikegraham/dask,PhE/dask,chrisbarber/dask,clarkfitzg/dask,wiso/dask,clarkfitzg/dask,jcrist/dask,pombredanne/dask,ContinuumIO/dask,cpcloud/dask,jakirkham/dask,blaze/dask,ContinuumIO/dask | dask/array/wrap.py | dask/array/wrap.py | from __future__ import absolute_import, division, print_function
from itertools import product
from functools import partial
from toolz import curry
import numpy as np
from ..base import tokenize
from .core import Array, normalize_chunks
from .numpy_compat import full
def dims_from_size(size, blocksize):
"""
>>> list(dims_from_size(30, 8))
[8, 8, 8, 6]
"""
result = (blocksize,) * (size // blocksize)
if size % blocksize:
result = result + (size % blocksize,)
return result
def wrap_func_shape_as_first_arg(func, *args, **kwargs):
"""
Transform np creation function into blocked version
"""
if 'shape' not in kwargs:
shape, args = args[0], args[1:]
else:
shape = kwargs.pop('shape')
if not isinstance(shape, (tuple, list)):
shape = (shape,)
chunks = kwargs.pop('chunks', None)
chunks = normalize_chunks(chunks, shape)
name = kwargs.pop('name', None)
dtype = kwargs.pop('dtype', None)
if dtype is None:
dtype = func(shape, *args, **kwargs).dtype
name = name or 'wrapped-' + tokenize((func, shape, dtype, args, kwargs))
keys = product([name], *[range(len(bd)) for bd in chunks])
shapes = product(*chunks)
func = partial(func, dtype=dtype, **kwargs)
vals = ((func,) + (s,) + args for s in shapes)
dsk = dict(zip(keys, vals))
return Array(dsk, name, chunks, dtype=dtype)
@curry
def wrap(wrap_func, func, **kwargs):
f = partial(wrap_func, func, **kwargs)
f.__doc__ = """
Blocked variant of %(name)s
Follows the signature of %(name)s exactly except that it also requires a
keyword argument chunks=(...)
Original signature follows below.
""" % {'name': func.__name__} + func.__doc__
f.__name__ = 'blocked_' + func.__name__
return f
w = wrap(wrap_func_shape_as_first_arg)
ones = w(np.ones, dtype='f8')
zeros = w(np.zeros, dtype='f8')
empty = w(np.empty, dtype='f8')
full = w(full)
| from __future__ import absolute_import, division, print_function
from itertools import count, product
from functools import partial
from toolz import curry
import numpy as np
from .core import Array, normalize_chunks
from .numpy_compat import full
names = ('wrapped_%d' % i for i in count(1))
def dims_from_size(size, blocksize):
"""
>>> list(dims_from_size(30, 8))
[8, 8, 8, 6]
"""
result = (blocksize,) * (size // blocksize)
if size % blocksize:
result = result + (size % blocksize,)
return result
def wrap_func_shape_as_first_arg(func, *args, **kwargs):
"""
Transform np.random function into blocked version
"""
if 'shape' not in kwargs:
shape, args = args[0], args[1:]
else:
shape = kwargs.pop('shape')
if not isinstance(shape, (tuple, list)):
shape = (shape,)
chunks = kwargs.pop('chunks', None)
chunks = normalize_chunks(chunks, shape)
name = kwargs.pop('name', None)
dtype = kwargs.pop('dtype', None)
if dtype is None:
dtype = func(shape, *args, **kwargs).dtype
name = name or next(names)
keys = product([name], *[range(len(bd)) for bd in chunks])
shapes = product(*chunks)
func = partial(func, dtype=dtype, **kwargs)
vals = ((func,) + (s,) + args for s in shapes)
dsk = dict(zip(keys, vals))
return Array(dsk, name, chunks, dtype=dtype)
@curry
def wrap(wrap_func, func, **kwargs):
f = partial(wrap_func, func, **kwargs)
f.__doc__ = """
Blocked variant of %(name)s
Follows the signature of %(name)s exactly except that it also requires a
keyword argument chunks=(...)
Original signature follows below.
""" % {'name': func.__name__} + func.__doc__
f.__name__ = 'blocked_' + func.__name__
return f
w = wrap(wrap_func_shape_as_first_arg)
ones = w(np.ones, dtype='f8')
zeros = w(np.zeros, dtype='f8')
empty = w(np.empty, dtype='f8')
full = w(full)
| bsd-3-clause | Python |
c70b02cbe7b57548dec9efd6da2d511470bb0ecc | Update tests | mirnylab/cooler | tests/cli/test_coarsen.py | tests/cli/test_coarsen.py | # -*- coding: utf-8 -*-
from functools import partial
import os.path as op
import subprocess
import tempfile
import filecmp
import sys
import os
import cooler
import logging
import numpy as np
import h5py
import nose
from nose.tools import with_setup, set_trace
from click.testing import CliRunner
from cooler.cli.cload import cload, tabix as cload_tabix
from cooler.cli.coarsen import coarsen, tile, multires_aggregate
if sys.version_info[0] == 3 and sys.version_info[1] == 3:
raise nose.SkipTest
testdir = op.realpath(op.join(op.dirname(__file__), op.pardir))
tmp = tempfile.gettempdir()
multires_path = op.join(tmp, 'test.multires.cool')
def teardown_func(*filepaths):
for fp in filepaths:
try:
os.remove(fp)
except OSError:
pass
def test_recursive_agg():
infile = op.join(testdir, 'data', 'GM12878-MboI-matrix.2000kb.cool')
outfile = '/tmp/bla.cool'
chunksize = int(10e6)
n_zooms = 2
n_cpus = 8
multires_aggregate(infile, outfile, n_cpus, chunksize)
#ccc.multires_balance(outfile, n_zooms, chunksize, n_cpus)
@with_setup(teardown=partial(teardown_func, multires_path))
def test_tile():
runner = CliRunner()
result = runner.invoke(
tile, [
op.join(testdir, 'data',
'dec2_20_pluslig_1pGene_grch38_UBR4_D_1nt.pairwise.sorted.cool'),
'--out', multires_path,
'--no-balance',
]
)
#sys.stdout.write(result.output)
assert(result.exit_code == 0)
# this file should have base + 6 zoom levels
assert(len(cooler.io.ls(multires_path)) == 7)
# inconsistent chromosome names in chrom table (truncated) and bin table
# (full length) of the input file are now resolved by forcing use of the
# chrom table names in the bin tables of the output file
c = cooler.Cooler(multires_path)
names = c.bins()['chrom'][:].cat.categories
assert names[0] == 'ENSG00000127481|ENST00000375254|'
| # -*- coding: utf-8 -*-
from functools import partial
import os.path as op
import subprocess
import tempfile
import filecmp
import sys
import os
import cooler
import logging
import numpy as np
import h5py
import nose
from nose.tools import with_setup, set_trace
from click.testing import CliRunner
from cooler.cli.cload import cload, tabix as cload_tabix
from cooler.cli.coarsen import coarsen, tile, multires_aggregate
if sys.version_info[0] == 3 and sys.version_info[1] == 3:
raise nose.SkipTest
testdir = op.realpath(op.join(op.dirname(__file__), op.pardir))
tmp = tempfile.gettempdir()
multires_path = op.join(tmp, 'test.multires.cool')
def teardown_func(*filepaths):
for fp in filepaths:
try:
os.remove(fp)
except OSError:
pass
def test_recursive_agg():
infile = op.join(testdir, 'data', 'GM12878-MboI-matrix.2000kb.cool')
outfile = '/tmp/bla.cool'
chunksize = int(10e6)
n_zooms = 2
n_cpus = 8
multires_aggregate(infile, outfile, n_cpus, chunksize)
#ccc.multires_balance(outfile, n_zooms, chunksize, n_cpus)
@with_setup(teardown=partial(teardown_func, multires_path))
def test_tile():
runner = CliRunner()
result = runner.invoke(
tile, [
op.join(testdir, 'data', 'dec2_20_pluslig_1pGene_grch38_UBR4_D_1nt.pairwise.sorted.cool'),
'--out', multires_path,
'--no-balance',
]
)
sys.stdout.write(result.output)
# this file should have 6 zoom levels
#c = cooler.Cooler(h5py.File(multires_path)['5'])
#print('pixels (5):', len(c.pixels()[:].index))
# should get a ValueError because the chromosome names in the pixels dont' match
# the stored chromosome names
assert(result.exit_code == -1)
| bsd-3-clause | Python |
c94177f671ee06d4922f323090fa6653c6b97b3f | fix issue | pyprism/Hiren-Git-Commit-Reminder,pyprism/Hiren-Git-Commit-Reminder | github/tests.py | github/tests.py | from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.test import LiveServerTestCase
from django.contrib.auth.models import User
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from github.views import *
import os
class HomePageTest(TestCase):
def test_root_url_resolves_to_index_page_view(self):
found = resolve('/')
self.assertEqual(found.func, index)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = index(request)
self.assertTrue(response.content.startswith(b'<html>'))
self.assertIn(b'<title>Hiren: The Bunny</title>', response.content)
self.assertIn(
b'<a href="/login"><i class="fa fa-sign-in"></i> Login</a>', response.content)
self.assertTrue(response.content.endswith(b'</html>'))
class LoginFunctionalTest(LiveServerTestCase):
def setUp(self):
User.objects.create_superuser(
username='admin', password='admin', email='admin@admin.lol')
if 'TRAVIS' in os.environ:
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
hub_url = "%s:%s@ondemand.saucelabs.com:80" % (username, access_key)
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities['platform'] = "WINDOWS"
capabilities['version'] = "10"
browser = webdriver.Remote(desired_capabilities=capabilities,
command_executor="http://%s/wd/hub" % hub_url)
else:
self.browser = webdriver.Firefox()
self.browser.maximize_window()
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
def test_login_user(self):
self.browser.get('%s%s' % (self.live_server_url,"/login/"))
| from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.test import LiveServerTestCase
from django.contrib.auth.models import User
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from github.views import *
import os
class HomePageTest(TestCase):
def test_root_url_resolves_to_index_page_view(self):
found = resolve('/')
self.assertEqual(found.func, index)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = index(request)
self.assertTrue(response.content.startswith(b'<html>'))
self.assertIn(b'<title>Hiren: The Bunny</title>', response.content)
self.assertIn(
b'<a href="/login"><i class="fa fa-sign-in"></i> Login</a>', response.content)
self.assertTrue(response.content.endswith(b'</html>'))
class LoginFunctionalTest(LiveServerTestCase):
def setUp(self):
User.objects.create_superuser(
username='admin', password='admin', email='admin@admin.lol')
if 'TRAVIS' in os.environ:
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
hub_url = "%s:%s@ondemand.saucelabs.com:80/wd/hub" % (username, access_key)
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities['platform'] = "WINDOWS"
capabilities['version'] = "10"
browser = webdriver.Remote(desired_capabilities=capabilities,
command_executor="http://%s/wd/hub" % hub_url)
else:
self.browser = webdriver.Firefox()
self.browser.maximize_window()
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
def test_login_user(self):
self.browser.get('%s%s' % (self.live_server_url,"/login/"))
| mit | Python |
d472beade2135cf98f04c81914ac91f9896a9f2e | Update het.py | mspopgen/Evolib,mspopgen/Evolib,padraicc/Evolib | bin/het.py | bin/het.py | import sys
from evolib.NGSFormats import VariantCallFormat
vcf = VariantCallFormat(sys.stdin)
args = sys.argv[1:]
names = args[0].split(",")
for row in vcf:
chrom, pos = row['CHROM'], row['POS']
GTs = list((smp['GT'] for smp in row.iter_samples() if smp.name in names and smp['GT'] is not None and smp['GT'] != "./." and smp['DP'] != "." and smp['DP'] is not None and int(smp['DP']) > 7))
ngts = len(GTs)
nhet = GTs.count("0/1")
if ngts == 0:
print chrom, pos, "NA"
else:
print chrom, pos, nhet / float(ngts)
| import sys
from evolib.NGSFormats import VariantCallFormat
vcf = VariantCallFormat(sys.stdin)
args = sys.argv[1:]
names = args[0].split(",")
for row in vcf:
chrom, pos = row['CHROM'], row['POS']
try:
GTs = list((smp['GT'] for smp in row.iter_samples() if smp.name in names and smp['GT'] != "./." and smp['DP'] != "." and smp['DP'] is not None and int(smp['DP']) > 7))
except KeyError:
GTs = []
ngts = len(GTs)
nhet = GTs.count("0/1")
if ngts == 0:
print chrom, pos, "NA"
else:
print chrom, pos, nhet / float(ngts)
| mit | Python |
1de64fe5e7153c68a427805b24f08e8347492ff4 | remove unused import | ngouzy/smartchangelog,ngouzy/commitmsg | test/support.py | test/support.py | import inspect
import subprocess
import sys
import os
from contextlib import contextmanager
from typing import List, Iterator, TextIO, cast
from io import StringIO
import commitmsg
"""Path of the file containing commitmsg module"""
commitmsg_script_path = inspect.getfile(commitmsg)
def git_command(*git_args: str) -> subprocess.CompletedProcess:
args = ['git'] + cast(List[str], list(git_args))
completed_process = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
assert completed_process.returncode == 0
assert len(completed_process.stderr) == 0
return completed_process
@contextmanager
def set_commit_editmsg(msg: str) -> Iterator[TextIO]:
filename = 'COMMIT_EDITMSG'
with open(filename, mode='w') as f:
f.write(msg)
try:
yield cast(TextIO, f)
finally:
if os.path.isfile(filename):
os.remove(filename)
@contextmanager
def set_args(*args):
old = list(sys.argv)
sys.argv[:] = args
oldout, olderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
try:
yield sys.stdout, sys.stderr
finally:
sys.stdout.seek(0)
sys.stderr.seek(0)
sys.argv[:] = old
sys.stdout, sys.stderr = oldout, olderr
| import inspect
import subprocess
import sys
import os
from contextlib import contextmanager
from typing import List, Tuple, Iterator, TextIO, cast
from io import StringIO
import commitmsg
"""Path of the file containing commitmsg module"""
commitmsg_script_path = inspect.getfile(commitmsg)
def git_command(*git_args: str) -> subprocess.CompletedProcess:
args = ['git'] + cast(List[str], list(git_args))
completed_process = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
assert completed_process.returncode == 0
assert len(completed_process.stderr) == 0
return completed_process
@contextmanager
def set_commit_editmsg(msg: str) -> Iterator[TextIO]:
filename = 'COMMIT_EDITMSG'
with open(filename, mode='w') as f:
f.write(msg)
try:
yield cast(TextIO, f)
finally:
if os.path.isfile(filename):
os.remove(filename)
@contextmanager
def set_args(*args):
old = list(sys.argv)
sys.argv[:] = args
oldout, olderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
try:
yield sys.stdout, sys.stderr
finally:
sys.stdout.seek(0)
sys.stderr.seek(0)
sys.argv[:] = old
sys.stdout, sys.stderr = oldout, olderr
| mit | Python |
aa603a96d0b7260ec5b687e1d75404512156de10 | add missing param to test | oss/shrunk,oss/shrunk,oss/shrunk,oss/shrunk,oss/shrunk | test/test_db.py | test/test_db.py | """ shrunk - Rutgers University URL Shortener
Unit tests for the database.
"""
from shrunk import ShrunkClient
def get_shrunk_connection():
return ShrunkClient("db")
def setup():
pass
def teardown():
"""Cleans up the database after testing."""
shrunk = get_shrunk_connection()
shrunk.delete_user_urls("shrunk_test")
def test_urls():
"""Puts and retrieves URLs from the database."""
shrunk = get_shrunk_connection()
long_urls = ["foo.com", "bar.net", "báz7.edu.fr"]
short_urls = []
for url in long_urls:
result = shrunk.create_short_url(url, netid="shrunk_test")
short_urls.append(result)
results = [shrunk.get_long_url(url) for url in short_urls]
assert long_urls == results
def test_visit():
"""Tests logic when "visiting" a URL."""
shrunk = get_shrunk_connection()
long_url = "http://www.foobar.net/index"
short_url = shrunk.create_short_url(long_url, netid="shrunk_test")
assert short_url is not None
hits = 4
for _ in range(0, hits):
shrunk.visit(short_url, "127.0.0.1")
assert shrunk.get_num_visits(short_url) == hits
def test_deletion():
"""Tests a deletion from the database."""
shrunk = get_shrunk_connection()
long_url = "foo.com"
short_url = shrunk.create_short_url(long_url, netid="shrunk_test")
assert short_url is not None
shrunk.delete_url(short_url, "shrunk_test")
assert shrunk.get_long_url(short_url) is None
| """ shrunk - Rutgers University URL Shortener
Unit tests for the database.
"""
from shrunk import ShrunkClient
def get_shrunk_connection():
return ShrunkClient("localhost")
def setup():
pass
def teardown():
"""Cleans up the database after testing."""
shrunk = get_shrunk_connection()
shrunk.delete_user_urls("shrunk_test")
def test_urls():
"""Puts and retrieves URLs from the database."""
shrunk = get_shrunk_connection()
long_urls = ["foo.com", "bar.net", "báz7.edu.fr"]
short_urls = []
for url in long_urls:
result = shrunk.create_short_url(url, netid="shrunk_test")
short_urls.append(result)
results = [shrunk.get_long_url(url) for url in short_urls]
assert long_urls == results
def test_visit():
"""Tests logic when "visiting" a URL."""
shrunk = get_shrunk_connection()
long_url = "http://www.foobar.net/index"
short_url = shrunk.create_short_url(long_url, netid="shrunk_test")
assert short_url is not None
hits = 4
for _ in range(0, hits):
shrunk.visit(short_url, "127.0.0.1")
assert shrunk.get_num_visits(short_url) == hits
def test_deletion():
"""Tests a deletion from the database."""
shrunk = get_shrunk_connection()
long_url = "foo.com"
short_url = shrunk.create_short_url(long_url, netid="shrunk_test")
assert short_url is not None
shrunk.delete_url(short_url)
assert shrunk.get_long_url(short_url) is None
| mit | Python |
3b6aeac94c08a8d961d10bd9d0bf2fbca1e5ea35 | Add CJSON test suite to the test_io.py file | Schamnad/cclib,berquist/cclib,langner/cclib,langner/cclib,gaursagar/cclib,Schamnad/cclib,cclib/cclib,cclib/cclib,cclib/cclib,ATenderholt/cclib,berquist/cclib,berquist/cclib,langner/cclib,gaursagar/cclib,ATenderholt/cclib | test/test_io.py | test/test_io.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Run writer unit tests for cclib."""
import sys
import unittest
sys.path.append('io')
from testccio import *
from testfilewriter import *
from testxyzwriter import *
from testcjsonreader import *
from testcjsonwriter import *
if __name__ == "__main__":
unittest.main()
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Run writer unit tests for cclib."""
import sys
import unittest
sys.path.append('io')
from testccio import *
from testfilewriter import *
from testxyzwriter import *
from testcjsonreader import *
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | Python |
e914d33fa17f475142c2e8b267f2e096944150a3 | add roles from keycloak to user profile | bcgov/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells | gwells/authentication.py | gwells/authentication.py | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.contrib.auth import get_user_model
from rest_framework import exceptions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from gwells.models.Profile import Profile
class JwtOidcAuthentication(JSONWebTokenAuthentication):
"""
Authenticate users who provide a JSON Web Token in the request headers (e.g. Authorization: JWT xxxxxxxxxx)
"""
def authenticate_credentials(self, payload):
User = get_user_model()
# get keycloak ID from JWT token
username = payload.get('sub')
if username is None:
raise exceptions.AuthenticationFailed('JWT did not contain a "sub" attribute')
# get or create a user with the keycloak ID
try:
user, user_created = User.objects.get_or_create(username=username)
except:
raise exceptions.AuthenticationFailed('Failed to retrieve or create user')
if user_created:
user.set_password(User.objects.make_random_password(length=36))
user.save()
# load the user's GWELLS profile
try:
profile, __ = Profile.objects.get_or_create(user=user.id)
except:
raise exceptions.AuthenticationFailed('Failed to create user profile')
# get the roles supplied by Keycloak for this user
try:
roles = payload.get('realm_access').get('roles')
except:
raise exceptions.AuthenticationFailed('Failed to retrieve roles')
if 'gwells_admin' in roles:
profile.is_gwells_admin = True
profile.save()
return user
| """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.contrib.auth import get_user_model
from rest_framework import exceptions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from gwells.models.Profile import Profile
class JwtOidcAuthentication(JSONWebTokenAuthentication):
"""
Authenticate users who provide a JSON Web Token in the request headers (e.g. Authorization: JWT xxxxxxxxxx)
"""
def authenticate_credentials(self, payload):
User = get_user_model()
username = payload.get('sub')
if username is None:
raise exceptions.AuthenticationFailed('JWT did not contain a "sub" attribute')
try:
user, created = User.objects.get_or_create(username=username)
except:
raise exceptions.AuthenticationFailed('Failed to retrieve or create user')
if created:
user.is_staff = True
user.set_password(User.objects.make_random_password(length=36))
user.save()
try:
roles = payload.get('resource_access').get('account').get('roles')
except:
raise exceptions.AuthenticationFailed('Failed to retrieve roles')
try:
profile = Profile.objects.get_or_create(user=user.id)
except:
raise exceptions.AuthenticationFailed('Failed to create user profile')
if 'gwells_admin' in roles:
profile.is_gwells_admin = True
profile.save()
return user
| apache-2.0 | Python |
5117f2a5a5676144c46f374138a406cf499c03cf | Bump version 2.1.2 | arteria/django-hijack-admin,arteria/django-hijack-admin,arteria/django-hijack-admin | hijack_admin/__init__.py | hijack_admin/__init__.py | # -*- coding: utf-8 -*-
__version__ = '2.1.2' # pragma: no cover
default_app_config = 'hijack_admin.apps.HijackAdminConfig'
| # -*- coding: utf-8 -*-
__version__ = '2.1.1' # pragma: no cover
default_app_config = 'hijack_admin.apps.HijackAdminConfig'
| mit | Python |
e3680dfd76c8471e63fd2bd71d8d0754bec75036 | Remove dead code | hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare | hs_communities/models.py | hs_communities/models.py | from django.db import models
class Topic(models.Model):
name = models.CharField(editable=True, null=False, max_length=255)
def __str__(self):
return "{}".format(self.name)
class TopicEntry(models.Model):
topic = models.ForeignKey(Topic,
null=False,
editable=True,
help_text='One topic entry for a resource')
order = models.IntegerField(null=False,
editable=True,
help_text='Position of this entry: 1-n')
def __str__(self):
return "{} {}".format(self.topic, self.order)
class Topics(models.Model):
topics = models.ManyToManyField(TopicEntry,
editable=True,
help_text='A list of topics, in order')
def add(self, new_topic):
"""
:param topic:
:return:
"""
topic_entry = TopicEntry()
topic_entry.topic = new_topic
topics_list = self.topics.values_list("order", flat=True)
topic_entry.order = max(topics_list if topics_list else [0]) + 1
topic_entry.save()
def __str__(self):
return "{}".format(self.topics)
| from django.db import models
class Topic(models.Model):
name = models.CharField(editable=True, null=False, max_length=255)
def __str__(self):
return "{}".format(self.name)
class TopicEntry(models.Model):
topic = models.ForeignKey(Topic,
null=False,
editable=True,
help_text='One topic entry for a resource')
order = models.IntegerField(null=False,
editable=True,
help_text='Position of this entry: 1-n')
def __str__(self):
return "{} {}".format(self.topic, self.order)
class Topics(models.Model):
topics = models.ManyToManyField(TopicEntry,
editable=True,
help_text='A list of topics, in order')
def add(self, new_topic):
"""
:param topic:
:return:
"""
topic_entry = TopicEntry()
topic_entry.topic = new_topic
topics_list = self.topics.values_list("order", flat=True)
topic_entry.order = max(topics_list if topics_list else [0]) + 1
topic_entry.save()
# def delete(self, id):
# """
#
# :param id:
# :return:
# """
# print("would delete topic id {}".format(id))
def __str__(self):
return "{}".format(self.topics)
| bsd-3-clause | Python |
adf8562b78496454f822a9cf33f161445a995a2a | move author and version info to __init__ | asrozar/perception | perceptiond.py | perceptiond.py | #!/usr/bin/env python
from app import db_session
# Perception
#
# Copyright (C) 2017 Avery Rozar
#
# This program is free software; you can redistribute it and/or modify it under the terms of the The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software") to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from app.lib.perception_daemon import PerceptionDaemon
from time import sleep
import sys
# Local Class
# -------------------------------------------------------------------------------
class MyPerceptionDaemon(PerceptionDaemon):
@staticmethod
def run():
while True:
sleep(1)
# Main
# -------------------------------------------------------------------------------
def main():
perceptiond = MyPerceptionDaemon('/var/run/perceptiond.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
perceptiond.start()
elif 'stop' == sys.argv[1]:
perceptiond.stop()
elif 'restart' == sys.argv[1]:
perceptiond.restart()
else:
print("Unknown command")
sys.exit(0)
sys.exit(0)
else:
print("usage: %s start|stop|restart" % sys.argv[0])
sys.exit(0)
if __name__ == '__main__':
try:
main()
except(IOError, SystemError) as e:
db_session.close()
print(e)
except KeyboardInterrupt:
db_session.close()
print('Crtl+C Pressed. Shutting down.')
| #!/usr/bin/env python
from app import db_session
# Perception
#
# Copyright (C) 2017 Avery Rozar
#
# This program is free software; you can redistribute it and/or modify it under the terms of the The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software") to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__version__ = '0.2'
__author__ = 'Avery Rozar: avery.rozar@insecure-it.com'
from app.lib.perception_daemon import PerceptionDaemon
from time import sleep
import sys
# Local Class
# -------------------------------------------------------------------------------
class MyPerceptionDaemon(PerceptionDaemon):
@staticmethod
def run():
while True:
sleep(1)
# Main
# -------------------------------------------------------------------------------
def main():
perceptiond = MyPerceptionDaemon('/var/run/perceptiond.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
perceptiond.start()
elif 'stop' == sys.argv[1]:
perceptiond.stop()
elif 'restart' == sys.argv[1]:
perceptiond.restart()
else:
print("Unknown command")
sys.exit(0)
sys.exit(0)
else:
print("usage: %s start|stop|restart" % sys.argv[0])
sys.exit(0)
if __name__ == '__main__':
try:
main()
except(IOError, SystemError) as e:
db_session.close()
print(e)
except KeyboardInterrupt:
db_session.close()
print('Crtl+C Pressed. Shutting down.')
| mit | Python |
7a6a6446c56d9bb119bf2e534bc90549c93c3a80 | put showLabel function into displayWindow class as a method | lucaskotw/paperpass | guipaperpass.py | guipaperpass.py | from Tkinter import Label
class displaywWindow:
" This class display the window of paperpass "
def showLabel():
widget = Label(None, text='Hello!')
widget.pack()
| from Tkinter import Label
def showLabel():
widget = Label(None, text='Hello!')
widget.pack()
| mit | Python |
195a74fffebff179ca8a52d685a37bdad34cd8ad | Fix bug where text box get and clear don't work | lawsie/guizero,lawsie/guizero,lawsie/guizero | guizero/Text.py | guizero/Text.py | from tkinter import Label, StringVar
from . import utilities as utils
class Text(Label):
def __init__(self, master, text="", size=12, color="black", font="Helvetica", grid=None, align=None):
# Description of this object (for friendly error messages)
self.description = "[Text] object with text \"" + str(text) + "\""
# Save some of the config
self.current_size = size
self.current_color = color
self.current_font = font
self.text = str(text)
# Attempt to instantiate the object and raise an error if failed
try:
super().__init__(master, text=self.text)
except AttributeError:
utils.error_format( self.description + "\n" +
"The first argument was a " + str(type(master)) +". First argument must be [App] or [Box]")
# Initial config on setup
self.config(fg=color, font=(font, size))
# Pack or grid depending on parent
utils.auto_pack(self, master, grid, align)
# Clear text (set to empty string)
def clear(self):
self.text = ""
self.config(text="")
# Returns the text
def get(self):
return self.text
# Sets the text
def set(self, text):
self.text = str(text)
self.config(text=self.text)
self.description = "[Text] object with text \"" + str(text) + "\""
# Sets the text colour
def color(self, color):
self.config(fg=color)
# Set the font
def font_face(self, font):
self.current_font = font
self.config(font=(self.current_font, self.current_size))
# Set the font size
def font_size(self, size):
self.current_size = size
self.config(font=(self.current_font, self.current_size))
# Append to the StringVar controlling this text
def append(self, text):
new_text = self.text + str(text)
self.text = new_text
self.config(text=new_text)
self.description = "[Text] object with text \"" + new_text + "\""
| from tkinter import Label, StringVar
from . import utilities as utils
class Text(Label):
def __init__(self, master, text="", size=12, color="black", font="Helvetica", grid=None, align=None):
# Description of this object (for friendly error messages)
self.description = "[Text] object with text \"" + str(text) + "\""
# Save some of the config
self.current_size = size
self.current_color = color
self.current_font = font
self.text = str(text)
# Attempt to instantiate the object and raise an error if failed
try:
super().__init__(master, text=self.text)
except AttributeError:
utils.error_format( self.description + "\n" +
"The first argument was a " + str(type(master)) +". First argument must be [App] or [Box]")
# Initial config on setup
self.config(fg=color, font=(font, size))
# Pack or grid depending on parent
utils.auto_pack(self, master, grid, align)
# Clear text (set to empty string)
def clear(self):
self.string_var.set("")
# Returns the text
def get(self):
return self.string_var.get()
# Sets the text
def set(self, text):
self.text = str(text)
self.config(text=self.text)
self.description = "[Text] object with text \"" + str(text) + "\""
# Sets the text colour
def color(self, color):
self.config(fg=color)
# Set the font
def font_face(self, font):
self.current_font = font
self.config(font=(self.current_font, self.current_size))
# Set the font size
def font_size(self, size):
self.current_size = size
self.config(font=(self.current_font, self.current_size))
# Append to the StringVar controlling this text
def append(self, text):
new_text = self.text + str(text)
self.text = new_text
self.config(text=new_text)
self.description = "[Text] object with text \"" + new_text + "\""
| bsd-3-clause | Python |
abc43a49323f5b925de9bf506fc9a766721e369f | Add authenticated decorator for profile page | kkstu/DNStack,kkstu/DNStack,kkstu/DNStack | handler/user.py | handler/user.py | #!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
from BaseHandler import BaseHandler
from tornado.web import authenticated as Auth
class LoginHandler(BaseHandler):
def get(self):
if not self.session.isGuest:
return self.redirect('/') # 已登录则跳转到首页
next = self.get_argument("next", "/")
self.render('user/login.html', next=next)
def post(self):
username = self.get_argument("username", None)
password = self.get_argument("password", None)
remember = self.get_argument("remember", "no")
#self.create_session(self,data,remember)
def create_session(self,data,remember):
sid = self.session.gen_session_id()
self.session.data = data
self.session.isGuest = False
#self.session.save() # Why don't save? See self._on_finish !!
if remember == "yes":
expires_days = 15 # Remember Session 15 days
else:
expires_days = None
self.set_secure_cookie(self.cookie_name, sid, expires_days)
# Sign Out
class LogoutHandler(BaseHandler):
def get(self):
self.session.remove()
self.clear_cookie(self.cookie_name)
self.redirect(self.get_login_url())
# Profile
class ProfileHandler(BaseHandler):
@Auth
def get(self):
self.render('user/profile.html')
# Password
class PasswdHandler(BaseHandler):
def get(self):
self.render('user/passwd.html') | #!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
from BaseHandler import BaseHandler
class LoginHandler(BaseHandler):
def get(self):
if not self.session.isGuest:
return self.redirect('/') # 已登录则跳转到首页
next = self.get_argument("next", "/")
self.render('user/login.html', next=next)
def post(self):
username = self.get_argument("username", None)
password = self.get_argument("password", None)
remember = self.get_argument("remember", "no")
#self.create_session(self,data,remember)
def create_session(self,data,remember):
sid = self.session.gen_session_id()
self.session.data = data
self.session.isGuest = False
#self.session.save() # Why don't save? See self._on_finish !!
if remember == "yes":
expires_days = 15 # Remember Session 15 days
else:
expires_days = None
self.set_secure_cookie(self.cookie_name, sid, expires_days)
# Sign Out
class LogoutHandler(BaseHandler):
def get(self):
self.session.remove()
self.clear_cookie(self.cookie_name)
self.redirect(self.get_login_url())
# Profile
class ProfileHandler(BaseHandler):
def get(self):
self.render('user/profile.html')
# Password
class PasswdHandler(BaseHandler):
def get(self):
self.render('user/passwd.html') | mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.