commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
7ed5f886d1fc92c3f7c39c7e6c11c15f3151296d | fix romance error | LeagueOfAutomatedNations/LeagueBot,LeagueOfAutomatedNations/LeagueBot | leaguebot/services/alerters/slack.py | leaguebot/services/alerters/slack.py | from leaguebot import app
import leaguebot.models.map as screepmap
import leaguebot.services.screeps as screeps
import leaguebot.services.slack as slack
import re
def sendBattleMessage(battleinfo):
message = getBattleMessageText(battleinfo)
sendToSlack(message)
def getBattleMessageText(battleinfo):
tick = screeps.get_time()
room_name = battleinfo['_id']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Battle: ' + '<https://screeps.com/a/#!/room/' + room_name + '|' + room_name + '>'
if not room_owner:
return message
room_level = screepmap.getRoomLevel(room_name)
if room_level and room_level > 0:
message += ' RCL ' + str(room_level)
message += ', defender ' + '<https://screeps.com/a/#!/profile/' + room_owner + '|' + room_owner + '>'
room_alliance = screepmap.getUserAlliance(room_owner)
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendNukeMessage(nukeinfo):
message = getNukeMessageText(nukeinfo)
sendToSlack(message)
def getNukeMessageText(nukeinfo):
tick = screeps.get_time()
eta = str(nukeinfo['landTime']-tick)
room_name = nukeinfo['room']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Nuke: ' + room_name + ' in ' + str(eta) + ' ticks'
if not room_owner:
message += ', abandoned'
else:
room_alliance = screepmap.getUserAlliance(room_owner)
message += ', defender ' + room_owner
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendToSlack(message):
if 'SEND_TO_SLACK' not in app.config or not app.config['SEND_TO_SLACK']:
return False
try:
channel = app.config['SLACK_CHANNEL']
slack.send_slack_message(channel, message)
print (message)
return True
except:
return False
| from leaguebot import app
import leaguebot.models.map as screepmap
import leaguebot.services.screeps as screeps
import leaguebot.services.slack as slack
import re
def sendBattleMessage(battleinfo):
message = getBattleMessageText(battleinfo)
sendToSlack(message)
def getBattleMessageText(battleinfo):
tick = screeps.get_time()
room_name = battleinfo['_id']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Battle: ' + '<https://screeps.com/a/#!/room/' + roomname + '|' + roomname + '>'
if not room_owner:
return message
room_level = screepmap.getRoomLevel(room_name)
if room_level and room_level > 0:
message += ' RCL ' + str(room_level)
message += ', defender ' + '<https://screeps.com/a/#!/profile/' + room_owner + '|' + room_owner + '>'
room_alliance = screepmap.getUserAlliance(room_owner)
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendNukeMessage(nukeinfo):
message = getNukeMessageText(nukeinfo)
sendToSlack(message)
def getNukeMessageText(nukeinfo):
tick = screeps.get_time()
eta = str(nukeinfo['landTime']-tick)
room_name = nukeinfo['room']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Nuke: ' + room_name + ' in ' + str(eta) + ' ticks'
if not room_owner:
message += ', abandoned'
else:
room_alliance = screepmap.getUserAlliance(room_owner)
message += ', defender ' + room_owner
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendToSlack(message):
if 'SEND_TO_SLACK' not in app.config or not app.config['SEND_TO_SLACK']:
return False
try:
channel = app.config['SLACK_CHANNEL']
slack.send_slack_message(channel, message)
print (message)
return True
except:
return False
| mit | Python |
6cfeb4fefb0eaa7c47ba8f44b4d1fb640983dc5a | Set version to v0.25.0-dev | SoCo/SoCo,SoCo/SoCo | soco/__init__.py | soco/__init__.py | """SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.25.0-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
| """SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.24.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
14a0738ec836bd3369984835797f5002813b270a | Fix imports to local paths | abenicho/isvr | nilearn/_utils/__init__.py | nilearn/_utils/__init__.py |
from .niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \
copy_niimg, check_niimg, concat_niimgs, check_niimgs
from .numpy_conversions import as_ndarray
from .cache_mixin import CacheMixin
|
from niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \
copy_niimg, check_niimg, concat_niimgs, check_niimgs
from numpy_conversions import as_ndarray
from cache_mixin import CacheMixin
| bsd-3-clause | Python |
c8c96913057bcbccce931529fc0f7ebdbad725fa | Add support for matching %s__in | stuaxo/mnd | mnd/match.py | mnd/match.py | """
Argument matching.
"""
from operator import eq, contains
from collections import namedtuple
class InvalidArg:
def __bool__(self):
return False
def arg_comparitor(name):
"""
:param arg name
:return: pair containing name, comparitor
given an argument name, munge it and return a proper comparitor
>>> get_arg_cmp("a")
a, operator.eq
>>> get_arg_cmp("a__in")
a, operator.contains
"""
if name.endswith("__in"):
return name[:-4], contains
else:
return name, eq
def arg_match(m_arg, arg, comparitor=eq, default=True):
"""
:param m_arg: value to match against or callable
:param arg: arg to match
:param comparitor: function that returns True if m_arg and arg match
:param default: will be returned if m_arg is None
if m_arg is a callable it will be called with arg
>>> arg_match(1, 1)
True
>>> arg_match(1, 2)
True
You can match by sub args by passing in a dict
>>> from collections import namedtuple
>>> Msg = namedtuple("msg", ["note", "type"])
>>> m = Msg(note=1, type="note_on")
>>> arg_match(dict(note=1), m)
True
"""
if m_arg is None:
return default
if isinstance(m_arg, dict):
for name, value in m_arg.items():
name, _comparitor = arg_comparitor(name)
subarg = getattr(arg, name, InvalidArg)
if subarg is InvalidArg:
return subarg
matched = arg_match(subarg, value, _comparitor, default)
if not matched:
return matched
return True
else:
if hasattr(m_arg, "__call__"):
return m_arg(arg)
else:
return comparitor(arg, m_arg)
def args_match(m_args, m_kwargs, *args, **kwargs):
"""
:param m_args: values to match args against
:param m_kwargs: values to match kwargs against
:param arg: args to match
:param arg: kwargs to match
"""
if len(m_args) > len(args):
return False
for m_arg, arg in zip(m_args, args):
matches = arg_match(m_arg, arg, eq)
if not matches:
return False # bail out
if m_kwargs:
for name, m_arg in m_kwargs.items():
name, comparitor = arg_comparitor(name)
arg = kwargs.get(name)
if not arg_match(m_arg, arg, comparitor):
return False # bail out
return True
| """
Argument matching.
"""
from collections import namedtuple
class InvalidArg:
pass
def arg_match(m_arg, arg, default=True):
"""
:param m_arg: value to match against or callable
:param arg: arg to match
:param default: will be returned if m_arg is None
if m_arg is a callable it will be called with arg
>>> arg_match(1, 1)
True
>>> arg_match(1, 2)
True
You can match by sub args by passing in a dict
>>> from collections import namedtuple
>>> Msg = namedtuple("msg", ["note", "type"])
>>> m = Msg(note=1, type="note_on")
>>> arg_match(dict(note=1), m)
True
"""
if m_arg is None:
return default
if isinstance(m_arg, dict):
for name, value in m_arg.items():
subarg = getattr(arg, name, InvalidArg)
if subarg is InvalidArg:
return subarg
matched = arg_match(subarg, value, default)
if not matched:
return matched
return True
else:
if hasattr(m_arg, "__call__"):
return m_arg(arg)
else:
return m_arg == arg
def args_match(m_args, m_kwargs, *args, **kwargs):
"""
:param m_args: values to match args against
:param m_kwargs: values to match kwargs against
:param arg: args to match
:param arg: kwargs to match
"""
if len(m_args) > len(args):
return False
for m_arg, arg in zip(m_args, args):
if not arg_match(m_arg, arg):
return False # bail out
if m_kwargs:
for name, m_arg in m_kwargs.items():
arg = kwargs.get(name)
if not arg_match(m_arg, arg):
return False # bail out
return True
| mit | Python |
ec3443a000d4d004575b0425ec40640c17d2adfb | Use __version__. | codysoyland/django-template-repl | src/template_repl/__init__.py | src/template_repl/__init__.py | __version__ = '0.2.1'
def get_version():
return __version__
| def get_version():
return '0.2.1'
| bsd-3-clause | Python |
5d5a739979d2bbf160c951b846a0dcc4acd504c6 | Remove scikits.ts and larry from example | jstoxrocky/statsmodels,gef756/statsmodels,bashtage/statsmodels,edhuckle/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,yl565/statsmodels,huongttlan/statsmodels,bsipocz/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,wzbozon/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,wkfwkf/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,pprett/statsmodels,cbmoore/statsmodels,nguyentu1602/statsmodels,bzero/statsmodels,hainm/statsmodels,wkfwkf/statsmodels,wwf5067/statsmodels,astocko/statsmodels,ChadFulton/statsmodels,waynenilsen/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,wwf5067/statsmodels,kiyoto/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,alekz112/statsmodels,bavardage/statsmodels,yarikoptic/pystatsmodels,jstoxrocky/statsmodels,josef-pkt/statsmodels,pprett/statsmodels,astocko/statsmodels,statsmodels/statsmodels,waynenilsen/statsmodels,DonBeo/statsmodels,jstoxrocky/statsmodels,bert9bert/statsmodels,ChadFulton/statsmodels,phobson/statsmodels,phobson/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,bavardage/statsmodels,nvoron23/statsmodels,bzero/statsmodels,phobson/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,josef-pkt/statsmodels,musically-ut/statsmodels,wzbozon/statsmodels,gef756/statsmodels,gef756/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,hainm/statsmodels,saketkc/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,DonBeo/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,huongttlan/statsmodels,nvoron23/statsmodels,wzbozon/statsmodels,musically-ut/statsmodels,wdurhamh/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,pprett/statsmodels,wdurhamh/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,statsmodels/stat
smodels,YihaoLu/statsmodels,yarikoptic/pystatsmodels,alekz112/statsmodels,wwf5067/statsmodels,rgommers/statsmodels,jseabold/statsmodels,bzero/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,nguyentu1602/statsmodels,nguyentu1602/statsmodels,bzero/statsmodels,Averroes/statsmodels,yl565/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,YihaoLu/statsmodels,adammenges/statsmodels,bsipocz/statsmodels,statsmodels/statsmodels,bzero/statsmodels,astocko/statsmodels,gef756/statsmodels,bashtage/statsmodels,adammenges/statsmodels,DonBeo/statsmodels,wwf5067/statsmodels,yarikoptic/pystatsmodels,alekz112/statsmodels,Averroes/statsmodels,YihaoLu/statsmodels,alekz112/statsmodels,josef-pkt/statsmodels,saketkc/statsmodels,saketkc/statsmodels,hlin117/statsmodels,wdurhamh/statsmodels,bavardage/statsmodels,bavardage/statsmodels,phobson/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,bavardage/statsmodels,waynenilsen/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,cbmoore/statsmodels,pprett/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,nvoron23/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,hainm/statsmodels,saketkc/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,wdurhamh/statsmodels,jseabold/statsmodels,wzbozon/statsmodels,statsmodels/statsmodels,detrout/debian-statsmodels,hlin117/statsmodels,statsmodels/statsmodels,gef756/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,waynenilsen/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,astocko/statsmodels,hlin117/statsmodels,jseabold/statsmodels,musically-ut/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels | examples/tsa/ex_dates.py | examples/tsa/ex_dates.py | """
Using dates with timeseries models
"""
import statsmodels.api as sm
import numpy as np
import pandas
# Getting started
# ---------------
data = sm.datasets.sunspots.load()
# Right now an annual date series must be datetimes at the end of the year.
from datetime import datetime
dates = sm.tsa.datetools.date_from_range('1700', length=len(data.endog))
# Using Pandas
# ------------
# Make a pandas TimeSeries or DataFrame
endog = pandas.TimeSeries(data.endog, index=dt_dates)
# and instantiate the model
ar_model = sm.tsa.AR(endog, freq='A')
pandas_ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
# Let's do some out-of-sample prediction
pred = pandas_ar_res.predict(start='2005', end='2015')
print pred
# Using explicit dates
# --------------------
ar_model = sm.tsa.AR(data.endog, dates=dt_dates, freq='A')
ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
pred = ar_res.predict(start='2005', end='2015')
print pred
# This just returns a regular array, but since the model has date information
# attached, you can get the prediction dates in a roundabout way.
print ar_res._data.predict_dates
# This attribute only exists if predict has been called. It holds the dates
# associated with the last call to predict.
#..TODO: should this be attached to the results instance?
| """
Using dates with timeseries models
"""
import statsmodels.api as sm
import numpy as np
import pandas
# Getting started
# ---------------
data = sm.datasets.sunspots.load()
# Right now an annual date series must be datetimes at the end of the year.
# We can use scikits.timeseries and datetime to create this array.
import datetime
import scikits.timeseries as ts
dates = ts.date_array(start_date=1700, length=len(data.endog), freq='A')
# To make an array of datetime types, we need an integer array of ordinals
#.. from datetime import datetime
#.. dt_dates = dates.toordinal().astype(int)
#.. dt_dates = np.asarray([datetime.fromordinal(i) for i in dt_dates])
dt_dates = dates.tolist()
# Using Pandas
# ------------
# Make a pandas TimeSeries or DataFrame
endog = pandas.Series(data.endog, index=dt_dates)
# and instantiate the model
ar_model = sm.tsa.AR(endog, freq='A')
pandas_ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
# Let's do some out-of-sample prediction
pred = pandas_ar_res.predict(start='2005', end='2015')
print pred
# Using explicit dates
# --------------------
ar_model = sm.tsa.AR(data.endog, dates=dt_dates, freq='A')
ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
pred = ar_res.predict(start='2005', end='2015')
print pred
# This just returns a regular array, but since the model has date information
# attached, you can get the prediction dates in a roundabout way.
print ar_res._data.predict_dates
# This attribute only exists if predict has been called. It holds the dates
# associated with the last call to predict.
#..TODO: should this be attached to the results instance?
# Using scikits.timeseries
# ------------------------
ts_data = ts.time_series(data.endog, dates=dates)
ts_ar_model = sm.tsa.AR(ts_data, freq='A')
ts_ar_res = ts_ar_model.fit(maxlag=9)
# Using Larry
# -----------
import la
larr = la.larry(data.endog, [dt_dates])
la_ar_model = sm.tsa.AR(larr, freq='A')
la_ar_res = la_ar_model.fit(maxlag=9)
| bsd-3-clause | Python |
d7cb8495dc7608ac45195fb523c7b728c24f3a4c | Make all api functions have consistent data variable naming | edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen | kitchen/dashboard/api.py | kitchen/dashboard/api.py | """Data API"""
# -*- coding: utf-8 -*-
import json
from django.http import HttpResponse, Http404
from django.views.decorators.http import require_http_methods
from kitchen.dashboard import chef
@require_http_methods(["GET"])
def get_roles(request):
"""Returns all nodes in the repo"""
data = chef.get_roles()
return HttpResponse(json.dumps(data), content_type="application/json")
@require_http_methods(["GET"])
def get_nodes(request):
"""Returns node files. If 'extended' is given, the extended version is
returned
"""
if request.GET.get('extended'):
data = chef.get_nodes_extended()
else:
data = chef.get_nodes()
data = chef.filter_nodes(data, request.GET.get('env'))
return HttpResponse(json.dumps(data), content_type="application/json")
@require_http_methods(["GET"])
def get_node(request, name):
"""Returns a node"""
data = chef.get_node(name)
if not data:
raise Http404()
return HttpResponse(json.dumps(data), content_type="application/json")
| """Data API"""
# -*- coding: utf-8 -*-
import json
from django.http import HttpResponse, Http404
from django.views.decorators.http import require_http_methods
from kitchen.dashboard import chef
@require_http_methods(["GET"])
def get_roles(request):
"""Returns all nodes in the repo"""
roles = chef.get_roles()
return HttpResponse(json.dumps(roles), content_type="application/json")
@require_http_methods(["GET"])
def get_nodes(request):
"""Returns node files. If 'extended' is given, the extended version is
returned
"""
if request.GET.get('extended'):
data = chef.get_nodes_extended()
else:
data = chef.get_nodes()
data = chef.filter_nodes(data, request.GET.get('env'))
return HttpResponse(json.dumps(data), content_type="application/json")
@require_http_methods(["GET"])
def get_node(request, name):
"""Returns a node"""
data = chef.get_node(name)
if not data:
raise Http404()
return HttpResponse(json.dumps(data), content_type="application/json")
| apache-2.0 | Python |
c31fb511abf67bfebaa8e1c296b68465fb2523bb | fix imports | clonker/ci-tests | ui/viewer.py | ui/viewer.py | import mdtraj as md
import IPython
from mdtraj.html import TrajectoryView, enable_notebook
from IPython.html.widgets.widget_int import IntSliderWidget
def view_traj(traj, topology_file=None, stride=1):
r"""Opens a trajectory viewer (from mdtraj).
Parameters
----------
traj : `mdtraj.Trajectory` or string
mdtraj.Trajectory object or file name for MD trajectory
topology_file : string (default=None)
If traj is a file name, topology_file is the file name
of the accompanying topology file (.pdb/.mol2/...)
stride : int
If traj is a file name, this is the number of frames
to skip between two successive trajectory reads.
"""
if isinstance(traj, str):
traj = md.load(traj, top=topology_file, stride=stride)
# ensure we're able to use TrajectoryView
enable_notebook()
widget = TrajectoryView(traj, colorBy='atom')
IPython.display.display(widget)
slider = IntSliderWidget(max=traj.n_frames - 1)
def on_value_change(name, val):
widget.frame = val
slider.on_trait_change(on_value_change, 'value')
IPython.display.display(slider)
None
| import mdtraj as md
import IPython
from mdtraj.html import TrajectoryView, enable_notebook
import IPython
def view_traj(traj, topology_file=None, stride=1):
r"""Opens a trajectory viewer (from mdtraj).
Parameters
----------
traj : `mdtraj.Trajectory` or string
mdtraj.Trajectory object or file name for MD trajectory
topology_file : string (default=None)
If traj is a file name, topology_file is the file name
of the accompanying topology file (.pdb/.mol2/...)
stride : int
If traj is a file name, this is the number of frames
to skip between two successive trajectory reads.
"""
if isinstance(traj, str):
traj = md.load(traj, top=topology_file, stride=stride)
widget = md.html.TrajectoryView(traj, colorBy='atom')
IPython.display.display(widget)
slider = IPython.html.widgets.IntSliderWidget(max=traj.n_frames - 1)
def on_value_change(name, val):
widget.frame = val
slider.on_trait_change(on_value_change, 'value')
IPython.display.display(slider)
None
| bsd-3-clause | Python |
aca62878340f1a1a04e674bcd2ce8e894e4efdc9 | update help button to use font awesome and be disabled if no help | xgds/xgds_core,xgds/xgds_core,xgds/xgds_core | xgds_core/templatetags/help_button.py | xgds_core/templatetags/help_button.py | #__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
from django import template
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
register = template.Library()
@register.simple_tag(name='help_button')
def help_button(help_content_path, help_title):
try:
url = reverse('help_popup', kwargs={'help_content_path':help_content_path,
'help_title':str(help_title)})
result = "<a href='#' onclick='help.openPopup(\"" + url + "\")' class='help_button btn btn-primary fa fa-question-circle-o fa-lg' role='button'></a>"
return mark_safe(result)
except:
# if the url is not found disable it
result = "<a href='#' class='help_button btn btn-primary fa fa-question-circle-o fa-lg disabled' role='button' disabled></a>"
return mark_safe(result)
| #__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
from django import template
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
register = template.Library()
@register.simple_tag(name='help_button')
def help_button(help_content_path, help_title):
try:
url = reverse('help_popup', kwargs={'help_content_path':help_content_path,
'help_title':str(help_title)})
result = "<a href='#' onclick='help.openPopup(\"" + url + "\")' class='help_button'><i class='fa fa-question-circle-o fa-lg' aria-hidden='true'></i></a>"
return mark_safe(result)
except:
# if the url is not found do not include the help button
return ""
| apache-2.0 | Python |
95b00dae180c54d62b43f04743e9f8ec34e3de52 | Fix minor style issue | sagersmith8/ai_graph_coloring,sagersmith8/ai_graph_coloring | ai_graph_color/line.py | ai_graph_color/line.py | class Line:
def __init__(self, point_a, point_b):
"""
Make a new line from two points.
:param point_a: one of the points on the line
:type point_a: tuple(float, float)
:param point_b: one of the points on the line
:type point_b: tuple(float, float)
"""
self.left_point = min(point_a, point_b)
self.right_point = max(point_a, point_b)
self.distance = point_distance(point_a, point_b)
self.deallocation_routines = []
def add_reference(self, deallocation_routine, *params):
"""
Add a deallocation routine for when resources associated with this line
:param deallocation_routine: the function to call to free a specific
resource
:type deallocation_routine: function
:param params: parameters to the deallocation_routine
:type params: tuple
:return: Nothing
"""
self.deallocation_routines.append((deallocation_routine, params))
def free(self):
"""
Call the deallocation routines specified for when this line is freed.
:return: Nothing
"""
for routine, params in self.deallocation_routines:
routine(*params)
def point_distance(point_a, point_b):
"""
Compute the euclidean distance between two points.
:param point_a: one of the two points to measure distance between
:type point_a: tuple(float, float)
:param point_b: the other of the two points to measure distance between
:type point_b: tuple(float, float)
:rtype: float
:return: the distance between point_a and point_b
"""
return ((point_a[0] - point_b[0])**2 +
(point_a[1] - point_b[1])**2)**0.5
| class Line:
def __init__(self, point_a, point_b):
"""
Make a new line from two points.
:param point_a: one of the points on the line
:type point_a: tuple(float, float)
:param point_b: one of the points on the line
:type point_b: tuple(float, float)
"""
self.left_point = min(point_a, point_b)
self.right_point = max(point_a, point_b)
self.distance = point_distance(point_a, point_b)
self.deallocation_routines = []
def add_reference(self, deallocation_routine, *params):
"""
Add a deallocation routine for when resources associated with this line
:param deallocation_routine: the function to call to free a specific
resource
:type deallocation_routine: function
:param params: parameters to the deallocation_routine
:type params: tuple
:return: Nothing
"""
self.deallocation_routines.append((deallocation_routine, params))
def free(self):
"""
Call the deallocation routines specified for when this line is freed.
:return: Nothing
"""
for routine, params in self.deallocation_routines:
routine(*params)
def point_distance(point_a, point_b):
"""
Compute the euclidean distance between two points.
:param point_a: one of the two points to measure distance between
:type point_a: tuple(float, float)
:param point_b: the other of the two points to measure distance between
:type point_b: tuple(float, float)
:rtype: float
:return: the distance between point_a and point_b
"""
return ((point_a[0] - point_b[0])**2 +
(point_a[1] - point_b[1])**2)**0.5
| mit | Python |
23a8df19e272bf4a48d59629976fc0cd4a1b83eb | Add TFBooster Code to Solve "German signal" problem | Gabvaztor/TFBoost | Settings/German_Signal/ModelConfiguration.py | Settings/German_Signal/ModelConfiguration.py | """
Normally, this files contains all necessary code to execute successfully the solution of the problem
but in this case (because this version is not stable) all code is in "TFModel_backup.py" file.
"""
# TODO Define Code
"""
TFBooster Code to solve problem
"""
setting_object = SettingsObject.Settings(Dictionary.string_settings_german_signal_path)
path_train_and_test_images = [setting_object.train_path,setting_object.test_path]
number_of_classes = 59 # Start in 0
percentages_sets = None # Example
labels_set = [Dictionary.string_labels_type_option_hierarchy]
is_an_unique_csv = False # If this variable is true, then only one CSV file will be passed and it will be treated like
# trainSet, validationSet(if necessary) and testSet
known_data_type = '' # Contains the type of data if the data file contains an unique type of data. Examples: # Number
# or Chars.
reader_features = tfr.ReaderFeatures(set_data_files = path_train_and_test_images,number_of_classes = number_of_classes,
labels_set = labels_set,
is_unique_csv = is_an_unique_csv,known_data_type = known_data_type,
percentages_sets = percentages_sets)
"""
Creating Reader from ReaderFeatures
"""
tf_reader = tfr.Reader(reader_features = reader_features) # Reader Object with all information
"""
Getting train, validation (if necessary) and test set.
"""
test_set = tf_reader.test_set # Test Set
train_set = tf_reader.train_set # Train Set
del reader_features
del tf_reader
models = models.TFModels(input=train_set[0],test=test_set[0],
input_labels=train_set[1],test_labels=test_set[1],
number_of_classes=number_of_classes, setting_object=setting_object)
models.convolution_model_image() | """
Normally, this files contains all necessary code to execute successfully the solution of the problem
but in this case (because this version is not stable) all code is in "TFModel_backup.py" file.
"""
| apache-2.0 | Python |
868589926bd09729e03d59f929f3de0ae0fee673 | Bump release | racker/fleece,racker/fleece | fleece/__about__.py | fleece/__about__.py | """Fleece package attributes and metadata."""
__all__ = (
'__title__',
'__summary__',
'__author__',
'__email__',
'__license__',
'__version__',
'__copyright__',
'__url__',
)
__title__ = 'fleece'
__summary__ = 'Wrap the lamb...da'
__author__ = 'Rackers'
__email__ = 'bruce.stringer@rackspace.com'
__version__ = '0.19.0'
__license__ = 'Apache License, Version 2.0'
__keywords__ = ['fleece', 'lambda']
__copyright__ = 'Copyright Rackspace US, Inc. 2016'
__url__ = 'https://github.com/racker/fleece'
| """Fleece package attributes and metadata."""
__all__ = (
'__title__',
'__summary__',
'__author__',
'__email__',
'__license__',
'__version__',
'__copyright__',
'__url__',
)
__title__ = 'fleece'
__summary__ = 'Wrap the lamb...da'
__author__ = 'Rackers'
__email__ = 'bruce.stringer@rackspace.com'
__version__ = '0.18.7'
__license__ = 'Apache License, Version 2.0'
__keywords__ = ['fleece', 'lambda']
__copyright__ = 'Copyright Rackspace US, Inc. 2016'
__url__ = 'https://github.com/racker/fleece'
| apache-2.0 | Python |
19cd8be63d482a3fb23456c5c87ebdfe16ac9415 | Add a method to test `locateMarker()' in util.py. | isislovecruft/scramblesuit,isislovecruft/scramblesuit | unittests.py | unittests.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import util
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
    """Exercises the UniformDH handshake implementation in ``uniformdh``.

    Each test drives a fresh server-side UniformDH instance created in
    setUp() with a fixed shared secret.
    """

    def setUp( self ):
        # We play the server side of the handshake throughout.
        weAreServer = True
        self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)

    def test1_createHandshake( self ):
        """The handshake blob length must fall within the protocol bounds."""
        handshake = self.udh.createHandshake()
        # Lower bound: public key + marker + HMAC; upper bound: marker +
        # HMAC + maximum padding.  NOTE(review): the upper bound omits
        # PUBLIC_KEY_LENGTH -- presumably MAX_PADDING_LENGTH accounts for
        # it; confirm against the definitions in `const`.
        self.failUnless((const.PUBLIC_KEY_LENGTH +
                         const.MARKER_LENGTH +
                         const.HMAC_LENGTH) <= len(handshake) <=
                        (const.MARKER_LENGTH +
                         const.HMAC_LENGTH +
                         const.MAX_PADDING_LENGTH))

    def test2_receivePublicKey( self ):
        """A valid handshake yields a master key and a remote public key."""
        buf = obfs_buf.Buffer(self.udh.createHandshake())
        def callback( masterKey ):
            # Invoked by receivePublicKey() once the master key is derived.
            self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
        self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
        publicKey = self.udh.getRemotePublicKey()
        self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)

    def test3_invalidHMAC( self ):
        """A handshake whose HMAC was corrupted must be rejected."""
        # Make the HMAC invalid by flipping its last character.
        handshake = self.udh.createHandshake()
        if handshake[-1] != 'a':
            handshake = handshake[:-1] + 'a'
        else:
            handshake = handshake[:-1] + 'b'
        buf = obfs_buf.Buffer(handshake)
        self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
class UtilTest( unittest.TestCase ):
    """Checks for the helpers in ``util`` (HMAC comparison, marker lookup)."""

    def test1_isValidHMAC( self ):
        """isValidHMAC() rejects differing digests and accepts equal ones."""
        reference = "A" * const.HMAC_LENGTH
        mismatch = "B" * const.HMAC_LENGTH
        # A differing HMAC must not validate.
        self.failIf(util.isValidHMAC(reference, mismatch) == True)
        # An identical HMAC must validate.
        self.failIf(util.isValidHMAC(reference, reference) == False)

    def test2_locateMarker( self ):
        """locateMarker() finds a marker only when a full HMAC follows it."""
        # No marker present in the payload -> nothing is found.
        self.failIf(util.locateMarker("D", "ABC") != None)
        fakeHMAC = "X" * const.HMAC_LENGTH
        fakeMarker = "A" * const.MARKER_LENGTH
        blob = fakeMarker + fakeHMAC
        # Marker followed by a complete HMAC is located ...
        self.failIf(util.locateMarker(fakeMarker, blob) == None)
        # ... but truncating the HMAC by one byte makes the lookup fail.
        self.failIf(util.locateMarker(fakeMarker, blob[:-1]) != None)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import util
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
def callback( masterKey ):
self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
class UtilTest( unittest.TestCase ):
def test1_isValidHMAC( self ):
self.failIf(util.isValidHMAC("A" * const.HMAC_LENGTH,
"B" * const.HMAC_LENGTH) == True)
self.failIf(util.isValidHMAC("A" * const.HMAC_LENGTH,
"A" * const.HMAC_LENGTH) == False)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
5935d03339770fd2359767f47c761d3b7bfd59d1 | Support some very basic features | Xion/unmatcher | unmatcher.py | unmatcher.py | """
unmatcher :: Regular expression reverser for Python
"""
__version__ = "0.0.1"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import random
import re
import string
__all__ = ['reverse']
def reverse(pattern, groups=None, **kwargs):
    """Return a random string that matches ``pattern``.

    :param pattern: regular expression, given either as a string or as a
        compiled regex object (its ``.pattern`` attribute is used).
    :param groups: optional mapping of capture-group values.
        NOTE(review): verify that ``Reversal`` actually consumes the
        ``groups`` keyword -- its constructor has read ``groupdict`` in
        some revisions, which would silently drop this argument.
    """
    if not isinstance(pattern, basestring):
        pattern = pattern.pattern  # assuming regex object
    # Undocumented CPython internal: sre_parse turns the pattern into an
    # AST of (opcode, operand) pairs, which Reversal walks.
    regex_ast = re.sre_parse.parse(pattern).data
    groups = groups or {}
    reversal = Reversal(regex_ast, groups=groups, **kwargs)
    return reversal.perform()
# Implementation
class Reversal(object):
BUILTIN_CHARSETS = {
'any': string.printable, # matches . (dot)
'word': string.ascii_letters,
'digit': string.digits,
'space': string.whitespace,
}
MAX_REPEAT = 64
def __init__(self, regex_ast, **kwargs):
self.regex_ast = regex_ast
self.groupdict = kwargs.get('groupdict', {}) # NYI
def perform(self):
# TODO: preserve unicodeness/ANSIness in Python 2
return ''.join(map(self._reverse_node, self.regex_ast))
def _reverse_node(self, (type_, data)):
"""Generates string matching given node from regular expression AST."""
if type_ == 'literal':
return chr(data)
if type_ == 'any':
return random.choice(self.BUILTIN_CHARSETS['any'])
if type_ == 'in':
return self._reverse_charset_node(data)
if type_ in ('min_repeat', 'max_repeat'):
return self._reverse_repeat_node(data)
if type_ == 'at':
return '' # match-beginning (^) or match-end ($);
# irrelevant for string generation
# TODO: add support for the rest of regex syntax elements
raise ValueError("unsupported regular expression element: %s" % type_)
def _reverse_charset_node(self, node_data):
"""Generates string matching 'in' node from regular expr. AST.
This node matches a set of characters:
* a built-in one (``\w``, ``\d``, etc.),
* an ad-hoc one (``[a-z]``, ``[123abc]``, etc.),
* or a combination of those (``[a-z\d])``, etc.)
"""
chosen = random.choice(node_data)
type_, data = chosen
# range (e.g. a-z) inside [ ]
if type_ == 'range':
# TODO: add support for negation: [^...]
min_char, max_char = data
return chr(random.randint(min_char, max_char))
# built-in character set: \d, \w, etc.
if type_ == 'category':
return self._reverse_builtin_charset_node(data)
raise Exception("unexpected charset node: %s" % type_)
def _reverse_builtin_charset_node(self, node_data):
"""Generates string matching 'category' node from regular expr. AST.
This node matches a built-in set of characters, like ``\d`` or ``\w``.
"""
_, type_ = node_data.rsplit('_', 1) # category(_not)?_(digit|word|etc)
negate = '_not_' in node_data
charset = self.BUILTIN_CHARSETS[type_]
if negate:
charset = list(set(self.BUILTIN_CHARSETS['any']) - set(charset))
return random.choice(charset)
def _reverse_repeat_node(self, node_data):
"""Generates string matching 'min_repeat' or 'max_repeat' node
from regular expression AST.
This node matches a repetition of pattern matched by its child node.
"""
# TODO: make sure if ``[what]`` is always a 1-element list
min_count, max_count, [what] = node_data
max_count = min(max_count, self.MAX_REPEAT)
count = random.randint(min_count, max_count)
return ''.join(self._reverse_node(what) for _ in xrange(count))
| """
unmatcher :: Regular expression reverser for Python
"""
__version__ = "0.0.1"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import random
import re
import string
__all__ = ['reverse']
def reverse(pattern, groups=None, **kwargs):
if not isinstance(pattern, basestring):
pattern = pattern.pattern # assuming regex object
regex_ast = re.sre_parse.parse(pattern).data
groups = groups or {}
reversal = Reversal(regex_ast, groups=groups, **kwargs)
return reversal.perform()
# Implementation
class Reversal(object):
def __init__(self, regex_ast, **kwargs):
self.regex_ast = regex_ast
self.capture_groups = kwargs.get('groups', {}) # NYI
def perform(self):
# TODO: preserve unicodeness/ANSIness in Python 2
return ''.join(map(self._reverse_node, self.regex_ast))
def _reverse_node(self, (type_, data)):
if type_ == 'literal':
return chr(data)
raise ValueError("unsupported regular expression element: %s" % type_)
| bsd-2-clause | Python |
b6a4349a0f3f9c66a52eabb540b778ec3975e96a | Split imports | matteobachetti/srt-single-dish-tools | srttools/tests/test_import.py | srttools/tests/test_import.py | from astropy.table import Table
def test_import_scan():
    """A default-constructed Scan behaves as an astropy Table."""
    # Imported inside the test so an import failure surfaces per-test.
    from srttools import Scan
    s = Scan()
    assert isinstance(s, Table)

def test_import_scanset():
    """A default-constructed ScanSet behaves as an astropy Table."""
    from srttools import ScanSet
    s = ScanSet()
    assert isinstance(s, Table)

def test_import_calibratortable():
    """A default-constructed CalibratorTable behaves as an astropy Table."""
    from srttools import CalibratorTable
    s = CalibratorTable()
    assert isinstance(s, Table)
| from srttools import Scan, ScanSet, CalibratorTable
from astropy.table import Table
def test_import_scan():
s = Scan()
assert isinstance(s, Table)
def test_import_scanset():
s = ScanSet()
assert isinstance(s, Table)
def test_import_calibratortable():
s = CalibratorTable()
assert isinstance(s, Table)
| bsd-3-clause | Python |
97f7e1f04be413e5cffaaa7c718188ceae672c05 | reduce ldap calls | tjcsl/director,tjcsl/director,tjcsl/director,tjcsl/director | web3/apps/users/forms.py | web3/apps/users/forms.py | from django import forms
from django.core.validators import EmailValidator
from .models import User, Group
from ...utils.tjldap import get_uid
class UserForm(forms.ModelForm):
    """Admin form for creating/editing :class:`User` accounts.

    The username is resolved against TJ's LDAP directory: on creation the
    LDAP uid becomes the primary key and a personal :class:`Group` is
    created for the user.  On edit, the username is immutable, which also
    avoids redundant LDAP round trips.
    """

    username = forms.CharField(max_length=32,
                               widget=forms.TextInput(attrs={"class": "form-control"}))
    email = forms.CharField(max_length=100,
                            widget=forms.TextInput(attrs={"class": "form-control"}),
                            validators=[EmailValidator])
    is_superuser = forms.BooleanField(required=False,
                                      label="Superuser Account",
                                      widget=forms.CheckboxInput(attrs={"class": "custom-control-input"}))

    def __init__(self, *args, **kwargs):
        super(UserForm, self).__init__(*args, **kwargs)
        instance = getattr(self, "instance", None)
        # Existing users keep their username -- it is tied to the LDAP uid.
        if instance and instance.pk:
            self.fields["username"].disabled = True

    def clean_username(self):
        """Validate that the username exists in LDAP (new users only)."""
        data = self.cleaned_data["username"].strip()
        if self.instance.pk:
            # Editing an existing user: the field is disabled, so skip the
            # LDAP lookup entirely.
            return data
        try:
            # Only the existence check matters here; save() resolves the
            # uid again when the instance is actually created.
            get_uid(data)
        except IndexError:
            raise forms.ValidationError("Username is not a valid TJ username!")
        return data

    def save(self, commit=True):
        """Persist the user.

        New accounts get the LDAP uid as primary key; every saved user is
        guaranteed a personal group mirroring the username.
        """
        instance = forms.ModelForm.save(self, commit=False)
        if not self.instance.pk:
            # Only a brand-new account needs the LDAP uid lookup.
            instance.id = get_uid(instance.username)
            instance.service = False
        instance.is_active = True
        # Superusers are implicitly staff/admin as well.
        instance.is_staff = instance.is_superuser
        instance.is_admin = instance.is_superuser
        if commit:
            instance.save()
            if not Group.objects.filter(id=instance.id).exists():
                group = Group.objects.create(id=instance.id, service=instance.service, name=instance.username)
                group.users.add(instance.pk)
                group.save()
        return instance

    class Meta:
        model = User
        fields = ["username", "email", "is_superuser"]
| from django import forms
from django.core.validators import EmailValidator
from .models import User, Group
from ...utils.tjldap import get_uid
class UserForm(forms.ModelForm):
username = forms.CharField(max_length=32,
widget=forms.TextInput(attrs={"class": "form-control"}))
email = forms.CharField(max_length=100,
widget=forms.TextInput(attrs={"class": "form-control"}),
validators=[EmailValidator])
is_superuser = forms.BooleanField(required=False,
label="Superuser Account",
widget=forms.CheckboxInput(attrs={"class": "custom-control-input"}))
def __init__(self, *args, **kwargs):
super(UserForm, self).__init__(*args, **kwargs)
instance = getattr(self, "instance", None)
if instance and instance.pk:
self.fields["username"].disabled = True
def clean_username(self):
data = self.cleaned_data["username"].strip()
try:
uid = get_uid(data)
except IndexError:
raise forms.ValidationError("Username is not a valid TJ username!")
return data
def save(self, commit=True):
instance = forms.ModelForm.save(self, commit=False)
instance.id = get_uid(instance.username)
instance.service = False
instance.is_active = True
instance.is_staff = instance.is_superuser
instance.is_admin = instance.is_superuser
if commit:
instance.save()
if not Group.objects.filter(id=instance.id).exists():
group = Group.objects.create(id=instance.id, service=instance.service, name=instance.username)
group.users.add(instance.pk)
group.save()
return instance
class Meta:
model = User
fields = ["username", "email", "is_superuser"]
| mit | Python |
17d0bd407a738dc032621fc000f33674d0f6613e | add property `id` | wemoo/wemoo-center,wemoo/wemoo-center,wemoo/wemoo-center | app/models/task.py | app/models/task.py | # coding: utf8
import datetime
from config import environment
db = environment.mdb
class Task(db.Document):
    """MongoDB document describing a unit of executable work.

    ``script`` holds the code to run; ``result`` and ``finished`` record
    the outcome once it has been executed.
    """

    # Task runs a single time.
    TYPE_ONCE = 1
    # Task runs continuously / repeatedly.
    TYPE_CONTINUES = 2

    title = db.StringField(required=True, max_length=100)
    # One of TYPE_ONCE / TYPE_CONTINUES.
    task_type = db.IntField(required=True)
    desc = db.StringField(required=True, max_length=1000)
    # The script/command text to execute.
    script = db.StringField(required=True)
    # Execution output; stays None until the task has produced a result.
    result = db.StringField(default=None)
    finished = db.BooleanField(default=False)
    created_at = db.DateTimeField(default=datetime.datetime.now)
    updated_at = db.DateTimeField(default=datetime.datetime.now)

    def to_dict(self):
        """Return a JSON-serializable representation of the task.

        NOTE(review): ``finished`` is not included -- confirm whether the
        API consumers need it.
        """
        return {
            'id': str(self.id),
            'title': self.title,
            'task_type': self.task_type,
            'desc': self.desc,
            'script': self.script,
            'result': self.result,
            'created_at': str(self.created_at),
            'updated_at': str(self.updated_at)
        }
| # coding: utf8
import datetime
from config import environment
db = environment.mdb
class Task(db.Document):
TYPE_ONCE = 1
TYPE_CONTINUES = 2
title = db.StringField(required=True, max_length=100)
task_type = db.IntField(required=True)
desc = db.StringField(required=True, max_length=1000)
script = db.StringField(required=True)
result = db.StringField(default=None)
finished = db.BooleanField(default=False)
created_at = db.DateTimeField(default=datetime.datetime.now)
updated_at = db.DateTimeField(default=datetime.datetime.now)
def to_dict(self):
return {
'title': self.title,
'task_type': self.task_type,
'desc': self.desc,
'script': self.script,
'result': self.result,
'created_at': str(self.created_at),
'updated_at': str(self.updated_at)
}
| mit | Python |
532c201053ae271544270035423f690b4774794a | Fix multiselect user/group field when retrieving results from a report | Swimlane/sw-python-client | swimlane/core/fields/usergroup.py | swimlane/core/fields/usergroup.py | from .base import MultiSelectField
from swimlane.core.resources.usergroup import UserGroup
class UserGroupField(MultiSelectField):
    """Manages getting/setting users from record User/Group fields"""

    field_type = 'Core.Models.Fields.UserGroupField, Core'
    supported_types = [UserGroup]

    def set_swimlane(self, value):
        """Workaround for reports returning an empty usergroup field as a single element list with no id/name"""
        # Reports encode "no selection" as one placeholder element carrying
        # only the $type marker; normalize that to an empty list before
        # handing off to the generic multi-select handling.
        if value == [{"$type": "Core.Models.Utilities.UserGroupSelection, Core"}]:
            value = []
        return super(UserGroupField, self).set_swimlane(value)

    def cast_to_python(self, value):
        """Convert JSON definition to UserGroup object"""
        # v2.x does not provide a distinction between users and groups at the field selection level, can only return
        # UserGroup instances instead of specific User or Group instances
        if value is not None:
            value = UserGroup(self.record._swimlane, value)
        return value

    def cast_to_swimlane(self, value):
        """Dump UserGroup back to JSON representation"""
        if value is not None:
            value = value.get_usergroup_selection()
        return value
| from .base import MultiSelectField
from swimlane.core.resources.usergroup import UserGroup
class UserGroupField(MultiSelectField):
"""Manages getting/setting users from record User/Group fields"""
field_type = 'Core.Models.Fields.UserGroupField, Core'
supported_types = [UserGroup]
def cast_to_python(self, value):
"""Convert JSON definition to UserGroup object"""
# v2.x does not provide a distinction between users and groups at the field selection level, can only return
# UserGroup instances instead of specific User or Group instances
if value is not None:
value = UserGroup(self.record._swimlane, value)
return value
def cast_to_swimlane(self, value):
"""Dump UserGroup back to JSON representation"""
if value is not None:
value = value.get_usergroup_selection()
return value
| mit | Python |
69ec68bcf5fed95f85d5b4a3ac1fc155cb26175d | create background thread task | duncan60/flask-simple-demo,duncan60/flask-simple-demo,duncan60/flask-simple-demo | application/socket/simple.py | application/socket/simple.py | # -*- coding: utf-8 -*-
from flask import request
from application import app, api, socketio
from flask_socketio import emit, disconnect
# Handle of the single background counter task; created lazily on the
# first client connection (see test_connect below).
thread = None

def background_thread():
    """Emit a monotonically increasing counter to the '/test' namespace
    every two seconds, forever."""
    count = 0
    while True:
        socketio.sleep(2)
        count += 1
        socketio.emit('serverResponse',
                      {'data': 'Server count:{0}'.format(count)},
                      namespace='/test')

@socketio.on('clientEvent', namespace='/test')
def test_message(message):
    """Echo the client's message back, wrapped in a server-side envelope."""
    emit('serverResponse',
         {'data': 'server msg: {0} !!!'.format(message['data'])})

@socketio.on('disconnectRequest', namespace='/test')
def disconnect_request():
    """Acknowledge a client-initiated disconnect, then drop the session."""
    emit('serverResponse',
         {'data': 'Disconnected!'})
    disconnect()

@socketio.on('connect', namespace='/test')
def test_connect():
    """Start the shared background task (at most once) and greet the client."""
    global thread
    # NOTE(review): this check-then-start is not atomic; two near-
    # simultaneous connects could start two tasks -- confirm the server's
    # concurrency model rules that out.
    if thread is None:
        thread = socketio.start_background_task(target=background_thread)
    emit('serverResponse', {'data': 'Connected'})

@socketio.on('disconnect', namespace='/test')
def test_disconnect():
    # Log which session dropped; no cleanup is required here.
    print('Client disconnected:', request.sid)
| # -*- coding: utf-8 -*-
from flask import request
from application import app, api, socketio
from flask_socketio import emit, disconnect
@socketio.on('clientEvent', namespace='/test')
def test_message(message):
emit('serverResponse',
{'data': 'server msg: {0} !!!'.format(message['data'])})
@socketio.on('disconnectRequest', namespace='/test')
def disconnect_request():
emit('serverResponse',
{'data': 'Disconnected!'})
disconnect()
@socketio.on('connect', namespace='/test')
def test_connect():
emit('serverResponse', {'data': 'Connected'})
@socketio.on('disconnect', namespace='/test')
def test_disconnect():
print('Client disconnected:', request.sid)
| mit | Python |
2d3e481360a0564163c0b004c8bfaf9b4fb645c1 | Make modules uninstallable | Domatix/l10n-spain,factorlibre/l10n-spain,factorlibre/l10n-spain,factorlibre/l10n-spain | l10n_es_account_asset/__openerp__.py | l10n_es_account_asset/__openerp__.py | # -*- coding: utf-8 -*-
# © 2012-2015 Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Gestión de activos fijos para España",
"version": "8.0.2.0.0",
"depends": ["account_asset"],
"author": "Serv. Tecnol. Avanzados - Pedro M. Baeza, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"website": "http://www.serviciosbaeza.com",
"category": "Accounting & Finance",
"data": [
"views/account_asset_view.xml",
],
'installable': False,
}
| # -*- coding: utf-8 -*-
# © 2012-2015 Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Gestión de activos fijos para España",
"version": "8.0.2.0.0",
"depends": ["account_asset"],
"author": "Serv. Tecnol. Avanzados - Pedro M. Baeza, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"website": "http://www.serviciosbaeza.com",
"category": "Accounting & Finance",
"data": [
"views/account_asset_view.xml",
],
"installable": True,
}
| agpl-3.0 | Python |
67c7233cdf4893eb4302f297e12dfa53886d3523 | Add a config check for missing Thrift server or misconfiguration | cloudera/hue,kawamon/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,todaychi/hue,Peddle/hue,kawamon/hue,jayceyxc/hue,cloudera/hue,MobinRanjbar/hue,lumig242/Hue-Integration-with-CDAP,xq262144/hue,vmax-feihu/hue,todaychi/hue,cloudera/hue,kawamon/hue,jayceyxc/hue,fangxingli/hue,vmax-feihu/hue,fangxingli/hue,vmax-feihu/hue,jjmleiro/hue,todaychi/hue,xq262144/hue,kawamon/hue,MobinRanjbar/hue,xq262144/hue,cloudera/hue,kawamon/hue,vmax-feihu/hue,cloudera/hue,vmax-feihu/hue,fangxingli/hue,lumig242/Hue-Integration-with-CDAP,jjmleiro/hue,fangxingli/hue,lumig242/Hue-Integration-with-CDAP,cloudera/hue,xq262144/hue,kawamon/hue,Peddle/hue,fangxingli/hue,kawamon/hue,MobinRanjbar/hue,cloudera/hue,cloudera/hue,jjmleiro/hue,jayceyxc/hue,jayceyxc/hue,MobinRanjbar/hue,xq262144/hue,kawamon/hue,kawamon/hue,cloudera/hue,jjmleiro/hue,cloudera/hue,jjmleiro/hue,kawamon/hue,todaychi/hue,vmax-feihu/hue,xq262144/hue,Peddle/hue,jayceyxc/hue,jayceyxc/hue,kawamon/hue,fangxingli/hue,todaychi/hue,xq262144/hue,cloudera/hue,todaychi/hue,Peddle/hue,kawamon/hue,fangxingli/hue,kawamon/hue,vmax-feihu/hue,jayceyxc/hue,kawamon/hue,cloudera/hue,jayceyxc/hue,cloudera/hue,Peddle/hue,todaychi/hue,kawamon/hue,MobinRanjbar/hue,lumig242/Hue-Integration-with-CDAP,Peddle/hue,MobinRanjbar/hue,fangxingli/hue,jjmleiro/hue,cloudera/hue,cloudera/hue,jjmleiro/hue,MobinRanjbar/hue,xq262144/hue,jjmleiro/hue,todaychi/hue,lumig242/Hue-Integration-with-CDAP,todaychi/hue,jayceyxc/hue,xq262144/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,Peddle/hue,Peddle/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,cloudera/hue,jjmleiro/hue,kawamon/hue,vmax-feihu/hue,MobinRanjbar/hue,Peddle/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue | apps/hbase/src/hbase/conf.py | apps/hbase/src/hbase/conf.py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
from django.utils.translation import ugettext_lazy as _t, ugettext as _
from desktop.lib.conf import Config, validate_thrift_transport, coerce_bool
from desktop.lib.exceptions import StructuredThriftTransportException
LOG = logging.getLogger(__name__)
HBASE_CLUSTERS = Config(
key="hbase_clusters",
default="(Cluster|localhost:9090)",
help=_t("Comma-separated list of HBase Thrift servers for clusters in the format of '(name|host:port)'. Use full hostname with security."
"Prefix hostname with https:// if using SSL and http mode with impersonation."),
type=str
)
TRUNCATE_LIMIT = Config(
key="truncate_limit",
default="500",
help=_t("Hard limit of rows or columns per row fetched before truncating."),
type=int
)
THRIFT_TRANSPORT = Config(
key="thrift_transport",
default="buffered",
help=_t("'buffered' is the default of the HBase Thrift Server and supports security. " +
"'framed' can be used to chunk up responses, " +
"which is useful when used in conjunction with the nonblocking server in Thrift."),
type=str
)
HBASE_CONF_DIR = Config(
key='hbase_conf_dir',
help=_t('HBase configuration directory, where hbase-site.xml is located.'),
default=os.environ.get("HBASE_CONF_DIR", '/etc/hbase/conf')
)
# Hidden, just for making patching of older version of Hue easier. To remove in Hue 4.
USE_DOAS = Config(
key='use_doas',
help=_t('Force Hue to use Http Thrift mode with doas impersonation, regarless of hbase-site.xml properties.'),
default=False,
type=coerce_bool
)
def config_validator(user):
res = []
from hbase.api import HbaseApi
from hbase.settings import NICE_NAME
try:
if not 'test' in sys.argv: # Avoid tests hanging
api = HbaseApi(user=user)
cluster_name = api.getClusters()[0]['name'] # Currently pick first configured cluster
# Check connectivity
api.connectCluster(cluster_name)
api.getTableList(cluster_name)
except Exception, e:
print e
if 'Could not connect' in str(e):
msg = "The application won't work without a running HBase Thrift Server v1."
else:
msg = 'Failed to authenticate to HBase Thrift Server, check authentication configurations.'
LOG.exception(msg)
res.append((NICE_NAME, _(msg)))
res.extend(validate_thrift_transport(THRIFT_TRANSPORT))
return res
| #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from django.utils.translation import ugettext_lazy as _t
from desktop.lib.conf import Config, validate_thrift_transport, coerce_str_lowercase, coerce_bool
HBASE_CLUSTERS = Config(
key="hbase_clusters",
default="(Cluster|localhost:9090)",
help=_t("Comma-separated list of HBase Thrift servers for clusters in the format of '(name|host:port)'. Use full hostname with security."
"Prefix hostname with https:// if using SSL and http mode with impersonation."),
type=str
)
TRUNCATE_LIMIT = Config(
key="truncate_limit",
default="500",
help=_t("Hard limit of rows or columns per row fetched before truncating."),
type=int
)
THRIFT_TRANSPORT = Config(
key="thrift_transport",
default="buffered",
help=_t("'buffered' is the default of the HBase Thrift Server and supports security. " +
"'framed' can be used to chunk up responses, " +
"which is useful when used in conjunction with the nonblocking server in Thrift."),
type=str
)
HBASE_CONF_DIR = Config(
key='hbase_conf_dir',
help=_t('HBase configuration directory, where hbase-site.xml is located.'),
default=os.environ.get("HBASE_CONF_DIR", '/etc/hbase/conf')
)
# Hidden, just for making patching of older version of Hue easier. To remove in Hue 4.
USE_DOAS = Config(
key='use_doas',
help=_t('Force Hue to use Http Thrift mode with doas impersonation, regarless of hbase-site.xml properties.'),
default=False,
type=coerce_bool
)
def config_validator(user):
res = []
res.extend(validate_thrift_transport(THRIFT_TRANSPORT))
return res
| apache-2.0 | Python |
7ad9813e3214108cf68e00db6b10f88501103b92 | Fix middleware to not hardcode mozillians | akatsoulas/mozillians,hoosteeno/mozillians,chirilo/mozillians,hoosteeno/mozillians,mozilla/mozillians,satdav/mozillians,glogiotatidis/mozillians-new,brian-yang/mozillians,fxa90id/mozillians,akarki15/mozillians,glogiotatidis/mozillians-new,justinpotts/mozillians,safwanrahman/mozillians,justinpotts/mozillians,mozilla/mozillians,satdav/mozillians,anistark/mozillians,akatsoulas/mozillians,anistark/mozillians,johngian/mozillians,fxa90id/mozillians,safwanrahman/mozillians,johngian/mozillians,chirilo/mozillians,hoosteeno/mozillians,glogiotatidis/mozillians-new,akatsoulas/mozillians,hoosteeno/mozillians,ChristineLaMuse/mozillians,safwanrahman/mozillians,ChristineLaMuse/mozillians,glogiotatidis/mozillians-new,chirilo/mozillians,johngian/mozillians,fxa90id/mozillians,anistark/mozillians,anistark/mozillians,akarki15/mozillians,akarki15/mozillians,akarki15/mozillians,chirilo/mozillians,akatsoulas/mozillians,justinpotts/mozillians,safwanrahman/mozillians,justinpotts/mozillians,brian-yang/mozillians,johngian/mozillians,mozilla/mozillians,ChristineLaMuse/mozillians,fxa90id/mozillians,satdav/mozillians,brian-yang/mozillians,mozilla/mozillians,satdav/mozillians,brian-yang/mozillians | apps/phonebook/middleware.py | apps/phonebook/middleware.py | import os
from django.http import (HttpResponseForbidden, HttpResponseNotAllowed,
HttpResponseRedirect)
import commonware.log
from funfactory.manage import ROOT
from funfactory.urlresolvers import reverse
# TODO: this is hackish. Once we update mozillians to the newest playdoh layout
error_page = __import__(os.path.basename(ROOT)).urls.error_page
log = commonware.log.getLogger('m.phonebook')
class PermissionDeniedMiddleware(object):
    """Add a generic 40x "not allowed" handler.

    TODO: Currently uses the 500.html error template, but in the future should
    display a more tailored-to-the-actual-error "not allowed" page."""

    def process_response(self, request, response):
        # Only rewrite explicit 403/405 responses; everything else passes
        # through untouched.
        if isinstance(response, (HttpResponseForbidden,
                                 HttpResponseNotAllowed)):
            if request.user.is_authenticated():
                log.debug('Permission denied middleware, user was '
                          'authenticated, sending 500')
                # Authenticated users get the styled error page while the
                # original 40x status code is preserved.
                return error_page(request, 500, status=response.status_code)
            else:
                if isinstance(response, (HttpResponseForbidden)):
                    log.debug('Response was forbidden')
                elif isinstance(response, (HttpResponseNotAllowed)):
                    log.debug('Response was not allowed')
                # Anonymous users are redirected to the landing page instead.
                log.debug('Permission denied middleware, redirecting home')
                return HttpResponseRedirect(reverse('home'))
        return response
| from django.http import (HttpResponseForbidden, HttpResponseNotAllowed,
HttpResponseRedirect)
import commonware.log
from funfactory.urlresolvers import reverse
from mozillians.urls import error_page
log = commonware.log.getLogger('m.phonebook')
class PermissionDeniedMiddleware(object):
"""Add a generic 40x "not allowed" handler.
TODO: Currently uses the 500.html error template, but in the future should
display a more tailored-to-the-actual-error "not allowed" page."""
def process_response(self, request, response):
if isinstance(response, (HttpResponseForbidden,
HttpResponseNotAllowed)):
if request.user.is_authenticated():
log.debug('Permission denied middleware, user was '
'authenticated, sending 500')
return error_page(request, 500, status=response.status_code)
else:
if isinstance(response, (HttpResponseForbidden)):
log.debug('Response was forbidden')
elif isinstance(response, (HttpResponseNotAllowed)):
log.debug('Response was not allowed')
log.debug('Permission denied middleware, redirecting home')
return HttpResponseRedirect(reverse('home'))
return response
| bsd-3-clause | Python |
22f1e025111d0ebe3f8bb032f8f078322bf94386 | remove an unused widget | armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband | armstrong/hatband/options.py | armstrong/hatband/options.py | from django.contrib import admin
from django.contrib.admin.options import InlineModelAdmin
from django.db import models
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from . import widgets
from .utils import static_url
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class ModelAdmin(admin.ModelAdmin):
    """Stock ModelAdmin with rich-text editing enabled for TextFields."""
    formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES

class StackedInline(admin.StackedInline):
    """Stacked inline with rich-text editing enabled for TextFields."""
    formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES

class TabularInline(admin.TabularInline):
    """Tabular inline with rich-text editing enabled for TextFields."""
    formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
class GenericKeyWidget(forms.Widget):
    """Widget rendering a generic-foreign-key lookup backed by the
    VisualSearch JS component."""

    template = "admin/hatband/widgets/generickey.html"

    class Media:
        # Static assets required by the VisualSearch-powered lookup UI.
        js = (static_url("visualsearch/dependencies.js"),
              static_url("visualsearch/visualsearch.js"),
              static_url("generickey.js"),
              )
        css = {
            "all": (static_url("visualsearch/visualsearch.css"), ),
        }

    def __init__(self, *args, **kwargs):
        # NOTE(review): pure pass-through override with no extra behavior --
        # it can be removed unless it is being kept as an extension point.
        super(GenericKeyWidget, self).__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        """Render the widget via its template.

        ``is_templated`` flags the empty-form template row of an inline
        formset (its id contains the ``__prefix__`` placeholder), so the
        JS knows not to initialize it until the row is cloned.
        """
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, name=name)
        final_attrs["value"] = value
        final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1
        return render_to_string(self.template, final_attrs)
class OrderableGenericKeyLookupForm(forms.ModelForm):
    """ModelForm wiring GenericKeyWidget into inline rows.

    The content type and ordering fields are driven by the lookup JS, so
    they stay hidden in the rendered form.
    """

    class Meta:
        widgets = {
            "content_type": forms.HiddenInput(),
            "object_id": GenericKeyWidget(),
            "order": forms.HiddenInput(),
        }

class GenericKeyInline(InlineModelAdmin):
    """Inline admin for models related through a generic foreign key."""
    form = OrderableGenericKeyLookupForm
    formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
    template = "admin/edit_inline/generickey.html"
| from django.contrib import admin
from django.contrib.admin.options import InlineModelAdmin
from django.db import models
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from . import widgets
from .utils import static_url
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class ModelAdmin(admin.ModelAdmin):
formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
class StackedInline(admin.StackedInline):
formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
class TabularInline(admin.TabularInline):
formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
class GenericKeyWidget(forms.Widget):
template = "admin/hatband/widgets/generickey.html"
class Media:
js = (static_url("visualsearch/dependencies.js"),
static_url("visualsearch/visualsearch.js"),
static_url("generickey.js"),
)
css = {
"all": (static_url("visualsearch/visualsearch.css"), ),
}
def __init__(self, *args, **kwargs):
super(GenericKeyWidget, self).__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
final_attrs["value"] = value
final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1
return render_to_string(self.template, final_attrs)
class DeletionWidget(forms.CheckboxInput):
pass
class OrderableGenericKeyLookupForm(forms.ModelForm):
class Meta:
widgets = {
"content_type": forms.HiddenInput(),
"object_id": GenericKeyWidget(),
"order": forms.HiddenInput(),
}
class GenericKeyInline(InlineModelAdmin):
form = OrderableGenericKeyLookupForm
formfield_overrides = RICH_TEXT_DBFIELD_OVERRIDES
template = "admin/edit_inline/generickey.html"
| apache-2.0 | Python |
ca30b673833a4d0c1fc6204e211e8ac7499b590a | bump version | vmalloc/backslash-python,slash-testing/backslash-python | backslash/__version__.py | backslash/__version__.py | __version__ = "2.0.2"
| __version__ = "2.0.1"
| bsd-3-clause | Python |
8a36c7d83c2421a50d24cc51823bd474578cd768 | update dev version after 0.15.0 tag [skip ci] | desihub/desimodel,desihub/desimodel | py/desimodel/_version.py | py/desimodel/_version.py | __version__ = '0.15.0.dev651'
| __version__ = '0.15.0'
| bsd-3-clause | Python |
c058db50ff489fb49b16b055649b2d8a3b0e6d4c | Revert import changes | wind-python/windpowerlib | windpowerlib/__init__.py | windpowerlib/__init__.py | __copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from windpowerlib.wind_turbine import WindTurbine
from windpowerlib.wind_farm import WindFarm
from windpowerlib.wind_turbine_cluster import WindTurbineCluster
from windpowerlib.modelchain import ModelChain
from windpowerlib.turbine_cluster_modelchain import TurbineClusterModelChain
from windpowerlib.wind_turbine import get_turbine_types
| __copyright__ = "Copyright oemof developer group"
__license__ = "MIT"
__version__ = "0.2.1dev"
from .wind_turbine import WindTurbine
from .wind_farm import WindFarm
from .wind_turbine_cluster import WindTurbineCluster
from .modelchain import ModelChain
from .turbine_cluster_modelchain import TurbineClusterModelChain
from .wind_turbine import get_turbine_types
| mit | Python |
c79182da0690b63cef23809b30013c44a1b5999a | Add ListElement | Cretezy/pymessenger2,karlinnolabs/pymessenger | pymessenger2/__init__.py | pymessenger2/__init__.py | from .bot import Bot
from .buttons import *
@attr.s
class Element:
title = attr.ib()
item_url = attr.ib(default=None)
image_url = attr.ib(default=None)
subtitle = attr.ib(default=None)
buttons = attr.ib(default=None)
@attr.s
class QuickReply:
"""
See https://developers.facebook.com/docs/messenger-platform/send-api-reference/quick-replies
You may not give the payload and it'll be set to your title automatically.
"""
content_type = attr.ib()
title = attr.ib(default=None)
payload = attr.ib(default=None)
image_url = attr.ib(default=None)
def __attrs_post_init__(self):
assert self.content_type in {'text', 'location'}
assert self.content_type == 'location' or self.title
if not self.payload:
self.payload = self.title
@attr.s
class ListElement:
"""
See https://developers.facebook.com/docs/messenger-platform/send-api-reference/list-template
"""
title = attr.ib()
subtitle = attr.ib(default=None)
image_url = attr.ib(default=None)
default_action = attr.ib(default=None)
buttons = attr.ib(default=None) # Only one button allowed though
| from .bot import Bot
from .buttons import *
@attr.s
class Element:
title = attr.ib()
item_url = attr.ib(default=None)
image_url = attr.ib(default=None)
subtitle = attr.ib(default=None)
buttons = attr.ib(default=None)
@attr.s
class QuickReply:
"""
See https://developers.facebook.com/docs/messenger-platform/send-api-reference/quick-replies
You may not give the payload and it'll be set to your title automatically.
"""
content_type = attr.ib()
title = attr.ib(default=None)
payload = attr.ib(default=None)
image_url = attr.ib(default=None)
def __attrs_post_init__(self):
assert self.content_type in {'text', 'location'}
assert self.content_type == 'location' or self.title
if not self.payload:
self.payload = self.title
| mit | Python |
b407b48f1d5bb2698bf7402addc4f1ebd5f2773c | Test plot title and axis labels | DanielAndreasen/SWEETer-Cat,DanielAndreasen/SWEETer-Cat | sweetercat/tests/test_plot.py | sweetercat/tests/test_plot.py | """SWEETer-Cat tests regarding the plotting pages."""
import pytest
from flask import url_for
def test_plot_get_requests(client):
"""Test that all pages return status code: 200 using the end_points"""
for end_point in ('plot', 'plot_exo'):
plot = client.get(url_for(end_point))
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
def test_plot_post_request(client):
test_data = {'color': 'Blue', 'x': 'teff', 'y': 'mass', 'z': 'Vmag',
'x1': 8000, 'x2': 2500, 'y1': 0, 'y2': 5,
'xscale': 'linear', 'yscale': 'log', 'checkboxes': ''}
for end_point in ('plot',): # 'plot_exo'
plot = client.post(url_for(end_point), data=test_data, follow_redirects=True)
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
def test_post_z_none_43(client):
"""Test that setting z to None does not produce an error."""
test_data = {"color": "Blue", "x": "teff", "y": "mass", "z": "None",
"x1": 8000, "x2": 2500, "y1": 0, "y2": 5,
"xscale": "linear", "yscale": "log", "checkboxes": ""}
for end_point in ("plot", "plot_exo"):
plot = client.post(url_for(end_point), data=test_data, follow_redirects=True)
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
def test_title_and_axis_labels(client):
for xname, yname in zip(("teff", "vt", "par"), ("mass", "Vabs", "logg")):
# test_data = {'color': 'Blue', 'x': xname, 'y': yname, 'z': 'Vmag',
# 'x1': "None", 'x2': "None", 'y1': "None", 'y2': "None",
# 'xscale': 'linear', 'yscale': 'log', 'checkboxes': ''}
print(xname, yname)
test_data = {'color': 'Blue', 'x': xname, 'y': yname, 'z': 'Vmag',
'x1': "", 'x2': "", 'y1': "", 'y2': "",
'xscale': 'linear', 'yscale': 'log', 'checkboxes': ''}
title = '"text":"{0} vs. {1}:'.format(xname, yname)
xlabel = '"axis_label":"{0}"'.format(xname)
ylabel = '"axis_label":"{0}"'.format(yname)
for end_point in ('plot',): # 'plot_exo'
plot = client.post(url_for(end_point), data=test_data, follow_redirects=True)
print(plot)
for feature in [title, xlabel, ylabel]:
assert feature.encode("utf-8") in plot.data
| """SWEETer-Cat tests regarding the plotting pages."""
import pytest
from flask import url_for
def test_plot_get_requests(client):
"""Test that all pages return status code: 200 using the end_points"""
for end_point in ('plot', 'plot_exo'):
plot = client.get(url_for(end_point))
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
def test_plot_post_request(client):
test_data = {'color': 'Blue', 'x': 'teff', 'y': 'mass', 'z': 'Vmag',
'x1': 8000, 'x2': 2500, 'y1': 0, 'y2': 5,
'xscale': 'linear', 'yscale': 'log', 'checkboxes': ''}
for end_point in ('plot',): # 'plot_exo'
plot = client.post(url_for(end_point), data=test_data, follow_redirects=True)
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
def test_post_z_none_43(client):
"""Test that setting z to None does not produce an error."""
test_data = {"color": "Blue", "x": "teff", "y": "mass", "z": "None",
"x1": 8000, "x2": 2500, "y1": 0, "y2": 5,
"xscale": "linear", "yscale": "log", "checkboxes": ""}
for end_point in ("plot", "plot_exo"):
plot = client.post(url_for(end_point), data=test_data, follow_redirects=True)
assert plot.status_code == 200
assert b"Select your settings:" in plot.data
| mit | Python |
cf60d3869834ba4654f57f1b4daca7797f2c5736 | extend => append. | hello-base/web,hello-base/web,hello-base/web,hello-base/web | base/components/views.py | base/components/views.py | from django.views.generic import TemplateView, View
from braces.views import AjaxResponseMixin, JSONResponseMixin
from haystack.query import SearchQuerySet
from components.merchandise.music.models import Album, Edition, Single, Track
from components.people.models import Group, Idol
class AutocompleteView(JSONResponseMixin, AjaxResponseMixin, View):
def get_ajax(self, request, *args, **kwargs):
query = request.GET.get('q', '')
sqs = SearchQuerySet().autocomplete(text=query).load_all()[:5]
suggestions = []
[suggestions.append({
'text': result.text,
'pk': result.pk,
'model': result.model_name,
'name': result.object.name if result.object.name != result.object.romanized_name else None,
'romanized_name': result.object.romanized_name,
'url': result.object.get_absolute_url(),
}) for result in sqs]
json = {'query': query, 'results': suggestions}
return self.render_json_response(json)
class SiteView(TemplateView):
template_name = 'landings/site_home.html'
def get_context_data(self, **kwargs):
context = super(SiteView, self).get_context_data(**kwargs)
context.update({
'counts': {
'albums': Album.objects.count(),
'editions': Edition.objects.count(),
'groups': Group.objects.count(),
'idols': Idol.objects.count(),
'singles': Single.objects.count(),
'tracks': Track.objects.count(),
}
})
return context
class ImageDetailView(TemplateView):
template_name = 'landings/image_detail.html'
class PlainTextView(TemplateView):
def render_to_response(self, context, **kwargs):
return super(TemplateView, self).render_to_response(context, content_type='text/plain', **kwargs)
class XMLView(TemplateView):
def render_to_response(self, context, **kwargs):
return super(TemplateView, self).render_to_response(context, content_type='application/opensearchdescription+xml', **kwargs)
| from django.views.generic import TemplateView, View
from braces.views import AjaxResponseMixin, JSONResponseMixin
from haystack.query import SearchQuerySet
from components.merchandise.music.models import Album, Edition, Single, Track
from components.people.models import Group, Idol
class AutocompleteView(JSONResponseMixin, AjaxResponseMixin, View):
def get_ajax(self, request, *args, **kwargs):
query = request.GET.get('q', '')
sqs = SearchQuerySet().autocomplete(text=query).load_all()[:5]
suggestions = []
[suggestions.extend({
'text': result.text,
'pk': result.pk,
'model': result.model_name,
'name': result.object.name if result.object.name != result.object.romanized_name else None,
'romanized_name': result.object.romanized_name,
'url': result.object.get_absolute_url(),
}) for result in sqs]
json = {'query': query, 'results': suggestions}
return self.render_json_response(json)
class SiteView(TemplateView):
template_name = 'landings/site_home.html'
def get_context_data(self, **kwargs):
context = super(SiteView, self).get_context_data(**kwargs)
context.update({
'counts': {
'albums': Album.objects.count(),
'editions': Edition.objects.count(),
'groups': Group.objects.count(),
'idols': Idol.objects.count(),
'singles': Single.objects.count(),
'tracks': Track.objects.count(),
}
})
return context
class ImageDetailView(TemplateView):
template_name = 'landings/image_detail.html'
class PlainTextView(TemplateView):
def render_to_response(self, context, **kwargs):
return super(TemplateView, self).render_to_response(context, content_type='text/plain', **kwargs)
class XMLView(TemplateView):
def render_to_response(self, context, **kwargs):
return super(TemplateView, self).render_to_response(context, content_type='application/opensearchdescription+xml', **kwargs)
| apache-2.0 | Python |
1930fe6bb492d41da88fae2f902e3f966a12926a | Fix indentation error on str method for league | shermanng10/superathletebuilder,shermanng10/superathletebuilder,shermanng10/superathletebuilder,shermanng10/superathletebuilder | athletes/models.py | athletes/models.py | from django.db import models
from django.utils import timezone
from django.utils.encoding import force_bytes
class Sport(models.Model):
name = models.CharField(max_length=20)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class League(models.Model):
name = models.CharField(max_length=20)
sport = models.ForeignKey(Sport)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class Team(models.Model):
name = models.CharField(max_length=20)
sport = models.ForeignKey(Sport)
league = models.ForeignKey(League)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class Athlete(models.Model):
first_name = models.CharField(max_length=25)
last_name = models.CharField(max_length=25)
age = models.PositiveIntegerField()
gender = models.CharField(max_length=10)
website = models.URLField()
sport = models.ForeignKey(Sport)
league = models.ForeignKey(League, blank=True, null=True)
team = models.ForeignKey(Team, blank=True, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes('%s %s' % (self.first_name, self.last_name))
| from django.db import models
from django.utils import timezone
from django.utils.encoding import force_bytes
class Sport(models.Model):
name = models.CharField(max_length=20)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class League(models.Model):
name = models.CharField(max_length=20)
sport = models.ForeignKey(Sport)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class Team(models.Model):
name = models.CharField(max_length=20)
sport = models.ForeignKey(Sport)
league = models.ForeignKey(League)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes(self.name)
class Athlete(models.Model):
first_name = models.CharField(max_length=25)
last_name = models.CharField(max_length=25)
age = models.PositiveIntegerField()
gender = models.CharField(max_length=10)
website = models.URLField()
sport = models.ForeignKey(Sport)
league = models.ForeignKey(League, blank=True, null=True)
team = models.ForeignKey(Team, blank=True, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return force_bytes('%s %s' % (self.first_name, self.last_name))
| mit | Python |
d5ec3ead350036fbe1d7e04964ceedc9b3188428 | handle when request path ends in / | gregmli/cwta,gregmli/cwta,gregmli/cwta,gregmli/cwta,gregmli/cwta | redesign/website/cwta.py | redesign/website/cwta.py | import webapp2
from google.appengine.api import users
#from google.appengine.ext import db
import jinja2
import os
import urllib
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
def urlencode_filter(s):
if type(s) == 'Markup':
s = s.unescape()
s = s.encode('utf8')
s = urllib.quote_plus(s)
return jinja2.Markup(s)
jinja_environment.filters['urlencode'] = urlencode_filter
class CwtaPage(webapp2.RequestHandler):
def get(self, page):
template = jinja_environment.get_template(page + '.html')
self.response.out.write(template.render())
class HomePage(CwtaPage):
def get(self):
super(HomePage,self).get('index')
app = webapp2.WSGIApplication([('/', HomePage),
('/(classes|instructors|chen|yang|resources|czl2016)/?(?i)', CwtaPage)
],
debug=True)
| import webapp2
from google.appengine.api import users
#from google.appengine.ext import db
import jinja2
import os
import urllib
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
def urlencode_filter(s):
if type(s) == 'Markup':
s = s.unescape()
s = s.encode('utf8')
s = urllib.quote_plus(s)
return jinja2.Markup(s)
jinja_environment.filters['urlencode'] = urlencode_filter
class CwtaPage(webapp2.RequestHandler):
def get(self, page):
template = jinja_environment.get_template(page + '.html')
self.response.out.write(template.render())
class HomePage(CwtaPage):
def get(self):
super(HomePage,self).get('index')
app = webapp2.WSGIApplication([('/', HomePage),
('/(classes|instructors|chen|yang|resources|czl2016)(?i)', CwtaPage)
],
debug=True)
| mit | Python |
583195165d5e3a40ff3ebc6273dafa6111173bfa | Remove debug line | nedbat/zellij | test_defuzz.py | test_defuzz.py | import itertools
import math
from defuzz import Defuzzer
from hypothesis import given, example
from hypothesis.strategies import floats, integers, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
@example([(.48, 1.02), (.52, .98)])
def test_hypo(points):
dfz = Defuzzer(ndigits=0)
dfz_points = [dfz.defuzz(pt) for pt in points]
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .5
@given(f, integers(min_value=-2, max_value=6))
def test_correct_distance(start, ndigits):
dfz = Defuzzer(ndigits=ndigits)
eps = 1e-10
window = 10 ** -ndigits
smallest_different = 1.5 * window + eps
largest_same = 0.5 * window - eps
step = 10 * window
for i in range(20):
num = start + i * step
assert dfz.defuzz((num,)) == (num,)
assert dfz.defuzz((num + largest_same,)) == (num,)
assert dfz.defuzz((num - largest_same,)) == (num,)
assert dfz.defuzz((num + smallest_different,)) != (num,)
assert dfz.defuzz((num - smallest_different,)) != (num,)
| import itertools
import math
from defuzz import Defuzzer
from hypothesis import given, example
from hypothesis.strategies import floats, integers, lists, tuples
from hypo_helpers import f
def test_it():
dfz = Defuzzer()
assert dfz.defuzz((1, 2)) == (1, 2)
assert dfz.defuzz((1, 3)) == (1, 3)
assert dfz.defuzz((1.00000001, 2)) == (1, 2)
assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
assert dfz.defuzz((2, 3)) == (2.00000001, 3)
@given(lists(tuples(f, f)))
@example([(.48, 1.02), (.52, .98)])
def test_hypo(points):
dfz = Defuzzer(ndigits=0)
dfz_points = [dfz.defuzz(pt) for pt in points]
print(f"{points}\n{dfz_points}\n")
# The output values should all be in the inputs.
assert all(pt in points for pt in dfz_points)
# No two unequal output values should be too close together.
if len(points) > 1:
for a, b in itertools.combinations(dfz_points, 2):
if a == b:
continue
distance = math.hypot(a[0] - b[0], a[1] - b[1])
assert distance > .5
@given(f, integers(min_value=-2, max_value=6))
def test_correct_distance(start, ndigits):
dfz = Defuzzer(ndigits=ndigits)
eps = 1e-10
window = 10 ** -ndigits
smallest_different = 1.5 * window + eps
largest_same = 0.5 * window - eps
step = 10 * window
for i in range(20):
num = start + i * step
assert dfz.defuzz((num,)) == (num,)
assert dfz.defuzz((num + largest_same,)) == (num,)
assert dfz.defuzz((num - largest_same,)) == (num,)
assert dfz.defuzz((num + smallest_different,)) != (num,)
assert dfz.defuzz((num - smallest_different,)) != (num,)
| apache-2.0 | Python |
7af8c206ace3e6fd99bef11501e1def601bbdd78 | Add patches and missing dependency to bash (#13084) | LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/bash/package.py | var/spack/repos/builtin/packages/bash/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bash(AutotoolsPackage):
"""The GNU Project's Bourne Again SHell."""
homepage = "https://www.gnu.org/software/bash/"
url = "https://ftpmirror.gnu.org/bash/bash-4.4.tar.gz"
version('5.0', sha256='b4a80f2ac66170b2913efbfb9f2594f1f76c7b1afd11f799e22035d63077fb4d')
version('4.4.12', '7c112970cbdcadfc331e10eeb5f6aa41')
version('4.4', '148888a7c95ac23705559b6f477dfe25')
version('4.3', '81348932d5da294953e15d4814c74dd1')
depends_on('ncurses')
depends_on('readline@5.0:')
depends_on('libiconv')
patches = [
('5.0', '001', 'f2fe9e1f0faddf14ab9bfa88d450a75e5d028fedafad23b88716bd657c737289'),
('5.0', '002', '87e87d3542e598799adb3e7e01c8165bc743e136a400ed0de015845f7ff68707'),
('5.0', '003', '4eebcdc37b13793a232c5f2f498a5fcbf7da0ecb3da2059391c096db620ec85b'),
('5.0', '004', '14447ad832add8ecfafdce5384badd933697b559c4688d6b9e3d36ff36c62f08'),
('5.0', '005', '5bf54dd9bd2c211d2bfb34a49e2c741f2ed5e338767e9ce9f4d41254bf9f8276'),
('5.0', '006', 'd68529a6ff201b6ff5915318ab12fc16b8a0ebb77fda3308303fcc1e13398420'),
('5.0', '007', '17b41e7ee3673d8887dd25992417a398677533ab8827938aa41fad70df19af9b'),
('5.0', '008', 'eec64588622a82a5029b2776e218a75a3640bef4953f09d6ee1f4199670ad7e3'),
('5.0', '009', 'ed3ca21767303fc3de93934aa524c2e920787c506b601cc40a4897d4b094d903'),
('5.0', '010', 'd6fbc325f0b5dc54ddbe8ee43020bced8bd589ddffea59d128db14b2e52a8a11'),
('5.0', '011', '2c4de332b91eaf797abbbd6c79709690b5cbd48b12e8dfe748096dbd7bf474ea'),
]
for ver, num, checksum in patches:
ver = Version(ver)
patch('https://ftpmirror.gnu.org/bash/bash-{0}-patches/bash{1}-{2}'.format(ver, ver.joined, num),
level=0, when='@{0}'.format(ver), sha256=checksum)
def configure_args(self):
spec = self.spec
return [
'LIBS=-lncursesw',
'--with-curses',
'--enable-readline',
'--with-installed-readline',
'--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix),
]
def check(self):
make('tests')
@property
def install_targets(self):
args = ['install']
if self.spec.satisfies('@4.4:'):
args.append('install-headers')
return args
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bash(AutotoolsPackage):
"""The GNU Project's Bourne Again SHell."""
homepage = "https://www.gnu.org/software/bash/"
url = "https://ftpmirror.gnu.org/bash/bash-4.4.tar.gz"
version('5.0', sha256='b4a80f2ac66170b2913efbfb9f2594f1f76c7b1afd11f799e22035d63077fb4d')
version('4.4.12', '7c112970cbdcadfc331e10eeb5f6aa41')
version('4.4', '148888a7c95ac23705559b6f477dfe25')
version('4.3', '81348932d5da294953e15d4814c74dd1')
depends_on('ncurses')
depends_on('readline@5.0:')
def configure_args(self):
spec = self.spec
return [
'LIBS=-lncursesw',
'--with-curses',
'--enable-readline',
'--with-installed-readline={0}'.format(spec['readline'].prefix),
]
def check(self):
make('tests')
@property
def install_targets(self):
args = ['install']
if self.spec.satisfies('@4.4:'):
args.append('install-headers')
return args
| lgpl-2.1 | Python |
1599dce2aeda294ae803444beeddca7d8a6d06d0 | Add versions 1.8 and 1.7 (#10978) | LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/canu/package.py | var/spack/repos/builtin/packages/canu/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Canu(MakefilePackage):
"""A single molecule sequence assembler for genomes large and
small."""
homepage = "http://canu.readthedocs.io/"
url = "https://github.com/marbl/canu/archive/v1.5.tar.gz"
version('1.8', sha256='30ecfe574166f54f79606038830f68927cf0efab33bdc3c6e43fd1448fa0b2e4')
version('1.7.1', sha256='c314659c929ee05fd413274f391463a93f19b8337eabb7ee5de1ecfc061caafa')
version('1.7', sha256='c5be54b0ad20729093413e7e722a19637d32e966dc8ecd2b579ba3e4958d378a')
version('1.5', '65df275baa28ecf11b15dfd7343361e3')
depends_on('gnuplot', type='run')
depends_on('jdk', type='run')
depends_on('perl', type='run')
build_directory = 'src'
build_targets = ['clean']
def patch(self):
# Use our perl, not whatever is in the environment
filter_file(r'^#!/usr/bin/env perl',
'#!{0}'.format(self.spec['perl'].command.path),
'src/pipelines/canu.pl')
def install(self, spec, prefix):
with working_dir(self.build_directory):
make('all', 'TARGET_DIR={0}'.format(prefix))
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Canu(MakefilePackage):
"""A single molecule sequence assembler for genomes large and
small."""
homepage = "http://canu.readthedocs.io/"
url = "https://github.com/marbl/canu/archive/v1.5.tar.gz"
version('1.7.1', sha256='c314659c929ee05fd413274f391463a93f19b8337eabb7ee5de1ecfc061caafa')
version('1.5', '65df275baa28ecf11b15dfd7343361e3')
depends_on('gnuplot', type='run')
depends_on('jdk', type='run')
depends_on('perl', type='run')
build_directory = 'src'
build_targets = ['clean']
def patch(self):
# Use our perl, not whatever is in the environment
filter_file(r'^#!/usr/bin/env perl',
'#!{0}'.format(self.spec['perl'].command.path),
'src/pipelines/canu.pl')
def install(self, spec, prefix):
with working_dir(self.build_directory):
make('all', 'TARGET_DIR={0}'.format(prefix))
| lgpl-2.1 | Python |
cc2014baa5f9650cac1176b0ecd5f8a86cb9010b | Fix typo | xadahiya/hydrus,HTTP-APIs/hydrus | hydrus/server/parser.py | hydrus/server/parser.py | from data.astronomy import astronomy
from server.commons import ROOT, SERVE, HYDRA_DOC
objects = astronomy['defines']
# print(objects[0])
# filter the objects array
# use 'lifter' library to filter arrays
# https://github.com/EliotBerriot/lifter
template = {
"@context": {
"hydra": "http://www.w3.org/ns/hydra/context.jsonld",
},
"@id": None,
"hydra:apiDocumentation": None,
"@type": "hydra:Collection",
"hydra:totalItems": None,
"hydra:member": []
}
def collect_astronomy_resources(uri):
"""
Serve an HYDRA collection loaded from a local dictionary.
"""
# SERVE.format(class_=o.get('@type')[o.get('@type').rfind('/')+1:]
members = [
dict([
("@id", "{id_}".format(id_=o.get('@id'))),
("@type", "{class_}".format(class_=o.get('@type'))),
("hash", "{hash_}".format(hash_=o.get('hash')))
]) for o in objects
]
template['@id'], template['hydra:totalItems'], template['hydra:member'] = ROOT + '/' + uri, len(members), members
template['hydra:apiDocumentation'] = HYDRA_DOC.format(endpoint_=uri)
return template
def collect_subclass_of(class_):
"""
Serve an HYDRA collection of classes that are subclasses of a class
"""
pass
| from data.astronomy import astronomy
from server.commons import ROOT, SERVE, HYDRA_DOC
objects = astronomy['defines']
# print(objects[0])
# filter the objects array
# use 'lifter' library to filter arrays
# https://github.com/EliotBerriot/lifter
template = {
"@context": {
"hydra": "http://www.w3.org/ns/hydra/context.jsonld",
},
"@id": None,
"hydra:apiDocumentation": None,
"@type": "hydra:Collection",
"hydra:totalItems": None,
"hydra:member": []
}
def collect_astronomy_resources(uri):
"""
Serve an HYDRA collection loaded from a local dictionary.
"""
# SERVE.format(class_=o.get('@type')[o.get('@type').rfind('/')+1:]
members = [
dict([
("@id", "{id_}".format(id_=o.get('@id'))),
("@type", "{class_}".format(class_=o.get('@type'))),
("hash", "{hash_}".format(hash_=o.get('hash')))
]) for o in objects
]
template['@id'], template['hydra:totalItems'], template['hydra:member'] = ROOT + '/' + uri, len(members), members
template['hydra:apiDocumnetation'] = HYDRA_DOC.format(endpoint_=uri)
return template
def collect_subclass_of(class_):
"""
Serve an HYDRA collection of classes that are subclasses of a class
"""
pass
| mit | Python |
0791cbb2aa2af72411153649ca7d365be02b1e62 | Increment version | spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc | thinc/about.py | thinc/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = 'thinc'
__version__ = '6.8.1'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = False
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = 'thinc'
__version__ = '6.8.0'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = False
| mit | Python |
c906f99e961d2fa44a01f3efff85ce2679701027 | Update bottlespin.py | kallerdaller/Cogs-Yorkfield | bottlespin/bottlespin.py | bottlespin/bottlespin.py | import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True, alias=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
roles = [ctx.message.server.roles]
await self.bot.say(str(roles))
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
| import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True, alias=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
roles = [ctx.message.server.roles]
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
| mit | Python |
53e8c14d774131503dbdefe6528cd1e26adbf30b | Allow Travis to load tests and use azure-storage installed from pip at the same time | Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,v-iam/azure-sdk-for-python,Azure/azure-sdk-for-python,AutorestCI/azure-sdk-for-python,lmazuel/azure-sdk-for-python,Azure/azure-sdk-for-python,SUSE/azure-sdk-for-python | azure_nosetests.py | azure_nosetests.py | #!/usr/bin/env python
import os.path, nose, glob, sys, pkg_resources
packages = [os.path.dirname(p) for p in glob.glob('azure*/setup.py')]
sys.path += packages
# Declare it manually, because "azure-storage" is probably installed with pip
pkg_resources.declare_namespace('azure')
nose.main() | #!/usr/bin/env python
import os.path, nose, glob, sys
packages = [os.path.dirname(p) for p in glob.glob('azure*/setup.py')]
sys.path += packages
nose.main() | mit | Python |
c13445cc54b96b2524ea9df71e892d2fe6c8c34a | Make sure warning messages are logged on stdout | SUSE/azurectl,SUSE/azurectl,SUSE/azurectl | azurectl/logger.py | azurectl/logger.py | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import re
import sys
class LoggerSchedulerFilter(logging.Filter):
def filter(self, record):
# messages from apscheduler scheduler instances are filtered out
# they conflict with console progress information
return not record.name == 'apscheduler.scheduler'
class InfoFilter(logging.Filter):
def filter(self, record):
# only messages with record level INFO, WARNING and DEBUG can pass
# for messages with another level an extra handler is used
return record.levelno in (
logging.INFO, logging.WARNING, logging.DEBUG
)
class Logger(logging.Logger):
"""
azurectl logging facility based on python logging
"""
def __init__(self, name):
logging.Logger.__init__(self, name, logging.INFO)
formatter = logging.Formatter('%(levelname)s: %(message)s')
# log INFO and DEBUG messages to stdout
console_info = logging.StreamHandler(sys.__stdout__)
console_info.setLevel(logging.INFO)
console_info.setFormatter(formatter)
console_info.addFilter(InfoFilter())
console_info.addFilter(LoggerSchedulerFilter())
# log ERROR messages to stderr (default stream)
console_error = logging.StreamHandler()
console_error.setLevel(logging.ERROR)
console_error.setFormatter(formatter)
self.addHandler(console_info)
self.addHandler(console_error)
def progress(self, current, total, prefix, bar_length=40):
try:
percent = float(current) / total
except:
# we don't want the progress to raise an exception
# In case of any error e.g division by zero the current
# way out is to skip the progress update
return
hashes = '#' * int(round(percent * bar_length))
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write("\r{0}: [{1}] {2}%".format(
prefix, hashes + spaces, int(round(percent * 100))
))
sys.stdout.flush()
def init(level=logging.INFO):
global log
logging.setLoggerClass(Logger)
log = logging.getLogger("azurectl")
log.setLevel(level)
| # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import re
import sys
class LoggerSchedulerFilter(logging.Filter):
def filter(self, record):
# messages from apscheduler scheduler instances are filtered out
# they conflict with console progress information
return not record.name == 'apscheduler.scheduler'
class InfoFilter(logging.Filter):
def filter(self, record):
# only messages with record level INFO and DEBUG can pass
# for messages with another level an extra handler is used
return record.levelno in (logging.INFO, logging.DEBUG)
class Logger(logging.Logger):
"""
azurectl logging facility based on python logging
"""
def __init__(self, name):
logging.Logger.__init__(self, name, logging.INFO)
formatter = logging.Formatter('%(levelname)s: %(message)s')
# log INFO and DEBUG messages to stdout
console_info = logging.StreamHandler(sys.__stdout__)
console_info.setLevel(logging.INFO)
console_info.setFormatter(formatter)
console_info.addFilter(InfoFilter())
console_info.addFilter(LoggerSchedulerFilter())
# log ERROR messages to stderr (default stream)
console_error = logging.StreamHandler()
console_error.setLevel(logging.ERROR)
console_error.setFormatter(formatter)
self.addHandler(console_info)
self.addHandler(console_error)
def progress(self, current, total, prefix, bar_length=40):
try:
percent = float(current) / total
except:
# we don't want the progress to raise an exception
# In case of any error e.g division by zero the current
# way out is to skip the progress update
return
hashes = '#' * int(round(percent * bar_length))
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write("\r{0}: [{1}] {2}%".format(
prefix, hashes + spaces, int(round(percent * 100))
))
sys.stdout.flush()
def init(level=logging.INFO):
global log
logging.setLoggerClass(Logger)
log = logging.getLogger("azurectl")
log.setLevel(level)
| apache-2.0 | Python |
270298cce11e90f7f8c0cc2f06b7ddbbfaad9f6b | Test the default URL shortener backend | bywbilly/django-blog-zinnia,marctc/django-blog-zinnia,ZuluPro/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,aorzh/django-blog-zinnia,Fantomas42/django-blog-zinnia,petecummings/django-blog-zinnia,1844144/django-blog-zinnia,dapeng0802/django-blog-zinnia,ZuluPro/django-blog-zinnia,1844144/django-blog-zinnia,extertioner/django-blog-zinnia,Fantomas42/django-blog-zinnia,dapeng0802/django-blog-zinnia,1844144/django-blog-zinnia,Maplecroft/django-blog-zinnia,aorzh/django-blog-zinnia,ghachey/django-blog-zinnia,Maplecroft/django-blog-zinnia,bywbilly/django-blog-zinnia,marctc/django-blog-zinnia,Zopieux/django-blog-zinnia,extertioner/django-blog-zinnia,dapeng0802/django-blog-zinnia,ghachey/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,Fantomas42/django-blog-zinnia,marctc/django-blog-zinnia,petecummings/django-blog-zinnia,Zopieux/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,aorzh/django-blog-zinnia | zinnia/tests/test_url_shortener.py | zinnia/tests/test_url_shortener.py | """Test cases for Zinnia's url_shortener"""
import warnings
from django.test import TestCase
from zinnia.url_shortener import get_url_shortener
from zinnia import url_shortener as us_settings
from zinnia.url_shortener.backends import default
class URLShortenerTestCase(TestCase):
"""Test cases for zinnia.url_shortener"""
def setUp(self):
self.original_backend = us_settings.URL_SHORTENER_BACKEND
def tearDown(self):
us_settings.URL_SHORTENER_BACKEND = self.original_backend
def test_get_url_shortener(self):
us_settings.URL_SHORTENER_BACKEND = 'mymodule.myclass'
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default.backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'mymodule.myclass backend cannot be imported')
us_settings.URL_SHORTENER_BACKEND = ('zinnia.tests.implementations.'
'custom_url_shortener')
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default.backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'This backend only exists for testing')
us_settings.URL_SHORTENER_BACKEND = 'zinnia.url_shortener'\
'.backends.default'
self.assertEqual(get_url_shortener(), default.backend)
class FakeEntry(object):
"""Fake entry with only 'pk' as attribute"""
def __init__(self, pk):
self.pk = pk
class UrlShortenerDefaultBackendTestCase(TestCase):
"""Tests cases for the default url shortener backend"""
urls = 'zinnia.tests.implementations.urls.default'
def test_backend(self):
original_protocol = default.PROTOCOL
default.PROTOCOL = 'http'
entry = FakeEntry(1)
self.assertEquals(default.backend(entry),
'http://example.com/1/')
default.PROTOCOL = 'https'
entry = FakeEntry(100)
self.assertEquals(default.backend(entry),
'https://example.com/2S/')
default.PROTOCOL = original_protocol
def test_base36(self):
self.assertEquals(default.base36(1), '1')
self.assertEquals(default.base36(100), '2S')
self.assertEquals(default.base36(46656), '1000')
| """Test cases for Zinnia's url_shortener"""
import warnings
from django.test import TestCase
from zinnia.url_shortener import get_url_shortener
from zinnia import url_shortener as us_settings
from zinnia.url_shortener.backends.default import backend as default_backend
class URLShortenerTestCase(TestCase):
"""Test cases for zinnia.url_shortener"""
def setUp(self):
self.original_backend = us_settings.URL_SHORTENER_BACKEND
def tearDown(self):
us_settings.URL_SHORTENER_BACKEND = self.original_backend
def test_get_url_shortener(self):
us_settings.URL_SHORTENER_BACKEND = 'mymodule.myclass'
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default_backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'mymodule.myclass backend cannot be imported')
us_settings.URL_SHORTENER_BACKEND = ('zinnia.tests.implementations.'
'custom_url_shortener')
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default_backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'This backend only exists for testing')
us_settings.URL_SHORTENER_BACKEND = 'zinnia.url_shortener'\
'.backends.default'
self.assertEqual(get_url_shortener(), default_backend)
class FakeEntry(object):
"""Fake entry with only 'pk' as attribute"""
def __init__(self, pk):
self.pk = pk
class UrlShortenerDefaultBackendTestCase(TestCase):
"""Tests cases for the default url shortener backend"""
def test_backend(self):
pass
def test_base36(self):
pass
| bsd-3-clause | Python |
cc32a7b1b40c54c98fb3bceda8f9bb3b3bea243a | Fix dst_ssh | brickgao/specchio | specchio/main.py | specchio/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
from watchdog.observers import Observer
from specchio.handlers import SpecchioEventHandler
from specchio.utils import init_logger, logger
def main():
"""Main function for specchio
Example: specchio test/ user@host:test/
:return: None
"""
if len(sys.argv) == 3:
src_path = sys.argv[1].strip()
dst_ssh, dst_path = sys.argv[2].strip().split(":")
init_logger()
logger.info("Initialize Specchio")
event_handler = SpecchioEventHandler(
src_path=src_path, dst_ssh=dst_ssh, dst_path=dst_path
)
observer = Observer()
observer.schedule(event_handler, src_path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
else:
print """Usage: specchio src/ user@host:dst/"""
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
from watchdog.observers import Observer
from specchio.handlers import SpecchioEventHandler
from specchio.utils import init_logger, logger
def main():
"""Main function for specchio
Example: specchio test/ user@host:test/
:return: None
"""
if len(sys.argv) == 3:
src_path = sys.argv[1].strip()
dst_ssh, dst_path = sys.argv[2].strip().split(":")
init_logger()
logger.info("Initialize Specchio")
event_handler = SpecchioEventHandler(
src_path=src_path, dst_ssh=dst_path, dst_path=dst_path
)
observer = Observer()
observer.schedule(event_handler, src_path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
else:
print """Usage: specchio src/ user@host:dst/"""
| mit | Python |
39ef94c4fc76930964b9635cc6fadeefbf8510c6 | Update init | ghl3/bamboo,ghl3/bamboo | bamboo/__init__.py | bamboo/__init__.py |
from core import *
__all__ = ['core', 'frames', 'groups', 'modeling']
|
from core import wrap
import bamboo.groups
import bamboo.frames
import bamboo.plotting
__all__ = ['core', 'plotting', 'frames', 'groups', 'modeling']
| mit | Python |
e0e5e662e950973e8a79ae876cc91119b91d9122 | Update hoomd.md documentation | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/md/__init__.py | hoomd/md/__init__.py | # Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
""" Molecular Dynamics
Perform Molecular Dynamics simulations with HOOMD-blue.
.. rubric:: Stability
:py:mod:`hoomd.md` is **stable**. When upgrading from version 3.x to 3.y (y >
x), existing job scripts that follow *documented* interfaces for functions and
classes will not require any modifications. **Maintainer:** Joshua A. Anderson
"""
from hoomd.md import angle
from hoomd.md import bond
from hoomd.md import charge
from hoomd.md import compute
from hoomd.md import constrain
from hoomd.md import dihedral
from hoomd.md import external
from hoomd.md import force
from hoomd.md import improper
from hoomd.md.integrate import Integrator
from hoomd.md import nlist
from hoomd.md import pair
from hoomd.md import update
from hoomd.md import wall
from hoomd.md import special_pair
from hoomd.md import methods
| # Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
""" Molecular Dynamics
Perform Molecular Dynamics simulations with HOOMD-blue.
.. rubric:: Stability
:py:mod:`hoomd.md` is **stable**. When upgrading from version 2.x to 2.y (y >
x), existing job scripts that follow *documented* interfaces for functions and
classes will not require any modifications. **Maintainer:** Joshua A. Anderson
"""
from hoomd.md import angle
from hoomd.md import bond
from hoomd.md import charge
from hoomd.md import compute
from hoomd.md import constrain
from hoomd.md import dihedral
from hoomd.md import external
from hoomd.md import force
from hoomd.md import improper
from hoomd.md.integrate import Integrator
from hoomd.md import nlist
from hoomd.md import pair
from hoomd.md import update
from hoomd.md import wall
from hoomd.md import special_pair
from hoomd.md import methods
| bsd-3-clause | Python |
54d6d76fc485b32fc14dec49c2e53a4bed3114cc | Add base tests | trevor/calendarserver,trevor/calendarserver,trevor/calendarserver | twext/who/test/test_aggregate.py | twext/who/test/test_aggregate.py | ##
# Copyright (c) 2013 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Aggregate directory service tests
"""
from twisted.python.components import proxyForInterface
from twext.who.idirectory import IDirectoryService
from twext.who.aggregate import DirectoryService
from twext.who.test import test_xml
from twext.who.test.test_xml import QueryMixIn, xmlService
class BaseTest(object):
def service(self, services=None):
if services is None:
services = (self.xmlService(),)
#
# Make sure aggregate DirectoryService isn't making
# implementation assumptions about the IDirectoryService
# objects it gets.
#
services = tuple((
proxyForInterface(IDirectoryService)(s)
for s in services
))
class TestService(DirectoryService, QueryMixIn):
pass
return TestService("xyzzy", services)
def xmlService(self, xmlData=None):
return xmlService(self.mktemp(), xmlData)
class DirectoryServiceTest(BaseTest, test_xml.DirectoryServiceBaseTest):
def test_repr(self):
service = self.service()
self.assertEquals(repr(service), "<TestService 'xyzzy'>")
class DirectoryServiceRealmTest(BaseTest, test_xml.DirectoryServiceRealmTest):
pass
| ##
# Copyright (c) 2013 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Aggregate directory service tests
"""
from twisted.python.components import proxyForInterface
from twext.who.idirectory import IDirectoryService
from twext.who.aggregate import DirectoryService
from twext.who.test import test_xml
from twext.who.test.test_xml import QueryMixIn, xmlService
class BaseTest(object):
def service(self, services=None):
if services is None:
services = (self.xmlService(),)
#
# Make sure aggregate DirectoryService isn't making
# implementation assumptions about the IDirectoryService
# objects it gets.
#
services = tuple((
proxyForInterface(IDirectoryService)(s)
for s in services
))
class TestService(DirectoryService, QueryMixIn):
pass
return TestService("xyzzy", services)
def xmlService(self, xmlData=None):
return xmlService(self.mktemp(), xmlData)
class DirectoryServiceTest(BaseTest, test_xml.DirectoryServiceBaseTest):
def test_repr(self):
service = self.service()
self.assertEquals(repr(service), "<TestService 'xyzzy'>")
| apache-2.0 | Python |
2e922bbf0a4a0635ee0d9d5ecfac83da9ab0d702 | Bump version | pombredanne/django-taggit-serializer,bopo/django-taggit-serializer,glemmaPaul/django-taggit-serializer | taggit_serializer/__init__.py | taggit_serializer/__init__.py | __version__ = '0.1.2' | __version__ = '0.1.1' | bsd-3-clause | Python |
0f5794bc369ea0039a5219fbc19323a522bd412e | Fix topology | maclav3/dihedral-spsa | topology/__init__.py | topology/__init__.py | from abc import *
class Topology(ABCMeta):
@abstractmethod
def __init__(self):
self.bonds = None
self.angles = None
self.dihedrals = None
| from abc import *
class Topology(ABCMeta):
@abstractmethod
def __init__(self):
self.bonds = None
self.angles = None
self.
| mit | Python |
d01a7c63ab577a54213c956fb3de4b8d82323797 | Add Heirloom and Artifact item quality. | PuckCh/battlenet | battlenet/enums.py | battlenet/enums.py | RACE = {
1: 'Human',
2: 'Orc',
3: 'Dwarf',
4: 'Night Elf',
5: 'Undead',
6: 'Tauren',
7: 'Gnome',
8: 'Troll',
9: 'Goblin',
10: 'Blood Elf',
11: 'Draenei',
22: 'Worgen',
24: 'Pandaren',
25: 'Pandaren',
26: 'Pandaren',
}
CLASS = {
1: 'Warrior',
2: 'Paladin',
3: 'Hunter',
4: 'Rogue',
5: 'Priest',
6: 'Death Knight',
7: 'Shaman',
8: 'Mage',
9: 'Warlock',
10: 'Monk',
11: 'Druid',
}
QUALITY = {
1: 'Common',
2: 'Uncommon',
3: 'Rare',
4: 'Epic',
5: 'Legendary',
6: 'Artifact',
7: 'Heirloom',
}
RACE_TO_FACTION = {
1: 'Alliance',
2: 'Horde',
3: 'Alliance',
4: 'Alliance',
5: 'Horde',
6: 'Horde',
7: 'Alliance',
8: 'Horde',
9: 'Horde',
10: 'Horde',
11: 'Alliance',
22: 'Alliance',
24: '?',
25: 'Alliance',
26: 'Horde',
}
EXPANSION = {
0: ('wow', 'World of Warcraft'),
1: ('bc', 'The Burning Crusade'),
2: ('lk', 'Wrath of the Lich King'),
3: ('cata', 'Cataclysm'),
4: ('mop', 'Mists of Pandaria'),
}
RAIDS = {
'wow': (2717, 2677, 3429, 3428),
'bc': (3457, 3836, 3923, 3607, 3845, 3606, 3959, 4075),
'lk': (4603, 3456, 4493, 4500, 4273, 2159, 4722, 4812, 4987),
'cata': (5600, 5094, 5334, 5638, 5723, 5892),
'mop': (6125, 6297, 6067, 6622, 6738),
}
| RACE = {
1: 'Human',
2: 'Orc',
3: 'Dwarf',
4: 'Night Elf',
5: 'Undead',
6: 'Tauren',
7: 'Gnome',
8: 'Troll',
9: 'Goblin',
10: 'Blood Elf',
11: 'Draenei',
22: 'Worgen',
24: 'Pandaren',
25: 'Pandaren',
26: 'Pandaren',
}
CLASS = {
1: 'Warrior',
2: 'Paladin',
3: 'Hunter',
4: 'Rogue',
5: 'Priest',
6: 'Death Knight',
7: 'Shaman',
8: 'Mage',
9: 'Warlock',
10: 'Monk',
11: 'Druid',
}
QUALITY = {
1: 'Common',
2: 'Uncommon',
3: 'Rare',
4: 'Epic',
5: 'Legendary',
}
RACE_TO_FACTION = {
1: 'Alliance',
2: 'Horde',
3: 'Alliance',
4: 'Alliance',
5: 'Horde',
6: 'Horde',
7: 'Alliance',
8: 'Horde',
9: 'Horde',
10: 'Horde',
11: 'Alliance',
22: 'Alliance',
24: '?',
25: 'Alliance',
26: 'Horde',
}
EXPANSION = {
0: ('wow', 'World of Warcraft'),
1: ('bc', 'The Burning Crusade'),
2: ('lk', 'Wrath of the Lich King'),
3: ('cata', 'Cataclysm'),
4: ('mop', 'Mists of Pandaria'),
}
RAIDS = {
'wow': (2717, 2677, 3429, 3428),
'bc': (3457, 3836, 3923, 3607, 3845, 3606, 3959, 4075),
'lk': (4603, 3456, 4493, 4500, 4273, 2159, 4722, 4812, 4987),
'cata': (5600, 5094, 5334, 5638, 5723, 5892),
'mop': (6125, 6297, 6067, 6622, 6738),
}
| mit | Python |
8e4e2b07cae070d034a5ea91769dfc5dad28ce3e | Handle 79 character limit | inetCatapult/troposphere,Yipit/troposphere,cloudtools/troposphere,alonsodomin/troposphere,7digital/troposphere,johnctitus/troposphere,ptoraskar/troposphere,garnaat/troposphere,alonsodomin/troposphere,dmm92/troposphere,ikben/troposphere,horacio3/troposphere,cloudtools/troposphere,WeAreCloudar/troposphere,horacio3/troposphere,johnctitus/troposphere,craigbruce/troposphere,LouTheBrew/troposphere,dmm92/troposphere,micahhausler/troposphere,amosshapira/troposphere,pas256/troposphere,pas256/troposphere,7digital/troposphere,ikben/troposphere,xxxVxxx/troposphere | troposphere/utils.py | troposphere/utils.py | import time
def _tail_print(e):
print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))
def get_events(conn, stackname):
"""Get the events in batches and return in chronological order"""
next = None
event_list = []
while 1:
events = conn.describe_stack_events(stackname, next)
event_list.append(events)
if events.next_token is None:
break
next = events.next_token
time.sleep(1)
return reversed(sum(event_list, []))
def tail(conn, stack_name, log_func=_tail_print, sleep_time=5,
include_initial=True):
"""Show and then tail the event log"""
# First dump the full list of events in chronological order and keep
# track of the events we've seen already
seen = set()
initial_events = get_events(conn, stack_name)
for e in initial_events:
if include_initial:
log_func(e)
seen.add(e.event_id)
# Now keep looping through and dump the new events
while 1:
events = get_events(conn, stack_name)
for e in events:
if e.event_id not in seen:
log_func(e)
seen.add(e.event_id)
time.sleep(sleep_time)
| import time
def _tail_print(e):
print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))
def get_events(conn, stackname):
"""Get the events in batches and return in chronological order"""
next = None
event_list = []
while 1:
events = conn.describe_stack_events(stackname, next)
event_list.append(events)
if events.next_token is None:
break
next = events.next_token
time.sleep(1)
return reversed(sum(event_list, []))
def tail(conn, stack_name, log_func=_tail_print, sleep_time=5, include_initial=True):
"""Show and then tail the event log"""
# First dump the full list of events in chronological order and keep
# track of the events we've seen already
seen = set()
initial_events = get_events(conn, stack_name)
for e in initial_events:
if include_initial:
log_func(e)
seen.add(e.event_id)
# Now keep looping through and dump the new events
while 1:
events = get_events(conn, stack_name)
for e in events:
if e.event_id not in seen:
log_func(e)
seen.add(e.event_id)
time.sleep(sleep_time)
| bsd-2-clause | Python |
9bfdecf9c104667bbaf923a0c6c01dcaf057b379 | Add summary option | danielfrg/cyhdfs3,danielfrg/cyhdfs3,danielfrg/libhdfs3.py,danielfrg/libhdfs3.py | benchmark/timer.py | benchmark/timer.py | """
Taken from: https://gist.github.com/acdha/4068406
"""
import sys
import inspect
from timeit import default_timer
class Timer(object):
"""Context Manager to simplify timing Python code
Usage:
with Timer('key step'):
... do something ...
"""
def __init__(self, context=None, summary=True):
self.timer = default_timer
self.summary = summary
if context is None:
caller_frame = inspect.stack()[1]
frame = caller_frame[0]
info = inspect.getframeinfo(frame)
context = '%s (%s:%s)' % (info.function, info.filename, info.lineno)
self.context = context
@property
def elapsed(self):
end = self.timer()
return (end - self.start) * 1000
def restart(self):
self.start = self.timer()
def __enter__(self):
self.start = self.timer()
return self
def __exit__(self, *args):
if self.summary:
print >>sys.stderr, '%s: %f ms' % (self.context, self.elapsed)
| """
Taken from: https://gist.github.com/acdha/4068406
"""
import sys
from timeit import default_timer
class Timer(object):
"""Context Manager to simplify timing Python code
Usage:
with Timer('key step'):
... do something ...
"""
def __init__(self, context=None):
self.timer = default_timer
if context is None:
import inspect
caller_frame = inspect.stack()[1]
frame = caller_frame[0]
info = inspect.getframeinfo(frame)
context = '%s (%s:%s)' % (info.function, info.filename, info.lineno)
self.context = context
@property
def elapsed(self):
end = self.timer()
return (end - self.start) * 1000
def restart(self):
self.start = self.timer()
def __enter__(self):
self.start = self.timer()
return self
def __exit__(self, *args):
print >>sys.stderr, '%s: %f ms' % (self.context, self.elapsed)
| apache-2.0 | Python |
949d42baaa2b60481233feb5db12c8f4777ebfba | fix name | biokit/biokit,biokit/biokit | biokit/converters/bed2bam.py | biokit/converters/bed2bam.py |
class Bam2Bed(object):
"""
"""
def __init__(self, infile, outfile=None, *args, **kwargs):
""".. rubric:: constructor
:param str filename
"""
pass
def __call__(self):
print("YOUPI !!!!!! ")
pass
|
class BED2BAM(object):
"""
"""
def __init__(self, infile, outfile=None, *args, **kwargs):
""".. rubric:: constructor
:param str filename
"""
pass
def __call__(self):
pass
| bsd-2-clause | Python |
959652ce1bb3ac9ff54ddc5f1f7500efdc5691b0 | handle async-available peers by dropping new requests when old requests are still pending; add zmqrpc.async_multicall() for calling many peers and waiting for quorum or timeout | carlopires/libconsent | part1/zmqrpc.py | part1/zmqrpc.py | # Conrad Meyer <cemeyer@uw.edu>
# 0824410
# CSE 550 Problem Set 3
# Thu Nov 10 2011
import threading
import time
import zmq
class Server(threading.Thread):
def __init__(self, zctx, endpoint, obj):
threading.Thread.__init__(self)
self.methods = {}
for x in dir(obj):
if not x.startswith("_"):
self.methods[x] = getattr(obj, x)
self.socket = zctx.socket(zmq.REP)
self.socket.bind(endpoint)
self.daemon = True
self.start()
def run(self):
while True:
req = self.socket.recv_pyobj()
rep = None
try:
rep = self.methods[req[0]](*req[1])
except Exception as e:
print(e)
pass
self.socket.send_pyobj(rep)
class Client:
def __init__(self, zctx, endpoint):
self._socket = zctx.socket(zmq.REQ)
self._socket.connect(endpoint)
self._needs_recv = False
def _unblock(self):
try:
self._socket.recv(zmq.NOBLOCK)
except zmq.ZMQError as e:
if e.errno == zmq.EAGAIN:
return False
raise
return True
def send_pyobj(self, obj):
if self._needs_recv:
if self._unblock():
self._needs_recv = False
if not self._needs_recv:
self._socket.send_pyobj(obj)
self._needs_recv = True
def recv_pyobj(self):
o = self._socket.recv_pyobj()
self._needs_recv = False
return o
class _Call:
def __init__(self, name, client):
self.name = name
self.client = client
def __call__(self, *args):
self.client.send_pyobj((self.name, args))
return self.client.recv_pyobj()
def __getattr__(self, name):
return Client._Call(name, self)
class _FutureCall:
def __init__(self, name, client, args):
self.client = client
self.client.send_pyobj((name, args))
def socket(self):
return self.client._socket
def force(self):
return self.client.recv_pyobj()
def async_call(self, name, args):
return Client._FutureCall(name, self, args)
# Performs a given RPC against a set of peers; returns when a quorum of servers has
# replied, after the timeout interval (in seconds) has passed, whichever happens
# first.
#
# On success, returns (True, values), where values is a list of at least quorum
# values.
# On timeout, returns (False, values).
def async_multicall(peers, timeout, name, args):
futures = {}
for peer in peers:
future = peer.async_call(name, args)
futures[future.socket()] = future
starttime = time.time()
results = []
poller = zmq.Poller()
for future_sock in futures.keys():
poller.register(future_sock, zmq.POLLIN)
while len(results) < ((len(peers) // 2) + 1):
elapsed = time.time() - starttime
if elapsed >= timeout: break
for (sock, status) in poller.poll(1000*(timeout - elapsed)):
if status == zmq.POLLIN:
poller.unregister(sock)
results.append(futures[sock].force())
return (len(results) >= ((len(peers) // 2) + 1), results)
| # Conrad Meyer <cemeyer@uw.edu>
# 0824410
# CSE 550 Problem Set 3
# Thu Nov 10 2011
import threading
import zmq
class Server(threading.Thread):
def __init__(self, zctx, endpoint, obj):
threading.Thread.__init__(self)
self.methods = {}
for x in dir(obj):
if not x.startswith("_"):
self.methods[x] = getattr(obj, x)
self.socket = zctx.socket(zmq.REP)
self.socket.bind(endpoint)
self.daemon = True
self.start()
def run(self):
while True:
req = self.socket.recv_pyobj()
rep = None
try:
rep = self.methods[req[0]](*req[1])
except Exception as e:
print(e)
pass
self.socket.send_pyobj(rep)
class Client:
def __init__(self, zctx, endpoint):
self._socket = zctx.socket(zmq.REQ)
self._socket.connect(endpoint)
class _Call:
def __init__(self, sock, name):
self.socket = sock
self.name = name
def __call__(self, *args):
self.socket.send_pyobj((self.name, args))
return self.socket.recv_pyobj()
def __getattr__(self, name):
return Client._Call(self._socket, name)
| mit | Python |
937dcf5ff0198890f438047f6644e688e32a1a3f | fix the erro with fillDict | alexaleluia12/nivel-represas-sp,alexaleluia12/nivel-represas-sp | core/robot.py | core/robot.py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# TODO
#
from crawl import Crawl
import threading
from datetime import datetime
from wrapdb import Db
def fillDict(valDict):
"""
retorna dicionario com os valeres preenchidos com a respectiva data de hoje
"""
ano = "%Y"
mes = "%m"
dia = "%d"
nowDate = datetime.now()
copy = valDict
copy["cmbAno"] = int(nowDate.strftime(ano))
copy["cmbMes"] = int(nowDate.strftime(mes))
copy["cmbDia"] = int(nowDate.strftime(dia))
return copy
class Slave(object):
URL = "http://www2.sabesp.com.br/mananciais/DivulgacaoSiteSabesp.aspx"
mainData = {'cmbAno': None,'cmbMes': None,'cmbDia': None}
def __init__(self):
self.crawler = Crawl(self.URL)
self.db = Db()
def work(self):
dayCat = fillDict(self.mainData)
data = self.crawler.getForm(dayCat)
self.db.save(data)
threading.Timer(86400, self.work).start() # se repete a cada 24h
if __name__ == '__main__':
Slave().work()
| #!/usr/bin/env python
#-*- coding:utf-8 -*-
# TODO
# testar tudo isso, deveria ter usado TDD nesse projeto :(
from crawl import Crawl
import threading
from datetime import datetime
from wrapdb import Db
def fillDict(valDict):
"""
retorna valDict com os valeres preenchidos com a respectiva data de hoje
"""
ano = "%Y"
mes = "%m"
dia = "%d"
nowDate = datetime.now()
valDict["cmbAno"] = int(nowDate.strftime(ano))
valDict["cmbMes"] = int(nowDate.strftime(mes))
valDict["cmbDia"] = int(nowDate.strftime(dia))
class Slave(object):
URL = "http://www2.sabesp.com.br/mananciais/DivulgacaoSiteSabesp.aspx"
mainData = {'cmbAno': None,'cmbMes': None,'cmbDia': None}
def __init__(self):
self.crawler = Crawl(URL)
self.db = Db()
def work(self):
dayCat = fillDict(self.mainData)
data = self.crawler.getForm(dayCat)
self.db.save(data)
threading.Timer(86400, self.work).start() # se repete a cada 24h
if __name__ == '__main__':
Slave().work()
| mit | Python |
ade2883bfd37b6ae7311d66d88d3426854166a93 | Allow personal commands with fetched user | KrusnikViers/Zordon,KrusnikViers/Zordon | app/handlers/common.py | app/handlers/common.py | from telegram import Update, Bot, ReplyKeyboardMarkup, KeyboardButton
from ..models import User
# Maps internal action identifiers (the names checked via User.has_right)
# to the Telegram command names shown to users (keyboard_for_user prefixes
# the values with '/').  Groups below note the minimum role required.
commands_map = {
    # User-related commands
    'start': 'start',
    'status': 'status',
    'activate': 'ready',
    'deactivate': 'do_not_disturb',

    # Commands with activities
    'activity_list': 'list_activities',
    'activity_add': 'add_activity',  # (moderator-only)
    'activity_rem': 'remove_activity',  # (superuser-only)
    'subscribe': 'subscribe',
    'unsubscribe': 'unsubscribe',

    # Summoning commands
    'summon': 'summon',  # (moderator-only)
    'join': 'will_join',
    'later': 'will_join_later',
    'decline': 'will_not_join',

    # Moderating the moderators (superuser-only)
    'moderator_list': 'list_moderators',
    'moderator_add': 'add_moderator',
    'moderator_remove': 'remove_moderator'
}
def personal_command(command):
    """Decorator factory for Telegram handlers that need a resolved user.

    The wrapped handler is called with the ``User`` row for the sender
    (created on first contact).  If *command* is set and the user lacks the
    corresponding right, an error message is sent instead of running the
    handler.  The optional ``user`` argument lets callers pass an
    already-fetched user to skip the lookup.
    """
    def personal_command_impl(decorated_handler):
        def wrapper(bot: Bot, update: Update, user=None):
            if not user:
                # resolve (or lazily create) the user record for the sender
                user = User.get_or_create(telegram_user_id=update.effective_user.id,
                                          defaults={'telegram_login': update.effective_user.name})[0]
            # keep the stored login in sync with the current Telegram name
            user.validate_info(update.effective_user.name)
            if command and not user.has_right(command):
                user.send_message(bot,
                                  text='Unfortunately, you have not enough rights to execute this command.',
                                  reply_markup=keyboard_for_user(user))
            else:
                decorated_handler(bot, update, user)
        return wrapper
    return personal_command_impl
def keyboard_for_user(user: User):
    """Build the reply keyboard for *user*.

    Only commands the user has the right to run become buttons; the
    activate/deactivate toggle reflects the user's current active state.
    """
    if user.is_active:
        toggle = 'deactivate'
    else:
        toggle = 'activate'
    candidate_rows = [[toggle, 'status', 'summon'],
                      ['activity_list', 'moderator_list']]
    layout = []
    for row in candidate_rows:
        buttons = []
        for action in row:
            if user.has_right(action):
                buttons.append(KeyboardButton('/' + commands_map[action]))
        layout.append(buttons)
    return ReplyKeyboardMarkup(layout, resize_keyboard=True)
| from telegram import Update, Bot, ReplyKeyboardMarkup, KeyboardButton
from ..models import User
commands_map = {
# User-related commands
'start': 'start',
'status': 'status',
'activate': 'ready',
'deactivate': 'do_not_disturb',
# Commands with activities
'activity_list': 'list_activities',
'activity_add': 'add_activity', # (moderator-only)
'activity_rem': 'remove_activity', # (superuser-only)
'subscribe': 'subscribe',
'unsubscribe': 'unsubscribe',
# Summoning commands
'summon': 'summon', # (moderator-only)
'join': 'will_join',
'later': 'will_join_later',
'decline': 'will_not_join',
# Moderating the moderators (superuser-only)
'moderator_list': 'list_moderators',
'moderator_add': 'add_moderator',
'moderator_remove': 'remove_moderator'
}
def personal_command(command):
def personal_command_impl(decorated_handler):
def wrapper(bot: Bot, update: Update):
user = User.get_or_create(telegram_user_id=update.effective_user.id,
defaults={'telegram_login': update.effective_user.name})[0]
user.validate_info(update.effective_user.name)
if command and not user.has_right(command):
user.send_message(bot,
text='Unfortunately, you have not enough rights to execute this command.',
reply_markup=keyboard_for_user(user))
else:
decorated_handler(bot, update, user)
return wrapper
return personal_command_impl
def keyboard_for_user(user: User):
activation_command = 'deactivate' if user.is_active else 'activate'
possible_commands = [[activation_command, 'status', 'summon'], ['activity_list', 'moderator_list']]
keyboard_markup = [[KeyboardButton('/' + commands_map[x]) for x in commands_row if user.has_right(x)]
for commands_row in possible_commands]
return ReplyKeyboardMarkup(keyboard_markup, resize_keyboard=True)
| mit | Python |
83bffee653bddda0f3a134933ecf69bfe1c697b4 | Add version table | NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website | app/packages/models.py | app/packages/models.py | from app import db
import datetime
class Package(db.Model):
    """An Atom editor package tracked by the site."""

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), unique=True, nullable=False)
    author = db.Column(db.String(50))
    link = db.Column(db.String(140))
    description = db.Column(db.String())
    # one-to-many: per-day download counters and released versions
    downloads = db.relationship('Downloads', backref='package', lazy='dynamic')
    version = db.relationship('Version', backref='package', lazy='dynamic')

    def __repr__(self):
        return 'Package: %s' % self.name

    @classmethod
    def get_count(self):
        """Return the total number of packages."""
        # NOTE(review): the first parameter of a classmethod is
        # conventionally named `cls`, not `self`
        return Package.query.count()
class Version(db.Model):
    """A released version of a Package, with its release date."""

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.String(50), nullable=False)
    date = db.Column(db.DateTime, default=datetime.date.today, nullable=False)
    package_id = db.Column(db.Integer, db.ForeignKey('package.id'), nullable=False)

    def __repr__(self):
        return 'Ver: {} on {}'.format(self.number, self.date)
class DbFlags(db.Model):
    """Bookkeeping row recording when the package data was last updated."""

    id = db.Column(db.Integer, primary_key=True)
    date = db.Column(db.DateTime, default=datetime.date.today, nullable=False)
    flag = db.Column(db.Boolean, nullable=False)

    def __repr__(self):
        return 'DbFlags: {} {}'.format(self.date, self.flag)

    @classmethod
    def get_update_time(self):
        """Return the date of the last update (the row with id == 1)."""
        # NOTE(review): the first parameter of a classmethod is
        # conventionally named `cls`, not `self`
        return self.query.filter(self.id == 1).first().date
class Downloads(db.Model):
    """Daily snapshot of a package's cumulative download count.

    Idiom fix: the classmethods named their first parameter ``self``, which
    misleadingly suggests an instance; renamed to the conventional ``cls``
    (behavior unchanged).
    """

    id = db.Column(db.Integer, primary_key=True)
    downloads = db.Column(db.Integer, nullable=False)
    date = db.Column(db.DateTime, default=datetime.date.today, nullable=False)
    package_id = db.Column(db.Integer, db.ForeignKey('package.id'), nullable=False)

    @classmethod
    def nearest_last_entry(cls, time):
        """Walk backwards from *time*, one day at a time, to the most recent
        day that actually has download entries."""
        while cls.query.filter(cls.date == time).count() <= 0:
            time -= datetime.timedelta(days=1)
        return time

    @classmethod
    def __count_downloads(cls, entries):
        """Sum the download counters of *entries*."""
        return sum(entry.downloads for entry in entries)

    # period should be a datetime.timedelta
    @classmethod
    def get_downloads_count(cls, period):
        """Return the downloads gained over *period*, measured between the
        last update time (from DbFlags) and the nearest earlier day with data."""
        current_time = DbFlags.get_update_time()
        current_entries = cls.query.filter(cls.date == current_time).all()
        old_time = cls.nearest_last_entry(current_time - period)
        old_entries = cls.query.filter(cls.date == old_time).all()
        current_downloads = cls.__count_downloads(current_entries)
        old_downloads = cls.__count_downloads(old_entries)
        return current_downloads - old_downloads
| from app import db
import datetime
class Package(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), unique=True, nullable=False)
author = db.Column(db.String(50))
link = db.Column(db.String(140))
description = db.Column(db.String())
downloads = db.relationship('Downloads', backref='package', lazy='dynamic')
def __repr__(self):
return 'Package: %s' % self.name
@classmethod
def get_count(self):
return Package.query.count()
class DbFlags(db.Model):
id = db.Column(db.Integer, primary_key=True)
date = db.Column(db.DateTime, default=datetime.date.today, nullable=False)
flag = db.Column(db.Boolean, nullable=False)
def __repr__(self):
return 'DbFlags: {} {}'.format(self.date, self.flag)
@classmethod
def get_update_time(self):
return self.query.filter(self.id == 1).first().date
class Downloads(db.Model):
id = db.Column(db.Integer, primary_key=True)
downloads = db.Column(db.Integer, nullable=False)
date = db.Column(db.DateTime, default=datetime.date.today, nullable=False)
package_id = db.Column(db.Integer, db.ForeignKey('package.id'), nullable=False)
@classmethod
def nearest_last_entry(self, time):
while self.query.filter(self.date == time).count() <= 0:
time -= datetime.timedelta(days=1)
return time
@classmethod
def __count_downloads(self, entries):
count = 0
for entry in entries:
count += entry.downloads
return count
# period should be a datetime.timedelta
@classmethod
def get_downloads_count(self, period):
current_time = DbFlags.get_update_time()
current_entries = self.query.filter(self.date == current_time).all()
old_time = self.nearest_last_entry(current_time - period)
old_entries = self.query.filter(self.date == old_time).all()
current_downloads = self.__count_downloads(current_entries)
old_downloads = self.__count_downloads(old_entries)
print current_downloads, old_downloads
return current_downloads - old_downloads
| bsd-2-clause | Python |
03b2b06e90f34e67fbda1172eab2c7de6dc8246a | Update time grain expressions for Spark >= 3.x (#18690) | airbnb/caravel,zhouyao1994/incubator-superset,airbnb/caravel,airbnb/caravel,airbnb/caravel,zhouyao1994/incubator-superset,zhouyao1994/incubator-superset,zhouyao1994/incubator-superset,zhouyao1994/incubator-superset | superset/db_engine_specs/databricks.py | superset/db_engine_specs/databricks.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from typing import Any, Dict, Optional
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.hive import HiveEngineSpec
# Time-grain SQL templates shared by both Databricks engine specs.
# Every grain uses Spark's date_trunc(); the last two entries are the
# week variants anchored to Saturday / Sunday epochs (1970-01-03 was a
# Saturday, 1969-12-28 a Sunday).
time_grain_expressions = {
    None: "{col}",
    "PT1S": "date_trunc('second', {col})",
    "PT1M": "date_trunc('minute', {col})",
    "PT1H": "date_trunc('hour', {col})",
    "P1D": "date_trunc('day', {col})",
    "P1W": "date_trunc('week', {col})",
    "P1M": "date_trunc('month', {col})",
    "P3M": "date_trunc('quarter', {col})",
    "P1Y": "date_trunc('year', {col})",
    "P1W/1970-01-03T00:00:00Z": (
        "date_trunc('week', {col} + interval '1 day') + interval '5 days'"
    ),
    "1969-12-28T00:00:00Z/P1W": (
        "date_trunc('week', {col} + interval '1 day') - interval '1 day'"
    ),
}
class DatabricksHiveEngineSpec(HiveEngineSpec):
    """Engine spec for Databricks interactive clusters (Hive via PyHive)."""

    engine = "databricks"
    engine_name = "Databricks Interactive Cluster"
    driver = "pyhive"
    # SHOW FUNCTIONS on Databricks names its result column "function"
    _show_functions_column = "function"
    # use the module-level Spark date_trunc templates
    _time_grain_expressions = time_grain_expressions
class DatabricksODBCEngineSpec(BaseEngineSpec):
    """Engine spec for Databricks SQL Endpoints over ODBC.

    The SQL dialect matches the Hive-based spec, so datetime handling is
    delegated to HiveEngineSpec and the same time-grain templates are used.
    """

    engine = "databricks"
    engine_name = "Databricks SQL Endpoint"
    driver = "pyodbc"
    # same dialect as the interactive-cluster spec: share the templates
    _time_grain_expressions = time_grain_expressions

    @classmethod
    def convert_dttm(
        cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
    ) -> Optional[str]:
        """Delegate datetime-literal conversion to the Hive engine spec."""
        return HiveEngineSpec.convert_dttm(target_type, dttm, db_extra=db_extra)

    @classmethod
    def epoch_to_dttm(cls) -> str:
        """Delegate epoch-to-datetime SQL to the Hive engine spec."""
        return HiveEngineSpec.epoch_to_dttm()
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.o
from datetime import datetime
from typing import Any, Dict, Optional
from superset.db_engine_specs.base import BaseEngineSpec
from superset.db_engine_specs.hive import HiveEngineSpec
class DatabricksHiveEngineSpec(HiveEngineSpec):
engine = "databricks"
engine_name = "Databricks Interactive Cluster"
driver = "pyhive"
_show_functions_column = "function"
class DatabricksODBCEngineSpec(BaseEngineSpec):
engine = "databricks"
engine_name = "Databricks SQL Endpoint"
driver = "pyodbc"
# the syntax for the ODBC engine is identical to the Hive one, so
# we can reuse the expressions from `HiveEngineSpec`
# pylint: disable=protected-access
_time_grain_expressions = HiveEngineSpec._time_grain_expressions
@classmethod
def convert_dttm(
cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
) -> Optional[str]:
return HiveEngineSpec.convert_dttm(target_type, dttm, db_extra=db_extra)
@classmethod
def epoch_to_dttm(cls) -> str:
return HiveEngineSpec.epoch_to_dttm()
| apache-2.0 | Python |
ed9294c7ab0abf574f076464274d83f1e39b53cd | Handle and log exceptions. Render Response objects. | funkybob/paws | paws/handler.py | paws/handler.py | from .request import Request
from .response import response, Response
import logging
log = logging.getLogger()
class Handler(object):
    '''
    Simple dispatcher class for AWS Lambda / API Gateway events.

    Dispatches to a method named after the (lower-cased) HTTP method,
    returning a 405 response when no such method exists, and a 500
    response (with a logged traceback) when the handler raises.

    Bugs fixed: ``__call__`` and ``__str__`` referenced ``self.event``
    (never assigned -- AttributeError on every request) and the
    misspelled ``self.requeste``.
    '''
    def __init__(self, event, context):
        self.request = Request(event, context)

    def __call__(self, event, context):
        # pick the handler method by HTTP verb, e.g. get()/post()
        func = getattr(self, event['httpMethod'].lower(), self.invalid)
        try:
            # NOTE(review): pathParameters is typically a dict here, so the
            # star-unpack passes its keys -- confirm intended call shape
            resp = func(self.request, *event['pathParameters'])
        except Exception:
            import traceback
            log.error(self)
            log.error(traceback.format_exc())
            return response(body='Internal server Error', status=500)
        if isinstance(resp, Response):
            resp = resp.render()
        return resp

    def __str__(self):
        return "<Request: {%s} %s (%r)" % (
            self.request.method,
            self.request.path,
            self.request.params,
        )

    def invalid(self, *args):
        # XXX Build list of valid methods?
        return response(status=405)
| from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| bsd-3-clause | Python |
8996a5a28cf23f58d48549fc0cfc3b4a8464a3a5 | Change version to 0.7.6-git | cccfran/sympy,abloomston/sympy,rahuldan/sympy,aktech/sympy,wanglongqi/sympy,kevalds51/sympy,vipulroxx/sympy,farhaanbukhsh/sympy,postvakje/sympy,wanglongqi/sympy,jerli/sympy,atreyv/sympy,kaushik94/sympy,wyom/sympy,jbbskinny/sympy,sahmed95/sympy,Mitchkoens/sympy,yukoba/sympy,yashsharan/sympy,meghana1995/sympy,saurabhjn76/sympy,emon10005/sympy,mafiya69/sympy,cswiercz/sympy,wanglongqi/sympy,pbrady/sympy,Shaswat27/sympy,skidzo/sympy,Shaswat27/sympy,AkademieOlympia/sympy,maniteja123/sympy,AunShiLord/sympy,sahmed95/sympy,pandeyadarsh/sympy,cswiercz/sympy,garvitr/sympy,AkademieOlympia/sympy,sampadsaha5/sympy,moble/sympy,cccfran/sympy,Sumith1896/sympy,beni55/sympy,Curious72/sympy,asm666/sympy,shikil/sympy,debugger22/sympy,toolforger/sympy,Vishluck/sympy,liangjiaxing/sympy,Titan-C/sympy,skidzo/sympy,Vishluck/sympy,saurabhjn76/sympy,hargup/sympy,chaffra/sympy,atsao72/sympy,kaushik94/sympy,iamutkarshtiwari/sympy,beni55/sympy,jamesblunt/sympy,AunShiLord/sympy,vipulroxx/sympy,VaibhavAgarwalVA/sympy,lindsayad/sympy,jaimahajan1997/sympy,sahmed95/sympy,shipci/sympy,meghana1995/sympy,chaffra/sympy,jamesblunt/sympy,drufat/sympy,garvitr/sympy,ahhda/sympy,atsao72/sympy,mcdaniel67/sympy,garvitr/sympy,MridulS/sympy,rahuldan/sympy,abloomston/sympy,oliverlee/sympy,Titan-C/sympy,Vishluck/sympy,jerli/sympy,jaimahajan1997/sympy,bukzor/sympy,kevalds51/sympy,pbrady/sympy,Gadal/sympy,MechCoder/sympy,abhiii5459/sympy,souravsingh/sympy,abhiii5459/sympy,ahhda/sympy,oliverlee/sympy,atreyv/sympy,vipulroxx/sympy,diofant/diofant,farhaanbukhsh/sympy,skidzo/sympy,mafiya69/sympy,shipci/sympy,kaichogami/sympy,VaibhavAgarwalVA/sympy,sahilshekhawat/sympy,drufat/sympy,Arafatk/sympy,sahilshekhawat/sympy,kumarkrishna/sympy,Shaswat27/sympy,mafiya69/sympy,sampadsaha5/sympy,abloomston/sympy,kaichogami/sympy,moble/sympy,hargup/sympy,kumarkrishna/sympy,souravsingh/sympy,wyom/sympy,grevutiu-gabriel/sympy,farhaanbukhsh/sympy,grevutiu-gabriel/sympy
,Designist/sympy,jerli/sympy,sahilshekhawat/sympy,MechCoder/sympy,jbbskinny/sympy,postvakje/sympy,liangjiaxing/sympy,Davidjohnwilson/sympy,toolforger/sympy,Arafatk/sympy,Arafatk/sympy,Mitchkoens/sympy,ahhda/sympy,toolforger/sympy,ChristinaZografou/sympy,debugger22/sympy,Designist/sympy,madan96/sympy,asm666/sympy,jaimahajan1997/sympy,hargup/sympy,MridulS/sympy,aktech/sympy,souravsingh/sympy,skirpichev/omg,pbrady/sympy,cswiercz/sympy,VaibhavAgarwalVA/sympy,aktech/sympy,cccfran/sympy,emon10005/sympy,yashsharan/sympy,ChristinaZografou/sympy,kaushik94/sympy,jbbskinny/sympy,liangjiaxing/sympy,chaffra/sympy,yashsharan/sympy,kaichogami/sympy,wyom/sympy,Davidjohnwilson/sympy,saurabhjn76/sympy,AunShiLord/sympy,Designist/sympy,MridulS/sympy,postvakje/sympy,shipci/sympy,AkademieOlympia/sympy,rahuldan/sympy,pandeyadarsh/sympy,debugger22/sympy,bukzor/sympy,yukoba/sympy,ga7g08/sympy,atsao72/sympy,shikil/sympy,iamutkarshtiwari/sympy,maniteja123/sympy,kumarkrishna/sympy,kevalds51/sympy,emon10005/sympy,Davidjohnwilson/sympy,lindsayad/sympy,Gadal/sympy,shikil/sympy,Titan-C/sympy,sunny94/temp,Gadal/sympy,oliverlee/sympy,Sumith1896/sympy,sampadsaha5/sympy,madan96/sympy,madan96/sympy,mcdaniel67/sympy,Sumith1896/sympy,sunny94/temp,lindsayad/sympy,grevutiu-gabriel/sympy,iamutkarshtiwari/sympy,bukzor/sympy,beni55/sympy,jamesblunt/sympy,pandeyadarsh/sympy,ga7g08/sympy,Curious72/sympy,moble/sympy,sunny94/temp,maniteja123/sympy,drufat/sympy,meghana1995/sympy,mcdaniel67/sympy,atreyv/sympy,Mitchkoens/sympy,yukoba/sympy,ga7g08/sympy,ChristinaZografou/sympy,Curious72/sympy,asm666/sympy,abhiii5459/sympy,MechCoder/sympy | sympy/release.py | sympy/release.py | __version__ = "0.7.6-git"
| __version__ = "0.7.5-git"
| bsd-3-clause | Python |
923a1697e0fa06de4a712e8cb3d344a24b53c789 | add sync_date to celery tasks | pythondigest/pythondigest,pythondigest/pythondigest,pythondigest/pythondigest | syncrss/tasks.py | syncrss/tasks.py | from celery.task import task
from django.db import transaction
from datetime import datetime
from time import mktime
import feedparser
from .models import ResourceRSS, RawItem
@task
def update_rss():
for resource in ResourceRSS.objects.filter(status=True):
try:
data =feedparser.parse(resource.link)
updated_date = datetime.fromtimestamp(mktime(data.feed.updated_parsed))
if resource.sync_date < updated_date:
with transaction.commit_on_success():
for item in data.entries:
print item.title
entry = RawItem(
title=item.title,
description=item.title,
link=item.link,
related_to_date=datetime.fromtimestamp(\
mktime(item.updated_parsed)),
)
if entry and (not resource.sync_date or \
entry.related_to_date > resource.sync_date):
entry.save()
resource.sync_date = updated_date
resource.save()
except Exception as e:
print ('sync failes: %s' % e)
| from celery.task import task
from django.db import transaction
from datetime import datetime
from time import mktime
import feedparser
from .models import ResourceRSS, RawItem
@task
def update_rss():
for rec in ResourceRSS.objects.filter(status=True):
print rec.link
try:
data =feedparser.parse(rec.link)
#updated_date = datetime.fromtimestamp(mktime(data.feed.updated_parsed))
#sync_date = rec.sync_date(updated_date)
with transaction.commit_on_success():
for item in data.entries:
print item.title
entry = RawItem(
title=item.title,
description=item.title,
link=item.link,
related_to_date=datetime.fromtimestamp(\
mktime(item.updated_parsed)),
)
entry.save()
except Exception as e:
print ('sync failes: %s' % e)
| mit | Python |
d8343fcd6cc8d3d5dc39b1305096d8768ca08c85 | Add 'roll' to dice.py's commands | Jeebeevee/DouweBot_JJ15,SophosBlitz/glacon,craisins/wh2kbot,parkrrr/skybot,andyeff/skybot,rmmh/skybot,df-5/skybot,olslash/skybot,craisins/nascarbot,Jeebeevee/DouweBot,Teino1978-Corp/Teino1978-Corp-skybot,TeamPeggle/ppp-helpdesk,cmarguel/skybot,elitan/mybot,isislab/botbot,callumhogsden/ausbot,jmgao/skybot,ddwo/nhl-bot,crisisking/skybot | plugins/dice.py | plugins/dice.py | """
dice.py: written by Scaevolus 2008, updated 2009
simulates dicerolls
"""
import re
import random
from util import hook
whitespace_re = re.compile(r'\s+')  # strips all whitespace from the input
# validates a whole roll spec: optional leading sign, then dice terms
# ("XdY", "dY") or integer constants joined by + or -
valid_diceroll_re = re.compile(r'^[+-]?(\d+|\d*d\d+)([+-](\d+|\d*d\d+))*$',
                               re.I)
sign_re = re.compile(r'[+-]?(?:\d*d)?\d+', re.I)  # splits a spec into signed terms
split_re = re.compile(r'([\d+-]*)d?(\d*)', re.I)  # splits a term into (count, sides)
def nrolls(count, n):
    """Roll an n-sided die `count` times and return the total.

    For fewer than 5000 rolls the dice are rolled individually; larger
    counts are approximated by sampling a normal distribution with the
    matching mean and standard deviation.
    """
    if n < 2:
        # a 1-sided (or smaller) die is treated as a coin: 0 or 1 per flip
        if count < 5000:
            total = 0
            for _ in xrange(count):
                total += random.randint(0, 1)
            return total
        return int(random.normalvariate(.5 * count, (.75 * count) ** .5))

    if count < 5000:
        total = 0
        for _ in xrange(count):
            total += random.randint(1, n)
        return total

    mean = .5 * (1 + n) * count
    variance = ((n + 1) * (2 * n + 1) / 6. - (.5 * (1 + n)) ** 2) * count
    return int(random.normalvariate(mean, variance ** .5))
@hook.command
@hook.command('roll')
def dice(inp):
    ".dice <diceroll> -- simulates dicerolls, e.g. .dice 2d20-d5+4 roll 2 " \
    "D20s, subtract 1D5, add 4"
    if not inp:
        return dice.__doc__

    # strip all whitespace, then validate the whole spec up front
    spec = whitespace_re.sub('', inp)
    if not valid_diceroll_re.match(spec):
        return "Invalid diceroll"

    total = 0  # renamed from `sum`, which shadowed the builtin
    for roll in sign_re.findall(spec):
        count, side = split_re.match(roll).groups()
        if side == "":
            # plain signed constant term (e.g. "+4"): no dice involved
            total += int(count)
        else:
            # bare "d20" means one die; "-d5" means subtract one die.
            # Bug fixed: the old `count not in " +-"` test mapped "-" to +1,
            # so "-d5" *added* a d5, contradicting the help text above.
            if count in ("", "+"):
                count = 1
            elif count == "-":
                count = -1
            else:
                count = int(count)
            side = int(side)
            try:
                if count > 0:
                    total += nrolls(count, side)
                else:
                    total -= nrolls(abs(count), side)
            except OverflowError:
                # the normal-approximation path in nrolls can overflow floats
                return "Thanks for overflowing a float, jerk >:["
    return str(total)
| """
dice.py: written by Scaevolus 2008, updated 2009
simulates dicerolls
"""
import re
import random
from util import hook
whitespace_re = re.compile(r'\s+')
valid_diceroll_re = re.compile(r'^[+-]?(\d+|\d*d\d+)([+-](\d+|\d*d\d+))*$',
re.I)
sign_re = re.compile(r'[+-]?(?:\d*d)?\d+', re.I)
split_re = re.compile(r'([\d+-]*)d?(\d*)', re.I)
def nrolls(count, n):
"roll an n-sided die count times"
if n < 2: #it's a coin
if count < 5000:
return sum(random.randint(0, 1) for x in xrange(count))
else: #fake it
return int(random.normalvariate(.5*count, (.75*count)**.5))
else:
if count < 5000:
return sum(random.randint(1, n) for x in xrange(count))
else: #fake it
return int(random.normalvariate(.5*(1+n)*count,
(((n+1)*(2*n+1)/6.-(.5*(1+n))**2)*count)**.5))
@hook.command
def dice(inp):
".dice <diceroll> -- simulates dicerolls, e.g. .dice 2d20-d5+4 roll 2 " \
"D20s, subtract 1D5, add 4"
if not inp:
return dice.__doc__
spec = whitespace_re.sub('', inp)
if not valid_diceroll_re.match(spec):
return "Invalid diceroll"
sum = 0
groups = sign_re.findall(spec)
for roll in groups:
count, side = split_re.match(roll).groups()
if side == "":
sum += int(count)
else:
count = int(count) if count not in" +-" else 1
side = int(side)
try:
if count > 0:
sum += nrolls(count, side)
else:
sum -= nrolls(abs(count), side)
except OverflowError:
return "Thanks for overflowing a float, jerk >:["
return str(sum)
| unlicense | Python |
0ef2bb1379c62f32076d972738f6f3fc9f9c70b7 | fix tests | norayr/unisubs,ofer43211/unisubs,ReachingOut/unisubs,ofer43211/unisubs,pculture/unisubs,wevoice/wesub,ReachingOut/unisubs,ujdhesa/unisubs,ReachingOut/unisubs,eloquence/unisubs,wevoice/wesub,ujdhesa/unisubs,ofer43211/unisubs,eloquence/unisubs,ofer43211/unisubs,pculture/unisubs,eloquence/unisubs,ReachingOut/unisubs,eloquence/unisubs,ujdhesa/unisubs,norayr/unisubs,ujdhesa/unisubs,pculture/unisubs,pculture/unisubs,norayr/unisubs,wevoice/wesub,wevoice/wesub,norayr/unisubs | apps/profiles/tests.py | apps/profiles/tests.py | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.test import TestCase
from auth.models import CustomUser as User
from django.core.urlresolvers import reverse
class TestViews(TestCase):
fixtures = ['test.json']
def _simple_test(self, url_name, args=None, kwargs=None, status=200, data={}):
response = self.client.get(reverse(url_name, args=args, kwargs=kwargs), data)
self.assertEqual(response.status_code, status)
return response
def _login(self):
self.client.login(**self.auth)
def setUp(self):
self.auth = dict(username='admin', password='admin')
self.user = User.objects.get(username=self.auth['username'])
def test_edit_profile(self):
self._simple_test('profiles:edit', status=302)
self._login()
self._simple_test('profiles:edit')
data = {
'username': 'new_username_for_admin',
'email': self.user.email,
'userlanguage_set-TOTAL_FORMS': '0',
'userlanguage_set-INITIAL_FORMS': '0',
'userlanguage_set-MAX_NUM_FORMS': ''
}
response = self.client.post(reverse('profiles:edit'), data=data)
self.assertEqual(response.status_code, 302)
user = User.objects.get(pk=self.user.pk)
self.assertEqual(user.username, data['username'])
other_user = User.objects.exclude(pk=self.user.pk)[:1].get()
data['username'] = other_user.username
response = self.client.post(reverse('profiles:edit'), data=data)
self.assertEqual(response.status_code, 200) | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.test import TestCase
from auth.models import CustomUser as User
from django.core.urlresolvers import reverse
class TestViews(TestCase):
fixtures = ['test.json']
def _simple_test(self, url_name, args=None, kwargs=None, status=200, data={}):
response = self.client.get(reverse(url_name, args=args, kwargs=kwargs), data)
self.assertEqual(response.status_code, status)
return response
def _login(self):
self.client.login(**self.auth)
def setUp(self):
self.auth = dict(username='admin', password='admin')
self.user = User.objects.get(username=self.auth['username'])
def test_edit_profile(self):
self._simple_test('profiles:edit', status=302)
self._login()
self._simple_test('profiles:edit')
data = {
'username': 'admin1',
'email': self.user.email,
'userlanguage_set-TOTAL_FORMS': '0',
'userlanguage_set-INITIAL_FORMS': '0',
'userlanguage_set-MAX_NUM_FORMS': ''
}
response = self.client.post(reverse('profiles:edit'), data=data)
self.assertEqual(response.status_code, 302)
user = User.objects.get(pk=self.user.pk)
self.assertEqual(user.username, data['username'])
other_user = User.objects.exclude(pk=self.user.pk)[:1].get()
data['username'] = other_user.username
response = self.client.post(reverse('profiles:edit'), data=data)
self.assertEqual(response.status_code, 200) | agpl-3.0 | Python |
285592f6d420ab57f8d183f4068e974ee4ea3d65 | Add tests for hash plugin | tomleese/smartbot,Muzer/smartbot,Cyanogenoid/smartbot,thomasleese/smartbot-old | plugins/hash.py | plugins/hash.py | import io
import hashlib
import unittest
class Plugin:
    """Bot command that digests text with a named hashlib algorithm."""

    def on_command(self, bot, msg, stdin, stdout, reply):
        """Hash the given text (or stdin when no text is given) and print
        the hex digest to *stdout*; print usage/errors otherwise."""
        args = msg["args"]
        if len(args) < 2:
            print(self.on_help(), file=stdout)
            return
        algorithm = args[1]
        text = " ".join(args[2:]) or stdin.read().strip()
        try:
            digest = hashlib.new(algorithm)
        except ValueError:
            # hashlib rejects unknown algorithm names with ValueError
            print("No such algorithm:", algorithm, file=stdout)
            return
        except TypeError:
            # non-string algorithm argument: treat as a usage error
            print(self.on_help(), file=stdout)
            return
        digest.update(bytes(text, "utf-8"))
        print(digest.hexdigest(), file=stdout)

    def on_help(self):
        """Return the usage string."""
        return "Usage: hash <algorithm> <contents>"
class Test(unittest.TestCase):
    """Unit tests for the hash Plugin."""

    def setUp(self):
        self.plugin = Plugin()

    def _digest(self, algorithm):
        """Run the command for *algorithm* on 'hello' and return stdout."""
        stdout = io.StringIO()
        self.plugin.on_command(None, {"args": [None, algorithm, "hello"]},
                               None, stdout, None)
        return stdout.getvalue().strip()

    def test_md5(self):
        self.assertEqual("5d41402abc4b2a76b9719d911017c592",
                         self._digest("md5"))

    def test_sha1(self):
        self.assertEqual("aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d",
                         self._digest("sha1"))

    def test_sha512(self):
        self.assertEqual("9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043",
                         self._digest("sha512"))

    def test_help(self):
        self.assertTrue(self.plugin.on_help())

    def test_no_args(self):
        # no algorithm at all, then a non-string algorithm: both print usage
        for args in ([None], [None, None]):
            stdout = io.StringIO()
            self.plugin.on_command(None, {"args": args}, stdout, stdout, None)
            self.assertEqual(self.plugin.on_help(), stdout.getvalue().strip())
| import hashlib
import sys
class Plugin:
def on_command(self, bot, msg):
if len(sys.argv) >= 2:
algorithm = sys.argv[1]
contents = " ".join(sys.argv[2:])
if not contents:
contents = sys.stdin.read().strip()
h = hashlib.new(algorithm)
h.update(bytes(contents, "utf-8"))
print(h.hexdigest())
else:
print(self.on_help())
def on_help(self):
return "Usage: hash <algorithm> <contents>"
| mit | Python |
48a4c7f1cc801523bd8305269c115edbed7e18cb | fix bug in weibo.py | N402/NoahsArk,N402/NoahsArk | ark/exts/oauth/weibo.py | ark/exts/oauth/weibo.py | import os
from flask import session
from ark.exts import oauth2
# Remote-app handle for Sina Weibo OAuth2; the consumer credentials come
# from the environment so they are never committed to the repository.
weibo_oauth = oauth2.remote_app(
    'weibo',
    consumer_key=os.environ['ARK_WEIBO_CONSUMER_KEY'],
    consumer_secret=os.environ['ARK_WEIBO_CONSUMER_SECRET'],
    request_token_params={'scope': 'email,statuses_to_me_read'},
    base_url='https://api.weibo.com/2/',
    authorize_url='https://api.weibo.com/oauth2/authorize',
    request_token_url=None,
    access_token_method='POST',
    access_token_url='https://api.weibo.com/oauth2/access_token',
    content_type='application/json',
)
@weibo_oauth.tokengetter
def get_weibo_oauth_token():
    """Return the cached Weibo OAuth token from the Flask session.

    NOTE(review): the session key 'token_token' looks like a typo for
    'weibo_token' -- confirm against wherever the token is stored after
    the OAuth callback.
    """
    return session.get('token_token')
def change_weibo_header(uri, headers, body):
    """Rewrite the Authorization scheme from 'Bearer' to 'OAuth2' in place.

    Weibo's API expects an 'OAuth2 <token>' header instead of the standard
    'Bearer <token>' that the oauth client emits; *headers* is mutated and
    the (uri, headers, body) triple is returned unchanged otherwise.
    """
    token_header = headers.get('Authorization')
    if token_header:
        headers['Authorization'] = token_header.replace('Bearer', 'OAuth2')
    return uri, headers, body
weibo_oauth.pre_request = change_weibo_header
| import os
from flask import session
from ark.exts import oauth2
weibo_oauth = oauth2.remote_app(
'weibo',
consumer_key=os.environ['ARK_WEIBO_CONSUMER_KEY'],
consumer_secret=os.environ['ARK_WEIBO_CONSUMER_SECRET'],
request_token_params={'scope': 'email,statuses_to_me_read'},
base_url='https://api.weibo.com/2/',
authorize_url='https://api.weibo.com/oauth2/authorize',
request_token_url=None,
access_token_method='POST',
access_token_url='https://api.weibo.com/oauth2/access_token',
content_type='application/json',
)
@weibo_oauth.tokengetter
def get_weibo_oauth_token():
return session.get('weibo_token')
def change_weibo_header(uri, headers, body):
auth = headers.get('Authorization')
if auth:
auth = auth.replace('Bearer', 'OAuth2')
headers['Authorization'] = auth
return uri, headers, body
weibo_oauth.pre_request = change_weibo_header
| mit | Python |
40299b2f5f23c63349a651725410f108ebaa7571 | Revert of Make telemetry_unittests.py work on Windows (patchset #1 id:1 of https://codereview.chromium.org/647103003/) | fujunwei/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,Chilledheart/chromium,Just-D/chromium-1,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.s
rc,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,ltilve/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromi
um-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Just-D/chromium-1,Chilledheart/chromium,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,M4sse/chromium.src,dednal/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,dednal/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk | testing/scripts/telemetry_unittests.py | testing/scripts/telemetry_unittests.py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import sys
import common
def main_run(args):
filter_tests = []
if args.filter_file:
filter_tests = json.load(args.filter_file)
with common.temporary_file() as tempfile_path:
rc = common.run_command([
os.path.join(args.paths['build'], 'scripts', 'tools', 'runit.py'),
'--show-path',
os.path.join(args.paths['build'], 'scripts', 'slave', 'runtest.py'),
'--target', args.build_config_fs,
'--xvfb',
'--annotate', 'gtest',
'--test-type', 'telemetry_unittests',
'--builder-name', args.properties['buildername'],
'--slave-name', args.properties['slavename'],
'--build-number', str(args.properties['buildnumber']),
'--run-python-script',
os.path.join(common.SRC_DIR, 'tools', 'telemetry', 'run_tests'),
'--browser', args.build_config_fs.lower(),
'--retry-limit', '3',
'--write-full-results-to', tempfile_path,
] + filter_tests)
with open(tempfile_path) as f:
results = json.load(f)
parsed_results = common.parse_common_test_results(results)
failures = parsed_results['unexpected_failures']
json.dump({
'valid': bool(rc <= common.MAX_FAILURES_EXIT_STATUS and
((rc == 0) or failures)),
'failures': failures.keys(),
}, args.output)
return rc
def main_compile_targets(args):
json.dump(['chrome'], args.output)
if __name__ == '__main__':
funcs = {
'run': main_run,
'compile_targets': main_compile_targets,
}
sys.exit(common.run_script(sys.argv[1:], funcs))
| #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import sys
import common
def main_run(args):
filter_tests = []
if args.filter_file:
filter_tests = json.load(args.filter_file)
with common.temporary_file() as tempfile_path:
rc = common.run_command([
sys.executable,
os.path.join(args.paths['build'], 'scripts', 'tools', 'runit.py'),
'--show-path',
os.path.join(args.paths['build'], 'scripts', 'slave', 'runtest.py'),
'--target', args.build_config_fs,
'--xvfb',
'--annotate', 'gtest',
'--test-type', 'telemetry_unittests',
'--builder-name', args.properties['buildername'],
'--slave-name', args.properties['slavename'],
'--build-number', str(args.properties['buildnumber']),
'--run-python-script',
os.path.join(common.SRC_DIR, 'tools', 'telemetry', 'run_tests'),
'--browser', args.build_config_fs.lower(),
'--retry-limit', '3',
'--write-full-results-to', tempfile_path,
] + filter_tests)
with open(tempfile_path) as f:
results = json.load(f)
parsed_results = common.parse_common_test_results(results)
failures = parsed_results['unexpected_failures']
json.dump({
'valid': bool(rc <= common.MAX_FAILURES_EXIT_STATUS and
((rc == 0) or failures)),
'failures': failures.keys(),
}, args.output)
return rc
def main_compile_targets(args):
json.dump(['chrome'], args.output)
if __name__ == '__main__':
funcs = {
'run': main_run,
'compile_targets': main_compile_targets,
}
sys.exit(common.run_script(sys.argv[1:], funcs))
| bsd-3-clause | Python |
4d7303b5325bf07395c4822a752fcaffad511ae8 | Remove function hg_version(), was not used | rolandgeider/wger,DeveloperMal/wger,petervanderdoes/wger,rolandgeider/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark,wger-project/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,rolandgeider/wger,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,rolandgeider/wger | wger/workout_manager/__init__.py | wger/workout_manager/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: 2011, 2012 by OpenSlides team, see AUTHORS.
:license: GNU GPL, see LICENSE for more details.
"""
VERSION = (1, 2, 0, 'alpha', 1)
RELEASE = False
def get_version(version=None, release=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
if release is None:
release = RELEASE
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
main_parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:main_parts])
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
sub = mapping[version[3]] + str(version[4])
else:
sub = ''
if not release:
sub += '-dev'
return main + sub | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: 2011, 2012 by OpenSlides team, see AUTHORS.
:license: GNU GPL, see LICENSE for more details.
"""
VERSION = (1, 2, 0, 'alpha', 1)
RELEASE = False
def get_version(version=None, release=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
if release is None:
release = RELEASE
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
main_parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:main_parts])
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
sub = mapping[version[3]] + str(version[4])
else:
sub = ''
if not release:
sub += '-dev'
return main + sub
def hg_version():
import socket
import os
from os.path import realpath, join, dirname
try:
from mercurial import ui as hgui
from mercurial.localrepo import localrepository
from mercurial.node import short as shorthex
from mercurial.error import RepoError
nomercurial = False
except ImportError:
return 'unknown'
os.environ['HGRCPATH'] = ''
conts = realpath(join(dirname(__file__)))
try:
ui = hgui.ui()
repository = localrepository(ui, join(conts, '..'))
ctx = repository['.']
if ctx.tags() and ctx.tags() != ['tip']:
version = ' '.join(ctx.tags())
else:
version = '%(num)s:%(id)s' % {
'num': ctx.rev(), 'id': shorthex(ctx.node())
}
except TypeError:
version = 'unknown'
except RepoError:
return 0
# This value defines the timeout for sockets in seconds. Per default python
# sockets do never timeout and as such we have blocking workers.
# Socket timeouts are set globally within the whole application.
# The value *must* be a floating point value.
socket.setdefaulttimeout(10.0)
return version
| agpl-3.0 | Python |
69d779feee29238fb2eaf7096cc748935e21ef3d | Add blank lines and fix indenting for flake8 | heynemann/pyvows,marcelometal/pyvows | tests/assertions/types/classes_vows.py | tests/assertions/types/classes_vows.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pyvows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann heynemann@gmail.com
from pyvows import Vows, expect
class SomeClass(object):
pass
class OtherClass(object):
pass
@Vows.batch
class AssertionIsInstance(Vows.Context):
def topic(self):
return SomeClass()
class WhenIsInstance(Vows.Context):
def we_get_an_instance_of_someclass(self, topic):
expect(topic).to_be_instance_of(SomeClass)
class WhenWeGetAnError(Vows.Context):
def topic(self, last):
expect(2).to_be_instance_of(str)
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of(
'Expected topic(2) to be an instance of {0!s}, but it was a {1!s}.'.format(str, int))
class WhenIsNotInstance(Vows.Context):
def we_do_not_get_an_instance_of_otherclass(self, topic):
expect(topic).Not.to_be_instance_of(OtherClass)
class WhenWeGetAnError(Vows.Context):
def topic(self, last):
expect(2).not_to_be_instance_of(int)
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of(
'Expected topic(2) not to be an instance of {0!s}.'.format(int))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pyvows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann heynemann@gmail.com
from pyvows import Vows, expect
class SomeClass(object): pass
class OtherClass(object): pass
@Vows.batch
class AssertionIsInstance(Vows.Context):
def topic(self):
return SomeClass()
class WhenIsInstance(Vows.Context):
def we_get_an_instance_of_someclass(self, topic):
expect(topic).to_be_instance_of(SomeClass)
class WhenWeGetAnError(Vows.Context):
def topic(self, last):
expect(2).to_be_instance_of(str)
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of(
'Expected topic(2) to be an instance of {0!s}, but it was a {1!s}.'.format(str, int))
class WhenIsNotInstance(Vows.Context):
def we_do_not_get_an_instance_of_otherclass(self, topic):
expect(topic).Not.to_be_instance_of(OtherClass)
class WhenWeGetAnError(Vows.Context):
def topic(self, last):
expect(2).not_to_be_instance_of(int)
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of(
'Expected topic(2) not to be an instance of {0!s}.'.format(int))
| mit | Python |
b5afdd604831f985427880537d37eb7a35addaa1 | Fix test to cater for packages leaked into venv | pfmoore/pip,pypa/pip,sbidoul/pip,pfmoore/pip,pradyunsg/pip,sbidoul/pip,pypa/pip,pradyunsg/pip | tests/functional/test_python_option.py | tests/functional/test_python_option.py | import json
import os
from pathlib import Path
from venv import EnvBuilder
from tests.lib import PipTestEnvironment, TestData
def test_python_interpreter(
script: PipTestEnvironment,
tmpdir: Path,
shared_data: TestData,
) -> None:
env_path = os.fspath(tmpdir / "venv")
env = EnvBuilder(with_pip=False)
env.create(env_path)
result = script.pip("--python", env_path, "list", "--format=json")
before = json.loads(result.stdout)
# Ideally we would assert that before==[], but there's a problem in CI
# that means this isn't true. See https://github.com/pypa/pip/pull/11326
# for details.
script.pip(
"--python",
env_path,
"install",
"-f",
shared_data.find_links,
"--no-index",
"simplewheel==1.0",
)
result = script.pip("--python", env_path, "list", "--format=json")
installed = json.loads(result.stdout)
assert {"name": "simplewheel", "version": "1.0"} in installed
script.pip("--python", env_path, "uninstall", "simplewheel", "--yes")
result = script.pip("--python", env_path, "list", "--format=json")
assert json.loads(result.stdout) == before
| import json
import os
from pathlib import Path
from venv import EnvBuilder
from tests.lib import PipTestEnvironment, TestData
def test_python_interpreter(
script: PipTestEnvironment,
tmpdir: Path,
shared_data: TestData,
) -> None:
env_path = os.fsdecode(tmpdir / "venv")
env = EnvBuilder(with_pip=False)
env.create(env_path)
result = script.pip("--python", env_path, "list", "--format=json")
assert json.loads(result.stdout) == []
script.pip(
"--python",
env_path,
"install",
"-f",
shared_data.find_links,
"--no-index",
"simplewheel==1.0",
)
result = script.pip("--python", env_path, "list", "--format=json")
assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}]
script.pip("--python", env_path, "uninstall", "simplewheel", "--yes")
result = script.pip("--python", env_path, "list", "--format=json")
assert json.loads(result.stdout) == []
| mit | Python |
ee9f48c7c169876fad6e7e911a4ea0c459b2c232 | Make master 2.4.0 | PyCQA/astroid | astroid/__pkginfo__.py | astroid/__pkginfo__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""astroid packaging information"""
version = "2.4.0"
numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())
extras_require = {}
install_requires = [
"lazy_object_proxy==1.4.*",
"six==1.12",
"wrapt==1.11.*",
'typed-ast>=1.4.0,<1.5;implementation_name== "cpython" and python_version<"3.8"',
]
# pylint: disable=redefined-builtin; why license is a builtin anyway?
license = "LGPL"
author = "Python Code Quality Authority"
author_email = "code-quality@python.org"
mailinglist = "mailto://%s" % author_email
web = "https://github.com/PyCQA/astroid"
description = "An abstract syntax tree for Python with inference support."
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""astroid packaging information"""
version = "2.3.1"
numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())
extras_require = {}
install_requires = [
"lazy_object_proxy==1.4.*",
"six==1.12",
"wrapt==1.11.*",
'typed-ast>=1.4.0,<1.5;implementation_name== "cpython" and python_version<"3.8"',
]
# pylint: disable=redefined-builtin; why license is a builtin anyway?
license = "LGPL"
author = "Python Code Quality Authority"
author_email = "code-quality@python.org"
mailinglist = "mailto://%s" % author_email
web = "https://github.com/PyCQA/astroid"
description = "An abstract syntax tree for Python with inference support."
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
| lgpl-2.1 | Python |
7ef2ac94e5791b7e731996688525e4c2ef861cb3 | Remove shbang line from PRESUBMIT.py. | android-ia/platform_external_chromium_org_tools_grit,MIPS/external-chromium_org-tools-grit,geekboxzone/lollipop_external_chromium_org_tools_grit,xin3liang/platform_external_chromium_org_tools_grit,android-ia/platform_external_chromium_org_tools_grit,yinquan529/platform-external-chromium_org-tools-grit,Omegaphora/external_chromium_org_tools_grit,geekboxzone/lollipop_external_chromium_org_tools_grit,bpsinc-native/src_tools_grit,yinquan529/platform-external-chromium_org-tools-grit,IllusionRom-deprecated/android_platform_external_chromium_org_tools_grit,bpsinc-native/src_tools_grit,AOSPU/external_chromium_org_tools_grit,Omegaphora/external_chromium_org_tools_grit | PRESUBMIT.py | PRESUBMIT.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""grit unittests presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def RunUnittests(input_api, output_api):
return input_api.canned_checks.RunPythonUnitTests(input_api, output_api,
['grit.test_suite_all'])
def CheckChangeOnUpload(input_api, output_api):
return RunUnittests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return RunUnittests(input_api, output_api)
| #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""grit unittests presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def RunUnittests(input_api, output_api):
return input_api.canned_checks.RunPythonUnitTests(input_api, output_api,
['grit.test_suite_all'])
def CheckChangeOnUpload(input_api, output_api):
return RunUnittests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return RunUnittests(input_api, output_api)
| bsd-2-clause | Python |
051c14f869594ff9c7a970aa5c400c9db94f74fe | fix imports | 2nd47/CSC373_A2,2nd47/CSC373_A2 | RandomMST.py | RandomMST.py | from ass2 import create_graph
from random import random
import sys
if __name__ == '__main__':
# size can be between 30 - 50k
if len(sys.argv) < 2:
print('Please input a size argument!')
exit()
create_graph(int(sys.argv[1]), random, None)
#create_graph(int(sys.argv[1]), None, randomWithinCircle)
| from ass2 import create_graph
if __name__ == '__main__':
# size can be between 30 - 50k
if len(sys.argv) < 2:
print('Please input a size argument!')
exit()
create_graph(int(sys.argv[1]), random, None)
#create_graph(int(sys.argv[1]), None, randomWithinCircle)
| mit | Python |
4c986e7cedde18530745dca072e06659f1fb20a9 | Remove numpy.compat._pep440 from default imports | endolith/numpy,numpy/numpy,numpy/numpy,mattip/numpy,endolith/numpy,mattip/numpy,mhvk/numpy,charris/numpy,charris/numpy,mattip/numpy,endolith/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,mhvk/numpy,charris/numpy,charris/numpy,endolith/numpy,mhvk/numpy,mattip/numpy,numpy/numpy | numpy/compat/__init__.py | numpy/compat/__init__.py | """
Compatibility module.
This module contains duplicated code from Python itself or 3rd party
extensions, which may be included for the following reasons:
* compatibility
* we may only need a small subset of the copied library/module
"""
from . import _inspect
from . import py3k
from ._inspect import getargspec, formatargspec
from .py3k import *
__all__ = []
__all__.extend(_inspect.__all__)
__all__.extend(py3k.__all__)
| """
Compatibility module.
This module contains duplicated code from Python itself or 3rd party
extensions, which may be included for the following reasons:
* compatibility
* we may only need a small subset of the copied library/module
"""
from . import _inspect
from . import _pep440
from . import py3k
from ._inspect import getargspec, formatargspec
from .py3k import *
__all__ = []
__all__.extend(_inspect.__all__)
__all__.extend(py3k.__all__)
| bsd-3-clause | Python |
cc7966022ead516869ec8479d2071ece755fabfc | Use print function, display better paths | perimosocordiae/numpylint | numpylint/numpylinter.py | numpylint/numpylinter.py | #!/usr/bin/env python
from __future__ import print_function
import os.path
import tempfile
from argparse import ArgumentParser
from rope.base.project import Project
from rope.base import libutils
from rope.refactor.restructure import Restructure
from numpylint.lintbits import LINTBITS
def lint(filepath, proj, orig_path, opts):
mod = proj.get_file(filepath)
if not libutils.is_python_file(proj, mod):
if opts.verbose:
print('Skipping', orig_path, ' (not a Python file)')
return
if opts.verbose:
print('Linting', orig_path)
# These lines were suggested in the Rope docs, but don't seem to do much:
# proj.validate(mod)
# libutils.analyze_module(proj, mod)
for desc, bits in LINTBITS.iteritems():
changes = []
for pattern, goal, args in bits:
r = Restructure(proj, pattern, goal, args=args)
try:
c = r.get_changes(resources=(mod,))
if opts.overwrite:
proj.do(c)
except: # Assume it's some syntax rope can't handle.
break
changes.extend(c.changes)
if changes:
print('%s: %s' % (orig_path, desc))
if opts.verbose:
print('\n')
for c in changes:
print(c.get_description())
def main():
ap = ArgumentParser()
ap.add_argument('-v', '--verbose', action='store_true')
ap.add_argument('--overwrite', action='store_true')
ap.add_argument('files', metavar='FILE', nargs='+', help='File(s) to lint')
args = ap.parse_args()
tmpdir = tempfile.mkdtemp()
proj = Project(tmpdir, ropefolder=None)
for f in args.files:
lint(os.path.relpath(f, start=tmpdir), proj, f, args)
proj.close()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import os.path
import tempfile
from argparse import ArgumentParser
from rope.base.project import Project
from rope.base import libutils
from rope.refactor.restructure import Restructure
from numpylint.lintbits import LINTBITS
def lint(filepath, proj, orig_path, opts):
mod = proj.get_file(filepath)
if not libutils.is_python_file(proj, mod):
if opts.verbose:
print filepath, 'is not a Python file'
return
# These lines were suggested in the Rope docs, but don't seem to do much:
# proj.validate(mod)
# libutils.analyze_module(proj, mod)
for desc, bits in LINTBITS.iteritems():
changes = []
for pattern, goal, args in bits:
r = Restructure(proj, pattern, goal, args=args)
try:
c = r.get_changes(resources=(mod,))
if opts.overwrite:
proj.do(c)
except: # Assume it's some syntax rope can't handle.
break
changes.extend(c.changes)
if changes:
print '%s: %s' % (orig_path, desc)
if opts.verbose:
print '\n'
for c in changes:
print c.get_description()
def main():
ap = ArgumentParser()
ap.add_argument('-v', '--verbose', action='store_true')
ap.add_argument('--overwrite', action='store_true')
ap.add_argument('files', metavar='FILE', nargs='+', help='File(s) to lint')
args = ap.parse_args()
tmpdir = tempfile.mkdtemp()
proj = Project(tmpdir, ropefolder=None)
for f in args.files:
lint(os.path.relpath(f, start=tmpdir), proj, f, args)
proj.close()
if __name__ == '__main__':
main()
| mit | Python |
5722e1f04d549f27816f91fb76f2a91e01f8145b | Bump version to 0.0.4 | jbbarth/aws-status,jbbarth/aws-status | aws_status/__init__.py | aws_status/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .status_page import StatusPage
from .feed import Feed
__version__ = "0.0.4"
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .status_page import StatusPage
from .feed import Feed
__version__ = "0.0.3"
| mit | Python |
9608fff697e4692d8a76db2c1658850233059126 | fix fucking zero base | kz26/PyExcelerate | Worksheet.py | Worksheet.py | import Range
from DataTypes import DataTypes
class Worksheet(object):
def __init__(self, name, workbook, data=None):
self._columns = 0 # cache this for speed
self._name = name
self._cells = {}
self._parent = workbook
self._merges = [] # list of Range objects
self._attributes = {}
if data != None:
for x, row in enumerate(data, 1):
for y, cell in enumerate(row, 1):
if x not in self._cells:
self._cells[x] = {}
self._cells[x][y] = cell
self._columns = max(self._columns, y)
def __getitem__(self, key):
if key not in self._cells:
self._cells[key] = {}
return Range.Range((key, 1), (key, float('inf')), self) # return a row range
@property
def name(self):
return self._name
@property
def num_rows(self):
if len(self._cells) > 0:
return max(self._cells.keys())
else:
return 1
@property
def num_columns(self):
return max(1, self._columns)
def report_column(self, column):
# listener for column additions
self._columns = max(self._columns, column)
def add_merge(self, range):
for merge in self._merges:
if range.intersects(merge):
raise Exception("Invalid merge, intersects existing")
self._merges.append(range)
def get_cell_value(self, x, y):
type = DataTypes.get_type(self._cells[x][y])
if type == DataTypes.FORMULA:
# remove the equals sign
return self._cells[x][y][:1]
elif type == DataTypes.INLINE_STRING and self._cells[x][y][2:] == '\'=':
return self._cells[x][y][:1]
else:
return self._cells[x][y]
def set_cell_value(self, x, y, value):
self._cells[x][y] = value
@property
def workbook(self):
return self._parent
def get_xml_data(self):
# initialize the shared string hashtable
# self.shared_strings = SharedStrings.SharedStrings(self)
for row in self._cells.keys():
yield (row, Range.Range((row, 1), (row, float('inf')), self))
| import Range
from DataTypes import DataTypes
class Worksheet(object):
def __init__(self, name, workbook, data=None):
self._columns = 0 # cache this for speed
self._name = name
self._cells = {}
self._parent = workbook
self._merges = [] # list of Range objects
self._attributes = {}
if data != None:
for x, row in enumerate(data):
for y, cell in enumerate(row):
if x not in self._cells:
self._cells[x] = {}
self._cells[x][y] = cell
self._columns = max(self._columns, y)
def __getitem__(self, key):
if key not in self._cells:
self._cells[key] = {}
return Range.Range((key, 1), (key, float('inf')), self) # return a row range
@property
def name(self):
return self._name
@property
def num_rows(self):
if len(self._cells) > 0:
return max(self._cells.keys())
else:
return 1
@property
def num_columns(self):
return max(1, self._columns)
def report_column(self, column):
# listener for column additions
self._columns = max(self._columns, column)
def add_merge(self, range):
for merge in self._merges:
if range.intersects(merge):
raise Exception("Invalid merge, intersects existing")
self._merges.append(range)
def get_cell_value(self, x, y):
type = DataTypes.get_type(self._cells[x][y])
if type == DataTypes.FORMULA:
# remove the equals sign
return self._cells[x][y][:1]
elif type == DataTypes.INLINE_STRING and self._cells[x][y][2:] == '\'=':
return self._cells[x][y][:1]
else:
return self._cells[x][y]
def set_cell_value(self, x, y, value):
self._cells[x][y] = value
@property
def workbook(self):
return self._parent
def get_xml_data(self):
# initialize the shared string hashtable
# self.shared_strings = SharedStrings.SharedStrings(self)
for row in self._cells.keys():
yield (row, Range.Range((row, 1), (row, float('inf')), self))
| bsd-2-clause | Python |
9d2cab53fb1590d5c6f60b5f9e62140c11fb676f | Bump version to 0.2.0.dev | franga2000/django-machina,reinbach/django-machina,franga2000/django-machina,ellmetha/django-machina,franga2000/django-machina,ellmetha/django-machina,reinbach/django-machina,reinbach/django-machina,ellmetha/django-machina | machina/__init__.py | machina/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
__version__ = '0.2.0.dev'
MACHINA_VANILLA_APPS = [
'machina',
'machina.apps.forum',
'machina.apps.forum_conversation',
'machina.apps.forum_conversation.forum_attachments',
'machina.apps.forum_conversation.forum_polls',
'machina.apps.forum_feeds',
'machina.apps.forum_moderation',
'machina.apps.forum_search',
'machina.apps.forum_tracking',
'machina.apps.forum_member',
'machina.apps.forum_permission',
]
# Main Machina static directory
MACHINA_MAIN_STATIC_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'static/machina/build')
# Main Machina template directory
MACHINA_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/machina')
def get_apps(overrides=None):
"""
Returns a list of machina's apps. Any of these apps can be overriden with
custom apps specified in the 'overrides' list.
"""
if not overrides:
return MACHINA_VANILLA_APPS
def get_app_label(app_label):
pattern = app_label.replace('machina.apps.', '')
return next((o for o in overrides if o.endswith(pattern)), app_label)
return list(map(get_app_label, MACHINA_VANILLA_APPS))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
__version__ = '0.1.2.dev'
MACHINA_VANILLA_APPS = [
'machina',
'machina.apps.forum',
'machina.apps.forum_conversation',
'machina.apps.forum_conversation.forum_attachments',
'machina.apps.forum_conversation.forum_polls',
'machina.apps.forum_feeds',
'machina.apps.forum_moderation',
'machina.apps.forum_search',
'machina.apps.forum_tracking',
'machina.apps.forum_member',
'machina.apps.forum_permission',
]
# Main Machina static directory
MACHINA_MAIN_STATIC_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'static/machina/build')
# Main Machina template directory
MACHINA_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/machina')
def get_apps(overrides=None):
    """
    Returns a list of machina's apps. Any of these apps can be overriden with
    custom apps specified in the 'overrides' list.
    """
    # No overrides supplied: hand back the vanilla app list unchanged.
    if not overrides:
        return MACHINA_VANILLA_APPS

    def get_app_label(app_label):
        # Compare against the app path without the 'machina.apps.' prefix;
        # the first override whose dotted path ends with it wins, otherwise
        # the vanilla label is kept.
        pattern = app_label.replace('machina.apps.', '')
        return next((o for o in overrides if o.endswith(pattern)), app_label)

    return list(map(get_app_label, MACHINA_VANILLA_APPS))
| bsd-3-clause | Python |
62f663f77d1b13a01f0270b4d88ae5720427c91f | add licence disclaimer to error.py | ojengwa/paystack | paystack/error.py | paystack/error.py | # -*- coding: utf-8 -*-
"""
Paystack API wrapper.
@author Bernard Ojengwa.
Copyright (c) 2015, Bernard Ojengwa
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
class Error(Exception):
    """Root of the Paystack exception hierarchy."""


class APIError(Error):
    """The Paystack API reported a server-side failure."""


class APIConnectionError(Error):
    """The Paystack API could not be reached."""


class ValidationError(Error):
    """A request payload failed validation."""


class AuthorizationError(Error):
    """The supplied credentials were rejected."""


class InvalidRequestError(Error):
    """The request was malformed or referenced a missing resource."""
| # -*- coding: utf-8 -*-
class Error(Exception):
    """Base Error class."""
    pass


class APIError(Error):
    """Raised when the Paystack API returns an error response."""
    pass


class APIConnectionError(Error):
    """Raised when the Paystack API cannot be reached."""
    pass


class ValidationError(Error):
    """Raised when a request payload fails validation."""
    pass


class AuthorizationError(Error):
    """Raised when the supplied credentials are rejected."""
    pass


class InvalidRequestError(Error):
    """Raised for malformed requests or missing resources."""
    pass
| bsd-3-clause | Python |
8c71bc2408f79ee6187bf6238d910d37b5cee4dd | Extend BackupFactory with metadata | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/backup/tests/factories.py | nodeconductor/backup/tests/factories.py | import factory
from django.core.urlresolvers import reverse
from nodeconductor.backup import models
from nodeconductor.iaas import models as iaas_models
from nodeconductor.iaas.tests import factories as iaas_factories
from nodeconductor.structure.tests import factories as structure_factories
class BackupScheduleFactory(factory.DjangoModelFactory):
    # Builds nodeconductor BackupSchedule model instances for tests.
    class Meta(object):
        model = models.BackupSchedule

    # Each schedule is bound to a freshly created IaaS instance.
    backup_source = factory.SubFactory(iaas_factories.InstanceFactory)
    retention_time = 10
    is_active = True
    maximal_number_of_backups = 3
    schedule = '*/5 * * * *'  # cron expression: every five minutes
class BackupFactory(factory.DjangoModelFactory):
    """Builds Backup model instances, populating realistic restore metadata."""

    class Meta(object):
        model = models.Backup

    backup_schedule = factory.SubFactory(BackupScheduleFactory)
    # By default a backup targets the same instance as its schedule.
    backup_source = factory.LazyAttribute(lambda b: b.backup_schedule.backup_source)

    @factory.post_generation
    def metadata(self, create, extracted, **kwargs):
        """Populate Backup.metadata after creation.

        ``extracted`` (the value passed as ``metadata=...``) is merged on top
        of the generated defaults, so callers can override individual keys.
        """
        if not create:
            return
        self.metadata = {}
        cloud = self.backup_source.cloud_project_membership.cloud
        template = self.backup_source.template
        # check if image connecting template and cloud already exists, otherwise link them
        if not iaas_models.Image.objects.filter(cloud=cloud, template=template).exists():
            iaas_factories.ImageFactory(
                cloud=cloud,
                template=template,
            )
        self.metadata.update(
            {
                'cloud_project_membership': self.backup_source.cloud_project_membership.pk,
                'flavor': iaas_factories.FlavorFactory.get_url(iaas_factories.FlavorFactory(cloud=cloud)),
                'hostname': 'original.vm.hostname',
                'template': template.pk,
                'system_volume_id': self.backup_source.system_volume_id,
                'system_volume_size': self.backup_source.system_volume_size,
                'data_volume_id': self.backup_source.data_volume_id,
                'data_volume_size': self.backup_source.data_volume_size,
                'key_name': self.backup_source.key_name,
                # NOTE(review): fingerprint is copied from key_name -- looks
                # like a copy/paste slip; confirm against the Instance model
                # before switching to backup_source.key_fingerprint.
                'key_fingerprint': self.backup_source.key_name,
            }
        )
        if extracted:
            self.metadata.update(extracted)

    @classmethod
    def get_url(cls, backup):
        """Return the detail-endpoint URL for *backup*, creating one if None."""
        if backup is None:
            backup = cls()  # use cls so subclasses create their own model
        return 'http://testserver' + reverse('backup-detail', kwargs={'uuid': backup.uuid})

    @classmethod
    def get_list_url(cls):
        """Return the backup list-endpoint URL."""
        return 'http://testserver' + reverse('backup-list')
| import factory
from django.core.urlresolvers import reverse
from nodeconductor.backup import models
from nodeconductor.iaas.tests import factories as iaas_factories
class BackupScheduleFactory(factory.DjangoModelFactory):
    # Builds nodeconductor BackupSchedule model instances for tests.
    class Meta(object):
        model = models.BackupSchedule

    # Each schedule is bound to a freshly created IaaS instance.
    backup_source = factory.SubFactory(iaas_factories.InstanceFactory)
    retention_time = 10
    is_active = True
    maximal_number_of_backups = 3
    schedule = '*/5 * * * *'  # cron expression: every five minutes


class BackupFactory(factory.DjangoModelFactory):
    # Builds Backup model instances; the source defaults to the schedule's.
    class Meta(object):
        model = models.Backup

    backup_schedule = factory.SubFactory(BackupScheduleFactory)
    backup_source = factory.LazyAttribute(lambda b: b.backup_schedule.backup_source)

    @classmethod
    def get_url(self, backup):
        # Detail-endpoint URL for *backup* (a fresh one is created if None).
        # NOTE(review): classmethod's first argument is conventionally 'cls'.
        if backup is None:
            backup = BackupFactory()
        return 'http://testserver' + reverse('backup-detail', kwargs={'uuid': backup.uuid})

    @classmethod
    def get_list_url(self):
        # List-endpoint URL for backups.
        return 'http://testserver' + reverse('backup-list')
| mit | Python |
94ece9631f05cf5a845ea1a09e87370ab195582e | Fix some new bugs | jinwei908/BeeLogger,jinwei908/BeeLogger,4w4k3/BeeLogger,jinwei908/BeeLogger,4w4k3/BeeLogger,4w4k3/BeeLogger | checker.py | checker.py | # Copyright 2017 Insanity Framework (IF)
# Written by: * Alisson Moretto - 4w4k3
# https://github.com/4w4k3/Insanity-Framework
# Licensed under the BSD-3-Clause
import os
def banner(text, char="*"):
    """Print *text* inside a three-line border drawn with *char*.

    BUG FIX: the border previously appended a literal "****" regardless of
    *char*, so any non-default border character produced a mixed border.
    The border is now sized to the framed middle line (text plus one border
    character and one space on each side); output is unchanged for the
    default char.
    """
    border = char * (len(text) + 4)
    print(border)
    print(char + " " + text + " " + char)
    print(border)
def install_dependencies():
    """Install wine (incl. 32-bit) and helpers via apt, then run winecfg.

    Requires network access and root via sudo; pauses for the user before
    launching the wine configurator.
    """
    setup_commands = (
        'apt-get install sudo',
        'sudo dpkg --add-architecture i386',
        'sudo apt-get install wget -y',
        'sudo apt-get update && sudo apt-get install wine -y',
        'sudo apt-get dist-upgrade -yy && apt-get upgrade -yy',
        'sudo apt-get install wine32 -y',
        'sudo apt-get -f install',
        'sudo apt-get install wine32 -y',  # retried after `apt-get -f install`
        'clear',
    )
    for command in setup_commands:
        os.system(command)
    banner("Press enter to default Winecfg to Windows 7")
    raw_input()  # wait for the user before launching the configurator
    os.system('winecfg')
    os.system('clear')
def download_python():
    """Fetch the CPython 2.7.12 Windows installer and run it under wine."""
    banner("Downloading Python 2.7.x.msi, please wait...")
    installer_url = 'https://www.python.org/ftp/python/2.7.12/python-2.7.12.msi'
    os.system('wget ' + installer_url)
    # /L*v writes a verbose MSI install log to log.txt.
    os.system('sudo wine msiexec /i python-2.7.12.msi /L*v log.txt')
def download_python_win_exten():
    """ Download Windows extenstion for python without checking the checksum.. """
    banner("Installing pywin32-220.win32-py2.7.exe (Windows extension), please wait...")
    os.system('sudo wine pywin32-220.win32-py2.7.exe')
    os.system('sudo wine pyHook-1.5.1.win32-py2.7.exe')
    # NOTE(review): pywin32 is run a second time after pyHook -- presumably a
    # deliberate re-install workaround; confirm before removing.
    os.system('sudo wine pywin32-220.win32-py2.7.exe')
    # Install pyinstaller into the wine-hosted Python via its own pip.
    os.system('sudo wine /root/.wine/drive_c/Python27/python.exe /root/.wine/drive_c/Python27/Scripts/pip.exe install pyinstaller')
    os.system('clear')
def download_vc_for_py():
    """Install Microsoft Visual C++ for Python 2.7 under wine; clean up logs."""
    banner("Downloading VCForPython27.msi, please wait...")
    commands = (
        'wget https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi',
        'sudo wine msiexec /i VCForPython27.msi /L*v log2.txt',
        'mkdir .OK',  # marker directory: signals that setup finished
        'sudo rm -Rf log2.txt',
        'sudo rm -Rf log.txt',
    )
    for command in commands:
        os.system(command)
def main():
    """Run the full setup: apt/wine dependencies, then Python-for-wine bits."""
    print("\n")
    banner("Installing dependencies..")
    print("\n")
    install_dependencies()
    download_python()
    print("\n")
    banner("Moving to dependent files..")
    print("\n")
    for step in (download_python_win_exten, download_vc_for_py):
        step()


if __name__ == '__main__':
    main()
| # Copyright 2017 Insanity Framework (IF)
# Written by: * Alisson Moretto - 4w4k3
# https://github.com/4w4k3/Insanity-Framework
# Licensed under the BSD-3-Clause
import os
def banner(text, char="*"):
    """Print *text* inside a three-line border."""
    # NOTE(review): the border appends a literal "****" regardless of *char*,
    # so a non-default border character yields a mixed border -- confirm
    # whether non-default chars are ever used before relying on this.
    print(char * len(text) + "****")
    print(char + " " + text + " " + char)
    print(char * len(text) + "****")


def install_dependencies():
    """ Install the dependencies needed to run the program """
    # apt/wine setup; requires network access and root via sudo.
    os.system('apt-get install sudo')
    os.system('sudo dpkg --add-architecture i386')
    os.system('sudo apt-get install wget -y')
    os.system('sudo apt-get update && sudo apt-get install wine -y')
    os.system('sudo apt-get dist-upgrade -yy && apt-get upgrade -yy')
    os.system('sudo apt-get install wine32 -y')
    os.system('sudo apt-get -f install')
    os.system('clear')
    banner("Press enter to default Winecfg to Windows 7")
    raw_input()  # Python 2: waits for the user before winecfg runs
    os.system('winecfg')
    os.system('clear')


def download_python():
    """ Download python for some reason..? """
    # Fetches the CPython 2.7.12 Windows installer and runs it under wine.
    banner("Downloading Python 2.7.x.msi, please wait...")
    os.system('wget https://www.python.org/ftp/python/2.7.12/python-2.7.12.msi')
    os.system('sudo wine msiexec /i python-2.7.12.msi /L*v log.txt')


def download_python_win_exten():
    """ Download Windows extenstion for python without checking the checksum.. """
    banner("Installing pywin32-220.win32-py2.7.exe (Windows extension), please wait...")
    os.system('sudo wine pywin32-220.win32-py2.7.exe')
    os.system('sudo wine pyHook-1.5.1.win32-py2.7.exe')
    # Installs pyinstaller into the wine-hosted Python via its own pip.
    os.system('sudo wine /root/.wine/drive_c/Python27/python.exe /root/.wine/drive_c/Python27/Scripts/pip.exe install pyinstaller')
    os.system('clear')


def download_vc_for_py():
    """Install Microsoft Visual C++ for Python 2.7 under wine; clean up logs."""
    banner("Downloading VCForPython27.msi, please wait...")
    os.system('wget https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi')
    os.system('sudo wine msiexec /i VCForPython27.msi /L*v log2.txt')
    os.system('mkdir .OK')  # marker directory: signals that setup finished
    os.system('sudo rm -Rf log2.txt')
    os.system('sudo rm -Rf log.txt')


def main():
    """Run the full setup sequence."""
    print("\n")
    banner("Installing dependencies..")
    print("\n")
    install_dependencies()
    download_python()
    print("\n")
    banner("Moving to dependent files..")
    print("\n")
    download_python_win_exten()
    download_vc_for_py()


if __name__ == '__main__':
    main()
| bsd-3-clause | Python |
0707d920f37edb82d16ccabe1e8413ec16c47c0b | Change directory where data is written to. | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org | backend/mcapi/mcdir.py | backend/mcapi/mcdir.py | import utils
from os import environ
import os.path
# Root data directory; override with the MCDIR environment variable.
MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons'


def for_uid(uidstr):
    """Return the on-disk shard directory for *uidstr*, creating it if needed.

    The shard is derived from the second dash-separated piece of the uid:
    MCDIR/<chars 0-2>/<chars 2-4>.
    """
    shard = uidstr.split('-')[1]
    path = os.path.join(MCDIR, shard[0:2], shard[2:4])
    utils.mkdirp(path)
    return path
| import utils
from os import environ
import os.path
# Root data directory; override with the MCDIR environment variable.
MCDIR = environ.get("MCDIR") or '/mcfs/data'


def for_uid(uidstr):
    """Return the on-disk shard directory for *uidstr*, creating it on demand.

    Maps a uid like 'xx-YYYY...' to MCDIR/<YY>/<YY> using the first four
    characters of the second dash-separated piece.
    """
    pieces = uidstr.split('-')
    path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4])
    utils.mkdirp(path)
    return path
| mit | Python |
95770bf4d71666b5e589061f5c76979bfcebb000 | Make imports conform to PEP8. | onespacemedia/developer-automation | hooks/post_gen_project.py | hooks/post_gen_project.py | from cookiecutter.main import cookiecutter
from getpass import getpass
import os
import sys
from shutil import rmtree
# Make the local plugin modules (github.py, google.py, ...) importable from
# the directory this hook runs in.
current_path = os.path.abspath('.')
sys.path.append(current_path)

import github  # NOQA
import google  # NOQA
import mandrill  # NOQA
import opbeat  # NOQA

# opbeat, mandrill, google
# Ensure we have all of the environment variables that we need.
# Each credential is read from the environment when available and prompted
# for interactively otherwise (getpass for secrets). The raw_input/print
# statements below mean this hook targets Python 2.
credentials = {
    'github_token': os.getenv('GITHUB_TOKEN') or raw_input('Please enter your Github token (https://github.com/settings/tokens/new, only "repo" is required.): '),
    'github_username': os.getenv('GITHUB_USERNAME') or raw_input('Please enter your Github username: '),
    'github_password': os.getenv('GITHUB_PASSWORD') or getpass('Please enter your Github password: '),
    'mandrill_email': os.getenv('MANDRILL_EMAIL') or raw_input('Please enter your Mandrill email: '),
    'mandrill_password': os.getenv('MANDRILL_PASSWORD') or getpass('Please enter your Mandrill password: '),
    'google_email': os.getenv('GOOGLE_EMAIL') or raw_input('Please enter your Google email: '),
    'google_password': os.getenv('GOOGLE_PASSWORD') or getpass('Please enter your Google password: '),
    'slack_email': os.getenv('SLACK_EMAIL') or raw_input('Please enter your Slack email: '),
    'slack_password': os.getenv('SLACK_PASSWORD') or getpass('Please enter your Slack password: '),
}
# Call each of the plugins to get the API keys we need.
# credentials.update(
#     github.Plugin().call(credentials)
# )
# opbeat.Plugin.call(credentials)
# credentials.update(
#     mandrill.Plugin().call(credentials)
# )
print google.Plugin().call(credentials)
credentials.update(
    opbeat.Plugin().call(credentials)
)
# NOTE(review): this prints collected secrets to stdout -- presumably a
# debugging aid; confirm before shipping.
print credentials
# Remove the directory we created.
rmtree(current_path)
# Generate the project, passing in the API keys we've just obtained.
# cookiecutter('gh:onespacemedia/project-template', extra_context={
#     'foo': 'bar'
# })
| from cookiecutter.main import cookiecutter
from getpass import getpass
import os
import sys
from shutil import rmtree
# Make the local plugin modules importable from the hook's directory.
current_path = os.path.abspath('.')
sys.path.append(current_path)

import github, google, mandrill  # NOQA
import opbeat  # NOQA

# opbeat, mandrill, google
# Ensure we have all of the environment variables that we need.
# Values fall back to interactive prompts (getpass for secrets); the
# raw_input/print statements below mean this hook targets Python 2.
credentials = {
    'github_token': os.getenv('GITHUB_TOKEN') or raw_input('Please enter your Github token (https://github.com/settings/tokens/new, only "repo" is required.): '),
    'github_username': os.getenv('GITHUB_USERNAME') or raw_input('Please enter your Github username: '),
    'github_password': os.getenv('GITHUB_PASSWORD') or getpass('Please enter your Github password: '),
    'mandrill_email': os.getenv('MANDRILL_EMAIL') or raw_input('Please enter your Mandrill email: '),
    'mandrill_password': os.getenv('MANDRILL_PASSWORD') or getpass('Please enter your Mandrill password: '),
    'google_email': os.getenv('GOOGLE_EMAIL') or raw_input('Please enter your Google email: '),
    'google_password': os.getenv('GOOGLE_PASSWORD') or getpass('Please enter your Google password: '),
    'slack_email': os.getenv('SLACK_EMAIL') or raw_input('Please enter your Slack email: '),
    'slack_password': os.getenv('SLACK_PASSWORD') or getpass('Please enter your Slack password: '),
}
# Call each of the plugins to get the API keys we need.
# credentials.update(
#     github.Plugin().call(credentials)
# )
# opbeat.Plugin.call(credentials)
# credentials.update(
#     mandrill.Plugin().call(credentials)
# )
print google.Plugin().call(credentials)
credentials.update(
    opbeat.Plugin().call(credentials)
)
print credentials
# Remove the directory we created.
rmtree(current_path)
# Generate the project, passing in the API keys we've just obtained.
# cookiecutter('gh:onespacemedia/project-template', extra_context={
#     'foo': 'bar'
# })
| mit | Python |
39f02725badba7ad8512737c6721061489ab3ee6 | Add node model | PressLabs/cobalt,PressLabs/cobalt | node/node.py | node/node.py | from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
    """A storage node described by a single-section INI config file, e.g.::

        [bk1-z3.presslabs.net]
        ssd = True

    The section name becomes the node's name and its options become the
    node's label dict.
    """

    def __init__(self, context):
        # context['conf_path']  -> path to the node's INI file
        # context['volume_path'] -> BTRFS root handed to the driver
        self._conf_path = context['conf_path']
        self._driver = BTRFSDriver(context['volume_path'])
        self._name, self._labels = '', {}

        config = ConfigParser()
        config.read(self._conf_path)
        try:
            self._name = config.sections()[0]
            # BUG FIX: configparser section proxies have no iteritems();
            # use items(), which the imported (Python 3) configparser provides.
            for label, value in config[self._name].items():
                self._labels[label] = value
        except IndexError:
            # Config file missing or empty: the node stays unnamed/unlabelled.
            pass

    def get_subvolumes(self):
        """Return every BTRFS subvolume known to the driver."""
        return self._driver.get_all()

    @property
    def name(self):
        """Hostname of the node (first config section), or '' if unset."""
        return self._name

    @property
    def labels(self):
        """Dict of key/value labels read from the node's config section."""
        return self._labels
| from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
    """
    # Dummy config example
    [bk1-z3.presslabs.net]
    ssd = True
    """

    def __init__(self, context):
        # context['node']['conf_path'] -> path to the node's INI file;
        # context['volume_path'] -> BTRFS root handed to the driver.
        self._conf_path = context['node']['conf_path']
        self._driver = BTRFSDriver(context['volume_path'])
        self._name, self._labels = '', {}

        config = ConfigParser()
        config.read(self._conf_path)
        try:
            # First (only) section name doubles as the node's name.
            self._name = config.sections()[0]
            # NOTE(review): iteritems() is a Python-2 dict method; the
            # configparser section proxy imported here (Py3 module name) does
            # not provide it -- this likely raises AttributeError at runtime.
            for label, value in config[self._name].iteritems():
                self._labels[label] = value
        except IndexError:
            # Config file missing or empty: node stays unnamed/unlabelled.
            pass

    def get_subvolumes(self):
        """Return every BTRFS subvolume known to the driver."""
        return self._driver.get_all()

    @property
    def name(self):
        # Hostname of the node (first config section), or ''.
        return self._name

    @property
    def labels(self):
        # Dict of key/value labels from the node's config section.
        return self._labels
| apache-2.0 | Python |
6b07ffa43ecfd1d34f39fc1d6e37f9f3069d48cf | fix bug | shendri4/wolves_capture_130,shendri4/wolves_capture_130 | bam2vcf_GATK_wolves.py | bam2vcf_GATK_wolves.py | #!/usr/bin/env python
#import argparse
#from glob import glob
#-s test_samples.txt
#-b /mnt/lfs2/hend6746/devils/reference/sarHar1.fa
from os.path import join as jp
from os.path import abspath
import os
import sys
import argparse
# Required command-line arguments: the sample list and the bwa-indexed
# reference genome.
parser = argparse.ArgumentParser()
parser.add_argument('-s', "--samples", help=" samples.txt file with sample ID.", required=True)
parser.add_argument('-b', "--bwaindex", help="Path to bwa index file.", required=True)
args = parser.parse_args()

# When True, log() also echoes each line to stdout.
VERBOSE=False
#Function definitions:
def log(txt, out):
    """Append *txt* (newline-terminated) to *out*, echoing when VERBOSE."""
    if VERBOSE:
        print(txt)
    line = txt + '\n'
    out.write(line)
    out.flush()
## Read in samples and put them in a list:
# Sample names come from the -s file; each non-blank line is reduced to its
# basename with any '.bam' suffix stripped.
samples = []
for l in open(args.samples):
    if len(l) > 1:
        samples.append(l.split('/')[-1].replace('.bam', '').strip())
print samples  # Python 2 print statement

# Setup folders and paths variables:
bamFolder = abspath('02-Mapped')
variantFolder = abspath('03-Calls')
PBS_scripts = abspath('GATK_PBS_scripts')
bwaIndex = abspath(args.bwaindex)
# Base GATK invocation; the reference genome (-R) is fixed here.
gatkCall = 'java -jar /opt/modules/biology/gatk/3.5/bin/GenomeAnalysisTK.jar -R %s' % bwaIndex
os.system('mkdir -p %s' % bamFolder)
os.system('mkdir -p %s' % variantFolder)
os.system('mkdir -p %s' % PBS_scripts)

##### Run pipeline ###
# For each sample, write a PBS job script that runs GATK HaplotypeCaller in
# GVCF mode on the sample's BAM.
for sample in samples:
    print "Processing", sample, "....."
    # Set up files:
    logFile = ''.join([jp(PBS_scripts, sample), '_GATK.log'])
    logCommands = open(''.join([jp(PBS_scripts, sample), '_GATK_commands.sh']), 'w')
    # Setup for qsub: shebang plus PBS directives (job name, merged output,
    # mail notifications, queue and memory request), then environment setup.
    log('#!/bin/bash', logCommands)
    log('#PBS -N %s_GATK' % sample, logCommands)
    log('#PBS -j oe', logCommands)
    log('#PBS -o %s_GATK_job.log' % sample, logCommands)
    log('#PBS -m abe', logCommands)
    log('#PBS -M shendri4@gmail.com', logCommands)
    log('#PBS -q reg', logCommands)
    log('#PBS -l mem=100gb', logCommands)
    log(". /usr/modules/init/bash", logCommands)
    log("module load python/2.7.10", logCommands)
    log("module load grc", logCommands)
    ###########Per-Sample Variant Calling
    #HaplotypeCaller on each sample BAM file
    #(if a sample's data is spread over more than one BAM, then pass them all in together) to create single-sample gVCFs
    #not recommended for somatic (cancer) variant discovery. For that purpose, use MuTect2 instead
    cmd = ' '.join([gatkCall, ' -T HaplotypeCaller ', ' -I ' + jp(bamFolder, sample) + '.bam',
                    ' --emitRefConfidence GVCF', ' -o ' + jp(variantFolder, sample) + '.raw.snps.indels.g.vcf',
                    '>>', logFile, '2>&1'])
    log(cmd, logCommands)
    logCommands.close()
| #!/usr/bin/env python
#import argparse
#from glob import glob
#-s test_samples.txt
#-b /mnt/lfs2/hend6746/devils/reference/sarHar1.fa
from os.path import join as jp
from os.path import abspath
import os
import sys
import argparse
# Required command-line arguments: the sample list and the bwa-indexed
# reference genome.
parser = argparse.ArgumentParser()
parser.add_argument('-s', "--samples", help=" samples.txt file with sample ID.", required=True)
parser.add_argument('-b', "--bwaindex", help="Path to bwa index file.", required=True)
args = parser.parse_args()

# When True, log() also echoes each line to stdout.
VERBOSE=False

#Function definitions:
def log(txt, out):
    """Append *txt* (newline-terminated) to *out*, echoing when VERBOSE."""
    if VERBOSE:
        print(txt)
    out.write(txt+'\n')
    out.flush()

## Read in samples and put them in a list:
# Sample names come from the -s file; each non-blank line is reduced to its
# basename with any '.bam' suffix stripped.
samples = []
for l in open(args.samples):
    if len(l) > 1:
        samples.append(l.split('/')[-1].replace('.bam', '').strip())
print samples  # Python 2 print statement

# Setup folders and paths variables:
bamFolder = abspath('02-Mapped')
variantFolder = abspath('03-Calls')
PBS_scripts = abspath('_GATK_PBS_scripts')
bwaIndex = abspath(args.bwaindex)
# Base GATK invocation; the reference genome (-R) is fixed here.
gatkCall = 'java -jar /opt/modules/biology/gatk/3.5/bin/GenomeAnalysisTK.jar -R %s' % bwaIndex
os.system('mkdir -p %s' % bamFolder)
os.system('mkdir -p %s' % variantFolder)
os.system('mkdir -p %s' % PBS_scripts)

##### Run pipeline ###
# For each sample, write a PBS job script that runs GATK HaplotypeCaller in
# GVCF mode on the sample's BAM.
for sample in samples:
    print "Processing", sample, "....."
    # Set up files:
    # NOTE(review): the sample name and 'GATK.log'/'GATK_commands.sh' are
    # joined without a separator (e.g. 'sampleGATK.log') -- presumably
    # unintended; confirm against downstream consumers.
    logFile = ''.join([jp(PBS_scripts, sample), 'GATK.log'])
    logCommands = open(''.join([jp(PBS_scripts, sample), 'GATK_commands.sh']), 'w')
    # Setup for qsub: shebang plus PBS directives, then environment setup.
    log('#!/bin/bash', logCommands)
    log('#PBS -N %s_GATK' % sample, logCommands)
    log('#PBS -j oe', logCommands)
    log('#PBS -o %s_GATK_job.log' % sample, logCommands)
    log('#PBS -m abe', logCommands)
    log('#PBS -M shendri4@gmail.com', logCommands)
    log('#PBS -q reg', logCommands)
    log('#PBS -l mem=100gb', logCommands)
    log(". /usr/modules/init/bash", logCommands)
    log("module load python/2.7.10", logCommands)
    log("module load grc", logCommands)
    ###########Per-Sample Variant Calling
    #HaplotypeCaller on each sample BAM file
    #(if a sample's data is spread over more than one BAM, then pass them all in together) to create single-sample gVCFs
    #not recommended for somatic (cancer) variant discovery. For that purpose, use MuTect2 instead
    cmd = ' '.join([gatkCall, ' -T HaplotypeCaller ', ' -I ' + jp(bamFolder, sample) + '.bam',
                    ' --emitRefConfidence GVCF', ' -o ' + jp(variantFolder, sample) + '.raw.snps.indels.g.vcf',
                    '>>', logFile, '2>&1'])
    log(cmd, logCommands)
    logCommands.close()
| apache-2.0 | Python |
9d8cd275bdd6227e759c78d74ad5fc2ed0c4e2cb | add the new lib directories | EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes | lg_common/setup.py | lg_common/setup.py | #!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['lg_common'],
package_dir={'': 'src'},
package_data={
'lg_common': [
'extensions/ros_window_ready/*',
'extensions/monitor_page_urls/*',
'extensions/current_url/*',
'extensions/ros_window_ready/*/*',
'extensions/monitor_page_urls/*/*',
'extensions/current_url/*/*'
]
},
scripts=['bin/lg-code-to-command'],
requires=[]
)
setup(**d)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| #!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# Catkin-managed package manifest for lg_common; generate_distutils_setup
# merges these arguments with metadata taken from package.xml.
d = generate_distutils_setup(
    packages=['lg_common'],
    # Python sources live under src/.
    package_dir={'': 'src'},
    # Only the top-level files of each extension are shipped here; nested
    # directories (*/*) are not included in this revision.
    package_data={
        'lg_common': [
            'extensions/ros_window_ready/*',
            'extensions/monitor_page_urls/*',
            'extensions/current_url/*'
        ]
    },
    scripts=['bin/lg-code-to-command'],
    requires=[]
)
setup(**d)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| apache-2.0 | Python |
eff44d791722a422dc2f7b845d0d0a3c819753a5 | Fix another path | daniel-j-born/grpc,zhimingxie/grpc,ejona86/grpc,muxi/grpc,firebase/grpc,thunderboltsid/grpc,deepaklukose/grpc,dklempner/grpc,firebase/grpc,infinit/grpc,jtattermusch/grpc,LuminateWireless/grpc,thinkerou/grpc,malexzx/grpc,zhimingxie/grpc,simonkuang/grpc,thunderboltsid/grpc,ipylypiv/grpc,yongni/grpc,arkmaxim/grpc,stanley-cheung/grpc,yugui/grpc,kskalski/grpc,Crevil/grpc,soltanmm/grpc,ppietrasa/grpc,geffzhang/grpc,mehrdada/grpc,jcanizales/grpc,fuchsia-mirror/third_party-grpc,grpc/grpc,simonkuang/grpc,muxi/grpc,royalharsh/grpc,MakMukhi/grpc,malexzx/grpc,donnadionne/grpc,soltanmm/grpc,geffzhang/grpc,jboeuf/grpc,dklempner/grpc,thinkerou/grpc,ananthonline/grpc,ppietrasa/grpc,daniel-j-born/grpc,ppietrasa/grpc,thunderboltsid/grpc,ppietrasa/grpc,nicolasnoble/grpc,thinkerou/grpc,wcevans/grpc,jtattermusch/grpc,tengyifei/grpc,hstefan/grpc,sreecha/grpc,Vizerai/grpc,vsco/grpc,a11r/grpc,vjpai/grpc,yongni/grpc,kskalski/grpc,rjshade/grpc,perumaalgoog/grpc,soltanmm/grpc,quizlet/grpc,matt-kwong/grpc,grpc/grpc,bogdandrutu/grpc,yugui/grpc,chrisdunelm/grpc,chrisdunelm/grpc,jboeuf/grpc,malexzx/grpc,a11r/grpc,ppietrasa/grpc,thinkerou/grpc,jtattermusch/grpc,vjpai/grpc,grani/grpc,kpayson64/grpc,fuchsia-mirror/third_party-grpc,soltanmm-google/grpc,vsco/grpc,greasypizza/grpc,infinit/grpc,matt-kwong/grpc,grpc/grpc,rjshade/grpc,a-veitch/grpc,ipylypiv/grpc,murgatroid99/grpc,pszemus/grpc,ncteisen/grpc,andrewpollock/grpc,makdharma/grpc,donnadionne/grpc,stanley-cheung/grpc,deepaklukose/grpc,kskalski/grpc,Vizerai/grpc,baylabs/grpc,pmarks-net/grpc,soltanmm-google/grpc,y-zeng/grpc,apolcyn/grpc,rjshade/grpc,stanley-cheung/grpc,jboeuf/grpc,kpayson64/grpc,PeterFaiman/ruby-grpc-minimal,soltanmm-google/grpc,mehrdada/grpc,y-zeng/grpc,philcleveland/grpc,ncteisen/grpc,murgatroid99/grpc,yang-g/grpc,a-veitch/grpc,murgatroid99/grpc,a11r/grpc,Crevil/grpc,vjpai/grpc,kriswuollett/grpc,hstefan/grpc,carl-mastrangelo/grpc,soltanmm/grpc,apolcyn/grpc,matt-kwong/g
rpc,stanley-cheung/grpc,rjshade/grpc,tengyifei/grpc,ncteisen/grpc,kskalski/grpc,baylabs/grpc,mehrdada/grpc,ejona86/grpc,pszemus/grpc,hstefan/grpc,philcleveland/grpc,sreecha/grpc,ctiller/grpc,sreecha/grpc,ctiller/grpc,geffzhang/grpc,donnadionne/grpc,msmania/grpc,jcanizales/grpc,goldenbull/grpc,zhimingxie/grpc,zhimingxie/grpc,jcanizales/grpc,thunderboltsid/grpc,kskalski/grpc,thunderboltsid/grpc,pszemus/grpc,vjpai/grpc,pszemus/grpc,zhimingxie/grpc,matt-kwong/grpc,perumaalgoog/grpc,chrisdunelm/grpc,adelez/grpc,greasypizza/grpc,andrewpollock/grpc,kriswuollett/grpc,apolcyn/grpc,pmarks-net/grpc,thinkerou/grpc,dklempner/grpc,podsvirov/grpc,pmarks-net/grpc,royalharsh/grpc,dgquintas/grpc,perumaalgoog/grpc,chrisdunelm/grpc,a-veitch/grpc,kumaralokgithub/grpc,malexzx/grpc,ppietrasa/grpc,leifurhauks/grpc,dklempner/grpc,deepaklukose/grpc,podsvirov/grpc,thunderboltsid/grpc,philcleveland/grpc,7anner/grpc,nicolasnoble/grpc,perumaalgoog/grpc,deepaklukose/grpc,vjpai/grpc,msmania/grpc,vsco/grpc,leifurhauks/grpc,yang-g/grpc,bogdandrutu/grpc,nicolasnoble/grpc,philcleveland/grpc,ipylypiv/grpc,adelez/grpc,jtattermusch/grpc,chrisdunelm/grpc,simonkuang/grpc,MakMukhi/grpc,greasypizza/grpc,malexzx/grpc,daniel-j-born/grpc,arkmaxim/grpc,rjshade/grpc,ctiller/grpc,MakMukhi/grpc,fuchsia-mirror/third_party-grpc,a11r/grpc,soltanmm/grpc,infinit/grpc,ipylypiv/grpc,murgatroid99/grpc,kpayson64/grpc,muxi/grpc,miselin/grpc,yang-g/grpc,grani/grpc,bogdandrutu/grpc,jboeuf/grpc,pszemus/grpc,kriswuollett/grpc,ctiller/grpc,rjshade/grpc,7anner/grpc,tengyifei/grpc,ejona86/grpc,Vizerai/grpc,muxi/grpc,sreecha/grpc,donnadionne/grpc,soltanmm-google/grpc,dgquintas/grpc,yongni/grpc,yongni/grpc,ejona86/grpc,MakMukhi/grpc,quizlet/grpc,y-zeng/grpc,kumaralokgithub/grpc,sreecha/grpc,adelez/grpc,kskalski/grpc,makdharma/grpc,arkmaxim/grpc,leifurhauks/grpc,quizlet/grpc,ncteisen/grpc,makdharma/grpc,thinkerou/grpc,grpc/grpc,donnadionne/grpc,quizlet/grpc,apolcyn/grpc,daniel-j-born/grpc,thinkerou/grpc,carl-mastrangelo/grpc,soltanmm-
google/grpc,philcleveland/grpc,ctiller/grpc,7anner/grpc,muxi/grpc,yongni/grpc,jtattermusch/grpc,leifurhauks/grpc,donnadionne/grpc,adelez/grpc,jcanizales/grpc,ipylypiv/grpc,dgquintas/grpc,stanley-cheung/grpc,greasypizza/grpc,ncteisen/grpc,zhimingxie/grpc,royalharsh/grpc,msmania/grpc,yang-g/grpc,quizlet/grpc,yugui/grpc,nicolasnoble/grpc,fuchsia-mirror/third_party-grpc,ipylypiv/grpc,ctiller/grpc,kumaralokgithub/grpc,ananthonline/grpc,dgquintas/grpc,fuchsia-mirror/third_party-grpc,baylabs/grpc,vsco/grpc,muxi/grpc,sreecha/grpc,donnadionne/grpc,7anner/grpc,bogdandrutu/grpc,yugui/grpc,matt-kwong/grpc,soltanmm/grpc,kriswuollett/grpc,bogdandrutu/grpc,dklempner/grpc,carl-mastrangelo/grpc,Crevil/grpc,thinkerou/grpc,deepaklukose/grpc,kpayson64/grpc,kpayson64/grpc,malexzx/grpc,dgquintas/grpc,vsco/grpc,msmania/grpc,arkmaxim/grpc,dgquintas/grpc,chrisdunelm/grpc,goldenbull/grpc,wcevans/grpc,LuminateWireless/grpc,stanley-cheung/grpc,zhimingxie/grpc,geffzhang/grpc,ejona86/grpc,pmarks-net/grpc,jtattermusch/grpc,murgatroid99/grpc,rjshade/grpc,MakMukhi/grpc,chrisdunelm/grpc,vsco/grpc,simonkuang/grpc,carl-mastrangelo/grpc,podsvirov/grpc,jboeuf/grpc,murgatroid99/grpc,ananthonline/grpc,yugui/grpc,makdharma/grpc,hstefan/grpc,podsvirov/grpc,msmania/grpc,kpayson64/grpc,malexzx/grpc,fuchsia-mirror/third_party-grpc,grpc/grpc,Crevil/grpc,ejona86/grpc,MakMukhi/grpc,yongni/grpc,mehrdada/grpc,vjpai/grpc,kriswuollett/grpc,rjshade/grpc,bogdandrutu/grpc,perumaalgoog/grpc,murgatroid99/grpc,ncteisen/grpc,PeterFaiman/ruby-grpc-minimal,pszemus/grpc,baylabs/grpc,malexzx/grpc,quizlet/grpc,wcevans/grpc,pszemus/grpc,carl-mastrangelo/grpc,ejona86/grpc,deepaklukose/grpc,stanley-cheung/grpc,dgquintas/grpc,jcanizales/grpc,kriswuollett/grpc,hstefan/grpc,daniel-j-born/grpc,thinkerou/grpc,matt-kwong/grpc,a-veitch/grpc,mehrdada/grpc,Vizerai/grpc,goldenbull/grpc,fuchsia-mirror/third_party-grpc,MakMukhi/grpc,simonkuang/grpc,apolcyn/grpc,geffzhang/grpc,goldenbull/grpc,vsco/grpc,jboeuf/grpc,kumaralokgithub/grpc,arkmaxim/
grpc,bogdandrutu/grpc,carl-mastrangelo/grpc,jcanizales/grpc,thinkerou/grpc,yang-g/grpc,a11r/grpc,chrisdunelm/grpc,yang-g/grpc,jboeuf/grpc,geffzhang/grpc,grani/grpc,Crevil/grpc,goldenbull/grpc,leifurhauks/grpc,chrisdunelm/grpc,jboeuf/grpc,stanley-cheung/grpc,PeterFaiman/ruby-grpc-minimal,kskalski/grpc,muxi/grpc,arkmaxim/grpc,nicolasnoble/grpc,yongni/grpc,msmania/grpc,dklempner/grpc,yang-g/grpc,fuchsia-mirror/third_party-grpc,zhimingxie/grpc,stanley-cheung/grpc,murgatroid99/grpc,kumaralokgithub/grpc,hstefan/grpc,a11r/grpc,pszemus/grpc,dgquintas/grpc,kumaralokgithub/grpc,PeterFaiman/ruby-grpc-minimal,quizlet/grpc,greasypizza/grpc,perumaalgoog/grpc,7anner/grpc,hstefan/grpc,vjpai/grpc,muxi/grpc,grpc/grpc,baylabs/grpc,soltanmm-google/grpc,yang-g/grpc,grani/grpc,tengyifei/grpc,LuminateWireless/grpc,ctiller/grpc,andrewpollock/grpc,kpayson64/grpc,philcleveland/grpc,pmarks-net/grpc,Vizerai/grpc,muxi/grpc,ctiller/grpc,apolcyn/grpc,tengyifei/grpc,deepaklukose/grpc,wcevans/grpc,nicolasnoble/grpc,grani/grpc,soltanmm-google/grpc,dklempner/grpc,kpayson64/grpc,ncteisen/grpc,dgquintas/grpc,carl-mastrangelo/grpc,makdharma/grpc,ctiller/grpc,carl-mastrangelo/grpc,jboeuf/grpc,infinit/grpc,kumaralokgithub/grpc,leifurhauks/grpc,goldenbull/grpc,MakMukhi/grpc,jcanizales/grpc,ncteisen/grpc,dgquintas/grpc,deepaklukose/grpc,matt-kwong/grpc,mehrdada/grpc,hstefan/grpc,vsco/grpc,grpc/grpc,y-zeng/grpc,y-zeng/grpc,bogdandrutu/grpc,podsvirov/grpc,matt-kwong/grpc,ejona86/grpc,grpc/grpc,kpayson64/grpc,arkmaxim/grpc,donnadionne/grpc,wcevans/grpc,PeterFaiman/ruby-grpc-minimal,adelez/grpc,pszemus/grpc,Vizerai/grpc,soltanmm/grpc,Vizerai/grpc,donnadionne/grpc,a-veitch/grpc,yongni/grpc,a11r/grpc,philcleveland/grpc,PeterFaiman/ruby-grpc-minimal,philcleveland/grpc,nicolasnoble/grpc,ctiller/grpc,royalharsh/grpc,malexzx/grpc,miselin/grpc,baylabs/grpc,Crevil/grpc,matt-kwong/grpc,LuminateWireless/grpc,arkmaxim/grpc,murgatroid99/grpc,ananthonline/grpc,a11r/grpc,thunderboltsid/grpc,grpc/grpc,geffzhang/grpc,makdharma
/grpc,Vizerai/grpc,pmarks-net/grpc,mehrdada/grpc,Vizerai/grpc,royalharsh/grpc,firebase/grpc,mehrdada/grpc,geffzhang/grpc,ipylypiv/grpc,ppietrasa/grpc,vjpai/grpc,jtattermusch/grpc,makdharma/grpc,PeterFaiman/ruby-grpc-minimal,vjpai/grpc,firebase/grpc,jcanizales/grpc,perumaalgoog/grpc,infinit/grpc,a-veitch/grpc,baylabs/grpc,nicolasnoble/grpc,firebase/grpc,Crevil/grpc,a-veitch/grpc,quizlet/grpc,geffzhang/grpc,simonkuang/grpc,quizlet/grpc,sreecha/grpc,wcevans/grpc,Vizerai/grpc,soltanmm-google/grpc,grpc/grpc,y-zeng/grpc,perumaalgoog/grpc,murgatroid99/grpc,goldenbull/grpc,apolcyn/grpc,donnadionne/grpc,simonkuang/grpc,carl-mastrangelo/grpc,royalharsh/grpc,LuminateWireless/grpc,greasypizza/grpc,chrisdunelm/grpc,jboeuf/grpc,ananthonline/grpc,soltanmm/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,apolcyn/grpc,makdharma/grpc,PeterFaiman/ruby-grpc-minimal,kumaralokgithub/grpc,stanley-cheung/grpc,daniel-j-born/grpc,sreecha/grpc,firebase/grpc,sreecha/grpc,ananthonline/grpc,wcevans/grpc,arkmaxim/grpc,makdharma/grpc,dgquintas/grpc,ananthonline/grpc,PeterFaiman/ruby-grpc-minimal,apolcyn/grpc,miselin/grpc,daniel-j-born/grpc,ncteisen/grpc,msmania/grpc,soltanmm/grpc,kriswuollett/grpc,y-zeng/grpc,infinit/grpc,jboeuf/grpc,mehrdada/grpc,andrewpollock/grpc,ipylypiv/grpc,ncteisen/grpc,Crevil/grpc,PeterFaiman/ruby-grpc-minimal,nicolasnoble/grpc,pszemus/grpc,firebase/grpc,andrewpollock/grpc,miselin/grpc,Crevil/grpc,yugui/grpc,stanley-cheung/grpc,vjpai/grpc,kpayson64/grpc,LuminateWireless/grpc,yugui/grpc,vjpai/grpc,firebase/grpc,podsvirov/grpc,jcanizales/grpc,sreecha/grpc,kriswuollett/grpc,podsvirov/grpc,donnadionne/grpc,grani/grpc,stanley-cheung/grpc,grpc/grpc,kskalski/grpc,simonkuang/grpc,mehrdada/grpc,nicolasnoble/grpc,a-veitch/grpc,vjpai/grpc,yugui/grpc,msmania/grpc,ananthonline/grpc,firebase/grpc,baylabs/grpc,infinit/grpc,ejona86/grpc,adelez/grpc,tengyifei/grpc,LuminateWireless/grpc,vsco/grpc,7anner/grpc,jtattermusch/grpc,muxi/grpc,greasypizza/grpc,wcevans/grpc,adelez/grpc,bogdandrutu/grpc,
yugui/grpc,andrewpollock/grpc,soltanmm-google/grpc,grani/grpc,kpayson64/grpc,ejona86/grpc,miselin/grpc,leifurhauks/grpc,wcevans/grpc,muxi/grpc,greasypizza/grpc,goldenbull/grpc,yang-g/grpc,fuchsia-mirror/third_party-grpc,miselin/grpc,tengyifei/grpc,carl-mastrangelo/grpc,sreecha/grpc,tengyifei/grpc,grani/grpc,tengyifei/grpc,thunderboltsid/grpc,jtattermusch/grpc,daniel-j-born/grpc,ejona86/grpc,infinit/grpc,yongni/grpc,grpc/grpc,leifurhauks/grpc,goldenbull/grpc,greasypizza/grpc,ipylypiv/grpc,zhimingxie/grpc,7anner/grpc,MakMukhi/grpc,simonkuang/grpc,thinkerou/grpc,andrewpollock/grpc,hstefan/grpc,firebase/grpc,perumaalgoog/grpc,msmania/grpc,kumaralokgithub/grpc,chrisdunelm/grpc,jboeuf/grpc,ejona86/grpc,a11r/grpc,leifurhauks/grpc,royalharsh/grpc,thinkerou/grpc,podsvirov/grpc,ncteisen/grpc,royalharsh/grpc,donnadionne/grpc,andrewpollock/grpc,pmarks-net/grpc,nicolasnoble/grpc,muxi/grpc,dklempner/grpc,a-veitch/grpc,grani/grpc,7anner/grpc,podsvirov/grpc,kskalski/grpc,rjshade/grpc,miselin/grpc,ppietrasa/grpc,pmarks-net/grpc,jtattermusch/grpc,kriswuollett/grpc,pmarks-net/grpc,miselin/grpc,ctiller/grpc,pszemus/grpc,deepaklukose/grpc,mehrdada/grpc,jtattermusch/grpc,adelez/grpc,adelez/grpc,Vizerai/grpc,miselin/grpc,7anner/grpc,ppietrasa/grpc,dklempner/grpc,nicolasnoble/grpc,firebase/grpc,mehrdada/grpc,thunderboltsid/grpc,philcleveland/grpc,y-zeng/grpc,andrewpollock/grpc,baylabs/grpc,ctiller/grpc,carl-mastrangelo/grpc,ncteisen/grpc,ananthonline/grpc,pszemus/grpc,y-zeng/grpc,royalharsh/grpc,sreecha/grpc,LuminateWireless/grpc,fuchsia-mirror/third_party-grpc,LuminateWireless/grpc,daniel-j-born/grpc,firebase/grpc,infinit/grpc | test/core/http/test_server.py | test/core/http/test_server.py | #!/usr/bin/env python2.7
# Copyright 2015-2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Server for httpcli_test"""
import argparse
import BaseHTTPServer
import os
import ssl
import sys
# Test credentials resolved relative to this script's location in the tree.
_PEM = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../../..', 'src/core/lib/tsi/test_creds/server1.pem'))
# Fix: the key lives next to the cert under src/core/lib/tsi; the previous
# 'src/core/tsi/lib' ordering transposed the path segments and pointed at a
# non-existent directory.
_KEY = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../../..', 'src/core/lib/tsi/test_creds/server1.key'))
print _PEM
open(_PEM).close()  # fail fast if the certificate file is missing
argp = argparse.ArgumentParser(description='Server for httpcli_test')
argp.add_argument('-p', '--port', default=10080, type=int)
argp.add_argument('-s', '--ssl', default=False, action='store_true')
args = argp.parse_args()
print 'server running on port %d' % args.port
# Minimal handler serving the canned responses the httpcli test expects.
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
  def good(self):
    # Shared 200 "Hello world" response used by both GET and POST paths.
    self.send_response(200)
    self.send_header('Content-Type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>Hello world!</title></head>')
    self.wfile.write('<body><p>This is a test</p></body></html>')
  def do_GET(self):
    if self.path == '/get':
      self.good()
  def do_POST(self):
    content = self.rfile.read(int(self.headers.getheader('content-length')))
    if self.path == '/post' and content == 'hello':
      self.good()
httpd = BaseHTTPServer.HTTPServer(('localhost', args.port), Handler)
if args.ssl:
  httpd.socket = ssl.wrap_socket(httpd.socket, certfile=_PEM, keyfile=_KEY, server_side=True)
httpd.serve_forever()
| #!/usr/bin/env python2.7
# Copyright 2015-2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Server for httpcli_test"""
import argparse
import BaseHTTPServer
import os
import ssl
import sys
# Test credentials resolved relative to this script; both paths use the
# pre-move src/core/tsi layout.
_PEM = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../../..', 'src/core/tsi/test_creds/server1.pem'))
_KEY = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../../..', 'src/core/tsi/test_creds/server1.key'))
print _PEM
open(_PEM).close()  # fail fast if the certificate file is missing
argp = argparse.ArgumentParser(description='Server for httpcli_test')
argp.add_argument('-p', '--port', default=10080, type=int)
argp.add_argument('-s', '--ssl', default=False, action='store_true')
args = argp.parse_args()
print 'server running on port %d' % args.port
# Minimal handler serving the canned responses the httpcli test expects.
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
  def good(self):
    # Shared 200 "Hello world" response used by both GET and POST paths.
    self.send_response(200)
    self.send_header('Content-Type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>Hello world!</title></head>')
    self.wfile.write('<body><p>This is a test</p></body></html>')
  def do_GET(self):
    if self.path == '/get':
      self.good()
  def do_POST(self):
    content = self.rfile.read(int(self.headers.getheader('content-length')))
    if self.path == '/post' and content == 'hello':
      self.good()
httpd = BaseHTTPServer.HTTPServer(('localhost', args.port), Handler)
if args.ssl:
  httpd.socket = ssl.wrap_socket(httpd.socket, certfile=_PEM, keyfile=_KEY, server_side=True)
httpd.serve_forever()
| apache-2.0 | Python |
b1050d6ea320b6cbe606595036461a646df5d2a7 | Sort task list | Kryz/sentry,nicholasserra/sentry,korealerts1/sentry,Kryz/sentry,hongliang5623/sentry,drcapulet/sentry,looker/sentry,mvaled/sentry,hongliang5623/sentry,looker/sentry,1tush/sentry,imankulov/sentry,Natim/sentry,kevinlondon/sentry,alexm92/sentry,JackDanger/sentry,jean/sentry,kevinastone/sentry,ifduyue/sentry,mvaled/sentry,fuziontech/sentry,ifduyue/sentry,felixbuenemann/sentry,ifduyue/sentry,ewdurbin/sentry,JamesMura/sentry,gencer/sentry,felixbuenemann/sentry,JamesMura/sentry,fotinakis/sentry,boneyao/sentry,boneyao/sentry,fotinakis/sentry,zenefits/sentry,jean/sentry,gencer/sentry,gencer/sentry,ewdurbin/sentry,alexm92/sentry,wong2/sentry,BuildingLink/sentry,imankulov/sentry,looker/sentry,fotinakis/sentry,fuziontech/sentry,wujuguang/sentry,beeftornado/sentry,looker/sentry,wujuguang/sentry,jean/sentry,JackDanger/sentry,songyi199111/sentry,JamesMura/sentry,daevaorn/sentry,mvaled/sentry,gencer/sentry,mvaled/sentry,mitsuhiko/sentry,ngonzalvez/sentry,Natim/sentry,mvaled/sentry,korealerts1/sentry,gg7/sentry,ifduyue/sentry,kevinastone/sentry,wujuguang/sentry,1tush/sentry,ifduyue/sentry,ngonzalvez/sentry,daevaorn/sentry,BayanGroup/sentry,wong2/sentry,songyi199111/sentry,boneyao/sentry,felixbuenemann/sentry,BuildingLink/sentry,alexm92/sentry,kevinlondon/sentry,Natim/sentry,mitsuhiko/sentry,kevinlondon/sentry,songyi199111/sentry,wong2/sentry,jean/sentry,pauloschilling/sentry,beeftornado/sentry,zenefits/sentry,zenefits/sentry,korealerts1/sentry,BuildingLink/sentry,gencer/sentry,ngonzalvez/sentry,imankulov/sentry,kevinastone/sentry,fuziontech/sentry,JackDanger/sentry,ewdurbin/sentry,1tush/sentry,BuildingLink/sentry,JamesMura/sentry,BayanGroup/sentry,pauloschilling/sentry,mvaled/sentry,jean/sentry,beeftornado/sentry,hongliang5623/sentry,zenefits/sentry,JamesMura/sentry,gg7/sentry,pauloschilling/sentry,daevaorn/sentry,zenefits/sentry,looker/sentry,nicholasserra/sentry,drcapulet/sentry,nicholasserra/sentry,Kryz/sentry,daevaorn/s
entry,drcapulet/sentry,gg7/sentry,fotinakis/sentry,BayanGroup/sentry,BuildingLink/sentry | src/sentry/web/frontend/admin_queue.py | src/sentry/web/frontend/admin_queue.py | from __future__ import absolute_import
from sentry.celery import app
from sentry.web.frontend.base import BaseView
class AdminQueueView(BaseView):
    """Superuser-only admin page listing the registered Celery task names."""
    def has_permission(self, request):
        # Only superusers may see the queue configuration.
        return request.user.is_superuser
    def handle(self, request):
        # Sort the registered task names for a stable display order.
        task_names = sorted(app.tasks.keys())
        return self.respond('sentry/admin-queue.html', {'task_list': task_names})
| from __future__ import absolute_import
from sentry.celery import app
from sentry.web.frontend.base import BaseView
class AdminQueueView(BaseView):
    # Superuser-only admin page listing the registered Celery task names.
    def has_permission(self, request):
        return request.user.is_superuser
    def handle(self, request):
        context = {
            # NOTE(review): unsorted; display order follows dict iteration.
            'task_list': app.tasks.keys(),
        }
        return self.respond('sentry/admin-queue.html', context)
| bsd-3-clause | Python |
2a3c5e08378a6646bc11a47518394cd8ef3e9547 | Fix lint | RickMohr/otm-core,RickMohr/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,RickMohr/otm-core,RickMohr/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core | opentreemap/treemap/management/commands/uitest.py | opentreemap/treemap/management/commands/uitest.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import unittest
import sys
import importlib
from optparse import make_option
from django.core.management.base import BaseCommand
from django.conf import settings
from pyvirtualdisplay import Display
class Command(BaseCommand):
    """
    Uses a custom test runner to run UI acceptance tests
    from the 'tests' package
    """
    # -s/--skip-debug-check bypasses the DEBUG safety guard in handle().
    option_list = BaseCommand.option_list + (
        make_option('-s', '--skip-debug-check',
                    action='store_true',
                    dest='skip_debug',
                    help='skip the debug'), )
    def handle(self, *args, **options):
        # Refuse to run against what may be a production database unless the
        # caller explicitly opted out of the check.
        if settings.DEBUG is False and not options['skip_debug']:
            raise Exception('These tests add data to the currently '
                            'select database backend. If this is a '
                            'production database a failing test could '
                            'leave extra data behind (such as users) or '
                            'delete data that already exists.')
        # Headless X display for the browser the UI tests drive.
        disp = Display(visible=0, size=(800, 600))
        disp.start()
        errors = False
        for module in settings.UITESTS:
            uitests = importlib.import_module(module)
            suite = unittest.TestLoader().loadTestsFromModule(uitests)
            try:
                # Honour optional module-level setup/teardown hooks.
                if hasattr(uitests, 'setUpModule'):
                    uitests.setUpModule()
                rslt = unittest.TextTestRunner(verbosity=2).run(suite)
            finally:
                if hasattr(uitests, 'tearDownModule'):
                    uitests.tearDownModule()
                # NOTE(review): the display is stopped inside the per-module
                # finally block, so any subsequent settings.UITESTS entry
                # would run without a display -- confirm whether this should
                # happen once, after the loop.
                disp.stop()
            if not rslt.wasSuccessful():
                errors = True
        # Non-zero exit code when any module had failures.
        if errors:
            sys.exit(1)
| from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import unittest
import sys
import importlib
from optparse import make_option
from django.core.management.base import BaseCommand
from django.conf import settings
from pyvirtualdisplay import Display
class Command(BaseCommand):
    """
    Uses a custom test runner to run UI acceptance tests
    from the 'tests' package
    """
    # -s/--skip-debug-check bypasses the DEBUG safety guard in handle().
    option_list = BaseCommand.option_list + (
        make_option('-s', '--skip-debug-check',
                    action='store_true',
                    dest='skip_debug',
                    help='skip the debug'), )
    def handle(self, *args, **options):
        # Refuse to run against what may be a production database unless the
        # caller explicitly opted out of the check.
        if settings.DEBUG is False and not options['skip_debug']:
            raise Exception('These tests add data to the currently '
                            'select database backend. If this is a '
                            'production database a failing test could '
                            'leave extra data behind (such as users) or '
                            'delete data that already exists.')
        # Headless X display for the browser the UI tests drive.
        disp = Display(visible=0, size=(800, 600))
        disp.start()
        errors = False
        for module in settings.UITESTS:
            uitests = importlib.import_module(module)
            suite = unittest.TestLoader().loadTestsFromModule(uitests)
            try:
                # Honour optional module-level setup/teardown hooks.
                if hasattr(uitests, 'setUpModule'):
                    uitests.setUpModule()
                rslt = unittest.TextTestRunner(verbosity=2).run(suite)
            finally:
                if hasattr(uitests, 'tearDownModule'):
                    uitests.tearDownModule()
                # NOTE(review): the display is stopped inside the per-module
                # finally block, so any subsequent settings.UITESTS entry
                # would run without a display -- confirm whether this should
                # happen once, after the loop.
                disp.stop()
            if not rslt.wasSuccessful():
                errors = True
        # Non-zero exit code when any module had failures.
        if errors:
            sys.exit(1)
| agpl-3.0 | Python |
6ad1664fb2e1b7eb7a54570b70e4dd53fb1cef5e | move abbreviation expansion into a function | qtux/instmatcher | instmatcher/__init__.py | instmatcher/__init__.py | # Copyright 2016 Matthias Gazzari
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os, os.path
import re
from whoosh.fields import Schema, TEXT, NUMERIC, STORED
from whoosh import index
from whoosh.qparser import MultifieldParser
def createIndex():
	"""Build the on-disk Whoosh index from data/institutes.csv."""
	fields = Schema(
		name=TEXT(stored=True),
		alias=TEXT,
		lat=NUMERIC(numtype=float, stored=True),
		lon=NUMERIC(numtype=float, stored=True),
		isni=STORED,
	)
	# Create the index directory on first use.
	if not os.path.exists('index'):
		os.mkdir('index')
	writer = index.create_in('index', fields).writer()
	with open('data/institutes.csv') as csvfile:
		for record in csv.DictReader(csvfile):
			writer.add_document(
				name=record['name'],
				alias=record['alias'],
				lat=record['lat'],
				lon=record['lon'],
				isni=record['isni'],
			)
	writer.commit()
def query(text):
	"""Search the index for *text* after expanding known abbreviations."""
	expanded = expandAbbreviations(text)
	searchFields = ['name', 'alias']
	ix = index.open_dir('index')
	with ix.searcher() as searcher:
		parsed = MultifieldParser(searchFields, ix.schema).parse(expanded)
		results = searcher.search(parsed, terms=True)
		print(results)
		for hit in results:
			print(hit)
def expandAbbreviations(text):
	"""Return *text* with every abbreviation listed in
	data/abbreviations.csv replaced by its long form (whole-word,
	case-insensitive matches).
	"""
	result = text
	with open('data/abbreviations.csv') as csvfile:
		for row in csv.DictReader(csvfile):
			# re.escape: abbreviations may contain regex metacharacters
			# (e.g. dots) which previously changed what the pattern
			# matched.  The inline (?i) flag is also only legal at the
			# start of a pattern in newer Python versions, so pass
			# IGNORECASE explicitly instead.
			pattern = r"\b{}\b".format(re.escape(row['short']))
			result = re.sub(pattern, row['long'], result, flags=re.IGNORECASE)
	return result
| # Copyright 2016 Matthias Gazzari
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os, os.path
import re
from whoosh.fields import Schema, TEXT, NUMERIC, STORED
from whoosh import index
from whoosh.qparser import MultifieldParser
def createIndex():
	"""Build the on-disk Whoosh index from data/institutes.csv."""
	fields = Schema(
		name=TEXT(stored=True),
		alias=TEXT,
		lat=NUMERIC(numtype=float, stored=True),
		lon=NUMERIC(numtype=float, stored=True),
		isni=STORED,
	)
	# Create the index directory on first use.
	if not os.path.exists('index'):
		os.mkdir('index')
	writer = index.create_in('index', fields).writer()
	with open('data/institutes.csv') as csvfile:
		for record in csv.DictReader(csvfile):
			writer.add_document(
				name=record['name'],
				alias=record['alias'],
				lat=record['lat'],
				lon=record['lon'],
				isni=record['isni'],
			)
	writer.commit()
def query(searchString):
	"""Expand abbreviations in *searchString*, then search the index."""
	# Inline abbreviation expansion (whole-word, case-insensitive).
	with open('data/abbreviations.csv') as csvfile:
		for row in csv.DictReader(csvfile):
			# NOTE(review): row['short'] is interpolated unescaped and the
			# inline (?i) flag is not at the start of the pattern --
			# consider re.escape() plus flags=re.IGNORECASE.
			searchString = re.sub(
				r"\b(?i){}\b".format(row['short']),
				row['long'],
				searchString,
			)
	searchFields = ['name', 'alias']
	ix = index.open_dir('index')
	with ix.searcher() as searcher:
		parsed = MultifieldParser(searchFields, ix.schema).parse(searchString)
		results = searcher.search(parsed, terms=True)
		print(results)
		for hit in results:
			print(hit)
| apache-2.0 | Python |
5f0d976f2f7ad361a01fa4f9008bbbbeb298c517 | Bump version for bugfix | alex/invocations,pyinvoke/invocations,mrjmad/invocations,singingwolfboy/invocations | invocations/_version.py | invocations/_version.py | __version_info__ = (0, 4, 4)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (0, 4, 3)
__version__ = '.'.join(map(str, __version_info__))
| bsd-2-clause | Python |
f75a151b33635cad5604cb9d7f66fc043c4f972a | Fix except handler raises immediately | UITools/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,mociepka/saleor,maferelo/saleor,mociepka/saleor,UITools/saleor | saleor/core/utils/json_serializer.py | saleor/core/utils/json_serializer.py | import json
from django.core.serializers.base import DeserializationError
from django.core.serializers.json import (
DjangoJSONEncoder, PythonDeserializer, Serializer as JsonSerializer)
from prices import Money
MONEY_TYPE = 'Money'
class Serializer(JsonSerializer):
    """Django JSON serializer that emits Money-aware output."""
    def _init_options(self):
        super()._init_options()
        # Swap in the encoder that knows how to tag Money values.
        self.json_kwargs['cls'] = CustomJsonEncoder
class CustomJsonEncoder(DjangoJSONEncoder):
    """JSON encoder serializing Money as a tagged dict (see object_hook)."""
    def default(self, obj):
        if isinstance(obj, Money):
            # Tagged representation that object_hook below can revive.
            return {
                '_type': MONEY_TYPE, 'amount': obj.amount,
                'currency': obj.currency}
        return super().default(obj)
def object_hook(obj):
    """Revive Money instances from the tagged dicts CustomJsonEncoder emits."""
    if '_type' not in obj:
        return obj
    if obj['_type'] != MONEY_TYPE:
        return obj
    return Money(obj['amount'], obj['currency'])
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of JSON data.

    Slightly modified copy of the Django implementation that passes the
    extra ``object_hook`` argument to ``json.loads`` so Money markers are
    revived during deserialization.
    """
    if not isinstance(stream_or_string, (bytes, str)):
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode()
    try:
        objects = json.loads(stream_or_string, object_hook=object_hook)
        yield from PythonDeserializer(objects, **options)
    except Exception as exc:
        # Single handler keeps pylint quiet about "The except handler raises
        # immediately"; pass-through exceptions are re-raised untouched.
        # Fix: isinstance takes a *tuple* of types -- the previous
        # three-argument call raised TypeError instead of checking.
        if isinstance(exc, (GeneratorExit, DeserializationError)):
            raise
        raise DeserializationError() from exc
| import json
from django.core.serializers.base import DeserializationError
from django.core.serializers.json import (
DjangoJSONEncoder, PythonDeserializer, Serializer as JsonSerializer)
from prices import Money
MONEY_TYPE = 'Money'  # marker stored under '_type' for serialized Money
class Serializer(JsonSerializer):
    # Django JSON serializer wired to the Money-aware encoder below.
    def _init_options(self):
        super()._init_options()
        self.json_kwargs['cls'] = CustomJsonEncoder
class CustomJsonEncoder(DjangoJSONEncoder):
    # Serializes prices.Money as a tagged dict; see object_hook.
    def default(self, obj):
        if isinstance(obj, Money):
            return {
                '_type': MONEY_TYPE, 'amount': obj.amount,
                'currency': obj.currency}
        return super().default(obj)
def object_hook(obj):
    # Inverse of CustomJsonEncoder.default for Money marker dicts.
    if '_type' in obj and obj['_type'] == MONEY_TYPE:
        return Money(obj['amount'], obj['currency'])
    return obj
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of JSON data. This is a copy of Django
    implementation with additional argument <object_hook> in json.loads"""
    if not isinstance(stream_or_string, (bytes, str)):
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode()
    try:
        objects = json.loads(stream_or_string, object_hook=object_hook)
        yield from PythonDeserializer(objects, **options)
    except (GeneratorExit, DeserializationError):
        # Pass through generator shutdown and already-wrapped errors.
        raise
    except Exception as exc:
        raise DeserializationError() from exc
| bsd-3-clause | Python |
7b9033db5a8abfb5c71850e776a3f4a5db718d26 | Remove debugging statement. | certain/certain,certain/certain | trunk/certmgr/StoreHandler/__init__.py | trunk/certmgr/StoreHandler/__init__.py | """Module to handle different store types."""
__all__ = ['git', 'svn', 'web']
import abc
class StoreBase(object):
    """Abstract base class for StoreHandler 'plugins'."""
    # NOTE(review): __metaclass__ is the Python 2 spelling; under Python 3
    # this attribute has no effect and the abstract-method checks would
    # not be enforced.
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def setup(self):
        """Setup this specific store object."""
        return
    @abc.abstractmethod
    def fetch(self):
        """Retrieve certificates from the store."""
        return
    @abc.abstractmethod
    def write(self, certobj):
        """Write certificate to the central store."""
        return
    @abc.abstractmethod
    def checkpoint(self):
        """Checkpoint any pending actions on the store."""
        return
    # Context-manager support: setup on entry, checkpoint on exit, so a
    # store can be used as "with store() as s: ...".
    def __enter__(self):
        self.setup()
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.checkpoint()
def storeerror(name):
    """Error method. Default for getattr to deal with unknown StoreType."""
    # Python 2 print statement; reports the unknown type and returns None.
    print "Unknown StoreType: " + name
def dispatch(name, errorfunc=storeerror):
    """Dispatch a store object to handle this type of Store.

    Returns an object of the appropriate type, or None. Also logs a message
    if the store type could not be found.
    """
    # NOTE(review): the errorfunc parameter is accepted but never used --
    # failures below always go through storeerror directly.
    try:
        return __import__('certmgr.StoreHandler.' + name,
                          fromlist=name).store()
    except (ImportError, AttributeError):
        # Unknown or broken plugin module: report it (returns None).
        return storeerror(name)
| """Module to handle different store types."""
__all__ = ['git', 'svn', 'web']
import abc
class StoreBase(object):
"""Abstract base class for StoreHandler 'plugins'."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def setup(self):
"""Setup this specific store object."""
return
@abc.abstractmethod
def fetch(self):
"""Retrieve certificates from the store."""
return
@abc.abstractmethod
def write(self, certobj):
"""Write certificate to the central store."""
return
@abc.abstractmethod
def checkpoint(self):
"""Checkpoint any pending actions on the store."""
return
def __enter__(self):
self.setup()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.checkpoint()
def storeerror(name):
"""Error method. Default for getattr to deal with unknown StoreType."""
print "Unknown StoreType: " + name
def dispatch(name, errorfunc=storeerror):
    """Dispatch a store object to handle this type of Store.

    Returns an object of the appropriate type, or None. Also logs a message
    if the store type could not be found.
    """
    try:
        return __import__('certmgr.StoreHandler.' + name,
                          fromlist=name).store()
    except (ImportError, AttributeError), e:  # Python 2 except syntax
        # NOTE(review): this bare re-raise looks like leftover debugging --
        # it makes the storeerror() fallback below unreachable.
        raise
        return storeerror(name)
| agpl-3.0 | Python |
b5050df1b8e23434a59f53c4a509e6932feb03d9 | update rules_proto digest to ea52a32 (#1245) | googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript,googleapis/gapic-generator-typescript | repositories.bzl | repositories.bzl | load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def gapic_generator_typescript_repositories():
    """Declares the external repositories this workspace depends on.

    Each http_archive is wrapped in maybe() so the repository is only
    defined when the enclosing workspace has not already provided it.
    """
    maybe(
        http_archive,
        name = "build_bazel_rules_nodejs",
        sha256 = "c911b5bd8aee8b0498cc387cacdb5f917098ce477fb4182db07b0ef8a9e045c0",
        urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.7.1/rules_nodejs-4.7.1.tar.gz"],
    )

    # rules_proto is pinned to commit ea52a32 (Bazel mirror listed first).
    maybe(
        http_archive,
        name = "rules_proto",
        sha256 = "f0827d1bbb4abd97f5ce73baed0bd774a86c6103aff2104caf70ba3b2218ec0a",
        strip_prefix = "rules_proto-ea52a32ecd862c5317572cadecaa525c52124f9d",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/ea52a32ecd862c5317572cadecaa525c52124f9d.tar.gz",
            "https://github.com/bazelbuild/rules_proto/archive/ea52a32ecd862c5317572cadecaa525c52124f9d.tar.gz",
        ],
    )

    _rules_gapic_version = "0.9.0"
    # NOTE(review): rules_gapic has no sha256 pin -- consider adding one.
    maybe(
        http_archive,
        name = "rules_gapic",
        strip_prefix = "rules_gapic-%s" % _rules_gapic_version,
        urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version],
    )

    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "d7d204a59fd0d2d2387bd362c2155289d5060f32122c4d1d922041b61191d522",
        strip_prefix = "protobuf-3.21.5",
        urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.5.tar.gz"],
    )
| load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def gapic_generator_typescript_repositories():
maybe(
http_archive,
name = "build_bazel_rules_nodejs",
sha256 = "c911b5bd8aee8b0498cc387cacdb5f917098ce477fb4182db07b0ef8a9e045c0",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.7.1/rules_nodejs-4.7.1.tar.gz"],
)
maybe(
http_archive,
name = "rules_proto",
sha256 = "aa72701f243b13628d052119274174640567455cf60f8de7b0973367c0676d3f",
strip_prefix = "rules_proto-757c3a975a557f21a5edbcea780354c9ebd93159",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/757c3a975a557f21a5edbcea780354c9ebd93159.tar.gz",
"https://github.com/bazelbuild/rules_proto/archive/757c3a975a557f21a5edbcea780354c9ebd93159.tar.gz",
],
)
_rules_gapic_version = "0.9.0"
maybe(
http_archive,
name = "rules_gapic",
strip_prefix = "rules_gapic-%s" % _rules_gapic_version,
urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version],
)
maybe(
http_archive,
name = "com_google_protobuf",
sha256 = "d7d204a59fd0d2d2387bd362c2155289d5060f32122c4d1d922041b61191d522",
strip_prefix = "protobuf-3.21.5",
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.5.tar.gz"],
)
| apache-2.0 | Python |
8be5ffa9b9c69785fc01253a1a5aa2263db8d6e6 | Bump version to 14.0.0a2 | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | resolwe_bio/__about__.py | resolwe_bio/__about__.py | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '14.0.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '14.0.0a1'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| apache-2.0 | Python |
8ab7a9bde85bdec508a1061b4b10b75c3f8323d4 | Update version number | Affirm/plaid-python | plaid/__init__.py | plaid/__init__.py | __version__ = '0.2.18.affirm.2'
from client import Client, require_access_token
| __version__ = '0.2.1'
from client import Client, require_access_token
| mit | Python |
85c1f5d2b6f369ef5790851e93065ea268dc9835 | fix to format_props for string variables | koojh89/pyxley,subodhchhabra/pyxley,MKridler/pyxley,stitchfix/pyxley,stitchfix/pyxley,quevedin/pyxley,stitchfix/pyxley,subodhchhabra/pyxley,quevedin/pyxley,koojh89/pyxley,koojh89/pyxley,quevedin/pyxley,subodhchhabra/pyxley | pyxley/react_template.py | pyxley/react_template.py | from react import jsx
from jinja2 import Template
import json
class ReactTemplate(object):
    """Renders a jinja2 template into JSX, compiles the JSX to plain
    JavaScript and writes the result to ``path``."""

    def __init__(self, template, template_args, path):
        self.transformer = jsx.JSXTransformer()
        self.template = template
        self.args = template_args
        self.path = path

    def write_to_file(self, s):
        """Write the compiled source to the configured output path."""
        with open(self.path, 'w') as out:
            out.write(s)

    def to_js(self):
        """Render the template, transform the JSX and emit the JS file."""
        rendered = Template(self.template).render(**self.args)
        self.write_to_file(self.transformer.transform_string(rendered))
def format_props(props):
    """Turn a props dict into the two JS snippets a component needs.

    Returns a pair of newline-joined strings: variable declarations
    ("var k = <json literal>;") and JSX attribute bindings ("k = {k}").
    """
    declarations = []
    bindings = []
    # Templates are constant, so build them once outside the loop.
    decl_tmpl = Template("var {{k}} = {{v}};")
    bind_tmpl = Template("{{k}} = {{v}}")
    for key, value in list(props.items()):
        declarations.append(decl_tmpl.render(k=key, v=json.dumps(value)))
        bindings.append(bind_tmpl.render(k=key, v="{" + key + "}"))
    return "\n".join(declarations), "\n".join(bindings)
class ReactComponent(ReactTemplate):
    """Generates the bootstrap JS that mounts a named React component.

    The rendered script requires the component bundle, declares each prop
    as a JS variable, and renders the component into the target element.
    """

    # JSX bootstrap template; {{vars}} and {{props}} come from format_props.
    _react = (
        """
        var Component = require("{{path}}").{{name}};
        {{vars}}
        React.render(
            <Component
                {{props}} />,
            document.getElementById("{{id}}")
        );
        """)

    def __init__(self, name, path, element_id, props={}, static_path=""):
        # NOTE(review): mutable default for `props` -- appears read-only
        # here, but worth confirming no caller mutates it.
        vars_, props_ = format_props(props)
        params = {
            "name": name,
            "path": path,
            "vars": vars_,
            "props": props_,
            "id": element_id
        }
        # Expose the raw prop values to the template namespace as well.
        for k, v in list(props.items()):
            params[k] = v
        super(ReactComponent, self).__init__(
            self._react, params, static_path)
        # Compile to JS immediately on construction.
        self.to_js()
| from react import jsx
from jinja2 import Template
import json
class ReactTemplate(object):
"""
"""
def __init__(self, template, template_args, path):
self.transformer = jsx.JSXTransformer()
self.template = template
self.args = template_args
self.path = path
def write_to_file(self, s):
"""
"""
f = open(self.path, 'w')
f.write(s)
f.close()
def to_js(self):
"""
"""
t = Template(self.template)
js = self.transformer.transform_string(t.render(**self.args))
self.write_to_file(js)
def format_props(props):
    """Turn a props dict into JS variable declarations plus JSX attribute
    bindings.

    Returns a pair of newline-joined strings: "var k = <literal>;" lines
    and "k = {k}" attribute snippets.
    """
    vars_ = []
    props_ = []
    for k, v in list(props.items()):
        if isinstance(v, bool):
            # JS booleans are lower-case (True -> true).
            vars_.append(Template("var {{k}} = {{v|lower}};").render(k=k, v=v))
        else:
            # json.dumps yields a valid JS literal for strings, numbers,
            # lists and dicts alike.  Previously strings were interpolated
            # bare, producing invalid JS such as `var name = value;`.
            vars_.append(Template("var {{k}} = {{v}};").render(
                k=k, v=json.dumps(v)))
        props_.append(Template("{{k}} = {{v}}").render(k=k, v="{" + k + "}"))
    return "\n".join(vars_), "\n".join(props_)
class ReactComponent(ReactTemplate):
_react = (
"""
var Component = require("{{path}}").{{name}};
{{vars}}
React.render(
<Component
{{props}} />,
document.getElementById("{{id}}")
);
""")
def __init__(self, name, path, element_id, props={}, static_path=""):
vars_, props_ = format_props(props)
params = {
"name": name,
"path": path,
"vars": vars_,
"props": props_,
"id": element_id
}
for k, v in list(props.items()):
params[k] = v
super(ReactComponent, self).__init__(
self._react, params, static_path)
self.to_js()
| mit | Python |
04ca6e47a2f1d2d60501f74018a28781469e1987 | update script | chapter09/Sparkvent | bin/task_count_redis.py | bin/task_count_redis.py | #!/usr/bin/python
import sys
import os
import time
import datetime
import redis
sys.path.insert(0, '../sparkvent')
from sparkvent.config import Config
from sparkvent.resp_parse import *
ROOT_DIR = os.path.dirname(os.path.abspath(__file__ + "/../"))
def main():
    """Poll the configured endpoint and record task counts in Redis.

    Samples go into one hash whose name is fixed at script start time,
    with one field per sample timestamp.
    """
    config = Config(os.path.abspath(ROOT_DIR + "/conf/config.yml"))
    parser = ParserFactory.get_parser(config.type, config.server)
    redis_host, redis_port = config.redis.split(":")
    db = redis.Redis(host=redis_host, port=redis_port)
    timestamp = datetime.datetime.now()
    # Hash key uses the startup timestamp; fields use per-sample timestamps.
    base_key = "task_count:" + str(timestamp)
    while True:
        data = parser.get_data()
        if data != {}:
            timestamp = datetime.datetime.now()
            print base_key, timestamp, data  # debug trace (Python 2 print)
            db.hset(base_key, timestamp, data)
        time.sleep(config.period)


if __name__ == '__main__':
    main()
| #!/usr/bin/python
import sys
import os
import time
import datetime
import redis
sys.path.insert(0, '../sparkvent')
from sparkvent.config import Config
from sparkvent.resp_parse import *
ROOT_DIR = os.path.dirname(os.path.abspath(__file__ + "/../"))
def main():
config = Config(os.path.abspath(ROOT_DIR + "/conf/config.yml"))
parser = ParserFactory.get_parser(config.type, config.server)
redis_host, redis_port = config.redis.split(":")
db = redis.Redis(host=redis_host, port=redis_port)
timestamp = datetime.datetime.now()
base_key = "task_count:" + str(timestamp)
while True:
data = parser.get_data()
if data != {}:
timestamp = datetime.datetime.now()
db.hset(base_key, timestamp, data)
time.sleep(config.period)
if __name__ == '__main__':
main()
| mit | Python |
8e9d50136a836a23cc07b1398a35d64745f128a7 | add scipy.stats.norm.pdf | google/jax,google/jax,tensorflow/probability,google/jax,tensorflow/probability,google/jax | jax/scipy/stats/norm.py | jax/scipy/stats/norm.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
import scipy.stats as osp_stats
from ... import lax
from ...numpy.lax_numpy import _promote_args_like, _constant_like, _wraps
@_wraps(osp_stats.norm.logpdf)
def logpdf(x, loc=0, scale=1):
    """Log of the normal pdf, computed elementwise from lax primitives."""
    x, loc, scale = _promote_args_like(osp_stats.norm.logpdf, x, loc, scale)
    two = _constant_like(x, 2)
    variance = lax.pow(scale, two)
    # log(2*pi*sigma^2) normalization term.
    log_norm = lax.log(lax.mul(_constant_like(x, 2 * onp.pi), variance))
    # Squared deviation scaled by the variance.
    mahalanobis = lax.div(lax.pow(lax.sub(x, loc), two), variance)
    return lax.div(lax.neg(lax.add(log_norm, mahalanobis)), two)
@_wraps(osp_stats.norm.pdf)
def pdf(x, loc=0, scale=1):
    # Defined as exp(logpdf) for numerical consistency with logpdf.
    return lax.exp(logpdf(x, loc, scale))
| # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
import scipy.stats as osp_stats
from ... import lax
from ...numpy.lax_numpy import _promote_args_like, _constant_like, _wraps
@_wraps(osp_stats.norm.logpdf)
def logpdf(x, loc=0, scale=1):
x, loc, scale = _promote_args_like(osp_stats.norm.logpdf, x, loc, scale)
two = _constant_like(x, 2)
scale_sqrd = lax.pow(scale, two)
log_normalizer = lax.log(lax.mul(_constant_like(x, 2 * onp.pi), scale_sqrd))
quadratic = lax.div(lax.pow(lax.sub(x, loc), two), scale_sqrd)
return lax.div(lax.neg(lax.add(log_normalizer, quadratic)), two)
| apache-2.0 | Python |
9d09cf598f7340b0f03b9b769c67c841cf8b2fdf | Use luigi.cmdline in bin_test.py | bmaggard/luigi,ThQ/luigi,aeron15/luigi,LamCiuLoeng/luigi,lichia/luigi,humanlongevity/luigi,PeteW/luigi,moandcompany/luigi,moritzschaefer/luigi,DomainGroupOSS/luigi,edx/luigi,moritzschaefer/luigi,javrasya/luigi,Yoone/luigi,stephenpascoe/luigi,walkers-mv/luigi,casey-green/luigi,thejens/luigi,bmaggard/luigi,drincruz/luigi,oldpa/luigi,casey-green/luigi,harveyxia/luigi,altaf-ali/luigi,mfcabrera/luigi,LamCiuLoeng/luigi,bowlofstew/luigi,meyerson/luigi,Tarrasch/luigi,walkers-mv/luigi,walkers-mv/luigi,ZhenxingWu/luigi,meyerson/luigi,edx/luigi,mbruggmann/luigi,adaitche/luigi,Dawny33/luigi,leafjungle/luigi,wakamori/luigi,linsomniac/luigi,Tarrasch/luigi,harveyxia/luigi,springcoil/luigi,sahitya-pavurala/luigi,vine/luigi,spotify/luigi,huiyi1990/luigi,bowlofstew/luigi,sahitya-pavurala/luigi,leafjungle/luigi,sahitya-pavurala/luigi,ivannotes/luigi,huiyi1990/luigi,DomainGroupOSS/luigi,springcoil/luigi,ViaSat/luigi,linearregression/luigi,ChrisBeaumont/luigi,percyfal/luigi,linsomniac/luigi,bowlofstew/luigi,rizzatti/luigi,springcoil/luigi,soxofaan/luigi,walkers-mv/luigi,PeteW/luigi,ehdr/luigi,dkroy/luigi,SkyTruth/luigi,lichia/luigi,graingert/luigi,casey-green/luigi,joeshaw/luigi,ivannotes/luigi,foursquare/luigi,jw0201/luigi,ZhenxingWu/luigi,Houzz/luigi,belevtsoff/luigi,linsomniac/luigi,Magnetic/luigi,Houzz/luigi,foursquare/luigi,rizzatti/luigi,vine/luigi,pkexcellent/luigi,stephenpascoe/luigi,kevhill/luigi,graingert/luigi,tuulos/luigi,upworthy/luigi,ThQ/luigi,qpxu007/luigi,mfcabrera/luigi,soxofaan/luigi,Dawny33/luigi,Yoone/luigi,ehdr/luigi,Wattpad/luigi,JackDanger/luigi,stroykova/luigi,kevhill/luigi,thejens/luigi,republic-analytics/luigi,foursquare/luigi,Tarrasch/luigi,drincruz/luigi,huiyi1990/luigi,jw0201/luigi,h3biomed/luigi,ViaSat/luigi,humanlongevity/luigi,kevhill/luigi,joeshaw/luigi,Yoone/luigi,lungetech/luigi,SkyTruth/luigi,ivannotes/luigi,stephenpascoe/luigi,penelopy/luigi,oldpa/luigi,LamCiuLoeng/luigi,V
iaSat/luigi,bmaggard/luigi,Wattpad/luigi,aeron15/luigi,belevtsoff/luigi,Magnetic/luigi,percyfal/luigi,qpxu007/luigi,ivannotes/luigi,mbruggmann/luigi,jamesmcm/luigi,altaf-ali/luigi,Magnetic/luigi,moritzschaefer/luigi,samuell/luigi,moandcompany/luigi,dlstadther/luigi,ZhenxingWu/luigi,thejens/luigi,PeteW/luigi,dhruvg/luigi,javrasya/luigi,belevtsoff/luigi,mfcabrera/luigi,jamesmcm/luigi,republic-analytics/luigi,spotify/luigi,ThQ/luigi,jw0201/luigi,fw1121/luigi,joeshaw/luigi,dstandish/luigi,graingert/luigi,realgo/luigi,javrasya/luigi,adaitche/luigi,pkexcellent/luigi,ehdr/luigi,oldpa/luigi,rizzatti/luigi,linearregression/luigi,mbruggmann/luigi,stroykova/luigi,penelopy/luigi,ChrisBeaumont/luigi,dkroy/luigi,upworthy/luigi,wakamori/luigi,moandcompany/luigi,realgo/luigi,soxofaan/luigi,riga/luigi,springcoil/luigi,upworthy/luigi,pkexcellent/luigi,riga/luigi,altaf-ali/luigi,samepage-labs/luigi,fabriziodemaria/luigi,tuulos/luigi,Dawny33/luigi,JackDanger/luigi,linsomniac/luigi,adaitche/luigi,wakamori/luigi,altaf-ali/luigi,rayrrr/luigi,DomainGroupOSS/luigi,SkyTruth/luigi,meyerson/luigi,ContextLogic/luigi,rayrrr/luigi,republic-analytics/luigi,javrasya/luigi,dkroy/luigi,edx/luigi,samepage-labs/luigi,foursquare/luigi,fw1121/luigi,jw0201/luigi,samepage-labs/luigi,drincruz/luigi,aeron15/luigi,drincruz/luigi,linearregression/luigi,vine/luigi,qpxu007/luigi,lungetech/luigi,Tarrasch/luigi,Magnetic/luigi,graingert/luigi,samepage-labs/luigi,spotify/luigi,dstandish/luigi,bowlofstew/luigi,humanlongevity/luigi,tuulos/luigi,slvnperron/luigi,casey-green/luigi,edx/luigi,lungetech/luigi,stroykova/luigi,dlstadther/luigi,dlstadther/luigi,mbruggmann/luigi,PeteW/luigi,ContextLogic/luigi,Houzz/luigi,lichia/luigi,ViaSat/luigi,slvnperron/luigi,ThQ/luigi,humanlongevity/luigi,fabriziodemaria/luigi,kevhill/luigi,bmaggard/luigi,dstandish/luigi,realgo/luigi,dhruvg/luigi,qpxu007/luigi,h3biomed/luigi,meyerson/luigi,penelopy/luigi,spotify/luigi,mfcabrera/luigi,linearregression/luigi,samuell/luigi,belevtsoff/luigi,d
hruvg/luigi,rayrrr/luigi,Dawny33/luigi,leafjungle/luigi,rayrrr/luigi,ContextLogic/luigi,fw1121/luigi,huiyi1990/luigi,ChrisBeaumont/luigi,dstandish/luigi,slvnperron/luigi,moandcompany/luigi,riga/luigi,stephenpascoe/luigi,lichia/luigi,h3biomed/luigi,vine/luigi,thejens/luigi,lungetech/luigi,tuulos/luigi,penelopy/luigi,dlstadther/luigi,h3biomed/luigi,moritzschaefer/luigi,ChrisBeaumont/luigi,Houzz/luigi,fw1121/luigi,dhruvg/luigi,stroykova/luigi,fabriziodemaria/luigi,joeshaw/luigi,sahitya-pavurala/luigi,fabriziodemaria/luigi,SkyTruth/luigi,LamCiuLoeng/luigi,jamesmcm/luigi,Wattpad/luigi,pkexcellent/luigi,dkroy/luigi,harveyxia/luigi,upworthy/luigi,rizzatti/luigi,JackDanger/luigi,ContextLogic/luigi,ZhenxingWu/luigi,oldpa/luigi,percyfal/luigi,riga/luigi,Yoone/luigi,wakamori/luigi,DomainGroupOSS/luigi,republic-analytics/luigi,adaitche/luigi,jamesmcm/luigi,aeron15/luigi,realgo/luigi,percyfal/luigi,ehdr/luigi,soxofaan/luigi,samuell/luigi,JackDanger/luigi,leafjungle/luigi,slvnperron/luigi,harveyxia/luigi,samuell/luigi | test/bin_test.py | test/bin_test.py | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import server_test
import luigi.cmdline
from helpers import with_config
class LuigidTest(server_test.ServerTestRun):
    """Runs the shared server test suite against a foreground `luigid`
    started through luigi.cmdline."""

    @with_config({'scheduler': {'state_path': '/tmp/luigi-test-server-state'}})
    def run_server(self):
        luigi.cmdline.luigid(['--port', str(self._api_port)])
class LuigidDaemonTest(server_test.ServerTestRun):
    """Same suite as LuigidTest, but exercises the --background (daemon)
    code path with actual daemonization stubbed out."""

    @with_config({'scheduler': {'state_path': '/tmp/luigi-test-server-state'}})
    @mock.patch('daemon.DaemonContext')  # avoid really forking/daemonizing
    def run_server(self, daemon_context):
        luigi.cmdline.luigid(['--port', str(self._api_port), '--background', '--logdir', '.', '--pidfile', 'test.pid'])
| # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import imp
import mock
import server_test
from helpers import with_config
class LuigidTest(server_test.ServerTestRun):
@with_config({'scheduler': {'state_path': '/tmp/luigi-test-server-state'}})
def run_server(self):
luigid = imp.load_source('luigid', 'bin/luigid')
luigid.main(['--port', str(self._api_port)])
class LuigidDaemonTest(server_test.ServerTestRun):
@with_config({'scheduler': {'state_path': '/tmp/luigi-test-server-state'}})
@mock.patch('daemon.DaemonContext')
def run_server(self, daemon_context):
luigid = imp.load_source('luigid', 'bin/luigid')
luigid.main(['--port', str(self._api_port), '--background', '--logdir', '.', '--pidfile', 'test.pid'])
| apache-2.0 | Python |
18e90bd413978f5140aa8977b032ae6db4a59105 | Refactor test/variable names to be more generally applicable. | rucker/dotfiles-manager | test_dotfiles.py | test_dotfiles.py | #!/usr/bin/python
import unittest
import mock
import dotfilesinstaller
import platform
import sys
import os
import io
class DotfilesTest(unittest.TestCase):
    """Tests for the dotfiles installer script."""

    def setUp(self):
        dotfilesinstaller.init()
        dotfilesinstaller.cleanUp()

    @mock.patch('platform.system', mock.MagicMock(return_value='Darwin'))
    def testWhenSystemIsDarwinInstallerIdentifiesSystemAsDarwin(self):
        dotfilesinstaller.identifySystem()
        self.assertTrue(sys.stdout.getvalue().strip().endswith('Darwin'))

    @mock.patch('platform.system', mock.MagicMock(return_value='Linux'))
    def testWhenSystemIsLinuxInstallerIdentifiesSystemAsLinux(self):
        dotfilesinstaller.identifySystem()
        self.assertTrue(sys.stdout.getvalue().strip().endswith('Linux'))

    @mock.patch('platform.system', mock.MagicMock(return_value='Windows'))
    def testWhenSystemIsWindowsInstallerIdentifiesSystemAsWindowsAndExitsWithCode1(self):
        with self.assertRaises(SystemExit) as cm:
            dotfilesinstaller.identifySystem()
        # These checks must run *after* the with-block: identifySystem()
        # raises SystemExit, so nothing following it inside the block would
        # execute.  Also use self.assertEqual -- the bare assertEqual that
        # was here previously would have raised NameError if reached.
        self.assertTrue(sys.stdout.getvalue().strip().endswith('not supported!'))
        self.assertEqual(cm.exception.code, 1)

    def testWhenBashrcExistsInstallerWillDeleteIt(self):
        if not os.path.isfile('bashrc'):
            with open('bashrc', 'w') as testbashrc:
                testbashrc.write('Test file...')
        dotfilesinstaller.cleanUp()
        self.assertTrue(sys.stdout.getvalue().strip().endswith('bashrc'))
        self.assertFalse(os.path.isfile('bashrc'))

    def testWhenBashrcDoesNotExistInstallerWillNotAttemptDeletion(self):
        if os.path.isfile('bashrc'):
            os.remove('bashrc')
        try:
            dotfilesinstaller.cleanUp()
        except OSError:
            self.fail("Tried to delete nonexistent file!")

    def testBashrcFileStartsWithShebang(self):
        dotfilesinstaller.writeFileHeader()
        with open('bashrc', 'r') as bashrc:
            self.assertEqual(bashrc.readline(), "#!/bin/bash\n")

    # Shared fake input file for the writeSection test below.
    inputFileMock = io.StringIO(u'some_token=some_value\n')

    def testBashInputFileContentsAreWrittenToBashrc(self):
        dotfilesinstaller.writeSection(self.inputFileMock, False)
        expected = self.inputFileMock.getvalue()
        with open('bashrc', 'r') as bashrc:
            written = bashrc.read()
        # Whatever was written must come from the mock input.
        self.assertTrue(written in expected)
suite = unittest.TestLoader().loadTestsFromTestCase(DotfilesTest)
unittest.main(module=__name__, buffer=True, exit=False)
| #!/usr/bin/python
import unittest
import mock
import dotfilesinstaller
import platform
import sys
import os
import io
class DotfilesTest(unittest.TestCase):
def setUp(self):
dotfilesinstaller.init()
dotfilesinstaller.cleanUp()
@mock.patch('platform.system', mock.MagicMock(return_value='Darwin'))
def testWhenSystemIsDarwinInstallerIdentifiesSystemAsDarwin(self):
dotfilesinstaller.identifySystem()
assert(sys.stdout.getvalue().strip().endswith('Darwin'))
@mock.patch('platform.system', mock.MagicMock(return_value='Linux'))
def testWhenSystemIsLinuxInstallerIdentifiesSystemAsLinux(self):
dotfilesinstaller.identifySystem()
assert(sys.stdout.getvalue().strip().endswith('Linux'))
@mock.patch('platform.system', mock.MagicMock(return_value='Windows'))
def testWhenSystemIsWindowsInstallerIdentifiesSystemAsWindowsAndExitsWithCode1(self):
with self.assertRaises(SystemExit) as cm:
dotfilesinstaller.identifySystem()
assert(sys.stdout.getvalue().strip().endswith('not supported!'))
assertEqual(cm.exception.code, 1)
def testWhenBashrcExistsInstallerWillDeleteIt(self):
if not os.path.isfile('bashrc'):
testbashrc = open('bashrc','w')
testbashrc.write('Test file...')
testbashrc.close()
dotfilesinstaller.cleanUp()
assert(sys.stdout.getvalue().strip().endswith('bashrc'))
self.assertFalse(os.path.isfile('bashrc'))
def testWhenBashrcDoesNotExistInstallerWillNotAttemptDeletion(self):
if os.path.isfile('bashrc'):
os.remove('bashrc')
try:
dotfilesinstaller.cleanUp()
except OSError:
self.fail("Tried to delete nonexistent file!")
def testBashrcFileStartsWithShebang(self):
dotfilesinstaller.writeFileHeader()
with open('bashrc','r') as bashrc:
self.assertEquals(bashrc.readline(), "#!/bin/bash\n")
bashPrivateMock = io.StringIO(u'some_token=some_value\n')
def testWhenBashPrivateFileExistsItsContentsAreWrittenToBashrc(self):
dotfilesinstaller.writeSection(self.bashPrivateMock, False)
foundExpectedResult = False
mock = self.bashPrivateMock.getvalue()
with open('bashrc','r') as bashrc:
result = bashrc.read()
self.assertTrue(result in mock)
suite = unittest.TestLoader().loadTestsFromTestCase(DotfilesTest)
unittest.main(module=__name__, buffer=True, exit=False)
| mit | Python |
bca06eb56d0e1ad51271220ec3448d681a91983a | Test fallback on libscrypt loading failures | jvarho/pylibscrypt,jvarho/pylibscrypt | test_fallback.py | test_fallback.py | #!/usr/bin/env python
# Copyright (c) 2014-2015, Jan Varho
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import ctypes.util
import platform
import sys
if '-p' in sys.argv:
platform.python_implementation = lambda:'PyPy'
def raises(e):
    """Return a callable that ignores its arguments and raises *e*."""
    def _thrower(*args, **kwargs):
        raise e
    return _thrower
def unimport(mod=None):
    """Drop pylibscrypt (and optionally *mod*) from sys.modules.

    Forces the package -- and its helper submodules -- to be re-imported
    from scratch on the next ``import pylibscrypt`` statement.  The top
    package must currently be imported; submodules may be absent.
    """
    del sys.modules['pylibscrypt']
    for name in ('pylibscrypt.common', 'pylibscrypt.mcf',
                 'pylibscrypt.libsodium_load'):
        sys.modules.pop(name, None)
    if mod is not None:
        sys.modules.pop(mod, None)
tmp1 = ctypes.util.find_library
tmp2 = ctypes.cdll.LoadLibrary
tmp3 = ctypes.CDLL
ctypes.util.find_library = lambda *args, **kw: None
ctypes.cdll.LoadLibrary = lambda *args, **kw: None
import pylibscrypt
ctypes.util.find_library = tmp1
ctypes.cdll.LoadLibrary = tmp2
unimport('pylibscrypt.pylibscrypt')
ctypes.CDLL = lambda *args, **kw: None
import pylibscrypt
unimport('pylibscrypt.pylibscrypt')
ctypes.CDLL = raises(OSError)
import pylibscrypt
ctypes.CDLL = tmp3
unimport('pylibscrypt.pylibscrypt')
ctypes.CDLL = lambda *args, **kw: None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
| #!/usr/bin/env python
# Copyright (c) 2014-2015, Jan Varho
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import ctypes.util
import platform
import sys
if '-p' in sys.argv:
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules.pop('pylibscrypt.libsodium_load', None)
tmp1 = ctypes.util.find_library
tmp2 = ctypes.cdll.LoadLibrary
ctypes.util.find_library = lambda *args, **kw: None
ctypes.cdll.LoadLibrary = lambda *args, **kw: None
import pylibscrypt
ctypes.util.find_library = tmp1
ctypes.cdll.LoadLibrary = tmp2
unimport()
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
| isc | Python |
50838e2d377409c02757af5d4686957a754c641f | add travis ci | kute/eventor | test/test.py | test/test.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'kute'
# __mtime__ = '2016/12/25 17:47'
"""
"""
# from __future__ import absolute_import
import unittest
from eventor.core import Eventor
from eventor.util import EventorUtil
import os
class SimpleTest(unittest.TestCase):
    """Smoke tests for Eventor batch execution."""

    def test_run_with_tasklist(self):
        # Every element should come back incremented by `times`.
        times = 2
        elelist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

        def func(x):
            return x + times

        e = Eventor(threadcount=3, taskunitcount=3, func=func, interval=2)
        # NOTE(review): `async` became a reserved word in Python 3.7, so
        # this keyword argument only parses on older interpreters --
        # confirm the supported Python versions.
        result = e.run_with_tasklist(elelist, async=True, timeout=3)
        self.assertEqual(sum(result), sum(elelist) + len(elelist) * times)

    def test_run_with_file(self):
        times = 2
        elelist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        # NOTE(review): `e` is immediately rebound from EventorUtil to
        # Eventor below; the EventorUtil instance appears unused.
        e = EventorUtil()
        # Relative path: assumes the test runs from the test/ directory.
        file = "../resources/data.txt"
        print(file)

        def func(x):
            return int(x) + times

        e = Eventor(threadcount=3, taskunitcount=3, func=func, interval=2)
        result = e.run_with_file(file, async=True, timeout=3)
        self.assertEqual(sum(result), sum(elelist) + len(elelist) * times)
if __name__ == '__main__':
unittest.main()
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'kute'
# __mtime__ = '2016/12/25 17:47'
"""
"""
# from __future__ import absolute_import
import unittest
from eventor.core import Eventor
from eventor.util import EventorUtil
import os
class SimpleTest(unittest.TestCase):
def test_run_with_tasklist(self):
times = 2
elelist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
def func(x):
return x + times
e = Eventor(threadcount=3, taskunitcount=3, func=func, interval=2)
result = e.run_with_tasklist(elelist, async=True, timeout=3)
self.assertEqual(sum(result), sum(elelist) + len(elelist) * times)
def test_run_with_file(self):
times = 2
elelist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
e = EventorUtil()
file = os.path.join(e.get_dir(relative=".."), "data.txt")
print(file)
def func(x):
return int(x) + times
e = Eventor(threadcount=3, taskunitcount=3, func=func, interval=2)
result = e.run_with_file(file, async=True, timeout=3)
self.assertEqual(sum(result), sum(elelist) + len(elelist) * times)
if __name__ == '__main__':
unittest.main()
| mit | Python |
eb53eeeffb1c2d6bae44b7cd884f01ffd8cb64cd | add note regarding version of pandocfilters | cagix/pandoc-lecture | textohtml.py | textohtml.py | #!/usr/bin/env python
"""
Pandoc filter to replace certain LaTeX macros with matching HTML tags.
In my beamer slides I use certain macros like `\blueArrow` which produces an
arrow in deep blue color. This filter translates this TeX macros into the
corresponding HTML markup.
Note, that the `html.css` must also be included in the template for proper
rendering.
"""
from pandocfilters import toJSONFilter, attributes, Span, Str, Space, RawInline, Image
import re
trans = [{'class': 'blueArrow', 're': re.compile('\\\\blueArrow'), 'cont': "=>", 'key': 'Str'},
{'class': 'alert', 're': re.compile('\\\\alert\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'Alert', 're': re.compile('\\\\Alert\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'code', 're': re.compile('\\\\code\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'bsp', 're': re.compile('\\\\bsp\{(.*)\}$'), 'cont': 1, 'key': 'Grp'}]
cboxStart = re.compile('\\\\cboxbegin')
cboxEnd = re.compile('\\\\cboxend')
image = re.compile('\\\\includegraphics.*?\{(.*)\}$')
def textohtml(key, value, format, meta):
if key == 'RawInline':
fmt, s = value
if fmt == "tex":
for x in trans:
m = x['re'].match(s)
if m:
return [Span(attributes({'class': x['class']}),
[Str( x['cont'] if x['key']=='Str' else m.group(x['cont']) )]),
Space()]
if cboxStart.match(s):
return RawInline("html", "<span class='cbox'>")
if cboxEnd.match(s):
return RawInline("html", "</span>")
if image.match(s):
m = image.match(s)
return Image([Str("description")], [m.group(1),""]) # works only for pandocfilters < 1.3.0
if __name__ == "__main__":
toJSONFilter(textohtml)
| #!/usr/bin/env python
"""
Pandoc filter to replace certain LaTeX macros with matching HTML tags.
In my beamer slides I use certain macros like `\blueArrow` which produces an
arrow in deep blue color. This filter translates this TeX macros into the
corresponding HTML markup.
Note, that the `html.css` must also be included in the template for proper
rendering.
"""
from pandocfilters import toJSONFilter, attributes, Span, Str, Space, RawInline, Image
import re
trans = [{'class': 'blueArrow', 're': re.compile('\\\\blueArrow'), 'cont': "=>", 'key': 'Str'},
{'class': 'alert', 're': re.compile('\\\\alert\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'Alert', 're': re.compile('\\\\Alert\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'code', 're': re.compile('\\\\code\{(.*)\}$'), 'cont': 1, 'key': 'Grp'},
{'class': 'bsp', 're': re.compile('\\\\bsp\{(.*)\}$'), 'cont': 1, 'key': 'Grp'}]
cboxStart = re.compile('\\\\cboxbegin')
cboxEnd = re.compile('\\\\cboxend')
image = re.compile('\\\\includegraphics.*?\{(.*)\}$')
def textohtml(key, value, format, meta):
if key == 'RawInline':
fmt, s = value
if fmt == "tex":
for x in trans:
m = x['re'].match(s)
if m:
return [Span(attributes({'class': x['class']}),
[Str( x['cont'] if x['key']=='Str' else m.group(x['cont']) )]),
Space()]
if cboxStart.match(s):
return RawInline("html", "<span class='cbox'>")
if cboxEnd.match(s):
return RawInline("html", "</span>")
if image.match(s):
m = image.match(s)
return Image([Str("description")], [m.group(1),""])
if __name__ == "__main__":
toJSONFilter(textohtml)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.